allow running ucsc_gather --fastq without specifying an analysis name or model
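With only --fastq (plus the usual server/auth options) the script now just generates the condor fastq-extraction scripts; --name and --load-model are only needed for the later --scan-submission / --makeddf steps. Illustrative invocation (library_map.txt stands in for the library-id to result-directory map file passed as an argument): ucsc_gather.py --fastq library_map.txt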
diff --git a/extra/ucsc_encode_submission/ucsc_gather.py b/extra/ucsc_encode_submission/ucsc_gather.py
index eb315c7ffc21ef5c6789ab19618fb73eee75491b..6172df70a5b11947505bede7bc5a51dc9179754d 100755
--- a/extra/ucsc_encode_submission/ucsc_gather.py
+++ b/extra/ucsc_encode_submission/ucsc_gather.py
 #!/usr/bin/env python
 from ConfigParser import SafeConfigParser
+import fnmatch
 from glob import glob
 import json
 import logging
+import netrc
 from optparse import OptionParser
 import os
 from pprint import pprint, pformat
 import shlex
 from StringIO import StringIO
-import time
+import stat
+from subprocess import Popen, PIPE
 import sys
+import time
 import types
 import urllib
 import urllib2
 import urlparse
 
-
 from htsworkflow.util import api
-from htsworkflow.pipelines.sequences import \
-    create_sequence_table, \
-    scan_for_sequences
+from htsworkflow.util.rdfhelp import \
+     dafTermOntology, \
+     fromTypedNode, \
+     get_model, \
+     get_serializer, \
+     load_into_model, \
+     submissionOntology 
+from htsworkflow.submission.daf import DAFMapper, get_submission_uri
+from htsworkflow.submission.condorfastq import CondorFastqExtract
+
+logger = logging.getLogger('ucsc_gather')
 
-def make_submission_name(ininame):
-    base = os.path.basename(ininame)
-    name, ext = os.path.splitext(base)
-    return name + '.tgz'
+def main(cmdline=None):
+    parser = make_parser()
+    opts, args = parser.parse_args(cmdline)
+    
+    if opts.debug:
+        logging.basicConfig(level = logging.DEBUG )
+    elif opts.verbose:
+        logging.basicConfig(level = logging.INFO )
+    else:
+        logging.basicConfig(level = logging.WARNING )        
+    
+    apidata = api.make_auth_from_opts(opts, parser)
 
-def make_ddf_name(pathname):
-    base = os.path.basename(pathname)
-    name, ext = os.path.splitext(base)
-    return name + '.ddf'
+    model = get_model(opts.load_model)
+    if opts.name:
+        mapper = DAFMapper(opts.name, opts.daf,  model)
+        submission_uri = get_submission_uri(opts.name)
+        
+    if opts.load_rdf is not None:
+        if opts.name is None:
+            parser.error("--load-rdf requires --name")
+        load_into_model(model, 'turtle', opts.load_rdf, submission_uri)
 
-def make_condor_name(pathname):
-    base = os.path.basename(pathname)
-    name, ext = os.path.splitext(base)
-    return name + '.condor'
+    if opts.makeddf and opts.daf is None:
+        parser.error("Please specify your daf when making ddf files")
+
+    if (opts.scan_submission or opts.makeddf) and opts.name is None:
+        parser.error("--scan-submission and --makeddf require --name")
+
+    if len(args) == 0:
+        parser.error("I need at least one library submission-dir input file")
+        
+    library_result_map = []
+    for a in args:
+        library_result_map.extend(read_library_result_map(a))
+
+    if opts.make_tree_from is not None:
+        make_tree_from(opts.make_tree_from, library_result_map)
+            
+    #if opts.daf is not None:
+    #    link_daf(opts.daf, library_result_map)
+
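+    # --fastq: generate condor scripts to build any needed fastq files from the sequence archive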
+    if opts.fastq:
+        extractor = CondorFastqExtract(opts.host, apidata, opts.sequence,
+                                       force=opts.force)
+        extractor.build_fastqs(library_result_map)
+
+    if opts.scan_submission:
+        scan_submission_dirs(mapper, library_result_map)
+
+    if opts.makeddf:
+        make_all_ddfs(mapper, library_result_map, force=opts.force)
+
+    if opts.print_rdf:
+        writer = get_serializer()
+        print writer.serialize_model_to_string(model)
+
+        
+def make_parser():
+    parser = OptionParser()
+
+    parser.add_option('--name', help="Set submission name")
+    parser.add_option('--load-model', default=None,
+      help="Load model database")
+    parser.add_option('--load-rdf', default=None,
+      help="load rdf statements into model")
+    parser.add_option('--print-rdf', action="store_true", default=False,
+      help="print ending model state")
+
+    # commands
+    parser.add_option('--make-tree-from',
+                      help="create directories & link data files",
+                      default=None)
+    parser.add_option('--fastq', help="generate scripts for making fastq files",
+                      default=False, action="store_true")
+
+    parser.add_option('--scan-submission', default=False, action="store_true",
+                      help="Import metadata for submission into our model")
     
-def make_submit_script(target, header, body_list):
-    """
-    write out a text file
+    parser.add_option('--makeddf', help='make the ddfs', default=False,
+                      action="store_true")
+    
+    parser.add_option('--daf', default=None, help='specify daf name')
+    parser.add_option('--force', default=False, action="store_true",
+                      help="Force regenerating fastqs")
 
-    this was intended for condor submit scripts
+    # debugging
+    parser.add_option('--verbose', default=False, action="store_true",
+                      help='verbose logging')
+    parser.add_option('--debug', default=False, action="store_true",
+                      help='debug logging')
+
+    api.add_auth_options(parser)
     
-    Args:
-      target (str or stream): 
-        if target is a string, we will open and close the file
-        if target is a stream, the caller is responsible.
-
-      header (str);
-        header to write at the beginning of the file
-      body_list (list of strs):
-        a list of blocks to add to the file.
+    return parser
+
+def make_tree_from(source_path, library_result_map):
+    """Create a tree using data files from source path.
     """
-    if type(target) in types.StringTypes:
-        f = open(target,'w')
-    else:
-        f = target
-    f.write(header)
-    for entry in body_list:
-        f.write(entry)
-    if type(target) in types.StringTypes:
-        f.close()
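+    # make each library's result dir and symlink in its files from source_path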
+    for lib_id, lib_path in library_result_map:
+        if not os.path.exists(lib_path):
+            logging.info("Making dir {0}".format(lib_path))
+            os.mkdir(lib_path)
+        source_lib_dir = os.path.join(source_path, lib_path)
+        if not os.path.exists(source_lib_dir):
+            logging.warn(
+                "Source dir {0} does not exist, skipping".format(source_lib_dir))
+            continue
+        for filename in os.listdir(source_lib_dir):
+            source_pathname = os.path.join(source_lib_dir, filename)
+            target_pathname = os.path.join(lib_path, filename)
+            if not os.path.exists(source_pathname):
+                raise IOError("{0} does not exist".format(source_pathname))
+            if not os.path.exists(target_pathname):
+                os.symlink(source_pathname, target_pathname)
+                logging.info(
+                    'LINK {0} to {1}'.format(source_pathname, target_pathname))
+    
 
-def parse_filelist(file_string):
-    return file_string.split(',')
+def link_daf(daf_path, library_result_map):
+    if not os.path.exists(daf_path):
+        raise RuntimeError("%s does not exist, how can I link to it?" % (daf_path,))
 
-def validate_filelist(files):
-    """
-    Die if a file doesn't exist in a file list
+    base_daf = os.path.basename(daf_path)
+    
+    for lib_id, result_dir in library_result_map:
+        if not os.path.exists(result_dir):
+            raise RuntimeError("Couldn't find target directory %s" %(result_dir,))
+        submission_daf = os.path.join(result_dir, base_daf)
+        if not os.path.exists(submission_daf):
+            if not os.path.exists(daf_path):
+                raise RuntimeError("Couldn't find daf: %s" %(daf_path,))
+            os.link(daf_path, submission_daf)
+
+
+def scan_submission_dirs(view_map, library_result_map):
+    """Look through our submission directories and collect needed information
     """
-    for f in files:
-        if not os.path.exists(f):
-            raise RuntimeError("%s does not exist" % (f,))
+    for lib_id, result_dir in library_result_map:
+        view_map.import_submission_dir(result_dir, lib_id)
+        
+def make_all_ddfs(view_map, library_result_map, make_condor=True, force=False):
+    dag_fragment = []
+    for lib_id, result_dir in library_result_map:
+        submissionNode = view_map.get_submission_node(result_dir)
+        dag_fragment.extend(
+            make_ddf(view_map, submissionNode, make_condor, result_dir)
+        )
+
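+    # collect the per-submission condor job fragments into a single DAGMan file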
+    if make_condor and len(dag_fragment) > 0:
+        dag_filename = 'submission.dagman'
+        if not force and os.path.exists(dag_filename):
+            logging.warn("%s exists, please delete" % (dag_filename,))
+        else:
+            f = open(dag_filename,'w')
+            f.write( os.linesep.join(dag_fragment))
+            f.write( os.linesep )
+            f.close()
+            
 
-def read_ddf_ini(filename, output=sys.stdout):
+def make_ddf(view_map, submissionNode, make_condor=False, outdir=None):
     """
-    Read a ini file and dump out a tab delmited text file
+    Make ddf files, and bonus condor file
     """
-    file_list = []
-    config = SafeConfigParser()
-    config.read(filename)
+    dag_fragments = []
+    curdir = os.getcwd()
+    if outdir is not None:
+        os.chdir(outdir)
 
-    order_by = shlex.split(config.get("config", "order_by"))
+    name = fromTypedNode(view_map.model.get_target(submissionNode, submissionOntology['name']))
+    if name is None:
+        logging.error("Need name for %s" % (str(submissionNode)))
+        return []
+    
+    ddf_name = name + '.ddf'
+    output = sys.stdout
+    # output = open(ddf_name,'w')
 
-    output.write("\t".join(order_by))
+    # filename goes first
+    variables = ['filename']
+    variables.extend(view_map.get_daf_variables())
+    output.write('\t'.join(variables))
     output.write(os.linesep)
-    sections = config.sections()
-    sections.sort()
-    for section in sections:
-        if section == "config":
-            # skip the config block
-            continue
-        values = []
-        for key in order_by:
-            v = config.get(section, key)
-            values.append(v)
-            if key == 'files':
-                file_list.extend(parse_filelist(v))
-                
-        output.write("\t".join(values))
+    
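+    # write one ddf row per view attached to this submission node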
+    submission_views = view_map.model.get_targets(submissionNode, submissionOntology['has_view'])
+    file_list = []
+    for viewNode in submission_views:
+        record = []
+        for variable_name in variables:
+            varNode = dafTermOntology[variable_name]
+            values = [fromTypedNode(v) for v in list(view_map.model.get_targets(viewNode, varNode))]
+            if variable_name == 'filename':
+                file_list.extend(values)
+            if len(values) == 0:
+                attribute = "#None#"
+            elif len(values) == 1:
+                attribute = values[0]
+            else:
+                attribute = ",".join(values)
+            record.append(attribute)
+        output.write('\t'.join(record))
         output.write(os.linesep)
-    return file_list
             
+    logging.info(
+        "Examined {0}, found files: {1}".format(
+            str(submissionNode), ", ".join(file_list)))
+
+    #file_list.append(daf_name)
+    #if ddf_name is not None:
+    #    file_list.append(ddf_name)
+    #
+    #if make_condor:
+    #    archive_condor = make_condor_archive_script(ininame, file_list)
+    #    upload_condor = make_condor_upload_script(ininame)
+    #    
+    #    dag_fragments.extend( 
+    #        make_dag_fragment(ininame, archive_condor, upload_condor)
+    #    ) 
+        
+    os.chdir(curdir)
+    
+    return dag_fragments
+
+
+def read_library_result_map(filename):
+    """
+    Read a file that maps library id to result directory.
+    Does not support spaces in filenames. 
+    
+    For example:
+      10000 result/foo/bar
+    """
+    stream = open(filename,'r')
+
+    results = []
+    for line in stream:
+        line = line.rstrip()
+        if not line.startswith('#') and len(line) > 0 :
+            library_id, result_dir = line.split()
+            results.append((library_id, result_dir))
+    return results
+
+
 def make_condor_archive_script(ininame, files):
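+    # condor submit file that tars the listed files (following symlinks, tar -h) into ../<archivename>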
     script = """Universe = vanilla
 
 Executable = /bin/tar
-arguments = czvf ../%(archivename)s %(filelist)s
+arguments = czvhf ../%(archivename)s %(filelist)s
 
 Error = compress.err.$(Process).log
 Output = compress.out.$(Process).log
-Log = compress.log
+Log = /tmp/submission-compress-%(user)s.log
 initialdir = %(initialdir)s
+environment="GZIP=-3"
+request_memory = 20
 
 queue 
 """
@@ -121,431 +290,162 @@ queue
 
     context = {'archivename': make_submission_name(ininame),
                'filelist': " ".join(files),
-               'initialdir': os.getcwd()}
+               'initialdir': os.getcwd(),
+               'user': os.getlogin()}
 
-    condor_script = make_condor_name(ininame)
+    condor_script = make_condor_name(ininame, 'archive')
     condor_stream = open(condor_script,'w')
     condor_stream.write(script % context)
     condor_stream.close()
+    return condor_script
 
-def make_ddf(ininame,  daf_name, guess_ddf=False, make_condor=False, outdir=None):
-    """
-    Make ddf files, and bonus condor file
-    """
-    curdir = os.getcwd()
-    if outdir is not None:
-        os.chdir(outdir)
-    output = sys.stdout
-    ddf_name = None
-    if guess_ddf:
-        ddf_name = make_ddf_name(ininame)
-        print ddf_name
-        output = open(ddf_name,'w')
-
-    file_list = read_ddf_ini(ininame, output)
 
-    file_list.append(daf_name)
-    if ddf_name is not None:
-        file_list.append(ddf_name)
-
-    if make_condor:
-        make_condor_archive_script(ininame, file_list)
-        
-    os.chdir(curdir)
+def make_condor_upload_script(ininame):
+    script = """Universe = vanilla
 
+Executable = /usr/bin/lftp
+arguments = -c put ../%(archivename)s -o ftp://%(ftpuser)s:%(ftppassword)s@%(ftphost)s/%(archivename)s
 
-def get_library_info(host, apidata, library_id):
-    url = api.library_url(host, library_id)
-    contents = api.retrieve_info(url, apidata)
-    return contents
-    
-def read_library_result_map(filename):
-    stream = open(filename,'r')
-
-    results = []
-    for line in stream:
-        library_id, result_dir = line.strip().split()
-        results.append((library_id, result_dir))
-    return results
+Error = upload.err.$(Process).log
+Output = upload.out.$(Process).log
+Log = /tmp/submission-upload-%(user)s.log
+initialdir = %(initialdir)s
 
-def condor_srf_to_fastq(srf_file, target_pathname):
-    script = """output=%(target_pathname)s
-arguments="-c %(srf_file)s"
-queue
+queue 
 """
-    params = {'srf_file': srf_file,
-              'target_pathname': target_pathname}
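+    # read the encodeftp credentials from a netrc file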
+    auth = netrc.netrc(os.path.expanduser("~diane/.netrc"))
     
-    return  script % params
+    encodeftp = 'encodeftp.cse.ucsc.edu'
+    ftpuser = auth.hosts[encodeftp][0]
+    ftppassword = auth.hosts[encodeftp][2]
+    context = {'archivename': make_submission_name(ininame),
+               'initialdir': os.getcwd(),
+               'user': os.getlogin(),
+               'ftpuser': ftpuser,
+               'ftppassword': ftppassword,
+               'ftphost': encodeftp}
 
-def condor_qseq_to_fastq(qseq_file, target_pathname):
-    script = """
-arguments="-i %(qseq_file)s -o %(target_pathname)s"
-queue
-"""
-    params = {'qseq_file': qseq_file,
-              'target_pathname': target_pathname}
-    
-    return script % params
+    condor_script = make_condor_name(ininame, 'upload')
+    condor_stream = open(condor_script,'w')
+    condor_stream.write(script % context)
+    condor_stream.close()
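+    # owner read/write only, since the submit file embeds the ftp password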
+    os.chmod(condor_script, stat.S_IREAD|stat.S_IWRITE)
 
-def find_archive_sequence_files(host, apidata, sequences_path, 
-                                library_result_map):
-    """
-    Find all the archive sequence files possibly associated with our results.
+    return condor_script
 
-    """
-    logging.debug("Searching for sequence files in: %s" %(sequences_path,))
 
-    lib_db = {}
-    seq_dirs = set()
-    #seq_dirs = set(os.path.join(sequences_path, 'srfs'))
-    candidate_lanes = {}
-    for lib_id, result_dir in library_result_map:
-        lib_info = get_library_info(host, apidata, lib_id)
-        lib_db[lib_id] = lib_info
-
-        for lane in lib_info['lane_set']:
-            lane_key = (lane['flowcell'], lane['lane_number'])
-            candidate_lanes[lane_key] = lib_id
-            seq_dirs.add(os.path.join(sequences_path, 
-                                         'flowcells', 
-                                         lane['flowcell']))
-    logging.debug("Seq_dirs = %s" %(unicode(seq_dirs)))
-    candidate_seq_list = scan_for_sequences(seq_dirs)
-
-    # at this point we have too many sequences as scan_for_sequences
-    # returns all the sequences in a flowcell directory
-    # so lets filter out the extras
-    
-    for seq in candidate_seq_list:
-        lane_key = (seq.flowcell, seq.lane)
-        lib_id = candidate_lanes.get(lane_key, None)
-        if lib_id is not None:
-            lib_info = lib_db[lib_id]
-            lanes = lib_info.setdefault('lanes', {})
-            lanes.setdefault(lane_key, set()).add(seq)
-    
-    return lib_db
-
-def build_fastqs(host, apidata, sequences_path, library_result_map, 
-                 paired=True ):
+def make_dag_fragment(ininame, archive_condor, upload_condor):
     """
-    Generate condor scripts to build any needed fastq files
-    
-    Args:
-      host (str): root of the htsworkflow api server
-      apidata (dict): id & key to post to the server
-      sequences_path (str): root of the directory tree to scan for files
-      library_result_map (list):  [(library_id, destination directory), ...]
-      paired: should we assume that we are processing paired end records?
-              if False, we will treat this as single ended.
+    Make the DAG fragments that compress and then upload the data.
     """
-    qseq_condor_header = """
-Universe=vanilla
-executable=/woldlab/rattus/lvol0/mus/home/diane/proj/gaworkflow/scripts/qseq2fastq
-error=qseqfastq.err.$(process).log
-output=qseqfastq.out.$(process).log
-log=qseqfastq.log
+    cur_dir = os.getcwd()
+    archive_condor = os.path.join(cur_dir, archive_condor)
+    upload_condor = os.path.join(cur_dir, upload_condor)
+    job_basename = make_base_name(ininame)
 
-"""
-    qseq_condor_entries = []
-    srf_condor_header = """
-Universe=vanilla
-executable=/woldlab/rattus/lvol0/mus/home/diane/bin/srf2fastq
-output=srf2fastq.out.$(process).log
-error=srf2fastq.err.$(process).log
-log=srffastq.log
+    fragments = []
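+    # DAGMan jobs: archive first, then upload once the archive job succeeds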
+    fragments.append('JOB %s_archive %s' % (job_basename, archive_condor))
+    fragments.append('JOB %s_upload %s' % (job_basename,  upload_condor))
+    fragments.append('PARENT %s_archive CHILD %s_upload' % (job_basename, job_basename))
 
-"""
-    srf_condor_entries = []
-    fastq_paired_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s_r%(read)s.fastq'
-    fastq_single_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s.fastq'
-    lib_db = find_archive_sequence_files(host, 
-                                         apidata, 
-                                         sequences_path, 
-                                         library_result_map)
-
-    # find what targets we're missing
-    needed_targets = {}
-    for lib_id, result_dir in library_result_map:
-        lib = lib_db[lib_id]
-        for lane_key, sequences in lib['lanes'].items():
-            for seq in sequences:
-                filename_attributes = { 
-                    'flowcell': seq.flowcell,
-                    'lib_id': lib_id,
-                    'lane': seq.lane,
-                    'read': seq.read,
-                    'cycle': seq.cycle
-                    }
-                # throw out test runs
-                # FIXME: this should probably be configurable
-                if seq.cycle < 50:
-                    continue
-                if seq.flowcell == '30CUUAAXX':
-                    # 30CUUAAXX run sucked
-                    continue
-
-                # end filters
-                if paired:
-                    target_name = fastq_paired_template % filename_attributes
-                else:
-                    target_name = fastq_single_template % filename_attributes
-
-                target_pathname = os.path.join(result_dir, target_name)
-                if not os.path.exists(target_pathname):
-                    t = needed_targets.setdefault(target_pathname, {})
-                    t[seq.filetype] = seq
-                    
-    for target_pathname, available_sources in needed_targets.items():
-        logging.debug(' target : %s' % (target_pathname,))
-        logging.debug(' candidate sources: %s' % (available_sources,))
-        if available_sources.has_key('qseq'):
-            source = available_sources['qseq']
-            qseq_condor_entries.append(
-                condor_qseq_to_fastq(source.path, target_pathname)
-            )
-        elif available_sources.has_key('srf'):
-            source = available_sources['srf']
-            if source.read is not None:
-                logging.warn(
-                    "srf -> fastq paired end doesn't work yet: %s" % (source,)
-                )
-            else:
-                srf_condor_entries.append(
-                    condor_srf_to_fastq(source.path, target_pathname)
-                )
-        else:
-            print " need file", target_pathname
+    return fragments
 
-    if len(srf_condor_entries) > 0:
-        make_submit_script('srf.fastq.condor', 
-                           srf_condor_header,
-                           srf_condor_entries)
 
-    if len(qseq_condor_entries) > 0:
-        make_submit_script('qseq.fastq.condor', 
-                           qseq_condor_header,
-                           qseq_condor_entries)
+def get_library_info(host, apidata, library_id):
+    url = api.library_url(host, library_id)
+    contents = api.retrieve_info(url, apidata)
+    return contents
 
-def find_best_extension(extension_map, filename):
-    """
-    Search through extension_map looking for the best extension
-    The 'best' is the longest match
 
-    :Args:
-      extension_map (dict): '.ext' -> { 'view': 'name' or None }
-      filename (str): the filename whose extention we are about to examine
-    """
-    best_ext = None
-    path, last_ext = os.path.splitext(filename)
-
-    for ext in extension_map.keys():
-        if filename.endswith(ext):
-            if best_ext is None:
-                best_ext = ext
-            elif len(ext) > len(best_ext):
-                best_ext = ext
-    return best_ext
-
-def add_submission_section(line_counter, files, standard_attributes, file_attributes):
+def make_submission_section(line_counter, files, attributes):
     """
     Create a section in the submission ini file
     """
-    inifile = [ '[line%s]' % (line_counter,) ]
+    inifile = [ "[line%s]" % (line_counter,) ]
     inifile += ["files=%s" % (",".join(files))]
-    cur_attributes = {}
-    cur_attributes.update(standard_attributes)
-    cur_attributes.update(file_attributes)
-    
-    for k,v in cur_attributes.items():
+
+    for k,v in attributes.items():
         inifile += ["%s=%s" % (k,v)]
     return inifile
-    
-def make_submission_ini(host, apidata, library_result_map):
-    attributes = {
-        '.bai':                   {'view': None}, # bam index
-        '.bam':                   {'view': 'Signal'},
-        '.condor':                {'view': None},
-        '.daf':                   {'view': None},
-        '.ddf':                   {'view': None},
-        '.splices.bam':           {'view': 'Splices'},
-        '.bed':                   {'view': 'TopHatJunctions'},
-        '.ini':                   {'view': None},
-        '.log':                   {'view': None},
-        '_r1.fastq':              {'view': 'FastqRd1'},
-        '_r2.fastq':              {'view': 'FastqRd2'},
-        '.tar.bz2':               {'view': None},
-        'novel.genes.expr':       {'view': 'GeneDeNovoFPKM'},
-        'novel.transcripts.expr': {'view': 'TranscriptDeNovoFPKM'},
-        '.genes.expr':            {'view': 'GeneFPKM'},
-        '.transcripts.expr':      {'view': 'TranscriptFPKM'},
-        '.stats.txt':             {'view': 'InsLength'},
-        '.gtf':                   {'view': 'CufflinksGeneModel'},
-        '.wig':                   {'view': 'RawSignal'},
-    }
-   
-    candidate_fastq_src = {}
 
-    for lib_id, result_dir in library_result_map:
-        inifile =  ['[config]']
-        inifile += ['order_by=files view cell localization rnaExtract mapAlgorithm readType replicate labVersion']
-        inifile += ['']
-        line_counter = 1
-        lib_info = get_library_info(host, apidata, lib_id)
-        result_ini = os.path.join(result_dir, result_dir+'.ini')
-
-        standard_attributes = {'cell': lib_info['cell_line'],
-                               'insertLength': '200', # ali
-                               'labVersion': 'TopHat',
-                               'localization': 'cell',
-                               'mapAlgorithm': 'TopHat',
-                               'readType': '2x75', #ali
-                               'replicate': lib_info['replicate'],
-                               'rnaExtract': 'longPolyA',
-                               }
-
-        # write fastq line
-        #fastqs = []
-        #for lane in lib_info['lane_set']:
-        #    target_name = "%s_%s_%s.fastq" % (lane['flowcell'], lib_id, lane['lane_number'])
-        #    fastqs.append(target_name)
-        #inifile.extend(
-        #    make_run_block(line_counter, fastqs, standard_attributes, attributes['.fastq'])
-        #)
-        #inifile += ['']
-        #line_counter += 1
-
-        # write other lines
-        submission_files = os.listdir(result_dir)
-        for f in submission_files:
-            best_ext = find_best_extension(attributes, f)
-
-            if best_ext is not None:
-               if attributes[best_ext]['view'] is None:
-                   continue
-               inifile.extend(
-                   add_submission_section(line_counter,
-                                          [f],
-                                          standard_attributes,
-                                          attributes[best_ext]
-                   )
-               )
-               inifile += ['']
-               line_counter += 1
-            else:
-                raise ValueError("Unrecognized file: %s" % (f,))
 
-        f = open(result_ini,'w')
-        f.write(os.linesep.join(inifile))
-        f.close()
+def make_base_name(pathname):
+    base = os.path.basename(pathname)
+    name, ext = os.path.splitext(base)
+    return name
 
-def link_daf(daf_path, library_result_map):
-    if not os.path.exists(daf_path):
-        raise RuntimeError("%s does not exist, how can I link to it?" % (daf_path,))
 
-    base_daf = os.path.basename(daf_path)
-    
-    for lib_id, result_dir in library_result_map:
-        submission_daf = os.path.join(result_dir, base_daf)
-        if not os.path.exists(submission_daf):
-            os.link(daf_path, submission_daf)
+def make_submission_name(ininame):
+    name = make_base_name(ininame)
+    return name + ".tgz"
 
-def make_all_ddfs(library_result_map, daf_name):
-    for lib_id, result_dir in library_result_map:
-        ininame = result_dir+'.ini'
-        inipathname = os.path.join(result_dir, ininame)
-        if os.path.exists(inipathname):
-            make_ddf(ininame, daf_name, True, True, result_dir)
-            
-def make_parser():
-    # Load defaults from the config files
-    config = SafeConfigParser()
-    config.read([os.path.expanduser('~/.htsworkflow.ini'), '/etc/htsworkflow.ini'])
-    
-    sequence_archive = None
-    apiid = None
-    apikey = None
-    apihost = None
-    SECTION = 'sequence_archive'
-    if config.has_section(SECTION):
-        sequence_archive = config.get(SECTION, 'sequence_archive',sequence_archive)
-        sequence_archive = os.path.expanduser(sequence_archive)
-        apiid = config.get(SECTION, 'apiid', apiid)
-        apikey = config.get(SECTION, 'apikey', apikey)
-        apihost = config.get(SECTION, 'host', apihost)
 
-    parser = OptionParser()
+def make_ddf_name(pathname):
+    name = make_base_name(pathname)
+    return name + ".ddf"
 
-    # commands
-    parser.add_option('--fastq', help="generate scripts for making fastq files",
-                      default=False, action="store_true")
 
-    parser.add_option('--ini', help="generate submission ini file", default=False,
-                      action="store_true")
+def make_condor_name(pathname, run_type=None):
+    name = make_base_name(pathname)
+    elements = [name]
+    if run_type is not None:
+        elements.append(run_type)
+    elements.append("condor")
+    return ".".join(elements)
 
-    parser.add_option('--makeddf', help='make the ddfs', default=False,
-                      action="store_true")
-    
-    parser.add_option('--daf', default=None, help='specify daf name')
 
-    # configuration options
-    parser.add_option('--apiid', default=apiid, help="Specify API ID")
-    parser.add_option('--apikey', default=apikey, help="Specify API KEY")
-    parser.add_option('--host',  default=apihost,
-                      help="specify HTSWorkflow host",)
-    parser.add_option('--sequence', default=sequence_archive,
-                      help="sequence repository")
-    parser.add_option('--single', default=False, action="store_true", 
-                      help="treat the sequences as single ended runs")
+def parse_filelist(file_string):
+    return file_string.split(",")
 
-    # debugging
-    parser.add_option('--verbose', default=False, action="store_true",
-                      help='verbose logging')
-    parser.add_option('--debug', default=False, action="store_true",
-                      help='debug logging')
 
-    return parser
+def validate_filelist(files):
+    """
+    Die if a file doesn't exist in a file list
+    """
+    for f in files:
+        if not os.path.exists(f):
+            raise RuntimeError("%s does not exist" % (f,))
 
-def main(cmdline=None):
-    parser = make_parser()
-    opts, args = parser.parse_args(cmdline)
-    
-    if opts.debug:
-        logging.basicConfig(level = logging.DEBUG )
-    elif opts.verbose:
-        logging.basicConfig(level = logging.INFO )
+def make_md5sum(filename):
+    """Quickly find the md5sum of a file
+    """
+    md5_cache = filename + ".md5"
+    if os.path.exists(md5_cache):
+        logging.debug("Found md5sum in {0}".format(md5_cache))
+        stream = open(md5_cache,'r')
+        lines = stream.readlines()
+        md5sum = parse_md5sum_line(lines, filename)
     else:
-        logging.basicConfig(level = logging.WARNING )
-        
+        md5sum = make_md5sum_unix(filename, md5_cache)
+    return md5sum
     
-    apidata = {'apiid': opts.apiid, 'apikey': opts.apikey }
+def make_md5sum_unix(filename, md5_cache):
+    cmd = ["md5sum", filename]
+    logging.debug("Running {0}".format(" ".join(cmd)))
+    p = Popen(cmd, stdout=PIPE)
+    stdout, stderr = p.communicate()
+    retcode = p.wait()
+    logging.debug("Finished {0} retcode {1}".format(" ".join(cmd), retcode))
+    if retcode != 0:
+        logging.error("Trouble with md5sum for {0}".format(filename))
+        return None
+    lines = stdout.split(os.linesep)
+    md5sum = parse_md5sum_line(lines, filename)
+    if md5sum is not None:
+        logging.debug("Caching sum in {0}".format(md5_cache))
+        stream = open(md5_cache, "w")
+        stream.write(stdout)
+        stream.close()
+    return md5sum
+
+def parse_md5sum_line(lines, filename):
+    md5sum, md5sum_filename = lines[0].split()
+    if md5sum_filename != filename:
+        errmsg = "MD5sum and I disagre about filename. {0} != {1}"
+        logging.error(errmsg.format(filename, md5sum_filename))
+        return None
+    return md5sum
 
-    if opts.host is None or opts.apiid is None or opts.apikey is None:
-        parser.error("Please specify host url, apiid, apikey")
-
-    if len(args) == 0:
-        parser.error("I need at least one library submission-dir input file")
-        
-    library_result_map = []
-    for a in args:
-        library_result_map.extend(read_library_result_map(a))
-
-    if opts.daf is not None:
-        link_daf(opts.daf, library_result_map)
-
-    if opts.fastq:
-        build_fastqs(opts.host, 
-                     apidata, 
-                     opts.sequence, 
-                     library_result_map,
-                     not opts.single)
-
-    if opts.ini:
-        make_submission_ini(opts.host, apidata, library_result_map)
-
-    if opts.makeddf:
-        make_all_ddfs(library_result_map, opts.daf)
-        
 if __name__ == "__main__":
     main()