Move the code that generates the srf and qseq to fastq condor scripts into its own module
[htsworkflow.git] / extra / ucsc_encode_submission / ucsc_gather.py
index dc5fe184cb14204499671990f26ba56b414c1f1c..c65feecdfae6598260aba209ea5cabd24d8b2b6c 100755 (executable)
@@ -4,24 +4,23 @@ import fnmatch
 from glob import glob
 import json
 import logging
+import netrc
 from optparse import OptionParser
 import os
 from pprint import pprint, pformat
 import shlex
 from StringIO import StringIO
-import time
+import stat
+from subprocess import Popen, PIPE
 import sys
+import time
 import types
 import urllib
 import urllib2
 import urlparse
 
 from htsworkflow.util import api
-from htsworkflow.pipelines.sequences import \
-    create_sequence_table, \
-    scan_for_sequences
-from htsworkflow.pipelines import qseq2fastq
-from htsworkflow.pipelines import srf2fastq
+from htsworkflow.submission.condorfastq import CondorFastqExtract
 
 def main(cmdline=None):
     parser = make_parser()
@@ -34,10 +33,10 @@ def main(cmdline=None):
     else:
         logging.basicConfig(level = logging.WARNING )        
     
-    apidata = {'apiid': opts.apiid, 'apikey': opts.apikey }
+    apidata = api.make_auth_from_opts(opts, parser)
 
-    if opts.host is None or opts.apiid is None or opts.apikey is None:
-        parser.error("Please specify host url, apiid, apikey")
+    if opts.makeddf and opts.daf is None:
+        parser.error("Please specify your daf when making ddf files")
 
     if len(args) == 0:
         parser.error("I need at least one library submission-dir input file")
@@ -53,11 +52,9 @@ def main(cmdline=None):
         link_daf(opts.daf, library_result_map)
 
     if opts.fastq:
-        build_fastqs(opts.host, 
-                     apidata, 
-                     opts.sequence, 
-                     library_result_map,
-                     force=opts.force)
+        extractor = CondorFastqExtract(opts.host, apidata, opts.sequence,
+                                       force=opts.force)
+        extractor.build_fastqs(library_result_map)
 
     if opts.ini:
         make_submission_ini(opts.host, apidata, library_result_map)
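
The fastq-generation logic now lives in htsworkflow.submission.condorfastq. A minimal sketch of the interface this call site relies on; only the constructor arguments and the build_fastqs() call are confirmed by this diff, the attribute names are assumptions:

    class CondorFastqExtract(object):
        def __init__(self, host, apidata, sequences_path, force=False):
            self.host = host                      # htsworkflow API server root
            self.apidata = apidata                # auth dict from api.make_auth_from_opts
            self.sequences_path = sequences_path  # sequence archive to scan
            self.force = force                    # regenerate fastqs even if present

        def build_fastqs(self, library_result_map):
            """Write condor submit scripts for any missing fastq files.

            library_result_map is [(library_id, result_dir), ...], the same
            structure the old module-level build_fastqs() took (see the
            removed code below).
            """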
@@ -67,22 +64,6 @@ def main(cmdline=None):
 
 
 def make_parser():
-    # Load defaults from the config files
-    config = SafeConfigParser()
-    config.read([os.path.expanduser('~/.htsworkflow.ini'), '/etc/htsworkflow.ini'])
-    
-    sequence_archive = None
-    apiid = None
-    apikey = None
-    apihost = None
-    SECTION = 'sequence_archive'
-    if config.has_section(SECTION):
-        sequence_archive = config.get(SECTION, 'sequence_archive',sequence_archive)
-        sequence_archive = os.path.expanduser(sequence_archive)
-        apiid = config.get(SECTION, 'apiid', apiid)
-        apikey = config.get(SECTION, 'apikey', apikey)
-        apihost = config.get(SECTION, 'host', apihost)
-
     parser = OptionParser()
 
     # commands
@@ -102,20 +83,14 @@ def make_parser():
     parser.add_option('--force', default=False, action="store_true",
                       help="Force regenerating fastqs")
 
-    # configuration options
-    parser.add_option('--apiid', default=apiid, help="Specify API ID")
-    parser.add_option('--apikey', default=apikey, help="Specify API KEY")
-    parser.add_option('--host',  default=apihost,
-                      help="specify HTSWorkflow host",)
-    parser.add_option('--sequence', default=sequence_archive,
-                      help="sequence repository")
-
     # debugging
     parser.add_option('--verbose', default=False, action="store_true",
                       help='verbose logging')
     parser.add_option('--debug', default=False, action="store_true",
                       help='debug logging')
 
+    api.add_auth_options(parser)
+    
     return parser
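
The option block removed above shows what the api helpers must now provide. A sketch of the expected behavior, inferred from the code being replaced rather than from htsworkflow.util.api itself:

    def add_auth_options(parser):
        parser.add_option('--apiid', default=None, help="Specify API ID")
        parser.add_option('--apikey', default=None, help="Specify API KEY")
        parser.add_option('--host', default=None, help="specify HTSWorkflow host")
        # main() still reads opts.sequence, so --sequence has presumably
        # moved here too; it is no longer defined in this file

    def make_auth_from_opts(opts, parser):
        if opts.host is None or opts.apiid is None or opts.apikey is None:
            parser.error("Please specify host url, apiid, apikey")
        return {'apiid': opts.apiid, 'apikey': opts.apikey}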
 
 
@@ -139,130 +114,6 @@ def make_tree_from(source_path, library_result_map):
                 logging.info(
                     'LINK {0} to {1}'.format(source_pathname, target_pathname))
     
-def build_fastqs(host, apidata, sequences_path, library_result_map, 
-                 force=False ):
-    """
-    Generate condor scripts to build any needed fastq files
-    
-    Args:
-      host (str): root of the htsworkflow api server
-      apidata (dict): id & key to post to the server
-      sequences_path (str): root of the directory tree to scan for files
-      library_result_map (list):  [(library_id, destination directory), ...]
-    """
-    qseq_condor_header = """
-Universe=vanilla
-executable=%(exe)s
-error=log/qseq2fastq.err.$(process).log
-output=log/qseq2fastq.out.$(process).log
-log=log/qseq2fastq.log
-
-""" % {'exe': sys.executable }
-    qseq_condor_entries = []
-    srf_condor_header = """
-Universe=vanilla
-executable=%(exe)s
-output=log/srf_pair_fastq.out.$(process).log
-error=log/srf_pair_fastq.err.$(process).log
-log=log/srf_pair_fastq.log
-environment="PYTHONPATH=/home/diane/lib/python2.6/site-packages:/home/diane/proj/solexa/gaworkflow PATH=/woldlab/rattus/lvol0/mus/home/diane/bin:/usr/bin:/bin"
-
-""" % {'exe': sys.executable }
-    srf_condor_entries = []
-    lib_db = find_archive_sequence_files(host, 
-                                         apidata, 
-                                         sequences_path, 
-                                         library_result_map)
-
-    needed_targets = find_missing_targets(library_result_map, lib_db, force)
-
-    for target_pathname, available_sources in needed_targets.items():
-        logging.debug(' target : %s' % (target_pathname,))
-        logging.debug(' candidate sources: %s' % (available_sources,))
-        if available_sources.has_key('qseq'):
-            source = available_sources['qseq']
-            qseq_condor_entries.append(
-                condor_qseq_to_fastq(source.path, 
-                                     target_pathname, 
-                                     source.flowcell,
-                                     force=force)
-            )
-        elif available_sources.has_key('srf'):
-            source = available_sources['srf']
-            mid = getattr(source, 'mid_point', None)
-            srf_condor_entries.append(
-                condor_srf_to_fastq(source.path, 
-                                    target_pathname,
-                                    source.paired,
-                                    source.flowcell,
-                                    mid,
-                                    force=force)
-            )
-        else:
-            print " need file", target_pathname
-
-    if len(srf_condor_entries) > 0:
-        make_submit_script('srf.fastq.condor', 
-                           srf_condor_header,
-                           srf_condor_entries)
-
-    if len(qseq_condor_entries) > 0:
-        make_submit_script('qseq.fastq.condor', 
-                           qseq_condor_header,
-                           qseq_condor_entries)
-
-
-def find_missing_targets(library_result_map, lib_db, force=False):
-    """
-    Check if the sequence file exists.
-    This requires computing what the sequence name is and checking
-    to see if it can be found in the sequence location.
-
-    Adds seq.paired flag to sequences listed in lib_db[*]['lanes']
-    """
-    fastq_paired_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s_r%(read)s.fastq'
-    fastq_single_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s.fastq'
-    # find what targets we're missing
-    needed_targets = {}
-    for lib_id, result_dir in library_result_map:
-        lib = lib_db[lib_id]
-        lane_dict = make_lane_dict(lib_db, lib_id)
-        
-        for lane_key, sequences in lib['lanes'].items():
-            for seq in sequences:
-                seq.paired = lane_dict[seq.flowcell]['paired_end']
-                lane_status = lane_dict[seq.flowcell]['status']
-
-                if seq.paired and seq.read is None:
-                    seq.read = 1
-                filename_attributes = { 
-                    'flowcell': seq.flowcell,
-                    'lib_id': lib_id,
-                    'lane': seq.lane,
-                    'read': seq.read,
-                    'cycle': seq.cycle
-                    }
-                # skip bad runs
-                if lane_status == 'Failed':
-                    continue
-                if seq.flowcell == '30DY0AAXX':
-                    # 30DY0 only ran for 151 bases instead of 152
-                    # it is actually 76 1st read, 75 2nd read
-                    seq.mid_point = 76
-
-                # end filters
-                if seq.paired:
-                    target_name = fastq_paired_template % filename_attributes
-                else:
-                    target_name = fastq_single_template % filename_attributes
-
-                target_pathname = os.path.join(result_dir, target_name)
-                if force or not os.path.exists(target_pathname):
-                    t = needed_targets.setdefault(target_pathname, {})
-                    t[seq.filetype] = seq
-
-    return needed_targets
-
 
 def link_daf(daf_path, library_result_map):
     if not os.path.exists(daf_path):
@@ -271,8 +122,12 @@ def link_daf(daf_path, library_result_map):
     base_daf = os.path.basename(daf_path)
     
     for lib_id, result_dir in library_result_map:
+        if not os.path.exists(result_dir):
+            raise RuntimeError("Couldn't find target directory %s" %(result_dir,))
         submission_daf = os.path.join(result_dir, base_daf)
         if not os.path.exists(submission_daf):
+            if not os.path.exists(daf_path):
+                raise RuntimeError("Couldn't find daf: %s" %(daf_path,))
             os.link(daf_path, submission_daf)
 
 
@@ -284,7 +139,7 @@ def make_submission_ini(host, apidata, library_result_map, paired=True):
 
     for lib_id, result_dir in library_result_map:
         order_by = ['order_by=files', 'view', 'replicate', 'cell', 
-                    'readType', 'mapAlgorithm', 'insertLength' ]
+                    'readType', 'mapAlgorithm', 'insertLength', 'md5sum' ]
         inifile =  ['[config]']
         inifile += [" ".join(order_by)]
         inifile += ['']
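
With md5sum appended to order_by, the [config] section of each generated ini now begins:

    [config]
    order_by=files view replicate cell readType mapAlgorithm insertLength md5sum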
@@ -299,6 +154,7 @@ def make_submission_ini(host, apidata, library_result_map, paired=True):
             attributes = view_map.find_attributes(f, lib_id)
             if attributes is None:
                 raise ValueError("Unrecognized file: %s" % (f,))
+            attributes['md5sum'] = "None"
             
             ext = attributes["extension"]
             if attributes['view'] is None:                   
@@ -307,6 +163,9 @@ def make_submission_ini(host, apidata, library_result_map, paired=True):
                 fastqs.setdefault(ext, set()).add(f)
                 fastq_attributes[ext] = attributes
             else:
+                md5sum = make_md5sum(os.path.join(result_dir, f))
+                if md5sum is not None:
+                    attributes['md5sum'] = md5sum
                 inifile.extend(
                     make_submission_section(line_counter,
                                             [f],
@@ -330,17 +189,6 @@ def make_submission_ini(host, apidata, library_result_map, paired=True):
         f.write(os.linesep.join(inifile))
 
         
-def make_lane_dict(lib_db, lib_id):
-    """
-    Convert the lane_set in a lib_db to a dictionary
-    indexed by flowcell ID
-    """
-    result = []
-    for lane in lib_db[lib_id]['lane_set']:
-        result.append((lane['flowcell'], lane))
-    return dict(result)
-
-
 def make_all_ddfs(library_result_map, daf_name, make_condor=True, force=False):
     dag_fragment = []
     for lib_id, result_dir in library_result_map:
@@ -378,6 +226,9 @@ def make_ddf(ininame,  daf_name, guess_ddf=False, make_condor=False, outdir=None
         output = open(ddf_name,'w')
 
     file_list = read_ddf_ini(ininame, output)
+    logging.info(
+        "Read config {0}, found files: {1}".format(
+            ininame, ", ".join(file_list)))
 
     file_list.append(daf_name)
     if ddf_name is not None:
@@ -480,7 +331,7 @@ def make_condor_upload_script(ininame):
     script = """Universe = vanilla
 
 Executable = /usr/bin/lftp
-arguments = -c put ../%(archivename)s -o ftp://detrout@encodeftp.cse.ucsc.edu/%(archivename)s
+arguments = -c put ../%(archivename)s -o ftp://%(ftpuser)s:%(ftppassword)s@%(ftphost)s/%(archivename)s
 
 Error = upload.err.$(Process).log
 Output = upload.out.$(Process).log
@@ -489,14 +340,24 @@ initialdir = %(initialdir)s
 
 queue 
 """
+    auth = netrc.netrc(os.path.expanduser("~diane/.netrc"))
+    
+    encodeftp = 'encodeftp.cse.ucsc.edu'
+    ftpuser = auth.hosts[encodeftp][0]
+    ftppassword = auth.hosts[encodeftp][2]
     context = {'archivename': make_submission_name(ininame),
                'initialdir': os.getcwd(),
-               'user': os.getlogin()}
+               'user': os.getlogin(),
+               'ftpuser': ftpuser,
+               'ftppassword': ftppassword,
+               'ftphost': encodeftp}
 
     condor_script = make_condor_name(ininame, 'upload')
     condor_stream = open(condor_script,'w')
     condor_stream.write(script % context)
     condor_stream.close()
+    # owner read/write only (0600): the generated script embeds the ftp password
+    os.chmod(condor_script, stat.S_IREAD|stat.S_IWRITE)
+
     return condor_script
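
netrc.netrc() parses the standard ~/.netrc format; its hosts attribute maps each machine name to a (login, account, password) tuple, which is why the code above indexes [0] and [2]. The lookup expects an entry along these lines (credentials are placeholders); note the path is hardcoded to ~diane/.netrc rather than the invoking user's home directory:

    machine encodeftp.cse.ucsc.edu
        login myuser
        password mysecret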
 
 
@@ -523,93 +384,6 @@ def get_library_info(host, apidata, library_id):
     return contents
 
 
-def condor_srf_to_fastq(srf_file, target_pathname, paired, flowcell=None,
-                        mid=None, force=False):
-    py = srf2fastq.__file__
-    args = [ py, srf_file, ]
-    if paired:
-        args.extend(['--left', target_pathname])
-        # this is ugly. I did it because I was pregenerating the target
-        # names before I tried to figure out what sources could generate
-        # those targets, and everything up to this point had been
-        # one-to-one. So I couldn't figure out how to pair the 
-        # target names. 
-        # With this at least the command will run correctly.
-        # however if we rename the default targets, this'll break
-        # also I think it'll generate it twice.
-        args.extend(['--right', 
-                     target_pathname.replace('_r1.fastq', '_r2.fastq')])
-    else:
-        args.extend(['--single', target_pathname ])
-    if flowcell is not None:
-        args.extend(['--flowcell', flowcell])
-
-    if mid is not None:
-        args.extend(['-m', str(mid)])
-
-    if force:
-        args.extend(['--force'])
-
-    script = """
-arguments="%s"
-queue
-""" % (" ".join(args),)
-    
-    return  script 
-
-
-def condor_qseq_to_fastq(qseq_file, target_pathname, flowcell=None, force=False):
-    py = qseq2fastq.__file__
-    args = [py, '-i', qseq_file, '-o', target_pathname ]
-    if flowcell is not None:
-        args.extend(['-f', flowcell])
-    script = """
-arguments="%s"
-queue
-""" % (" ".join(args))
-
-    return script 
-
-def find_archive_sequence_files(host, apidata, sequences_path, 
-                                library_result_map):
-    """
-    Find all the archive sequence files possibly associated with our results.
-
-    """
-    logging.debug("Searching for sequence files in: %s" %(sequences_path,))
-
-    lib_db = {}
-    seq_dirs = set()
-    #seq_dirs = set(os.path.join(sequences_path, 'srfs'))
-    candidate_lanes = {}
-    for lib_id, result_dir in library_result_map:
-        lib_info = get_library_info(host, apidata, lib_id)
-        lib_info['lanes'] = {}
-        lib_db[lib_id] = lib_info
-
-        for lane in lib_info['lane_set']:
-            lane_key = (lane['flowcell'], lane['lane_number'])
-            candidate_lanes[lane_key] = lib_id
-            seq_dirs.add(os.path.join(sequences_path, 
-                                         'flowcells', 
-                                         lane['flowcell']))
-    logging.debug("Seq_dirs = %s" %(unicode(seq_dirs)))
-    candidate_seq_list = scan_for_sequences(seq_dirs)
-
-    # at this point we have too many sequences as scan_for_sequences
-    # returns all the sequences in a flowcell directory
-    # so lets filter out the extras
-    
-    for seq in candidate_seq_list:
-        lane_key = (seq.flowcell, seq.lane)
-        lib_id = candidate_lanes.get(lane_key, None)
-        if lib_id is not None:
-            lib_info = lib_db[lib_id]
-            lib_info['lanes'].setdefault(lane_key, set()).add(seq)
-    
-    return lib_db
-
-
 class NameToViewMap(object):
     """Determine view attributes for a given submission file name
     """
@@ -623,22 +397,44 @@ class NameToViewMap(object):
         ma = 'TH1014'
 
         self.patterns = [
+            # for 2011 Feb 18 elements submission
+            ('final_Cufflinks_genes_*gtf',       'GeneDeNovo'),
+            ('final_Cufflinks_transcripts_*gtf', 'TranscriptDeNovo'),
+            ('final_exonFPKM-Cufflinks-0.9.3-GENCODE-v3c-*.gtf',       
+             'ExonsGencV3c'),
+            ('final_GENCODE-v3-Cufflinks-0.9.3.genes-*gtf',          
+             'GeneGencV3c'),
+            ('final_GENCODE-v3-Cufflinks-0.9.3.transcripts-*gtf',    
+             'TranscriptGencV3c'),
+            ('final_TSS-Cufflinks-0.9.3-GENCODE-v3c-*.gtf', 'TSS'),
+            ('final_junctions-*.bed6+3',                    'Junctions'),
+            
             ('*.bai',                   None),
             ('*.splices.bam',           'Splices'),
             ('*.bam',                   self._guess_bam_view),
             ('junctions.bed',           'Junctions'),
             ('*.jnct',                  'Junctions'),
-            ('*.unique.bigwig',         None),
-            ('*.plus.bigwig',           'PlusSignal'),
-            ('*.minus.bigwig',          'MinusSignal'),
+            ('*unique.bigwig',          None),
+            ('*plus.bigwig',            'PlusSignal'),
+            ('*minus.bigwig',           'MinusSignal'),
             ('*.bigwig',                'Signal'),
             ('*.tar.bz2',               None),
             ('*.condor',                None),
             ('*.daf',                   None),
             ('*.ddf',                   None),
+
+            ('*ufflinks?0.9.3.genes.gtf',       'GeneDeNovo'),
+            ('*ufflinks?0.9.3.transcripts.gtf', 'TranscriptDeNovo'),
+            ('*GENCODE-v3c.exonFPKM.gtf',        'ExonsGencV3c'),
+            ('*GENCODE-v3c.genes.gtf',           'GeneGencV3c'),
+            ('*GENCODE-v3c.transcripts.gtf',     'TranscriptGencV3c'),
+            ('*GENCODE-v3c.TSS.gtf',             'TSS'),
+            ('*.junctions.bed6+3',                'Junctions'),
+            
             ('*.?ufflinks-0.9.0?genes.expr',       'GeneDeNovo'),
             ('*.?ufflinks-0.9.0?transcripts.expr', 'TranscriptDeNovo'),
             ('*.?ufflinks-0.9.0?transcripts.gtf',  'GeneModel'),
+
             ('*.GENCODE-v3c?genes.expr',       'GeneGCV3c'),
             ('*.GENCODE-v3c?transcript*.expr', 'TranscriptGCV3c'),
             ('*.GENCODE-v3c?transcript*.gtf',  'TranscriptGencV3c'),
@@ -653,11 +449,13 @@ class NameToViewMap(object):
             ('*.gtf',                   'GeneModel'),
             ('*.ini',                   None),
             ('*.log',                   None),
+            ('*.md5',                   None),
             ('paired-end-distribution*', 'InsLength'),
             ('*.stats.txt',              'InsLength'),
             ('*.srf',                   None),
             ('*.wig',                   None),
             ('*.zip',                   None),
+            ('transfer_log',            None),
             ]
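
These patterns appear to be fnmatch-style globs (the module imports fnmatch), where ? matches exactly one character, so entries like '*ufflinks?0.9.3.genes.gtf' tolerate either separator between tool name and version:

    import fnmatch
    pattern = '*ufflinks?0.9.3.genes.gtf'
    fnmatch.fnmatch('Cufflinks-0.9.3.genes.gtf', pattern)  # True ('-' separator)
    fnmatch.fnmatch('cufflinks_0.9.3.genes.gtf', pattern)  # True ('_' separator)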
 
         self.views = {
@@ -673,6 +471,9 @@ class NameToViewMap(object):
             "GeneModel": {"MapAlgorithm": ma},
             "GeneDeNovo": {"MapAlgorithm": ma},
             "TranscriptDeNovo": {"MapAlgorithm": ma},
+            "ExonsGencV3c": {"MapAlgorithm": ma},
+            "GeneGencV3c": {"MapAlgorithm": ma},
+            "TSS": {"MapAlgorithm": ma},
             "GeneGCV3c": {"MapAlgorithm": ma},
             "TranscriptGCV3c": {"MapAlgorithm": ma},
             "TranscriptGencV3c": {"MapAlgorithm": ma},
@@ -734,8 +535,14 @@ class NameToViewMap(object):
 
     def _is_paired(self, lib_id, lib_info):
         """Determine if a library is paired end"""
+        # TODO: encode this information in the library type page.
+        single = (1,3,6)
         if len(lib_info["lane_set"]) == 0:
-            return False
+            # we haven't sequenced anything so guess based on library type
+            if lib_info['library_type_id'] in single:
+                return False
+            else:
+                return True
 
         if not self.lib_paired.has_key(lib_id):
             is_paired = 0
@@ -820,32 +627,6 @@ def make_condor_name(pathname, run_type=None):
     return ".".join(elements)
 
 
-def make_submit_script(target, header, body_list):
-    """
-    write out a text file
-
-    this was intended for condor submit scripts
-
-    Args:
-      target (str or stream): 
-        if target is a string, we will open and close the file
-        if target is a stream, the caller is responsible.
-
-      header (str);
-        header to write at the beginning of the file
-      body_list (list of strs):
-        a list of blocks to add to the file.
-    """
-    if type(target) in types.StringTypes:
-        f = open(target,"w")
-    else:
-        f = target
-    f.write(header)
-    for entry in body_list:
-        f.write(entry)
-    if type(target) in types.StringTypes:
-        f.close()
-
 def parse_filelist(file_string):
     return file_string.split(",")
 
@@ -858,6 +639,46 @@ def validate_filelist(files):
         if not os.path.exists(f):
             raise RuntimeError("%s does not exist" % (f,))
 
+def make_md5sum(filename):
+    """Quickly find the md5sum of a file
+    """
+    md5_cache = filename + ".md5"
+    if os.path.exists(md5_cache):
+        logging.debug("Found md5sum in {0}".format(md5_cache))
+        stream = open(md5_cache,'r')
+        lines = stream.readlines()
+        md5sum = parse_md5sum_line(lines, filename)
+    else:
+        md5sum = make_md5sum_unix(filename, md5_cache)
+    return md5sum
+    
+def make_md5sum_unix(filename, md5_cache):
+    cmd = ["md5sum", filename]
+    logging.debug("Running {0}".format(" ".join(cmd)))
+    p = Popen(cmd, stdout=PIPE)
+    stdout, stderr = p.communicate()
+    retcode = p.wait()
+    logging.debug("Finished {0} retcode {1}".format(" ".join(cmd), retcode))
+    if retcode != 0:
+        logging.error("Trouble with md5sum for {0}".format(filename))
+        return None
+    lines = stdout.split(os.linesep)
+    md5sum = parse_md5sum_line(lines, filename)
+    if md5sum is not None:
+        logging.debug("Caching sum in {0}".format(md5_cache))
+        stream = open(md5_cache, "w")
+        stream.write(stdout)
+        stream.close()
+    return md5sum
+
+def parse_md5sum_line(lines, filename):
+    md5sum, md5sum_filename = lines[0].split()
+    if md5sum_filename != filename:
+        errmsg = "MD5sum and I disagre about filename. {0} != {1}"
+        logging.error(errmsg.format(filename, md5sum_filename))
+        return None
+    return md5sum
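
Usage sketch for the new helper: the first call shells out to md5sum and caches the digest next to the file, later calls parse the cache; it returns the hex digest as a string, or None on failure (path and digest here are illustrative):

    md5 = make_md5sum('mylib/accepted_hits.bam')  # writes mylib/accepted_hits.bam.md5
    if md5 is not None:
        print md5  # e.g. 'd41d8cd98f00b204e9800998ecf8427e'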
 
 if __name__ == "__main__":
     main()