from glob import glob
import json
import logging
+import netrc
from optparse import OptionParser
import os
from pprint import pprint, pformat
import shlex
from StringIO import StringIO
-import time
+import stat
+from subprocess import Popen, PIPE
import sys
+import time
import types
import urllib
import urllib2
from htsworkflow.pipelines.sequences import \
create_sequence_table, \
scan_for_sequences
+from htsworkflow.pipelines import qseq2fastq
+from htsworkflow.pipelines import srf2fastq
def main(cmdline=None):
parser = make_parser()
else:
logging.basicConfig(level = logging.WARNING )
- apidata = {'apiid': opts.apiid, 'apikey': opts.apikey }
+ apidata = api.make_auth_from_opts(opts, parser)
- if opts.host is None or opts.apiid is None or opts.apikey is None:
- parser.error("Please specify host url, apiid, apikey")
+ if opts.makeddf and opts.daf is None:
+ parser.error("Please specify your daf when making ddf files")
if len(args) == 0:
parser.error("I need at least one library submission-dir input file")
for a in args:
library_result_map.extend(read_library_result_map(a))
+ if opts.make_tree_from is not None:
+ make_tree_from(opts.make_tree_from, library_result_map)
+
if opts.daf is not None:
link_daf(opts.daf, library_result_map)
def make_parser():
- # Load defaults from the config files
- config = SafeConfigParser()
- config.read([os.path.expanduser('~/.htsworkflow.ini'), '/etc/htsworkflow.ini'])
-
- sequence_archive = None
- apiid = None
- apikey = None
- apihost = None
- SECTION = 'sequence_archive'
- if config.has_section(SECTION):
- sequence_archive = config.get(SECTION, 'sequence_archive',sequence_archive)
- sequence_archive = os.path.expanduser(sequence_archive)
- apiid = config.get(SECTION, 'apiid', apiid)
- apikey = config.get(SECTION, 'apikey', apikey)
- apihost = config.get(SECTION, 'host', apihost)
-
parser = OptionParser()
# commands
+ parser.add_option('--make-tree-from',
+ help="create directories & link data files",
+ default=None)
parser.add_option('--fastq', help="generate scripts for making fastq files",
default=False, action="store_true")
parser.add_option('--force', default=False, action="store_true",
help="Force regenerating fastqs")
- # configuration options
- parser.add_option('--apiid', default=apiid, help="Specify API ID")
- parser.add_option('--apikey', default=apikey, help="Specify API KEY")
- parser.add_option('--host', default=apihost,
- help="specify HTSWorkflow host",)
- parser.add_option('--sequence', default=sequence_archive,
- help="sequence repository")
-
# debugging
parser.add_option('--verbose', default=False, action="store_true",
help='verbose logging')
parser.add_option('--debug', default=False, action="store_true",
help='debug logging')
+ api.add_auth_options(parser)
+
return parser
+def make_tree_from(source_path, library_result_map):
+ """Create a tree using data files from source path.
+ """
+ for lib_id, lib_path in library_result_map:
+ if not os.path.exists(lib_path):
+ logging.info("Making dir {0}".format(lib_path))
+ os.mkdir(lib_path)
+ source_lib_dir = os.path.join(source_path, lib_path)
+        if not os.path.exists(source_lib_dir):
+            raise IOError("{0} does not exist".format(source_lib_dir))
+ for filename in os.listdir(source_lib_dir):
+ source_pathname = os.path.join(source_lib_dir, filename)
+ target_pathname = os.path.join(lib_path, filename)
+ if not os.path.exists(source_pathname):
+ raise IOError("{0} does not exist".format(source_pathname))
+ if not os.path.exists(target_pathname):
+ os.symlink(source_pathname, target_pathname)
+ logging.info(
+ 'LINK {0} to {1}'.format(source_pathname, target_pathname))
+
def build_fastqs(host, apidata, sequences_path, library_result_map,
force=False ):
"""
"""
qseq_condor_header = """
Universe=vanilla
-executable=/woldlab/rattus/lvol0/mus/home/diane/proj/solexa/gaworkflow/scripts/qseq2fastq
+executable=%(exe)s
error=log/qseq2fastq.err.$(process).log
output=log/qseq2fastq.out.$(process).log
log=log/qseq2fastq.log
-"""
+""" % {'exe': sys.executable }
qseq_condor_entries = []
srf_condor_header = """
Universe=vanilla
-executable=/woldlab/rattus/lvol0/mus/home/diane/proj/solexa/gaworkflow/scripts/srf2fastq
+executable=%(exe)s
output=log/srf_pair_fastq.out.$(process).log
error=log/srf_pair_fastq.err.$(process).log
log=log/srf_pair_fastq.log
environment="PYTHONPATH=/home/diane/lib/python2.6/site-packages:/home/diane/proj/solexa/gaworkflow PATH=/woldlab/rattus/lvol0/mus/home/diane/bin:/usr/bin:/bin"
-"""
+""" % {'exe': sys.executable }
srf_condor_entries = []
lib_db = find_archive_sequence_files(host,
apidata,
base_daf = os.path.basename(daf_path)
for lib_id, result_dir in library_result_map:
+ if not os.path.exists(result_dir):
+ raise RuntimeError("Couldn't find target directory %s" %(result_dir,))
submission_daf = os.path.join(result_dir, base_daf)
if not os.path.exists(submission_daf):
+ if not os.path.exists(daf_path):
+ raise RuntimeError("Couldn't find daf: %s" %(daf_path,))
os.link(daf_path, submission_daf)
for lib_id, result_dir in library_result_map:
order_by = ['order_by=files', 'view', 'replicate', 'cell',
- 'readType', 'mapAlgorithm', 'insertLength' ]
+ 'readType', 'mapAlgorithm', 'insertLength', 'md5sum' ]
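+        # md5sum is a new column in the ddf; it is filled in per file below
+        # (and left as "None" for fastqs)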
inifile = ['[config]']
inifile += [" ".join(order_by)]
inifile += ['']
# write other lines
submission_files = os.listdir(result_dir)
fastqs = {}
+ fastq_attributes = {}
for f in submission_files:
attributes = view_map.find_attributes(f, lib_id)
if attributes is None:
raise ValueError("Unrecognized file: %s" % (f,))
+ attributes['md5sum'] = "None"
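+        # placeholder; replaced below with the real checksum for non-fastq
+        # files when one can be computed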
ext = attributes["extension"]
if attributes['view'] is None:
continue
elif attributes.get("type", None) == 'fastq':
fastqs.setdefault(ext, set()).add(f)
+ fastq_attributes[ext] = attributes
else:
+ md5sum = make_md5sum(os.path.join(result_dir,f))
+ if md5sum is not None:
+ attributes['md5sum']=md5sum
inifile.extend(
make_submission_section(line_counter,
[f],
)
inifile += ['']
line_counter += 1
+ # add in fastqs on a single line.
- # add in fastqs on a single line.
- for extension, fastq_set in fastqs.items():
+ for extension, fastq_files in fastqs.items():
inifile.extend(
make_submission_section(line_counter,
- fastq_set,
- attributes[extension])
+ fastq_files,
+ fastq_attributes[extension])
)
inifile += ['']
line_counter += 1
output = open(ddf_name,'w')
file_list = read_ddf_ini(ininame, output)
+ logging.info(
+ "Read config {0}, found files: {1}".format(
+ ininame, ", ".join(file_list)))
file_list.append(daf_name)
if ddf_name is not None:
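+        # tar's 'h' flag follows the symlinks created by make_tree_from so the
+        # archive holds the real data files; GZIP=-3 favors speed over size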
script = """Universe = vanilla
Executable = /bin/tar
-arguments = czvf ../%(archivename)s %(filelist)s
+arguments = czvhf ../%(archivename)s %(filelist)s
Error = compress.err.$(Process).log
Output = compress.out.$(Process).log
-Log = /tmp/submission-compress.log
+Log = /tmp/submission-compress-%(user)s.log
initialdir = %(initialdir)s
+environment="GZIP=-3"
+request_memory = 20
queue
"""
context = {'archivename': make_submission_name(ininame),
'filelist': " ".join(files),
- 'initialdir': os.getcwd()}
+ 'initialdir': os.getcwd(),
+ 'user': os.getlogin()}
condor_script = make_condor_name(ininame, 'archive')
condor_stream = open(condor_script,'w')
script = """Universe = vanilla
Executable = /usr/bin/lftp
-arguments = -c put ../%(archivename)s -o ftp://detrout@encodeftp.cse.ucsc.edu/
+arguments = -c put ../%(archivename)s -o ftp://%(ftpuser)s:%(ftppassword)s@%(ftphost)s/%(archivename)s
Error = upload.err.$(Process).log
Output = upload.out.$(Process).log
-Log = /tmp/submission-upload.log
+Log = /tmp/submission-upload-%(user)s.log
initialdir = %(initialdir)s
queue
"""
+ auth = netrc.netrc(os.path.expanduser("~diane/.netrc"))
+
+ encodeftp = 'encodeftp.cse.ucsc.edu'
+ ftpuser = auth.hosts[encodeftp][0]
+ ftppassword = auth.hosts[encodeftp][2]
context = {'archivename': make_submission_name(ininame),
- 'initialdir': os.getcwd()}
+ 'initialdir': os.getcwd(),
+ 'user': os.getlogin(),
+ 'ftpuser': ftpuser,
+ 'ftppassword': ftppassword,
+ 'ftphost': encodeftp}
condor_script = make_condor_name(ininame, 'upload')
condor_stream = open(condor_script,'w')
condor_stream.write(script % context)
condor_stream.close()
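+    # the submit file embeds the ftp password, so restrict it to owner
+    # read/write only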
+ os.chmod(condor_script, stat.S_IREAD|stat.S_IWRITE)
+
return condor_script
def condor_srf_to_fastq(srf_file, target_pathname, paired, flowcell=None,
mid=None, force=False):
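+    # condor runs sys.executable (see the srf condor header in build_fastqs),
+    # so the srf2fastq module's source file becomes the first argument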
- args = [ srf_file, ]
+ py = srf2fastq.__file__
+ args = [ py, srf_file, ]
if paired:
args.extend(['--left', target_pathname])
# this is ugly. I did it because I was pregenerating the target
def condor_qseq_to_fastq(qseq_file, target_pathname, flowcell=None, force=False):
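+    # same pattern as condor_srf_to_fastq: hand the qseq2fastq module's
+    # source file to the python interpreter that condor invokes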
- args = ['-i', qseq_file, '-o', target_pathname ]
+ py = qseq2fastq.__file__
+ args = [py, '-i', qseq_file, '-o', target_pathname ]
if flowcell is not None:
args.extend(['-f', flowcell])
script = """
self.apidata = apidata
self.lib_cache = {}
+ self.lib_paired = {}
# ma is "map algorithm"
ma = 'TH1014'
self.patterns = [
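+            # more specific globs are listed before the generic ones
+            # (e.g. *.splices.bam before *.bam) so they match first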
('*.bai', None),
- ('*.bam', self._guess_bam_view),
('*.splices.bam', 'Splices'),
+ ('*.bam', self._guess_bam_view),
+ ('junctions.bed', 'Junctions'),
('*.jnct', 'Junctions'),
- ('*.plus.bigwig', 'PlusSignal'),
- ('*.minus.bigwig', 'MinusSignal'),
+ ('*unique.bigwig', None),
+ ('*plus.bigwig', 'PlusSignal'),
+ ('*minus.bigwig', 'MinusSignal'),
('*.bigwig', 'Signal'),
('*.tar.bz2', None),
('*.condor', None),
('*.daf', None),
('*.ddf', None),
- ('*denovo.genes.expr', 'GeneDeNovo'),
- ('*denovo.transcripts.expr','TranscriptDeNovo'),
- ('*novel.genes.expr', 'GeneDeNovo'),
- ('*novel.transcripts.expr', 'TranscriptDeNovo'),
- ('*genes.expr', 'GeneFPKM'),
- ('*transcripts.expr', 'TranscriptFPKM'),
- ('*transcript.expr', 'TranscriptFPKM'),
+
+ ('*ufflinks?0.9.3.genes.gtf', 'GeneDeNovo'),
+ ('*ufflinks?0.9.3.transcripts.gtf', 'TranscriptDeNovo'),
+ ('*GENCODE-v3c.exonFPKM.gtf', 'ExonsGencV3c'),
+ ('*GENCODE-v3c.genes.gtf', 'GeneGencV3c'),
+ ('*GENCODE-v3c.transcripts.gtf', 'TranscriptGencV3c'),
+ ('*GENCODE-v3c.TSS.gtf', 'TSS'),
+ ('*.junctions.bed6+3', 'Junctions'),
+
+ ('*.?ufflinks-0.9.0?genes.expr', 'GeneDeNovo'),
+ ('*.?ufflinks-0.9.0?transcripts.expr', 'TranscriptDeNovo'),
+ ('*.?ufflinks-0.9.0?transcripts.gtf', 'GeneModel'),
+
+ ('*.GENCODE-v3c?genes.expr', 'GeneGCV3c'),
+ ('*.GENCODE-v3c?transcript*.expr', 'TranscriptGCV3c'),
+ ('*.GENCODE-v3c?transcript*.gtf', 'TranscriptGencV3c'),
+ ('*.GENCODE-v4?genes.expr', None), #'GeneGCV4'),
+ ('*.GENCODE-v4?transcript*.expr', None), #'TranscriptGCV4'),
+ ('*.GENCODE-v4?transcript*.gtf', None), #'TranscriptGencV4'),
+ ('*_1.75mers.fastq', 'FastqRd1'),
+ ('*_2.75mers.fastq', 'FastqRd2'),
('*_r1.fastq', 'FastqRd1'),
('*_r2.fastq', 'FastqRd2'),
('*.fastq', 'Fastq'),
('*.gtf', 'GeneModel'),
('*.ini', None),
('*.log', None),
- ('*.stats.txt', 'InsLength'),
+ ('*.md5', None),
+ ('paired-end-distribution*', 'InsLength'),
+ ('*.stats.txt', 'InsLength'),
('*.srf', None),
('*.wig', None),
('*.zip', None),
+ ('transfer_log', None),
]
self.views = {
None: {"MapAlgorithm": "NA"},
"Paired": {"MapAlgorithm": ma},
+ "Aligns": {"MapAlgorithm": ma},
"Single": {"MapAlgorithm": ma},
"Splices": {"MapAlgorithm": ma},
"Junctions": {"MapAlgorithm": ma},
"PlusSignal": {"MapAlgorithm": ma},
"MinusSignal": {"MapAlgorithm": ma},
"Signal": {"MapAlgorithm": ma},
+ "GeneModel": {"MapAlgorithm": ma},
"GeneDeNovo": {"MapAlgorithm": ma},
"TranscriptDeNovo": {"MapAlgorithm": ma},
- "GeneDeNovo": {"MapAlgorithm": ma},
- "TranscriptDeNovo": {"MapAlgorithm": ma},
- "GeneFPKM": {"MapAlgorithm": ma},
- "TranscriptFPKM": {"MapAlgorithm": ma},
- "TranscriptFPKM": {"MapAlgorithm": ma},
+ "ExonsGencV3c": {"MapAlgorithm": ma},
+ "GeneGencV3c": {"MapAlgorithm": ma},
+ "TSS": {"MapAlgorithm": ma},
+ "GeneGCV3c": {"MapAlgorithm": ma},
+ "TranscriptGCV3c": {"MapAlgorithm": ma},
+ "TranscriptGencV3c": {"MapAlgorithm": ma},
+ "GeneGCV4": {"MapAlgorithm": ma},
+ "TranscriptGCV4": {"MapAlgorithm": ma},
"FastqRd1": {"MapAlgorithm": "NA", "type": "fastq"},
"FastqRd2": {"MapAlgorithm": "NA", "type": "fastq"},
"Fastq": {"MapAlgorithm": "NA", "type": "fastq" },
- "GeneModel": {"MapAlgorithm": ma},
"InsLength": {"MapAlgorithm": ma},
}
# view name is one of the attributes
'cell': lib_info['cell_line'],
'replicate': lib_info['replicate'],
}
- is_paired = self._is_paired(lib_info)
+ is_paired = self._is_paired(lib_id, lib_info)
if is_paired:
attributes.update(self.get_paired_attributes(lib_info))
if is_paired:
return "Paired"
else:
- return "Align"
+ return "Aligns"
- def _is_paired(self, lib_info):
+ def _is_paired(self, lib_id, lib_info):
"""Determine if a library is paired end"""
if len(lib_info["lane_set"]) == 0:
return False
-
- is_paired = 0
- isnot_paired = 0
- # check to see if all the flowcells are the same.
- # otherwise we might need to do something complicated
- for flowcell in lib_info["lane_set"]:
- if flowcell["paired_end"]:
- is_paired += 1
- else:
- isnot_paired += 1
- logging.debug("Library %s: %d were, %d were not paired" % \
- (lib_info["library_id"], is_paired, isnot_paired))
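+        # cache the answer per library so the lane set is only examined once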
+        if lib_id not in self.lib_paired:
+ is_paired = 0
+ isnot_paired = 0
+ failed = 0
+ # check to see if all the flowcells are the same.
+ # otherwise we might need to do something complicated
+ for flowcell in lib_info["lane_set"]:
+ # yes there's also a status code, but this comparison
+ # is easier to read
+ if flowcell["status"].lower() == "failed":
+ # ignore failed flowcell
+ failed += 1
+ elif flowcell["paired_end"]:
+ is_paired += 1
+ else:
+ isnot_paired += 1
+
+ logging.debug("Library %s: %d paired, %d single, %d failed" % \
+ (lib_info["library_id"], is_paired, isnot_paired, failed))
- if is_paired > isnot_paired:
- return True
- elif is_paired < isnot_paired:
- return False
- else:
- raise RuntimeError("Assumptions about paired vs not paired are wrong")
+ if is_paired > isnot_paired:
+ self.lib_paired[lib_id] = True
+ elif is_paired < isnot_paired:
+ self.lib_paired[lib_id] = False
+ else:
+ raise RuntimeError("Equal number of paired & unpaired lanes."\
+ "Can't guess library paired status")
+
+ return self.lib_paired[lib_id]
def get_paired_attributes(self, lib_info):
if lib_info['insert_size'] is None:
if not os.path.exists(f):
raise RuntimeError("%s does not exist" % (f,))
+def make_md5sum(filename):
+ """Quickly find the md5sum of a file
+ """
+    md5_cache = filename + ".md5"
+ if os.path.exists(md5_cache):
+ logging.debug("Found md5sum in {0}".format(md5_cache))
+ stream = open(md5_cache,'r')
+ lines = stream.readlines()
+ md5sum = parse_md5sum_line(lines, filename)
+ else:
+ md5sum = make_md5sum_unix(filename, md5_cache)
+ return md5sum
+
+def make_md5sum_unix(filename, md5_cache):
+ cmd = ["md5sum", filename]
+ logging.debug("Running {0}".format(" ".join(cmd)))
+ p = Popen(cmd, stdout=PIPE)
+    stdout, stderr = p.communicate()
+ retcode = p.wait()
+ logging.debug("Finished {0} retcode {1}".format(" ".join(cmd), retcode))
+ if retcode != 0:
+ logging.error("Trouble with md5sum for {0}".format(filename))
+ return None
+    lines = stdout.split(os.linesep)
+ md5sum = parse_md5sum_line(lines, filename)
+ if md5sum is not None:
+ logging.debug("Caching sum in {0}".format(md5_cache))
+ stream = open(md5_cache, "w")
+        stream.write(stdout)
+ stream.close()
+ return md5sum
+
+def parse_md5sum_line(lines, filename):
+ md5sum, md5sum_filename = lines[0].split()
+ if md5sum_filename != filename:
+ errmsg = "MD5sum and I disagre about filename. {0} != {1}"
+ logging.error(errmsg.format(filename, md5sum_filename))
+ return None
+ return md5sum
if __name__ == "__main__":
main()