from ConfigParser import SafeConfigParser
import logging
import netrc
from optparse import OptionParser, OptionGroup
import os
from pprint import pprint, pformat
import stat
from StringIO import StringIO
import sys
from zipfile import ZipFile

import RDF

from htsworkflow.util import api
from htsworkflow.util.rdfhelp import \
     fromTypedNode, get_model, get_serializer, \
     load_into_model, sparql_query, submissionOntology
from htsworkflow.submission.daf import \
     UCSCSubmission, MetadataLookupException, get_submission_uri
from htsworkflow.submission.results import ResultMap
from htsworkflow.submission.condorfastq import CondorFastqExtract

logger = logging.getLogger('ucsc_gather')

# default paths to the external tools used by the condor scripts
TAR = '/bin/tar'
LFTP = '/usr/bin/lftp'
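# Overview (inferred from the code below): this script drives a UCSC/ENCODE
# track submission.  It collects library result directories into a ResultMap,
# loads and queries submission metadata in an RDF model, writes one
# tab-separated DDF per submission, and generates condor archive/upload jobs
# chained together in a submission.dagman file.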
def main(cmdline=None):
    parser = make_parser()
    opts, args = parser.parse_args(cmdline)

    if opts.debug:
        logging.basicConfig(level = logging.DEBUG )
    elif opts.verbose:
        logging.basicConfig(level = logging.INFO )
    else:
        logging.basicConfig(level = logging.WARNING )

    apidata = api.make_auth_from_opts(opts, parser)

    model = get_model(opts.model, opts.db_path)

    mapper = UCSCSubmission(opts.name, opts.daf, model)
    if opts.library_url is not None:
        mapper.library_url = opts.library_url
    submission_uri = get_submission_uri(opts.name)

    if opts.load_rdf is not None:
        if submission_uri is None:
            parser.error("Please specify the submission name")
        load_into_model(model, 'turtle', opts.load_rdf, submission_uri)

    if opts.make_ddf and opts.daf is None:
        parser.error("Please specify your daf when making ddf files")

    results = ResultMap()
    for a in args:
        results.add_results_from_file(a)

    if opts.make_tree_from is not None:
        results.make_tree_from(opts.make_tree_from)

    if opts.link_daf:
        if opts.name is None:
            parser.error("Specify a submission name")
        if mapper.daf is None:
            parser.error("Please load a daf first")
        mapper.link_daf(results)

    if opts.fastq:
        extractor = CondorFastqExtract(opts.host, apidata, opts.sequence,
                                       force=opts.force)
        extractor.create_scripts(results)

    if opts.scan_submission:
        mapper.scan_submission_dirs(results)

    if opts.make_ddf:
        if not os.path.exists(TAR):
            parser.error("%s does not exist, please specify --tar" % (TAR,))
        if not os.path.exists(LFTP):
            parser.error("%s does not exist, please specify --lftp" % (LFTP,))
        make_all_ddfs(mapper, results, opts.daf, force=opts.force)

    if opts.zip_ddf:
        zip_ddfs(mapper, results, opts.daf)

    if opts.sparql is not None:
        sparql_query(model, opts.sparql)

    if opts.print_rdf:
        writer = get_serializer()
        print writer.serialize_model_to_string(model)

def make_parser():
    parser = OptionParser()

    model = OptionGroup(parser, 'model')
    model.add_option('--name', help="Set submission name")
    model.add_option('--db-path', default=None,
                     help="set rdf database path")
    model.add_option('--model', default=None,
                     help="Load model database")
    model.add_option('--load-rdf', default=None,
                     help="load rdf statements into model")
    model.add_option('--sparql', default=None, help="execute sparql query")
    model.add_option('--print-rdf', action="store_true", default=False,
                     help="print ending model state")
    model.add_option('--tar', default=TAR,
                     help="override path to tar command")
    model.add_option('--lftp', default=LFTP,
                     help="override path to lftp command")
    parser.add_option_group(model)

    commands = OptionGroup(parser, 'commands')
    commands.add_option('--make-tree-from',
                        help="create directories & link data files",
                        default=None)
    commands.add_option('--fastq', default=False, action="store_true",
                        help="generate scripts for making fastq files")
    commands.add_option('--scan-submission', default=False, action="store_true",
                        help="Import metadata for submission into our model")
    commands.add_option('--link-daf', default=False, action="store_true",
                        help="link daf into submission directories")
    commands.add_option('--make-ddf', help='make the ddfs', default=False,
                        action="store_true")
    commands.add_option('--zip-ddf', default=False, action='store_true',
                        help='zip up just the metadata')
    parser.add_option_group(commands)

    parser.add_option('--force', default=False, action="store_true",
                      help="Force regenerating fastqs")
    parser.add_option('--daf', default=None, help='specify daf name')
    parser.add_option('--library-url', default=None,
                      help="specify an alternate source for library information")
    parser.add_option('--verbose', default=False, action="store_true",
                      help='verbose logging')
    parser.add_option('--debug', default=False, action="store_true",
                      help='debug logging')

    api.add_auth_options(parser)

    return parser

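# Example invocation (illustrative only; paths, names, and flags are not
# taken from a real submission):
#
#   ucsc_gather.py --daf encode.daf --name submission-0001 \
#       --scan-submission --make-ddf library_results.txt
#
# Positional arguments are files read by ResultMap.add_results_from_file(),
# mapping library ids to their result directories.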
def make_all_ddfs(view_map, library_result_map, daf_name, make_condor=True, force=False):
    dag_fragment = []
    for lib_id, result_dir in library_result_map.items():
        submissionNode = view_map.get_submission_node(result_dir)
        dag_fragment.extend(
            make_ddf(view_map, submissionNode, daf_name, make_condor, result_dir)
        )

    if make_condor and len(dag_fragment) > 0:
        dag_filename = 'submission.dagman'
        if not force and os.path.exists(dag_filename):
            logger.warn("%s exists, please delete" % (dag_filename,))
        else:
            f = open(dag_filename,'w')
            f.write( os.linesep.join(dag_fragment))
            f.write( os.linesep )
            f.close()

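# make_ddf() below writes a tab-separated DDF: the header row lists the DAF
# variables (filename first), and each following row describes one view,
# with the 'files' and 'md5sum' columns joined into comma-separated lists
# when a view has more than one file.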
def make_ddf(view_map, submissionNode, daf_name, make_condor=False, outdir=None):
    """
    Make ddf files, and bonus condor file
    """
    dag_fragments = []

    query_template = """PREFIX libraryOntology: <http://jumpgate.caltech.edu/wiki/LibraryOntology#>
PREFIX submissionOntology: <http://jumpgate.caltech.edu/wiki/UcscSubmissionOntology#>
PREFIX ucscDaf: <http://jumpgate.caltech.edu/wiki/UcscDaf#>

select ?submitView ?files ?md5sum ?view ?cell ?antibody ?sex ?control ?strain ?controlId ?labExpId ?labVersion ?treatment ?protocol ?readType ?insertLength ?replicate ?mapAlgorithm
WHERE {
  ?file ucscDaf:filename ?files ;
        ucscDaf:md5sum ?md5sum .
  ?submitView ucscDaf:has_file ?file ;
              ucscDaf:view ?dafView ;
              ucscDaf:submission <%(submission)s> .
  ?dafView ucscDaf:name ?view .
  <%(submission)s> submissionOntology:library ?library .

  OPTIONAL { ?library libraryOntology:antibody ?antibody }
  OPTIONAL { ?library libraryOntology:cell_line ?cell }
  OPTIONAL { <%(submission)s> ucscDaf:control ?control }
  OPTIONAL { <%(submission)s> ucscDaf:controlId ?controlId }
  OPTIONAL { ?library ucscDaf:sex ?sex }
  OPTIONAL { ?library libraryOntology:library_id ?labExpId }
  OPTIONAL { ?library libraryOntology:library_id ?labVersion }
  OPTIONAL { ?library libraryOntology:replicate ?replicate }
  OPTIONAL { ?library libraryOntology:condition_term ?treatment }
  OPTIONAL { ?library ucscDaf:protocol ?protocol }
  OPTIONAL { ?library ucscDaf:readType ?readType }
  OPTIONAL { ?library ucscDaf:strain ?strain }
  OPTIONAL { ?library libraryOntology:insert_size ?insertLength }
  OPTIONAL { ?library ucscDaf:mapAlgorithm ?mapAlgorithm }
}
ORDER BY ?submitView"""

    name = fromTypedNode(view_map.model.get_target(submissionNode, submissionOntology['name']))
    if name is None:
        logger.error("Need name for %s" % (str(submissionNode)))
        return []

    ddf_name = make_ddf_name(name)
    if outdir is not None:
        outfile = os.path.join(outdir, ddf_name)
        output = open(outfile,'w')
    else:
        outfile = 'stdout:'
        output = sys.stdout

    formatted_query = query_template % {'submission': str(submissionNode.uri)}

    query = RDF.SPARQLQuery(formatted_query)
    results = query.execute(view_map.model)

    # filename goes first
    variables = view_map.get_daf_variables()
    output.write('\t'.join(variables))
    output.write(os.linesep)

    all_views = {}
    all_files = []
    for row in results:
        viewname = fromTypedNode(row['view'])
        current = all_views.setdefault(viewname, {})
        for variable_name in variables:
            # fromTypedNode may return None; str() turns that into 'None'
            value = str(fromTypedNode(row[variable_name]))
            if value is None or value == 'None':
                logger.warn("{0}: {1} was None".format(outfile, variable_name))
            if variable_name in ('files', 'md5sum'):
                current.setdefault(variable_name,[]).append(value)
            else:
                current[variable_name] = value

    # one line per view; files and md5sums become comma separated lists
    for view in all_views.keys():
        line = []
        for variable_name in variables:
            if variable_name in ('files', 'md5sum'):
                line.append(','.join(all_views[view][variable_name]))
            else:
                line.append(all_views[view][variable_name])
        output.write("\t".join(line))
        output.write(os.linesep)
        all_files.extend(all_views[view]['files'])

    logger.info(
        "Examined {0}, found files: {1}".format(
            str(submissionNode), ", ".join(all_files)))

    all_files.append(daf_name)
    all_files.append(ddf_name)

    if make_condor:
        archive_condor = make_condor_archive_script(name, all_files, outdir)
        # the upload script also needs the path to the lftp binary
        upload_condor = make_condor_upload_script(name, LFTP, outdir)

        dag_fragments.extend(
            make_dag_fragment(name, archive_condor, upload_condor)
        )

    return dag_fragments

def zip_ddfs(view_map, library_result_map, daf_name):
    """zip up just the ddf & daf files
    """
    rootdir = os.getcwd()
    for lib_id, result_dir in library_result_map.items():
        submissionNode = view_map.get_submission_node(result_dir)
        nameNode = view_map.model.get_target(submissionNode,
                                             submissionOntology['name'])
        name = fromTypedNode(nameNode)
        if name is None:
            logger.error("Need name for %s" % (str(submissionNode)))
            continue

        zip_name = '../{0}.zip'.format(lib_id)
        os.chdir(os.path.join(rootdir, result_dir))
        with ZipFile(zip_name, 'w') as stream:
            stream.write(make_ddf_name(name))
            stream.write(daf_name)
        os.chdir(rootdir)

def make_condor_archive_script(name, files, outdir=None):
    script = """Universe = vanilla

Executable = %(tar)s
arguments = czvhf ../%(archivename)s %(filelist)s

Error = compress.out.$(Process).log
Output = compress.out.$(Process).log
Log = /tmp/submission-compress-%(user)s.log
initialdir = %(initialdir)s
environment="GZIP=-3"

queue
"""
    if outdir is None:
        outdir = os.getcwd()
    for f in files:
        pathname = os.path.join(outdir, f)
        if not os.path.exists(pathname):
            raise RuntimeError("Missing %s from %s" % (f,outdir))

    context = {'archivename': make_submission_name(name),
               'filelist': " ".join(files),
               'initialdir': os.path.abspath(outdir),
               'user': os.getlogin(),
               # assumes the module-level TAR constant points at the tar binary
               'tar': TAR,
               }

    condor_script = os.path.join(outdir, make_condor_name(name, 'archive'))
    condor_stream = open(condor_script,'w')
    condor_stream.write(script % context)
    condor_stream.close()
    return condor_script

def make_condor_upload_script(name, lftp, outdir=None):
    script = """Universe = vanilla

Executable = %(lftp)s
arguments = -c put %(archivename)s -o ftp://%(ftpuser)s:%(ftppassword)s@%(ftphost)s/%(archivename)s

Error = upload.out.$(Process).log
Output = upload.out.$(Process).log
Log = /tmp/submission-upload-%(user)s.log
initialdir = %(initialdir)s

queue
"""
    if outdir is None:
        outdir = os.getcwd()

    # ftp credentials are read from a netrc file rather than the command line
    auth = netrc.netrc(os.path.expanduser("~diane/.netrc"))

    encodeftp = 'encodeftp.cse.ucsc.edu'
    ftpuser = auth.hosts[encodeftp][0]
    ftppassword = auth.hosts[encodeftp][2]
    context = {'archivename': make_submission_name(name),
               'initialdir': os.path.abspath(outdir),
               'user': os.getlogin(),
               'ftpuser': ftpuser,
               'ftppassword': ftppassword,
               'ftphost': encodeftp,
               'lftp': lftp,
               }

    condor_script = os.path.join(outdir, make_condor_name(name, 'upload'))
    condor_stream = open(condor_script,'w')
    condor_stream.write(script % context)
    condor_stream.close()
    os.chmod(condor_script, stat.S_IREAD|stat.S_IWRITE)

    return condor_script

def make_dag_fragment(ininame, archive_condor, upload_condor):
    """
    Make the pair of DAG fragments that compress and then upload the data.
    """
    cur_dir = os.getcwd()
    archive_condor = os.path.join(cur_dir, archive_condor)
    upload_condor = os.path.join(cur_dir, upload_condor)
    job_basename = make_base_name(ininame)

    fragments = []
    fragments.append('JOB %s_archive %s' % (job_basename, archive_condor))
    fragments.append('JOB %s_upload %s' % (job_basename, upload_condor))
    fragments.append('PARENT %s_archive CHILD %s_upload' % (job_basename, job_basename))

    return fragments

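# For a submission named "foo" (hypothetical), make_dag_fragment() returns
# lines of the form:
#
#   JOB foo_archive /path/to/foo.archive.condor
#   JOB foo_upload /path/to/foo.upload.condor
#   PARENT foo_archive CHILD foo_upload
#
# which make_all_ddfs() joins into the submission.dagman file.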
def make_base_name(pathname):
    base = os.path.basename(pathname)
    name, ext = os.path.splitext(base)
    return name


def make_submission_name(ininame):
    name = make_base_name(ininame)
    return name + '.tgz'


def make_ddf_name(pathname):
    name = make_base_name(pathname)
    return name + '.ddf'


def make_condor_name(pathname, run_type=None):
    name = make_base_name(pathname)
    elements = [name]
    if run_type is not None:
        elements.append(run_type)
    elements.append("condor")
    return ".".join(elements)

def parse_filelist(file_string):
    return file_string.split(",")


def validate_filelist(files):
    """
    Die if a file doesn't exist in a file list
    """
    for f in files:
        if not os.path.exists(f):
            raise RuntimeError("%s does not exist" % (f,))


if __name__ == "__main__":
    main()