Merge branch 'django1.4'
author Diane Trout <diane@ghic.org>
Tue, 17 Dec 2013 19:20:03 +0000 (11:20 -0800)
committer Diane Trout <diane@ghic.org>
Tue, 17 Dec 2013 19:20:03 +0000 (11:20 -0800)
There was a conflict caused by my qualifying the load_pipeline_run_xml function call.
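
For illustration only (the exact module path is an assumption, shown just to make
"qualifying" concrete), the two sides differed roughly like this:

    xml = load_pipeline_run_xml(run_xml_path)            # unqualified call
    xml = runfolder.load_pipeline_run_xml(run_xml_path)  # qualified through its module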

Conflicts:
htsworkflow/frontend/samples/views.py

htsworkflow/pipelines/retrieve_config.py
htsworkflow/pipelines/srf.py
htsworkflow/pipelines/test/test_runfolder_rta180.py

index a189f097e3017b8c8aac8b26b3d96ccbb0ec4460..fe2b9428cd01c38775c6a743dc2588b140425833 100644 (file)
@@ -111,7 +111,7 @@ def format_gerald_header(flowcell_info):
     config += ['Flowcell Notes:']
     config.extend(flowcell_info['notes'].split('\r\n'))
     config += ['']
-    for lane_number in LANE_LIST_JSON:
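+    # iterate over the lanes actually present in this flowcell, in sorted order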
+    for lane_number in sorted(flowcell_info['lane_set']):
         lane_contents = flowcell_info['lane_set'][lane_number]
         for lane_info in lane_contents:
             config += ['Lane%s: %s | %s' % (lane_number,
@@ -356,8 +356,8 @@ def save_sample_sheet(outstream, options, flowcell_info):
                             'Operator': format_operator_name}
     out = csv.DictWriter(outstream, sample_sheet_fields)
     out.writerow(dict(((x,x) for x in sample_sheet_fields)))
-    for lane_number in LANE_LIST:
-        lane_contents = flowcell_info['lane_set'][str(lane_number)]
+    for lane_number in sorted(flowcell_info['lane_set']):
+        lane_contents = flowcell_info['lane_set'][lane_number]
 
         pooled_lane_contents = []
         for library in lane_contents:
index 5a6c969bd768aa09fab68e005f9ef5caae7ffcea..03b96b8f35b481fddfb7f41100ed0366c2ef473f 100644 (file)
@@ -1,3 +1,4 @@
+import optparse
 from glob import glob
 import logging
 import os
@@ -211,3 +212,47 @@ def make_md5_commands(destdir):
 
   return cmd_list
 
+def main(cmdline=None):
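+    # entry point: validate options, then copy (fastq) or convert (qseq/srf) the raw data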
+    parser = make_parser()
+    opts, args = parser.parse_args(cmdline)
+
+    logging.basicConfig(level=logging.DEBUG)
+    if not opts.name:
+        parser.error("Specify run name. Usually runfolder name")
+    if not opts.destination:
+        parser.error("Specify where to write sequence files")
+    if not opts.site_name:
+        parser.error("Specify site name")
+    if len(args) != 1:
+        parser.error("Can only process one directory")
+
+    source = args[0]
+    LOGGER.info("Raw Format is: %s" % (opts.format, ))
+    seq_cmds = []
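+    # fastq runs are copied directly; qseq and srf runs build command lists to run below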
+    if opts.format == 'fastq':
+        LOGGER.info("raw data = %s" % (source,))
+        copy_hiseq_project_fastqs(opts.name, source, opts.site_name, opts.destination)
+    elif opts.format == 'qseq':
+        seq_cmds = make_qseq_commands(opts.name, source, opts.lanes, opts.site_name, opts.destination)
+    elif opts.format == 'srf':
+        seq_cmds = make_srf_commands(opts.name, source, opts.lanes, opts.site_name, opts.destination, 0)
+    else:
+        raise ValueError('Unknown --format=%s' % (opts.format))
+    print seq_cmds
+    num_jobs = 1  # run the generated commands one at a time
+    run_commands(source, seq_cmds, num_jobs)
+
+def make_parser():
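+    # build the option parser used by main() above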
+    parser = optparse.OptionParser()
+    parser.add_option('-f', '--format', default='fastq',
+                        help="Format raw data is in")
+    parser.add_option('-n', '--name', default=None,
+                        help="Specify run name")
+    parser.add_option('-d', '--destination', default=None,
+                        help='Specify where to write files (cycle dir)')
+    parser.add_option('-s', '--site-name', default=None,
+                        help="Specify site name")
+    parser.add_option('-l', '--lanes', default="1,2,3,4,5,6,7,8",
+                        help="Which lanes to process, defaults to all")
+    return parser
+
+if __name__ == "__main__":
+    main()
index 0db7857399cd8f9c391a8b8c58fb0b5023fef0f5..2817328dc3a0fdcc415cc17e0c1adda725bebc37 100644 (file)
@@ -274,6 +274,7 @@ class RunfolderTests(TestCase):
         self.failUnlessEqual(runs[0].serialization_filename, name)
 
 
+
         r1 = runs[0]
         xml = r1.get_elements()
         xml_str = ElementTree.tostring(xml)