import urllib2
import urlparse
-
from htsworkflow.util import api
from htsworkflow.pipelines.sequences import \
create_sequence_table, \
elif opts.verbose:
logging.basicConfig(level = logging.INFO )
else:
- logging.basicConfig(level = logging.WARNING )
-
+ logging.basicConfig(level = logging.WARNING )
apidata = {'apiid': opts.apiid, 'apikey': opts.apikey }
if opts.makeddf:
make_all_ddfs(library_result_map, opts.daf)
+
def make_parser():
# Load defaults from the config files
config = SafeConfigParser()
return parser
+
def build_fastqs(host, apidata, sequences_path, library_result_map,
force=False ):
"""
qseq_condor_header,
qseq_condor_entries)
+
def find_missing_targets(library_result_map, lib_db, force=False):
"""
Check if the sequence file exists.
return needed_targets
+
def link_daf(daf_path, library_result_map):
if not os.path.exists(daf_path):
raise RuntimeError("%s does not exist, how can I link to it?" % (daf_path,))
if not os.path.exists(submission_daf):
os.link(daf_path, submission_daf)
+
def make_submission_ini(host, apidata, library_result_map, paired=True):
# ma is "map algorithm"
ma = 'TH1014'
f = open(result_ini,'w')
f.write(os.linesep.join(inifile))
+
def make_lane_dict(lib_db, lib_id):
"""
Convert the lane_set in a lib_db to a dictionary
result.append((lane['flowcell'], lane))
return dict(result)
+
def make_all_ddfs(library_result_map, daf_name, make_condor=True):
dag_fragment = []
for lib_id, result_dir in library_result_map:
return dag_fragments
+
def read_ddf_ini(filename, output=sys.stdout):
"""
Read a ini file and dump out a tab delmited text file
output.write("\t".join(values))
output.write(os.linesep)
return file_list
-
+
+
def read_library_result_map(filename):
"""
Read a file that maps library id to result directory.
library_id, result_dir = line.split()
results.append((library_id, result_dir))
return results
-
+
+
def make_condor_archive_script(ininame, files):
script = """Universe = vanilla
condor_stream.close()
return condor_script
+
def make_condor_upload_script(ininame):
script = """Universe = vanilla
condor_stream.close()
return condor_script
+
def make_dag_fragment(ininame, archive_condor, upload_condor):
"""
Make the couple of fragments compress and then upload the data.
return fragments
+
def get_library_info(host, apidata, library_id):
    """Fetch metadata for one library from the htsworkflow server.

    Builds the library URL for *library_id* on *host* and retrieves it,
    passing *apidata* (apiid/apikey credentials) along with the request.
    Returns whatever api.retrieve_info yields for that URL.
    """
    library_url = api.library_url(host, library_id)
    return api.retrieve_info(library_url, apidata)
+
def condor_srf_to_fastq(srf_file, target_pathname, paired, flowcell=None,
mid=None, force=False):
args = [ srf_file, ]
return script
+
def condor_qseq_to_fastq(qseq_file, target_pathname, flowcell=None, force=False):
args = ['-i', qseq_file, '-o', target_pathname ]
if flowcell is not None:
return lib_db
+
def find_best_extension(extension_map, filename):
"""
Search through extension_map looking for the best extension
elif len(ext) > len(best_ext):
best_ext = ext
return best_ext
-
+
+
def make_submission_section(line_counter, files, standard_attributes, file_attributes):
"""
Create a section in the submission ini file
inifile += ["%s=%s" % (k,v)]
return inifile
+
def make_base_name(pathname):
    """Return the final path component of *pathname* without its extension.

    e.g. '/tmp/sub/foo.ini' -> 'foo'; a name with no extension is
    returned unchanged.
    """
    base = os.path.basename(pathname)
    # splitext gives (root, ext); only the root is wanted, so discard ext
    # (the original bound it to an unused local).
    return os.path.splitext(base)[0]
+
def make_submission_name(ininame):
    """Derive the submission tarball filename for *ininame*.

    The result is the extension-free base name with '.tgz' appended.
    """
    base = make_base_name(ininame)
    return "%s.tgz" % (base,)
+
def make_ddf_name(pathname):
    """Derive the ddf filename for *pathname*.

    The result is the extension-free base name with '.ddf' appended.
    """
    stem = make_base_name(pathname)
    return "%s.ddf" % (stem,)
+
def make_condor_name(pathname, run_type=None):
name = make_base_name(pathname)
elements = [name]
elements.append(run_type)
elements.append('condor')
return ".".join(elements)
+
def make_submit_script(target, header, body_list):
"""
def parse_filelist(file_string):
    """Break a comma separated string into a list of file names."""
    pieces = file_string.split(',')
    return pieces
+
def validate_filelist(files):
"""
Die if a file doesn't exist in a file list
for f in files:
if not os.path.exists(f):
raise RuntimeError("%s does not exist" % (f,))
-
+
+
# Script entry point: run main() only when executed directly, not when
# this module is imported by another script.
if __name__ == "__main__":
    main()