#!/usr/bin/env python
-from ConfigParser import SafeConfigParser
+from configparser import ConfigParser as SafeConfigParser
import fnmatch
from glob import glob
import json
import os
from pprint import pprint, pformat
import shlex
-from StringIO import StringIO
+from io import StringIO
import stat
import sys
import time
import types
-import urllib
-import urllib2
-import urlparse
+import urllib.request
+import urllib.parse
+import urllib.error
from zipfile import ZipFile
import RDF
+if 'DJANGO_SETTINGS_MODULE' not in os.environ:
+ os.environ['DJANGO_SETTINGS_MODULE'] = 'htsworkflow.settings'
+
from htsworkflow.util import api
from htsworkflow.util.rdfhelp import \
dafTermOntology, \
mapper.link_daf(results)
if opts.fastq:
- extractor = CondorFastqExtract(opts.host, apidata, opts.sequence,
+ flowcells = os.path.join(opts.sequence, 'flowcells')
+ extractor = CondorFastqExtract(opts.host, flowcells,
force=opts.force)
extractor.create_scripts(results)
if opts.print_rdf:
writer = get_serializer()
- print writer.serialize_model_to_string(model)
+ print(writer.serialize_model_to_string(model))
def make_parser():
def make_all_ddfs(view_map, library_result_map, daf_name, make_condor=True, force=False):
dag_fragment = []
- for lib_id, result_dir in library_result_map.items():
+ for lib_id, result_dir in list(library_result_map.items()):
submissionNode = view_map.get_submission_node(result_dir)
dag_fragment.extend(
make_ddf(view_map, submissionNode, daf_name, make_condor, result_dir)
else:
current[variable_name] = value
- for view in all_views.keys():
+ for view in list(all_views.keys()):
line = []
for variable_name in variables:
if variable_name in ('files', 'md5sum'):