from htsworkflow.util.rdfhelp import \
blankOrUri, \
- dafTermOntology, \
dump_model, \
+ fromTypedNode, \
get_model, \
- libraryOntology, \
- owlNS, \
- rdfNS, \
- submissionLog, \
- submissionOntology, \
- toTypedNode, \
- fromTypedNode
+ stripNamespace, \
+ toTypedNode
+from htsworkflow.util.rdfns import *
from htsworkflow.util.hashfile import make_md5sum
from htsworkflow.submission.fastqname import FastqName
from htsworkflow.submission.daf import \
def scan_submission_dirs(self, result_map):
    """Examine files in our result directory
    """
    - for lib_id, result_dir in result_map.items():
    + # py3: dict.items() returns a view; list() preserves the py2
    + # materialized-list semantics (not strictly needed for this
    + # read-only iteration, but harmless).
    + for lib_id, result_dir in list(result_map.items()):
        LOGGER.info("Importing %s from %s" % (lib_id, result_dir))
        try:
            self.import_analysis_dir(result_dir, lib_id)
    - except MetadataLookupException, e:
    + # py3 'except ... as e' replaces the py2 comma form; failed
    + # libraries are logged and skipped rather than aborting the scan.
    + except MetadataLookupException as e:
            LOGGER.error("Skipping %s: %s" % (lib_id, str(e)))
def import_analysis_dir(self, analysis_dir, library_id):
pathname = os.path.abspath(os.path.join(analysis_dir, filename))
self.construct_file_attributes(analysis_dir, libNode, pathname)
+ def analysis_nodes(self, result_map):
+ """Return an iterable of analysis nodes
+
+ Yields the submission node (via self.get_submission_node) for each
+ result directory in result_map. This is a generator, so no lookup
+ happens until it is iterated.
+
+ :param result_map: mapping whose values are result directory paths
+     (presumably keyed by library id, as in scan_submission_dirs)
+ :return: generator of analysis/submission nodes
+ """
+ for result_dir in list(result_map.values()):
+ an_analysis = self.get_submission_node(result_dir)
+ yield an_analysis
+
def construct_file_attributes(self, analysis_dir, libNode, pathname):
"""Looking for the best extension
The 'best' is the longest match
LOGGER.debug("Importing %s" % (lane.uri,))
try:
parser.parse_into_model(self.model, lane.uri)
- except RDF.RedlandError, e:
+ except RDF.RedlandError as e:
LOGGER.error("Error accessing %s" % (lane.uri,))
raise e
self.__view_map = self._get_filename_view_map()
results = []
- for pattern, view in self.__view_map.items():
+ for pattern, view in list(self.__view_map.items()):
if re.match(pattern, filename):
results.append(view)
LOGGER.debug("Found: %s" % (literal_re,))
try:
filename_re = re.compile(literal_re)
- except re.error, e:
+ except re.error as e:
LOGGER.error("Unable to compile: %s" % (literal_re,))
patterns[literal_re] = view_name
return patterns
results = []
for record in rdfstream:
d = {}
- for key, value in record.items():
+ for key, value in list(record.items()):
d[key] = fromTypedNode(value)
results.append(d)
return results
+
+
+def list_submissions(model):
+    """Return generator of submission names found in this RDF model.
+
+    Runs a SPARQL query for every distinct ?submission that has a
+    subns:has_submission link, strips the submissionLog namespace
+    prefix from each result URI, trims a single trailing '#', '/' or
+    '?' separator, and yields the remaining short name.
+
+    :param model: an RDF model object accepted by RDF.SPARQLQuery.execute
+    :return: generator of submission name strings
+    """
+    query_body = """
+    PREFIX subns: <http://jumpgate.caltech.edu/wiki/UcscSubmissionOntology#>
+
+    select distinct ?submission
+    where { ?submission subns:has_submission ?library_dir }
+    """
+    query = RDF.SPARQLQuery(query_body)
+    rdfstream = query.execute(model)
+    for row in rdfstream:
+        # NOTE(review): if stripNamespace returns an empty string,
+        # s[-1] below raises IndexError — confirm whether every
+        # submission URI is guaranteed non-empty after stripping.
+        s = stripNamespace(submissionLog, row['submission'])
+        # drop one trailing namespace separator left after the strip
+        if s[-1] in ['#', '/', '?']:
+            s = s[:-1]
+        yield s