Generate manifest files for ENCODE3
diff --git a/htsworkflow/submission/submission.py b/htsworkflow/submission/submission.py
index 01bdb17228689281b3f33e8aaaaa3af5ccda6d18..3320f1caf3174e6d5636c4dd20841188771050d9 100644
--- a/htsworkflow/submission/submission.py
+++ b/htsworkflow/submission/submission.py
@@ -19,9 +19,10 @@ from htsworkflow.util.rdfhelp import \
      toTypedNode, \
      fromTypedNode
 from htsworkflow.util.hashfile import make_md5sum
-
+from htsworkflow.submission.fastqname import FastqName
 from htsworkflow.submission.daf import \
      MetadataLookupException, \
+     ModelException, \
      get_submission_uri
 
 LOGGER = logging.getLogger(__name__)
@@ -114,21 +115,27 @@ class Submission(object):
                           an_analysis))
 
         # add file specific information
-        fileNode = self.link_file_to_classes(pathname,
-                                             an_analysis,
-                                             an_analysis_uri,
-                                             analysis_dir)
+        fileNode = self.make_file_node(pathname, an_analysis)
         self.add_md5s(filename, fileNode, analysis_dir)
+        self.add_fastq_metadata(filename, fileNode)
         self.model.add_statement(
             RDF.Statement(fileNode,
                           rdfNS['type'],
                           file_type))
+        self.model.add_statement(
+            RDF.Statement(fileNode,
+                          libraryOntology['library'],
+                          libNode))
+
         LOGGER.debug("Done.")
 
-    def link_file_to_classes(self, pathname, submissionNode, submission_uri, analysis_dir):
+    def make_file_node(self, pathname, submissionNode):
+        """Create file node and attach it to its submission.
+        """
         # add file specific information
         path, filename = os.path.split(pathname)
-        fileNode = RDF.Node(RDF.Uri('file://'+ os.path.abspath(pathname)))
+        pathname = os.path.abspath(pathname)
+        fileNode = RDF.Node(RDF.Uri('file://' + pathname))
         self.model.add_statement(
             RDF.Statement(submissionNode,
                           dafTermOntology['has_file'],
@@ -137,6 +144,10 @@ class Submission(object):
             RDF.Statement(fileNode,
                           dafTermOntology['filename'],
                           filename))
+        self.model.add_statement(
+            RDF.Statement(fileNode,
+                          dafTermOntology['relative_path'],
+                          os.path.relpath(pathname)))
         return fileNode
 
     def add_md5s(self, filename, fileNode, analysis_dir):
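Note: the values make_file_node() attaches to a submitted file can be previewed without an RDF model; the sketch below mirrors only the path handling in the hunk above, and the helper name describe_file is made up for illustration:

    # Minimal sketch, standard library only: compute the values that
    # make_file_node() stores on the file node in the RDF model.
    import os

    def describe_file(pathname):
        pathname = os.path.abspath(pathname)
        path, filename = os.path.split(pathname)
        return {
            'file_uri': 'file://' + pathname,            # node URI for the file
            'filename': filename,                        # dafTermOntology['filename']
            'relative_path': os.path.relpath(pathname),  # dafTermOntology['relative_path']
        }

    # describe_file('analysis/run1/results.bam') might return, e.g.
    # {'file_uri': 'file:///home/user/analysis/run1/results.bam',
    #  'filename': 'results.bam', 'relative_path': 'analysis/run1/results.bam'}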
@@ -150,13 +161,37 @@ class Submission(object):
             self.model.add_statement(
                 RDF.Statement(fileNode, dafTermOntology['md5sum'], md5))
 
+    def add_fastq_metadata(self, filename, fileNode):
+        # TODO: detect whether this is actually a fastq file before parsing
+        try:
+            fqname = FastqName(filename=filename)
+        except ValueError:
+            # if the fastq name parser fails, just skip the fastq metadata
+            return
+
+        terms = [('flowcell', libraryOntology['flowcell_id']),
+                 ('lib_id', libraryOntology['library_id']),
+                 ('lane', libraryOntology['lane_number']),
+                 ('read', libraryOntology['read']),
+                 ('cycle', libraryOntology['read_length'])]
+        for file_term, model_term in terms:
+            value = fqname.get(file_term)
+            if value is not None:
+                s = RDF.Statement(fileNode, model_term, toTypedNode(value))
+                self.model.append(s)
+
     def _add_library_details_to_model(self, libNode):
         # attributes that can have multiple values
         set_attributes = set((libraryOntology['has_lane'],
                               libraryOntology['has_mappings'],
                               dafTermOntology['has_file']))
         parser = RDF.Parser(name='rdfa')
-        new_statements = parser.parse_as_stream(libNode.uri)
+        try:
+            new_statements = parser.parse_as_stream(libNode.uri)
+        except RDF.RedlandError as e:
+            LOGGER.error(e)
+            return
+        LOGGER.debug("Scanning %s", str(libNode.uri))
         toadd = []
         for s in new_statements:
             # always add "collections"
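Note: the fastq field-to-ontology mapping in add_fastq_metadata() can be exercised on its own; the sketch below assumes FastqName exposes dict-style .get() for its parsed fields (as the code above implies) and substitutes a plain dict with made-up values so it runs without htsworkflow installed:

    # Stand-in for a successfully parsed FastqName; the real class raises
    # ValueError when a filename does not match its naming pattern.
    parsed = {'flowcell': 'C02F9ACXX', 'lib_id': '12345',
              'lane': 3, 'read': 1, 'cycle': 101}   # hypothetical values

    terms = [('flowcell', 'libraryOntology:flowcell_id'),
             ('lib_id',   'libraryOntology:library_id'),
             ('lane',     'libraryOntology:lane_number'),
             ('read',     'libraryOntology:read'),
             ('cycle',    'libraryOntology:read_length')]

    for file_term, model_term in terms:
        value = parsed.get(file_term)
        if value is not None:
            # in submission.py this becomes an RDF.Statement on the file node
            print(model_term, value)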
@@ -290,7 +325,8 @@ class Submission(object):
         paired = ['Barcoded Illumina',
                   'Multiplexing',
                   'Nextera',
-                  'Paired End (non-multiplexed)',]
+                  'Paired End (non-multiplexed)',
+                  'Dual Index Illumina',]
         if library_type in single:
             return False
         elif library_type in paired:
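Note: the single/paired lists above feed a simple membership test; a standalone sketch of that check follows (the free-function form and the handling of unrecognized types are assumptions, since the rest of the method body is outside this hunk):

    def is_paired(library_type, single, paired):
        """Return True for paired-end library types, False for single-ended."""
        if library_type in single:
            return False
        elif library_type in paired:
            return True
        # the real method's behaviour for unknown types is not shown here
        raise ValueError('Unrecognized library type: %s' % library_type)

    # e.g. is_paired('Dual Index Illumina', single=[],
    #                paired=['Paired End (non-multiplexed)', 'Dual Index Illumina'])
    # -> True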