-*.py[co~]
+*~
+*.py[co]
.coverage
*.egg-info
dist
--- /dev/null
+##
+## Override submission ID to library URN names for our libraries
+## whose names either lack, or have the wrong library ID string
+## embedded in them.
+##
+
+@base <file:///home/diane/proj/solexa/htsworkflow/extra/ucsc_encode_submission/no-lib.sparql> .
+@prefix encodeSubmit:<http://jumpgate.caltech.edu/wiki/UCSCSubmissionOntology#> .
+
+# woldlab-hepg2-rnaseq-2009dec
+<http://encodesubmit.ucsc.edu/pipeline/show/805>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/10879> .
+
+# woldlab-hepg2-rnaseq-2009dec-part2
+<http://encodesubmit.ucsc.edu/pipeline/show/810>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/10879> .
+
+# woldlab-hepg2-rnaseq-2009dec-part3
+<http://encodesubmit.ucsc.edu/pipeline/show/869>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/10879> .
+
+# woldlab-rnaseq-GM12878-rep1-stranded-2010Jan15
+<http://encodesubmit.ucsc.edu/pipeline/show/870>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11011> .
+
+# woldlab-hepg2-rnaseq-2010Jan-part4
+<http://encodesubmit.ucsc.edu/pipeline/show/897>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/10879> .
+
+# woldlab-gm12878-directional-rep2-rnaseq-2010Jan06
+<http://encodesubmit.ucsc.edu/pipeline/show/898>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11010> .
+
+# woldlab-K562-directional-rnaseq-rep1-2010Jan6
+<http://encodesubmit.ucsc.edu/pipeline/show/903>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11010> .
+
+# woldlab-K562-directional-rnaseq-rep2-2010jan9
+<http://encodesubmit.ucsc.edu/pipeline/show/904>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11007> .
+
+# woldlab hESC 10886 rep1 2009Jan13
+<http://encodesubmit.ucsc.edu/pipeline/show/1026>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11286> .
+
+# woldlab 2010Jun15 1x75-Directional-NHEK-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/1483>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11204> .
+
+# woldlab Jun18 1x75-Directional-H1-hESC-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/1626>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11009> .
+
+# woldlab jun 18 1x75-Directional-GM12878-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/1631>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11011> .
+
+# woldlab jun 18 1x75-Directional-GM12878-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/1632>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11010> .
+
+# woldlab jun 18 1x75-Directional-H1-hESC-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/1633>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/10947> .
+
+# woldlab jun 18 1x75-Directional-HeLa-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/1634>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11208> .
+
+# woldlab jun 18 1x75-Directional-HeLa-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/1635>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11207> .
+
+# woldlab jun 18 1x75-Directional-HepG2-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/1636>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11210> .
+
+# woldlab jun 18 1x75-Directional-K562-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/1637>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11008> .
+
+# woldlab jun 18 1x75-Directional-HepG2-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/1638>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11209> .
+
+# woldlab jun 18 1x75-Directional-HUVEC-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/1639>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11206> .
+
+# woldlab jun 18 1x75-Directional-HUVEC-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/1645>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11205> .
+
+# woldlab jun 18 1x75-Directional-K562-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/1646>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11007> .
+
+# woldlab June 2x75-GM12878-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/1856>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/10515> .
+
+# 2010 jul 9 corrected fastqs
+<http://encodesubmit.ucsc.edu/pipeline/show/1874>
+ encodeSubmit:ignore "1" .
+# encodeSubmit:library_urn "
+
+# 2010-11-05 Correction 1x75-Directional-GM12878-Rep1.tgz
+<http://encodesubmit.ucsc.edu/pipeline/show/2926>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11010> .
+
+# 1x75-Directional-GM12878-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/2930>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11010> .
+
+# 1x75-Directional-H1-hESC-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/2931>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/10947> .
+
+# 1x75-Directional-H1-hESC-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/2932>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11205> .
+
+# 1x75-Directional-HUVEC-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/2933>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11206> .
+
+# 1x75-Directional-HUVEC-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/2934>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11205> .
+
+# 1x75-Directional-HeLa-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/2935>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11208> .
+
+# 1x75-Directional-HeLa-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/2936>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11207> .
+
+# 1x75-Directional-HepG2-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/2937>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11210> .
+
+# 1x75-Directional-HepG2-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/2938>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11209> .
+
+# 1x75-Directional-K562-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/2939>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11008> .
+
+# 1x75-Directional-K562-Rep2
+<http://encodesubmit.ucsc.edu/pipeline/show/2940>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11007> .
+
+# 1x75-Directional-NHEK-Rep1
+<http://encodesubmit.ucsc.edu/pipeline/show/2941>
+ encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/11204> .
+
from datetime import datetime
import httplib2
from operator import attrgetter
-from optparse import OptionParser
+from optparse import OptionParser, OptionGroup
# python keyring
import keyring
import logging
import RDF
import sys
import urllib
+import urlparse
+from htsworkflow.util import api
+from htsworkflow.util.rdfhelp import \
+ dublinCoreNS, \
+ get_model, \
+ get_serializer, \
+ sparql_query, \
+ submitOntology, \
+ libraryOntology, \
+ load_into_model, \
+ rdfNS, \
+ rdfsNS, \
+ xsdNS
+
+# URL mappings
libraryNS = RDF.NS("http://jumpgate.caltech.edu/library/")
-submissionNS = RDF.NS("http://encodesubmit.ucsc.edu/pipeline/show/")
-submitNS = RDF.NS("http://jumpgate.caltech.edu/wiki/EncodeSubmit#")
-dublinCoreNS = RDF.NS("http://purl.org/dc/elements/1.1/")
-rdfNS = RDF.NS("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
-rdfsNS= RDF.NS("http://www.w3.org/2000/01/rdf-schema#")
+
+
+from htsworkflow.submission.ucsc import submission_view_url, UCSCEncodePipeline
+download_ddf = urlparse.urljoin(UCSCEncodePipeline, "download_ddf#", allow_fragments=True)
+ddfNS = RDF.NS(download_ddf)
+
+DBDIR = os.path.expanduser("~diane/proj/submission")
+
+logger = logging.getLogger("encode_find")
LOGIN_URL = 'http://encodesubmit.ucsc.edu/account/login'
USER_URL = 'http://encodesubmit.ucsc.edu/pipeline/show_user'
-DETAIL_URL = 'http://encodesubmit.ucsc.edu/pipeline/show/{0}'
-LIBRARY_URL = 'http://jumpgate.caltech.edu/library/{0}'
+
USERNAME = 'detrout'
+CHARSET = 'utf-8'
def main(cmdline=None):
parser = make_parser()
opts, args = parser.parse_args(cmdline)
- cookie = login()
- if cookie is None:
- print "Failed to login"
+ if opts.verbose:
+ logging.basicConfig(level=logging.INFO)
+
+ htsw_authdata = api.make_auth_from_opts(opts, parser)
+ htswapi = api.HtswApi(opts.host, htsw_authdata)
+
+ cookie = None
+ model = get_model(opts.load_model, DBDIR)
+
+ if opts.load_rdf is not None:
+ ns_uri = submitOntology[''].uri
+ load_into_model(model, opts.rdf_parser_name, opts.load_rdf, ns_uri)
+
+ if opts.update:
+ cookie = login(cookie=cookie)
+ load_my_submissions(model, cookie=cookie)
+ load_encode_libraries(model, htswapi)
+
+ if opts.sparql is not None:
+ sparql_query(model, opts.sparql)
+
+ if opts.find_submission_with_no_library:
+ missing = find_submissions_with_no_library(model)
+
+ if opts.print_rdf:
+ serializer = get_serializer(name=opts.rdf_parser_name)
+ print serializer.serialize_model_to_string(model)
+
- submissions = my_submissions(cookie)
- for s in submissions:
- for t in s.triples():
- print t
-
def make_parser():
parser = OptionParser()
- return parser
-
+ commands = OptionGroup(parser, "Commands")
+ commands.add_option('--load-model', default=None,
+ help="Load model database")
+ commands.add_option('--load-rdf', default=None,
+ help="load rdf statements into model")
+ commands.add_option('--print-rdf', action="store_true", default=False,
+ help="print ending model state")
+ commands.add_option('--update', action="store_true", default=False,
+ help="Query remote data sources and update our database")
+ #commands.add_option('--update-ucsc-status', default=None,
+ # help="download status from ucsc, requires filename for extra rules")
+ #commands.add_option('--update-ddfs', action="store_true", default=False,
+ # help="download ddf information for known submission")
+ #commands.add_option('--update-library', default=None,
+ # help="download library info from htsw, requires filename for extra rules")
+ parser.add_option_group(commands)
+
+ queries = OptionGroup(parser, "Queries")
+ queries.add_option('--sparql', default=None,
+ help="execute arbitrary sparql query")
+ queries.add_option('--find-submission-with-no-library', default=False,
+ action="store_true",
+ help="find submissions with no library ID")
+ parser.add_option_group(queries)
-def login():
- keys = keyring.get_keyring()
- password = keys.get_password(LOGIN_URL, USERNAME)
- credentials = {'login': USERNAME,
- 'password': password}
- headers = {'Content-type': 'application/x-www-form-urlencoded'}
- http = httplib2.Http()
- response, content = http.request(LOGIN_URL,
- 'POST',
- headers=headers,
- body=urllib.urlencode(credentials))
- logging.debug("Login to {0}, status {1}".format(LOGIN_URL,
- response['status']))
+ options = OptionGroup(parser, "Options")
+ options.add_option("--rdf-parser-name", default="turtle",
+ help="set rdf file parser type")
+ options.add_option("-v", "--verbose", action="store_true", default=False)
+ parser.add_option_group(options)
- cookie = response.get('set-cookie', None)
- return cookie
+ api.add_auth_options(parser)
+
+ return parser
-def my_submissions(cookie):
+def load_my_submissions(model, cookie=None):
+ if cookie is None:
+ cookie = login()
+
soup = get_url_as_soup(USER_URL, 'GET', cookie)
p = soup.find('table', attrs={'id':'projects'})
tr = p.findNext('tr')
# first record is header
tr = tr.findNext()
- submissions = []
+ TypeN = rdfsNS['type']
+ NameN = submitOntology['name']
+ SpeciesN = submitOntology['species']
+ LibraryURN = submitOntology['library_urn']
+
while tr is not None:
td = tr.findAll('td')
if td is not None and len(td) > 1:
- subid = td[0].contents[0].contents[0]
- species = get_contents(td[2])
+ subUrnText = td[0].contents[0].contents[0].encode(CHARSET)
+ subUrn = RDF.Uri(submission_view_url(subUrnText))
+
+ add_stmt(model, subUrn, TypeN, submitOntology['Submission'])
+
name = get_contents(td[4])
+ add_stmt(model, subUrn, NameN, name)
+
+ species = get_contents(td[2])
+ if species is not None:
+ add_stmt(model, subUrn, SpeciesN, species)
+
+ library_id = get_library_id(name)
+ if library_id is not None:
+ add_submission_to_library_urn(model,
+ subUrn,
+ LibraryURN,
+ library_id)
+
+ add_submission_creation_date(model, subUrn, cookie)
+
+    # grab changing attributes
status = get_contents(td[6]).strip()
- date = get_date_contents(td[8])
- age = get_contents(td[10])
- submissions.append(
- Submission(subid, species, name, status, date, age, cookie)
- )
+ last_mod_datetime = get_date_contents(td[8])
+ last_mod = last_mod_datetime.isoformat()
+
+ update_submission_detail(model, subUrn, status, last_mod, cookie=cookie)
+
+ logging.info("Processed {0}".format( subUrn))
+
tr = tr.findNext('tr')
- return submissions
+
+
+def add_submission_to_library_urn(model, submissionUrn, predicate, library_id):
+ """Add a link from a UCSC submission to woldlab library if needed
+ """
+ libraryUrn = libraryNS[library_id]
+ query = RDF.Statement(submissionUrn, predicate, libraryUrn)
+ if not model.contains_statement(query):
+ link = RDF.Statement(submissionUrn, predicate, libraryNS[library_id])
+ logger.info("Adding Sub -> Lib link: {0}".format(link))
+ model.add_statement(link)
+ else:
+ logger.debug("Found: {0}".format(str(query)))
+
+
+def find_submissions_with_no_library(model):
+ missing_lib_query = RDF.SPARQLQuery("""
+PREFIX submissionOntology:<{submissionOntology}>
+
+SELECT
+ ?subid ?name
+WHERE {{
+ ?subid submissionOntology:name ?name
+ OPTIONAL {{ ?subid submissionOntology:library_urn ?libid }}
+ FILTER (!bound(?libid))
+}}""".format(submissionOntology=submitOntology[''].uri)
+)
+
+ results = missing_lib_query.execute(model)
+ for row in results:
+ subid = row['subid']
+ name = row['name']
+ print "# {0}".format(name)
+ print "<{0}>".format(subid.uri)
+ print " encodeSubmit:library_urn <http://jumpgate.caltech.edu/library/> ."
+ print ""
+
+
+def add_submission_creation_date(model, subUrn, cookie):
+ # in theory the submission page might have more information on it.
+ creationDateN = libraryOntology['date']
+ dateTimeType = xsdNS['dateTime']
+ query = RDF.Statement(subUrn, creationDateN, None)
+ creation_dates = list(model.find_statements(query))
+ if len(creation_dates) == 0:
+ logger.info("Getting creation date for: {0}".format(str(subUrn)))
+ soup = get_url_as_soup(str(subUrn.uri), 'GET', cookie)
+ created_label = soup.find(text="Created: ")
+ if created_label:
+ created_date = get_date_contents(created_label.next)
+ created_date_node = RDF.Node(literal=created_date.isoformat(),
+ datatype=dateTimeType.uri)
+ add_stmt(model, subUrn, creationDateN, created_date_node)
+ else:
+ logger.debug("Found creation date for: {0}".format(str(subUrn)))
+
+def update_submission_detail(model, subUrn, status, recent_update, cookie):
+ HasStatusN = submitOntology['has_status']
+ StatusN = submitOntology['status']
+ LastModifyN = submitOntology['last_modify_date']
+
+ status_nodes_query = RDF.Statement(subUrn, HasStatusN, None)
+ status_nodes = list(model.find_statements(status_nodes_query))
+
+ if len(status_nodes) == 0:
+ # has no status node, add one
+ logging.info("Adding status node to {0}".format(subUrn))
+ status_blank = RDF.Node()
+ add_stmt(model, subUrn, HasStatusN, status_blank)
+ add_stmt(model, status_blank, rdfsNS['type'], StatusN)
+ add_stmt(model, status_blank, StatusN, status)
+ add_stmt(model, status_blank, LastModifyN, recent_update)
+ update_ddf(model, subUrn, status_blank, cookie=cookie)
+ else:
+ logging.info("Found {0} status blanks".format(len(status_nodes)))
+ for status_statement in status_nodes:
+ status_blank = status_statement.object
+ last_modified_query = RDF.Statement(status_blank, LastModifyN, None)
+ last_mod_nodes = model.find_statements(last_modified_query)
+ for last_mod_statement in last_mod_nodes:
+ last_mod_date = str(last_mod_statement.object)
+ if recent_update == str(last_mod_date):
+ update_ddf(model, subUrn, status_blank, cookie=cookie)
+ break
+
+
+
+def update_ddf(model, subUrn, statusNode, cookie):
+ TypeN = rdfsNS['type']
+
+ download_ddf_url = str(subUrn).replace('show', 'download_ddf')
+ ddfUrn = RDF.Uri(download_ddf_url)
+
+ status_is_ddf = RDF.Statement(statusNode, TypeN, ddfNS['ddf'])
+ if not model.contains_statement(status_is_ddf):
+ logging.info('Adding ddf to {0}, {1}'.format(subUrn, statusNode))
+ ddf_text = get_url_as_text(download_ddf_url, 'GET', cookie)
+ add_ddf_statements(model, statusNode, ddf_text)
+ model.add_statement(status_is_ddf)
+
+
+def add_ddf_statements(model, statusNode, ddf_string):
+ """Convert a ddf text file into RDF Statements
+ """
+ ddf_lines = ddf_string.split('\n')
+ # first line is header
+ header = ddf_lines[0].split()
+ attributes = [ ddfNS[x] for x in header ]
+ statements = []
+
+ for ddf_line in ddf_lines[1:]:
+ ddf_line = ddf_line.strip()
+ if len(ddf_line) == 0:
+ continue
+ if ddf_line.startswith("#"):
+ continue
+
+ ddf_record = ddf_line.split('\t')
+ files = ddf_record[0].split(',')
+ file_attributes = ddf_record[1:]
+
+ for f in files:
+ fileNode = RDF.Node()
+ add_stmt(model, statusNode, submitOntology['has_file'], fileNode)
+ add_stmt(model, fileNode, rdfsNS['type'], ddfNS['file'])
+ add_stmt(model, fileNode, ddfNS['filename'], f)
+
+ for predicate, object in zip( attributes[1:], file_attributes):
+ add_stmt(model, fileNode, predicate, object)
+
+
+def load_encode_libraries(model, htswapi):
+ """Get libraries associated with encode.
+ """
+ encodeUrl = os.path.join(htswapi.root_url + "/library/?affiliations__id__exact=44")
+ rdfaParser = RDF.Parser(name='rdfa')
+ print encodeUrl
+ rdfaParser.parse_into_model(model, encodeUrl)
+ query = RDF.Statement(None, libraryOntology['library_id'], None)
+ libraries = model.find_statements(query)
+ for statement in libraries:
+ libraryUrn = statement.subject
+ load_library_detail(model, libraryUrn)
+
+
+def load_library_detail(model, libraryUrn):
+ """Grab detail information from library page
+ """
+ rdfaParser = RDF.Parser(name='rdfa')
+ query = RDF.Statement(libraryUrn, libraryOntology['date'], None)
+ results = list(model.find_statements(query))
+ if len(results) == 0:
+ logger.info("Loading {0}".format(str(libraryUrn)))
+ rdfaParser.parse_into_model(model, libraryUrn.uri)
+ elif len(results) == 1:
+ pass # Assuming that a loaded dataset has one record
+ else:
+ logging.warning("Many dates for {0}".format(libraryUrn))
+
+def get_library_id(name):
+ """Guess library ID from library name
+ """
+ match = re.search(r"[ -](?P<id>([\d]{5})|(SL[\d]{4}))", name)
+ library_id = None
+ if match is not None:
+ library_id = match.group('id')
+ return library_id
+
def get_contents(element):
"""Return contents or none.
a = element.find('a')
if a is not None:
- return a.contents[0]
-
- return element.contents[0]
+ return a.contents[0].encode(CHARSET)
+ return element.contents[0].encode(CHARSET)
+
+
def get_date_contents(element):
data = get_contents(element)
if data:
else:
return None
+
+def load_into_model(model, parser_name, filename):
+ if not os.path.exists(filename):
+ raise IOError("Can't find {0}".format(filename))
+
+ data = open(filename, 'r').read()
+ rdf_parser = RDF.Parser(name=parser_name)
+ rdf_parser.parse_string_into_model(model, data, ns_uri)
+
+def add_stmt(model, subject, predicate, object):
+ """Convienence create RDF Statement and add to a model
+ """
+ return model.add_statement(
+ RDF.Statement(subject, predicate, object)
+ )
+
+def login(cookie=None):
+ """Login if we don't have a cookie
+ """
+ if cookie is not None:
+ return cookie
+
+ keys = keyring.get_keyring()
+ password = keys.get_password(LOGIN_URL, USERNAME)
+ credentials = {'login': USERNAME,
+ 'password': password}
+ headers = {'Content-type': 'application/x-www-form-urlencoded'}
+ http = httplib2.Http()
+ response, content = http.request(LOGIN_URL,
+ 'POST',
+ headers=headers,
+ body=urllib.urlencode(credentials))
+ logging.debug("Login to {0}, status {1}".format(LOGIN_URL,
+ response['status']))
+
+ cookie = response.get('set-cookie', None)
+ if cookie is None:
+ raise RuntimeError("Wasn't able to log into: {0}".format(LOGIN_URL))
+ return cookie
+
+
+def get_url_as_soup(url, method, cookie=None):
+ http = httplib2.Http()
+ headers = {}
+ if cookie is not None:
+ headers['Cookie'] = cookie
+ response, content = http.request(url, method, headers=headers)
+ if response['status'] == '200':
+ soup = BeautifulSoup(content,
+ fromEncoding="utf-8", # should read from header
+ convertEntities=BeautifulSoup.HTML_ENTITIES
+ )
+ return soup
+ else:
+ msg = "error accessing {0}, status {1}"
+ msg = msg.format(url, response['status'])
+ e = httplib2.HttpLib2ErrorWithResponse(msg, response, content)
+
+def get_url_as_text(url, method, cookie=None):
+ http = httplib2.Http()
+ headers = {}
+ if cookie is not None:
+ headers['Cookie'] = cookie
+ response, content = http.request(url, method, headers=headers)
+ if response['status'] == '200':
+ return content
+ else:
+ msg = "error accessing {0}, status {1}"
+ msg = msg.format(url, response['status'])
+ e = httplib2.HttpLib2ErrorWithResponse(msg, response, content)
+
+################
+# old stuff
SUBMISSIONS_LACKING_LIBID = [
('1x75-Directional-HeLa-Rep1', '11208'),
('1x75-Directional-HeLa-Rep2', '11207'),
('1x75-Directional-K562-Rep1', '11008'),
('1x75-Directional-K562-Rep2', '11007'),
('1x75-Directional-NHEK-Rep1', '11204'),
+ ('1x75-Directional-GM12878-Rep1', '11011'),
+ ('1x75-Directional-GM12878-Rep2', '11010'),
]
-class Submission(object):
- def __init__(self, subid, species, name, status, date, age, cookie=None):
- self.cookie = cookie
- self.subid = subid
- self.species = species
- self.name = name
- self.status = status
- self.date = date
- self.age = age
- self._library_id = None
- self._created_date = None
-
- def triples(self):
- subNode = submissionNS[self.subid.encode('utf-8')]
- dateNode = self.date.strftime("%Y-%m-%d")
- s = [RDF.Statement(subNode, submitNS['name'],
- self.name.encode('utf-8')),
- RDF.Statement(subNode, submitNS['status'],
- self.status.encode('utf-8')),
- RDF.Statement(subNode, submitNS['last_modify_date'], dateNode),
- ]
- if self.species is not None:
- s.append(RDF.Statement(subNode, submitNS['species'],
- self.species.encode('utf-8')))
- if self.library_id is not None:
- libId = libraryNS[self.library_id.encode('utf-8')]
- s.append(RDF.Statement(subNode, rdfsNS['seeAlso'], libId))
-
- return s
-
-
- def _get_library_id(self):
- if self._library_id is None:
- match = re.search(r"[ -](?P<id>([\d]{5})|(SL[\d]{4}))", self.name)
- if match is not None:
- self._library_id = match.group('id')
- else:
- for dir_lib_name, lib_id in SUBMISSIONS_LACKING_LIBID:
- if dir_lib_name in self.name:
- self._library_id = lib_id
- break
-
- return self._library_id
-
- library_id = property(_get_library_id)
-
- def _get_detail(self):
- detail = DETAIL_URL.format(self.subid)
- soup = get_url_as_soup(detail, 'GET', self.cookie)
-
- created_label = soup.find(text="Created: ")
- if created_label:
- self._created_date = get_date_contents(created_label.next)
-
- def _get_created_date(self):
- if self._created_date is None:
- self._get_detail()
- return self._created_date
- created_date = property(_get_created_date)
-
- def __unicode__(self):
- return u"{0}\t{1}\t{2}".format(self.subid, self.library_id, self.name)
-
- def __repr__(self):
- return u"<Submission ({0}) '{1}'>".format(self.subid, self.name)
def select_by_library_id(submission_list):
report.append('<tbody>')
for lib_id in lib_ids:
report.append('<tr>')
- lib_url = LIBRARY_URL.format(lib_id)
+ lib_url = libraryNS[lib_id].uri
report.append('<td><a href="{0}">{1}</a></td>'.format(lib_url, lib_id))
submissions = selected_libraries[lib_id]
report.append('<td>{0}</td>'.format(submissions[0].name))
for d in freezes:
report.append('<td>')
for s in batched.get(d, []):
- subid = '<a href="http://encodesubmit.ucsc.edu/pipeline/show/{0}">{0}</a>'.format(s.subid)
+ show_url = submission_view_url(s.subid)
+ subid = '<a href="{0}">{1}</a>'.format(show_url, s.subid)
report.append("{0}:{1}".format(subid, s.status))
report.append('</td>')
else:
return name
else:
return None
-
-
-def get_url_as_soup(url, method, cookie=None):
- http = httplib2.Http()
- headers = {}
- if cookie is not None:
- headers['Cookie'] = cookie
- response, content = http.request(url, method, headers=headers)
- if response['status'] == '200':
- soup = BeautifulSoup(content,
- fromEncoding="utf-8", # should read from header
- convertEntities=BeautifulSoup.HTML_ENTITIES
- )
- return soup
- else:
- msg = "error accessing {0}, status {1}"
- msg = msg.format(url, response['status'])
- e = httplib2.HttpLib2ErrorWithResponse(msg, response, content)
if __name__ == "__main__":
main()
--- /dev/null
+##
+## Find submissions that are currently "failed"
+##
+
+PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
+PREFIX submitOnt:<http://jumpgate.caltech.edu/wiki/UCSCSubmissionOntology#>
+PREFIX libOntNS:<http://jumpgate.caltech.edu/wiki/LibraryOntology#>
+
+#libraryNS = RDF.NS("http://jumpgate.caltech.edu/library/")
+#submissionNS = RDF.NS("http://encodesubmit.ucsc.edu/pipeline/show/")
+#ddfNS = RDF.NS("http://encodesubmit.ucsc.edu/pipeline/download_ddf#")
+
+SELECT
+ ?subid ?subname ?liburn ?status
+WHERE {
+ ?subid submitOnt:name ?subname .
+ ?subid submitOnt:library_urn ?liburn .
+ ?subid submitOnt:has_status ?statusNode .
+ ?statusNode submitOnt:status ?status .
+ ?statusNode submitOnt:last_modify_date ?last_modify .
+ FILTER (regex(?status, "failed", "i"))
+}
--- /dev/null
+# Produce list of submissions associated with a cell/replicate
+
+PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
+PREFIX encodeSubmit:<http://jumpgate.caltech.edu/wiki/UCSCSubmissionOntology#>
+PREFIX libraryOntology:<http://jumpgate.caltech.edu/wiki/LibraryOntology#>
+
+SELECT distinct ?liburn ?cell ?replicate ?subid
+WHERE {
+ ?subid encodeSubmit:library_urn ?liburn ;
+ encodeSubmit:name ?name .
+ ?liburn libraryOntology:cell_line ?cell ;
+ libraryOntology:replicate ?replicate
+}
+ORDER BY ?cell ?replicate ?liburn
--- /dev/null
+from optparse import OptionParser
+import os
+import sys
+from pprint import pprint
+
+def main(cmdline=None):
+ parser = make_parser()
+ opts, args = parser.parse_args(cmdline)
+
+ extensions = scan(args)
+ #pprint(extensions)
+ print find_common_suffix(extensions)
+
+def make_parser():
+ parser = OptionParser("%prog: directory [directory...]")
+ return parser
+
+def scan(toscan):
+ index = {}
+ for cur_scan_dir in toscan:
+ for path, dirnames, filenames in os.walk(cur_scan_dir):
+ for filename in filenames:
+ next_index = index
+ for c in filename[::-1]:
+ next_index = next_index.setdefault(c, {})
+ return index
+
+def find_common_suffix(index, tail=[]):
+ if len(tail) > 0 and len(index) > 1:
+ return "".join(tail[::-1])
+
+ results = []
+ for key, choice in index.items():
+ r = find_common_suffix(choice, tail+[key])
+ if r is not None:
+ results.append (r)
+
+ if len(results) == 0:
+ return None
+ elif len(results) == 1:
+ return results[0]
+ else:
+ return results
+
+if __name__ == "__main__":
+ main()
import json
import logging
import netrc
-from optparse import OptionParser
+from optparse import OptionParser, OptionGroup
import os
from pprint import pprint, pformat
import shlex
from StringIO import StringIO
import stat
-from subprocess import Popen, PIPE
import sys
import time
import types
import urllib2
import urlparse
+import RDF
+
from htsworkflow.util import api
-from htsworkflow.pipelines.sequences import \
- create_sequence_table, \
- scan_for_sequences
-from htsworkflow.pipelines import qseq2fastq
-from htsworkflow.pipelines import srf2fastq
+from htsworkflow.util.rdfhelp import \
+ dafTermOntology, \
+ fromTypedNode, \
+ get_model, \
+ get_serializer, \
+ load_into_model, \
+ sparql_query, \
+ submissionOntology
+from htsworkflow.submission.daf import \
+ DAFMapper, \
+ MetadataLookupException, \
+ get_submission_uri
+from htsworkflow.submission.condorfastq import CondorFastqExtract
+
+logger = logging.getLogger('ucsc_gather')
def main(cmdline=None):
parser = make_parser()
else:
logging.basicConfig(level = logging.WARNING )
- apidata = {'apiid': opts.apiid, 'apikey': opts.apikey }
+ apidata = api.make_auth_from_opts(opts, parser)
+
+ model = get_model(opts.load_model)
+ if opts.name:
+ mapper = DAFMapper(opts.name, opts.daf, model)
+ submission_uri = get_submission_uri(opts.name)
- if opts.host is None or opts.apiid is None or opts.apikey is None:
- parser.error("Please specify host url, apiid, apikey")
+ if opts.library_url is not None:
+ mapper.library_url = opts.library_url
+
+ if opts.load_rdf is not None:
+ load_into_model(model, 'turtle', opts.load_rdf, submission_uri)
- if opts.makeddf and opts.daf is None:
+ if opts.make_ddf and opts.daf is None:
parser.error("Please specify your daf when making ddf files")
- if len(args) == 0:
- parser.error("I need at least one library submission-dir input file")
-
library_result_map = []
for a in args:
library_result_map.extend(read_library_result_map(a))
if opts.make_tree_from is not None:
make_tree_from(opts.make_tree_from, library_result_map)
- if opts.daf is not None:
+ if opts.link_daf:
link_daf(opts.daf, library_result_map)
if opts.fastq:
- build_fastqs(opts.host,
- apidata,
- opts.sequence,
- library_result_map,
- force=opts.force)
+ extractor = CondorFastqExtract(opts.host, apidata, opts.sequence,
+ force=opts.force)
+ extractor.build_fastqs(library_result_map)
- if opts.ini:
- make_submission_ini(opts.host, apidata, library_result_map)
+ if opts.scan_submission:
+ scan_submission_dirs(mapper, library_result_map)
- if opts.makeddf:
- make_all_ddfs(library_result_map, opts.daf, force=opts.force)
+ if opts.make_ddf:
+ make_all_ddfs(mapper, library_result_map, opts.daf, force=opts.force)
+ if opts.sparql:
+ sparql_query(model, opts.sparql)
+
+ if opts.print_rdf:
+ writer = get_serializer()
+ print writer.serialize_model_to_string(model)
+
def make_parser():
- # Load defaults from the config files
- config = SafeConfigParser()
- config.read([os.path.expanduser('~/.htsworkflow.ini'), '/etc/htsworkflow.ini'])
-
- sequence_archive = None
- apiid = None
- apikey = None
- apihost = None
- SECTION = 'sequence_archive'
- if config.has_section(SECTION):
- sequence_archive = config.get(SECTION, 'sequence_archive',sequence_archive)
- sequence_archive = os.path.expanduser(sequence_archive)
- apiid = config.get(SECTION, 'apiid', apiid)
- apikey = config.get(SECTION, 'apikey', apikey)
- apihost = config.get(SECTION, 'host', apihost)
-
parser = OptionParser()
+ model = OptionGroup(parser, 'model')
+ model.add_option('--name', help="Set submission name")
+ model.add_option('--load-model', default=None,
+ help="Load model database")
+ model.add_option('--load-rdf', default=None,
+ help="load rdf statements into model")
+ model.add_option('--sparql', default=None, help="execute sparql query")
+ model.add_option('--print-rdf', action="store_true", default=False,
+ help="print ending model state")
+ parser.add_option_group(model)
# commands
- parser.add_option('--make-tree-from',
+ commands = OptionGroup(parser, 'commands')
+ commands.add_option('--make-tree-from',
help="create directories & link data files",
default=None)
- parser.add_option('--fastq', help="generate scripts for making fastq files",
- default=False, action="store_true")
-
- parser.add_option('--ini', help="generate submission ini file", default=False,
- action="store_true")
-
- parser.add_option('--makeddf', help='make the ddfs', default=False,
+ commands.add_option('--fastq', default=False, action="store_true",
+ help="generate scripts for making fastq files")
+ commands.add_option('--scan-submission', default=False, action="store_true",
+ help="Import metadata for submission into our model")
+ commands.add_option('--link-daf', default=False, action="store_true",
+ help="link daf into submission directories")
+ commands.add_option('--make-ddf', help='make the ddfs', default=False,
action="store_true")
+ parser.add_option_group(commands)
- parser.add_option('--daf', default=None, help='specify daf name')
parser.add_option('--force', default=False, action="store_true",
help="Force regenerating fastqs")
-
- # configuration options
- parser.add_option('--apiid', default=apiid, help="Specify API ID")
- parser.add_option('--apikey', default=apikey, help="Specify API KEY")
- parser.add_option('--host', default=apihost,
- help="specify HTSWorkflow host",)
- parser.add_option('--sequence', default=sequence_archive,
- help="sequence repository")
-
+ parser.add_option('--daf', default=None, help='specify daf name')
+ parser.add_option('--library-url', default=None,
+ help="specify an alternate source for library information")
# debugging
parser.add_option('--verbose', default=False, action="store_true",
help='verbose logging')
parser.add_option('--debug', default=False, action="store_true",
help='debug logging')
+ api.add_auth_options(parser)
+
return parser
-
def make_tree_from(source_path, library_result_map):
"""Create a tree using data files from source path.
"""
if not os.path.exists(lib_path):
logging.info("Making dir {0}".format(lib_path))
os.mkdir(lib_path)
- source_lib_dir = os.path.join(source_path, lib_path)
+ source_lib_dir = os.path.abspath(os.path.join(source_path, lib_path))
if os.path.exists(source_lib_dir):
pass
for filename in os.listdir(source_lib_dir):
logging.info(
'LINK {0} to {1}'.format(source_pathname, target_pathname))
-def build_fastqs(host, apidata, sequences_path, library_result_map,
- force=False ):
- """
- Generate condor scripts to build any needed fastq files
-
- Args:
- host (str): root of the htsworkflow api server
- apidata (dict): id & key to post to the server
- sequences_path (str): root of the directory tree to scan for files
- library_result_map (list): [(library_id, destination directory), ...]
- """
- qseq_condor_header = """
-Universe=vanilla
-executable=%(exe)s
-error=log/qseq2fastq.err.$(process).log
-output=log/qseq2fastq.out.$(process).log
-log=log/qseq2fastq.log
-
-""" % {'exe': sys.executable }
- qseq_condor_entries = []
- srf_condor_header = """
-Universe=vanilla
-executable=%(exe)s
-output=log/srf_pair_fastq.out.$(process).log
-error=log/srf_pair_fastq.err.$(process).log
-log=log/srf_pair_fastq.log
-environment="PYTHONPATH=/home/diane/lib/python2.6/site-packages:/home/diane/proj/solexa/gaworkflow PATH=/woldlab/rattus/lvol0/mus/home/diane/bin:/usr/bin:/bin"
-
-""" % {'exe': sys.executable }
- srf_condor_entries = []
- lib_db = find_archive_sequence_files(host,
- apidata,
- sequences_path,
- library_result_map)
-
- needed_targets = find_missing_targets(library_result_map, lib_db, force)
-
- for target_pathname, available_sources in needed_targets.items():
- logging.debug(' target : %s' % (target_pathname,))
- logging.debug(' candidate sources: %s' % (available_sources,))
- if available_sources.has_key('qseq'):
- source = available_sources['qseq']
- qseq_condor_entries.append(
- condor_qseq_to_fastq(source.path,
- target_pathname,
- source.flowcell,
- force=force)
- )
- elif available_sources.has_key('srf'):
- source = available_sources['srf']
- mid = getattr(source, 'mid_point', None)
- srf_condor_entries.append(
- condor_srf_to_fastq(source.path,
- target_pathname,
- source.paired,
- source.flowcell,
- mid,
- force=force)
- )
- else:
- print " need file", target_pathname
-
- if len(srf_condor_entries) > 0:
- make_submit_script('srf.fastq.condor',
- srf_condor_header,
- srf_condor_entries)
-
- if len(qseq_condor_entries) > 0:
- make_submit_script('qseq.fastq.condor',
- qseq_condor_header,
- qseq_condor_entries)
-
-
-def find_missing_targets(library_result_map, lib_db, force=False):
- """
- Check if the sequence file exists.
- This requires computing what the sequence name is and checking
- to see if it can be found in the sequence location.
-
- Adds seq.paired flag to sequences listed in lib_db[*]['lanes']
- """
- fastq_paired_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s_r%(read)s.fastq'
- fastq_single_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s.fastq'
- # find what targets we're missing
- needed_targets = {}
- for lib_id, result_dir in library_result_map:
- lib = lib_db[lib_id]
- lane_dict = make_lane_dict(lib_db, lib_id)
-
- for lane_key, sequences in lib['lanes'].items():
- for seq in sequences:
- seq.paired = lane_dict[seq.flowcell]['paired_end']
- lane_status = lane_dict[seq.flowcell]['status']
-
- if seq.paired and seq.read is None:
- seq.read = 1
- filename_attributes = {
- 'flowcell': seq.flowcell,
- 'lib_id': lib_id,
- 'lane': seq.lane,
- 'read': seq.read,
- 'cycle': seq.cycle
- }
- # skip bad runs
- if lane_status == 'Failed':
- continue
- if seq.flowcell == '30DY0AAXX':
- # 30DY0 only ran for 151 bases instead of 152
- # it is actually 76 1st read, 75 2nd read
- seq.mid_point = 76
-
- # end filters
- if seq.paired:
- target_name = fastq_paired_template % filename_attributes
- else:
- target_name = fastq_single_template % filename_attributes
-
- target_pathname = os.path.join(result_dir, target_name)
- if force or not os.path.exists(target_pathname):
- t = needed_targets.setdefault(target_pathname, {})
- t[seq.filetype] = seq
-
- return needed_targets
-
def link_daf(daf_path, library_result_map):
if not os.path.exists(daf_path):
os.link(daf_path, submission_daf)
-def make_submission_ini(host, apidata, library_result_map, paired=True):
- #attributes = get_filename_attribute_map(paired)
- view_map = NameToViewMap(host, apidata)
-
- candidate_fastq_src = {}
-
+def scan_submission_dirs(view_map, library_result_map):
+ """Look through our submission directories and collect needed information
+ """
for lib_id, result_dir in library_result_map:
- order_by = ['order_by=files', 'view', 'replicate', 'cell',
- 'readType', 'mapAlgorithm', 'insertLength', 'md5sum' ]
- inifile = ['[config]']
- inifile += [" ".join(order_by)]
- inifile += ['']
- line_counter = 1
- result_ini = os.path.join(result_dir, result_dir+'.ini')
-
- # write other lines
- submission_files = os.listdir(result_dir)
- fastqs = {}
- fastq_attributes = {}
- for f in submission_files:
- attributes = view_map.find_attributes(f, lib_id)
- if attributes is None:
- raise ValueError("Unrecognized file: %s" % (f,))
- attributes['md5sum'] = "None"
-
- ext = attributes["extension"]
- if attributes['view'] is None:
- continue
- elif attributes.get("type", None) == 'fastq':
- fastqs.setdefault(ext, set()).add(f)
- fastq_attributes[ext] = attributes
- else:
- md5sum = make_md5sum(os.path.join(result_dir,f))
- if md5sum is not None:
- attributes['md5sum']=md5sum
- inifile.extend(
- make_submission_section(line_counter,
- [f],
- attributes
- )
- )
- inifile += ['']
- line_counter += 1
- # add in fastqs on a single line.
-
- for extension, fastq_files in fastqs.items():
- inifile.extend(
- make_submission_section(line_counter,
- fastq_files,
- fastq_attributes[extension])
- )
- inifile += ['']
- line_counter += 1
-
- f = open(result_ini,'w')
- f.write(os.linesep.join(inifile))
-
+ logging.info("Importing %s from %s" % (lib_id, result_dir))
+ try:
+ view_map.import_submission_dir(result_dir, lib_id)
+ except MetadataLookupException, e:
+ logging.error("Skipping %s: %s" % (lib_id, str(e)))
-def make_lane_dict(lib_db, lib_id):
- """
- Convert the lane_set in a lib_db to a dictionary
- indexed by flowcell ID
- """
- result = []
- for lane in lib_db[lib_id]['lane_set']:
- result.append((lane['flowcell'], lane))
- return dict(result)
-
-
-def make_all_ddfs(library_result_map, daf_name, make_condor=True, force=False):
+def make_all_ddfs(view_map, library_result_map, daf_name, make_condor=True, force=False):
dag_fragment = []
for lib_id, result_dir in library_result_map:
- ininame = result_dir+'.ini'
- inipathname = os.path.join(result_dir, ininame)
- if os.path.exists(inipathname):
- dag_fragment.extend(
- make_ddf(ininame, daf_name, True, make_condor, result_dir)
- )
+ submissionNode = view_map.get_submission_node(result_dir)
+ dag_fragment.extend(
+ make_ddf(view_map, submissionNode, daf_name, make_condor, result_dir)
+ )
if make_condor and len(dag_fragment) > 0:
dag_filename = 'submission.dagman'
f.close()
-def make_ddf(ininame, daf_name, guess_ddf=False, make_condor=False, outdir=None):
+def make_ddf(view_map, submissionNode, daf_name, make_condor=False, outdir=None):
"""
Make ddf files, and bonus condor file
"""
+ query_template = """PREFIX libraryOntology: <http://jumpgate.caltech.edu/wiki/LibraryOntology#>
+PREFIX submissionOntology: <http://jumpgate.caltech.edu/wiki/UcscSubmissionOntology#>
+PREFIX ucscDaf: <http://jumpgate.caltech.edu/wiki/UcscDaf#>
+
+select ?submitView ?filename ?md5sum ?view ?cell ?antibody ?sex ?control ?controlId ?labExpId ?labVersion ?treatment ?protocol
+WHERE {
+ ?file ucscDaf:filename ?filename ;
+ ucscDaf:md5sum ?md5sum .
+ ?submitView ucscDaf:has_file ?file ;
+ ucscDaf:view ?dafView ;
+ ucscDaf:submission %(submission)s .
+ ?dafView ucscDaf:name ?view .
+ %(submission)s submissionOntology:library ?library .
+
+ OPTIONAL { ?submitView ucscDaf:antibody ?antibody }
+ OPTIONAL { ?submitView ucscDaf:cell ?cell }
+ OPTIONAL { ?submitView ucscDaf:control ?control }
+ OPTIONAL { ?library ucscDaf:controlId ?controlId }
+ OPTIONAL { ?submitView ucscDaf:sex ?sex }
+ OPTIONAL { ?submitView ucscDaf:labVersion ?labExpId }
+ OPTIONAL { ?submitView ucscDaf:labVersion ?labVersion }
+ OPTIONAL { ?library ucscDaf:treatment ?treatment }
+ OPTIONAL { ?submitView ucscDaf:protocol ?protocol }
+}
+ORDER BY ?submitView"""
dag_fragments = []
- curdir = os.getcwd()
+
+ name = fromTypedNode(view_map.model.get_target(submissionNode, submissionOntology['name']))
+ if name is None:
+ logging.error("Need name for %s" % (str(submissionNode)))
+ return []
+
+ ddf_name = name + '.ddf'
if outdir is not None:
- os.chdir(outdir)
- output = sys.stdout
- ddf_name = None
- if guess_ddf:
- ddf_name = make_ddf_name(ininame)
- print ddf_name
- output = open(ddf_name,'w')
-
- file_list = read_ddf_ini(ininame, output)
+ outfile = os.path.join(outdir, ddf_name)
+ output = open(outfile,'w')
+ else:
+ output = sys.stdout
+
+ formatted_query = query_template % {'submission': str(submissionNode)}
+
+ query = RDF.SPARQLQuery(formatted_query)
+ results = query.execute(view_map.model)
+
+ variables = ['filename']
+ # filename goes first
+ variables.extend(view_map.get_daf_variables())
+ variables += ['controlId', 'labExpId', 'md5sum']
+ output.write('\t'.join(variables))
+ output.write(os.linesep)
+
+ all_views = {}
+ all_files = []
+ for row in results:
+ viewname = fromTypedNode(row['view'])
+ current = all_views.setdefault(viewname, {})
+ for variable_name in variables:
+ value = str(fromTypedNode(row[variable_name]))
+ if variable_name in ('filename', 'md5sum'):
+ current.setdefault(variable_name,[]).append(value)
+ else:
+ current[variable_name] = value
+
+ for view in all_views.keys():
+ line = []
+ for variable_name in variables:
+ if variable_name in ('filename', 'md5sum'):
+ line.append(','.join(all_views[view][variable_name]))
+ else:
+ line.append(all_views[view][variable_name])
+ output.write("\t".join(line))
+ output.write(os.linesep)
+ all_files.extend(all_views[view]['filename'])
+
logging.info(
- "Read config {0}, found files: {1}".format(
- ininame, ", ".join(file_list)))
+ "Examined {0}, found files: {1}".format(
+ str(submissionNode), ", ".join(all_files)))
- file_list.append(daf_name)
- if ddf_name is not None:
- file_list.append(ddf_name)
+ all_files.append(daf_name)
+ all_files.append(ddf_name)
if make_condor:
- archive_condor = make_condor_archive_script(ininame, file_list)
- upload_condor = make_condor_upload_script(ininame)
+ archive_condor = make_condor_archive_script(name, all_files, outdir)
+ upload_condor = make_condor_upload_script(name, outdir)
dag_fragments.extend(
- make_dag_fragment(ininame, archive_condor, upload_condor)
+ make_dag_fragment(name, archive_condor, upload_condor)
)
- os.chdir(curdir)
-
return dag_fragments
-def read_ddf_ini(filename, output=sys.stdout):
- """
- Read a ini file and dump out a tab delmited text file
- """
- file_list = []
- config = SafeConfigParser()
- config.read(filename)
-
- order_by = shlex.split(config.get("config", "order_by"))
-
- output.write("\t".join(order_by))
- output.write(os.linesep)
- sections = config.sections()
- sections.sort()
- for section in sections:
- if section == "config":
- # skip the config block
- continue
- values = []
- for key in order_by:
- v = config.get(section, key)
- values.append(v)
- if key == 'files':
- file_list.extend(parse_filelist(v))
-
- output.write("\t".join(values))
- output.write(os.linesep)
- return file_list
-
-
def read_library_result_map(filename):
"""
Read a file that maps library id to result directory.
return results
-def make_condor_archive_script(ininame, files):
+def make_condor_archive_script(name, files, outdir=None):
script = """Universe = vanilla
Executable = /bin/tar
arguments = czvhf ../%(archivename)s %(filelist)s
-Error = compress.err.$(Process).log
+Error = compress.out.$(Process).log
Output = compress.out.$(Process).log
Log = /tmp/submission-compress-%(user)s.log
initialdir = %(initialdir)s
queue
"""
+ if outdir is None:
+ outdir = os.getcwd()
for f in files:
- if not os.path.exists(f):
+ pathname = os.path.join(outdir, f)
+ if not os.path.exists(pathname):
raise RuntimeError("Missing %s" % (f,))
- context = {'archivename': make_submission_name(ininame),
+ context = {'archivename': make_submission_name(name),
'filelist': " ".join(files),
- 'initialdir': os.getcwd(),
+ 'initialdir': os.path.abspath(outdir),
'user': os.getlogin()}
- condor_script = make_condor_name(ininame, 'archive')
+ condor_script = os.path.join(outdir, make_condor_name(name, 'archive'))
condor_stream = open(condor_script,'w')
condor_stream.write(script % context)
condor_stream.close()
return condor_script
-def make_condor_upload_script(ininame):
+def make_condor_upload_script(name, outdir=None):
script = """Universe = vanilla
Executable = /usr/bin/lftp
arguments = -c put ../%(archivename)s -o ftp://%(ftpuser)s:%(ftppassword)s@%(ftphost)s/%(archivename)s
-Error = upload.err.$(Process).log
+Error = upload.out.$(Process).log
Output = upload.out.$(Process).log
Log = /tmp/submission-upload-%(user)s.log
initialdir = %(initialdir)s
queue
"""
+ if outdir is None:
+ outdir = os.getcwd()
+
auth = netrc.netrc(os.path.expanduser("~diane/.netrc"))
encodeftp = 'encodeftp.cse.ucsc.edu'
ftpuser = auth.hosts[encodeftp][0]
ftppassword = auth.hosts[encodeftp][2]
- context = {'archivename': make_submission_name(ininame),
- 'initialdir': os.getcwd(),
+ context = {'archivename': make_submission_name(name),
+ 'initialdir': os.path.abspath(outdir),
'user': os.getlogin(),
'ftpuser': ftpuser,
'ftppassword': ftppassword,
'ftphost': encodeftp}
- condor_script = make_condor_name(ininame, 'upload')
+ condor_script = os.path.join(outdir, make_condor_name(name, 'upload'))
condor_stream = open(condor_script,'w')
condor_stream.write(script % context)
condor_stream.close()
return contents
-def condor_srf_to_fastq(srf_file, target_pathname, paired, flowcell=None,
- mid=None, force=False):
- py = srf2fastq.__file__
- args = [ py, srf_file, ]
- if paired:
- args.extend(['--left', target_pathname])
- # this is ugly. I did it because I was pregenerating the target
- # names before I tried to figure out what sources could generate
- # those targets, and everything up to this point had been
- # one-to-one. So I couldn't figure out how to pair the
- # target names.
- # With this at least the command will run correctly.
- # however if we rename the default targets, this'll break
- # also I think it'll generate it twice.
- args.extend(['--right',
- target_pathname.replace('_r1.fastq', '_r2.fastq')])
- else:
- args.extend(['--single', target_pathname ])
- if flowcell is not None:
- args.extend(['--flowcell', flowcell])
-
- if mid is not None:
- args.extend(['-m', str(mid)])
-
- if force:
- args.extend(['--force'])
-
- script = """
-arguments="%s"
-queue
-""" % (" ".join(args),)
-
- return script
-
-
-def condor_qseq_to_fastq(qseq_file, target_pathname, flowcell=None, force=False):
- py = qseq2fastq.__file__
- args = [py, '-i', qseq_file, '-o', target_pathname ]
- if flowcell is not None:
- args.extend(['-f', flowcell])
- script = """
-arguments="%s"
-queue
-""" % (" ".join(args))
-
- return script
-
-def find_archive_sequence_files(host, apidata, sequences_path,
- library_result_map):
- """
- Find all the archive sequence files possibly associated with our results.
-
- """
- logging.debug("Searching for sequence files in: %s" %(sequences_path,))
-
- lib_db = {}
- seq_dirs = set()
- #seq_dirs = set(os.path.join(sequences_path, 'srfs'))
- candidate_lanes = {}
- for lib_id, result_dir in library_result_map:
- lib_info = get_library_info(host, apidata, lib_id)
- lib_info['lanes'] = {}
- lib_db[lib_id] = lib_info
-
- for lane in lib_info['lane_set']:
- lane_key = (lane['flowcell'], lane['lane_number'])
- candidate_lanes[lane_key] = lib_id
- seq_dirs.add(os.path.join(sequences_path,
- 'flowcells',
- lane['flowcell']))
- logging.debug("Seq_dirs = %s" %(unicode(seq_dirs)))
- candidate_seq_list = scan_for_sequences(seq_dirs)
-
- # at this point we have too many sequences as scan_for_sequences
- # returns all the sequences in a flowcell directory
- # so lets filter out the extras
-
- for seq in candidate_seq_list:
- lane_key = (seq.flowcell, seq.lane)
- lib_id = candidate_lanes.get(lane_key, None)
- if lib_id is not None:
- lib_info = lib_db[lib_id]
- lib_info['lanes'].setdefault(lane_key, set()).add(seq)
-
- return lib_db
-
-
-class NameToViewMap(object):
- """Determine view attributes for a given submission file name
- """
- def __init__(self, root_url, apidata):
- self.root_url = root_url
- self.apidata = apidata
-
- self.lib_cache = {}
- self.lib_paired = {}
- # ma is "map algorithm"
- ma = 'TH1014'
-
- self.patterns = [
- ('*.bai', None),
- ('*.splices.bam', 'Splices'),
- ('*.bam', self._guess_bam_view),
- ('junctions.bed', 'Junctions'),
- ('*.jnct', 'Junctions'),
- ('*unique.bigwig', None),
- ('*plus.bigwig', 'PlusSignal'),
- ('*minus.bigwig', 'MinusSignal'),
- ('*.bigwig', 'Signal'),
- ('*.tar.bz2', None),
- ('*.condor', None),
- ('*.daf', None),
- ('*.ddf', None),
-
- ('*ufflinks?0.9.3.genes.gtf', 'GeneDeNovo'),
- ('*ufflinks?0.9.3.transcripts.gtf', 'TranscriptDeNovo'),
- ('*GENCODE-v3c.exonFPKM.gtf', 'ExonsGencV3c'),
- ('*GENCODE-v3c.genes.gtf', 'GeneGencV3c'),
- ('*GENCODE-v3c.transcripts.gtf', 'TranscriptGencV3c'),
- ('*GENCODE-v3c.TSS.gtf', 'TSS'),
- ('*.junctions.bed6+3', 'Junctions'),
-
- ('*.?ufflinks-0.9.0?genes.expr', 'GeneDeNovo'),
- ('*.?ufflinks-0.9.0?transcripts.expr', 'TranscriptDeNovo'),
- ('*.?ufflinks-0.9.0?transcripts.gtf', 'GeneModel'),
-
- ('*.GENCODE-v3c?genes.expr', 'GeneGCV3c'),
- ('*.GENCODE-v3c?transcript*.expr', 'TranscriptGCV3c'),
- ('*.GENCODE-v3c?transcript*.gtf', 'TranscriptGencV3c'),
- ('*.GENCODE-v4?genes.expr', None), #'GeneGCV4'),
- ('*.GENCODE-v4?transcript*.expr', None), #'TranscriptGCV4'),
- ('*.GENCODE-v4?transcript*.gtf', None), #'TranscriptGencV4'),
- ('*_1.75mers.fastq', 'FastqRd1'),
- ('*_2.75mers.fastq', 'FastqRd2'),
- ('*_r1.fastq', 'FastqRd1'),
- ('*_r2.fastq', 'FastqRd2'),
- ('*.fastq', 'Fastq'),
- ('*.gtf', 'GeneModel'),
- ('*.ini', None),
- ('*.log', None),
- ('*.md5', None),
- ('paired-end-distribution*', 'InsLength'),
- ('*.stats.txt', 'InsLength'),
- ('*.srf', None),
- ('*.wig', None),
- ('*.zip', None),
- ('transfer_log', None),
- ]
-
- self.views = {
- None: {"MapAlgorithm": "NA"},
- "Paired": {"MapAlgorithm": ma},
- "Aligns": {"MapAlgorithm": ma},
- "Single": {"MapAlgorithm": ma},
- "Splices": {"MapAlgorithm": ma},
- "Junctions": {"MapAlgorithm": ma},
- "PlusSignal": {"MapAlgorithm": ma},
- "MinusSignal": {"MapAlgorithm": ma},
- "Signal": {"MapAlgorithm": ma},
- "GeneModel": {"MapAlgorithm": ma},
- "GeneDeNovo": {"MapAlgorithm": ma},
- "TranscriptDeNovo": {"MapAlgorithm": ma},
- "ExonsGencV3c": {"MapAlgorithm": ma},
- "GeneGencV3c": {"MapAlgorithm": ma},
- "TSS": {"MapAlgorithm": ma},
- "GeneGCV3c": {"MapAlgorithm": ma},
- "TranscriptGCV3c": {"MapAlgorithm": ma},
- "TranscriptGencV3c": {"MapAlgorithm": ma},
- "GeneGCV4": {"MapAlgorithm": ma},
- "TranscriptGCV4": {"MapAlgorithm": ma},
- "FastqRd1": {"MapAlgorithm": "NA", "type": "fastq"},
- "FastqRd2": {"MapAlgorithm": "NA", "type": "fastq"},
- "Fastq": {"MapAlgorithm": "NA", "type": "fastq" },
- "InsLength": {"MapAlgorithm": ma},
- }
- # view name is one of the attributes
- for v in self.views.keys():
- self.views[v]['view'] = v
-
- def find_attributes(self, pathname, lib_id):
- """Looking for the best extension
- The 'best' is the longest match
-
- :Args:
- filename (str): the filename whose extention we are about to examine
- """
- path, filename = os.path.splitext(pathname)
- if not self.lib_cache.has_key(lib_id):
- self.lib_cache[lib_id] = get_library_info(self.root_url,
- self.apidata, lib_id)
-
- lib_info = self.lib_cache[lib_id]
- if lib_info['cell_line'].lower() == 'unknown':
- logging.warn("Library %s missing cell_line" % (lib_id,))
- attributes = {
- 'cell': lib_info['cell_line'],
- 'replicate': lib_info['replicate'],
- }
- is_paired = self._is_paired(lib_id, lib_info)
-
- if is_paired:
- attributes.update(self.get_paired_attributes(lib_info))
- else:
- attributes.update(self.get_single_attributes(lib_info))
-
- for pattern, view in self.patterns:
- if fnmatch.fnmatch(pathname, pattern):
- if callable(view):
- view = view(is_paired=is_paired)
-
- attributes.update(self.views[view])
- attributes["extension"] = pattern
- return attributes
-
-
- def _guess_bam_view(self, is_paired=True):
- """Guess a view name based on library attributes
- """
- if is_paired:
- return "Paired"
- else:
- return "Aligns"
-
-
- def _is_paired(self, lib_id, lib_info):
- """Determine if a library is paired end"""
- if len(lib_info["lane_set"]) == 0:
- return False
-
- if not self.lib_paired.has_key(lib_id):
- is_paired = 0
- isnot_paired = 0
- failed = 0
- # check to see if all the flowcells are the same.
- # otherwise we might need to do something complicated
- for flowcell in lib_info["lane_set"]:
- # yes there's also a status code, but this comparison
- # is easier to read
- if flowcell["status"].lower() == "failed":
- # ignore failed flowcell
- failed += 1
- pass
- elif flowcell["paired_end"]:
- is_paired += 1
- else:
- isnot_paired += 1
-
- logging.debug("Library %s: %d paired, %d single, %d failed" % \
- (lib_info["library_id"], is_paired, isnot_paired, failed))
-
- if is_paired > isnot_paired:
- self.lib_paired[lib_id] = True
- elif is_paired < isnot_paired:
- self.lib_paired[lib_id] = False
- else:
- raise RuntimeError("Equal number of paired & unpaired lanes."\
- "Can't guess library paired status")
-
- return self.lib_paired[lib_id]
-
- def get_paired_attributes(self, lib_info):
- if lib_info['insert_size'] is None:
- errmsg = "Library %s is missing insert_size, assuming 200"
- logging.warn(errmsg % (lib_info["library_id"],))
- insert_size = 200
- else:
- insert_size = lib_info['insert_size']
- return {'insertLength': insert_size,
- 'readType': '2x75'}
-
- def get_single_attributes(self, lib_info):
- return {'insertLength':'ilNA',
- 'readType': '1x75D'
- }
-
def make_submission_section(line_counter, files, attributes):
"""
Create a section in the submission ini file
return ".".join(elements)
-def make_submit_script(target, header, body_list):
- """
- write out a text file
-
- this was intended for condor submit scripts
-
- Args:
- target (str or stream):
- if target is a string, we will open and close the file
- if target is a stream, the caller is responsible.
-
- header (str);
- header to write at the beginning of the file
- body_list (list of strs):
- a list of blocks to add to the file.
- """
- if type(target) in types.StringTypes:
- f = open(target,"w")
- else:
- f = target
- f.write(header)
- for entry in body_list:
- f.write(entry)
- if type(target) in types.StringTypes:
- f.close()
-
def parse_filelist(file_string):
return file_string.split(",")
if not os.path.exists(f):
raise RuntimeError("%s does not exist" % (f,))
-def make_md5sum(filename):
- """Quickly find the md5sum of a file
- """
- md5_cache = os.path.join(filename+".md5")
- print md5_cache
- if os.path.exists(md5_cache):
- logging.debug("Found md5sum in {0}".format(md5_cache))
- stream = open(md5_cache,'r')
- lines = stream.readlines()
- md5sum = parse_md5sum_line(lines, filename)
- else:
- md5sum = make_md5sum_unix(filename, md5_cache)
- return md5sum
-
-def make_md5sum_unix(filename, md5_cache):
- cmd = ["md5sum", filename]
- logging.debug("Running {0}".format(" ".join(cmd)))
- p = Popen(cmd, stdout=PIPE)
- stdin, stdout = p.communicate()
- retcode = p.wait()
- logging.debug("Finished {0} retcode {1}".format(" ".join(cmd), retcode))
- if retcode != 0:
- logging.error("Trouble with md5sum for {0}".format(filename))
- return None
- lines = stdin.split(os.linesep)
- md5sum = parse_md5sum_line(lines, filename)
- if md5sum is not None:
- logging.debug("Caching sum in {0}".format(md5_cache))
- stream = open(md5_cache, "w")
- stream.write(stdin)
- stream.close()
- return md5sum
-
-def parse_md5sum_line(lines, filename):
- md5sum, md5sum_filename = lines[0].split()
- if md5sum_filename != filename:
- errmsg = "MD5sum and I disagre about filename. {0} != {1}"
- logging.error(errmsg.format(filename, md5sum_filename))
- return None
- return md5sum
-
if __name__ == "__main__":
main()
from benderjab import rpc
-def runfolder_validate(fname):
- """
- Return True if fname looks like a runfolder name
- """
- if re.match("^[0-9]{6}_[-A-Za-z0-9_]*$", fname):
- return True
- else:
- return False
+from htsworkflow.automation.solexa import is_runfolder
class rsync(object):
def __init__(self, sources, dest, pwfile):
self.rsync.poll()
# see if we're still copying
- if runfolder_validate(runDir):
+ if is_runfolder(runDir):
logging.info("recevied sequencing finshed for %s" % (runDir))
self.pending.append(runDir)
self.startCopy()
--- /dev/null
+"""Utilities to help process solexa/illumina runfolders
+"""
+import os
+import re
+
+def is_runfolder(name):
+    """
+    Is it a runfolder?
+
+    >>> print is_runfolder('090630_HWUSI-EAS999_0006_30LNFAAXX')
+    True
+    >>> print is_runfolder('hello')
+    False
+    """
+    # A runfolder name is a six-digit date stamp (YYMMDD) followed by
+    # an underscore and a machine/run/flowcell suffix.  The pattern is
+    # anchored at both ends, so trailing characters (such as a stray
+    # space) cause the match to fail -- see the unit tests.
+    if re.match("^[0-9]{6}_[-A-Za-z0-9_]*$", name):
+        return True
+    else:
+        return False
+
+def get_top_dir(root, path):
+    """
+    Return the directory in path that is a subdirectory of root.
+    e.g.
+
+    >>> print get_top_dir('/a/b/c', '/a/b/c/d/e/f')
+    d
+    >>> print get_top_dir('/a/b/c/', '/a/b/c/d/e/f')
+    d
+    >>> print get_top_dir('/a/b/c', '/g/e/f')
+    None
+    >>> print get_top_dir('/a/b/c', '/a/b/c')
+    <BLANKLINE>
+    """
+    # NOTE(review): startswith is a plain string prefix test, so
+    # root='/a/b' would also match path='/a/bc/...'; confirm callers
+    # always pass normalized directory paths.
+    if path.startswith(root):
+        # Strip the root prefix, then drop one leading separator so the
+        # split below does not produce an empty first component.
+        subpath = path[len(root):]
+        if subpath.startswith('/'):
+            subpath = subpath[1:]
+        return subpath.split(os.path.sep)[0]
+    else:
+        return None
+
import time
from htsworkflow.util import mount
+from htsworkflow.automation.solexa import is_runfolder, get_top_dir
# this uses pyinotify
import pyinotify
from benderjab import rpc
-def is_runfolder(name):
- """
- Is it a runfolder?
-
- >>> print is_runfolder('090630_HWUSI-EAS999_0006_30LNFAAXX')
- True
- >>> print is_runfolder('hello')
- False
- """
- if re.match("[0-9]{6}_.*", name):
- return True
- else:
- return False
-
-def get_top_dir(root, path):
- """
- Return the directory in path that is a subdirectory of root.
- e.g.
-
- >>> print get_top_dir('/a/b/c', '/a/b/c/d/e/f')
- d
- >>> print get_top_dir('/a/b/c/', '/a/b/c/d/e/f')
- d
- >>> print get_top_dir('/a/b/c', '/g/e/f')
- None
- >>> print get_top_dir('/a/b/c', '/a/b/c')
- <BLANKLINE>
- """
- if path.startswith(root):
- subpath = path[len(root):]
- if subpath.startswith('/'):
- subpath = subpath[1:]
- return subpath.split(os.path.sep)[0]
- else:
- return None
-
class WatcherEvent(object):
"""
Track information about a file event
self.last_event = {}
self.watchmanager = watchmanager
self.bot = bot
+ self.log = bot.log
if completion_files is not None:
completion_files = [ x.lower() for x in completion_files ]
self.completion_files = completion_files
runfolder = os.path.join(watch_path, target)
if not is_runfolder(target):
- logging.debug("Skipping %s, not a runfolder" % (target,))
+ self.log.debug("Skipping %s, not a runfolder" % (target,))
continue
# grab the previous events for this watch path
self.last_event[watch_path][target].complete = True
msg += "(completed)"
- logging.debug(msg)
+ self.log.debug(msg)
def process_IN_DELETE(self, event):
- logging.debug("Remove: %s" % os.path.join(event.path, event.name))
+ self.log.debug("Remove: %s" % os.path.join(event.path, event.name))
pass
def process_IN_UNMOUNT(self, event):
pathname = os.path.join(event.path, event.name)
- logging.debug("IN_UNMOUNT: %s" % (pathname,))
+ self.log.debug("IN_UNMOUNT: %s" % (pathname,))
self.bot.unmount_watch(event.path)
class SpoolWatcher(rpc.XmlRpcBot):
mounts.append(w)
self.mounts_to_watches[mount_location] = mounts
- logging.info(u"Watching:"+unicode(w))
+ self.log.info(u"Watching:"+unicode(w))
self.wdds.append(self.wm.add_watch(w, mask, rec=True, auto_add=True))
def unmount_watch(self, event_path):
# the list getting shorter
for i in range(len(self.wdds),0, -1):
wdd = self.wdds[i]
- logging.info(u'unmounting: '+unicode(wdd.items()))
+ self.log.info(u'unmounting: '+unicode(wdd.items()))
self.wm.rm_watch(wdd.values())
del self.wdds[i]
self.mounted = False
if root_copy_url[-1] != '/':
root_copy_url += '/'
copy_url = root_copy_url + list_event_dir
- logging.debug('Copy url: %s' % (copy_url,))
+ self.log.debug('Copy url: %s' % (copy_url,))
return copy_url
def process_notify(self, *args):
# restart the watch
for watch in self.mounts_to_watches[mount_point]:
self.add_watch(watch)
- logging.info(
+ self.log.info(
"%s was remounted, restarting watch" % \
(mount_point)
)
super(SpoolWatcher, self).stop()
def startCopy(self, copy_url=None):
- logging.debug("writes seem to have stopped")
+ self.log.debug("writes seem to have stopped")
if self.notify_runner is not None:
for r in self.notify_runner:
self.rpc_send(r, tuple([copy_url]), 'startCopy')
def sequencingFinished(self, run_dir):
# need to strip off self.watchdirs from rundir I suspect.
- logging.info("run.completed in " + str(run_dir))
+ self.log.info("run.completed in " + str(run_dir))
for watch in self.watchdirs:
if not run_dir.startswith(watch):
print "%s didn't start with %s" % (run_dir, watch)
else:
stripped_run_dir = run_dir
- logging.debug("stripped to " + stripped_run_dir)
+ self.log.debug("stripped to " + stripped_run_dir)
if self.notify_users is not None:
for u in self.notify_users:
self.send(u, 'Sequencing run %s finished' % \
import os
-from htsworkflow.automation.copier import runfolder_validate
+from htsworkflow.automation.solexa import is_runfolder
def extract_runfolder_path(watchdir, event):
runfolder_path = watchdir
fragments = path[len(watchdir):].split(os.path.sep)
for f in fragments:
runfolder_path = os.path.join(runfolder_path, f)
- if runfolder_validate(f):
+ if is_runfolder(f):
return runfolder_path
return None
--- /dev/null
+
+import unittest
+
+from htsworkflow.automation import solexa
+
+class testSolexaRunfolderUtils(unittest.TestCase):
+    # failUnlessEqual is the Python 2-era spelling of assertEqual.
+    def test_is_runfolder(self):
+        # the anchored pattern rejects short, punctuated, and
+        # trailing-whitespace names, and accepts YYMMDD_suffix names
+        self.failUnlessEqual(solexa.is_runfolder(""), False)
+        self.failUnlessEqual(solexa.is_runfolder("1345_23"), False)
+        self.failUnlessEqual(solexa.is_runfolder("123456_asdf-$23'"), False)
+        self.failUnlessEqual(solexa.is_runfolder("123456_USI-EAS44"), True)
+        self.failUnlessEqual(solexa.is_runfolder("123456_USI-EAS44 "), False)
+
+
+    def test_get_top_dir(self):
+        test_data = [ # root, path, response
+            ('/a/b/c', '/a/b/c/d/e/f', 'd'),
+            ('/a/b/c/', '/a/b/c/d/e/f', 'd'),
+            ('/a/b/c', '/g/e/f', None),
+            # path == root yields the empty string, not None
+            ('/a/b/c', '/a/b/c', ''),
+        ]
+
+        for root, path, response in test_data:
+            self.failUnlessEqual(solexa.get_top_dir(root, path), response)
+
+def suite():
+    return unittest.makeSuite(testSolexaRunfolderUtils, 'test')
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="suite")
+
+
# some core functions of analysis manager module
-from django.http import HttpResponse
+
from datetime import datetime
from string import *
import re
-from htsworkflow.frontend import settings
-from htsworkflow.frontend.analysis.models import Task, Project
+
+from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
+from django.http import HttpResponse
+
+from htsworkflow.frontend.analysis.models import Task, Project
def updStatus(request):
ClIP = request.META['REMOTE_ADDR']
from django.db import models
+from django.conf import settings
from datetime import datetime
-from htsworkflow.frontend import settings
from htsworkflow.frontend.samples.models import Library
from string import *
Define some alternate authentication methods
"""
from django.core.exceptions import PermissionDenied
-
-from htsworkflow.frontend import settings
+from django.conf import settings
apidata = {'apiid': u'0', 'apikey': settings.DEFAULT_API_KEY}
-[{"pk": 1, "model": "bcmagic.keywordmap", "fields": {"regex": "(?P<uuid>[A-Fa-f0-9]+)", "url_template": "/samples/freezer/{{ uuid }}/", "keyword": "frzr"}}, {"pk": 2, "model": "bcmagic.keywordmap", "fields": {"regex": "(?P<uuid>[A-Fa-f0-9]+)", "url_template": "/samples/container/{{ uuid }}/", "keyword": "cntr"}}, {"pk": 3, "model": "bcmagic.keywordmap", "fields": {"regex": "(?P<sampleid>\\d+)\\|(?P<owner>[A-Za-z0-9_\\- ]+)", "url_template": "/samples/sample/{{ sampleid }}/", "keyword": "s"}}, {"pk": 4, "model": "bcmagic.keywordmap", "fields": {"regex": "(?P<search>[\\S\\s]+)", "url_template": "http://www.google.com/search?q={{ search }}", "keyword": "gg"}}, {"pk": 5, "model": "bcmagic.keywordmap", "fields": {"regex": "(?P<search>[\\S\\s]+)", "url_template": "http://www.flickr.com/search/?q={{ search }}", "keyword": "flickr"}}, {"pk": 6, "model": "bcmagic.keywordmap", "fields": {"regex": "(?P<uuid>[A-Fa-f0-9]+)", "url_template": "/inventory/{{ uuid }}/", "keyword": "invu"}}, {"pk": 7, "model": "bcmagic.keywordmap", "fields": {"regex": "(?P<barcode_id>.+)", "url_template": "/inventory/{{barcode_id}}/", "keyword": "invb"}}, {"pk": 1, "model": "bcmagic.printer", "fields": {"name": "ZM400 1.25x1", "label_height": 1.0, "notes": "Everyday use labels", "label_width": 1.25, "label_shape": "Square", "model": "Zebra ZM400", "ip_address": "131.215.54.194"}}, {"pk": 2, "model": "bcmagic.printer", "fields": {"name": "ZM400 3x3", "label_height": 3.0, "notes": "Larger everyday use labels", "label_width": 3.0, "label_shape": "Square", "model": "Zebra ZM400", "ip_address": "131.215.34.199"}}]
+[{"pk": 1,
+ "model": "bcmagic.keywordmap",
+ "fields": {"regex": "(?P<uuid>[A-Fa-f0-9]+)",
+ "url_template": "/samples/freezer/{{ uuid }}/",
+ "keyword": "frzr"}},
+ {"pk": 2,
+ "model": "bcmagic.keywordmap",
+ "fields": {"regex": "(?P<uuid>[A-Fa-f0-9]+)",
+ "url_template": "/samples/container/{{ uuid }}/",
+ "keyword": "cntr"}},
+ {"pk": 3,
+ "model": "bcmagic.keywordmap",
+ "fields": {"regex": "(?P<sampleid>\\d+)\\|(?P<owner>[A-Za-z0-9_\\- ]+)",
+ "url_template": "/samples/sample/{{ sampleid }}/",
+ "keyword": "s"}},
+ {"pk": 4,
+ "model": "bcmagic.keywordmap",
+ "fields": {"regex": "(?P<search>[\\S\\s]+)",
+ "url_template": "http://www.google.com/search?q={{ search }}",
+ "keyword": "gg"}},
+ {"pk": 5,
+ "model": "bcmagic.keywordmap",
+ "fields": {"regex": "(?P<search>[\\S\\s]+)",
+ "url_template": "http://www.flickr.com/search/?q={{ search }}",
+ "keyword": "flickr"}},
+ {"pk": 6,
+ "model": "bcmagic.keywordmap",
+ "fields": {"regex": "(?P<uuid>[A-Fa-f0-9]+)",
+ "url_template": "/inventory/{{ uuid }}/",
+ "keyword": "invu"}},
+ {"pk": 7,
+ "model": "bcmagic.keywordmap",
+ "fields": {"regex": "(?P<barcode_id>.+)",
+ "url_template": "/inventory/{{barcode_id}}/",
+ "keyword": "invb"}},
+ {"pk": 1,
+ "model": "bcmagic.printer",
+ "fields": {"name": "ZM400 1.25x1",
+ "label_height": 1.0,
+ "notes": "Everyday use labels",
+ "label_width": 1.25,
+ "label_shape": "Square",
+ "model": "Zebra ZM400",
+ "ip_address": "131.215.54.194"}},
+ {"pk": 2,
+ "model": "bcmagic.printer",
+ "fields": {"name": "ZM400 3x3",
+ "label_height": 3.0,
+ "notes": "Larger everyday use labels",
+ "label_width": 3.0,
+ "label_shape": "Square",
+ "model": "Zebra ZM400",
+ "ip_address": "131.215.34.199"}}]
-from htsworkflow.frontend import settings
+from django.conf import settings
import ftplib
import socket
Return a bcm dictionary with a command to automatically fill the
corresponding "field" with "value"
"""
- return {'mode': 'autofill', 'field': field, 'value': value}
\ No newline at end of file
+ return {'mode': 'autofill', 'field': field, 'value': value}
+from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.core.exceptions import ObjectDoesNotExist
from htsworkflow.frontend.eland_config import forms
-from htsworkflow.frontend import settings
from htsworkflow.frontend.experiments import models
import os
-from htsworkflow.frontend.experiments.models import FlowCell, DataRun, ClusterStation, Sequencer, Lane
+from htsworkflow.frontend.experiments.models import \
+ FlowCell, DataRun, DataFile, FileType, ClusterStation, Sequencer, Lane
from django.contrib import admin
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.forms import ModelForm
from django.forms.widgets import TextInput
from django.utils.translation import ugettext_lazy as _
+class DataFileForm(ModelForm):
+ class Meta:
+ model = DataFile
+
+class DataFileInline(admin.TabularInline):
+ model = DataFile
+ form = DataFileForm
+ raw_id_fields = ('library',)
+ extra = 0
+
class DataRunOptions(admin.ModelAdmin):
search_fields = [
+ 'flowcell_id',
'run_folder',
'run_note',
- 'config_params', ]
+ ]
list_display = [
- 'run_folder',
- 'Flowcell_Info',
+ 'runfolder_name',
+ 'result_dir',
'run_start_time',
- 'main_status',
- 'run_note',
]
- list_filter = ('run_status', 'run_start_time')
+ fieldsets = (
+ (None, {
+ 'fields': (('flowcell', 'run_status'),
+ ('runfolder_name', 'cycle_start', 'cycle_stop'),
+ ('result_dir',),
+ ('last_update_time'),
+ ('comment',))
+ }),
+ )
+ inlines = [ DataFileInline ]
+ #list_filter = ('run_status', 'run_start_time')
+admin.site.register(DataRun, DataRunOptions)
+
+class FileTypeAdmin(admin.ModelAdmin):
+ list_display = ('name', 'mimetype', 'regex')
+admin.site.register(FileType, FileTypeAdmin)
+
# lane form setup needs to come before Flowcell form config
# as flowcell refers to the LaneInline class
class LaneForm(ModelForm):
Controls display of Lanes on the Flowcell form.
"""
model = Lane
- max_num = 8
extra = 8
form = LaneForm
raw_id_fields = ('library',)
'fields': ('comment', )
}),
)
+admin.site.register(Lane, LaneOptions)
class FlowCellOptions(admin.ModelAdmin):
date_hierarchy = "run_date"
if db_field.name == "notes":
field.widget.attrs["rows"] = "3"
return field
+admin.site.register(FlowCell, FlowCellOptions)
class ClusterStationOptions(admin.ModelAdmin):
list_display = ('name', )
fieldsets = ( ( None, { 'fields': ( 'name', ) } ), )
+admin.site.register(ClusterStation, ClusterStationOptions)
class SequencerOptions(admin.ModelAdmin):
list_display = ('name', )
fieldsets = ( ( None, { 'fields': ( 'name', ) } ), )
-
-
-#admin.site.register(DataRun, DataRunOptions)
-admin.site.register(FlowCell, FlowCellOptions)
-admin.site.register(ClusterStation, ClusterStationOptions)
admin.site.register(Sequencer, SequencerOptions)
-admin.site.register(Lane, LaneOptions)
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import send_mail, mail_admins
from django.http import HttpResponse, Http404
+from django.conf import settings
from htsworkflow.frontend.auth import require_api_key
-from htsworkflow.frontend import settings
from htsworkflow.frontend.experiments.models import \
FlowCell, \
DataRun, \
--- /dev/null
+[
+ { "model": "experiments.FileType",
+ "pk": 1,
+ "fields": {
+ "name": "run_xml",
+ "mimetype": "application/vnd.htsworkflow-run-xml",
+ "regex": "run.*\\.xml\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 2,
+ "fields": {
+ "name": "Summary.htm",
+ "mimetype": "text/html",
+ "regex": "Summary\\.htm\\Z(?ms)"
+ }
+ },
+
+ { "model": "experiments.FileType",
+ "pk": 3,
+ "fields": {
+ "name": "IVC All",
+ "mimetype": "image/png",
+ "regex": "s_(?P<lane>[0-9])_all\\.png\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 4,
+ "fields": {
+ "name": "IVC Call",
+ "mimetype": "image/png",
+ "regex": "s_(?P<lane>[0-9])_call\\.png\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 5,
+ "fields": {
+ "name": "IVC Percent All",
+ "mimetype": "image/png",
+ "regex": "s_(?P<lane>[0-9])_percent_all\\.png\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 6,
+ "fields": {
+ "name": "IVC Percent Base",
+ "mimetype": "image/png",
+ "regex": "s_(?P<lane>[0-9])_percent_base\\.png\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 7,
+ "fields": {
+ "name": "IVC Percent Call",
+ "mimetype": "image/png",
+ "regex": "s_(?P<lane>[0-9])_percent_call\\.png\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 8,
+ "fields": {
+ "name": "GERALD Scores",
+ "regex": "scores\\.tar\\.bz2\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 9,
+ "fields": {
+ "name": "ELAND Result",
+ "regex": "s_(?P<lane>[0-9])((?P<end>[1-4])_)?_eland_result\\.txt\\.bz2\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 10,
+ "fields": {
+ "name": "ELAND Multi",
+ "regex": "s_(?P<lane>[0-9])((?P<end>[1-4])_)?_eland_multi\\.txt\\.bz2\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 11,
+ "fields": {
+ "name": "ELAND Extended",
+ "regex": "s_(?P<lane>[0-9])((?P<end>[1-4])_)?_eland_extended\\.txt\\.bz2\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 12,
+ "fields": {
+ "name": "ELAND Export",
+ "regex": "s_(?P<lane>[0-9])((?P<end>[1-4])_)?_export\\.txt\\.bz2\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 13,
+ "fields": {
+ "name": "SRF",
+ "regex": ".*_(?P<lane>[1-8])\\.srf\\Z(?ms)"
+ }
+ },
+ { "model": "experiments.FileType",
+ "pk": 14,
+ "fields": {
+ "name": "QSEQ tarfile",
+ "regex": ".*_l(?P<lane>[1-8])_r(?P<end>[1-4])\\.tar\\.bz2\\Z(?ms)"
+ }
+ }
+
+]
\ No newline at end of file
{"pk": 153, "model": "experiments.flowcell",
"fields": {
"paired_end": true,
- "run_date": "2009-09-11 22:12:13",
- "read_length": 75,
+ "run_date": "2007-09-27 22:12:13",
+ "read_length": 36,
"notes": "",
"advanced_run": false,
"control_lane": 2,
"cluster_station": 3,
"sequencer": 2,
- "flowcell_id": "303TUAAXX"
+ "flowcell_id": "FC12150"
}
},
{"pk": 1193, "model": "experiments.lane",
"pM": "8"
}
},
+ {"pk": 1192, "model": "experiments.lane",
+ "fields": {
+ "comment": "Other library",
+ "library": "11070",
+ "cluster_estimate": 132000,
+ "flowcell": 153,
+ "lane_number": 1,
+ "pM": "7"
+ }
+ },
+
{"pk": "10981", "model": "samples.library",
"fields": {
"ten_nM_dilution": false,
"experiment_type": 4,
"antibody": null
}
- }
-
+ },
+ {"pk": 200, "model": "experiments.flowcell",
+ "fields": {
+ "paired_end": true,
+ "run_date": "2007-09-27 22:12:13",
+ "read_length": 36,
+ "notes": "",
+ "advanced_run": false,
+ "control_lane": 2,
+ "cluster_station": 3,
+ "sequencer": 2,
+ "flowcell_id": "30012AAXX (failed)"
+ }
+ },
+ {"pk": 201, "model": "experiments.lane",
+ "fields": {
+ "comment": "",
+ "library": "11070",
+ "cluster_estimate": 182000,
+ "flowcell": 200,
+ "lane_number": 8,
+ "pM": "7"
+ }
+ }
]
+import datetime
+import glob
import logging
+import os
+import re
+import types
+import uuid
+from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core import urlresolvers
from django.db import models
+from django.db.models.signals import post_init
-from htsworkflow.frontend.samples.models import *
-from htsworkflow.frontend.settings import options
+from htsworkflow.frontend.samples.models import Library
+from htsworkflow.util.conversion import parse_flowcell_id
+from htsworkflow.pipelines import runfolder
+
+logger = logging.getLogger(__name__)
+default_pM = 5
+try:
+ default_pM = int(settings.DEFAULT_PM)
+except ValueError,e:
+ logger.error("invalid value for frontend.default_pm")
+
+RUN_STATUS_CHOICES = (
+ (0, 'Sequencer running'), ##Solexa Data Pipeline Not Yet Started'),
+ (1, 'Data Pipeline Started'),
+ (2, 'Data Pipeline Interrupted'),
+ (3, 'Data Pipeline Finished'),
+ (4, 'Collect Results Started'),
+ (5, 'Collect Results Finished'),
+ (6, 'QC Started'),
+ (7, 'QC Finished'),
+ (255, 'DONE'),
+ )
+RUN_STATUS_REVERSE_MAP = dict(((v,k) for k,v in RUN_STATUS_CHOICES))
class ClusterStation(models.Model):
name = models.CharField(max_length=50, unique=True)
def __unicode__(self):
return unicode(self.name)
-default_pM = 5
-try:
- default_pM = int(options.get('frontend', 'default_pm'))
-except ValueError,e:
- logging.error("invalid value for frontend.default_pm")
-
class FlowCell(models.Model):
-
flowcell_id = models.CharField(max_length=20, unique=True, db_index=True)
run_date = models.DateTimeField()
advanced_run = models.BooleanField(default=False)
paired_end = models.BooleanField(default=False)
 read_length = models.IntegerField(default=32) #Stanford is currently 25
- control_lane = models.IntegerField(choices=[(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8),(0,'All Lanes')], null=True)
+ control_lane = models.IntegerField(choices=[(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8),(0,'All Lanes')], null=True, blank=True)
cluster_station = models.ForeignKey(ClusterStation, default=3)
sequencer = models.ForeignKey(Sequencer, default=1)
def __unicode__(self):
return unicode(self.flowcell_id)
- def Create_LOG(self):
- str = ''
- str +='<a target=_balnk href="/experiments/'+self.flowcell_id+'" title="Create XLS like sheet for this Flowcell ..." ">Create LOG</a>'
- try:
- t = DataRun.objects.get(fcid=self.id)
- str +='<br/><a target=_self href="/admin/experiments/datarun/?q='+self.flowcell_id+'" title="Check Data Runs ..." ">DataRun ..</a>'
- except ObjectDoesNotExist:
- str += '<br/><span style="color:red">not sequenced</span>'
- return str
- Create_LOG.allow_tags = True
-
def Lanes(self):
- library_url = '/admin/samples/library/%s'
html = ['<table>']
- #for i in range(1,9):
for lane in self.lane_set.all():
cluster_estimate = lane.cluster_estimate
if cluster_estimate is not None:
library_id = lane.library_id
library = lane.library
element = '<tr><td>%d</td><td><a href="%s">%s</a></td><td>%s</td></tr>'
- expanded_library_url = library_url %(library_id,)
- html.append(element % (lane.lane_number, expanded_library_url, library, cluster_estimate))
+ html.append(element % (lane.lane_number,
+ library.get_admin_url(),
+ library,
+ cluster_estimate))
html.append('</table>')
return "\n".join(html)
Lanes.allow_tags = True
def get_admin_url(self):
# that's the django way... except it didn't work
- #return urlresolvers.reverse('admin_experiments_FlowCell_change', args=(self.id,))
- return '/admin/experiments/flowcell/%s/' % (self.id,)
+ return urlresolvers.reverse('admin:experiments_flowcell_change',
+ args=(self.id,))
def flowcell_type(self):
"""
return u"Paired"
else:
return u"Single"
+
+ @models.permalink
+ def get_absolute_url(self):
+ flowcell_id, status = parse_flowcell_id(self.flowcell_id)
+ return ('htsworkflow.frontend.experiments.views.flowcell_detail',
+ [str(flowcell_id)])
-### -----------------------
-class DataRun(models.Model):
- ConfTemplate = "CONFIG PARAMS WILL BE GENERATED BY THE PIPELINE SCRIPT.\nYOU'LL BE ABLE TO EDIT AFTER IF NEEDED."
- run_folder = models.CharField(max_length=50,unique=True, db_index=True)
- fcid = models.ForeignKey(FlowCell,verbose_name="Flowcell Id")
- config_params = models.TextField(default=ConfTemplate)
- run_start_time = models.DateTimeField()
- RUN_STATUS_CHOICES = (
- (0, 'Sequencer running'), ##Solexa Data Pipeline Not Yet Started'),
- (1, 'Data Pipeline Started'),
- (2, 'Data Pipeline Interrupted'),
- (3, 'Data Pipeline Finished'),
- (4, 'CollectReads Started'),
- (5, 'CollectReads Finished'),
- (6, 'QC Finished'),
- (7, 'DONE'),
- )
- run_status = models.IntegerField(choices=RUN_STATUS_CHOICES, default=0)
- run_note = models.TextField(blank=True)
-
-
- def main_status(self):
- str = '<div'
- if self.run_status >= 5:
- str += ' style="color:green">'
- str += '<b>'+self.RUN_STATUS_CHOICES[self.run_status][1]+'</b>'
- str += '<br/><br/>' #<span style="color:red;font-size:80%;">New!</span>'
- str +='<br/><a target=_balnk href="'+settings.TASKS_PROJS_SERVER+'/Flowcells/'+self.fcid.flowcell_id+'/'+self.fcid.flowcell_id+'_QC_Summary.html" title="View QC Summaries of this run ..." ">View QC Page</a>'
- else:
- str += '>'+self.RUN_STATUS_CHOICES[self.run_status][1]
+ def get_raw_data_directory(self):
+ """Return location of where the raw data is stored"""
+ flowcell_id, status = parse_flowcell_id(self.flowcell_id)
+
+ return os.path.join(settings.RESULT_HOME_DIR, flowcell_id)
+
+ def update_data_runs(self):
+ result_root = self.get_raw_data_directory()
+ if result_root is None:
+ return
+
+ result_home_dir = os.path.join(settings.RESULT_HOME_DIR,'')
+ run_xml_re = re.compile(glob.fnmatch.translate('run*.xml'))
+
+ dataruns = self.datarun_set.all()
+ datarun_result_dirs = [ x.result_dir for x in dataruns ]
+
+ result_dirs = []
+ for dirpath, dirnames, filenames in os.walk(result_root):
+ for filename in filenames:
+ if run_xml_re.match(filename):
+ # we have a run directory
+ relative_pathname = get_relative_pathname(dirpath)
+ if relative_pathname not in datarun_result_dirs:
+ self.import_data_run(relative_pathname, filename)
+
+ def import_data_run(self, relative_pathname, run_xml_name):
+ """Given a result directory, import its files."""
+ run_dir = get_absolute_pathname(relative_pathname)
+ run_xml_path = os.path.join(run_dir, run_xml_name)
+ run_xml_data = runfolder.load_pipeline_run_xml(run_xml_path)
+
+ run = DataRun()
+ run.flowcell = self
+ run.status = RUN_STATUS_REVERSE_MAP['DONE']
+ run.result_dir = relative_pathname
+ run.runfolder_name = run_xml_data.runfolder_name
+ run.cycle_start = run_xml_data.image_analysis.start
+ run.cycle_stop = run_xml_data.image_analysis.stop
+ run.run_start_time = run_xml_data.image_analysis.date
+
+ run.last_update_time = datetime.datetime.now()
+ run.save()
+
+ run.update_result_files()
+
+
+# FIXME: should we automatically update dataruns?
+# Or should we expect someone to call update_data_runs?
+#def update_flowcell_dataruns(sender, instance, *args, **kwargs):
+# """Update our dataruns
+# """
+# if not os.path.exists(settings.RESULT_HOME_DIR):
+# return
+#
+# instance.update_data_runs()
+#post_init.connect(update_flowcell_dataruns, sender=FlowCell)
- str += '</div>'
- return str
- main_status.allow_tags = True
- main_status.allow_tags = True
-
- def Flowcell_Info(self):
- str = '<b>'+self.fcid.__str__()+'</b>'
- str += ' (c: '+self.fcid.cluster_mac_id+', s: '+self.fcid.seq_mac_id+')'
- str += '<div style="margin-top:5px;">'
- str +='<a title="View Lane List here ..." onClick="el = document.getElementById(\'LanesOf'+self.fcid.__str__()+'\');if(el) (el.style.display==\'none\'?el.style.display=\'block\':el.style.display=\'none\')" style="cursor:pointer;color: #5b80b2;">View/hide lanes</a>'
- str += '<div id="LanesOf'+self.fcid.__str__()+'" style="display:block;border:solid #cccccc 1px;width:350px">'
- LanesList = '1: '+self.fcid.lane_1_library.__str__()+' ('+self.fcid.lane_1_library.library_species.use_genome_build+')<br/>2: '+self.fcid.lane_2_library.__str__()+' ('+self.fcid.lane_2_library.library_species.use_genome_build+')<br/>3: '+self.fcid.lane_3_library.__str__()+' ('+self.fcid.lane_3_library.library_species.use_genome_build+')<br/>4: '+self.fcid.lane_4_library.__str__()+' ('+self.fcid.lane_4_library.library_species.use_genome_build+')<br/>5: '+self.fcid.lane_5_library.__str__()+' ('+self.fcid.lane_5_library.library_species.use_genome_build+')<br/>6: '+self.fcid.lane_6_library.__str__()+' ('+self.fcid.lane_6_library.library_species.use_genome_build+')<br/>7: '+self.fcid.lane_7_library.__str__()+' ('+self.fcid.lane_7_library.library_species.use_genome_build+')<br/>8: '+self.fcid.lane_8_library.__str__()+' ('+self.fcid.lane_8_library.library_species.use_genome_build+')'
- str += LanesList ## self.fcid.Lanes()
- str += '</div>'
- str += '<div><a title="open Flowcell record" href="/admin/exp_track/flowcell/'+self.fcid.id.__str__()+'/" target=_self>Edit Flowcell record</a>'
- #str += '<span style="color:red;font-size:80%;margin-left:15px;margin-right:3px">New!</span>'
- str +='<a style="margin-left:15px;" target=_balnk href="/exp_track/'+self.fcid.flowcell_id+'" title="View XLS like sheet for this Flowcell LOG ..." ">GA LOG Page</a>'
- str += '</div>'
- str += '</div>'
- return str
- Flowcell_Info.allow_tags = True
LANE_STATUS_CODES = [(0, 'Failed'),
(1, 'Marginal'),
LANE_STATUS_MAP = dict((int(k),v) for k,v in LANE_STATUS_CODES )
LANE_STATUS_MAP[None] = "Unknown"
+def is_valid_lane(value):
+ if value >= 1 and value <= 8:
+ return True
+ else:
+ return False
+
class Lane(models.Model):
flowcell = models.ForeignKey(FlowCell)
- lane_number = models.IntegerField(choices=[(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8)])
+ lane_number = models.IntegerField(validators=[is_valid_lane])
library = models.ForeignKey(Library)
pM = models.DecimalField(max_digits=5, decimal_places=2,blank=False, null=False,default=default_pM)
cluster_estimate = models.IntegerField(blank=True, null=True)
status = models.IntegerField(choices=LANE_STATUS_CODES, null=True, blank=True)
comment = models.TextField(null=True, blank=True)
+
+ @models.permalink
+ def get_absolute_url(self):
+ return ('htsworkflow.frontend.experiments.views.flowcell_lane_detail',
+ [str(self.id)])
+
+ def __unicode__(self):
+ return self.flowcell.flowcell_id + ':' + unicode(self.lane_number)
+
+### -----------------------
+class DataRun(models.Model):
+ flowcell = models.ForeignKey(FlowCell,verbose_name="Flowcell Id")
+ runfolder_name = models.CharField(max_length=50)
+ result_dir = models.CharField(max_length=255)
+ last_update_time = models.DateTimeField()
+ run_start_time = models.DateTimeField()
+ cycle_start = models.IntegerField(null=True, blank=True)
+ cycle_stop = models.IntegerField(null=True, blank=True)
+ run_status = models.IntegerField(choices=RUN_STATUS_CHOICES,
+ null=True, blank=True)
+ comment = models.TextField(blank=True)
+
+ def update_result_files(self):
+ abs_result_dir = get_absolute_pathname(self.result_dir)
+
+ for dirname, dirnames, filenames in os.walk(abs_result_dir):
+ for filename in filenames:
+ pathname = os.path.join(dirname, filename)
+ relative_pathname = get_relative_pathname(pathname)
+ datafiles = self.datafile_set.filter(
+ data_run = self,
+ relative_pathname=relative_pathname)
+ if len(datafiles) > 0:
+ continue
+
+ metadata = find_file_type_metadata_from_filename(filename)
+ if metadata is not None:
+ metadata['filename'] = filename
+ newfile = DataFile()
+ newfile.data_run = self
+ newfile.file_type = metadata['file_type']
+ newfile.relative_pathname = relative_pathname
+
+ lane_number = metadata.get('lane', None)
+ if lane_number is not None:
+ lane = self.flowcell.lane_set.get(lane_number = lane_number)
+ newfile.library = lane.library
+
+ self.datafile_set.add(newfile)
+
+ self.last_update_time = datetime.datetime.now()
+
+ def lane_files(self):
+ lanes = {}
+
+ for datafile in self.datafile_set.all():
+ metadata = datafile.attributes
+ if metadata is not None:
+ lane = metadata.get('lane', None)
+ if lane is not None:
+ lane_file_set = lanes.setdefault(lane, {})
+ lane_file_set[datafile.file_type.normalized_name] = datafile
+ return lanes
+
+ def ivc_plots(self, lane):
+ ivc_name = ['IVC All', 'IVC Call',
+ 'IVC Percent Base', 'IVC Percent All', 'IVC Percent Call']
+
+ plots = {}
+ for rel_filename, metadata in self.get_result_files():
+ if metadata.file_type.name in ivc_name:
+ plots[metadata.file_type.name] = (rel_filename, metadata)
+
+class FileType(models.Model):
+ """Represent potential file types
+
+ regex is a pattern used to detect if a filename matches this type.
+ DataRun currently assumes that there may be a (?P<lane>) and
+ (?P<end>) pattern in the regular expression.
+ """
+ name = models.CharField(max_length=50)
+ mimetype = models.CharField(max_length=50, null=True, blank=True)
+ # regular expression from glob.fnmatch.translate
+ regex = models.CharField(max_length=50, null=True, blank=True)
+
+ def parse_filename(self, pathname):
+ """Does filename match our pattern?
+
+ Returns None if not, or dictionary of match variables if we do.
+ """
+ path, filename = os.path.split(pathname)
+ if len(self.regex) > 0:
+ match = re.match(self.regex, filename)
+ if match is not None:
+ # These are (?P<>) names we know about from our default regexes.
+ results = match.groupdict()
+
+ # convert int parameters
+ for attribute_name in ['lane', 'end']:
+ value = results.get(attribute_name, None)
+ if value is not None:
+ results[attribute_name] = int(value)
+
+ return results
+
+ def _get_normalized_name(self):
+ """Crush data file name into identifier friendly name"""
+ return self.name.replace(' ', '_').lower()
+ normalized_name = property(_get_normalized_name)
+
+ def __unicode__(self):
+ #return u"<FileType: %s>" % (self.name,)
+ return self.name
+
+def str_uuid():
+ """Helper function to set default UUID in DataFile"""
+ return str(uuid.uuid1())
+
+class DataFile(models.Model):
+ """Store map from random ID to filename"""
+ random_key = models.CharField(max_length=64,
+ db_index=True,
+ default=str_uuid)
+ data_run = models.ForeignKey(DataRun, db_index=True)
+ library = models.ForeignKey(Library, db_index=True, null=True, blank=True)
+ file_type = models.ForeignKey(FileType)
+ relative_pathname = models.CharField(max_length=255, db_index=True)
+
+ def _get_attributes(self):
+ return self.file_type.parse_filename(self.relative_pathname)
+ attributes = property(_get_attributes)
+
+ def _get_pathname(self):
+ return get_absolute_pathname(self.relative_pathname)
+ pathname = property(_get_pathname)
+
+ @models.permalink
+ def get_absolute_url(self):
+ return ('htsworkflow.frontend.experiments.views.read_result_file',
+ (), {'key': self.random_key })
+
+def find_file_type_metadata_from_filename(pathname):
+ path, filename = os.path.split(pathname)
+ result = None
+ for file_type in FileType.objects.all():
+ result = file_type.parse_filename(filename)
+ if result is not None:
+ result['file_type'] = file_type
+ return result
+
+ return None
+
+def get_relative_pathname(abspath):
+ """Strip off the result home directory from a path
+ """
+ result_home_dir = os.path.join(settings.RESULT_HOME_DIR,'')
+ relative_pathname = abspath.replace(result_home_dir,'')
+ return relative_pathname
+
+def get_absolute_pathname(relative_pathname):
+ """Attach relative path to results home directory"""
+ return os.path.join(settings.RESULT_HOME_DIR, relative_pathname)
+
import json
except ImportError, e:
import simplejson as json
+import os
+import shutil
import sys
+import tempfile
+from django.conf import settings
from django.core import mail
from django.core.exceptions import ObjectDoesNotExist
from django.test import TestCase
from htsworkflow.frontend.experiments import experiments
from htsworkflow.frontend.auth import apidata
+from htsworkflow.pipelines.test.simulate_runfolder import TESTDATA_DIR
+
LANE_SET = range(1,9)
class ExperimentsTestCases(TestCase):
fixtures = ['test_flowcells.json']
def setUp(self):
- pass
+ self.tempdir = tempfile.mkdtemp(prefix='htsw-test-experiments-')
+ settings.RESULT_HOME_DIR = self.tempdir
+
+ self.fc1_id = 'FC12150'
+ self.fc1_root = os.path.join(self.tempdir, self.fc1_id)
+ os.mkdir(self.fc1_root)
+ self.fc1_dir = os.path.join(self.fc1_root, 'C1-37')
+ os.mkdir(self.fc1_dir)
+ runxml = 'run_FC12150_2007-09-27.xml'
+ shutil.copy(os.path.join(TESTDATA_DIR, runxml),
+ os.path.join(self.fc1_dir, runxml))
+ for i in range(1,9):
+ shutil.copy(
+ os.path.join(TESTDATA_DIR,
+ 'woldlab_070829_USI-EAS44_0017_FC11055_1.srf'),
+ os.path.join(self.fc1_dir,
+ 'woldlab_070829_SERIAL_FC12150_%d.srf' %(i,))
+ )
+
+ self.fc2_dir = os.path.join(self.tempdir, '42JTNAAXX')
+ os.mkdir(self.fc2_dir)
+ os.mkdir(os.path.join(self.fc2_dir, 'C1-25'))
+ os.mkdir(os.path.join(self.fc2_dir, 'C1-37'))
+ os.mkdir(os.path.join(self.fc2_dir, 'C1-37', 'Plots'))
+
+ def tearDown(self):
+ shutil.rmtree(self.tempdir)
def test_flowcell_information(self):
"""
Check the code that packs the django objects into simple types.
"""
- for fc_id in [u'303TUAAXX', u"42JTNAAXX", "42JU1AAXX"]:
+ for fc_id in [u'FC12150', u"42JTNAAXX", "42JU1AAXX"]:
fc_dict = experiments.flowcell_information(fc_id)
fc_django = models.FlowCell.objects.get(flowcell_id=fc_id)
self.failUnlessEqual(fc_dict['flowcell_id'], fc_id)
"""
Require logging in to retrieve meta data
"""
- response = self.client.get(u'/experiments/config/303TUAAXX/json')
+ response = self.client.get(u'/experiments/config/FC12150/json')
self.failUnlessEqual(response.status_code, 403)
def test_library_id(self):
"""
 Library IDs should be flexible, so make sure we can retrieve a non-numeric ID
"""
- response = self.client.get('/experiments/config/303TUAAXX/json', apidata)
+ response = self.client.get('/experiments/config/FC12150/json', apidata)
self.failUnlessEqual(response.status_code, 200)
flowcell = json.loads(response.content)
self.failUnlessEqual(library_id, expected_ids[i])
self.failUnlessEqual(input_field['value'], library_id)
+ def test_library_to_flowcell_link(self):
+ """
+ Make sure the library page includes links to the flowcell pages.
+ These links must work with flowcell IDs that have parenthetical comments.
+ """
+ self.client.login(username='supertest', password='BJOKL5kAj6aFZ6A5')
+ response = self.client.get('/library/11070/')
+ soup = BeautifulSoup(response.content)
+ failed_fc_span = soup.find(text='30012AAXX (failed)')
+ failed_fc_a = failed_fc_span.findPrevious('a')
+ # make sure some of our RDF made it.
+ self.failUnlessEqual(failed_fc_a.get('rel'), 'libns:flowcell')
+ self.failUnlessEqual(failed_fc_a.get('href'), '/flowcell/30012AAXX/')
+ fc_response = self.client.get(failed_fc_a.get('href'))
+ self.failUnlessEqual(fc_response.status_code, 200)
+ fc_lane_response = self.client.get('/flowcell/30012AAXX/8/')
+ self.failUnlessEqual(fc_lane_response.status_code, 200)
+
+
+
def test_lanes_for(self):
"""
Check the code that packs the django objects into simple types.
self.failUnlessEqual(response.status_code, 404)
+ def test_raw_data_dir(self):
+ """Raw data path generator check"""
+ flowcell_id = self.fc1_id
+ raw_dir = os.path.join(settings.RESULT_HOME_DIR, flowcell_id)
+
+ fc = models.FlowCell.objects.get(flowcell_id=flowcell_id)
+ self.failUnlessEqual(fc.get_raw_data_directory(), raw_dir)
+
+ fc.flowcell_id = flowcell_id + " (failed)"
+ self.failUnlessEqual(fc.get_raw_data_directory(), raw_dir)
+
+
+ def test_data_run_import(self):
+ srf_file_type = models.FileType.objects.get(name='SRF')
+ runxml_file_type = models.FileType.objects.get(name='run_xml')
+ flowcell_id = self.fc1_id
+ flowcell = models.FlowCell.objects.get(flowcell_id=flowcell_id)
+ flowcell.update_data_runs()
+ self.failUnlessEqual(len(flowcell.datarun_set.all()), 1)
+
+ run = flowcell.datarun_set.all()[0]
+ result_files = run.datafile_set.all()
+ result_dict = dict(((rf.relative_pathname, rf) for rf in result_files))
+
+ srf4 = result_dict['FC12150/C1-37/woldlab_070829_SERIAL_FC12150_4.srf']
+ self.failUnlessEqual(srf4.file_type, srf_file_type)
+ self.failUnlessEqual(srf4.library_id, '11060')
+ self.failUnlessEqual(srf4.data_run.flowcell.flowcell_id, 'FC12150')
+ self.failUnlessEqual(
+ srf4.data_run.flowcell.lane_set.get(lane_number=4).library_id,
+ '11060')
+ self.failUnlessEqual(
+ srf4.pathname,
+ os.path.join(settings.RESULT_HOME_DIR, srf4.relative_pathname))
+
+ lane_files = run.lane_files()
+ self.failUnlessEqual(lane_files[4]['srf'], srf4)
+
+ runxml= result_dict['FC12150/C1-37/run_FC12150_2007-09-27.xml']
+ self.failUnlessEqual(runxml.file_type, runxml_file_type)
+ self.failUnlessEqual(runxml.library_id, None)
+
+
+ def test_read_result_file(self):
+ """make sure we can return a result file
+ """
+ flowcell_id = self.fc1_id
+ flowcell = models.FlowCell.objects.get(flowcell_id=flowcell_id)
+ flowcell.update_data_runs()
+
+ #self.client.login(username='supertest', password='BJOKL5kAj6aFZ6A5')
+
+ result_files = flowcell.datarun_set.all()[0].datafile_set.all()
+ for f in result_files:
+ url = '/experiments/file/%s' % ( f.random_key,)
+ response = self.client.get(url)
+ self.failUnlessEqual(response.status_code, 200)
+ mimetype = f.file_type.mimetype
+ if mimetype is None:
+ mimetype = 'application/octet-stream'
+
+ self.failUnlessEqual(mimetype, response['content-type'])
+
+class TestFileType(TestCase):
+ def test_file_type_unicode(self):
+ file_type_objects = models.FileType.objects
+ name = 'QSEQ tarfile'
+ file_type_object = file_type_objects.get(name=name)
+ self.failUnlessEqual(u"<FileType: QSEQ tarfile>",
+ unicode(file_type_object))
+
+class TestFileType(TestCase):
+ def test_find_file_type(self):
+ file_type_objects = models.FileType.objects
+ cases = [('woldlab_090921_HWUSI-EAS627_0009_42FC3AAXX_l7_r1.tar.bz2',
+ 'QSEQ tarfile', 7, 1),
+ ('woldlab_091005_HWUSI-EAS627_0010_42JT2AAXX_1.srf',
+ 'SRF', 1, None),
+ ('s_1_eland_extended.txt.bz2','ELAND Extended', 1, None),
+ ('s_7_eland_multi.txt.bz2', 'ELAND Multi', 7, None),
+ ('s_3_eland_result.txt.bz2','ELAND Result', 3, None),
+ ('s_1_export.txt.bz2','ELAND Export', 1, None),
+ ('s_1_percent_call.png', 'IVC Percent Call', 1, None),
+ ('s_2_percent_base.png', 'IVC Percent Base', 2, None),
+ ('s_3_percent_all.png', 'IVC Percent All', 3, None),
+ ('s_4_call.png', 'IVC Call', 4, None),
+ ('s_5_all.png', 'IVC All', 5, None),
+ ('Summary.htm', 'Summary.htm', None, None),
+ ('run_42JT2AAXX_2009-10-07.xml', 'run_xml', None, None),
+ ]
+ for filename, typename, lane, end in cases:
+ ft = models.find_file_type_metadata_from_filename(filename)
+ self.failUnlessEqual(ft['file_type'],
+ file_type_objects.get(name=typename))
+ self.failUnlessEqual(ft.get('lane', None), lane)
+ self.failUnlessEqual(ft.get('end', None), end)
+
+ def test_assign_file_type_complex_path(self):
+ file_type_objects = models.FileType.objects
+ cases = [('/a/b/c/woldlab_090921_HWUSI-EAS627_0009_42FC3AAXX_l7_r1.tar.bz2',
+ 'QSEQ tarfile', 7, 1),
+ ('foo/woldlab_091005_HWUSI-EAS627_0010_42JT2AAXX_1.srf',
+ 'SRF', 1, None),
+ ('../s_1_eland_extended.txt.bz2','ELAND Extended', 1, None),
+ ('/bleem/s_7_eland_multi.txt.bz2', 'ELAND Multi', 7, None),
+ ('/qwer/s_3_eland_result.txt.bz2','ELAND Result', 3, None),
+ ('/ty///1/s_1_export.txt.bz2','ELAND Export', 1, None),
+ ('/help/s_1_percent_call.png', 'IVC Percent Call', 1, None),
+ ('/bored/s_2_percent_base.png', 'IVC Percent Base', 2, None),
+ ('/example1/s_3_percent_all.png', 'IVC Percent All', 3, None),
+ ('amonkey/s_4_call.png', 'IVC Call', 4, None),
+ ('fishie/s_5_all.png', 'IVC All', 5, None),
+ ('/random/Summary.htm', 'Summary.htm', None, None),
+ ('/notrandom/run_42JT2AAXX_2009-10-07.xml', 'run_xml', None, None),
+ ]
+ for filename, typename, lane, end in cases:
+ result = models.find_file_type_metadata_from_filename(filename)
+ self.failUnlessEqual(result['file_type'],
+ file_type_objects.get(name=typename))
+ self.failUnlessEqual(result.get('lane',None), lane)
+ self.failUnlessEqual(result.get('end', None), end)
+
class TestEmailNotify(TestCase):
fixtures = ['test_flowcells.json']
self.client.login(username='supertest', password='BJOKL5kAj6aFZ6A5')
response = self.client.get('/experiments/started/153/')
self.failUnlessEqual(response.status_code, 200)
- self.failUnless(re.search('Flowcell 303TUAAXX', response.content))
+ self.failUnless(re.search('Flowcell FC12150', response.content))
# require that navigation back to the admin page exists
self.failUnless(re.search('<a href="/admin/experiments/flowcell/153/">[^<]+</a>', response.content))
+
#(r'^(?P<run_folder>.+)/$', 'gaworkflow.frontend.experiments.views.detail'),
(r'^config/(?P<fc_id>.+)/json$', 'htsworkflow.frontend.experiments.experiments.flowcell_json'),
(r'^lanes_for/(?P<username>.+)/json$', 'htsworkflow.frontend.experiments.experiments.lanes_for_json'),
- (r'^fcsheet/(?P<fcid>.+)/$', 'htsworkflow.frontend.experiments.views.makeFCSheet'),
- (r'^updStatus$', 'htsworkflow.frontend.experiments.experiments.updStatus'),
- (r'^getConfile$', 'htsworkflow.frontend.experiments.experiments.getConfile'),
- (r'^getLanesNames$', 'htsworkflow.frontend.experiments.experiments.getLaneLibs'),
- # for the following two URLS I have to pass in the primary key
- # because I link to the page from an overridden version of the admin change_form
- # which only makes the object primary key available in the form.
- # (Or at least as far as I could tell)
+ (r'^file/(?P<key>.+)/?$', 'htsworkflow.frontend.experiments.views.read_result_file'),
(r'^started/(?P<pk>.+)/$', 'htsworkflow.frontend.experiments.views.startedEmail'),
(r'^finished/(?P<pk>.+)/$', 'htsworkflow.frontend.experiments.views.finishedEmail'),
+
)
# Create your views here.
from datetime import datetime
+import os
#from django.template import Context, loader
#shortcut to the above modules
from django.contrib.auth.decorators import user_passes_test
+from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import EmailMessage, mail_managers
from django.http import HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
-from django.template import Context
+from django.template import RequestContext
from django.template.loader import get_template
-from htsworkflow.frontend.experiments.models import *
+from htsworkflow.frontend.experiments.models import \
+ DataRun, \
+ DataFile, \
+ FlowCell, \
+ Lane
from htsworkflow.frontend.experiments.experiments import \
estimateFlowcellDuration, \
estimateFlowcellTimeRemaining, \
for user_email in email_lane.keys():
sending = ""
# build body
- context = Context({u'flowcell': fc,
- u'lanes': email_lane[user_email],
- u'runfolder': 'blank',
- u'finish_low': estimate_low,
- u'finish_high': estimate_high,
- u'now': datetime.now(),
- })
+ context = RequestContext(request,
+ {u'flowcell': fc,
+ u'lanes': email_lane[user_email],
+ u'runfolder': 'blank',
+ u'finish_low': estimate_low,
+ u'finish_high': estimate_high,
+ u'now': datetime.now(),
+ })
# build view
subject = "Flowcell %s" % ( fc.flowcell_id )
emails.append((user_email, subject, body, sending))
- verify_context = Context({
- 'emails': emails,
- 'flowcell': fc,
- 'from': sender,
- 'send': send,
- 'site_managers': settings.MANAGERS,
- 'title': fc.flowcell_id,
- 'warnings': warnings,
+ verify_context = RequestContext(
+ request,
+ { 'emails': emails,
+ 'flowcell': fc,
+ 'from': sender,
+ 'send': send,
+ 'site_managers': settings.MANAGERS,
+ 'title': fc.flowcell_id,
+ 'warnings': warnings,
})
return HttpResponse(email_verify.render(verify_context))
"""
"""
return HttpResponse("I've got nothing.")
+
+
+def flowcell_detail(request, flowcell_id, lane_number=None):
+ fc = get_object_or_404(FlowCell, flowcell_id__startswith=flowcell_id)
+ fc.update_data_runs()
+
+
+ if lane_number is not None:
+ lanes = fc.lane_set.filter(lane_number=lane_number)
+ else:
+ lanes = fc.lane_set.all()
+
+ context = RequestContext(request,
+ {'flowcell': fc,
+ 'lanes': lanes})
+
+ return render_to_response('experiments/flowcell_detail.html',
+ context)
+
+def flowcell_lane_detail(request, lane_pk):
+ lane = get_object_or_404(Lane, id=lane_pk)
+ lane.flowcell.update_data_runs()
+
+ dataruns = []
+ for run in lane.flowcell.datarun_set.all():
+ dataruns.append((run, lane.lane_number, run.lane_files()[lane.lane_number]))
+
+ context = RequestContext(request,
+ {'lib': lane.library,
+ 'lane': lane,
+ 'flowcell': lane.flowcell,
+ 'filtered_dataruns': dataruns})
+
+ return render_to_response('experiments/flowcell_lane_detail.html',
+ context)
+
+def read_result_file(self, key):
+ """Return the contents of filename if everything is approved
+ """
+ data_file = get_object_or_404(DataFile, random_key = key)
+
+ mimetype = 'application/octet-stream'
+ if data_file.file_type.mimetype is not None:
+ mimetype = data_file.file_type.mimetype
+
+ if os.path.exists(data_file.pathname):
+ return HttpResponse(open(data_file.pathname,'r'),
+ mimetype=mimetype)
+
+ raise Http404
+
+
-[{"pk": 1, "model": "inventory.printertemplate", "fields": {"default": false, "item_type": 1, "printer": 2, "template": "^FX=========================\r\n^FX 3\"x3\" Label\r\n^FX=========================\r\n^XA\r\n\r\n\r\n^FX======== Left Side ===========\r\n\r\n^FX------------\r\n^FX ^LH changes the 0,0 point of all subsequent location references\r\n^FX------------\r\n\r\n^LH0,50\r\n\r\n^FX ---Header---\r\n\r\n^FO25,0\r\n^CF0,50\r\n^FB250,2,,C\r\n^FD{{ item.barcode_id }}^FS\r\n\r\n^FX ---Column 1: Flowcells---\r\n\r\n^FX-----------------\r\n^FX FB command for automatic text formatting:\r\n^FX ^FB[dot width of area], [max # of lines], [change line spacing], [justification: L, C, R, J], [hanging indent]\r\n^FX-----------------\r\n\r\n^CF0,30,30\r\n^FO75,125\r\n^FB275,19,,L\r\n^FD{% for flowcell in flowcell_id_list %}{{ flowcell }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n^FX ---Date---\r\n\r\n^FO0,725\r\n^CF0,35\r\n^FB300,2,,C\r\n^FD{{ oldest_rundate|date:\"YMd\" }} - {{ latest_rundate|date:\"YMd\" }}^FS\r\n\r\n^FX ---Barcode---\r\n\r\n^FO135,795\r\n^BXN,3,200^FDinvb|{{ item.barcode_id }}^FS\r\n\r\n^FX======== Right Side ===========\r\n\r\n^LH300,60\r\n\r\n^FX ---Header---\r\n\r\n^FO0,0\r\n^CF0,50\r\n^FB600,2,,C\r\n^FD{{ barcode_id }}^FS\r\n\r\n^FX ---Dividing line---\r\n\r\n^FX---------------\r\n^FX GB command:\r\n^FX ^GB[box width], [box height], [border thickness], [color: B, W], [corner rounding: 0-8]^FS\r\n^FX---------------\r\n\r\n^FO0,100\r\n^GB0,600,10^FS\r\n\r\n^FX ---Column 2: Libraries 1-20---\r\n\r\n^CF0,30,30\r\n^FO75,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_1_to_20 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n\r\n^FX ---Column 3: Libraries 21-40---\r\n\r\n^CF0,30,30\r\n^FO200,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_21_to_40 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n\r\n^FX ---Column 4: Libraries 
41-60---\r\n\r\n^CF0,30,30\r\n^FO325,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_41_to_60 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n\r\n^FX ---Column 5: Libraries 61-80---\r\n\r\n^CF0,30,30\r\n^FO450,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_61_to_80 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n\r\n^FX ---Date---\r\n\r\n^FO0,715\r\n^CF0,35\r\n^FB600,2,,C\r\n^FDRun Dates: {{ oldest_rundate|date:\"YMd\" }}-{{ latest_rundate|date:\"YMd\" }}^FS\r\n\r\n^FX ---Barcode---\r\n\r\n^FO255,785\r\n^BXN,3,200^FDinvb|{{ item.barcode_id }}^FS\r\n\r\n^LH0,0\r\n^FX ---End---\r\n^XZ\r\n"}}, {"pk": 2, "model": "inventory.printertemplate", "fields": {"default": true, "item_type": 2, "printer": 1, "template": "^FX=========================\r\n^FX Harddrive Location Tracking Label\r\n^FX 300x375 dots\r\n^FX=========================\r\n\r\n^XA\r\n^LH 0,25\r\n\r\n^FO0,0\r\n^CF0,35\r\n^FB375,1,,C\r\n^FD{{ item.item_type.name }}:^FS\r\n\r\n^FX -------Text contains HD serial #-------------\r\n^FO15,75\r\n^CF0,42\r\n^FB325,3,,C\r\n^FD{% if use_uuid %}{{ item.uuid }}{% else %}{{ item.barcode_id }}{% endif %}^FS\r\n\r\n^FX -------Barcode contains HD serial #-----------\r\n^FO150,200\r\n^BXN,3,200\r\n^FD{% if use_uuid %}invu|{{ item.uuid }}{% else %}invb|{{ item.barcode_id }}{% endif %}^FS\r\n\r\n^XZ\r\n"}}]
+[
+ {"pk": 1,
+ "model": "inventory.itemtype",
+ "fields": {"name": "Hard Drive"}
+ },
+ {"pk": 2,
+ "model": "inventory.itemtype",
+ "fields": {"name": "Illumina SR Cluster Generation Reagents"}
+ },
+ {"pk": 3,
+ "model": "inventory.itemtype",
+ "fields": {"name": "Illumina Library Creation Reagents"}
+ },
+ {"pk": 4,
+ "model": "inventory.itemtype",
+ "fields": {"name": "Illumina Sequencing Reagents"}
+ },
+ {"pk": 5,
+ "model": "inventory.itemtype",
+ "fields": {"name": "Illumina PE Cluster Generation Reagents"}
+ },
+ {"pk": 6,
+ "model": "inventory.itemtype",
+ "fields": {"name": "Library"}
+ },
+
+ {"pk": 1,
+ "model": "inventory.printertemplate",
+ "fields": {"default": false,
+ "item_type": 1,
+ "printer": 2,
+ "template": "^FX=========================\r\n^FX 3\"x3\" Label\r\n^FX=========================\r\n^XA\r\n\r\n\r\n^FX======== Left Side ===========\r\n\r\n^FX------------\r\n^FX ^LH changes the 0,0 point of all subsequent location references\r\n^FX------------\r\n\r\n^LH0,50\r\n\r\n^FX ---Header---\r\n\r\n^FO25,0\r\n^CF0,50\r\n^FB250,2,,C\r\n^FD{{ item.barcode_id }}^FS\r\n\r\n^FX ---Column 1: Flowcells---\r\n\r\n^FX-----------------\r\n^FX FB command for automatic text formatting:\r\n^FX ^FB[dot width of area], [max # of lines], [change line spacing], [justification: L, C, R, J], [hanging indent]\r\n^FX-----------------\r\n\r\n^CF0,30,30\r\n^FO75,125\r\n^FB275,19,,L\r\n^FD{% for flowcell in flowcell_id_list %}{{ flowcell }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n^FX ---Date---\r\n\r\n^FO0,725\r\n^CF0,35\r\n^FB300,2,,C\r\n^FD{{ oldest_rundate|date:\"YMd\" }} - {{ latest_rundate|date:\"YMd\" }}^FS\r\n\r\n^FX ---Barcode---\r\n\r\n^FO135,795\r\n^BXN,3,200^FDinvb|{{ item.barcode_id }}^FS\r\n\r\n^FX======== Right Side ===========\r\n\r\n^LH300,60\r\n\r\n^FX ---Header---\r\n\r\n^FO0,0\r\n^CF0,50\r\n^FB600,2,,C\r\n^FD{{ barcode_id }}^FS\r\n\r\n^FX ---Dividing line---\r\n\r\n^FX---------------\r\n^FX GB command:\r\n^FX ^GB[box width], [box height], [border thickness], [color: B, W], [corner rounding: 0-8]^FS\r\n^FX---------------\r\n\r\n^FO0,100\r\n^GB0,600,10^FS\r\n\r\n^FX ---Column 2: Libraries 1-20---\r\n\r\n^CF0,30,30\r\n^FO75,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_1_to_20 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n\r\n^FX ---Column 3: Libraries 21-40---\r\n\r\n^CF0,30,30\r\n^FO200,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_21_to_40 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n\r\n^FX ---Column 4: Libraries 41-60---\r\n\r\n^CF0,30,30\r\n^FO325,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_41_to_60 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif 
%}{% endfor %}^FS\r\n\r\n^FX ---Column 5: Libraries 61-80---\r\n\r\n^CF0,30,30\r\n^FO450,100\r\n^FB100,20,,L\r\n^FD{% for lib_id in library_id_list_61_to_80 %}{{ lib_id }}{% if not forloop.last %}\\&{% endif %}{% endfor %}^FS\r\n\r\n^FX ---Date---\r\n\r\n^FO0,715\r\n^CF0,35\r\n^FB600,2,,C\r\n^FDRun Dates: {{ oldest_rundate|date:\"YMd\" }}-{{ latest_rundate|date:\"YMd\" }}^FS\r\n\r\n^FX ---Barcode---\r\n\r\n^FO255,785\r\n^BXN,3,200^FDinvb|{{ item.barcode_id }}^FS\r\n\r\n^LH0,0\r\n^FX ---End---\r\n^XZ\r\n"}},
+ {"pk": 2,
+ "model": "inventory.printertemplate",
+ "fields": {"default": true,
+ "item_type": 2,
+ "printer": 1,
+ "template": "^FX=========================\r\n^FX Harddrive Location Tracking Label\r\n^FX 300x375 dots\r\n^FX=========================\r\n\r\n^XA\r\n^LH 0,25\r\n\r\n^FO0,0\r\n^CF0,35\r\n^FB375,1,,C\r\n^FD{{ item.item_type.name }}:^FS\r\n\r\n^FX -------Text contains HD serial #-------------\r\n^FO15,75\r\n^CF0,42\r\n^FB325,3,,C\r\n^FD{% if use_uuid %}{{ item.uuid }}{% else %}{{ item.barcode_id }}{% endif %}^FS\r\n\r\n^FX -------Barcode contains HD serial #-----------\r\n^FO150,200\r\n^BXN,3,200\r\n^FD{% if use_uuid %}invu|{{ item.uuid }}{% else %}invb|{{ item.barcode_id }}{% endif %}^FS\r\n\r\n^XZ\r\n"}}]
from htsworkflow.frontend.experiments.models import FlowCell
from htsworkflow.frontend.bcmagic.forms import BarcodeMagicForm
from htsworkflow.frontend.bcmagic.utils import print_zpl_socket
-from htsworkflow.frontend import settings
-#from htsworkflow.util.jsonutil import encode_json
+from django.conf import settings
+from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext, Template
from django.template.loader import get_template
-from django.contrib.auth.decorators import login_required
register_search_plugin('Inventory Item', item_search)
--- /dev/null
+from htsworkflow.frontend.labels.models import LabelContent, LabelTemplate, LabelPrinter
+from htsworkflow.frontend.inventory.models import PrinterTemplate
+from htsworkflow.frontend.bcmagic.utils import print_zpl_socket
+from django.template import Context, Template
+from django.contrib import admin
+
+class LabelContentOptions(admin.ModelAdmin):
+ save_as = True
+ save_on_top = True
+ search_fields = (
+ 'title',
+ 'subtitle',
+ 'text',
+ 'barcode',
+ 'creator',
+ )
+ list_display = ('title','subtitle','text','barcode','template','creator')
+ list_filter = ('template','creator',)
+ fieldsets = (
+ (None, {
+ 'fields': (('title','subtitle','text','barcode'),
+ ('template','creator'))
+
+ }),
+ )
+ actions = ['action_print_labels']
+
+ def action_print_labels(self, request, queryset):
+ """
+ Django action which prints labels for the selected set of labels from the
+ Django Admin interface.
+ """
+
+ zpl_list = []
+ #Iterate over selected labels to print
+ for label in queryset.all():
+
+ template_used = LabelTemplate.objects.get(name=label.template.name)
+ # ZPL Template
+ t = Template(template_used.ZPL_code)
+
+ # Django Template Context
+ c = Context({'label': label})
+
+ # Send rendered template to the printer that the template
+ # object has been attached to in the database.
+ zpl_list.append(t.render(c))
+
+ print_zpl_socket(zpl_list, host=template_used.printer.ip_address)
+
+ self.message_user(request, "%s labels printed." % (len(queryset)))
+
+ action_print_labels.short_description = "Print Selected Labels"
+
+class LabelTemplateOptions(admin.ModelAdmin):
+    """Admin configuration for LabelTemplate objects."""
+    save_as = True
+    save_on_top = True
+    list_display = ('name', 'printer', 'ZPL_code')
+
+class LabelPrinterOptions(admin.ModelAdmin):
+    """Admin configuration for LabelPrinter objects."""
+    list_display = ('name', 'ip_address', 'labels')
+
+admin.site.register(LabelContent, LabelContentOptions)
+admin.site.register(LabelTemplate, LabelTemplateOptions)
+admin.site.register(LabelPrinter, LabelPrinterOptions)
+
--- /dev/null
+from django.db import models
+
+class LabelPrinter(models.Model):
+    """
+    Barcode Printer Information
+    """
+    name = models.CharField(max_length=256)
+    # Printer hardware model; 'ZM400' is presumably the usual Zebra
+    # unit here -- confirm against deployed printers.
+    model = models.CharField(max_length=64, default='ZM400')
+    ip_address = models.IPAddressField()
+    # Free-text description of the label stock loaded in this printer.
+    labels = models.CharField(max_length=200)
+    notes = models.TextField(null=True, blank=True)
+
+    def __unicode__(self):
+        return u'%s: %s' % (self.name, self.labels)
+
+class LabelTemplate(models.Model):
+ """
+ Maps templates to printer to use
+ """
+ name = models.CharField(max_length=200)
+ description = models.TextField(null=True, blank=True)
+ printer = models.ForeignKey(LabelPrinter)
+
+ ZPL_code = models.TextField('template')
+
+ def __unicode__(self):
+ return '%s %s' % (self.name, self.printer.name)
+
+class LabelContent(models.Model):
+    """Text/barcode fields for one printable label, tied to a template."""
+    title = models.CharField(max_length=200, null=True, blank=True)
+    subtitle = models.CharField(max_length=200, null=True, blank=True)
+    text = models.CharField(max_length=200, null=True, blank=True)
+    barcode = models.CharField(max_length=200, null=True, blank=True)
+    template = models.ForeignKey(LabelTemplate)
+    creator = models.CharField(max_length=200)
--- /dev/null
+"""
+This file demonstrates two different styles of tests (one doctest and one
+unittest). These will both pass when you run "manage.py test".
+
+Replace these with more appropriate tests for your application.
+"""
+
+from django.test import TestCase
+
+class SimpleTest(TestCase):
+ def test_basic_addition(self):
+ """
+ Tests that 1 + 1 always equals 2.
+ """
+ self.failUnlessEqual(1 + 1, 2)
+
+__test__ = {"doctest": """
+Another way to test that 1 + 1 is equal to 2.
+
+>>> 1 + 1 == 2
+True
+"""}
+
--- /dev/null
+# Create your views here.
+++ /dev/null
-#!/usr/bin/env python
-from django.core.management import execute_manager
-try:
- import settings # Assumed to be in the same directory.
-except ImportError:
- import sys
- sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
- sys.exit(1)
-
-if __name__ == "__main__":
- execute_manager(settings)
-from htsworkflow.frontend import settings
+from django.conf import settings
from django.http import HttpResponse
from datetime import datetime
from string import *
)
class CelllineOptions(admin.ModelAdmin):
- list_display = ('cellline_name', 'nickname', 'notes')
+ list_display = ('cellline_name', 'notes')
search_fields = ('cellline_name', 'nickname', 'notes')
fieldsets = (
(None, {
- 'fields': (('cellline_name'),('notes'),)
+ 'fields': (('cellline_name','nickname',),('notes'),)
}),
)
(None, {
'fields': (
('id','library_name','hidden'),
- ('library_species', 'cell_line', 'replicate'),
- ('library_type', 'experiment_type'))
+ ('library_species', 'library_type', 'experiment_type'),
+ )
}),
+ ('Experiment Detail:', {
+ 'fields': (('cell_line', 'replicate',),
+ ('condition',),
+ ('antibody', ),
+ ),
+ 'classes': ('collapse',),
+ }),
('Creation Information:', {
'fields' : (('made_by', 'creation_date', 'stopping_point'),
('amplified_from_sample'),
admin.site.register(Library, LibraryOptions)
admin.site.register(Affiliation, AffiliationOptions)
-#admin.site.register(Antibody, AntibodyOptions)
+admin.site.register(Antibody, AntibodyOptions)
admin.site.register(Cellline, CelllineOptions)
-#admin.site.register(Condition, ConditionOptions)
+admin.site.register(Condition, ConditionOptions)
admin.site.register(ExperimentType, ExperimentTypeOptions)
#admin.site.register(HTSUser, HTSUserOptions)
admin.site.register(LibraryType, LibraryTypeOptions)
"pk": 8,
"fields": {
"scientific_name": "Homo sapiens",
- "common_name": ""
+ "common_name": "human"
}
},
{
-[
+[
{"pk": 5, "model": "auth.user",
"fields": {
"username": "test",
"date_joined": "2009-01-01 00:01:01"
}
},
+ {"pk": 1, "model": "samples.affiliation",
+ "fields": {
+ "users": [5],
+ "name": "Alice",
+ "contact": "Lab Boss",
+ "email": "alice@some.where.else."
+ }
+ },
+ {"pk": 2, "model": "samples.affiliation",
+ "fields": { "name": "Bob",
+ "contact": "Other Lab Boss",
+ "email": "bob@some.where.else"
+ }
+ },
{"pk": 153, "model": "experiments.flowcell",
"fields": {
"paired_end": true,
"library_type": null,
"made_by": "Igor",
"affiliations": [
- 40
+ 2
],
"replicate": 1,
"condition": 1,
import urlparse
from django.db import models
from django.contrib.auth.models import User, UserManager
+from django.core import urlresolvers
from django.db.models.signals import pre_save, post_save
from django.db import connection
-from htsworkflow.frontend import settings
from htsworkflow.frontend.reports.libinfopar import *
-
-# Create your models here.
logger = logging.getLogger(__name__)
class Antibody(models.Model):
max_length=20,
blank=True,
null=True,
- db_index=True,
- verbose_name = 'Short Name'
+ db_index=True
)
catalog = models.CharField(max_length=50, unique=True, db_index=True)
antibodies = models.CharField(max_length=500, db_index=True)
nickname = models.CharField(max_length=20,
blank=True,
null=True,
- db_index=True,
- verbose_name = 'Short Name')
+ db_index=True)
+
notes = models.TextField(blank=True)
def __unicode__(self):
return unicode(self.cellline_name)
class Meta:
ordering = ["condition_name"]
+
class ExperimentType(models.Model):
name = models.CharField(max_length=50, unique=True)
class Meta:
verbose_name_plural = "species"
ordering = ["scientific_name"]
+
+ @models.permalink
+ def get_absolute_url(self):
+ return ('htsworkflow.frontend.samples.views.species', [str(self.id)])
class Affiliation(models.Model):
name = models.CharField(max_length=256, db_index=True, verbose_name='Name')
id = models.CharField(max_length=10, primary_key=True)
library_name = models.CharField(max_length=100, unique=True)
library_species = models.ForeignKey(Species)
- # new field 2008 Mar 5, alter table samples_library add column "hidden" NOT NULL default 0;
hidden = models.BooleanField()
- # new field 2009 Oct 6, alter table samples_library add column "account_number" varchar(100) NULL
account_number = models.CharField(max_length=100, null=True, blank=True)
- cell_line = models.ForeignKey(Cellline, blank=True, null=True, verbose_name="Background")
+ cell_line = models.ForeignKey(Cellline, blank=True, null=True,
+ verbose_name="Background")
condition = models.ForeignKey(Condition, blank=True, null=True)
antibody = models.ForeignKey(Antibody,blank=True,null=True)
- # New field Aug/25/08. SQL: alter table fctracker_library add column "lib_affiliation" varchar(256) NULL;
- affiliations = models.ManyToManyField(Affiliation,related_name='library_affiliations',null=True)
- # new field Nov/14/08
- tags = models.ManyToManyField(Tag,related_name='library_tags',blank=True,null=True)
- # New field Aug/19/08
- # SQL to add column: alter table fctracker_library add column "replicate" smallint unsigned NULL;
+ affiliations = models.ManyToManyField(
+ Affiliation,related_name='library_affiliations',null=True)
+ tags = models.ManyToManyField(Tag,related_name='library_tags',
+ blank=True,null=True)
REPLICATE_NUM = ((1,1),(2,2),(3,3),(4,4))
- replicate = models.PositiveSmallIntegerField(choices=REPLICATE_NUM,default=1)
+ replicate = models.PositiveSmallIntegerField(choices=REPLICATE_NUM,
+ blank=True,null=True)
experiment_type = models.ForeignKey(ExperimentType)
library_type = models.ForeignKey(LibraryType, blank=True, null=True)
creation_date = models.DateField(blank=True, null=True)
made_for = models.CharField(max_length=50, blank=True,
- verbose_name='ChIP/DNA/RNA Made By')
+ verbose_name='ChIP/DNA/RNA Made By')
made_by = models.CharField(max_length=50, blank=True, default="Lorian")
PROTOCOL_END_POINTS = (
('Done', 'Completed'),
)
PROTOCOL_END_POINTS_DICT = dict(PROTOCOL_END_POINTS)
+ stopping_point = models.CharField(max_length=25,
+ choices=PROTOCOL_END_POINTS,
+ default='Done')
- stopping_point = models.CharField(max_length=25, choices=PROTOCOL_END_POINTS, default='Done')
- amplified_from_sample = models.ForeignKey('self', blank=True, null=True, related_name='amplified_into_sample')
+ amplified_from_sample = models.ForeignKey('self',
+ related_name='amplified_into_sample',
+ blank=True, null=True)
undiluted_concentration = models.DecimalField("Concentration",
max_digits=5, decimal_places=2, blank=True, null=True,
help_text=u"Undiluted concentration (ng/\u00b5l)")
# note \u00b5 is the micro symbol in unicode
- successful_pM = models.DecimalField(max_digits=9, decimal_places=1, blank=True, null=True)
+ successful_pM = models.DecimalField(max_digits=9,
+ decimal_places=1, blank=True, null=True)
ten_nM_dilution = models.BooleanField()
gel_cut_size = models.IntegerField(default=225, blank=True, null=True)
insert_size = models.IntegerField(blank=True, null=True)
ordering = ["-id"]
def antibody_name(self):
- str ='<a target=_self href="/admin/samples/antibody/'+self.antibody.id.__str__()+'/" title="'+self.antibody.__str__()+'">'+self.antibody.nickname+'</a>'
+ str ='<a target=_self href="/admin/samples/antibody/'+self.antibody.id.__str__()+'/" title="'+self.antibody.__str__()+'">'+self.antibody.label+'</a>'
return str
antibody_name.allow_tags = True
@models.permalink
def get_absolute_url(self):
return ('htsworkflow.frontend.samples.views.library_to_flowcells', [str(self.id)])
-
-
-
+ def get_admin_url(self):
+ return urlresolvers.reverse('admin:samples_library_change',
+ args=(self.id,))
class HTSUser(User):
"""
-from htsworkflow.frontend import settings
+from django.conf import settings
import glob
import os
pass
-def parse_flowcell_id(flowcell_id):
- """
- Return flowcell id and any status encoded in the id
-
- We stored the status information in the flowcell id name.
- this was dumb, but database schemas are hard to update.
- """
- fields = flowcell_id.split()
- fcid = None
- status = None
- if len(fields) > 0:
- fcid = fields[0]
- if len(fields) > 1:
- status = fields[1]
- return fcid, status
-
class LibraryTestCase(TestCase):
+ fixtures = ['test_samples.json']
+
def setUp(self):
create_db(self)
lib_response = self.client.get(url, apidata)
self.failUnlessEqual(lib_response.status_code, 200)
lib_json = json.loads(lib_response.content)
- print lib_json
for d in [lib_dict, lib_json]:
# amplified_from_sample is a link to the library table,
# so to be more compatible with running via nose we flush the database tables
# of interest before creating our sample data.
def create_db(obj):
- Species.objects.all().delete()
- obj.species_human = Species(
- scientific_name = 'Homo Sapeins',
- common_name = 'human',
- )
- obj.species_human.save()
- obj.species_worm = Species(
- scientific_name = 'C. Elegans',
- common_name = 'worm',
- )
- obj.species_worm.save()
- obj.species_phix = Species(
- scientific_name = 'PhiX',
- common_name = 'PhiX'
- )
- obj.species_phix.save()
-
- ExperimentType.objects.all().delete()
- obj.experiment_de_novo = ExperimentType(
- name = 'De Novo',
- )
- obj.experiment_de_novo.save()
- obj.experiment_chip_seq = ExperimentType(
- name = 'ChIP-Seq'
- )
- obj.experiment_chip_seq.save()
- obj.experiment_rna_seq = ExperimentType(
- name = 'RNA-Seq'
- )
- obj.experiment_rna_seq.save()
-
- Affiliation.objects.all().delete()
- obj.affiliation_alice = Affiliation(
- name = 'Alice',
- contact = 'Lab Boss',
- email = 'alice@some.where.else.'
- )
- obj.affiliation_alice.save()
- obj.affiliation_bob = Affiliation(
- name = 'Bob',
- contact = 'Other Lab Boss',
- email = 'bob@some.where.else',
- )
- obj.affiliation_bob.save()
-
+ obj.species_human = Species.objects.get(pk=8)
+ obj.experiment_rna_seq = ExperimentType.objects.get(pk=4)
+ obj.affiliation_alice = Affiliation.objects.get(pk=1)
+ obj.affiliation_bob = Affiliation.objects.get(pk=2)
+
Library.objects.all().delete()
obj.library_10001 = Library(
id = "10001",
)
obj.library_10002.save()
+try:
+ import RDF
+ HAVE_RDF = True
+
+ rdfNS = RDF.NS("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+ xsdNS = RDF.NS("http://www.w3.org/2001/XMLSchema#")
+ libNS = RDF.NS("http://jumpgate.caltech.edu/wiki/LibraryOntology#")
+except ImportError,e:
+ HAVE_RDF = False
+
+
+class TestRDFaLibrary(TestCase):
+ fixtures = ['test_samples.json']
+
+ def test_parse_rdfa(self):
+ model = get_rdf_memory_model()
+ parser = RDF.Parser(name='rdfa')
+ url = '/library/10981/'
+ lib_response = self.client.get(url)
+ self.failIfEqual(len(lib_response.content), 0)
+
+ parser.parse_string_into_model(model,
+ lib_response.content,
+ 'http://localhost'+url)
+ # http://jumpgate.caltech.edu/wiki/LibraryOntology#affiliation>
+ self.check_literal_object(model, ['Bob'], p=libNS['affiliation'])
+ self.check_literal_object(model, ['Multiplexed'], p=libNS['experiment_type'])
+ self.check_literal_object(model, ['400'], p=libNS['gel_cut'])
+ self.check_literal_object(model, ['Igor'], p=libNS['made_by'])
+ self.check_literal_object(model, ['Paired End Multiplexed Sp-BAC'], p=libNS['name'])
+ self.check_literal_object(model, ['Drosophila melanogaster'], p=libNS['species'])
+
+ self.check_uri_object(model,
+ [u'http://localhost/lane/1193'],
+ p=libNS['has_lane'])
+
+ self.check_literal_object(model,
+ [u"303TUAAXX"],
+ s=RDF.Uri('http://localhost/flowcell/303TUAAXX/'))
+
+ def check_literal_object(self, model, values, s=None, p=None, o=None):
+ statements = list(model.find_statements(
+ RDF.Statement(s,p,o)))
+ self.failUnlessEqual(len(statements), len(values),
+ "Couln't find %s %s %s" % (s,p,o))
+ for s in statements:
+ self.failUnless(s.object.literal_value['string'] in values)
+
+
+ def check_uri_object(self, model, values, s=None, p=None, o=None):
+ statements = list(model.find_statements(
+ RDF.Statement(s,p,o)))
+ self.failUnlessEqual(len(statements), len(values),
+ "Couln't find %s %s %s" % (s,p,o))
+ for s in statements:
+ self.failUnless(unicode(s.object.uri) in values)
+
+
+
+def get_rdf_memory_model():
+ storage = RDF.MemoryStorage()
+ model = RDF.Model(storage)
+ return model
+
urlpatterns = patterns('',
(r"^library/(?P<library_id>\w+)/json", 'htsworkflow.frontend.samples.views.library_json'),
(r"^species/(?P<species_id>\w+)/json", 'htsworkflow.frontend.samples.views.species_json'),
+ (r"^species/(?P<species_id>\w+)", 'htsworkflow.frontend.samples.views.species'),
)
from htsworkflow.frontend.auth import require_api_key
from htsworkflow.frontend.experiments.models import FlowCell, Lane, LANE_STATUS_MAP
from htsworkflow.frontend.samples.changelist import ChangeList
-from htsworkflow.frontend.samples.models import Library, HTSUser
-from htsworkflow.frontend.samples.results import get_flowcell_result_dict, parse_flowcell_id
+from htsworkflow.frontend.samples.models import Library, Species, HTSUser
+from htsworkflow.frontend.samples.results import get_flowcell_result_dict
from htsworkflow.frontend.bcmagic.forms import BarcodeMagicForm
from htsworkflow.pipelines.runfolder import load_pipeline_run_xml
from htsworkflow.pipelines import runfolder
from htsworkflow.pipelines.eland import ResultLane
-from htsworkflow.frontend import settings
-from htsworkflow.util.conversion import unicode_or_none
+from htsworkflow.util.conversion import unicode_or_none, parse_flowcell_id
from htsworkflow.util import makebed
from htsworkflow.util import opener
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, HttpResponseRedirect, Http404
-from django.shortcuts import render_to_response
+from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.template.loader import get_template
from django.contrib.auth.decorators import login_required
+from django.conf import settings
LANE_LIST = [1,2,3,4,5,6,7,8]
SAMPLES_CONTEXT_DEFAULTS = {
#for lib in library_items.object_list:
for lib in cl.result_list:
summary = {}
+ summary['library'] = lib
summary['library_id'] = lib.id
summary['library_name'] = lib.library_name
summary['species_name' ] = lib.library_species.scientific_name
eland_results = []
for fc, lane_number in flowcell_list:
lane_summary, err_list = _summary_stats(fc, lane_number)
-
- eland_results.extend(_make_eland_results(fc, lane_number, flowcell_run_results))
lane_summary_list.extend(lane_summary)
+
+ eland_results.extend(_make_eland_results(fc, lane_number, flowcell_run_results))
context = {
'page_name': 'Library Details',
flowcell = FlowCell.objects.get(flowcell_id=flowcell_id)
#pm_field = 'lane_%d_pM' % (lane_id)
lane_obj = flowcell.lane_set.get(lane_number=lane_id)
- eland_summary.successful_pm = lane_obj.pM
+ eland_summary.flowcell = flowcell
+ eland_summary.lane = lane_obj
summary_list.append(eland_summary)
return (summary_list, err_list)
-def _summary_stats_old(flowcell_id, lane):
- """
- return a dictionary of summary stats for a given flowcell_id & lane.
- """
- fc_id, status = parse_flowcell_id(flowcell_id)
- fc_result_dict = get_flowcell_result_dict(fc_id)
-
- dict_list = []
- err_list = []
- summary_list = []
-
- if fc_result_dict is None:
- err_list.append('Results for Flowcell %s not found.' % (fc_id))
- return (dict_list, err_list, summary_list)
-
- for cnm in fc_result_dict:
-
- xmlpath = fc_result_dict[cnm]['run_xml']
-
- if xmlpath is None:
- err_list.append('Run xml for Flowcell %s(%s) not found.' % (fc_id, cnm))
- continue
-
- tree = ElementTree.parse(xmlpath).getroot()
- results = runfolder.PipelineRun(pathname='', xml=tree)
- try:
- lane_report = runfolder.summarize_lane(results.gerald, lane)
- summary_list.append(os.linesep.join(lane_report))
- except Exception, e:
- summary_list.append("Summary report needs to be updated.")
- logging.error("Exception: " + str(e))
-
- print >>sys.stderr, "----------------------------------"
- print >>sys.stderr, "-- DOES NOT SUPPORT PAIRED END ---"
- print >>sys.stderr, "----------------------------------"
- lane_results = results.gerald.summary[0][lane]
- lrs = lane_results
-
- d = {}
-
- d['average_alignment_score'] = lrs.average_alignment_score
- d['average_first_cycle_intensity'] = lrs.average_first_cycle_intensity
- d['cluster'] = lrs.cluster
- d['lane'] = lrs.lane
- d['flowcell'] = flowcell_id
- d['cnm'] = cnm
- d['percent_error_rate'] = lrs.percent_error_rate
- d['percent_intensity_after_20_cycles'] = lrs.percent_intensity_after_20_cycles
- d['percent_pass_filter_align'] = lrs.percent_pass_filter_align
- d['percent_pass_filter_clusters'] = lrs.percent_pass_filter_clusters
-
- #FIXME: function finished, but need to take advantage of
- # may need to take in a list of lanes so we only have to
- # load the xml file once per flowcell rather than once
- # per lane.
- dict_list.append(d)
-
- return (dict_list, err_list, summary_list)
-
def get_eland_result_type(pathname):
"""
else:
return 'unknown'
-def _make_eland_results(flowcell_id, lane, interesting_flowcells):
+def _make_eland_results(flowcell_id, lane_number, interesting_flowcells):
fc_id, status = parse_flowcell_id(flowcell_id)
cur_fc = interesting_flowcells.get(fc_id, None)
if cur_fc is None:
return []
flowcell = FlowCell.objects.get(flowcell_id=flowcell_id)
+ lane = flowcell.lane_set.get(lane_number=lane_number)
# Loop throw storage devices if a result has been archived
storage_id_list = []
if cur_fc is not None:
result_path = cur_fc[cycle]['eland_results'].get(lane, None)
result_link = make_result_link(fc_id, cycle, lane, result_path)
results.append({'flowcell_id': fc_id,
+ 'flowcell': flowcell,
'run_date': flowcell.run_date,
'cycle': cycle,
'lane': lane,
"""
raise Http404
+def species(request, species_id):
+ species = get_object_or_404(Species, id=species_id)
+
+ context = RequestContext(request,
+ { 'species': species })
+
+ return render_to_response("samples/species_detail.html", context)
+
@login_required
def user_profile(request):
"""
+++ /dev/null
-"""
-Generate settings for the Django Application.
-
-To make it easier to customize the application the settings can be
-defined in a configuration file read by ConfigParser.
-
-The options understood by this module are (with their defaults):
-
- [frontend]
- email_host=localhost
- email_port=25
- database_engine=sqlite3
- database_name=/path/to/db
-
- [admins]
- #name1=email1
-
- [allowed_hosts]
- #name1=ip
- localhost=127.0.0.1
-
- [allowed_analysis_hosts]
- #name1=ip
- localhost=127.0.0.1
-
-"""
-import ConfigParser
-import os
-import shlex
-
-# make epydoc happy
-__docformat__ = "restructuredtext en"
-
-def options_to_list(options, dest, section_name, option_name):
- """
- Load a options from section_name and store in a dictionary
- """
- if options.has_option(section_name, option_name):
- opt = options.get(section_name, option_name)
- dest.extend( shlex.split(opt) )
-
-def options_to_dict(dest, section_name):
- """
- Load a options from section_name and store in a dictionary
- """
- if options.has_section(section_name):
- for name in options.options(section_name):
- dest[name] = options.get(section_name, name)
-
-# define your defaults here
-options = ConfigParser.SafeConfigParser(
- { 'email_host': 'localhost',
- 'email_port': '25',
- 'database_engine': 'sqlite3',
- 'database_name':
- os.path.abspath('../../fctracker.db'),
- 'time_zone': 'America/Los_Angeles',
- 'default_pm': '5',
- 'link_flowcell_storage_device_url': "http://localhost:8000/inventory/lts/link/",
- 'printer1_host': '127.0.0.1',
- 'printer1_port': '9100',
- 'printer2_host': '127.0.0.1',
- 'printer2_port': '9100',
- })
-
-options.read([os.path.expanduser("~/.htsworkflow.ini"),
- '/etc/htsworkflow.ini',])
-
-# OptionParser will use the dictionary passed into the config parser as
-# 'Default' values in any section. However it still needs an empty section
-# to exist in order to retrieve anything.
-if not options.has_section('frontend'):
- options.add_section('frontend')
-if not options.has_section('bcprinter'):
- options.add_section('bcprinter')
-
-
-# Django settings for elandifier project.
-
-DEBUG = True
-TEMPLATE_DEBUG = DEBUG
-
-ADMINS = []
-options_to_list(options, ADMINS, 'frontend', 'admins')
-
-MANAGERS = []
-options_to_list(options, MANAGERS, 'frontend', 'managers')
-
-AUTHENTICATION_BACKENDS = ( 'samples.auth_backend.HTSUserModelBackend', )
-CUSTOM_USER_MODEL = 'samples.HTSUser'
-
-EMAIL_HOST = options.get('frontend', 'email_host')
-EMAIL_PORT = int(options.get('frontend', 'email_port'))
-
-if options.has_option('frontend', 'notification_sender'):
- NOTIFICATION_SENDER = options.get('frontend', 'notification_sender')
-else:
- NOTIFICATION_SENDER = "noreply@example.com"
-NOTIFICATION_BCC = []
-options_to_list(options, NOTIFICATION_BCC, 'frontend', 'notification_bcc')
-
-# 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
-DATABASE_ENGINE = options.get('frontend', 'database_engine')
-
-# Or path to database file if using sqlite3.
-DATABASE_NAME = options.get('frontend', 'database_name' )
-DATABASE_USER = '' # Not used with sqlite3.
-DATABASE_PASSWORD = '' # Not used with sqlite3.
-DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
-DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
-
-# Local time zone for this installation. Choices can be found here:
-# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
-# although not all variations may be possible on all operating systems.
-# If running in a Windows environment this must be set to the same as your
-# system time zone.
-TIME_ZONE = options.get('frontend', 'time_zone')
-
-# Language code for this installation. All choices can be found here:
-# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
-# http://blogs.law.harvard.edu/tech/stories/storyReader$15
-LANGUAGE_CODE = 'en-us'
-
-SITE_ID = 1
-
-# If you set this to False, Django will make some optimizations so as not
-# to load the internationalization machinery.
-USE_I18N = True
-
-# Absolute path to the directory that holds media.
-# Example: "/home/media/media.lawrence.com/"
-MEDIA_ROOT = os.path.abspath(os.path.split(__file__)[0]) + '/static/'
-
-# URL that handles the media served from MEDIA_ROOT.
-# Example: "http://media.lawrence.com"
-MEDIA_URL = '/static/'
-
-# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
-# trailing slash.
-# Examples: "http://foo.com/media/", "/media/".
-ADMIN_MEDIA_PREFIX = '/media/'
-
-# Make this unique, and don't share it with anybody.
-SECRET_KEY = '(ekv^=gf(j9f(x25@a7r+8)hqlz%&_1!tw^75l%^041#vi=@4n'
-
-# some of our urls need an api key
-DEFAULT_API_KEY = 'n7HsXGHIi0vp9j5u4TIRJyqAlXYc4wrH'
-
-# List of callables that know how to import templates from various sources.
-TEMPLATE_LOADERS = (
- 'django.template.loaders.filesystem.load_template_source',
- 'django.template.loaders.app_directories.load_template_source',
-# 'django.template.loaders.eggs.load_template_source',
-)
-
-MIDDLEWARE_CLASSES = (
- 'django.middleware.common.CommonMiddleware',
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.middleware.doc.XViewMiddleware',
-)
-
-ROOT_URLCONF = 'htsworkflow.frontend.urls'
-
-TEMPLATE_DIRS = (
- # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
- # Always use forward slashes, even on Windows.
- # Don't forget to use absolute paths, not relative paths.
- '/usr/share/python-support/python-django/django/contrib/admin/templates',
- #'/usr/lib/pymodules/python2.6/django/contrib/admin/templates/',
- os.path.join(os.path.split(__file__)[0], 'templates'),
-)
-
-INSTALLED_APPS = (
- 'django.contrib.admin',
- 'django.contrib.auth',
- 'django.contrib.contenttypes',
- 'django.contrib.humanize',
- 'django.contrib.sessions',
- 'django.contrib.sites',
- 'htsworkflow.frontend.eland_config',
- 'htsworkflow.frontend.samples',
- # modules from htsworkflow branch
- 'htsworkflow.frontend.experiments',
- 'htsworkflow.frontend.analysis',
- 'htsworkflow.frontend.reports',
- 'htsworkflow.frontend.inventory',
- 'htsworkflow.frontend.bcmagic',
- 'django.contrib.databrowse',
-)
-
-# Project specific settings
-
-ALLOWED_IPS={'127.0.0.1': '127.0.0.1'}
-options_to_dict(ALLOWED_IPS, 'allowed_hosts')
-
-ALLOWED_ANALYS_IPS = {'127.0.0.1': '127.0.0.1'}
-options_to_dict(ALLOWED_ANALYS_IPS, 'allowed_analysis_hosts')
-#UPLOADTO_HOME = os.path.abspath('../../uploads')
-#UPLOADTO_CONFIG_FILE = os.path.join(UPLOADTO_HOME, 'eland_config')
-#UPLOADTO_ELAND_RESULT_PACKS = os.path.join(UPLOADTO_HOME, 'eland_results')
-#UPLOADTO_BED_PACKS = os.path.join(UPLOADTO_HOME, 'bed_packs')
-# Where "results_dir" means directory with all the flowcells
-if options.has_option('frontend', 'results_dir'):
- RESULT_HOME_DIR=os.path.expanduser(options.get('frontend', 'results_dir'))
-else:
- RESULT_HOME_DIR='/tmp'
-
-LINK_FLOWCELL_STORAGE_DEVICE_URL = options.get('frontend', 'link_flowcell_storage_device_url')
-# PORT 9100 is default for Zebra tabletop/desktop printers
-# PORT 6101 is default for Zebra mobile printers
-BCPRINTER_PRINTER1_HOST = options.get('bcprinter', 'printer1_host')
-BCPRINTER_PRINTER1_PORT = int(options.get('bcprinter', 'printer1_port'))
-BCPRINTER_PRINTER2_HOST = options.get('bcprinter', 'printer2_host')
-BCPRINTER_PRINTER2_PORT = int(options.get('bcprinter', 'printer2_port'))
color: white;
background: #880000;
}
+
+div.htswdetail {
+ margin: 0;
+ padding: 0;
+}
+div.htswdetail table, div.htswdetail td {
+ border-style: solid;
+}
+div.htswdetail table {
+ border-width: 0 0 1px 1px;
+ border-spacing: 0;
+ border-collapse: collapse;
+}
+div.htswdetail td {
+ margin: 0;
+ padding: 3px;
+ border-width: 1px 1px 0 0;
+}
+div.htswdetail thead {
+ text-align: center;
+}
+div.htswdetail tbody {
+ text-align: left;
+ }
+div.htswdetail h1,
+div.htswdetail h2
+{
+ font-size: 150%;
+}
+
+div.htswdetail h3 {
+ font-size: 125%;
+ margin: 0;
+}
+
+div.htswdetail h4,
+div.htswdetail h5,
+div.htswdetail ul,
+div.htswdetail ol,
+div.htswdetail li
+{
+ list-style: none;
+ margin: 0;
+}
+
+div.htswdetail ul,
+div.htswdetail ol
+{
+ margin-bottom: .5em;
+}
+
+/* style library detail headers */
+div#librarydetail {
+ margin: 0;
+ padding: 0;
+}
+div#librarydetail table, div#librarydetail td {
+ border-style: solid;
+}
+div#librarydetail table {
+ border-width: 0 0 1px 1px;
+ border-spacing: 0;
+ border-collapse: collapse;
+}
+div#librarydetail td {
+ margin: 0;
+ padding: 3px;
+ border-width: 1px 1px 0 0;
+}
+div#librarydetail thead {
+ text-align: center;
+ }
+div#librarydetail tbody {
+ text-align: right;
+}
+div#librarydetail h1,
+div#librarydetail h2
+{
+ font-size: 150%;
+}
+
+div#librarydetail h3 {
+ font-size: 125%;
+ margin: 0;
+}
+
+div#librarydetail h4,
+div#librarydetail h5,
+div#librarydetail ul,
+div#librarydetail ol,
+div#librarydetail li
+{
+ list-style: none;
+ margin: 0;
+}
+
+div#librarydetail ul,
+div#librarydetail ol
+{
+ margin-bottom: .5em;
+}
+
+div.library_identity {
+ float: left; margin: 5px; }
+div.library_sample_detail { float: left; margin: 5px; }
+div.library_library_detail { float: left; margin: 5px; }
+div.library_statistics { clear: both; border: 1px; }
+
<ul class="object-tools">
<li><a href="../../../../{{ app_label }}/started/{{ object_id }}/">{% trans "Started Email" %}</a></li>
<li><a href="history/" class="historylink">{% trans "History" %}</a></li>
- {% if has_absolute_url %}<li><a href="../../../r/{{ content_type_id }}/{{ object_id }}/" class="viewsitelink">{% trans
- "View on site" %}</a></li>{% endif%}
+ {% if has_absolute_url %}<li><a href="../../../r/{{ content_type_id }}/{{ object_id }}/" class="viewsitelink">{% trans "View on site" %}</a></li>{% endif%}
</ul>
{% endif %}{% endif %}
-{% endblock %}
\ No newline at end of file
+{% endblock %}
<th scope="row"><a href="/admin/experiments/flowcell/">Flowcells</a></th>
<td><a href="/admin/experiments/flowcell/add/" class="addlink">{% trans 'Add' %}</a></td>
</tr>
+</table></div>
+
+<div class='module'>
+<table>
+<caption>Label Printing</caption>
+<tr>
+<th scope="row"><a href="/admin/labels/labelcontent/">Label Contents</a></th>
+<td><a href="/admin/labels/labelcontent/add/" class="addlink">{% trans 'Add' %}</a></td>
+</tr>
+<tr>
+<th scope="row"><a href="/admin/labels/labeltemplate/">Label Templates</a></th>
+<td><a href="/admin/labels/labeltemplate/add/" class="addlink">{% trans 'Add' %}</a></td>
+</tr>
</table></div><BR>
{% if app_list %}
</head>
<body>
<!-- Container -->
- {% if not is_popup %}
+ {% if not is_popup %}
<div id="header">
<div id="branding">
{% block branding %}{% endblock %}
</div>
- {% if user.is_authenticated and user.is_staff %}
- <div id="user-tools">{% trans 'Welcome,' %} <strong>{% firstof user.first_name user.username %}</strong>. {% block userlinks %}{% url django-admindocs-docroot as docsroot %}{% if docsroot %}<a href="{{ docsroot }}">{% trans 'Documentation' %}</a> / {% endif %}<a href="/admin/password_change/">{% trans 'Change password' %}</a> / <a href="/admin/logout/">{% trans 'Log out' %}</a>{% endblock %}
- </div>
+ <div id="user-tools">
+ {% if user.is_authenticated %}
+ {% trans 'Welcome,' %} <strong>{% firstof user.first_name user.username %}</strong>. {% block userlinks %}{% url django-admindocs-docroot as docsroot %}{% if docsroot %}<a href="{{ docsroot }}">{% trans 'Documentation' %}</a> / {% endif %}<a href="/admin/password_change/">{% trans 'Change password' %}</a> / <a href="/admin/logout/">{% trans 'Log out' %}</a>{% endblock %}
+ {% else %}
+ <a href="/admin/login/">{% trans 'Log in' %}</a>
{% endif %}
+ </div>
{% block nav-global %}{% endblock %}
</div>
{% endif %}
- {% if messages %}
+ {% block breadcrumbs %}{% endblock %}
+ {% if messages %}
<ul class="messagelist">{% for message in messages %}<li>{{ message }}</li>{% endfor %}</ul>
- {% endif %}
-
+ {% endif %}
+
<!-- Content -->
<div id="content" class="{% block coltype %}colM{% endblock %}">
{% block pretitle %}{% endblock %}
--- /dev/null
+{% extends "base_site.html" %}
+{% load adminmedia humanize i18n %}
+{% block extrahead %}
+ <!-- App Stuff -->
+ <link type="text/css" rel="stylesheet" href="/static/css/app.css" />
+ <script type="text/javascript" src="/static/js/jquery.min.js"></script>
+
+ {% block additional_javascript %}
+ {% endblock %}
+{% endblock %}
+
+{% block content %}
+<div id="flowcell_detail">
+ <h2>About this Flowcell</h2>
+ <b>Flowcell</b>:
+ <a href="{{flowcell.get_absolute_url}}" property="libns:flowcell_id">{{flowcell.flowcell_id}}</a><br/>
+ <b>Run Date</b>:
+ <span property="libns:date" content="{{flowcell.run_date|date:'Y-m-d\TH:i:s'}}" datatype="xsd:dateTime">{{ flowcell.run_date }}</span><br/>
+ <b>Type</b>:
+ <span property="libns:flowcell_type">{{flowcell.flowcell_type}}</span><br/>
+ <b>Read Length</b>:
+ <span property="libns:read_length">{{flowcell.read_length}}</span><br/>
+ <b>Control Lane</b>:
+ <span property="libns:control_lane">{{flowcell.control_lane}}</span><br/>
+
+ <b>Notes</b>:
+ <pre property="libns:flowcell_notes">{{flowcell.notes}}</pre>
+ <div class="htswdetail">
+ <h2>Lanes</h2>
+ <table>
+ <thead>
+ <tr>
+ <td>Lane</td>
+ <td>Library ID</td>
+ <td>Library Name</td>
+ <td>Species</td>
+ <td>Comment</td>
+ </tr>
+ </thead>
+ <tbody>
+ {% for lane in lanes %}
+ <tr rel="libns:has_lane" resource="{{lane.get_absolute_url}}" >
+ <td><a href="{{lane.get_absolute_url}}">
+ <span property="libns:lane_number">{{lane.lane_number}}</span></a></td>
+ <td><a href="{{lane.library.get_absolute_url}}"
+ rel="libns:library"><span property="libns:library_id"
+ >{{lane.library.id}}</span></a></td>
+ <td><a href="{{lane.library.get_absolute_url}}" rel="libns:library"><span property="libns:name">{{lane.library.library_name}}</span></a></td>
+ <td><a href="{{lane.library.library_species.get_absolute_url}}" rel="libns:species">
+ <span property="libns:species_name">{{ lane.library.library_species.scientific_name }}</span></a></td>
+ <td><span property="libns:comment">{{lane.comment}}</span></td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+ </div>
+ <div class="htsw_flowcell_ivc">
+ {% for run in flowcell.datarun_set.all %}
+ <h2>Run {{ run.runfolder_name }}</h2>
+ <table>
+ <thead>
+ <tr>
+ <td>Lane</td>
+ <td>IVC All</td>
+ <td>IVC Call</td>
+ <td>IVC Percent Base</td>
+ <td>IVC Percent Base All</td>
+        <td>IVC Percent Base Called</td>
+      </tr>
+      </thead>
+ <tbody>
+ {% for lane_id, lane_file_set in run.lane_files.items %}
+ {% if lane_file_set.ivc_all %}
+ <tr>
+ <td>{{ lane_id }}</td>
+ <td>
+ <a href="{{ lane_file_set.ivc_all.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_all.get_absolute_url }}"/></a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_call.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_call.get_absolute_url }}"/>
+ </a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_percent_base.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_percent_base.get_absolute_url }}"/>
+ </a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_percent_all.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_percent_all.get_absolute_url }}"/>
+ </a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_percent_call.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_percent_call.get_absolute_url }}"/>
+ </a>
+ </td>
+ </tr>
+ {% endif %}
+ {% endfor %}
+ </tbody>
+ </table>
+ {% endfor %}
+ </div>
+</div>
+{% endblock %}
--- /dev/null
+{% extends "base_site.html" %}
+{% load adminmedia humanize i18n %}
+{% block extrahead %}
+ <!-- App Stuff -->
+ <link type="text/css" rel="stylesheet" href="/static/css/app.css" />
+ <script type="text/javascript" src="/static/js/jquery.min.js"></script>
+
+ {% block additional_javascript %}
+ {% endblock %}
+{% endblock %}
+
+{% block content %}
+<div id="lane_detail" class="htswdetail">
+ <h2>About this lane</h2>
+ <div rel="libns:flowcell" resource="{{flowcell.get_absolute_url}}">
+ <b>Flowcell</b>:
+ <a href="{{flowcell.get_absolute_url}}">{{flowcell.flowcell_id}}</a><br/>
+ <b>Run Date</b>:
+ <span property="libns:date" content="{{flowcell.run_date|date:'Y-m-d\TH:i:s'}}" datatype="xsd:dateTime">{{ flowcell.run_date }}</span><br/>
+ <b>Type</b>:
+ <span property="libns:flowcell_type">{{flowcell.flowcell_type}}</span><br/>
+ </div>
+ <b>Lane</b>:
+ <span property="libns:lane_number" datatype="xsd:decimal">{{lane.lane_number}}</span><br/>
+ <b>Cycles</b>:
+ <span property="libns:read_length">{{lane.flowcell.read_length}}</span><br/>
+ <b>pM</b>
+ <span property="libns:pM" datatype="xsd:decimal">{{ lane.pM }}</span><br/>
+ <b>Cluster Estimate</b>
+ <span property="libns:cluster_estimate" datatype="xsd:decimal"
+ content="{{lane.cluster_estimate}}">{{ lane.cluster_estimate|intcomma }}</span><br/>
+ <b>Lane Status</b>:
+ <span property="libns:status">{{ lane.status }}</span><br/>
+ <b>Comment</b>:
+ <span property="libns:comment">{{ lane.comment }}</span><br/>
+ <hr/>
+ {% include "sample_header.html" %}
+ <hr/>
+ <div class="htsw_flowcell_ivc">
+ {% for run in flowcell.datarun_set.all %}
+ <h2>Run {{ run.runfolder_name }}</h2>
+ <table>
+ <thead>
+ <tr>
+ <td>Lane</td>
+ <td>IVC All</td>
+ <td>IVC Call</td>
+ <td>IVC Percent Base</td>
+ <td>IVC Percent Base All</td>
+      <td>IVC Percent Base Called</td>
+      </tr>
+      </thead>
+ <tbody>
+ {% for run, lane_number, lane_file_set in filtered_dataruns %}
+ {% if lane_file_set.ivc_all %}
+ <tr>
+ <td>{{lane_number}}</td>
+ <td>
+ <a href="{{ lane_file_set.ivc_all.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_all.get_absolute_url }}"/></a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_call.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_call.get_absolute_url }}"/>
+ </a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_percent_base.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_percent_base.get_absolute_url }}"/>
+ </a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_percent_all.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_percent_all.get_absolute_url }}"/>
+ </a>
+ </td>
+ <td>
+ <a href="{{ lane_file_set.ivc_percent_call.get_absolute_url }}">
+ <img height="84" width="126" src="{{ lane_file_set.ivc_percent_call.get_absolute_url }}"/>
+ </a>
+ </td>
+ </tr>
+ {% endif %}
+ {% endfor %}
+ </tbody>
+ </table>
+ {% endfor %}
+ </div>
+{% endblock %}
--- /dev/null
+<div id="librarydetail" about="{{lib.get_absolute_url}}">
+ <div class="library_identity">
+ <h2>Library Name</h2>
+ <b>Library ID</b>: <a href="{{lib.get_absolute_url}}"><span property="libns:library_id">{{ lib.id }}</span></a><br/>
+ <b>Name</b>:
+ <span property="libns:name">{{ lib.library_name }}</span>
+ <br/>
+ <b>Affiliations</b>:
+ <ul>
+ {% for individual in lib.affiliations.all %}
+ <li property="libns:affiliation" content="{{individual.name}}">
+ {{ individual.name }} ( {{ individual.contact }} )
+ </li>
+ {% endfor %}
+ </ul>
+ </div>
+ <div class="library_sample_detail">
+ <h2>Sample Details</h2>
+ <b>Species</b>:
+ <span property="libns:species" content="{{lib.library_species.scientific_name}}"><a href="{{lib.library_species.get_absolute_url}}">{{ lib.library_species.scientific_name }}</a></span>
+ <br/>
+ <b>Experiment Type</b>:
+ <span property="libns:experiment_type">{{ lib.experiment_type }}</span>
+ <br/>
+ {% if lib.antibody %}
+ <b>Antibody</b>:
+ <span property="libns:antibody">{{ lib.antibody.antibodies }}</span>
+    {% if lib.antibody.nickname %}
+ (<span property="libns:antibody_term">{{ lib.antibody.nickname }}</span>)
+ {% endif %}
+ <br/>
+ {% endif %}
+ {% if lib.cell_line %}
+ <b>Background or Cell Line</b>:
+ <span property="libns:cell_line">{{ lib.cell_line }}</span>
+ <br/>
+ {% endif %}
+ {% if lib.condition %}
+ <b>Condition</b>:
+ <span property="libns:condition">{{ lib.condition.condition_name }}</span>
+ {% if lib.condition.nickname %}
+ (<span property="libns:condition_term">{{ lib.condition.nickname }}</span>)
+ {% endif %}
+ <br/>
+ {% endif %}
+ {% if lib.replicate %}
+ <b>Replicate</b>:
+ <span property="libns:replicate">{{ lib.replicate }}</span>
+ <br/>
+ {% endif %}
+ </div>
+ <div class="library_library_detail">
+ <h2>Library Details</h2>
+ <b>Library Type</b>:
+ <span property="libns:library_type">{{ lib.library_type }}</span>
+ <br/>
+ <b>Creation Date</b>
+ <span property="libns:date" content="{{lib.creation_date|date:'Y-m-d'}}T00:00:00" datatype="xsd:dateTime">{{ lib.creation_date }}</span>
+ <br/>
+ <b>Made By</b>:
+ <span property="libns:made_by">{{ lib.made_by }}</span>
+ <br/>
+ {% if lib.gel_cut_size %}
+ <b>Gel Cut Size</b>:
+ <span property="libns:gel_cut" datatype="xsd:decimal">{{ lib.gel_cut_size }}</span>
+ <br/>
+ {% endif %}
+ {% if lib.insert_size %}
+ <b>Insert Size</b>:
+ <span property="libns:insert_size" datatype="xsd:decimal">{{ lib.insert_size }}</span>
+ <br/>
+ {% endif %}
+ {% if lib.undiluted_concentration %}
+ <b>Concentration</b>:
+ <span property="libns:concentration">{{ lib.undiluted_concentration }} ng/µl</span>
+ <br/>
+ {% endif %}
+ {% if lib.stopping_point_name %}
+ <b>Protocol Stopping Point</b>
+ <span property="libns:stopping_point">{{ lib.stopping_point_name }}</span>
+ <br/>
+ {% endif %}
+ </div>
{% block additional_javascript %}
{% endblock %}
-
-<style type="text/css">
- /* <![CDATA[ */
- div#librarydetail {
- margin: 0;
- padding: 0;
- }
- div#librarydetail table, div#librarydetail td {
- border-style: solid;
- }
- div#librarydetail table {
- border-width: 0 0 1px 1px;
- border-spacing: 0;
- border-collapse: collapse;
- }
- div#librarydetail td {
- margin: 0;
- padding: 3px;
- border-width: 1px 1px 0 0;
- }
- div#librarydetail thead {
- text-align: center;
- }
- div#librarydetail tbody {
- text-align: right;
- }
- div#librarydetail h1,
- div#librarydetail h2
- {
- font-size: 150%;
- }
-
- div#librarydetail h3 {
- font-size: 125%;
- margin: 0;
- }
-
- div#librarydetail h4,
- div#librarydetail h5,
- div#librarydetail ul,
- div#librarydetail ol,
- div#librarydetail li
- {
- list-style: none;
- margin: 0;
- }
-
- div#librarydetail ul,
- div#librarydetail ol
- {
- margin-bottom: .5em;
- }
- /* ]]> */
-</style>
{% endblock %}
{% block content %}
-<div id="librarydetail">
- <h2>About this library</h2>
- <b>Library ID</b>: {{ lib.id }}<br/>
- <b>Name</b>:
- <span property="libns:name">{{ lib.library_name }}</span>
- <br/>
- <b>Species</b>:
- <span property="libns:species">{{ lib.library_species.scientific_name }}</span>
- <br/>
- <b>Concentration</b>:
- <span property="libns:concentration">{{ lib.undiluted_concentration }} ng/µl</span>
- <br/>
- <b>Gel Cut Size</b>:
- <span property="libns:gel_cut">{{ lib.gel_cut_size }}</span>
- <br/>
- <b>Insert Size</b>:
- <span property="libns:insert_size">{{ lib.insert_size }}</span>
- <br/>
- <b>Background or Cell Line</b>:
- <span property="libns:cell_line">{{ lib.cell_line }}</span>
- <br/>
- <b>Replicate</b>:
- <span property="libns:replicate">{{ lib.replicate }}</span>
- <br/>
- <b>Library Type</b>:
- <span property="libns:library_type">{{ lib.library_type }}</span>
- <br/>
- <b>Experiment Type</b>:
- <span property="libns:experiment_type">{{ lib.experiment_type }}</span>
- <br/>
- <b>Made By</b>:
- <span property="libns:made_by">{{ lib.made_by }}</span>
- <br/>
- <b>Creation Date</b>
- <span property="libns:date" content="{{lib.creation_date|date:'Y-m-d'}}T00:00:00" datatype="xsd:dateTime">{{ lib.creation_date }}</span>
- <br/>
- <b>Protocol Stopping Point</b>
- <span property="libns:stopping_point">{{ lib.stopping_point_name }}</span>
- <br/>
- <b>Affiliations</b>:
- <ul>
- {% for individual in lib.affiliations.all %}
- <li property="libns:affliation" content="{{individual.name}}">
- {{ individual.name }} ( {{ individual.contact }} )
- </li>
- {% endfor %}
- </ul>
-
+ {% include "sample_header.html" %}
+ <hr/>
+ <div class="library_statistics">
<h2>Raw Result Files</h2>
<table>
<thead>
</thead>
<tbody>
{% for result in eland_results %}
- <tr about="/flowcell/{{result.flowcell_id}}/lane/{{result.lane}}">
+ <tr about="{{result.flowcell.get_absolute_url}}">
<td property="libns:date" content="{{result.run_date|date:'Y-m-d\TH:i:s'}}" datatype="xsd:dateTime">{{ result.run_date|date}}</td>
<td>{{ result.cycle }}</td>
- <td property="libns:flowcell_id">{{ result.flowcell_id }}</td>
- <td property="libns:lane">{{ result.lane }}</td>
+ <td><a href="{{result.flowcell.get_absolute_url}}"><span property="libns:flowcell_id">{{ result.flowcell_id }}</span></a></td>
+ <td><a href="{{result.lane.get_absolute_url}}" rel="libns:has_lane"><span property="libns:lane_number" datatype="xsd:decimal">{{ result.lane.lane_number }}</span></a></td>
<td><a href="{{ result.summary_url }}">Summary</a></td>
<td><a href="{{ result.result_url }}">{{ result.result_label }}</a></td>
<td>
</thead>
<tbody>
- {% for lane in lane_summary_list %}
- <tr about="/flowcell/{{lane.flowcell_id}}/lane/{{lane.lane_id}}/end/{% if lane.end %}{{ lane.end }}{% endif %}">
- <td>{{ lane.cycle_width }}</td>
- <td>{{ lane.flowcell_id }}</td>
- <td>{{ lane.lane_id }}</td>
- <td>{% if lane.end %}{{ lane.end }}{% endif %}</td>
- <td>{{ lane.clusters.0|intcomma }}</td>
- <td>{{ lane.successful_pm }}</td>
- <td>{{ lane.reads|intcomma }}</td>
- <td>{{ lane.no_match|intcomma }}</td>
- <td>{{ lane.no_match_percent|stringformat:".2f" }}</td>
- <td>{{ lane.qc_failed|intcomma }}</td>
- <td>{{ lane.qc_failed_percent|stringformat:".2f" }}</td>
- <td>{{ lane.match_codes.U0|intcomma }}</td>
- <td>{{ lane.match_codes.U1|intcomma }}</td>
- <td>{{ lane.match_codes.U2|intcomma }}</td>
- <td {% if lane.unique_reads %}property="libns:total_unique_locations" content="{{lane.unique_reads}}" datatype="xsd:decimal"{% endif %}>{{ lane.unique_reads|intcomma }}</td>
- <td>{{ lane.match_codes.R0|intcomma }}</td>
- <td>{{ lane.match_codes.R1|intcomma }}</td>
- <td>{{ lane.match_codes.R2|intcomma }}</td>
- <td>{{ lane.repeat_reads|intcomma }}</td>
+ {# ls short for lane summary #}
+ {% for ls in lane_summary_list %}
+ <tr about="{{ls.lane.get_absolute_url}}">
+ <td>{{ ls.cycle_width }}</td>
+ <td><a href="{{ls.flowcell.get_absolute_url}}">{{ ls.flowcell_id }}</a></td>
+ <td><a href="{{ls.lane.get_absolute_url}}">{{ ls.lane_id }}</a></td>
+ <td>{% if ls.end %}{{ ls.end }}{% endif %}</td>
+ <td>{{ ls.clusters.0|intcomma }}</td>
+ <td>{{ ls.successful_pm }}</td>
+ <td>{{ ls.reads|intcomma }}</td>
+ <td>{{ ls.no_match|intcomma }}</td>
+ <td>{{ ls.no_match_percent|stringformat:".2f" }}</td>
+ <td>{{ ls.qc_failed|intcomma }}</td>
+ <td>{{ ls.qc_failed_percent|stringformat:".2f" }}</td>
+ <td>{{ ls.match_codes.U0|intcomma }}</td>
+ <td>{{ ls.match_codes.U1|intcomma }}</td>
+ <td>{{ ls.match_codes.U2|intcomma }}</td>
+ <td {% if ls.unique_reads %}property="libns:total_unique_locations" content="{{ls.unique_reads}}" datatype="xsd:decimal"{% endif %}>{{ ls.unique_reads|intcomma }}</td>
+ <td>{{ ls.match_codes.R0|intcomma }}</td>
+ <td>{{ ls.match_codes.R1|intcomma }}</td>
+ <td>{{ ls.match_codes.R2|intcomma }}</td>
+ <td>{{ ls.repeat_reads|intcomma }}</td>
</tr>
{% endfor %}
</tbody>
</thead>
<tbody>
{% for lane in lib.lane_set.all %}
- <tr>
- <td>{{ lane.flowcell.flowcell_id }}</td>
- <td>{{ lane.lane_number }}</td>
+ <tr rel="libns:has_lane" resource="{{lane.get_absolute_url}}">
+ <td><a href="{{lane.flowcell.get_absolute_url}}" rel="libns:flowcell"
+ ><span property="libns:flowcell_id">{{ lane.flowcell.flowcell_id }}</span></a></td>
+ <td><a href="{{lane.get_absolute_url}}">
+ <span property="libns:lane_number" datatype="xsd:decimal"
+ >{{ lane.lane_number }}</span></a></td>
<td>{{ lane.comment }}</td>
</tr>
{% endfor %}
</tbody>
</table>
- <br/>
- <hr/>
- <h2>Count of multi-reads</h2>
- {% for lane in lane_summary_list %}
- {% if lane.summarized_reads %}
- <h3>
- {{lane.cycle_width}} {{ lane.flowcell_id }} lane {{ lane.lane_id }}
- {% if lane.end %} end {{ lane.end }}{% endif %}
- </h3>
- <ul>
- {% for name, counts in lane.summarized_reads.items %}
- <li><b>{{ name }}</b>: {{ counts|intcomma }}</li>
- {% endfor %}
- </ul>
- {% endif %}
- {% endfor %}
{% endblock %}
+ </div>
</div>
{% endblock %}
</thead>
<tbody >
{% for lib in library_list %}
- <tr about="/library/{{lib.library_id}}">
- <td ><a href="/library/{{ lib.library_id }}">{{ lib.amplified_from }}</a></td>
- <td ><a href="/library/{{ lib.library_id }}" property="libns:library_id">{{ lib.library_id }}</a></td>
- <td ><a href="/library/{{ lib.library_id }}" property="libns:species_name">{{ lib.species_name }}</a></td>
- <td ><a href="/library/{{ lib.library_id }}" property="libns:library_name">{{ lib.library_name }}</a></td>
+ <tr about="{{lib.library.get_absolute_url}}">
+ <td ><a href="{{lib.library.get_absolute_url}}">{{ lib.amplified_from }}</a></td>
+ <td ><a href="{{lib.library.get_absolute_url}}"><span property="libns:library_id">{{ lib.library_id }}</span></a></td>
+ <td ><a href="{{lib.library.library_species.get_absolute_url}}" rel="libns:species"><span property="libns:species_name">{{ lib.species_name }}</span></a></td>
+ <td ><a href="{{ lib.library.get_absolute_url }}"><span property="libns:library_name">{{ lib.library_name }}</span></a></td>
<td bgcolor="#00BFFF">{{ lib.lanes_run.0.0 }}</td>
<td bgcolor="#00BFFF">{{ lib.lanes_run.0.1 }}</td>
<td bgcolor="#00BFFF">{{ lib.lanes_run.0.2 }}</td>
--- /dev/null
+{% extends "base_site.html" %}
+{% load adminmedia humanize i18n %}
+{% block extrahead %}
+ <!-- App Stuff -->
+ <link type="text/css" rel="stylesheet" href="/static/css/app.css" />
+ <script type="text/javascript" src="/static/js/jquery.min.js"></script>
+
+ {% block additional_javascript %}
+ {% endblock %}
+{% endblock %}
+
+{% block content %}
+<div id="genome_detail">
+ <h2>About this Genome</h2>
+ <b>Common Name</b>:
+ <span property="libns:species">{{ species.common_name}}</span><br/>
+ <b>Scientific Name</b>:
+ <span property="libns:species">{{ species.scientific_name}}</span><br/>
+</div>
+{% endblock %}
#databrowse.site.register(Library)
#databrowse.site.register(FlowCell)
-from htsworkflow.frontend import settings
+from django.conf import settings
urlpatterns = patterns('',
#(r'^admin/(.*)', admin.site.root),
# Experiments:
(r'^experiments/', include('htsworkflow.frontend.experiments.urls')),
+ # Flowcell:
+ (r'^lane/(?P<lane_pk>\w+)',
+ 'htsworkflow.frontend.experiments.views.flowcell_lane_detail'),
+ (r'^flowcell/(?P<flowcell_id>\w+)/((?P<lane_number>\w+)/)?$',
+ 'htsworkflow.frontend.experiments.views.flowcell_detail'),
# AnalysTrack:
#(r'^analysis/', include('htsworkflow.frontend.analysis.urls')),
# Inventory urls
return time.mktime(self.date.timetuple())
time = property(_get_time, doc='return run time as seconds since epoch')
+ def _get_experiment_root(self):
+ if self.tree is None:
+ return None
+ return self.tree.findtext('ChipWideRunParameters/EXPT_DIR_ROOT')
+
+ def _get_runfolder_name(self):
+ if self.tree is None:
+ return None
+
+ root = self._get_experiment_root()
+ if root is None:
+ root = ''
+ else:
+ root = os.path.join(root,'')
+
+ experiment_dir = self.tree.findtext('ChipWideRunParameters/EXPT_DIR')
+ if experiment_dir is None:
+ return None
+ experiment_dir = experiment_dir.replace(root, '')
+ if len(experiment_dir) == 0:
+ return None
+
+ dirnames = experiment_dir.split(os.path.sep)
+ return dirnames[0]
+ runfolder_name = property(_get_runfolder_name)
+
def _get_version(self):
if self.tree is None:
return None
VERSION_RE = "([0-9\.]+)"
USER_RE = "([a-zA-Z0-9]+)"
LANES_PER_FLOWCELL = 8
-LANE_LIST = range(1, LANES_PER_FLOWCELL+1)
+LANE_LIST = range(1, LANES_PER_FLOWCELL + 1)
from htsworkflow.util.alphanum import alphanum
from htsworkflow.util.ethelp import indent, flatten
return self._flowcell_id
flowcell_id = property(_get_flowcell_id)
+ def _get_runfolder_name(self):
+ if self.gerald is None:
+ return None
+ else:
+ return self.gerald.runfolder_name
+ runfolder_name = property(_get_runfolder_name)
+
def get_elements(self):
"""
make one master xml file from all of our sub-components.
if self._name is None:
tmax = max(self.image_analysis.time, self.bustard.time, self.gerald.time)
timestamp = time.strftime('%Y-%m-%d', time.localtime(tmax))
- self._name = 'run_'+self.flowcell_id+"_"+timestamp+'.xml'
+ self._name = 'run_' + self.flowcell_id + "_" + timestamp + '.xml'
return self._name
name = property(_get_run_name)
def save(self, destdir=None):
if destdir is None:
destdir = ''
- logging.info("Saving run report "+ self.name)
+ logging.info("Saving run report " + self.name)
xml = self.get_elements()
indent(xml)
dest_pathname = os.path.join(destdir, self.name)
logging.info('Searching for runs in ' + datadir)
runs = []
# scan for firecrest directories
- for firecrest_pathname in glob(os.path.join(datadir,"*Firecrest*")):
+ for firecrest_pathname in glob(os.path.join(datadir, "*Firecrest*")):
logging.info('Found firecrest in ' + datadir)
image_analysis = firecrest.firecrest(firecrest_pathname)
if image_analysis is None:
image_analysis = ipar.ipar(ipar_pathname)
if image_analysis is None:
logging.warn(
- "%s is an empty or invalid IPAR directory" %(ipar_pathname,)
+ "%s is an empty or invalid IPAR directory" % (ipar_pathname,)
)
else:
scan_post_image_analysis(
bustard_dir = os.path.abspath(os.path.join(gerald_dir, '..'))
image_dir = os.path.abspath(os.path.join(gerald_dir, '..', '..'))
- runfolder_dir = os.path.abspath(os.path.join(image_dir, '..','..'))
-
+ runfolder_dir = os.path.abspath(os.path.join(image_dir, '..', '..'))
+
logging.info('--- use-run detected options ---')
logging.info('runfolder: %s' % (runfolder_dir,))
logging.info('image_dir: %s' % (image_dir,))
# leaf directory should be an IPAR or firecrest directory
data_dir, short_image_dir = os.path.split(image_dir)
logging.info('data_dir: %s' % (data_dir,))
- logging.info('short_iamge_dir: %s' %(short_image_dir,))
+ logging.info('short_image_dir: %s' % (short_image_dir,))
# guess which type of image processing directory we have by looking
# in the leaf directory name
p.image_analysis = image_run
p.bustard = base_calling_run
p.gerald = gerald_run
-
+
logging.info('Constructed PipelineRun from %s' % (gerald_dir,))
return p
def summarize_lane(gerald, lane_id):
report = []
summary_results = gerald.summary.lane_results
- for end in range(len(summary_results)):
+ for end in range(len(summary_results)):
eland_result = gerald.eland_results.results[end][lane_id]
report.append("Sample name %s" % (eland_result.sample_name))
report.append("Lane id %s end %s" % (eland_result.lane_id, end))
if hasattr(eland_result, 'match_codes'):
mc = eland_result.match_codes
nm = mc['NM']
- nm_percent = float(nm)/eland_result.reads * 100
+ nm_percent = float(nm) / eland_result.reads * 100
qc = mc['QC']
- qc_percent = float(qc)/eland_result.reads * 100
+ qc_percent = float(qc) / eland_result.reads * 100
report.append("No Match: %d (%2.2g %%)" % (nm, nm_percent))
report.append("QC Failed: %d (%2.2g %%)" % (qc, qc_percent))
plot_target_path = os.path.join(cycle_dir, 'Plots')
if os.path.exists(plot_html):
- logging.debug("Saving %s" % ( plot_html, ))
- logging.debug("Saving %s" % ( plot_images, ))
+ logging.debug("Saving %s" % (plot_html,))
+ logging.debug("Saving %s" % (plot_images,))
shutil.copy(plot_html, cycle_dir)
if not os.path.exists(plot_target_path):
- os.mkdir(plot_target_path)
+ os.mkdir(plot_target_path)
for plot_file in glob(plot_images):
shutil.copy(plot_file, plot_target_path)
else:
tar_cmd = ['tar', 'c'] + score_files
bzip_cmd = [ 'bzip2', '-9', '-c' ]
- tar_dest_name =os.path.join(cycle_dir, 'scores.tar.bz2')
+ tar_dest_name = os.path.join(cycle_dir, 'scores.tar.bz2')
tar_dest = open(tar_dest_name, 'w')
logging.info("Compressing score files from %s" % (scores_path,))
logging.info("Running tar: " + " ".join(tar_cmd[:10]))
logging.info("Running bzip2: " + " ".join(bzip_cmd))
- logging.info("Writing to %s" %(tar_dest_name,))
+ logging.info("Writing to %s" % (tar_dest_name,))
env = {'BZIP': '-9'}
tar = subprocess.Popen(tar_cmd, stdout=subprocess.PIPE, shell=False, env=env,
"""
# copy & bzip eland files
bz_commands = []
-
+
for lanes_dictionary in gerald_object.eland_results.results:
for eland_lane in lanes_dictionary.values():
source_name = eland_lane.pathname
dest_name = os.path.join(cycle_dir, name)
logging.info("Saving eland file %s to %s" % \
(source_name, dest_name))
-
+
if is_compressed(name):
- logging.info('Already compressed, Saving to %s' % (dest_name, ))
+ logging.info('Already compressed, Saving to %s' % (dest_name,))
shutil.copy(source_name, dest_name)
else:
# not compressed
#bzip = subprocess.Popen(args, stdout=bzip_dest)
#logging.info('Saving to %s' % (dest_name, ))
#bzip.wait()
-
+
if len(bz_commands) > 0:
q = QueueCommands(bz_commands, num_jobs)
q.run()
-
-
-def extract_results(runs, output_base_dir=None, site="individual", num_jobs=1):
+
+
+def extract_results(runs, output_base_dir=None, site="individual", num_jobs=1, raw_format='qseq'):
"""
Iterate over runfolders in runs extracting the most useful information.
* run parameters (in run-*.xml)
r.save(cycle_dir)
# save illumina flowcell status report
- save_flowcell_reports( os.path.join(r.image_analysis.pathname, '..'), cycle_dir )
-
+ save_flowcell_reports(os.path.join(r.image_analysis.pathname, '..'), cycle_dir)
+
# save stuff from bustard
# grab IVC plot
save_ivc_plot(r.bustard, cycle_dir)
# build base call saving commands
if site is not None:
lanes = []
- for lane in range(1,9):
+ for lane in range(1, 9):
if r.gerald.lanes[lane].analysis != 'none':
lanes.append(lane)
run_name = srf.pathname_to_run_name(r.pathname)
- srf_cmds = srf.make_qseq_commands(run_name, r.bustard.pathname, lanes, site, cycle_dir)
- srf.run_commands(r.bustard.pathname, srf_cmds, num_jobs)
+ if raw_format == 'qseq':
+ seq_cmds = srf.make_qseq_commands(run_name, r.bustard.pathname, lanes, site, cycle_dir)
+ elif raw_format == 'srf':
+ seq_cmds = srf.make_srf_commands(run_name, r.bustard.pathname, lanes, site, cycle_dir, 0)
+ else:
+ raise ValueError('Unknown --raw-format=%s' % (raw_format))
+ srf.run_commands(r.bustard.pathname, seq_cmds, num_jobs)
# save stuff from GERALD
# copy stuff out of the main run
# save summary file
save_summary_file(g, cycle_dir)
-
+
# compress eland result files
compress_eland_results(g, cycle_dir, num_jobs)
-
+
# md5 all the compressed files once we're done
md5_commands = srf.make_md5_commands(cycle_dir)
srf.run_commands(cycle_dir, md5_commands, num_jobs)
-
+
def rm_list(files, dry_run=True):
for f in files:
if os.path.exists(f):
else:
os.unlink(f)
else:
- logging.warn("%s doesn't exist."% (f,))
+ logging.warn("%s doesn't exist." % (f,))
def clean_runs(runs, dry_run=True):
"""
logging.info("Cleaning intermediate files")
# make clean_intermediate
if os.path.exists(os.path.join(run.image_analysis.pathname, 'Makefile')):
- clean_process = subprocess.Popen(['make', 'clean_intermediate'],
+ clean_process = subprocess.Popen(['make', 'clean_intermediate'],
cwd=run.image_analysis.pathname,)
clean_process.wait()
rest, flowcell = os.path.split(rest)
cycle_match = re.match("C(?P<start>[0-9]+)-(?P<stop>[0-9]+)", cycle)
if cycle_match is None:
- raise ValueError("Expected .../flowcell/cycle/ directory structure")
+ raise ValueError(
+ "Expected .../flowcell/cycle/ directory structure in %s" % \
+ (path,))
start = cycle_match.group('start')
if start is not None:
start = int(start)
return SequenceFile('qseq', fullpath, flowcell, lane, read, cycle=stop)
def parse_fastq(path, filename):
+ """Parse fastq names
+ """
flowcell_dir, start, stop = get_flowcell_cycle(path)
basename, ext = os.path.splitext(filename)
records = basename.split('_')
flowcell = records[4]
lane = int(records[5][1])
read = int(records[6][1])
- if records[-1].startswith('pass'):
- pf = True
- elif records[-1].startswith('nopass'):
- pf = False
- else:
- raise ValueError("Unrecognized fastq name")
-
+ pf = parse_fastq_pf_flag(records)
+
if flowcell_dir != flowcell:
logging.warn("flowcell %s found in wrong directory %s" % \
(flowcell, path))
return SequenceFile('fastq', fullpath, flowcell, lane, read, pf=pf, cycle=stop)
+def parse_fastq_pf_flag(records):
+ """Take a fastq filename split on _ and look for the pass-filter flag
+ """
+ if len(records) < 8:
+ pf = None
+ else:
+ fastq_type = records[-1].lower()
+ if fastq_type.startswith('pass'):
+ pf = True
+ elif fastq_type.startswith('nopass'):
+ pf = False
+ elif fastq_type.startswith('all'):
+ pf = None
+ else:
+ raise ValueError("Unrecognized fastq name %s" % \
+ (records[-1],))
+
+ return pf
+
def parse_eland(path, filename, eland_match=None):
if eland_match is None:
eland_match = eland_re.match(filename)
sequences = []
for d in dirs:
logging.info("Scanning %s for sequences" % (d,))
+ if not os.path.exists(d):
+ logging.warn("Flowcell directory %s does not exist" % (d,))
+ continue
+
for path, dirname, filenames in os.walk(d):
for f in filenames:
seq = None
destdir - where to write all the srf files
"""
# clean up pathname
- logging.info("run_name %s" % ( run_name, ))
-
+ logging.info("run_name %s" % (run_name,))
+
cmd_list = []
for lane in lanes:
name_prefix = '%s_%%l_%%t_' % (run_name,)
seq_pattern = 's_%d_*_seq.txt' % (lane,)
if cmdlevel == SOLEXA2SRF:
- cmd = ['solexa2srf',
+ cmd = ['solexa2srf',
'-N', name_prefix,
- '-n', '%3x:%3y',
- '-o', dest_path,
+ '-n', '%t:%3x:%3y',
+ '-o', dest_path,
seq_pattern]
elif cmdlevel == ILLUMINA2SRF10:
- cmd = ['illumina2srf',
+ cmd = ['illumina2srf',
'-v1.0',
'-o', dest_path,
seq_pattern]
elif cmdlevel == ILLUMINA2SRF11:
seq_pattern = 's_%d_*_qseq.txt' % (lane,)
- cmd = ['illumina2srf',
+ cmd = ['illumina2srf',
'-o', dest_path,
seq_pattern]
else:
qseqs = [ os.path.split(x)[-1] for x in qseqs ]
if len(qseqs[0].split('_')) == 4:
# single ended
- return [(None,"s_%d_[0-9][0-9][0-9][0-9]_qseq.txt")]
+ return [(None, "s_%d_[0-9][0-9][0-9][0-9]_qseq.txt")]
elif len(qseqs[0].split('_')) == 5:
# more than 1 read
# build a dictionary of read numbers by lane
return qseq_patterns
else:
raise RuntimeError('unrecognized qseq pattern, not a single or multiple read pattern')
-
+
def make_qseq_commands(run_name, bustard_dir, lanes, site_name, destdir, cmdlevel=ILLUMINA2SRF11):
"""
make a subprocess-friendly list of command line arguments to run solexa2srf
destdir - where to write all the srf files
"""
# clean up pathname
- logging.info("run_name %s" % ( run_name, ))
-
+ logging.info("run_name %s" % (run_name,))
+
cmd_list = []
for lane in lanes:
name_prefix = '%s_%%l_%%t_' % (run_name,)
else:
destname = '%s_%s_l%d_r%d.tar.bz2' % (site_name, run_name, lane, read)
dest_path = os.path.join(destdir, destname)
-
+
cmd = " ".join(['tar', 'cjf', dest_path, pattern % (lane,) ])
logging.info("Generated command: " + cmd)
cmd_list.append(cmd)
-
+
return cmd_list
def run_commands(new_dir, cmd_list, num_jobs):
q = queuecommands.QueueCommands(cmd_list, num_jobs)
q.run()
os.chdir(curdir)
-
+
def make_md5_commands(destdir):
"""
Scan the cycle dir and create md5s for the contents
"""
cmd_list = []
destdir = os.path.abspath(destdir)
- bz2s = glob(os.path.join(destdir,"*.bz2"))
- gzs = glob(os.path.join(destdir,"*gz"))
- srfs = glob(os.path.join(destdir,"*.srf"))
+ bz2s = glob(os.path.join(destdir, "*.bz2"))
+ gzs = glob(os.path.join(destdir, "*gz"))
+ srfs = glob(os.path.join(destdir, "*.srf"))
file_list = bz2s + gzs + srfs
for f in file_list:
- cmd = " ".join(['md5sum', f, '>', f+'.md5'])
- logging.info('generated command: '+cmd)
+ cmd = " ".join(['md5sum', f, '>', f + '.md5'])
+ logging.info('generated command: ' + cmd)
cmd_list.append(cmd)
return cmd_list
# open the srf, fastq, or compressed fastq
if is_srf(args[0]):
- source = srf_open(args[0])
+ source = srf_open(args[0], opts.cnf1)
else:
source = autoopen(args[0])
help="show information about what we're doing.")
parser.add_option('--version', default=False, action="store_true",
help="Report software version")
+ parser.add_option('--cnf1', default=False, action="store_true",
+ help="Force cnf1 mode in srf2fastq")
return parser
Make a stream from srf file using srf2fastq
"""
cmd = ['srf2fastq']
- if is_cnf1(filename):
+ if cnf1 or is_cnf1(filename):
cmd.append('-c')
cmd.append(filename)
+++ /dev/null
-import os
-import unittest
-from StringIO import StringIO
-
-from simulate_runfolder import TESTDATA_DIR
-from htsworkflow.pipelines.runfolder import load_pipeline_run_xml
-
-class testLoadRunXML(unittest.TestCase):
- def testVerson0(self):
- runxml_path = os.path.join(TESTDATA_DIR, 'run_FC12150_2007-09-27.xml')
- run = load_pipeline_run_xml(runxml_path)
- eland_summary_by_lane = run.gerald.eland_results.results[0]
- assert len(eland_summary_by_lane) == 8
-
- def testVerson1(self):
- runxml_path = os.path.join(TESTDATA_DIR, 'run_207B2AAXX_2008-04-12.xml')
- run = load_pipeline_run_xml(runxml_path)
- eland_summary_by_lane = run.gerald.eland_results.results[0]
- assert len(eland_summary_by_lane) == 8
-
-def suite():
- return unittest.makeSuite(testLoadRunXML,'test')
-
-if __name__ == "__main__":
- unittest.main(defaultTest="suite")
pass
def test_format_gerald(self):
- flowcell_request = self.client.get('/experiments/config/303TUAAXX/json', apidata)
+ flowcell_request = self.client.get('/experiments/config/FC12150/json', apidata)
self.failUnlessEqual(flowcell_request.status_code, 200)
print dir(flowcell_request)
flowcell_info = json.loads(flowcell_request.content)
- options = getCombinedOptions(['-f','303TUAAXX','-g',os.getcwd()])
+ options = getCombinedOptions(['-f','FC12150','-g',os.getcwd()])
genome_map = {u'Homo sapiens': '/tmp/hg18' }
config = format_gerald_config(options, flowcell_info, genome_map)
--- /dev/null
+import os
+import unittest
+from StringIO import StringIO
+
+from simulate_runfolder import TESTDATA_DIR
+from htsworkflow.pipelines.runfolder import load_pipeline_run_xml
+
+class testLoadRunXML(unittest.TestCase):
+
+ def _check_run_xml(self, run_xml_name, results):
+ run_xml_path = os.path.join(TESTDATA_DIR, run_xml_name)
+ run = load_pipeline_run_xml(run_xml_path)
+
+ self.failUnlessEqual(run.image_analysis.start, results['cycle_start'])
+ self.failUnlessEqual(run.image_analysis.stop, results['cycle_stop'])
+
+ eland_summary_by_lane = run.gerald.eland_results.results[0]
+ self.failUnlessEqual(len(eland_summary_by_lane), 8)
+
+ runfolder_name = results['runfolder_name']
+ self.failUnlessEqual(run.runfolder_name, runfolder_name)
+ self.failUnlessEqual(run.gerald.runfolder_name, runfolder_name)
+
+ for (end, lane), lane_results in results['lane_results'].items():
+ for name, test_value in lane_results.items():
+ xml_value = getattr(run.gerald.summary[end][lane], name)
+
+ self.failUnlessEqual(xml_value, test_value,
+ "%s[%s][%s]: %s %s != %s" % (run_xml_name, end, lane, name, xml_value, test_value))
+
+ def testVersion0(self):
+ run_xml_name = 'run_FC12150_2007-09-27.xml'
+ results = {'runfolder_name': '070924_USI-EAS44_0022_FC12150',
+ 'cycle_start': 1,
+ 'cycle_stop': 36,
+ 'lane_results': {
+ # end, lane
+ (0, 1): {
+ 'average_alignment_score': (12116.63, 596.07),
+ 'average_first_cycle_intensity': (500,36),
+ 'cluster': (31261, 6010),
+ 'cluster_pass_filter': None,
+ 'percent_error_rate': (2.07, 0.38),
+ 'percent_intensity_after_20_cycles': (74.74, 3.78),
+ 'percent_pass_filter_align': None,
+ 'percent_pass_filter_clusters': (27.38, 7.31),
+ }
+ }
+ }
+ self._check_run_xml(run_xml_name, results)
+
+ def testVersion1(self):
+
+ run_xml_name = 'run_207B2AAXX_2008-04-12.xml'
+ results = {'runfolder_name': '080408_HWI-EAS229_0023_207B2AAXX',
+ 'cycle_start': 1,
+ 'cycle_stop': 33,
+ 'lane_results': {
+ # end, lane
+ }
+ }
+ self._check_run_xml(run_xml_name, results)
+
+ def testVersion2(self):
+ run_xml_name = 'run_62DJMAAXX_2011-01-09.xml'
+ results = {'runfolder_name': '101229_ILLUMINA-EC5D15_00026_62DJMAAXX',
+ 'cycle_start': 1,
+ 'cycle_stop': 152,
+ 'lane_results': {
+ # end, lane
+ (0, 2): {
+ 'average_alignment_score': (171.98, 1.4),
+ 'average_first_cycle_intensity': (381, 5),
+ 'cluster': (443170, 10241),
+ 'cluster_pass_filter': (362709, 8335),
+ 'percent_error_rate': (4.13, 0.14),
+ 'percent_intensity_after_20_cycles': (85.89, 3.26),
+ 'percent_pass_filter_align': (79.73, 0.23),
+ 'percent_pass_filter_clusters': (81.85, 0.8),
+ },
+ (0, 5): {
+ 'average_alignment_score': None,
+ 'average_first_cycle_intensity': (362, 4),
+ 'cluster': (310619, 15946),
+ 'cluster_pass_filter': (277584, 13858),
+ 'percent_error_rate': None,
+ 'percent_intensity_after_20_cycles': (90.35, 1.12),
+ 'percent_pass_filter_align': None,
+ 'percent_pass_filter_clusters': (89.37, 0.25),
+ }
+ }
+ }
+ self._check_run_xml(run_xml_name, results)
+
+def suite():
+ return unittest.makeSuite(testLoadRunXML,'test')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
--- /dev/null
+<PipelineRun>
+ <FlowcellID>62DJMAAXX</FlowcellID>
+ <IPAR version="1">
+ <timestamp>1294547631</timestamp>
+ <Run Name="Intensities">
+ <Cycles First="1" Last="152" Number="152" />
+ <ImageParameters>
+ <AutoOffsetFlag>0</AutoOffsetFlag>
+ <AutoSizeFlag>0</AutoSizeFlag>
+ <Fwhm>0</Fwhm>
+ <RemappingDistance>0</RemappingDistance>
+ <Threshold>0</Threshold>
+ </ImageParameters>
+ <RunParameters>
+ <AutoCycleFlag>0</AutoCycleFlag>
+ <BasecallFlag>0</BasecallFlag>
+ <Deblocked>0</Deblocked>
+ <DebugFlag>0</DebugFlag>
+ <FirstRunOnlyFlag>0</FirstRunOnlyFlag>
+ <ImagingReads Index="1">
+ <FirstCycle>1</FirstCycle>
+ <LastCycle>76</LastCycle>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ </ImagingReads>
+ <ImagingReads Index="2">
+ <FirstCycle>77</FirstCycle>
+ <LastCycle>152</LastCycle>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ </ImagingReads>
+ <Instrument>ILLUMINA-EC5D15</Instrument>
+ <IterativeMatrixFlag>0</IterativeMatrixFlag>
+ <MakeFlag>0</MakeFlag>
+ <MaxCycle>0</MaxCycle>
+ <MinCycle>0</MinCycle>
+ <Reads Index="1">
+ <FirstCycle>1</FirstCycle>
+ <LastCycle>76</LastCycle>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ </Reads>
+ <Reads Index="2">
+ <FirstCycle>77</FirstCycle>
+ <LastCycle>152</LastCycle>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ </Reads>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ <RunFolderDate>101229</RunFolderDate>
+ <RunFolderId>00026</RunFolderId>
+ <QTableVersion>PL14</QTableVersion>
+ </RunParameters>
+ <Software Name="RTA" Version="1.8.70.0" />
+ <TileSelection>
+ <Lane Index="1">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>10</Tile>
+ <Tile>3</Tile>
+ <Tile>2</Tile>
+ <Tile>13</Tile>
+ <Tile>6</Tile>
+ <Tile>5</Tile>
+ <Tile>16</Tile>
+ <Tile>9</Tile>
+ <Tile>8</Tile>
+ <Tile>19</Tile>
+ <Tile>12</Tile>
+ <Tile>11</Tile>
+ <Tile>22</Tile>
+ <Tile>15</Tile>
+ <Tile>14</Tile>
+ <Tile>25</Tile>
+ <Tile>18</Tile>
+ <Tile>17</Tile>
+ <Tile>28</Tile>
+ <Tile>21</Tile>
+ <Tile>20</Tile>
+ <Tile>31</Tile>
+ <Tile>24</Tile>
+ <Tile>23</Tile>
+ <Tile>34</Tile>
+ <Tile>27</Tile>
+ <Tile>26</Tile>
+ <Tile>37</Tile>
+ <Tile>30</Tile>
+ <Tile>29</Tile>
+ <Tile>40</Tile>
+ <Tile>33</Tile>
+ <Tile>32</Tile>
+ <Tile>43</Tile>
+ <Tile>36</Tile>
+ <Tile>35</Tile>
+ <Tile>46</Tile>
+ <Tile>39</Tile>
+ <Tile>38</Tile>
+ <Tile>49</Tile>
+ <Tile>42</Tile>
+ <Tile>41</Tile>
+ <Tile>52</Tile>
+ <Tile>45</Tile>
+ <Tile>44</Tile>
+ <Tile>55</Tile>
+ <Tile>48</Tile>
+ <Tile>47</Tile>
+ <Tile>58</Tile>
+ <Tile>51</Tile>
+ <Tile>50</Tile>
+ <Tile>61</Tile>
+ <Tile>54</Tile>
+ <Tile>53</Tile>
+ <Tile>64</Tile>
+ <Tile>57</Tile>
+ <Tile>56</Tile>
+ <Tile>67</Tile>
+ <Tile>60</Tile>
+ <Tile>59</Tile>
+ <Tile>70</Tile>
+ <Tile>63</Tile>
+ <Tile>62</Tile>
+ <Tile>73</Tile>
+ <Tile>66</Tile>
+ <Tile>65</Tile>
+ <Tile>76</Tile>
+ <Tile>69</Tile>
+ <Tile>68</Tile>
+ <Tile>79</Tile>
+ <Tile>72</Tile>
+ <Tile>71</Tile>
+ <Tile>82</Tile>
+ <Tile>75</Tile>
+ <Tile>85</Tile>
+ <Tile>74</Tile>
+ <Tile>78</Tile>
+ <Tile>88</Tile>
+ <Tile>77</Tile>
+ <Tile>81</Tile>
+ <Tile>91</Tile>
+ <Tile>80</Tile>
+ <Tile>84</Tile>
+ <Tile>94</Tile>
+ <Tile>83</Tile>
+ <Tile>87</Tile>
+ <Tile>97</Tile>
+ <Tile>86</Tile>
+ <Tile>90</Tile>
+ <Tile>100</Tile>
+ <Tile>89</Tile>
+ <Tile>93</Tile>
+ <Tile>92</Tile>
+ <Tile>103</Tile>
+ <Tile>96</Tile>
+ <Tile>95</Tile>
+ <Tile>99</Tile>
+ <Tile>106</Tile>
+ <Tile>98</Tile>
+ <Tile>102</Tile>
+ <Tile>109</Tile>
+ <Tile>101</Tile>
+ <Tile>105</Tile>
+ <Tile>112</Tile>
+ <Tile>104</Tile>
+ <Tile>115</Tile>
+ <Tile>108</Tile>
+ <Tile>107</Tile>
+ <Tile>118</Tile>
+ <Tile>111</Tile>
+ <Tile>110</Tile>
+ <Tile>114</Tile>
+ <Tile>113</Tile>
+ <Tile>117</Tile>
+ <Tile>116</Tile>
+ <Tile>120</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ <Lane Index="2">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>10</Tile>
+ <Tile>3</Tile>
+ <Tile>2</Tile>
+ <Tile>13</Tile>
+ <Tile>6</Tile>
+ <Tile>5</Tile>
+ <Tile>16</Tile>
+ <Tile>9</Tile>
+ <Tile>8</Tile>
+ <Tile>19</Tile>
+ <Tile>12</Tile>
+ <Tile>11</Tile>
+ <Tile>15</Tile>
+ <Tile>22</Tile>
+ <Tile>18</Tile>
+ <Tile>14</Tile>
+ <Tile>25</Tile>
+ <Tile>21</Tile>
+ <Tile>17</Tile>
+ <Tile>28</Tile>
+ <Tile>24</Tile>
+ <Tile>20</Tile>
+ <Tile>31</Tile>
+ <Tile>27</Tile>
+ <Tile>23</Tile>
+ <Tile>34</Tile>
+ <Tile>30</Tile>
+ <Tile>26</Tile>
+ <Tile>37</Tile>
+ <Tile>29</Tile>
+ <Tile>33</Tile>
+ <Tile>40</Tile>
+ <Tile>36</Tile>
+ <Tile>32</Tile>
+ <Tile>43</Tile>
+ <Tile>39</Tile>
+ <Tile>35</Tile>
+ <Tile>46</Tile>
+ <Tile>42</Tile>
+ <Tile>38</Tile>
+ <Tile>49</Tile>
+ <Tile>45</Tile>
+ <Tile>52</Tile>
+ <Tile>41</Tile>
+ <Tile>48</Tile>
+ <Tile>55</Tile>
+ <Tile>44</Tile>
+ <Tile>51</Tile>
+ <Tile>58</Tile>
+ <Tile>47</Tile>
+ <Tile>54</Tile>
+ <Tile>61</Tile>
+ <Tile>50</Tile>
+ <Tile>57</Tile>
+ <Tile>64</Tile>
+ <Tile>53</Tile>
+ <Tile>60</Tile>
+ <Tile>67</Tile>
+ <Tile>56</Tile>
+ <Tile>63</Tile>
+ <Tile>70</Tile>
+ <Tile>59</Tile>
+ <Tile>66</Tile>
+ <Tile>73</Tile>
+ <Tile>62</Tile>
+ <Tile>69</Tile>
+ <Tile>76</Tile>
+ <Tile>65</Tile>
+ <Tile>72</Tile>
+ <Tile>79</Tile>
+ <Tile>68</Tile>
+ <Tile>75</Tile>
+ <Tile>82</Tile>
+ <Tile>71</Tile>
+ <Tile>78</Tile>
+ <Tile>85</Tile>
+ <Tile>74</Tile>
+ <Tile>81</Tile>
+ <Tile>88</Tile>
+ <Tile>77</Tile>
+ <Tile>84</Tile>
+ <Tile>80</Tile>
+ <Tile>91</Tile>
+ <Tile>87</Tile>
+ <Tile>83</Tile>
+ <Tile>94</Tile>
+ <Tile>90</Tile>
+ <Tile>86</Tile>
+ <Tile>97</Tile>
+ <Tile>93</Tile>
+ <Tile>89</Tile>
+ <Tile>100</Tile>
+ <Tile>96</Tile>
+ <Tile>92</Tile>
+ <Tile>103</Tile>
+ <Tile>99</Tile>
+ <Tile>95</Tile>
+ <Tile>106</Tile>
+ <Tile>102</Tile>
+ <Tile>98</Tile>
+ <Tile>109</Tile>
+ <Tile>105</Tile>
+ <Tile>101</Tile>
+ <Tile>112</Tile>
+ <Tile>108</Tile>
+ <Tile>104</Tile>
+ <Tile>115</Tile>
+ <Tile>111</Tile>
+ <Tile>118</Tile>
+ <Tile>107</Tile>
+ <Tile>114</Tile>
+ <Tile>110</Tile>
+ <Tile>117</Tile>
+ <Tile>113</Tile>
+ <Tile>120</Tile>
+ <Tile>116</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ <Lane Index="3">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>3</Tile>
+ <Tile>10</Tile>
+ <Tile>6</Tile>
+ <Tile>13</Tile>
+ <Tile>2</Tile>
+ <Tile>9</Tile>
+ <Tile>16</Tile>
+ <Tile>5</Tile>
+ <Tile>12</Tile>
+ <Tile>19</Tile>
+ <Tile>8</Tile>
+ <Tile>15</Tile>
+ <Tile>22</Tile>
+ <Tile>11</Tile>
+ <Tile>18</Tile>
+ <Tile>25</Tile>
+ <Tile>14</Tile>
+ <Tile>21</Tile>
+ <Tile>28</Tile>
+ <Tile>17</Tile>
+ <Tile>24</Tile>
+ <Tile>31</Tile>
+ <Tile>20</Tile>
+ <Tile>27</Tile>
+ <Tile>34</Tile>
+ <Tile>23</Tile>
+ <Tile>30</Tile>
+ <Tile>37</Tile>
+ <Tile>26</Tile>
+ <Tile>33</Tile>
+ <Tile>40</Tile>
+ <Tile>29</Tile>
+ <Tile>36</Tile>
+ <Tile>43</Tile>
+ <Tile>32</Tile>
+ <Tile>39</Tile>
+ <Tile>46</Tile>
+ <Tile>35</Tile>
+ <Tile>42</Tile>
+ <Tile>49</Tile>
+ <Tile>38</Tile>
+ <Tile>45</Tile>
+ <Tile>52</Tile>
+ <Tile>41</Tile>
+ <Tile>48</Tile>
+ <Tile>55</Tile>
+ <Tile>44</Tile>
+ <Tile>51</Tile>
+ <Tile>58</Tile>
+ <Tile>47</Tile>
+ <Tile>54</Tile>
+ <Tile>61</Tile>
+ <Tile>50</Tile>
+ <Tile>57</Tile>
+ <Tile>64</Tile>
+ <Tile>53</Tile>
+ <Tile>60</Tile>
+ <Tile>67</Tile>
+ <Tile>56</Tile>
+ <Tile>63</Tile>
+ <Tile>70</Tile>
+ <Tile>59</Tile>
+ <Tile>66</Tile>
+ <Tile>62</Tile>
+ <Tile>73</Tile>
+ <Tile>69</Tile>
+ <Tile>65</Tile>
+ <Tile>76</Tile>
+ <Tile>72</Tile>
+ <Tile>68</Tile>
+ <Tile>79</Tile>
+ <Tile>71</Tile>
+ <Tile>75</Tile>
+ <Tile>82</Tile>
+ <Tile>74</Tile>
+ <Tile>78</Tile>
+ <Tile>85</Tile>
+ <Tile>77</Tile>
+ <Tile>81</Tile>
+ <Tile>88</Tile>
+ <Tile>80</Tile>
+ <Tile>84</Tile>
+ <Tile>91</Tile>
+ <Tile>83</Tile>
+ <Tile>87</Tile>
+ <Tile>94</Tile>
+ <Tile>86</Tile>
+ <Tile>90</Tile>
+ <Tile>97</Tile>
+ <Tile>89</Tile>
+ <Tile>93</Tile>
+ <Tile>100</Tile>
+ <Tile>92</Tile>
+ <Tile>96</Tile>
+ <Tile>103</Tile>
+ <Tile>95</Tile>
+ <Tile>99</Tile>
+ <Tile>106</Tile>
+ <Tile>98</Tile>
+ <Tile>102</Tile>
+ <Tile>109</Tile>
+ <Tile>101</Tile>
+ <Tile>105</Tile>
+ <Tile>112</Tile>
+ <Tile>104</Tile>
+ <Tile>108</Tile>
+ <Tile>115</Tile>
+ <Tile>107</Tile>
+ <Tile>111</Tile>
+ <Tile>118</Tile>
+ <Tile>110</Tile>
+ <Tile>114</Tile>
+ <Tile>113</Tile>
+ <Tile>117</Tile>
+ <Tile>116</Tile>
+ <Tile>120</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ <Lane Index="4">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>3</Tile>
+ <Tile>10</Tile>
+ <Tile>6</Tile>
+ <Tile>2</Tile>
+ <Tile>13</Tile>
+ <Tile>9</Tile>
+ <Tile>5</Tile>
+ <Tile>16</Tile>
+ <Tile>12</Tile>
+ <Tile>8</Tile>
+ <Tile>19</Tile>
+ <Tile>11</Tile>
+ <Tile>15</Tile>
+ <Tile>22</Tile>
+ <Tile>14</Tile>
+ <Tile>18</Tile>
+ <Tile>25</Tile>
+ <Tile>17</Tile>
+ <Tile>21</Tile>
+ <Tile>28</Tile>
+ <Tile>20</Tile>
+ <Tile>24</Tile>
+ <Tile>31</Tile>
+ <Tile>23</Tile>
+ <Tile>27</Tile>
+ <Tile>34</Tile>
+ <Tile>30</Tile>
+ <Tile>26</Tile>
+ <Tile>37</Tile>
+ <Tile>33</Tile>
+ <Tile>29</Tile>
+ <Tile>40</Tile>
+ <Tile>36</Tile>
+ <Tile>32</Tile>
+ <Tile>43</Tile>
+ <Tile>35</Tile>
+ <Tile>39</Tile>
+ <Tile>46</Tile>
+ <Tile>38</Tile>
+ <Tile>42</Tile>
+ <Tile>49</Tile>
+ <Tile>45</Tile>
+ <Tile>41</Tile>
+ <Tile>52</Tile>
+ <Tile>48</Tile>
+ <Tile>44</Tile>
+ <Tile>55</Tile>
+ <Tile>51</Tile>
+ <Tile>47</Tile>
+ <Tile>58</Tile>
+ <Tile>54</Tile>
+ <Tile>50</Tile>
+ <Tile>61</Tile>
+ <Tile>57</Tile>
+ <Tile>53</Tile>
+ <Tile>64</Tile>
+ <Tile>60</Tile>
+ <Tile>56</Tile>
+ <Tile>67</Tile>
+ <Tile>63</Tile>
+ <Tile>59</Tile>
+ <Tile>70</Tile>
+ <Tile>66</Tile>
+ <Tile>62</Tile>
+ <Tile>73</Tile>
+ <Tile>69</Tile>
+ <Tile>65</Tile>
+ <Tile>76</Tile>
+ <Tile>72</Tile>
+ <Tile>68</Tile>
+ <Tile>79</Tile>
+ <Tile>75</Tile>
+ <Tile>71</Tile>
+ <Tile>82</Tile>
+ <Tile>78</Tile>
+ <Tile>74</Tile>
+ <Tile>85</Tile>
+ <Tile>81</Tile>
+ <Tile>77</Tile>
+ <Tile>88</Tile>
+ <Tile>84</Tile>
+ <Tile>80</Tile>
+ <Tile>91</Tile>
+ <Tile>87</Tile>
+ <Tile>83</Tile>
+ <Tile>94</Tile>
+ <Tile>90</Tile>
+ <Tile>86</Tile>
+ <Tile>97</Tile>
+ <Tile>93</Tile>
+ <Tile>89</Tile>
+ <Tile>100</Tile>
+ <Tile>96</Tile>
+ <Tile>92</Tile>
+ <Tile>103</Tile>
+ <Tile>99</Tile>
+ <Tile>95</Tile>
+ <Tile>106</Tile>
+ <Tile>102</Tile>
+ <Tile>98</Tile>
+ <Tile>109</Tile>
+ <Tile>105</Tile>
+ <Tile>101</Tile>
+ <Tile>112</Tile>
+ <Tile>108</Tile>
+ <Tile>104</Tile>
+ <Tile>115</Tile>
+ <Tile>111</Tile>
+ <Tile>107</Tile>
+ <Tile>118</Tile>
+ <Tile>114</Tile>
+ <Tile>110</Tile>
+ <Tile>117</Tile>
+ <Tile>113</Tile>
+ <Tile>120</Tile>
+ <Tile>116</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ <Lane Index="5">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>3</Tile>
+ <Tile>10</Tile>
+ <Tile>6</Tile>
+ <Tile>2</Tile>
+ <Tile>13</Tile>
+ <Tile>9</Tile>
+ <Tile>5</Tile>
+ <Tile>16</Tile>
+ <Tile>12</Tile>
+ <Tile>8</Tile>
+ <Tile>19</Tile>
+ <Tile>15</Tile>
+ <Tile>11</Tile>
+ <Tile>22</Tile>
+ <Tile>18</Tile>
+ <Tile>14</Tile>
+ <Tile>25</Tile>
+ <Tile>17</Tile>
+ <Tile>21</Tile>
+ <Tile>28</Tile>
+ <Tile>20</Tile>
+ <Tile>24</Tile>
+ <Tile>31</Tile>
+ <Tile>23</Tile>
+ <Tile>27</Tile>
+ <Tile>34</Tile>
+ <Tile>26</Tile>
+ <Tile>30</Tile>
+ <Tile>37</Tile>
+ <Tile>29</Tile>
+ <Tile>33</Tile>
+ <Tile>40</Tile>
+ <Tile>32</Tile>
+ <Tile>36</Tile>
+ <Tile>43</Tile>
+ <Tile>35</Tile>
+ <Tile>46</Tile>
+ <Tile>39</Tile>
+ <Tile>38</Tile>
+ <Tile>49</Tile>
+ <Tile>42</Tile>
+ <Tile>41</Tile>
+ <Tile>52</Tile>
+ <Tile>45</Tile>
+ <Tile>44</Tile>
+ <Tile>55</Tile>
+ <Tile>48</Tile>
+ <Tile>47</Tile>
+ <Tile>58</Tile>
+ <Tile>51</Tile>
+ <Tile>50</Tile>
+ <Tile>61</Tile>
+ <Tile>54</Tile>
+ <Tile>53</Tile>
+ <Tile>64</Tile>
+ <Tile>57</Tile>
+ <Tile>56</Tile>
+ <Tile>67</Tile>
+ <Tile>60</Tile>
+ <Tile>59</Tile>
+ <Tile>70</Tile>
+ <Tile>63</Tile>
+ <Tile>62</Tile>
+ <Tile>73</Tile>
+ <Tile>66</Tile>
+ <Tile>65</Tile>
+ <Tile>69</Tile>
+ <Tile>76</Tile>
+ <Tile>68</Tile>
+ <Tile>72</Tile>
+ <Tile>79</Tile>
+ <Tile>71</Tile>
+ <Tile>75</Tile>
+ <Tile>82</Tile>
+ <Tile>74</Tile>
+ <Tile>78</Tile>
+ <Tile>85</Tile>
+ <Tile>77</Tile>
+ <Tile>81</Tile>
+ <Tile>88</Tile>
+ <Tile>80</Tile>
+ <Tile>84</Tile>
+ <Tile>91</Tile>
+ <Tile>83</Tile>
+ <Tile>87</Tile>
+ <Tile>94</Tile>
+ <Tile>86</Tile>
+ <Tile>90</Tile>
+ <Tile>97</Tile>
+ <Tile>89</Tile>
+ <Tile>93</Tile>
+ <Tile>100</Tile>
+ <Tile>96</Tile>
+ <Tile>92</Tile>
+ <Tile>103</Tile>
+ <Tile>99</Tile>
+ <Tile>95</Tile>
+ <Tile>106</Tile>
+ <Tile>102</Tile>
+ <Tile>109</Tile>
+ <Tile>98</Tile>
+ <Tile>105</Tile>
+ <Tile>112</Tile>
+ <Tile>101</Tile>
+ <Tile>108</Tile>
+ <Tile>115</Tile>
+ <Tile>104</Tile>
+ <Tile>111</Tile>
+ <Tile>118</Tile>
+ <Tile>107</Tile>
+ <Tile>114</Tile>
+ <Tile>110</Tile>
+ <Tile>117</Tile>
+ <Tile>113</Tile>
+ <Tile>120</Tile>
+ <Tile>116</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ <Lane Index="6">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>3</Tile>
+ <Tile>10</Tile>
+ <Tile>6</Tile>
+ <Tile>13</Tile>
+ <Tile>2</Tile>
+ <Tile>9</Tile>
+ <Tile>16</Tile>
+ <Tile>5</Tile>
+ <Tile>12</Tile>
+ <Tile>19</Tile>
+ <Tile>8</Tile>
+ <Tile>15</Tile>
+ <Tile>22</Tile>
+ <Tile>11</Tile>
+ <Tile>18</Tile>
+ <Tile>25</Tile>
+ <Tile>14</Tile>
+ <Tile>21</Tile>
+ <Tile>28</Tile>
+ <Tile>17</Tile>
+ <Tile>24</Tile>
+ <Tile>31</Tile>
+ <Tile>20</Tile>
+ <Tile>27</Tile>
+ <Tile>34</Tile>
+ <Tile>23</Tile>
+ <Tile>30</Tile>
+ <Tile>37</Tile>
+ <Tile>26</Tile>
+ <Tile>33</Tile>
+ <Tile>40</Tile>
+ <Tile>29</Tile>
+ <Tile>36</Tile>
+ <Tile>43</Tile>
+ <Tile>32</Tile>
+ <Tile>39</Tile>
+ <Tile>46</Tile>
+ <Tile>35</Tile>
+ <Tile>42</Tile>
+ <Tile>49</Tile>
+ <Tile>38</Tile>
+ <Tile>52</Tile>
+ <Tile>45</Tile>
+ <Tile>41</Tile>
+ <Tile>55</Tile>
+ <Tile>48</Tile>
+ <Tile>44</Tile>
+ <Tile>58</Tile>
+ <Tile>51</Tile>
+ <Tile>47</Tile>
+ <Tile>61</Tile>
+ <Tile>54</Tile>
+ <Tile>50</Tile>
+ <Tile>64</Tile>
+ <Tile>57</Tile>
+ <Tile>53</Tile>
+ <Tile>67</Tile>
+ <Tile>60</Tile>
+ <Tile>56</Tile>
+ <Tile>70</Tile>
+ <Tile>63</Tile>
+ <Tile>59</Tile>
+ <Tile>73</Tile>
+ <Tile>66</Tile>
+ <Tile>62</Tile>
+ <Tile>76</Tile>
+ <Tile>69</Tile>
+ <Tile>65</Tile>
+ <Tile>79</Tile>
+ <Tile>72</Tile>
+ <Tile>68</Tile>
+ <Tile>82</Tile>
+ <Tile>75</Tile>
+ <Tile>71</Tile>
+ <Tile>85</Tile>
+ <Tile>78</Tile>
+ <Tile>74</Tile>
+ <Tile>88</Tile>
+ <Tile>81</Tile>
+ <Tile>77</Tile>
+ <Tile>91</Tile>
+ <Tile>84</Tile>
+ <Tile>80</Tile>
+ <Tile>94</Tile>
+ <Tile>87</Tile>
+ <Tile>83</Tile>
+ <Tile>97</Tile>
+ <Tile>90</Tile>
+ <Tile>86</Tile>
+ <Tile>100</Tile>
+ <Tile>93</Tile>
+ <Tile>89</Tile>
+ <Tile>96</Tile>
+ <Tile>103</Tile>
+ <Tile>92</Tile>
+ <Tile>99</Tile>
+ <Tile>106</Tile>
+ <Tile>95</Tile>
+ <Tile>102</Tile>
+ <Tile>109</Tile>
+ <Tile>98</Tile>
+ <Tile>105</Tile>
+ <Tile>112</Tile>
+ <Tile>101</Tile>
+ <Tile>108</Tile>
+ <Tile>115</Tile>
+ <Tile>104</Tile>
+ <Tile>111</Tile>
+ <Tile>118</Tile>
+ <Tile>107</Tile>
+ <Tile>114</Tile>
+ <Tile>110</Tile>
+ <Tile>117</Tile>
+ <Tile>113</Tile>
+ <Tile>120</Tile>
+ <Tile>116</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ <Lane Index="7">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>3</Tile>
+ <Tile>10</Tile>
+ <Tile>6</Tile>
+ <Tile>2</Tile>
+ <Tile>13</Tile>
+ <Tile>9</Tile>
+ <Tile>5</Tile>
+ <Tile>16</Tile>
+ <Tile>12</Tile>
+ <Tile>8</Tile>
+ <Tile>19</Tile>
+ <Tile>15</Tile>
+ <Tile>11</Tile>
+ <Tile>22</Tile>
+ <Tile>18</Tile>
+ <Tile>14</Tile>
+ <Tile>25</Tile>
+ <Tile>21</Tile>
+ <Tile>17</Tile>
+ <Tile>28</Tile>
+ <Tile>24</Tile>
+ <Tile>20</Tile>
+ <Tile>31</Tile>
+ <Tile>27</Tile>
+ <Tile>23</Tile>
+ <Tile>34</Tile>
+ <Tile>30</Tile>
+ <Tile>26</Tile>
+ <Tile>37</Tile>
+ <Tile>33</Tile>
+ <Tile>29</Tile>
+ <Tile>40</Tile>
+ <Tile>32</Tile>
+ <Tile>36</Tile>
+ <Tile>43</Tile>
+ <Tile>35</Tile>
+ <Tile>39</Tile>
+ <Tile>46</Tile>
+ <Tile>38</Tile>
+ <Tile>42</Tile>
+ <Tile>49</Tile>
+ <Tile>41</Tile>
+ <Tile>45</Tile>
+ <Tile>52</Tile>
+ <Tile>44</Tile>
+ <Tile>48</Tile>
+ <Tile>55</Tile>
+ <Tile>47</Tile>
+ <Tile>51</Tile>
+ <Tile>58</Tile>
+ <Tile>50</Tile>
+ <Tile>54</Tile>
+ <Tile>61</Tile>
+ <Tile>53</Tile>
+ <Tile>57</Tile>
+ <Tile>64</Tile>
+ <Tile>56</Tile>
+ <Tile>60</Tile>
+ <Tile>67</Tile>
+ <Tile>59</Tile>
+ <Tile>63</Tile>
+ <Tile>70</Tile>
+ <Tile>62</Tile>
+ <Tile>66</Tile>
+ <Tile>73</Tile>
+ <Tile>65</Tile>
+ <Tile>69</Tile>
+ <Tile>76</Tile>
+ <Tile>68</Tile>
+ <Tile>72</Tile>
+ <Tile>79</Tile>
+ <Tile>71</Tile>
+ <Tile>75</Tile>
+ <Tile>82</Tile>
+ <Tile>74</Tile>
+ <Tile>78</Tile>
+ <Tile>85</Tile>
+ <Tile>77</Tile>
+ <Tile>81</Tile>
+ <Tile>88</Tile>
+ <Tile>80</Tile>
+ <Tile>84</Tile>
+ <Tile>91</Tile>
+ <Tile>83</Tile>
+ <Tile>87</Tile>
+ <Tile>94</Tile>
+ <Tile>86</Tile>
+ <Tile>90</Tile>
+ <Tile>97</Tile>
+ <Tile>89</Tile>
+ <Tile>93</Tile>
+ <Tile>100</Tile>
+ <Tile>92</Tile>
+ <Tile>96</Tile>
+ <Tile>103</Tile>
+ <Tile>95</Tile>
+ <Tile>99</Tile>
+ <Tile>106</Tile>
+ <Tile>98</Tile>
+ <Tile>102</Tile>
+ <Tile>109</Tile>
+ <Tile>101</Tile>
+ <Tile>105</Tile>
+ <Tile>112</Tile>
+ <Tile>104</Tile>
+ <Tile>108</Tile>
+ <Tile>115</Tile>
+ <Tile>107</Tile>
+ <Tile>111</Tile>
+ <Tile>118</Tile>
+ <Tile>110</Tile>
+ <Tile>114</Tile>
+ <Tile>113</Tile>
+ <Tile>117</Tile>
+ <Tile>116</Tile>
+ <Tile>120</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ <Lane Index="8">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>4</Tile>
+ <Tile>7</Tile>
+ <Tile>3</Tile>
+ <Tile>10</Tile>
+ <Tile>2</Tile>
+ <Tile>6</Tile>
+ <Tile>13</Tile>
+ <Tile>5</Tile>
+ <Tile>9</Tile>
+ <Tile>16</Tile>
+ <Tile>8</Tile>
+ <Tile>12</Tile>
+ <Tile>19</Tile>
+ <Tile>11</Tile>
+ <Tile>15</Tile>
+ <Tile>22</Tile>
+ <Tile>14</Tile>
+ <Tile>18</Tile>
+ <Tile>25</Tile>
+ <Tile>17</Tile>
+ <Tile>21</Tile>
+ <Tile>28</Tile>
+ <Tile>20</Tile>
+ <Tile>24</Tile>
+ <Tile>31</Tile>
+ <Tile>23</Tile>
+ <Tile>27</Tile>
+ <Tile>34</Tile>
+ <Tile>26</Tile>
+ <Tile>30</Tile>
+ <Tile>37</Tile>
+ <Tile>29</Tile>
+ <Tile>33</Tile>
+ <Tile>40</Tile>
+ <Tile>32</Tile>
+ <Tile>36</Tile>
+ <Tile>43</Tile>
+ <Tile>35</Tile>
+ <Tile>39</Tile>
+ <Tile>46</Tile>
+ <Tile>38</Tile>
+ <Tile>42</Tile>
+ <Tile>49</Tile>
+ <Tile>41</Tile>
+ <Tile>45</Tile>
+ <Tile>52</Tile>
+ <Tile>44</Tile>
+ <Tile>48</Tile>
+ <Tile>55</Tile>
+ <Tile>47</Tile>
+ <Tile>51</Tile>
+ <Tile>58</Tile>
+ <Tile>50</Tile>
+ <Tile>54</Tile>
+ <Tile>61</Tile>
+ <Tile>53</Tile>
+ <Tile>57</Tile>
+ <Tile>64</Tile>
+ <Tile>56</Tile>
+ <Tile>60</Tile>
+ <Tile>67</Tile>
+ <Tile>59</Tile>
+ <Tile>63</Tile>
+ <Tile>70</Tile>
+ <Tile>62</Tile>
+ <Tile>66</Tile>
+ <Tile>73</Tile>
+ <Tile>65</Tile>
+ <Tile>69</Tile>
+ <Tile>76</Tile>
+ <Tile>68</Tile>
+ <Tile>72</Tile>
+ <Tile>79</Tile>
+ <Tile>71</Tile>
+ <Tile>75</Tile>
+ <Tile>82</Tile>
+ <Tile>74</Tile>
+ <Tile>78</Tile>
+ <Tile>85</Tile>
+ <Tile>77</Tile>
+ <Tile>81</Tile>
+ <Tile>88</Tile>
+ <Tile>80</Tile>
+ <Tile>84</Tile>
+ <Tile>91</Tile>
+ <Tile>83</Tile>
+ <Tile>87</Tile>
+ <Tile>94</Tile>
+ <Tile>86</Tile>
+ <Tile>90</Tile>
+ <Tile>97</Tile>
+ <Tile>89</Tile>
+ <Tile>93</Tile>
+ <Tile>100</Tile>
+ <Tile>92</Tile>
+ <Tile>96</Tile>
+ <Tile>103</Tile>
+ <Tile>95</Tile>
+ <Tile>99</Tile>
+ <Tile>106</Tile>
+ <Tile>98</Tile>
+ <Tile>102</Tile>
+ <Tile>109</Tile>
+ <Tile>101</Tile>
+ <Tile>105</Tile>
+ <Tile>112</Tile>
+ <Tile>104</Tile>
+ <Tile>108</Tile>
+ <Tile>115</Tile>
+ <Tile>107</Tile>
+ <Tile>111</Tile>
+ <Tile>118</Tile>
+ <Tile>110</Tile>
+ <Tile>114</Tile>
+ <Tile>113</Tile>
+ <Tile>117</Tile>
+ <Tile>116</Tile>
+ <Tile>120</Tile>
+ <Tile>119</Tile>
+ </Lane>
+ </TileSelection>
+ <Time />
+ <User />
+ </Run>
+ <matrix />
+ </IPAR>
+ <Bustard version="2">
+ <version>1.8.70.0</version>
+ <Parameters />
+ <MatrixElements>
+ <Base>A</Base>
+ <Base>C</Base>
+ <Base>G</Base>
+ <Base>T</Base>
+ <Element>1.05223</Element>
+ <Element>0.1357753</Element>
+ <Element>0.0</Element>
+ <Element>0.0</Element>
+ <Element>0.8848184</Element>
+ <Element>0.6870167</Element>
+ <Element>0.0</Element>
+ <Element>0.0</Element>
+ <Element>0.0</Element>
+ <Element>0.0</Element>
+ <Element>1.814581</Element>
+ <Element>0.04488774</Element>
+ <Element>0.0</Element>
+ <Element>0.0</Element>
+ <Element>0.7050906</Element>
+ <Element>0.9331109</Element>
+ </MatrixElements>
+ <BaseCallAnalysis>
+ <Run Name="BaseCalls">
+ <BaseCallParameters>
+ <ChastityThreshold>0.6</ChastityThreshold>
+ <Matrix Path="">
+ <AutoFlag>1</AutoFlag>
+ <AutoLane>5</AutoLane>
+ <Cycle>1</Cycle>
+ <CycleOffset>0</CycleOffset>
+ <FirstCycle>1</FirstCycle>
+ <LastCycle>76</LastCycle>
+ <Read>1</Read>
+ </Matrix>
+ <Matrix Path="">
+ <AutoFlag>1</AutoFlag>
+ <AutoLane>5</AutoLane>
+ <Cycle>77</Cycle>
+ <CycleOffset>0</CycleOffset>
+ <FirstCycle>77</FirstCycle>
+ <LastCycle>152</LastCycle>
+ <Read>2</Read>
+ </Matrix>
+ <Phasing Path="">
+ <AutoFlag>1</AutoFlag>
+ <AutoLane>5</AutoLane>
+ <Cycle>2</Cycle>
+ <CycleOffset>1</CycleOffset>
+ <FirstCycle>1</FirstCycle>
+ <LastCycle>76</LastCycle>
+ <Read>1</Read>
+ <PhasingRate>0</PhasingRate>
+ <PrephasingRate>0</PrephasingRate>
+ </Phasing>
+ <Phasing Path="">
+ <AutoFlag>1</AutoFlag>
+ <AutoLane>5</AutoLane>
+ <Cycle>78</Cycle>
+ <CycleOffset>1</CycleOffset>
+ <FirstCycle>77</FirstCycle>
+ <LastCycle>152</LastCycle>
+ <Read>2</Read>
+ <PhasingRate>0</PhasingRate>
+ <PrephasingRate>0</PrephasingRate>
+ </Phasing>
+ <PureBases>0</PureBases>
+ <SmtFilter>failed-chastity</SmtFilter>
+ <SmtRelation>le</SmtRelation>
+ <SmtThreshold>1.0</SmtThreshold>
+ </BaseCallParameters>
+ <Cycles First="1" Last="152" Number="152" />
+ <Input />
+ <RunParameters>
+ <AutoCycleFlag>0</AutoCycleFlag>
+ <BasecallFlag>0</BasecallFlag>
+ <Deblocked>0</Deblocked>
+ <DebugFlag>0</DebugFlag>
+ <FirstRunOnlyFlag>0</FirstRunOnlyFlag>
+ <ImagingReads Index="1">
+ <FirstCycle>1</FirstCycle>
+ <LastCycle>76</LastCycle>
+ </ImagingReads>
+ <ImagingReads Index="2">
+ <FirstCycle>77</FirstCycle>
+ <LastCycle>152</LastCycle>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ </ImagingReads>
+ <Instrument>ILLUMINA-EC5D15</Instrument>
+ <IterativeMatrixFlag>0</IterativeMatrixFlag>
+ <MakeFlag>0</MakeFlag>
+ <MaxCycle>0</MaxCycle>
+ <MinCycle>0</MinCycle>
+ <Reads Index="1">
+ <FirstCycle>1</FirstCycle>
+ <LastCycle>76</LastCycle>
+ </Reads>
+ <Reads Index="2">
+ <FirstCycle>77</FirstCycle>
+ <LastCycle>152</LastCycle>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ </Reads>
+ <RunFolder>101229_ILLUMINA-EC5D15_00026_62DJMAAXX</RunFolder>
+ <RunFolderDate>101229</RunFolderDate>
+ <RunFolderId>00026</RunFolderId>
+ <QTableVersion>PL14</QTableVersion>
+ </RunParameters>
+ <Software Name="RTA" Version="1.8.70.0" />
+ <TileSelection>
+ <Lane Index="1">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>3</Tile>
+ <Tile>2</Tile>
+ <Tile>5</Tile>
+ <Tile>4</Tile>
+ <Tile>6</Tile>
+ <Tile>8</Tile>
+ <Tile>9</Tile>
+ <Tile>7</Tile>
+ <Tile>11</Tile>
+ <Tile>10</Tile>
+ <Tile>12</Tile>
+ <Tile>14</Tile>
+ <Tile>15</Tile>
+ <Tile>13</Tile>
+ <Tile>17</Tile>
+ <Tile>18</Tile>
+ <Tile>16</Tile>
+ <Tile>20</Tile>
+ <Tile>21</Tile>
+ <Tile>19</Tile>
+ <Tile>23</Tile>
+ <Tile>24</Tile>
+ <Tile>22</Tile>
+ <Tile>26</Tile>
+ <Tile>27</Tile>
+ <Tile>25</Tile>
+ <Tile>29</Tile>
+ <Tile>30</Tile>
+ <Tile>28</Tile>
+ <Tile>32</Tile>
+ <Tile>33</Tile>
+ <Tile>31</Tile>
+ <Tile>35</Tile>
+ <Tile>36</Tile>
+ <Tile>34</Tile>
+ <Tile>39</Tile>
+ <Tile>38</Tile>
+ <Tile>37</Tile>
+ <Tile>42</Tile>
+ <Tile>41</Tile>
+ <Tile>40</Tile>
+ <Tile>45</Tile>
+ <Tile>44</Tile>
+ <Tile>43</Tile>
+ <Tile>48</Tile>
+ <Tile>47</Tile>
+ <Tile>46</Tile>
+ <Tile>49</Tile>
+ <Tile>50</Tile>
+ <Tile>51</Tile>
+ <Tile>52</Tile>
+ <Tile>53</Tile>
+ <Tile>54</Tile>
+ <Tile>55</Tile>
+ <Tile>56</Tile>
+ <Tile>57</Tile>
+ <Tile>58</Tile>
+ <Tile>59</Tile>
+ <Tile>60</Tile>
+ <Tile>61</Tile>
+ <Tile>62</Tile>
+ <Tile>63</Tile>
+ <Tile>64</Tile>
+ <Tile>65</Tile>
+ <Tile>66</Tile>
+ <Tile>67</Tile>
+ <Tile>68</Tile>
+ <Tile>69</Tile>
+ <Tile>70</Tile>
+ <Tile>71</Tile>
+ <Tile>72</Tile>
+ <Tile>73</Tile>
+ <Tile>74</Tile>
+ <Tile>75</Tile>
+ <Tile>76</Tile>
+ <Tile>77</Tile>
+ <Tile>78</Tile>
+ <Tile>79</Tile>
+ <Tile>80</Tile>
+ <Tile>81</Tile>
+ <Tile>82</Tile>
+ <Tile>84</Tile>
+ <Tile>83</Tile>
+ <Tile>85</Tile>
+ <Tile>87</Tile>
+ <Tile>86</Tile>
+ <Tile>88</Tile>
+ <Tile>89</Tile>
+ <Tile>90</Tile>
+ <Tile>93</Tile>
+ <Tile>91</Tile>
+ <Tile>92</Tile>
+ <Tile>96</Tile>
+ <Tile>94</Tile>
+ <Tile>95</Tile>
+ <Tile>99</Tile>
+ <Tile>97</Tile>
+ <Tile>98</Tile>
+ <Tile>100</Tile>
+ <Tile>102</Tile>
+ <Tile>101</Tile>
+ <Tile>103</Tile>
+ <Tile>104</Tile>
+ <Tile>105</Tile>
+ <Tile>106</Tile>
+ <Tile>107</Tile>
+ <Tile>108</Tile>
+ <Tile>110</Tile>
+ <Tile>109</Tile>
+ <Tile>111</Tile>
+ <Tile>113</Tile>
+ <Tile>112</Tile>
+ <Tile>114</Tile>
+ <Tile>116</Tile>
+ <Tile>117</Tile>
+ <Tile>115</Tile>
+ <Tile>119</Tile>
+ <Tile>120</Tile>
+ <Tile>118</Tile>
+ </Lane>
+ <Lane Index="2">
+ <Sample>s</Sample>
+ <Tile>2</Tile>
+ <Tile>3</Tile>
+ <Tile>1</Tile>
+ <Tile>5</Tile>
+ <Tile>4</Tile>
+ <Tile>6</Tile>
+ <Tile>8</Tile>
+ <Tile>7</Tile>
+ <Tile>9</Tile>
+ <Tile>11</Tile>
+ <Tile>12</Tile>
+ <Tile>10</Tile>
+ <Tile>13</Tile>
+ <Tile>14</Tile>
+ <Tile>15</Tile>
+ <Tile>17</Tile>
+ <Tile>16</Tile>
+ <Tile>18</Tile>
+ <Tile>19</Tile>
+ <Tile>20</Tile>
+ <Tile>21</Tile>
+ <Tile>22</Tile>
+ <Tile>23</Tile>
+ <Tile>24</Tile>
+ <Tile>26</Tile>
+ <Tile>25</Tile>
+ <Tile>27</Tile>
+ <Tile>28</Tile>
+ <Tile>29</Tile>
+ <Tile>30</Tile>
+ <Tile>31</Tile>
+ <Tile>32</Tile>
+ <Tile>33</Tile>
+ <Tile>34</Tile>
+ <Tile>35</Tile>
+ <Tile>36</Tile>
+ <Tile>37</Tile>
+ <Tile>38</Tile>
+ <Tile>39</Tile>
+ <Tile>40</Tile>
+ <Tile>41</Tile>
+ <Tile>42</Tile>
+ <Tile>43</Tile>
+ <Tile>45</Tile>
+ <Tile>44</Tile>
+ <Tile>46</Tile>
+ <Tile>48</Tile>
+ <Tile>47</Tile>
+ <Tile>49</Tile>
+ <Tile>50</Tile>
+ <Tile>51</Tile>
+ <Tile>52</Tile>
+ <Tile>53</Tile>
+ <Tile>54</Tile>
+ <Tile>55</Tile>
+ <Tile>56</Tile>
+ <Tile>57</Tile>
+ <Tile>59</Tile>
+ <Tile>60</Tile>
+ <Tile>58</Tile>
+ <Tile>62</Tile>
+ <Tile>63</Tile>
+ <Tile>61</Tile>
+ <Tile>65</Tile>
+ <Tile>66</Tile>
+ <Tile>64</Tile>
+ <Tile>68</Tile>
+ <Tile>69</Tile>
+ <Tile>67</Tile>
+ <Tile>71</Tile>
+ <Tile>72</Tile>
+ <Tile>70</Tile>
+ <Tile>74</Tile>
+ <Tile>75</Tile>
+ <Tile>73</Tile>
+ <Tile>77</Tile>
+ <Tile>76</Tile>
+ <Tile>78</Tile>
+ <Tile>80</Tile>
+ <Tile>79</Tile>
+ <Tile>81</Tile>
+ <Tile>83</Tile>
+ <Tile>84</Tile>
+ <Tile>82</Tile>
+ <Tile>86</Tile>
+ <Tile>87</Tile>
+ <Tile>85</Tile>
+ <Tile>89</Tile>
+ <Tile>88</Tile>
+ <Tile>90</Tile>
+ <Tile>92</Tile>
+ <Tile>91</Tile>
+ <Tile>93</Tile>
+ <Tile>95</Tile>
+ <Tile>94</Tile>
+ <Tile>96</Tile>
+ <Tile>98</Tile>
+ <Tile>97</Tile>
+ <Tile>99</Tile>
+ <Tile>101</Tile>
+ <Tile>100</Tile>
+ <Tile>102</Tile>
+ <Tile>104</Tile>
+ <Tile>103</Tile>
+ <Tile>105</Tile>
+ <Tile>107</Tile>
+ <Tile>106</Tile>
+ <Tile>108</Tile>
+ <Tile>109</Tile>
+ <Tile>110</Tile>
+ <Tile>111</Tile>
+ <Tile>112</Tile>
+ <Tile>113</Tile>
+ <Tile>114</Tile>
+ <Tile>115</Tile>
+ <Tile>116</Tile>
+ <Tile>117</Tile>
+ <Tile>118</Tile>
+ <Tile>119</Tile>
+ <Tile>120</Tile>
+ </Lane>
+ <Lane Index="3">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>2</Tile>
+ <Tile>3</Tile>
+ <Tile>4</Tile>
+ <Tile>5</Tile>
+ <Tile>6</Tile>
+ <Tile>7</Tile>
+ <Tile>8</Tile>
+ <Tile>9</Tile>
+ <Tile>10</Tile>
+ <Tile>11</Tile>
+ <Tile>12</Tile>
+ <Tile>13</Tile>
+ <Tile>14</Tile>
+ <Tile>15</Tile>
+ <Tile>16</Tile>
+ <Tile>17</Tile>
+ <Tile>18</Tile>
+ <Tile>19</Tile>
+ <Tile>20</Tile>
+ <Tile>21</Tile>
+ <Tile>22</Tile>
+ <Tile>23</Tile>
+ <Tile>24</Tile>
+ <Tile>25</Tile>
+ <Tile>26</Tile>
+ <Tile>27</Tile>
+ <Tile>28</Tile>
+ <Tile>29</Tile>
+ <Tile>30</Tile>
+ <Tile>31</Tile>
+ <Tile>32</Tile>
+ <Tile>33</Tile>
+ <Tile>34</Tile>
+ <Tile>35</Tile>
+ <Tile>36</Tile>
+ <Tile>37</Tile>
+ <Tile>38</Tile>
+ <Tile>39</Tile>
+ <Tile>40</Tile>
+ <Tile>41</Tile>
+ <Tile>42</Tile>
+ <Tile>43</Tile>
+ <Tile>44</Tile>
+ <Tile>45</Tile>
+ <Tile>46</Tile>
+ <Tile>47</Tile>
+ <Tile>48</Tile>
+ <Tile>49</Tile>
+ <Tile>50</Tile>
+ <Tile>51</Tile>
+ <Tile>53</Tile>
+ <Tile>52</Tile>
+ <Tile>54</Tile>
+ <Tile>55</Tile>
+ <Tile>56</Tile>
+ <Tile>57</Tile>
+ <Tile>58</Tile>
+ <Tile>59</Tile>
+ <Tile>60</Tile>
+ <Tile>62</Tile>
+ <Tile>61</Tile>
+ <Tile>63</Tile>
+ <Tile>64</Tile>
+ <Tile>65</Tile>
+ <Tile>66</Tile>
+ <Tile>67</Tile>
+ <Tile>68</Tile>
+ <Tile>69</Tile>
+ <Tile>70</Tile>
+ <Tile>71</Tile>
+ <Tile>72</Tile>
+ <Tile>73</Tile>
+ <Tile>74</Tile>
+ <Tile>75</Tile>
+ <Tile>76</Tile>
+ <Tile>77</Tile>
+ <Tile>78</Tile>
+ <Tile>79</Tile>
+ <Tile>80</Tile>
+ <Tile>81</Tile>
+ <Tile>82</Tile>
+ <Tile>83</Tile>
+ <Tile>84</Tile>
+ <Tile>85</Tile>
+ <Tile>87</Tile>
+ <Tile>86</Tile>
+ <Tile>88</Tile>
+ <Tile>90</Tile>
+ <Tile>89</Tile>
+ <Tile>93</Tile>
+ <Tile>91</Tile>
+ <Tile>92</Tile>
+ <Tile>94</Tile>
+ <Tile>96</Tile>
+ <Tile>95</Tile>
+ <Tile>99</Tile>
+ <Tile>97</Tile>
+ <Tile>98</Tile>
+ <Tile>102</Tile>
+ <Tile>100</Tile>
+ <Tile>101</Tile>
+ <Tile>105</Tile>
+ <Tile>103</Tile>
+ <Tile>104</Tile>
+ <Tile>108</Tile>
+ <Tile>106</Tile>
+ <Tile>107</Tile>
+ <Tile>111</Tile>
+ <Tile>110</Tile>
+ <Tile>109</Tile>
+ <Tile>114</Tile>
+ <Tile>113</Tile>
+ <Tile>112</Tile>
+ <Tile>117</Tile>
+ <Tile>116</Tile>
+ <Tile>115</Tile>
+ <Tile>119</Tile>
+ <Tile>120</Tile>
+ <Tile>118</Tile>
+ </Lane>
+ <Lane Index="4">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>2</Tile>
+ <Tile>3</Tile>
+ <Tile>4</Tile>
+ <Tile>6</Tile>
+ <Tile>5</Tile>
+ <Tile>9</Tile>
+ <Tile>8</Tile>
+ <Tile>7</Tile>
+ <Tile>12</Tile>
+ <Tile>11</Tile>
+ <Tile>10</Tile>
+ <Tile>15</Tile>
+ <Tile>14</Tile>
+ <Tile>13</Tile>
+ <Tile>18</Tile>
+ <Tile>17</Tile>
+ <Tile>16</Tile>
+ <Tile>21</Tile>
+ <Tile>19</Tile>
+ <Tile>20</Tile>
+ <Tile>24</Tile>
+ <Tile>22</Tile>
+ <Tile>23</Tile>
+ <Tile>27</Tile>
+ <Tile>25</Tile>
+ <Tile>26</Tile>
+ <Tile>30</Tile>
+ <Tile>28</Tile>
+ <Tile>29</Tile>
+ <Tile>33</Tile>
+ <Tile>32</Tile>
+ <Tile>31</Tile>
+ <Tile>36</Tile>
+ <Tile>35</Tile>
+ <Tile>34</Tile>
+ <Tile>39</Tile>
+ <Tile>38</Tile>
+ <Tile>37</Tile>
+ <Tile>41</Tile>
+ <Tile>42</Tile>
+ <Tile>40</Tile>
+ <Tile>45</Tile>
+ <Tile>44</Tile>
+ <Tile>43</Tile>
+ <Tile>48</Tile>
+ <Tile>47</Tile>
+ <Tile>46</Tile>
+ <Tile>51</Tile>
+ <Tile>50</Tile>
+ <Tile>49</Tile>
+ <Tile>53</Tile>
+ <Tile>54</Tile>
+ <Tile>52</Tile>
+ <Tile>56</Tile>
+ <Tile>57</Tile>
+ <Tile>55</Tile>
+ <Tile>59</Tile>
+ <Tile>60</Tile>
+ <Tile>58</Tile>
+ <Tile>63</Tile>
+ <Tile>62</Tile>
+ <Tile>61</Tile>
+ <Tile>65</Tile>
+ <Tile>66</Tile>
+ <Tile>64</Tile>
+ <Tile>69</Tile>
+ <Tile>68</Tile>
+ <Tile>67</Tile>
+ <Tile>72</Tile>
+ <Tile>71</Tile>
+ <Tile>70</Tile>
+ <Tile>75</Tile>
+ <Tile>73</Tile>
+ <Tile>74</Tile>
+ <Tile>78</Tile>
+ <Tile>76</Tile>
+ <Tile>77</Tile>
+ <Tile>80</Tile>
+ <Tile>79</Tile>
+ <Tile>81</Tile>
+ <Tile>83</Tile>
+ <Tile>82</Tile>
+ <Tile>84</Tile>
+ <Tile>86</Tile>
+ <Tile>87</Tile>
+ <Tile>85</Tile>
+ <Tile>90</Tile>
+ <Tile>89</Tile>
+ <Tile>88</Tile>
+ <Tile>93</Tile>
+ <Tile>92</Tile>
+ <Tile>91</Tile>
+ <Tile>96</Tile>
+ <Tile>95</Tile>
+ <Tile>94</Tile>
+ <Tile>99</Tile>
+ <Tile>97</Tile>
+ <Tile>98</Tile>
+ <Tile>102</Tile>
+ <Tile>101</Tile>
+ <Tile>100</Tile>
+ <Tile>105</Tile>
+ <Tile>103</Tile>
+ <Tile>104</Tile>
+ <Tile>106</Tile>
+ <Tile>108</Tile>
+ <Tile>107</Tile>
+ <Tile>111</Tile>
+ <Tile>109</Tile>
+ <Tile>110</Tile>
+ <Tile>112</Tile>
+ <Tile>114</Tile>
+ <Tile>113</Tile>
+ <Tile>116</Tile>
+ <Tile>117</Tile>
+ <Tile>115</Tile>
+ <Tile>118</Tile>
+ <Tile>119</Tile>
+ <Tile>120</Tile>
+ </Lane>
+ <Lane Index="5">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>2</Tile>
+ <Tile>3</Tile>
+ <Tile>5</Tile>
+ <Tile>6</Tile>
+ <Tile>4</Tile>
+ <Tile>9</Tile>
+ <Tile>8</Tile>
+ <Tile>7</Tile>
+ <Tile>11</Tile>
+ <Tile>10</Tile>
+ <Tile>12</Tile>
+ <Tile>13</Tile>
+ <Tile>15</Tile>
+ <Tile>14</Tile>
+ <Tile>16</Tile>
+ <Tile>17</Tile>
+ <Tile>18</Tile>
+ <Tile>20</Tile>
+ <Tile>19</Tile>
+ <Tile>21</Tile>
+ <Tile>23</Tile>
+ <Tile>22</Tile>
+ <Tile>24</Tile>
+ <Tile>26</Tile>
+ <Tile>25</Tile>
+ <Tile>27</Tile>
+ <Tile>29</Tile>
+ <Tile>28</Tile>
+ <Tile>30</Tile>
+ <Tile>32</Tile>
+ <Tile>31</Tile>
+ <Tile>33</Tile>
+ <Tile>35</Tile>
+ <Tile>34</Tile>
+ <Tile>36</Tile>
+ <Tile>38</Tile>
+ <Tile>37</Tile>
+ <Tile>39</Tile>
+ <Tile>40</Tile>
+ <Tile>41</Tile>
+ <Tile>42</Tile>
+ <Tile>43</Tile>
+ <Tile>44</Tile>
+ <Tile>45</Tile>
+ <Tile>46</Tile>
+ <Tile>47</Tile>
+ <Tile>48</Tile>
+ <Tile>49</Tile>
+ <Tile>50</Tile>
+ <Tile>51</Tile>
+ <Tile>52</Tile>
+ <Tile>53</Tile>
+ <Tile>54</Tile>
+ <Tile>56</Tile>
+ <Tile>55</Tile>
+ <Tile>57</Tile>
+ <Tile>58</Tile>
+ <Tile>59</Tile>
+ <Tile>60</Tile>
+ <Tile>62</Tile>
+ <Tile>61</Tile>
+ <Tile>63</Tile>
+ <Tile>65</Tile>
+ <Tile>64</Tile>
+ <Tile>66</Tile>
+ <Tile>68</Tile>
+ <Tile>67</Tile>
+ <Tile>69</Tile>
+ <Tile>71</Tile>
+ <Tile>70</Tile>
+ <Tile>72</Tile>
+ <Tile>74</Tile>
+ <Tile>73</Tile>
+ <Tile>75</Tile>
+ <Tile>76</Tile>
+ <Tile>77</Tile>
+ <Tile>78</Tile>
+ <Tile>79</Tile>
+ <Tile>80</Tile>
+ <Tile>81</Tile>
+ <Tile>82</Tile>
+ <Tile>83</Tile>
+ <Tile>84</Tile>
+ <Tile>85</Tile>
+ <Tile>86</Tile>
+ <Tile>87</Tile>
+ <Tile>88</Tile>
+ <Tile>89</Tile>
+ <Tile>90</Tile>
+ <Tile>91</Tile>
+ <Tile>92</Tile>
+ <Tile>93</Tile>
+ <Tile>94</Tile>
+ <Tile>95</Tile>
+ <Tile>96</Tile>
+ <Tile>97</Tile>
+ <Tile>98</Tile>
+ <Tile>99</Tile>
+ <Tile>100</Tile>
+ <Tile>101</Tile>
+ <Tile>102</Tile>
+ <Tile>104</Tile>
+ <Tile>103</Tile>
+ <Tile>105</Tile>
+ <Tile>107</Tile>
+ <Tile>106</Tile>
+ <Tile>108</Tile>
+ <Tile>110</Tile>
+ <Tile>109</Tile>
+ <Tile>111</Tile>
+ <Tile>113</Tile>
+ <Tile>112</Tile>
+ <Tile>114</Tile>
+ <Tile>116</Tile>
+ <Tile>115</Tile>
+ <Tile>117</Tile>
+ <Tile>118</Tile>
+ <Tile>119</Tile>
+ <Tile>120</Tile>
+ </Lane>
+ <Lane Index="6">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>2</Tile>
+ <Tile>3</Tile>
+ <Tile>4</Tile>
+ <Tile>5</Tile>
+ <Tile>6</Tile>
+ <Tile>7</Tile>
+ <Tile>8</Tile>
+ <Tile>9</Tile>
+ <Tile>10</Tile>
+ <Tile>11</Tile>
+ <Tile>12</Tile>
+ <Tile>13</Tile>
+ <Tile>14</Tile>
+ <Tile>15</Tile>
+ <Tile>16</Tile>
+ <Tile>17</Tile>
+ <Tile>18</Tile>
+ <Tile>19</Tile>
+ <Tile>20</Tile>
+ <Tile>21</Tile>
+ <Tile>22</Tile>
+ <Tile>23</Tile>
+ <Tile>24</Tile>
+ <Tile>26</Tile>
+ <Tile>25</Tile>
+ <Tile>27</Tile>
+ <Tile>29</Tile>
+ <Tile>28</Tile>
+ <Tile>30</Tile>
+ <Tile>32</Tile>
+ <Tile>31</Tile>
+ <Tile>33</Tile>
+ <Tile>34</Tile>
+ <Tile>35</Tile>
+ <Tile>36</Tile>
+ <Tile>37</Tile>
+ <Tile>38</Tile>
+ <Tile>39</Tile>
+ <Tile>41</Tile>
+ <Tile>40</Tile>
+ <Tile>42</Tile>
+ <Tile>44</Tile>
+ <Tile>45</Tile>
+ <Tile>43</Tile>
+ <Tile>47</Tile>
+ <Tile>46</Tile>
+ <Tile>48</Tile>
+ <Tile>49</Tile>
+ <Tile>50</Tile>
+ <Tile>51</Tile>
+ <Tile>52</Tile>
+ <Tile>53</Tile>
+ <Tile>54</Tile>
+ <Tile>55</Tile>
+ <Tile>56</Tile>
+ <Tile>57</Tile>
+ <Tile>59</Tile>
+ <Tile>58</Tile>
+ <Tile>60</Tile>
+ <Tile>62</Tile>
+ <Tile>61</Tile>
+ <Tile>63</Tile>
+ <Tile>65</Tile>
+ <Tile>64</Tile>
+ <Tile>66</Tile>
+ <Tile>67</Tile>
+ <Tile>68</Tile>
+ <Tile>69</Tile>
+ <Tile>71</Tile>
+ <Tile>70</Tile>
+ <Tile>72</Tile>
+ <Tile>74</Tile>
+ <Tile>73</Tile>
+ <Tile>75</Tile>
+ <Tile>77</Tile>
+ <Tile>76</Tile>
+ <Tile>78</Tile>
+ <Tile>80</Tile>
+ <Tile>79</Tile>
+ <Tile>81</Tile>
+ <Tile>83</Tile>
+ <Tile>82</Tile>
+ <Tile>84</Tile>
+ <Tile>85</Tile>
+ <Tile>86</Tile>
+ <Tile>87</Tile>
+ <Tile>88</Tile>
+ <Tile>89</Tile>
+ <Tile>90</Tile>
+ <Tile>91</Tile>
+ <Tile>92</Tile>
+ <Tile>93</Tile>
+ <Tile>94</Tile>
+ <Tile>95</Tile>
+ <Tile>96</Tile>
+ <Tile>97</Tile>
+ <Tile>98</Tile>
+ <Tile>99</Tile>
+ <Tile>100</Tile>
+ <Tile>101</Tile>
+ <Tile>102</Tile>
+ <Tile>104</Tile>
+ <Tile>103</Tile>
+ <Tile>105</Tile>
+ <Tile>106</Tile>
+ <Tile>107</Tile>
+ <Tile>108</Tile>
+ <Tile>109</Tile>
+ <Tile>110</Tile>
+ <Tile>111</Tile>
+ <Tile>112</Tile>
+ <Tile>113</Tile>
+ <Tile>114</Tile>
+ <Tile>115</Tile>
+ <Tile>116</Tile>
+ <Tile>117</Tile>
+ <Tile>118</Tile>
+ <Tile>119</Tile>
+ <Tile>120</Tile>
+ </Lane>
+ <Lane Index="7">
+ <Sample>s</Sample>
+ <Tile>1</Tile>
+ <Tile>2</Tile>
+ <Tile>3</Tile>
+ <Tile>5</Tile>
+ <Tile>4</Tile>
+ <Tile>6</Tile>
+ <Tile>7</Tile>
+ <Tile>8</Tile>
+ <Tile>9</Tile>
+ <Tile>10</Tile>
+ <Tile>11</Tile>
+ <Tile>12</Tile>
+ <Tile>13</Tile>
+ <Tile>14</Tile>
+ <Tile>15</Tile>
+ <Tile>16</Tile>
+ <Tile>18</Tile>
+ <Tile>17</Tile>
+ <Tile>19</Tile>
+ <Tile>20</Tile>
+ <Tile>21</Tile>
+ <Tile>22</Tile>
+ <Tile>23</Tile>
+ <Tile>24</Tile>
+ <Tile>25</Tile>
+ <Tile>26</Tile>
+ <Tile>27</Tile>
+ <Tile>28</Tile>
+ <Tile>29</Tile>
+ <Tile>30</Tile>
+ <Tile>31</Tile>
+ <Tile>32</Tile>
+ <Tile>33</Tile>
+ <Tile>36</Tile>
+ <Tile>35</Tile>
+ <Tile>34</Tile>
+ <Tile>38</Tile>
+ <Tile>39</Tile>
+ <Tile>37</Tile>
+ <Tile>41</Tile>
+ <Tile>42</Tile>
+ <Tile>40</Tile>
+ <Tile>43</Tile>
+ <Tile>45</Tile>
+ <Tile>44</Tile>
+ <Tile>46</Tile>
+ <Tile>47</Tile>
+ <Tile>48</Tile>
+ <Tile>51</Tile>
+ <Tile>49</Tile>
+ <Tile>50</Tile>
+ <Tile>54</Tile>
+ <Tile>53</Tile>
+ <Tile>52</Tile>
+ <Tile>57</Tile>
+ <Tile>56</Tile>
+ <Tile>55</Tile>
+ <Tile>60</Tile>
+ <Tile>59</Tile>
+ <Tile>58</Tile>
+ <Tile>63</Tile>
+ <Tile>61</Tile>
+ <Tile>62</Tile>
+ <Tile>64</Tile>
+ <Tile>66</Tile>
+ <Tile>65</Tile>
+ <Tile>67</Tile>
+ <Tile>68</Tile>
+ <Tile>69</Tile>
+ <Tile>70</Tile>
+ <Tile>71</Tile>
+ <Tile>72</Tile>
+ <Tile>73</Tile>
+ <Tile>74</Tile>
+ <Tile>75</Tile>
+ <Tile>76</Tile>
+ <Tile>77</Tile>
+ <Tile>78</Tile>
+ <Tile>79</Tile>
+ <Tile>81</Tile>
+ <Tile>80</Tile>
+ <Tile>82</Tile>
+ <Tile>84</Tile>
+ <Tile>83</Tile>
+ <Tile>86</Tile>
+ <Tile>85</Tile>
+ <Tile>87</Tile>
+ <Tile>89</Tile>
+ <Tile>90</Tile>
+ <Tile>88</Tile>
+ <Tile>91</Tile>
+ <Tile>92</Tile>
+ <Tile>93</Tile>
+ <Tile>95</Tile>
+ <Tile>96</Tile>
+ <Tile>94</Tile>
+ <Tile>98</Tile>
+ <Tile>99</Tile>
+ <Tile>97</Tile>
+ <Tile>101</Tile>
+ <Tile>102</Tile>
+ <Tile>100</Tile>
+ <Tile>104</Tile>
+ <Tile>105</Tile>
+ <Tile>103</Tile>
+ <Tile>107</Tile>
+ <Tile>108</Tile>
+ <Tile>106</Tile>
+ <Tile>110</Tile>
+ <Tile>111</Tile>
+ <Tile>109</Tile>
+ <Tile>112</Tile>
+ <Tile>113</Tile>
+ <Tile>114</Tile>
+ <Tile>117</Tile>
+ <Tile>116</Tile>
+ <Tile>115</Tile>
+ <Tile>119</Tile>
+ <Tile>120</Tile>
+ <Tile>118</Tile>
+ </Lane>
+ <Lane Index="8">
+ <Sample>s</Sample>
+ <Tile>2</Tile>
+ <Tile>1</Tile>
+ <Tile>3</Tile>
+ <Tile>5</Tile>
+ <Tile>4</Tile>
+ <Tile>6</Tile>
+ <Tile>7</Tile>
+ <Tile>8</Tile>
+ <Tile>9</Tile>
+ <Tile>11</Tile>
+ <Tile>10</Tile>
+ <Tile>12</Tile>
+ <Tile>14</Tile>
+ <Tile>15</Tile>
+ <Tile>13</Tile>
+ <Tile>17</Tile>
+ <Tile>16</Tile>
+ <Tile>18</Tile>
+ <Tile>20</Tile>
+ <Tile>19</Tile>
+ <Tile>21</Tile>
+ <Tile>22</Tile>
+ <Tile>23</Tile>
+ <Tile>24</Tile>
+ <Tile>26</Tile>
+ <Tile>25</Tile>
+ <Tile>27</Tile>
+ <Tile>28</Tile>
+ <Tile>29</Tile>
+ <Tile>30</Tile>
+ <Tile>32</Tile>
+ <Tile>31</Tile>
+ <Tile>33</Tile>
+ <Tile>36</Tile>
+ <Tile>34</Tile>
+ <Tile>35</Tile>
+ <Tile>39</Tile>
+ <Tile>38</Tile>
+ <Tile>37</Tile>
+ <Tile>42</Tile>
+ <Tile>41</Tile>
+ <Tile>40</Tile>
+ <Tile>45</Tile>
+ <Tile>44</Tile>
+ <Tile>43</Tile>
+ <Tile>46</Tile>
+ <Tile>48</Tile>
+ <Tile>47</Tile>
+ <Tile>50</Tile>
+ <Tile>49</Tile>
+ <Tile>51</Tile>
+ <Tile>53</Tile>
+ <Tile>54</Tile>
+ <Tile>52</Tile>
+ <Tile>56</Tile>
+ <Tile>55</Tile>
+ <Tile>57</Tile>
+ <Tile>59</Tile>
+ <Tile>58</Tile>
+ <Tile>60</Tile>
+ <Tile>62</Tile>
+ <Tile>61</Tile>
+ <Tile>63</Tile>
+ <Tile>65</Tile>
+ <Tile>64</Tile>
+ <Tile>66</Tile>
+ <Tile>67</Tile>
+ <Tile>68</Tile>
+ <Tile>69</Tile>
+ <Tile>71</Tile>
+ <Tile>70</Tile>
+ <Tile>72</Tile>
+ <Tile>74</Tile>
+ <Tile>73</Tile>
+ <Tile>75</Tile>
+ <Tile>77</Tile>
+ <Tile>76</Tile>
+ <Tile>78</Tile>
+ <Tile>80</Tile>
+ <Tile>79</Tile>
+ <Tile>81</Tile>
+ <Tile>83</Tile>
+ <Tile>82</Tile>
+ <Tile>84</Tile>
+ <Tile>86</Tile>
+ <Tile>85</Tile>
+ <Tile>87</Tile>
+ <Tile>89</Tile>
+ <Tile>88</Tile>
+ <Tile>90</Tile>
+ <Tile>92</Tile>
+ <Tile>91</Tile>
+ <Tile>93</Tile>
+ <Tile>95</Tile>
+ <Tile>94</Tile>
+ <Tile>96</Tile>
+ <Tile>98</Tile>
+ <Tile>97</Tile>
+ <Tile>99</Tile>
+ <Tile>101</Tile>
+ <Tile>100</Tile>
+ <Tile>102</Tile>
+ <Tile>104</Tile>
+ <Tile>103</Tile>
+ <Tile>105</Tile>
+ <Tile>107</Tile>
+ <Tile>106</Tile>
+ <Tile>108</Tile>
+ <Tile>109</Tile>
+ <Tile>110</Tile>
+ <Tile>111</Tile>
+ <Tile>113</Tile>
+ <Tile>112</Tile>
+ <Tile>114</Tile>
+ <Tile>116</Tile>
+ <Tile>115</Tile>
+ <Tile>117</Tile>
+ <Tile>119</Tile>
+ <Tile>118</Tile>
+ <Tile>120</Tile>
+ </Lane>
+ </TileSelection>
+ <Time />
+ <User />
+ </Run>
+ </BaseCallAnalysis>
+ </Bustard>
+ <Gerald version="1">
+ <RunParameters>
+ <ChipWideRunParameters>
+ <ALIGN_PARAMS />
+ <ANALYSIS>none</ANALYSIS>
+ <BAD_TILES />
+ <BIN_DIR>/usr/local/casava-1.7.0/bin/../bin</BIN_DIR>
+ <CHASTITY_THRESHOLD>0.6</CHASTITY_THRESHOLD>
+ <CMDPREFIX />
+ <COMPRESSION>gzip</COMPRESSION>
+ <COMPRESSIONSUFFIX>.gz</COMPRESSIONSUFFIX>
+ <ELAND_GENOME>Need_to_specify_ELAND_genome_directory</ELAND_GENOME>
+ <ELAND_MAX_MATCHES />
+ <ELAND_MULTIPLE_INSTANCES />
+ <ELAND_REPEAT />
+ <ELAND_RNA_GENOME_CONTAM />
+ <ELAND_RNA_GENOME_REF_FLAT_GZ />
+ <ELAND_RNA_GENOME_SEQ_GENE_MD_GZ />
+ <ELAND_SEED_LENGTH />
+ <ELAND_SET_SIZE>20</ELAND_SET_SIZE>
+ <EMAIL_DOMAIN>domain.com</EMAIL_DOMAIN>
+ <EMAIL_LIST>igor</EMAIL_LIST>
+ <EMAIL_SERVER>localhost:25</EMAIL_SERVER>
+ <EXPT_DIR>/mmjggl/thornvalley/tardigrade/101229_ILLUMINA-EC5D15_00026_62DJMAAXX/Data/Intensities/BaseCalls</EXPT_DIR>
+ <EXPT_DIR_ROOT>/mmjggl/thornvalley/tardigrade</EXPT_DIR_ROOT>
+ <FLOW_CELL>v4</FLOW_CELL>
+ <FLOW_CELL_ID />
+ <FORCE>1</FORCE>
+ <GENOME_DIR>/usr/local/casava-1.7.0/bin/../../Genomes</GENOME_DIR>
+ <GENOME_FILE>Need_to_specify_genome_file_name</GENOME_FILE>
+ <GROUP_LANES />
+ <INDEXING_READ_POSITION />
+ <INDEX_BASES />
+ <KEEP_INTERMEDIARY>false</KEEP_INTERMEDIARY>
+ <LANE_POST_RUN_COMMAND />
+ <NUM_LEADING_DIRS_TO_STRIP>2</NUM_LEADING_DIRS_TO_STRIP>
+ <ORIG_READ_LENGTHS>76:76</ORIG_READ_LENGTHS>
+ <OUT_DIR>/mmjggl/thornvalley/tardigrade/101229_ILLUMINA-EC5D15_00026_62DJMAAXX/Data/Intensities/BaseCalls/GERALD_09-01-2011_igor</OUT_DIR>
+ <PAIR_PARAMS />
+ <POST_RUN_COMMAND>/home/diane/bin/runfolder -j 4 --site=woldlab -o /woldlab/loxcyc/data00/solexa-sequence/flowcells/ --extract-results /mmjggl/thornvalley/tardigrade/101229_ILLUMINA-EC5D15_00026_62DJMAAXX</POST_RUN_COMMAND>
+ <PRB_FILE_SUFFIX>_prb.txt</PRB_FILE_SUFFIX>
+ <PURE_BASES>25</PURE_BASES>
+ <QCAL_SOURCE>upstream</QCAL_SOURCE>
+ <QCAL_SOURCE1 />
+ <QCAL_SOURCE2 />
+ <QHG_FILE_SUFFIX>_qhg.txt</QHG_FILE_SUFFIX>
+ <QSEQ_READS>1 2</QSEQ_READS>
+ <QTABLE_PATH />
+ <QTABLE_PATH1 />
+ <QTABLE_PATH2 />
+ <QTABLE_PATH3 />
+ <QTABLE_REQUIRED>0</QTABLE_REQUIRED>
+ <QTABLE_REQUIRED1>0</QTABLE_REQUIRED1>
+ <QTABLE_REQUIRED2>0</QTABLE_REQUIRED2>
+ <QTABLE_REQUIRED3>0</QTABLE_REQUIRED3>
+ <QUALITY_CALIBRATION>QualityCalibration.xml</QUALITY_CALIBRATION>
+ <RAMM_REPORT_TYPE />
+ <READ_LENGTH>76</READ_LENGTH>
+ <READ_LENGTH1>76</READ_LENGTH1>
+ <READ_LENGTH2>0</READ_LENGTH2>
+ <RNAQC_COUNT_CLUSTERS_CMD>$(WC) -l</RNAQC_COUNT_CLUSTERS_CMD>
+ <RNAQC_COUNT_CLUSTERS_NAME>totalClusters</RNAQC_COUNT_CLUSTERS_NAME>
+ <RNAQC_COUNT_CONTAM_1_CMD>($(EGREP) -c 'c.*M' || exit 0)</RNAQC_COUNT_CONTAM_1_CMD>
+ <RNAQC_COUNT_CONTAM_1_NAME>mito</RNAQC_COUNT_CONTAM_1_NAME>
+ <RNAQC_COUNT_CONTAM_2_CMD>($(FGREP) -c newcontam || exit 0)</RNAQC_COUNT_CONTAM_2_CMD>
+ <RNAQC_COUNT_CONTAM_2_NAME>adapters</RNAQC_COUNT_CONTAM_2_NAME>
+ <RNAQC_COUNT_CONTAM_3_CMD>($(FGREP) -c Ribosomal || exit 0)</RNAQC_COUNT_CONTAM_3_CMD>
+ <RNAQC_COUNT_CONTAM_3_NAME>ribosomal</RNAQC_COUNT_CONTAM_3_NAME>
+ <RNAQC_COUNT_CONTAM_4_CMD>($(FGREP) -c 5S || exit 0)</RNAQC_COUNT_CONTAM_4_CMD>
+ <RNAQC_COUNT_CONTAM_4_NAME>5S</RNAQC_COUNT_CONTAM_4_NAME>
+ <RNAQC_COUNT_CONTAM_5_CMD>($(FGREP) -c phi || exit 0)</RNAQC_COUNT_CONTAM_5_CMD>
+ <RNAQC_COUNT_CONTAM_5_NAME>phiX</RNAQC_COUNT_CONTAM_5_NAME>
+ <RNAQC_COUNT_CONTAM_6_CMD>($(FGREP) -c indexed || exit 0)</RNAQC_COUNT_CONTAM_6_CMD>
+ <RNAQC_COUNT_CONTAM_6_NAME>indAdp</RNAQC_COUNT_CONTAM_6_NAME>
+ <RNAQC_COUNT_GENOME_CMD>($(EGREP) -c '^c' || exit 0)</RNAQC_COUNT_GENOME_CMD>
+ <RNAQC_COUNT_GENOME_NAME>genomeUsable</RNAQC_COUNT_GENOME_NAME>
+ <RNAQC_COUNT_NM_CMD>($(FGREP) -wc NM || exit 0)</RNAQC_COUNT_NM_CMD>
+ <RNAQC_COUNT_NM_NAME>noMatch</RNAQC_COUNT_NM_NAME>
+ <RNAQC_COUNT_PF_CMD>$(WC) -l</RNAQC_COUNT_PF_CMD>
+ <RNAQC_COUNT_PF_NAME>PFClusters</RNAQC_COUNT_PF_NAME>
+ <RNAQC_COUNT_QC_CMD>($(FGREP) -wc QC || exit 0)</RNAQC_COUNT_QC_CMD>
+ <RNAQC_COUNT_QC_NAME>QC</RNAQC_COUNT_QC_NAME>
+ <RNAQC_COUNT_RM_CMD>($(FGREP) -wc RM || exit 0)</RNAQC_COUNT_RM_CMD>
+ <RNAQC_COUNT_RM_NAME>repeatMasked</RNAQC_COUNT_RM_NAME>
+ <RNAQC_COUNT_SPLICE_CMD>($(FGREP) -c splice_sites || exit 0)</RNAQC_COUNT_SPLICE_CMD>
+ <RNAQC_COUNT_SPLICE_NAME>spliceUsable</RNAQC_COUNT_SPLICE_NAME>
+ <RNAQC_COUNT_USABLE_CMD>$(CUT) -f13 |($(EGREP) [0-9] || exit 0)|$(WC) -l</RNAQC_COUNT_USABLE_CMD>
+ <RNAQC_COUNT_USABLE_NAME>usable</RNAQC_COUNT_USABLE_NAME>
+ <SEQUENCE_FORMAT>--fastq</SEQUENCE_FORMAT>
+ <SEQ_FILE_SUFFIX>_seq.txt</SEQ_FILE_SUFFIX>
+ <SIG2_COMPRESSION />
+ <SIG2_COMPRESSION_SUFFIX />
+ <SIG_FILE_SUFFIX_DEPHASED>_sig2.txt</SIG_FILE_SUFFIX_DEPHASED>
+ <SIG_FILE_SUFFIX_NOT_DEPHASED>_sig.txt</SIG_FILE_SUFFIX_NOT_DEPHASED>
+ <SINGLESEED />
+ <SOFTWARE_VERSION>CASAVA-1.7.0</SOFTWARE_VERSION>
+ <SRF_2ND_CYCLE />
+ <SRF_ARCHIVE_REQUIRED />
+ <SRF_CHASTITY>0.6</SRF_CHASTITY>
+ <SRF_FILE_SUFFIX>_traces.srf</SRF_FILE_SUFFIX>
+ <SRF_PROCESSED>-P</SRF_PROCESSED>
+ <SRF_QCAL />
+ <SRF_RAW />
+ <TILE_AREA />
+ <TILE_READ1_REGEX>s_[1-8]_1_[0-9][0-9][0-9][0-9]</TILE_READ1_REGEX>
+ <TILE_REGEX>s_[1-8]_[0-9][0-9][0-9][0-9]</TILE_REGEX>
+ <TILE_ROOT>s</TILE_ROOT>
+ <TIME_STAMP>Sun Jan 9 10:28:08 2011</TIME_STAMP>
+ <UNGAPPED />
+ <USE_BASES>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn</USE_BASES>
+ <USE_BASES1>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</USE_BASES1>
+ <USE_BASES2>nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn</USE_BASES2>
+ <USE_BASES3 />
+ <WEB_DIR_ROOT>http://host.domain.com/yourshare/</WEB_DIR_ROOT>
+ <WITH_SEQUENCE>false</WITH_SEQUENCE>
+ <WITH_SORTED>false</WITH_SORTED>
+ </ChipWideRunParameters>
+ <LaneSpecificRunParameters>
+ <ANALYSIS>
+ <s_1>eland_pair</s_1>
+ <s_2>eland_pair</s_2>
+ <s_3>eland_pair</s_3>
+ <s_4>eland_pair</s_4>
+ <s_5>sequence_pair</s_5>
+ <s_6>eland_pair</s_6>
+ <s_7>eland_pair</s_7>
+ <s_8>eland_pair</s_8>
+ </ANALYSIS>
+ <ELAND_GENOME>
+ <s_1>/mmjggl/tardigrade/data00/genomes/hg18</s_1>
+ <s_2>/mmjggl/tardigrade/data00/genomes/hg18</s_2>
+ <s_3>/mmjggl/tardigrade/data00/genomes/hg18</s_3>
+ <s_4>/mmjggl/tardigrade/data00/genomes/hg18</s_4>
+ <s_6>/mmjggl/tardigrade/data00/genomes/hg18</s_6>
+ <s_7>/mmjggl/tardigrade/data00/genomes/hg18</s_7>
+ <s_8>/mmjggl/tardigrade/data00/genomes/hg18</s_8>
+ </ELAND_GENOME>
+ <QTABLE_REQUIRED>
+ <s_1>0</s_1>
+ <s_2>0</s_2>
+ <s_3>0</s_3>
+ <s_4>0</s_4>
+ <s_5>0</s_5>
+ <s_6>0</s_6>
+ <s_7>0</s_7>
+ <s_8>0</s_8>
+ </QTABLE_REQUIRED>
+ <READ_LENGTH1>
+ <s_1>76</s_1>
+ <s_2>76</s_2>
+ <s_3>76</s_3>
+ <s_4>76</s_4>
+ <s_5>76</s_5>
+ <s_6>76</s_6>
+ <s_7>76</s_7>
+ <s_8>76</s_8>
+ </READ_LENGTH1>
+ <READ_LENGTH2>
+ <s_1>76</s_1>
+ <s_2>76</s_2>
+ <s_3>76</s_3>
+ <s_4>76</s_4>
+ <s_5>76</s_5>
+ <s_6>76</s_6>
+ <s_7>76</s_7>
+ <s_8>76</s_8>
+ </READ_LENGTH2>
+ <SRF_2ND_CYCLE>
+ <s_1>-2 77</s_1>
+ <s_2>-2 77</s_2>
+ <s_3>-2 77</s_3>
+ <s_4>-2 77</s_4>
+ <s_5>-2 77</s_5>
+ <s_6>-2 77</s_6>
+ <s_7>-2 77</s_7>
+ <s_8>-2 77</s_8>
+ </SRF_2ND_CYCLE>
+ <SRF_ARCHIVE_REQUIRED>
+ <s_1 />
+ <s_2 />
+ <s_3 />
+ <s_4 />
+ <s_5 />
+ <s_6 />
+ <s_7 />
+ <s_8 />
+ </SRF_ARCHIVE_REQUIRED>
+ <SRF_QCAL>
+ <s_1 />
+ <s_2 />
+ <s_3 />
+ <s_4 />
+ <s_5 />
+ <s_6 />
+ <s_7 />
+ <s_8 />
+ </SRF_QCAL>
+ <USE_BASES>
+ <s_1>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_1>
+ <s_2>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_2>
+ <s_3>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_3>
+ <s_4>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_4>
+ <s_5>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_5>
+ <s_6>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_6>
+ <s_7>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_7>
+ <s_8>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_8>
+ </USE_BASES>
+ <USE_BASES1>
+ <s_1>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_1>
+ <s_2>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_2>
+ <s_3>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_3>
+ <s_4>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_4>
+ <s_5>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_5>
+ <s_6>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_6>
+ <s_7>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_7>
+ <s_8>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</s_8>
+ </USE_BASES1>
+ <USE_BASES2>
+ <s_1>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_1>
+ <s_2>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_2>
+ <s_3>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_3>
+ <s_4>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_4>
+ <s_5>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_5>
+ <s_6>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_6>
+ <s_7>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_7>
+ <s_8>yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy</s_8>
+ </USE_BASES2>
+ <USE_BASES3>
+ <s_1 />
+ <s_2 />
+ <s_3 />
+ <s_4 />
+ <s_5 />
+ <s_6 />
+ <s_7 />
+ <s_8 />
+ </USE_BASES3>
+ <WITH_SEQUENCE>
+ <s_1>true</s_1>
+ <s_2>true</s_2>
+ <s_3>true</s_3>
+ <s_4>true</s_4>
+ <s_5>true</s_5>
+ <s_6>true</s_6>
+ <s_7>true</s_7>
+ <s_8>true</s_8>
+ </WITH_SEQUENCE>
+ </LaneSpecificRunParameters>
+ </RunParameters>
+ <Summary version="3">
+ <LaneResultSummary end="0" lane="1">
+ <Cluster deviation="9456" mean="375885" />
+ <AverageFirstCycleIntensity deviation="11" mean="360" />
+ <AverageAlignmentScore deviation="4.19" mean="172.04" />
+ <ClusterPF deviation="8651" mean="321428" />
+ <PercentPassFilterClusters deviation="1.35" mean="85.52" />
+ <PercentPassFilterAlign deviation="0.72" mean="79.17" />
+ <LaneYield>2931430</LaneYield>
+ <PercentErrorRate deviation="0.54" mean="4.21" />
+ <PercentIntensityAfter20Cycles deviation="3.82" mean="85.72" />
+ </LaneResultSummary>
+ <LaneResultSummary end="0" lane="2">
+ <Cluster deviation="10241" mean="443170" />
+ <AverageFirstCycleIntensity deviation="5" mean="381" />
+ <AverageAlignmentScore deviation="1.4" mean="171.98" />
+ <ClusterPF deviation="8335" mean="362709" />
+ <PercentPassFilterClusters deviation="0.8" mean="81.85" />
+ <PercentPassFilterAlign deviation="0.23" mean="79.73" />
+ <LaneYield>3307914</LaneYield>
+ <PercentErrorRate deviation="0.14" mean="4.13" />
+ <PercentIntensityAfter20Cycles deviation="3.26" mean="85.89" />
+ </LaneResultSummary>
+ <LaneResultSummary end="0" lane="3">
+ <Cluster deviation="12487" mean="414053" />
+ <AverageFirstCycleIntensity deviation="7" mean="391" />
+ <AverageAlignmentScore deviation="0.79" mean="186.83" />
+ <ClusterPF deviation="9494" mean="350824" />
+ <PercentPassFilterClusters deviation="0.42" mean="84.74" />
+ <PercentPassFilterAlign deviation="0.19" mean="79.22" />
+ <LaneYield>3199519</LaneYield>
+ <PercentErrorRate deviation="0.12" mean="4.09" />
+ <PercentIntensityAfter20Cycles deviation="1.43" mean="87.39" />
+ </LaneResultSummary>
+ <LaneResultSummary end="0" lane="4">
+ <Cluster deviation="7195" mean="449298" />
+ <AverageFirstCycleIntensity deviation="5" mean="369" />
+ <AverageAlignmentScore deviation="1.02" mean="187.26" />
+ <ClusterPF deviation="6100" mean="364192" />
+ <PercentPassFilterClusters deviation="0.27" mean="81.06" />
+ <PercentPassFilterAlign deviation="0.27" mean="79.6" />
+ <LaneYield>3321433</LaneYield>
+ <PercentErrorRate deviation="0.14" mean="4.07" />
+ <PercentIntensityAfter20Cycles deviation="1.68" mean="86.34" />
+ </LaneResultSummary>
+ <LaneResultSummary end="0" lane="5">
+ <Cluster deviation="15946" mean="310619" />
+ <AverageFirstCycleIntensity deviation="4" mean="362" />
+ <ClusterPF deviation="13858" mean="277584" />
+ <PercentPassFilterClusters deviation="0.25" mean="89.37" />
+ <LaneYield>2531573</LaneYield>
+ <PercentIntensityAfter20Cycles deviation="1.12" mean="90.35" />
+ </LaneResultSummary>
+ <LaneResultSummary end="0" lane="6">
+ <Cluster deviation="11650" mean="338882" />
+ <AverageFirstCycleIntensity deviation="5" mean="352" />
+ <AverageAlignmentScore deviation="0.52" mean="169.04" />
+ <ClusterPF deviation="9492" mean="299688" />
+ <PercentPassFilterClusters deviation="0.41" mean="88.44" />
+ <PercentPassFilterAlign deviation="0.09" mean="79.95" />
+ <LaneYield>2733160</LaneYield>
+ <PercentErrorRate deviation="0.06" mean="3.96" />
+ <PercentIntensityAfter20Cycles deviation="1.23" mean="87.56" />
+ </LaneResultSummary>
+ <LaneResultSummary end="0" lane="7">
+ <Cluster deviation="8411" mean="332799" />
+ <AverageFirstCycleIntensity deviation="4" mean="336" />
+ <AverageAlignmentScore deviation="0.49" mean="170.86" />
+ <ClusterPF deviation="7225" mean="294318" />
+ <PercentPassFilterClusters deviation="0.38" mean="88.44" />
+ <PercentPassFilterAlign deviation="0.08" mean="79.95" />
+ <LaneYield>2684182</LaneYield>
+ <PercentErrorRate deviation="0.05" mean="3.76" />
+ <PercentIntensityAfter20Cycles deviation="1.61" mean="88.56" />
+ </LaneResultSummary>
+ <LaneResultSummary end="0" lane="8">
+ <Cluster deviation="18049" mean="315442" />
+ <AverageFirstCycleIntensity deviation="40" mean="360" />
+ <AverageAlignmentScore deviation="0.83" mean="173.29" />
+ <ClusterPF deviation="14655" mean="277736" />
+ <PercentPassFilterClusters deviation="0.6" mean="88.08" />
+ <PercentPassFilterAlign deviation="0.12" mean="80.04" />
+ <LaneYield>2532958</LaneYield>
+ <PercentErrorRate deviation="0.05" mean="3.99" />
+ <PercentIntensityAfter20Cycles deviation="1.97" mean="88.94" />
+ </LaneResultSummary>
+ </Summary>
+ <ElandCollection version="3">
+ <ElandLane end="0" id="1" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>1</LaneID>
+ <End>1</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2144998" />
+ <MappedItem name="hg18/chr17.fa" value="2282473" />
+ <MappedItem name="hg18/chr15_random.fa" value="64" />
+ <MappedItem name="hg18/chrX_random.fa" value="848" />
+ <MappedItem name="hg18/chr5_random.fa" value="54" />
+ <MappedItem name="hg18/chr11.fa" value="2579028" />
+ <MappedItem name="hg18/chr10_random.fa" value="17" />
+ <MappedItem name="hg18/chrM.fa" value="1632811" />
+ <MappedItem name="hg18/chr18.fa" value="288304" />
+ <MappedItem name="OBF5.fa" value="126600" />
+ <MappedItem name="EPR-1.fa" value="22" />
+ <MappedItem name="Apetala2.fa" value="237" />
+ <MappedItem name="hg18/chr13.fa" value="385426" />
+ <MappedItem name="hg18/chr21.fa" value="304526" />
+ <MappedItem name="hg18/chr13_random.fa" value="299" />
+ <MappedItem name="hg18/chr21_random.fa" value="157701" />
+ <MappedItem name="hg18/chr4.fa" value="789848" />
+ <MappedItem name="hg18/chr6_random.fa" value="71" />
+ <MappedItem name="hg18/chr22_random.fa" value="4" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="44" />
+ <MappedItem name="hg18/chr16_random.fa" value="124" />
+ <MappedItem name="hg18/chr17_random.fa" value="6029" />
+ <MappedItem name="hg18/chr8_random.fa" value="28" />
+ <MappedItem name="hg18/chr9.fa" value="1202341" />
+ <MappedItem name="hg18/chr2.fa" value="2207171" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="5330" />
+ <MappedItem name="hg18/chr19.fa" value="2076941" />
+ <MappedItem name="hg18/chr4_random.fa" value="868" />
+ <MappedItem name="hg18/chr7_random.fa" value="1540" />
+ <MappedItem name="hg18/chr22.fa" value="845743" />
+ <MappedItem name="hg18/chr12.fa" value="1895645" />
+ <MappedItem name="hg18/chr8.fa" value="787255" />
+ <MappedItem name="hg18/chr10.fa" value="1056903" />
+ <MappedItem name="hg18/chrY.fa" value="56373" />
+ <MappedItem name="hg18/chr15.fa" value="1591636" />
+ <MappedItem name="hg18/chr18_random.fa" value="3" />
+ <MappedItem name="hg18/chr16.fa" value="1249046" />
+ <MappedItem name="hg18/chr2_random.fa" value="69" />
+ <MappedItem name="hg18/chr14.fa" value="772454" />
+ <MappedItem name="newcontam_UK.fa" value="258" />
+ <MappedItem name="hg18/chr19_random.fa" value="2" />
+ <MappedItem name="hg18/chr1.fa" value="2884469" />
+ <MappedItem name="BAC_plus_vector.fa" value="557" />
+ <MappedItem name="hg18/chrX.fa" value="1100712" />
+ <MappedItem name="hg18/chr3.fa" value="1461804" />
+ <MappedItem name="hg18/chr20.fa" value="711043" />
+ <MappedItem name="VATG.fa" value="5348" />
+ <MappedItem name="hg18/chr1_random.fa" value="8542" />
+ <MappedItem name="hg18/chr6.fa" value="1241595" />
+ <MappedItem name="hg18/chr3_random.fa" value="1" />
+ <MappedItem name="hg18/chr9_random.fa" value="47" />
+ <MappedItem name="hg18/chr5.fa" value="1523047" />
+ <MappedItem name="AGP.fa" value="20" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="420832" />
+ <Code name="R0" value="20400195" />
+ <Code name="NM" value="3347706" />
+ <Code name="R2" value="12657996" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="6634022" />
+ <Code name="U0" value="20310214" />
+ <Code name="R1" value="17742317" />
+ <Code name="U2" value="12729084" />
+ </MatchCodes>
+ <Reads>45106262</Reads>
+ </ElandLane>
+ <ElandLane end="0" id="2" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>2</LaneID>
+ <End>1</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2490766" />
+ <MappedItem name="hg18/chr17.fa" value="2626254" />
+ <MappedItem name="hg18/chr15_random.fa" value="87" />
+ <MappedItem name="Apetala2.fa" value="260" />
+ <MappedItem name="hg18/chr5_random.fa" value="66" />
+ <MappedItem name="hg18/chr11.fa" value="2963806" />
+ <MappedItem name="hg18/chr10_random.fa" value="21" />
+ <MappedItem name="hg18/chrM.fa" value="1977596" />
+ <MappedItem name="hg18/chr18.fa" value="340872" />
+ <MappedItem name="OBF5.fa" value="154871" />
+ <MappedItem name="EPR-1.fa" value="21" />
+ <MappedItem name="hg18/chr13.fa" value="453542" />
+ <MappedItem name="hg18/chr21.fa" value="353325" />
+ <MappedItem name="hg18/chr13_random.fa" value="366" />
+ <MappedItem name="hg18/chr21_random.fa" value="177857" />
+ <MappedItem name="hg18/chr4.fa" value="934151" />
+ <MappedItem name="hg18/chr6_random.fa" value="90" />
+ <MappedItem name="hg18/chr22_random.fa" value="5" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="48" />
+ <MappedItem name="hg18/chr16_random.fa" value="157" />
+ <MappedItem name="hg18/chr17_random.fa" value="6944" />
+ <MappedItem name="hg18/chr8_random.fa" value="30" />
+ <MappedItem name="hg18/chr9.fa" value="1397554" />
+ <MappedItem name="hg18/chr2.fa" value="2599065" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="6079" />
+ <MappedItem name="hg18/chr19.fa" value="2335026" />
+ <MappedItem name="hg18/chr4_random.fa" value="978" />
+ <MappedItem name="hg18/chr7_random.fa" value="1929" />
+ <MappedItem name="hg18/chr22.fa" value="967647" />
+ <MappedItem name="hg18/chr12.fa" value="2230354" />
+ <MappedItem name="hg18/chr8.fa" value="916654" />
+ <MappedItem name="hg18/chr10.fa" value="1238852" />
+ <MappedItem name="hg18/chrY.fa" value="66090" />
+ <MappedItem name="hg18/chr15.fa" value="1871738" />
+ <MappedItem name="hg18/chr18_random.fa" value="2" />
+ <MappedItem name="hg18/chrX_random.fa" value="819" />
+ <MappedItem name="hg18/chr16.fa" value="1422614" />
+ <MappedItem name="hg18/chr2_random.fa" value="95" />
+ <MappedItem name="hg18/chr14.fa" value="908527" />
+ <MappedItem name="newcontam_UK.fa" value="340" />
+ <MappedItem name="hg18/chr19_random.fa" value="1" />
+ <MappedItem name="hg18/chr1.fa" value="3373415" />
+ <MappedItem name="BAC_plus_vector.fa" value="619" />
+ <MappedItem name="hg18/chrX.fa" value="1282347" />
+ <MappedItem name="hg18/chr3.fa" value="1723955" />
+ <MappedItem name="hg18/chr20.fa" value="821603" />
+ <MappedItem name="VATG.fa" value="6627" />
+ <MappedItem name="hg18/chr1_random.fa" value="10133" />
+ <MappedItem name="hg18/chr6.fa" value="1464444" />
+ <MappedItem name="hg18/chr3_random.fa" value="8" />
+ <MappedItem name="hg18/chr9_random.fa" value="62" />
+ <MappedItem name="hg18/chr5.fa" value="1790026" />
+ <MappedItem name="AGP.fa" value="22" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="493490" />
+ <Code name="R0" value="24056939" />
+ <Code name="NM" value="4718493" />
+ <Code name="R2" value="14654787" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="6553571" />
+ <Code name="U0" value="24692585" />
+ <Code name="R1" value="20347000" />
+ <Code name="U2" value="14765020" />
+ </MatchCodes>
+ <Reads>53180501</Reads>
+ </ElandLane>
+ <ElandLane end="0" id="3" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>3</LaneID>
+ <End>1</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2148554" />
+ <MappedItem name="hg18/chr17.fa" value="3229684" />
+ <MappedItem name="hg18/chr15_random.fa" value="195" />
+ <MappedItem name="Apetala2.fa" value="296" />
+ <MappedItem name="hg18/chr5_random.fa" value="87" />
+ <MappedItem name="hg18/chr11.fa" value="2717694" />
+ <MappedItem name="hg18/chr10_random.fa" value="28" />
+ <MappedItem name="hg18/chrM.fa" value="2065569" />
+ <MappedItem name="hg18/chr18.fa" value="271783" />
+ <MappedItem name="OBF5.fa" value="185107" />
+ <MappedItem name="EPR-1.fa" value="38" />
+ <MappedItem name="hg18/chr13.fa" value="465309" />
+ <MappedItem name="hg18/chr21.fa" value="376480" />
+ <MappedItem name="hg18/chr13_random.fa" value="311" />
+ <MappedItem name="hg18/chr21_random.fa" value="96692" />
+ <MappedItem name="hg18/chr4.fa" value="835511" />
+ <MappedItem name="hg18/chr6_random.fa" value="143" />
+ <MappedItem name="hg18/chr22_random.fa" value="13" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="80" />
+ <MappedItem name="hg18/chr16_random.fa" value="14" />
+ <MappedItem name="hg18/chr17_random.fa" value="5926" />
+ <MappedItem name="hg18/chr8_random.fa" value="73" />
+ <MappedItem name="hg18/chr9.fa" value="1331179" />
+ <MappedItem name="hg18/chr2.fa" value="3177572" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="9123" />
+ <MappedItem name="hg18/chr19.fa" value="1866912" />
+ <MappedItem name="hg18/chr4_random.fa" value="1431" />
+ <MappedItem name="hg18/chr7_random.fa" value="1892" />
+ <MappedItem name="hg18/chr22.fa" value="873427" />
+ <MappedItem name="hg18/chr12.fa" value="1747890" />
+ <MappedItem name="hg18/chr8.fa" value="790547" />
+ <MappedItem name="hg18/chr10.fa" value="1135596" />
+ <MappedItem name="hg18/chrY.fa" value="53043" />
+ <MappedItem name="hg18/chr15.fa" value="1704624" />
+ <MappedItem name="hg18/chrX_random.fa" value="750" />
+ <MappedItem name="hg18/chr16.fa" value="1269664" />
+ <MappedItem name="hg18/chr2_random.fa" value="119" />
+ <MappedItem name="hg18/chr14.fa" value="928142" />
+ <MappedItem name="newcontam_UK.fa" value="279" />
+ <MappedItem name="hg18/chr1.fa" value="3091317" />
+ <MappedItem name="BAC_plus_vector.fa" value="440" />
+ <MappedItem name="hg18/chrX.fa" value="946084" />
+ <MappedItem name="hg18/chr3.fa" value="1561501" />
+ <MappedItem name="hg18/chr20.fa" value="684339" />
+ <MappedItem name="VATG.fa" value="7246" />
+ <MappedItem name="hg18/chr1_random.fa" value="5726" />
+ <MappedItem name="hg18/chr6.fa" value="1353514" />
+ <MappedItem name="hg18/chr3_random.fa" value="13" />
+ <MappedItem name="hg18/chr9_random.fa" value="80" />
+ <MappedItem name="hg18/chr5.fa" value="1755943" />
+ <MappedItem name="AGP.fa" value="28" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="488929" />
+ <Code name="R0" value="25866152" />
+ <Code name="NM" value="3646056" />
+ <Code name="R2" value="14198684" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="5813602" />
+ <Code name="U0" value="25215115" />
+ <Code name="R1" value="17490328" />
+ <Code name="U2" value="12882995" />
+ </MatchCodes>
+ <Reads>49686417</Reads>
+ </ElandLane>
+ <ElandLane end="0" id="4" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>4</LaneID>
+ <End>1</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2297270" />
+ <MappedItem name="hg18/chr17.fa" value="3368936" />
+ <MappedItem name="hg18/chr15_random.fa" value="230" />
+ <MappedItem name="Apetala2.fa" value="350" />
+ <MappedItem name="hg18/chr5_random.fa" value="99" />
+ <MappedItem name="hg18/chr11.fa" value="2867086" />
+ <MappedItem name="hg18/chr10_random.fa" value="23" />
+ <MappedItem name="hg18/chrM.fa" value="2279052" />
+ <MappedItem name="hg18/chr18.fa" value="296510" />
+ <MappedItem name="OBF5.fa" value="208722" />
+ <MappedItem name="EPR-1.fa" value="47" />
+ <MappedItem name="hg18/chr13.fa" value="503812" />
+ <MappedItem name="hg18/chr21.fa" value="399365" />
+ <MappedItem name="hg18/chr13_random.fa" value="362" />
+ <MappedItem name="hg18/chr21_random.fa" value="100820" />
+ <MappedItem name="hg18/chr4.fa" value="909552" />
+ <MappedItem name="hg18/chr6_random.fa" value="151" />
+ <MappedItem name="hg18/chr22_random.fa" value="19" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="66" />
+ <MappedItem name="hg18/chr16_random.fa" value="32" />
+ <MappedItem name="hg18/chr17_random.fa" value="6055" />
+ <MappedItem name="hg18/chr8_random.fa" value="38" />
+ <MappedItem name="hg18/chr9.fa" value="1417257" />
+ <MappedItem name="hg18/chr2.fa" value="3444039" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="9530" />
+ <MappedItem name="hg18/chr19.fa" value="1926570" />
+ <MappedItem name="hg18/chr4_random.fa" value="1543" />
+ <MappedItem name="hg18/chr7_random.fa" value="2070" />
+ <MappedItem name="hg18/chr22.fa" value="920396" />
+ <MappedItem name="hg18/chr12.fa" value="1883489" />
+ <MappedItem name="hg18/chr8.fa" value="845755" />
+ <MappedItem name="hg18/chr10.fa" value="1229737" />
+ <MappedItem name="hg18/chrY.fa" value="56562" />
+ <MappedItem name="hg18/chr15.fa" value="1839653" />
+ <MappedItem name="hg18/chrX_random.fa" value="789" />
+ <MappedItem name="hg18/chr16.fa" value="1331008" />
+ <MappedItem name="hg18/chr2_random.fa" value="109" />
+ <MappedItem name="hg18/chr14.fa" value="995608" />
+ <MappedItem name="newcontam_UK.fa" value="355" />
+ <MappedItem name="hg18/chr19_random.fa" value="3" />
+ <MappedItem name="hg18/chr1.fa" value="3316111" />
+ <MappedItem name="BAC_plus_vector.fa" value="459" />
+ <MappedItem name="hg18/chrX.fa" value="1015280" />
+ <MappedItem name="hg18/chr3.fa" value="1692924" />
+ <MappedItem name="hg18/chr20.fa" value="728843" />
+ <MappedItem name="VATG.fa" value="8223" />
+ <MappedItem name="hg18/chr1_random.fa" value="6043" />
+ <MappedItem name="hg18/chr6.fa" value="1465061" />
+ <MappedItem name="hg18/chr3_random.fa" value="14" />
+ <MappedItem name="hg18/chr9_random.fa" value="103" />
+ <MappedItem name="hg18/chr5.fa" value="1902275" />
+ <MappedItem name="AGP.fa" value="21" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="511423" />
+ <Code name="R0" value="27569590" />
+ <Code name="NM" value="4800271" />
+ <Code name="R2" value="15993234" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="6315510" />
+ <Code name="U0" value="26283390" />
+ <Code name="R1" value="19317860" />
+ <Code name="U2" value="14213378" />
+ </MatchCodes>
+ <Reads>53915808</Reads>
+ </ElandLane>
+ <SequenceLane end="0" id="5" version="1">
+ <SampleName>s</SampleName>
+ <LaneID>5</LaneID>
+ <End>1</End>
+ <Reads>33310172</Reads>
+ <SequenceType>FASTQ</SequenceType>
+ </SequenceLane>
+ <ElandLane end="0" id="6" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>6</LaneID>
+ <End>1</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="1120453" />
+ <MappedItem name="hg18/chr17.fa" value="2581100" />
+ <MappedItem name="hg18/chr15_random.fa" value="362" />
+ <MappedItem name="Apetala2.fa" value="725" />
+ <MappedItem name="hg18/chr5_random.fa" value="67" />
+ <MappedItem name="hg18/chr11.fa" value="1795101" />
+ <MappedItem name="hg18/chr10_random.fa" value="32" />
+ <MappedItem name="hg18/chr20.fa" value="860142" />
+ <MappedItem name="hg18/chr18.fa" value="227000" />
+ <MappedItem name="OBF5.fa" value="270702" />
+ <MappedItem name="EPR-1.fa" value="91" />
+ <MappedItem name="hg18/chr11_random.fa" value="6" />
+ <MappedItem name="hg18/chr13.fa" value="302913" />
+ <MappedItem name="hg18/chr21.fa" value="224996" />
+ <MappedItem name="hg18/chr13_random.fa" value="400" />
+ <MappedItem name="hg18/chr21_random.fa" value="39959" />
+ <MappedItem name="hg18/chr4.fa" value="659993" />
+ <MappedItem name="hg18/chr6_random.fa" value="146" />
+ <MappedItem name="hg18/chr22_random.fa" value="2" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="260" />
+ <MappedItem name="hg18/chr16_random.fa" value="398" />
+ <MappedItem name="hg18/chr17_random.fa" value="4396" />
+ <MappedItem name="hg18/chr8_random.fa" value="209" />
+ <MappedItem name="hg18/chr9.fa" value="1117173" />
+ <MappedItem name="hg18/chr2.fa" value="1466793" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="32760" />
+ <MappedItem name="hg18/chr19.fa" value="2728003" />
+ <MappedItem name="hg18/chr4_random.fa" value="889" />
+ <MappedItem name="hg18/chr7_random.fa" value="1010" />
+ <MappedItem name="hg18/chr22.fa" value="847241" />
+ <MappedItem name="hg18/chr12.fa" value="1886658" />
+ <MappedItem name="hg18/chr8.fa" value="1185265" />
+ <MappedItem name="hg18/chr10.fa" value="922363" />
+ <MappedItem name="hg18/chrY.fa" value="25307" />
+ <MappedItem name="hg18/chr15.fa" value="714342" />
+ <MappedItem name="hg18/chr18_random.fa" value="4" />
+ <MappedItem name="hg18/chrX_random.fa" value="588" />
+ <MappedItem name="hg18/chr16.fa" value="1484709" />
+ <MappedItem name="hg18/chr2_random.fa" value="227" />
+ <MappedItem name="hg18/chr14.fa" value="715834" />
+ <MappedItem name="newcontam_UK.fa" value="215" />
+ <MappedItem name="hg18/chr1.fa" value="2689713" />
+ <MappedItem name="BAC_plus_vector.fa" value="706" />
+ <MappedItem name="hg18/chrX.fa" value="864197" />
+ <MappedItem name="hg18/chr3.fa" value="1313052" />
+ <MappedItem name="hg18/chrM.fa" value="2107856" />
+ <MappedItem name="VATG.fa" value="10701" />
+ <MappedItem name="hg18/chr1_random.fa" value="12956" />
+ <MappedItem name="hg18/chr6.fa" value="1593145" />
+ <MappedItem name="hg18/chr3_random.fa" value="6" />
+ <MappedItem name="hg18/chr9_random.fa" value="143" />
+ <MappedItem name="hg18/chr5.fa" value="1072750" />
+ <MappedItem name="AGP.fa" value="55" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="411428" />
+ <Code name="R0" value="29376842" />
+ <Code name="NM" value="2015480" />
+ <Code name="R2" value="19845909" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="5158347" />
+ <Code name="U0" value="20507860" />
+ <Code name="R1" value="25860301" />
+ <Code name="U2" value="10463797" />
+ </MatchCodes>
+ <Reads>40665866</Reads>
+ </ElandLane>
+ <ElandLane end="0" id="7" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>7</LaneID>
+ <End>1</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="1081292" />
+ <MappedItem name="hg18/chr17.fa" value="2454513" />
+ <MappedItem name="hg18/chr15_random.fa" value="270" />
+ <MappedItem name="hg18/chrX_random.fa" value="535" />
+ <MappedItem name="hg18/chr5_random.fa" value="54" />
+ <MappedItem name="hg18/chr11.fa" value="1766515" />
+ <MappedItem name="hg18/chr10_random.fa" value="73" />
+ <MappedItem name="hg18/chrM.fa" value="2433937" />
+ <MappedItem name="hg18/chr18.fa" value="242430" />
+ <MappedItem name="OBF5.fa" value="138879" />
+ <MappedItem name="EPR-1.fa" value="28" />
+ <MappedItem name="hg18/chr11_random.fa" value="4" />
+ <MappedItem name="Apetala2.fa" value="309" />
+ <MappedItem name="hg18/chr13.fa" value="310301" />
+ <MappedItem name="hg18/chr21.fa" value="215758" />
+ <MappedItem name="hg18/chr13_random.fa" value="432" />
+ <MappedItem name="hg18/chr21_random.fa" value="33258" />
+ <MappedItem name="hg18/chr4.fa" value="695628" />
+ <MappedItem name="hg18/chr6_random.fa" value="164" />
+ <MappedItem name="hg18/chr22_random.fa" value="14" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="107" />
+ <MappedItem name="hg18/chr16_random.fa" value="465" />
+ <MappedItem name="hg18/chr17_random.fa" value="4439" />
+ <MappedItem name="hg18/chr8_random.fa" value="181" />
+ <MappedItem name="hg18/chr9.fa" value="1050733" />
+ <MappedItem name="hg18/chr2.fa" value="1501641" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="10387" />
+ <MappedItem name="hg18/chr19.fa" value="2462452" />
+ <MappedItem name="hg18/chr4_random.fa" value="850" />
+ <MappedItem name="hg18/chr7_random.fa" value="1442" />
+ <MappedItem name="hg18/chr22.fa" value="811982" />
+ <MappedItem name="hg18/chr12.fa" value="1819548" />
+ <MappedItem name="hg18/chr8.fa" value="1119851" />
+ <MappedItem name="hg18/chr10.fa" value="957467" />
+ <MappedItem name="hg18/chrY.fa" value="20986" />
+ <MappedItem name="hg18/chr15.fa" value="696558" />
+ <MappedItem name="hg18/chr18_random.fa" value="3" />
+ <MappedItem name="hg18/chr16.fa" value="1415828" />
+ <MappedItem name="hg18/chr2_random.fa" value="180" />
+ <MappedItem name="hg18/chr14.fa" value="719275" />
+ <MappedItem name="newcontam_UK.fa" value="64" />
+ <MappedItem name="hg18/chr1.fa" value="2720787" />
+ <MappedItem name="BAC_plus_vector.fa" value="572" />
+ <MappedItem name="hg18/chrX.fa" value="843558" />
+ <MappedItem name="hg18/chr3.fa" value="1301623" />
+ <MappedItem name="hg18/chr20.fa" value="829176" />
+ <MappedItem name="VATG.fa" value="4851" />
+ <MappedItem name="hg18/chr1_random.fa" value="20384" />
+ <MappedItem name="hg18/chr6.fa" value="1577196" />
+ <MappedItem name="hg18/chr3_random.fa" value="8" />
+ <MappedItem name="hg18/chr9_random.fa" value="153" />
+ <MappedItem name="hg18/chr5.fa" value="1077823" />
+ <MappedItem name="AGP.fa" value="26" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="398401" />
+ <Code name="R0" value="33075102" />
+ <Code name="NM" value="1884694" />
+ <Code name="R2" value="21796739" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="4944553" />
+ <Code name="U0" value="20761715" />
+ <Code name="R1" value="28121693" />
+ <Code name="U2" value="9892023" />
+ </MatchCodes>
+ <Reads>39935997</Reads>
+ </ElandLane>
+ <ElandLane end="0" id="8" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>8</LaneID>
+ <End>1</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="1353762" />
+ <MappedItem name="hg18/chr17.fa" value="2335334" />
+ <MappedItem name="hg18/chr22_random.fa" value="48" />
+ <MappedItem name="hg18/chrX_random.fa" value="711" />
+ <MappedItem name="hg18/chr5_random.fa" value="129" />
+ <MappedItem name="hg18/chr11.fa" value="1740419" />
+ <MappedItem name="hg18/chr10_random.fa" value="7" />
+ <MappedItem name="hg18/chrM.fa" value="1915882" />
+ <MappedItem name="hg18/chr18.fa" value="169596" />
+ <MappedItem name="OBF5.fa" value="125644" />
+ <MappedItem name="EPR-1.fa" value="28" />
+ <MappedItem name="hg18/chr11_random.fa" value="1" />
+ <MappedItem name="Apetala2.fa" value="303" />
+ <MappedItem name="hg18/chr13.fa" value="248276" />
+ <MappedItem name="hg18/chr21.fa" value="315746" />
+ <MappedItem name="hg18/chr13_random.fa" value="403" />
+ <MappedItem name="hg18/chr21_random.fa" value="50862" />
+ <MappedItem name="hg18/chr4.fa" value="496918" />
+ <MappedItem name="hg18/chr6_random.fa" value="47" />
+ <MappedItem name="hg18/chr4_random.fa" value="1107" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="140" />
+ <MappedItem name="hg18/chr16_random.fa" value="632" />
+ <MappedItem name="hg18/chr17_random.fa" value="4303" />
+ <MappedItem name="hg18/chr8_random.fa" value="137" />
+ <MappedItem name="hg18/chr9.fa" value="1357817" />
+ <MappedItem name="hg18/chr2.fa" value="1405717" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="12343" />
+ <MappedItem name="hg18/chr19.fa" value="2177360" />
+ <MappedItem name="hg18/chr15_random.fa" value="339" />
+ <MappedItem name="hg18/chr7_random.fa" value="1067" />
+ <MappedItem name="hg18/chr22.fa" value="573666" />
+ <MappedItem name="hg18/chr12.fa" value="1952654" />
+ <MappedItem name="hg18/chr8.fa" value="998240" />
+ <MappedItem name="hg18/chr10.fa" value="759145" />
+ <MappedItem name="hg18/chrY.fa" value="31522" />
+ <MappedItem name="hg18/chr15.fa" value="632544" />
+ <MappedItem name="hg18/chr16.fa" value="1574152" />
+ <MappedItem name="hg18/chr2_random.fa" value="145" />
+ <MappedItem name="hg18/chr14.fa" value="886259" />
+ <MappedItem name="newcontam_UK.fa" value="92" />
+ <MappedItem name="hg18/chr19_random.fa" value="1" />
+ <MappedItem name="hg18/chr1.fa" value="2320965" />
+ <MappedItem name="BAC_plus_vector.fa" value="579" />
+ <MappedItem name="hg18/chrX.fa" value="1046889" />
+ <MappedItem name="hg18/chr3.fa" value="1298647" />
+ <MappedItem name="hg18/chr20.fa" value="1047930" />
+ <MappedItem name="VATG.fa" value="4892" />
+ <MappedItem name="hg18/chr1_random.fa" value="5435" />
+ <MappedItem name="hg18/chr6.fa" value="906640" />
+ <MappedItem name="hg18/chr3_random.fa" value="2" />
+ <MappedItem name="hg18/chr9_random.fa" value="399" />
+ <MappedItem name="hg18/chr5.fa" value="1009359" />
+ <MappedItem name="AGP.fa" value="35" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="384264" />
+ <Code name="R0" value="22432765" />
+ <Code name="NM" value="2047898" />
+ <Code name="R2" value="12821609" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="4371027" />
+ <Code name="U0" value="19760560" />
+ <Code name="R1" value="17023340" />
+ <Code name="U2" value="9509461" />
+ </MatchCodes>
+ <Reads>37853044</Reads>
+ </ElandLane>
+ <ElandLane end="1" id="1" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>1</LaneID>
+ <End>2</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2126607" />
+ <MappedItem name="hg18/chr17.fa" value="2253692" />
+ <MappedItem name="hg18/chr15_random.fa" value="61" />
+ <MappedItem name="hg18/chrX_random.fa" value="822" />
+ <MappedItem name="hg18/chr5_random.fa" value="50" />
+ <MappedItem name="hg18/chr11.fa" value="2547492" />
+ <MappedItem name="hg18/chr10_random.fa" value="19" />
+ <MappedItem name="hg18/chrM.fa" value="1629323" />
+ <MappedItem name="hg18/chr18.fa" value="286348" />
+ <MappedItem name="OBF5.fa" value="126737" />
+ <MappedItem name="EPR-1.fa" value="22" />
+ <MappedItem name="Apetala2.fa" value="239" />
+ <MappedItem name="hg18/chr13.fa" value="383402" />
+ <MappedItem name="hg18/chr21.fa" value="301583" />
+ <MappedItem name="hg18/chr13_random.fa" value="299" />
+ <MappedItem name="hg18/chr21_random.fa" value="156063" />
+ <MappedItem name="hg18/chr4.fa" value="784025" />
+ <MappedItem name="hg18/chr6_random.fa" value="78" />
+ <MappedItem name="hg18/chr22_random.fa" value="5" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="43" />
+ <MappedItem name="hg18/chr16_random.fa" value="119" />
+ <MappedItem name="hg18/chr17_random.fa" value="5906" />
+ <MappedItem name="hg18/chr8_random.fa" value="28" />
+ <MappedItem name="hg18/chr9.fa" value="1192809" />
+ <MappedItem name="hg18/chr2.fa" value="2186476" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="5240" />
+ <MappedItem name="hg18/chr19.fa" value="2042730" />
+ <MappedItem name="hg18/chr4_random.fa" value="841" />
+ <MappedItem name="hg18/chr7_random.fa" value="1565" />
+ <MappedItem name="hg18/chr22.fa" value="835246" />
+ <MappedItem name="hg18/chr12.fa" value="1882091" />
+ <MappedItem name="hg18/chr8.fa" value="780595" />
+ <MappedItem name="hg18/chr10.fa" value="1049100" />
+ <MappedItem name="hg18/chrY.fa" value="56311" />
+ <MappedItem name="hg18/chr15.fa" value="1577762" />
+ <MappedItem name="hg18/chr18_random.fa" value="2" />
+ <MappedItem name="hg18/chr16.fa" value="1231679" />
+ <MappedItem name="hg18/chr2_random.fa" value="66" />
+ <MappedItem name="hg18/chr14.fa" value="767069" />
+ <MappedItem name="newcontam_UK.fa" value="169" />
+ <MappedItem name="hg18/chr19_random.fa" value="1" />
+ <MappedItem name="hg18/chr1.fa" value="2860480" />
+ <MappedItem name="BAC_plus_vector.fa" value="551" />
+ <MappedItem name="hg18/chrX.fa" value="1091291" />
+ <MappedItem name="hg18/chr3.fa" value="1451771" />
+ <MappedItem name="hg18/chr20.fa" value="703378" />
+ <MappedItem name="VATG.fa" value="5371" />
+ <MappedItem name="hg18/chr1_random.fa" value="8523" />
+ <MappedItem name="hg18/chr6.fa" value="1232708" />
+ <MappedItem name="hg18/chr3_random.fa" value="2" />
+ <MappedItem name="hg18/chr9_random.fa" value="44" />
+ <MappedItem name="hg18/chr5.fa" value="1513569" />
+ <MappedItem name="AGP.fa" value="22" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="449271" />
+ <Code name="R0" value="20013156" />
+ <Code name="NM" value="3726089" />
+ <Code name="R2" value="13101096" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="7295280" />
+ <Code name="U0" value="18087623" />
+ <Code name="R1" value="18078115" />
+ <Code name="U2" value="13874787" />
+ </MatchCodes>
+ <Reads>45106262</Reads>
+ </ElandLane>
+ <ElandLane end="1" id="2" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>2</LaneID>
+ <End>2</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2483459" />
+ <MappedItem name="hg18/chr17.fa" value="2606321" />
+ <MappedItem name="hg18/chr15_random.fa" value="88" />
+ <MappedItem name="Apetala2.fa" value="273" />
+ <MappedItem name="hg18/chr5_random.fa" value="69" />
+ <MappedItem name="hg18/chr11.fa" value="2944847" />
+ <MappedItem name="hg18/chr10_random.fa" value="21" />
+ <MappedItem name="hg18/chrM.fa" value="1982845" />
+ <MappedItem name="hg18/chr18.fa" value="340561" />
+ <MappedItem name="OBF5.fa" value="155602" />
+ <MappedItem name="EPR-1.fa" value="23" />
+ <MappedItem name="hg18/chr13.fa" value="453545" />
+ <MappedItem name="hg18/chr21.fa" value="351384" />
+ <MappedItem name="hg18/chr13_random.fa" value="378" />
+ <MappedItem name="hg18/chr21_random.fa" value="176612" />
+ <MappedItem name="hg18/chr4.fa" value="932629" />
+ <MappedItem name="hg18/chr6_random.fa" value="89" />
+ <MappedItem name="hg18/chr22_random.fa" value="7" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="50" />
+ <MappedItem name="hg18/chr16_random.fa" value="153" />
+ <MappedItem name="hg18/chr17_random.fa" value="6782" />
+ <MappedItem name="hg18/chr8_random.fa" value="35" />
+ <MappedItem name="hg18/chr9.fa" value="1393967" />
+ <MappedItem name="hg18/chr2.fa" value="2589626" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="6030" />
+ <MappedItem name="hg18/chr19.fa" value="2311803" />
+ <MappedItem name="hg18/chr4_random.fa" value="974" />
+ <MappedItem name="hg18/chr7_random.fa" value="1950" />
+ <MappedItem name="hg18/chr22.fa" value="961665" />
+ <MappedItem name="hg18/chr12.fa" value="2228686" />
+ <MappedItem name="hg18/chr8.fa" value="914265" />
+ <MappedItem name="hg18/chr10.fa" value="1236870" />
+ <MappedItem name="hg18/chrY.fa" value="66210" />
+ <MappedItem name="hg18/chr15.fa" value="1865285" />
+ <MappedItem name="hg18/chr18_random.fa" value="2" />
+ <MappedItem name="hg18/chrX_random.fa" value="842" />
+ <MappedItem name="hg18/chr16.fa" value="1411750" />
+ <MappedItem name="hg18/chr2_random.fa" value="96" />
+ <MappedItem name="hg18/chr14.fa" value="906824" />
+ <MappedItem name="newcontam_UK.fa" value="142" />
+ <MappedItem name="hg18/chr19_random.fa" value="2" />
+ <MappedItem name="hg18/chr1.fa" value="3363461" />
+ <MappedItem name="BAC_plus_vector.fa" value="624" />
+ <MappedItem name="hg18/chrX.fa" value="1279200" />
+ <MappedItem name="hg18/chr3.fa" value="1721468" />
+ <MappedItem name="hg18/chr20.fa" value="817155" />
+ <MappedItem name="VATG.fa" value="6670" />
+ <MappedItem name="hg18/chr1_random.fa" value="10058" />
+ <MappedItem name="hg18/chr6.fa" value="1460962" />
+ <MappedItem name="hg18/chr3_random.fa" value="8" />
+ <MappedItem name="hg18/chr9_random.fa" value="64" />
+ <MappedItem name="hg18/chr5.fa" value="1786887" />
+ <MappedItem name="AGP.fa" value="22" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="480598" />
+ <Code name="R0" value="24037041" />
+ <Code name="NM" value="4894717" />
+ <Code name="R2" value="14958119" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="6818769" />
+ <Code name="U0" value="24000735" />
+ <Code name="R1" value="20730819" />
+ <Code name="U2" value="15036542" />
+ </MatchCodes>
+ <Reads>53180501</Reads>
+ </ElandLane>
+ <ElandLane end="1" id="3" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>3</LaneID>
+ <End>2</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2139203" />
+ <MappedItem name="hg18/chr17.fa" value="3201893" />
+ <MappedItem name="hg18/chr15_random.fa" value="202" />
+ <MappedItem name="hg18/chrX_random.fa" value="764" />
+ <MappedItem name="hg18/chr5_random.fa" value="86" />
+ <MappedItem name="hg18/chr11.fa" value="2699541" />
+ <MappedItem name="hg18/chr10_random.fa" value="28" />
+ <MappedItem name="hg18/chrM.fa" value="2067237" />
+ <MappedItem name="hg18/chr18.fa" value="271529" />
+ <MappedItem name="OBF5.fa" value="185573" />
+ <MappedItem name="EPR-1.fa" value="36" />
+ <MappedItem name="Apetala2.fa" value="293" />
+ <MappedItem name="hg18/chr13.fa" value="463815" />
+ <MappedItem name="hg18/chr21.fa" value="373264" />
+ <MappedItem name="hg18/chr13_random.fa" value="319" />
+ <MappedItem name="hg18/chr21_random.fa" value="95757" />
+ <MappedItem name="hg18/chr4.fa" value="832302" />
+ <MappedItem name="hg18/chr6_random.fa" value="142" />
+ <MappedItem name="hg18/chr22_random.fa" value="13" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="85" />
+ <MappedItem name="hg18/chr16_random.fa" value="14" />
+ <MappedItem name="hg18/chr17_random.fa" value="5878" />
+ <MappedItem name="hg18/chr8_random.fa" value="69" />
+ <MappedItem name="hg18/chr9.fa" value="1325480" />
+ <MappedItem name="hg18/chr2.fa" value="3162629" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="9090" />
+ <MappedItem name="hg18/chr19.fa" value="1848319" />
+ <MappedItem name="hg18/chr4_random.fa" value="1420" />
+ <MappedItem name="hg18/chr7_random.fa" value="1873" />
+ <MappedItem name="hg18/chr22.fa" value="867598" />
+ <MappedItem name="hg18/chr12.fa" value="1741596" />
+ <MappedItem name="hg18/chr8.fa" value="786746" />
+ <MappedItem name="hg18/chr10.fa" value="1131840" />
+ <MappedItem name="hg18/chrY.fa" value="53269" />
+ <MappedItem name="hg18/chr15.fa" value="1696119" />
+ <MappedItem name="hg18/chr16.fa" value="1261968" />
+ <MappedItem name="hg18/chr2_random.fa" value="121" />
+ <MappedItem name="hg18/chr14.fa" value="924404" />
+ <MappedItem name="newcontam_UK.fa" value="142" />
+ <MappedItem name="hg18/chr19_random.fa" value="2" />
+ <MappedItem name="hg18/chr1.fa" value="3077467" />
+ <MappedItem name="BAC_plus_vector.fa" value="433" />
+ <MappedItem name="hg18/chrX.fa" value="941349" />
+ <MappedItem name="hg18/chr3.fa" value="1556024" />
+ <MappedItem name="hg18/chr20.fa" value="681063" />
+ <MappedItem name="VATG.fa" value="7323" />
+ <MappedItem name="hg18/chr1_random.fa" value="5712" />
+ <MappedItem name="hg18/chr6.fa" value="1349024" />
+ <MappedItem name="hg18/chr3_random.fa" value="14" />
+ <MappedItem name="hg18/chr9_random.fa" value="82" />
+ <MappedItem name="hg18/chr5.fa" value="1751986" />
+ <MappedItem name="AGP.fa" value="29" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="465003" />
+ <Code name="R0" value="26081396" />
+ <Code name="NM" value="3808973" />
+ <Code name="R2" value="14683585" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="6111504" />
+ <Code name="U0" value="24547920" />
+ <Code name="R1" value="17952764" />
+ <Code name="U2" value="13121000" />
+ </MatchCodes>
+ <Reads>49686417</Reads>
+ </ElandLane>
+ <ElandLane end="1" id="4" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>4</LaneID>
+ <End>2</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="2285395" />
+ <MappedItem name="hg18/chr17.fa" value="3329709" />
+ <MappedItem name="hg18/chr15_random.fa" value="233" />
+ <MappedItem name="Apetala2.fa" value="337" />
+ <MappedItem name="hg18/chr5_random.fa" value="99" />
+ <MappedItem name="hg18/chr11.fa" value="2843439" />
+ <MappedItem name="hg18/chr10_random.fa" value="22" />
+ <MappedItem name="hg18/chrM.fa" value="2279334" />
+ <MappedItem name="hg18/chr18.fa" value="295500" />
+ <MappedItem name="OBF5.fa" value="209416" />
+ <MappedItem name="EPR-1.fa" value="49" />
+ <MappedItem name="hg18/chr13.fa" value="502492" />
+ <MappedItem name="hg18/chr21.fa" value="396049" />
+ <MappedItem name="hg18/chr13_random.fa" value="376" />
+ <MappedItem name="hg18/chr21_random.fa" value="99839" />
+ <MappedItem name="hg18/chr4.fa" value="905741" />
+ <MappedItem name="hg18/chr6_random.fa" value="147" />
+ <MappedItem name="hg18/chr22_random.fa" value="18" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="64" />
+ <MappedItem name="hg18/chr16_random.fa" value="27" />
+ <MappedItem name="hg18/chr17_random.fa" value="6045" />
+ <MappedItem name="hg18/chr8_random.fa" value="42" />
+ <MappedItem name="hg18/chr9.fa" value="1409400" />
+ <MappedItem name="hg18/chr2.fa" value="3422310" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="9455" />
+ <MappedItem name="hg18/chr19.fa" value="1900319" />
+ <MappedItem name="hg18/chr4_random.fa" value="1512" />
+ <MappedItem name="hg18/chr7_random.fa" value="2082" />
+ <MappedItem name="hg18/chr22.fa" value="913051" />
+ <MappedItem name="hg18/chr12.fa" value="1874479" />
+ <MappedItem name="hg18/chr8.fa" value="841644" />
+ <MappedItem name="hg18/chr10.fa" value="1225075" />
+ <MappedItem name="hg18/chrY.fa" value="56438" />
+ <MappedItem name="hg18/chr15.fa" value="1829529" />
+ <MappedItem name="hg18/chrX_random.fa" value="806" />
+ <MappedItem name="hg18/chr16.fa" value="1319456" />
+ <MappedItem name="hg18/chr2_random.fa" value="112" />
+ <MappedItem name="hg18/chr14.fa" value="990085" />
+ <MappedItem name="newcontam_UK.fa" value="237" />
+ <MappedItem name="hg18/chr19_random.fa" value="2" />
+ <MappedItem name="hg18/chr1.fa" value="3297073" />
+ <MappedItem name="BAC_plus_vector.fa" value="445" />
+ <MappedItem name="hg18/chrX.fa" value="1009413" />
+ <MappedItem name="hg18/chr3.fa" value="1686851" />
+ <MappedItem name="hg18/chr20.fa" value="723549" />
+ <MappedItem name="VATG.fa" value="8331" />
+ <MappedItem name="hg18/chr1_random.fa" value="5988" />
+ <MappedItem name="hg18/chr6.fa" value="1458573" />
+ <MappedItem name="hg18/chr3_random.fa" value="13" />
+ <MappedItem name="hg18/chr9_random.fa" value="101" />
+ <MappedItem name="hg18/chr5.fa" value="1894819" />
+ <MappedItem name="AGP.fa" value="24" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="493835" />
+ <Code name="R0" value="27933524" />
+ <Code name="NM" value="5037389" />
+ <Code name="R2" value="16575441" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="6649797" />
+ <Code name="U0" value="25505275" />
+ <Code name="R1" value="20052812" />
+ <Code name="U2" value="14441702" />
+ </MatchCodes>
+ <Reads>53915808</Reads>
+ </ElandLane>
+ <SequenceLane end="1" id="5" version="1">
+ <SampleName>s</SampleName>
+ <LaneID>5</LaneID>
+ <End>2</End>
+ <Reads>33310172</Reads>
+ <SequenceType>FASTQ</SequenceType>
+ </SequenceLane>
+ <ElandLane end="1" id="6" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>6</LaneID>
+ <End>2</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="1118079" />
+ <MappedItem name="hg18/chr17.fa" value="2570258" />
+ <MappedItem name="hg18/chr15_random.fa" value="366" />
+ <MappedItem name="Apetala2.fa" value="727" />
+ <MappedItem name="hg18/chr5_random.fa" value="62" />
+ <MappedItem name="hg18/chr11.fa" value="1790218" />
+ <MappedItem name="hg18/chr10_random.fa" value="32" />
+ <MappedItem name="hg18/chrM.fa" value="2109602" />
+ <MappedItem name="hg18/chr18.fa" value="226387" />
+ <MappedItem name="OBF5.fa" value="270927" />
+ <MappedItem name="EPR-1.fa" value="92" />
+ <MappedItem name="hg18/chr11_random.fa" value="6" />
+ <MappedItem name="hg18/chr13.fa" value="302988" />
+ <MappedItem name="hg18/chr21.fa" value="223133" />
+ <MappedItem name="hg18/chr13_random.fa" value="397" />
+ <MappedItem name="hg18/chr21_random.fa" value="39777" />
+ <MappedItem name="hg18/chr4.fa" value="658824" />
+ <MappedItem name="hg18/chr6_random.fa" value="158" />
+ <MappedItem name="hg18/chr22_random.fa" value="1" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="264" />
+ <MappedItem name="hg18/chr16_random.fa" value="394" />
+ <MappedItem name="hg18/chr17_random.fa" value="4348" />
+ <MappedItem name="hg18/chr8_random.fa" value="212" />
+ <MappedItem name="hg18/chr9.fa" value="1117430" />
+ <MappedItem name="hg18/chr2.fa" value="1461556" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="32684" />
+ <MappedItem name="hg18/chr19.fa" value="2713802" />
+ <MappedItem name="hg18/chr4_random.fa" value="887" />
+ <MappedItem name="hg18/chr7_random.fa" value="992" />
+ <MappedItem name="hg18/chr22.fa" value="844752" />
+ <MappedItem name="hg18/chr12.fa" value="1885047" />
+ <MappedItem name="hg18/chr8.fa" value="1182749" />
+ <MappedItem name="hg18/chr10.fa" value="921449" />
+ <MappedItem name="hg18/chrY.fa" value="25442" />
+ <MappedItem name="hg18/chr15.fa" value="713094" />
+ <MappedItem name="hg18/chr18_random.fa" value="4" />
+ <MappedItem name="hg18/chrX_random.fa" value="602" />
+ <MappedItem name="hg18/chr16.fa" value="1479718" />
+ <MappedItem name="hg18/chr2_random.fa" value="229" />
+ <MappedItem name="hg18/chr14.fa" value="714206" />
+ <MappedItem name="newcontam_UK.fa" value="138" />
+ <MappedItem name="hg18/chr1.fa" value="2682954" />
+ <MappedItem name="BAC_plus_vector.fa" value="698" />
+ <MappedItem name="hg18/chrX.fa" value="862310" />
+ <MappedItem name="hg18/chr3.fa" value="1310743" />
+ <MappedItem name="hg18/chr20.fa" value="858302" />
+ <MappedItem name="VATG.fa" value="10757" />
+ <MappedItem name="hg18/chr1_random.fa" value="12994" />
+ <MappedItem name="hg18/chr6.fa" value="1589887" />
+ <MappedItem name="hg18/chr3_random.fa" value="6" />
+ <MappedItem name="hg18/chr9_random.fa" value="142" />
+ <MappedItem name="hg18/chr5.fa" value="1072768" />
+ <MappedItem name="AGP.fa" value="57" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="410941" />
+ <Code name="R0" value="29332617" />
+ <Code name="NM" value="2140058" />
+ <Code name="R2" value="20062622" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="5296080" />
+ <Code name="U0" value="20105242" />
+ <Code name="R1" value="25994563" />
+ <Code name="U2" value="10610880" />
+ </MatchCodes>
+ <Reads>40665866</Reads>
+ </ElandLane>
+ <ElandLane end="1" id="7" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>7</LaneID>
+ <End>2</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="1078702" />
+ <MappedItem name="hg18/chr17.fa" value="2444411" />
+ <MappedItem name="hg18/chr15_random.fa" value="272" />
+ <MappedItem name="hg18/chrX_random.fa" value="577" />
+ <MappedItem name="hg18/chr5_random.fa" value="49" />
+ <MappedItem name="hg18/chr11.fa" value="1759146" />
+ <MappedItem name="hg18/chr10_random.fa" value="71" />
+ <MappedItem name="hg18/chrM.fa" value="2433962" />
+ <MappedItem name="hg18/chr18.fa" value="242431" />
+ <MappedItem name="OBF5.fa" value="139021" />
+ <MappedItem name="EPR-1.fa" value="30" />
+ <MappedItem name="hg18/chr11_random.fa" value="4" />
+ <MappedItem name="Apetala2.fa" value="309" />
+ <MappedItem name="hg18/chr13.fa" value="310057" />
+ <MappedItem name="hg18/chr21.fa" value="214386" />
+ <MappedItem name="hg18/chr13_random.fa" value="442" />
+ <MappedItem name="hg18/chr21_random.fa" value="33051" />
+ <MappedItem name="hg18/chr4.fa" value="693890" />
+ <MappedItem name="hg18/chr6_random.fa" value="153" />
+ <MappedItem name="hg18/chr22_random.fa" value="15" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="107" />
+ <MappedItem name="hg18/chr16_random.fa" value="467" />
+ <MappedItem name="hg18/chr17_random.fa" value="4405" />
+ <MappedItem name="hg18/chr8_random.fa" value="183" />
+ <MappedItem name="hg18/chr9.fa" value="1050219" />
+ <MappedItem name="hg18/chr2.fa" value="1496165" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="10371" />
+ <MappedItem name="hg18/chr19.fa" value="2447678" />
+ <MappedItem name="hg18/chr4_random.fa" value="856" />
+ <MappedItem name="hg18/chr7_random.fa" value="1450" />
+ <MappedItem name="hg18/chr22.fa" value="808364" />
+ <MappedItem name="hg18/chr12.fa" value="1815509" />
+ <MappedItem name="hg18/chr8.fa" value="1116286" />
+ <MappedItem name="hg18/chr10.fa" value="956650" />
+ <MappedItem name="hg18/chrY.fa" value="21091" />
+ <MappedItem name="hg18/chr15.fa" value="695403" />
+ <MappedItem name="hg18/chr18_random.fa" value="2" />
+ <MappedItem name="hg18/chr16.fa" value="1408777" />
+ <MappedItem name="hg18/chr2_random.fa" value="179" />
+ <MappedItem name="hg18/chr14.fa" value="717532" />
+ <MappedItem name="newcontam_UK.fa" value="76" />
+ <MappedItem name="hg18/chr1.fa" value="2713175" />
+ <MappedItem name="BAC_plus_vector.fa" value="576" />
+ <MappedItem name="hg18/chrX.fa" value="842394" />
+ <MappedItem name="hg18/chr3.fa" value="1299476" />
+ <MappedItem name="hg18/chr20.fa" value="826774" />
+ <MappedItem name="VATG.fa" value="4894" />
+ <MappedItem name="hg18/chr1_random.fa" value="20371" />
+ <MappedItem name="hg18/chr6.fa" value="1574072" />
+ <MappedItem name="hg18/chr3_random.fa" value="7" />
+ <MappedItem name="hg18/chr9_random.fa" value="156" />
+ <MappedItem name="hg18/chr5.fa" value="1076419" />
+ <MappedItem name="AGP.fa" value="26" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="391630" />
+ <Code name="R0" value="33185367" />
+ <Code name="NM" value="2010407" />
+ <Code name="R2" value="21949225" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="5056565" />
+ <Code name="U0" value="20382001" />
+ <Code name="R1" value="28203270" />
+ <Code name="U2" value="10038024" />
+ </MatchCodes>
+ <Reads>39935997</Reads>
+ </ElandLane>
+ <ElandLane end="1" id="8" version="2">
+ <SampleName>s</SampleName>
+ <LaneID>8</LaneID>
+ <End>2</End>
+ <GenomeMap>
+ <GenomeItem name="chr20.fa" value="hg18/chr20.fa" />
+ <GenomeItem name="chr10_random.fa" value="hg18/chr10_random.fa" />
+ <GenomeItem name="chr4.fa" value="hg18/chr4.fa" />
+ <GenomeItem name="chrY.fa" value="hg18/chrY.fa" />
+ <GenomeItem name="chr4_random.fa" value="hg18/chr4_random.fa" />
+ <GenomeItem name="chr11_random.fa" value="hg18/chr11_random.fa" />
+ <GenomeItem name="chr6_random.fa" value="hg18/chr6_random.fa" />
+ <GenomeItem name="chr6.fa" value="hg18/chr6.fa" />
+ <GenomeItem name="chrX_random.fa" value="hg18/chrX_random.fa" />
+ <GenomeItem name="Apetala2.fa" value="Apetala2.fa" />
+ <GenomeItem name="chr3.fa" value="hg18/chr3.fa" />
+ <GenomeItem name="chr10.fa" value="hg18/chr10.fa" />
+ <GenomeItem name="chr15_random.fa" value="hg18/chr15_random.fa" />
+ <GenomeItem name="chr19.fa" value="hg18/chr19.fa" />
+ <GenomeItem name="chr7_random.fa" value="hg18/chr7_random.fa" />
+ <GenomeItem name="OBF5.fa" value="OBF5.fa" />
+ <GenomeItem name="chr12.fa" value="hg18/chr12.fa" />
+ <GenomeItem name="EPR-1.fa" value="EPR-1.fa" />
+ <GenomeItem name="chr1.fa" value="hg18/chr1.fa" />
+ <GenomeItem name="chr1_random.fa" value="hg18/chr1_random.fa" />
+ <GenomeItem name="chr16_random.fa" value="hg18/chr16_random.fa" />
+ <GenomeItem name="chr21.fa" value="hg18/chr21.fa" />
+ <GenomeItem name="chr2_random.fa" value="hg18/chr2_random.fa" />
+ <GenomeItem name="chr8.fa" value="hg18/chr8.fa" />
+ <GenomeItem name="Lambda_2-3-2_9786nts.fa" value="Lambda_2-3-2_9786nts.fa" />
+ <GenomeItem name="chr8_random.fa" value="hg18/chr8_random.fa" />
+ <GenomeItem name="chr18.fa" value="hg18/chr18.fa" />
+ <GenomeItem name="Lambda_1-1_11936nts.fa" value="Lambda_1-1_11936nts.fa" />
+ <GenomeItem name="chr17.fa" value="hg18/chr17.fa" />
+ <GenomeItem name="chr22_random.fa" value="hg18/chr22_random.fa" />
+ <GenomeItem name="chr15.fa" value="hg18/chr15.fa" />
+ <GenomeItem name="chr7.fa" value="hg18/chr7.fa" />
+ <GenomeItem name="chrX.fa" value="hg18/chrX.fa" />
+ <GenomeItem name="chrM.fa" value="hg18/chrM.fa" />
+ <GenomeItem name="chr21_random.fa" value="hg18/chr21_random.fa" />
+ <GenomeItem name="chr19_random.fa" value="hg18/chr19_random.fa" />
+ <GenomeItem name="VATG.fa" value="VATG.fa" />
+ <GenomeItem name="chr14.fa" value="hg18/chr14.fa" />
+ <GenomeItem name="chr17_random.fa" value="hg18/chr17_random.fa" />
+ <GenomeItem name="chr13.fa" value="hg18/chr13.fa" />
+ <GenomeItem name="chr11.fa" value="hg18/chr11.fa" />
+ <GenomeItem name="chr16.fa" value="hg18/chr16.fa" />
+ <GenomeItem name="newcontam_UK.fa" value="newcontam_UK.fa" />
+ <GenomeItem name="chr9.fa" value="hg18/chr9.fa" />
+ <GenomeItem name="chr2.fa" value="hg18/chr2.fa" />
+ <GenomeItem name="BAC_plus_vector.fa" value="BAC_plus_vector.fa" />
+ <GenomeItem name="chr18_random.fa" value="hg18/chr18_random.fa" />
+ <GenomeItem name="chr9_random.fa" value="hg18/chr9_random.fa" />
+ <GenomeItem name="chr5.fa" value="hg18/chr5.fa" />
+ <GenomeItem name="chr3_random.fa" value="hg18/chr3_random.fa" />
+ <GenomeItem name="chr5_random.fa" value="hg18/chr5_random.fa" />
+ <GenomeItem name="PDF.fa" value="PDF.fa" />
+ <GenomeItem name="chr13_random.fa" value="hg18/chr13_random.fa" />
+ <GenomeItem name="chr22.fa" value="hg18/chr22.fa" />
+ <GenomeItem name="AGP.fa" value="AGP.fa" />
+ </GenomeMap>
+ <MappedReads>
+ <MappedItem name="hg18/chr7.fa" value="1349366" />
+ <MappedItem name="hg18/chr17.fa" value="2326709" />
+ <MappedItem name="hg18/chr15_random.fa" value="352" />
+ <MappedItem name="hg18/chrX_random.fa" value="721" />
+ <MappedItem name="hg18/chr5_random.fa" value="125" />
+ <MappedItem name="hg18/chr11.fa" value="1734497" />
+ <MappedItem name="hg18/chr10_random.fa" value="4" />
+ <MappedItem name="hg18/chrM.fa" value="1917138" />
+ <MappedItem name="hg18/chr18.fa" value="169567" />
+ <MappedItem name="OBF5.fa" value="125783" />
+ <MappedItem name="EPR-1.fa" value="28" />
+ <MappedItem name="hg18/chr11_random.fa" value="1" />
+ <MappedItem name="Apetala2.fa" value="310" />
+ <MappedItem name="hg18/chr13.fa" value="248252" />
+ <MappedItem name="hg18/chr21.fa" value="314085" />
+ <MappedItem name="hg18/chr13_random.fa" value="405" />
+ <MappedItem name="hg18/chr21_random.fa" value="50620" />
+ <MappedItem name="hg18/chr4.fa" value="495425" />
+ <MappedItem name="hg18/chr6_random.fa" value="46" />
+ <MappedItem name="hg18/chr22_random.fa" value="42" />
+ <MappedItem name="Lambda_2-3-2_9786nts.fa" value="137" />
+ <MappedItem name="hg18/chr16_random.fa" value="640" />
+ <MappedItem name="hg18/chr17_random.fa" value="4224" />
+ <MappedItem name="hg18/chr8_random.fa" value="139" />
+ <MappedItem name="hg18/chr9.fa" value="1356315" />
+ <MappedItem name="hg18/chr2.fa" value="1401418" />
+ <MappedItem name="Lambda_1-1_11936nts.fa" value="12293" />
+ <MappedItem name="hg18/chr19.fa" value="2167120" />
+ <MappedItem name="hg18/chr4_random.fa" value="1118" />
+ <MappedItem name="hg18/chr7_random.fa" value="1062" />
+ <MappedItem name="hg18/chr22.fa" value="572306" />
+ <MappedItem name="hg18/chr12.fa" value="1953327" />
+ <MappedItem name="hg18/chr8.fa" value="995413" />
+ <MappedItem name="hg18/chr10.fa" value="758115" />
+ <MappedItem name="hg18/chrY.fa" value="31615" />
+ <MappedItem name="hg18/chr15.fa" value="631556" />
+ <MappedItem name="hg18/chr16.fa" value="1568689" />
+ <MappedItem name="hg18/chr2_random.fa" value="141" />
+ <MappedItem name="hg18/chr14.fa" value="884077" />
+ <MappedItem name="newcontam_UK.fa" value="50" />
+ <MappedItem name="hg18/chr1.fa" value="2315522" />
+ <MappedItem name="BAC_plus_vector.fa" value="583" />
+ <MappedItem name="hg18/chrX.fa" value="1045292" />
+ <MappedItem name="hg18/chr3.fa" value="1297351" />
+ <MappedItem name="hg18/chr20.fa" value="1045350" />
+ <MappedItem name="VATG.fa" value="4908" />
+ <MappedItem name="hg18/chr1_random.fa" value="5447" />
+ <MappedItem name="hg18/chr6.fa" value="905023" />
+ <MappedItem name="hg18/chr3_random.fa" value="3" />
+ <MappedItem name="hg18/chr9_random.fa" value="394" />
+ <MappedItem name="hg18/chr5.fa" value="1009254" />
+ <MappedItem name="AGP.fa" value="34" />
+ </MappedReads>
+ <MatchCodes>
+ <Code name="QC" value="377748" />
+ <Code name="R0" value="22489651" />
+ <Code name="NM" value="2158049" />
+ <Code name="R2" value="12904686" />
+ <Code name="RM" value="0" />
+ <Code name="U1" value="4495980" />
+ <Code name="U0" value="19418018" />
+ <Code name="R1" value="16965863" />
+ <Code name="U2" value="9633798" />
+ </MatchCodes>
+ <Reads>37853044</Reads>
+ </ElandLane>
+ </ElandCollection>
+ </Gerald>
+</PipelineRun>
--- /dev/null
+"""
+Generate settings for the Django Application.
+
+To make it easier to customize the application the settings can be
+defined in a configuration file read by ConfigParser.
+
+The options understood by this module are (with their defaults):
+
+ [frontend]
+ email_host=localhost
+ email_port=25
+ database_engine=sqlite3
+ database_name=/path/to/db
+
+ [admins]
+ #name1=email1
+
+ [allowed_hosts]
+ #name1=ip
+ localhost=127.0.0.1
+
+ [allowed_analysis_hosts]
+ #name1=ip
+ localhost=127.0.0.1
+
+"""
+import ConfigParser
+import os
+import shlex
+import htsworkflow
+
# Directory containing the installed htsworkflow package; used below to
# build default paths (database file, media root, templates).
HTSWORKFLOW_ROOT = os.path.abspath(os.path.split(htsworkflow.__file__)[0])

# make epydoc happy
__docformat__ = "restructuredtext en"
+
def options_to_list(options, dest, section_name, option_name):
    """
    Append the whitespace-separated values of one option to a list.

    If section_name/option_name exists in the options parser, its value
    is split with shlex.split and the pieces are appended to dest.
    Does nothing when the option is absent.
    """
    if options.has_option(section_name, option_name):
        opt = options.get(section_name, option_name)
        dest.extend( shlex.split(opt) )
+
def options_to_dict(dest, section_name):
    """
    Copy every option of section_name into the dest dictionary.

    NOTE(review): unlike options_to_list, this reads the module-level
    ``options`` parser (defined below) instead of taking one as an
    argument.  Does nothing when the section is absent.
    """
    if options.has_section(section_name):
        for name in options.options(section_name):
            dest[name] = options.get(section_name, name)
+
# define your defaults here
options = ConfigParser.SafeConfigParser(
    { 'email_host': 'localhost',
      'email_port': '25',
      'database_engine': 'sqlite3',
      'database_name':
        os.path.join(HTSWORKFLOW_ROOT, '..', 'fctracker.db'),
      'time_zone': 'America/Los_Angeles',
      'default_pm': '5',
      'link_flowcell_storage_device_url': "http://localhost:8000/inventory/lts/link/",
      'printer1_host': '127.0.0.1',
      'printer1_port': '9100',
      'printer2_host': '127.0.0.1',
      'printer2_port': '9100',
    })

# site configuration overrides the defaults above
options.read([os.path.expanduser("~/.htsworkflow.ini"),
              '/etc/htsworkflow.ini',])

# OptionParser will use the dictionary passed into the config parser as
# 'Default' values in any section. However it still needs an empty section
# to exist in order to retrieve anything.
if not options.has_section('frontend'):
    options.add_section('frontend')
if not options.has_section('bcprinter'):
    options.add_section('bcprinter')
+
# Django settings for elandifier project.

DEBUG = True
TEMPLATE_DEBUG = DEBUG

# admin/manager address lists come from whitespace-separated options in
# the [frontend] section
ADMINS = []
options_to_list(options, ADMINS, 'frontend', 'admins')

MANAGERS = []
options_to_list(options, MANAGERS, 'frontend', 'managers')

DEFAULT_PM=int(options.get('frontend', 'default_pm'))

AUTHENTICATION_BACKENDS = (
  'htsworkflow.frontend.samples.auth_backend.HTSUserModelBackend', )
CUSTOM_USER_MODEL = 'samples.HTSUser'

EMAIL_HOST = options.get('frontend', 'email_host')
EMAIL_PORT = int(options.get('frontend', 'email_port'))

# sender address for notification mail; falls back to a noreply address
if options.has_option('frontend', 'notification_sender'):
    NOTIFICATION_SENDER = options.get('frontend', 'notification_sender')
else:
    NOTIFICATION_SENDER = "noreply@example.com"
NOTIFICATION_BCC = []
options_to_list(options, NOTIFICATION_BCC, 'frontend', 'notification_bcc')
# Name of the config section describing the database, defaulting to
# 'database'.  Note: ConfigParser.get() takes no fallback value -- its
# third positional parameter is the `raw` interpolation flag -- so the
# previous call options.get('frontend', 'database', 'database') did NOT
# supply a default and raised NoOptionError when the option was missing.
if options.has_option('frontend', 'database'):
    database_section = options.get('frontend', 'database')
else:
    database_section = 'database'

if not options.has_section(database_section):
    raise ConfigParser.NoSectionError(
        "No database=<database_section_name> defined")
+
# 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_ENGINE = options.get(database_section, 'engine')
DATABASE_NAME = options.get(database_section, 'name')
# optional connection parameters; only defined when configured, so
# Django falls back to its own defaults otherwise
if options.has_option(database_section, 'user'):
    DATABASE_USER = options.get(database_section, 'user')
if options.has_option(database_section, 'host'):
    DATABASE_HOST = options.get(database_section, 'host')
if options.has_option(database_section, 'port'):
    DATABASE_PORT = options.get(database_section, 'port')
+
# Password may come from a separate file (first line only) or directly
# from the config.  The previous version leaked the open file handle and
# kept the trailing newline in the password.
if options.has_option(database_section, 'password_file'):
    password_file = options.get(database_section, 'password_file')
    password_stream = open(password_file, 'r')
    try:
        DATABASE_PASSWORD = password_stream.readline().rstrip('\r\n')
    finally:
        password_stream.close()
elif options.has_option(database_section, 'password'):
    DATABASE_PASSWORD = options.get(database_section, 'password')
+
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = options.get('frontend', 'time_zone')

# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(HTSWORKFLOW_ROOT, 'frontend', 'static', '')

# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = '/static/'

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'

# Make this unique, and don't share it with anybody.
# NOTE(review): this secret key is committed to source control; it should
# be overridden/rotated in deployment.
SECRET_KEY = '(ekv^=gf(j9f(x25@a7r+8)hqlz%&_1!tw^75l%^041#vi=@4n'

# some of our urls need an api key
DEFAULT_API_KEY = 'n7HsXGHIi0vp9j5u4TIRJyqAlXYc4wrH'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
#    'django.template.loaders.eggs.load_template_source',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.doc.XViewMiddleware',
)

ROOT_URLCONF = 'htsworkflow.frontend.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    '/usr/share/python-support/python-django/django/contrib/admin/templates',
    #'/usr/lib/pymodules/python2.6/django/contrib/admin/templates/',
    os.path.join(HTSWORKFLOW_ROOT, 'frontend','templates'),
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.humanize',
    'django.contrib.sessions',
    'django.contrib.sites',
    'htsworkflow.frontend.eland_config',
    'htsworkflow.frontend.samples',
    # modules from htsworkflow branch
    'htsworkflow.frontend.experiments',
    'htsworkflow.frontend.analysis',
    'htsworkflow.frontend.reports',
    'htsworkflow.frontend.inventory',
    'htsworkflow.frontend.bcmagic',
    'htsworkflow.frontend.labels',
    'django.contrib.databrowse',
)

# Project specific settings

# hostname -> ip maps of machines allowed to talk to us; seeded with
# localhost and extended from the config sections named below
ALLOWED_IPS={'127.0.0.1': '127.0.0.1'}
options_to_dict(ALLOWED_IPS, 'allowed_hosts')

ALLOWED_ANALYS_IPS = {'127.0.0.1': '127.0.0.1'}
options_to_dict(ALLOWED_ANALYS_IPS, 'allowed_analysis_hosts')
#UPLOADTO_HOME = os.path.abspath('../../uploads')
#UPLOADTO_CONFIG_FILE = os.path.join(UPLOADTO_HOME, 'eland_config')
#UPLOADTO_ELAND_RESULT_PACKS = os.path.join(UPLOADTO_HOME, 'eland_results')
#UPLOADTO_BED_PACKS = os.path.join(UPLOADTO_HOME, 'bed_packs')
# Where "results_dir" means directory with all the flowcells
if options.has_option('frontend', 'results_dir'):
    RESULT_HOME_DIR=os.path.expanduser(options.get('frontend', 'results_dir'))
else:
    RESULT_HOME_DIR='/tmp'

LINK_FLOWCELL_STORAGE_DEVICE_URL = options.get('frontend', 'link_flowcell_storage_device_url')
# PORT 9100 is default for Zebra tabletop/desktop printers
# PORT 6101 is default for Zebra mobile printers
BCPRINTER_PRINTER1_HOST = options.get('bcprinter', 'printer1_host')
BCPRINTER_PRINTER1_PORT = int(options.get('bcprinter', 'printer1_port'))
BCPRINTER_PRINTER2_HOST = options.get('bcprinter', 'printer2_host')
BCPRINTER_PRINTER2_PORT = int(options.get('bcprinter', 'printer2_port'))
+
--- /dev/null
+"""Utilities to help with submitting results to public repositories"""
--- /dev/null
+"""Convert srf and qseq archive files to fastqs
+"""
+import logging
+import os
+import sys
+import types
+
+from htsworkflow.pipelines.sequences import scan_for_sequences
+from htsworkflow.pipelines import qseq2fastq
+from htsworkflow.pipelines import srf2fastq
+from htsworkflow.util.api import HtswApi
+from htsworkflow.util.conversion import parse_flowcell_id
+
+logger = logging.getLogger(__name__)
+
class CondorFastqExtract(object):
    # Builds Condor submit scripts that regenerate missing fastq files
    # from archived srf or qseq sequence files found under sequences_path.
    def __init__(self, host, apidata, sequences_path,
                 log_path='log',
                 force=False):
        """Extract fastqs from results archive

        Args:
          host (str): root of the htsworkflow api server
          apidata (dict): id & key to post to the server
          sequences_path (str): root of the directory tree to scan for files
          log_path (str): where to put condor log files
          force (bool): do we force overwriting current files?
        """
        self.api = HtswApi(host, apidata)
        self.sequences_path = sequences_path
        self.log_path = log_path
        self.force = force

    def build_fastqs(self, library_result_map ):
        """
        Generate condor scripts to build any needed fastq files

        Args:
          library_result_map (list): [(library_id, destination directory), ...]
        """
        qseq_condor_header = self.get_qseq_condor_header()
        qseq_condor_entries = []
        srf_condor_header = self.get_srf_condor_header()
        srf_condor_entries = []
        lib_db = self.find_archive_sequence_files(library_result_map)

        needed_targets = self.find_missing_targets(library_result_map, lib_db)

        for target_pathname, available_sources in needed_targets.items():
            logger.debug(' target : %s' % (target_pathname,))
            logger.debug(' candidate sources: %s' % (available_sources,))
            # prefer a qseq source over an srf source for the same target
            if available_sources.has_key('qseq'):
                source = available_sources['qseq']
                qseq_condor_entries.append(
                    self.condor_qseq_to_fastq(source.path,
                                              target_pathname,
                                              source.flowcell)
                )
            elif available_sources.has_key('srf'):
                source = available_sources['srf']
                # mid_point is only set on sequences from flowcells that
                # need a non-default read split (see find_missing_targets)
                mid = getattr(source, 'mid_point', None)
                srf_condor_entries.append(
                    self.condor_srf_to_fastq(source.path,
                                             target_pathname,
                                             source.paired,
                                             source.flowcell,
                                             mid)
                )
            else:
                print " need file", target_pathname

        # only write a submit script when it would contain at least one job
        if len(srf_condor_entries) > 0:
            make_submit_script('srf.fastq.condor',
                               srf_condor_header,
                               srf_condor_entries)

        if len(qseq_condor_entries) > 0:
            make_submit_script('qseq.fastq.condor',
                               qseq_condor_header,
                               qseq_condor_entries)


    def get_qseq_condor_header(self):
        # Condor submit-file preamble for qseq2fastq jobs; stdout and
        # stderr deliberately share one file per process.
        return """Universe=vanilla
executable=%(exe)s
error=%(log)s/qseq2fastq.$(process).out
output=%(log)s/qseq2fastq.$(process).out
log=%(log)s/qseq2fastq.log

""" % {'exe': sys.executable,
       'log': self.log_path }

    def get_srf_condor_header(self):
        # Condor submit-file preamble for srf2fastq jobs; PYTHONPATH is
        # forwarded so the job can import htsworkflow.
        return """Universe=vanilla
executable=%(exe)s
output=%(log)s/srf_pair_fastq.$(process).out
error=%(log)s/srf_pair_fastq.$(process).out
log=%(log)s/srf_pair_fastq.log
environment="PYTHONPATH=%(env)s"

""" % {'exe': sys.executable,
       'log': self.log_path,
       'env': os.environ.get('PYTHONPATH', '')
      }

    def find_archive_sequence_files(self, library_result_map):
        """
        Find archived sequence files associated with our results.
        """
        logger.debug("Searching for sequence files in: %s" %(self.sequences_path,))

        lib_db = {}
        seq_dirs = set()
        # (flowcell, lane_number) -> library id, for lanes we care about
        candidate_lanes = {}
        for lib_id, result_dir in library_result_map:
            lib_info = self.api.get_library(lib_id)
            lib_info['lanes'] = {}
            lib_db[lib_id] = lib_info

            for lane in lib_info['lane_set']:
                lane_key = (lane['flowcell'], lane['lane_number'])
                candidate_lanes[lane_key] = lib_id
                seq_dirs.add(os.path.join(self.sequences_path,
                                          'flowcells',
                                          lane['flowcell']))
        logger.debug("Seq_dirs = %s" %(unicode(seq_dirs)))
        candidate_seq_list = scan_for_sequences(seq_dirs)

        # at this point we have too many sequences as scan_for_sequences
        # returns all the sequences in a flowcell directory
        # so lets filter out the extras

        for seq in candidate_seq_list:
            lane_key = (seq.flowcell, seq.lane)
            lib_id = candidate_lanes.get(lane_key, None)
            if lib_id is not None:
                lib_info = lib_db[lib_id]
                lib_info['lanes'].setdefault(lane_key, set()).add(seq)

        return lib_db

    def find_missing_targets(self, library_result_map, lib_db):
        """
        Check if the sequence file exists.
        This requires computing what the sequence name is and checking
        to see if it can be found in the sequence location.

        Adds seq.paired flag to sequences listed in lib_db[*]['lanes']
        """
        fastq_paired_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s_r%(read)s.fastq'
        fastq_single_template = '%(lib_id)s_%(flowcell)s_c%(cycle)s_l%(lane)s.fastq'
        # find what targets we're missing
        needed_targets = {}
        for lib_id, result_dir in library_result_map:
            lib = lib_db[lib_id]
            lane_dict = make_lane_dict(lib_db, lib_id)

            for lane_key, sequences in lib['lanes'].items():
                for seq in sequences:
                    seq.paired = lane_dict[seq.flowcell]['paired_end']
                    lane_status = lane_dict[seq.flowcell]['status']

                    # unlabeled reads from a paired-end lane count as read 1
                    if seq.paired and seq.read is None:
                        seq.read = 1
                    filename_attributes = {
                        'flowcell': seq.flowcell,
                        'lib_id': lib_id,
                        'lane': seq.lane,
                        'read': seq.read,
                        'cycle': seq.cycle
                        }
                    # skip bad runs
                    if lane_status == 'Failed':
                        continue
                    if seq.flowcell == '30DY0AAXX':
                        # 30DY0 only ran for 151 bases instead of 152
                        # it is actually 76 1st read, 75 2nd read
                        seq.mid_point = 76

                    # end filters
                    if seq.paired:
                        target_name = fastq_paired_template % filename_attributes
                    else:
                        target_name = fastq_single_template % filename_attributes

                    target_pathname = os.path.join(result_dir, target_name)
                    if self.force or not os.path.exists(target_pathname):
                        t = needed_targets.setdefault(target_pathname, {})
                        t[seq.filetype] = seq

        return needed_targets


    def condor_srf_to_fastq(self,
                            srf_file,
                            target_pathname,
                            paired,
                            flowcell=None,
                            mid=None):
        # Build the per-job "arguments/queue" stanza for one srf source.
        py = srf2fastq.__file__
        args = [ py, srf_file, '--verbose']
        if paired:
            args.extend(['--left', target_pathname])
            # this is ugly. I did it because I was pregenerating the target
            # names before I tried to figure out what sources could generate
            # those targets, and everything up to this point had been
            # one-to-one. So I couldn't figure out how to pair the
            # target names.
            # With this at least the command will run correctly.
            # however if we rename the default targets, this'll break
            # also I think it'll generate it twice.
            args.extend(['--right',
                         target_pathname.replace('_r1.fastq', '_r2.fastq')])
        else:
            args.extend(['--single', target_pathname ])
        if flowcell is not None:
            args.extend(['--flowcell', flowcell])

        if mid is not None:
            args.extend(['-m', str(mid)])

        if self.force:
            args.extend(['--force'])

        script = """arguments="%s"
queue
""" % (" ".join(args),)

        return script


    def condor_qseq_to_fastq(self, qseq_file, target_pathname, flowcell=None):
        # Build the per-job "arguments/queue" stanza for one qseq archive.
        py = qseq2fastq.__file__
        args = [py, '-i', qseq_file, '-o', target_pathname ]
        if flowcell is not None:
            args.extend(['-f', flowcell])
        script = """arguments="%s"
queue
""" % (" ".join(args))

        return script
+
def make_submit_script(target, header, body_list):
    """
    write out a text file

    this was intended for condor submit scripts

    Args:
      target (str or stream):
        if target is a string, we will open and close the file
        if target is a stream, the caller is responsible.

      header (str):
        header to write at the beginning of the file
      body_list (list of strs):
        a list of blocks to add to the file.
    """
    # compute the "did we open it" test once, and guarantee the handle is
    # closed even if a write raises (the original leaked it on error)
    opened_here = type(target) in types.StringTypes
    if opened_here:
        f = open(target, "w")
    else:
        f = target
    try:
        f.write(header)
        for entry in body_list:
            f.write(entry)
    finally:
        # only close handles we opened; caller-supplied streams stay open
        if opened_here:
            f.close()
+
def make_lane_dict(lib_db, lib_id):
    """
    Build a mapping from flowcell ID to lane record for one library.

    Each lane's flowcell name is normalized with parse_flowcell_id
    (mutating the lane record) before being used as the key.
    """
    by_flowcell = {}
    for lane in lib_db[lib_id]['lane_set']:
        flowcell_id, status = parse_flowcell_id(lane['flowcell'])
        lane['flowcell'] = flowcell_id
        by_flowcell[lane['flowcell']] = lane
    return by_flowcell
+
--- /dev/null
+"""Parse UCSC DAF File
+"""
+import logging
+import os
+import re
+import string
+from StringIO import StringIO
+import types
+import urlparse
+
+import RDF
+from htsworkflow.util.rdfhelp import \
+ blankOrUri, \
+ dafTermOntology, \
+ get_model, \
+ libraryOntology, \
+ owlNS, \
+ rdfNS, \
+ submissionLog, \
+ submissionOntology, \
+ toTypedNode, \
+ fromTypedNode
+from htsworkflow.util.hashfile import make_md5sum
+
+logger = logging.getLogger(__name__)
+
+#
# Errors raised while building or querying the RDF model
class ModelException(RuntimeError): pass
class MetadataLookupException(RuntimeError):
    """Problem accessing metadata"""
    pass
+
# STATES for the parse_stream state machine
DAF_HEADER = 1  # reading top-level header attributes
DAF_VIEW = 2    # inside a "view" block
+
def parse_into_model(model, submission_name, filename):
    """Read a DAF into RDF Model

    requires a short submission name
    """
    parsed_attributes = parse(filename)
    add_to_model(model, parsed_attributes, submission_name)
+
def fromstream_into_model(model, submission_name, daf_stream):
    """Parse an open DAF stream and merge its statements into model."""
    add_to_model(model, parse_stream(daf_stream), submission_name)
+
def fromstring_into_model(model, submission_name, daf_string):
    """Read a string containing a DAF into RDF Model

    requires a short submission name
    """
    parsed_attributes = fromstring(daf_string)
    add_to_model(model, parsed_attributes, submission_name)
+
def parse(filename):
    """Parse a DAF file on disk and return its attribute dictionary.

    The file handle is now closed even when parse_stream raises; the
    original leaked it on error.
    """
    stream = open(filename,'r')
    try:
        return parse_stream(stream)
    finally:
        stream.close()
+
def fromstring(daf_string):
    """Parse a DAF held in a string and return its attribute dictionary."""
    return parse_stream(StringIO(daf_string))
+
def parse_stream(stream):
    """Parse a DAF from an iterable of lines.

    Returns a dictionary of header attributes; the parsed view blocks
    are stored under the 'views' key, indexed by view name.
    """
    comment_re = re.compile("#.*$")

    # two-state machine: header attributes until a 'view' line opens a
    # view block; a blank line closes the block and returns to header
    state = DAF_HEADER
    attributes = {'views': {}}
    view_name = None
    view_attributes = {}
    for line in stream:
        #remove comments
        line = comment_re.sub("", line)
        # name runs to the first whitespace; value is the remainder with
        # trailing whitespace stripped
        nstop = _extract_name_index(line)
        name = line[0:nstop]
        sstop = _consume_whitespace(line, start=nstop)
        vstop = _extract_value_index(line, start=sstop)
        value = line[sstop:vstop]

        # yes/no values become real booleans
        if value.lower() in ('yes',):
            value = True
        elif value.lower() in ('no',):
            value = False

        if len(name) == 0:
            # blank line ends the current view block (if any)
            if view_name is not None:
                attributes['views'][view_name] = view_attributes
                view_name = None
                view_attributes = {}
            state = DAF_HEADER
        elif state == DAF_HEADER and name == 'variables':
            # comma-separated list of variable names
            attributes[name] = [ x.strip() for x in value.split(',')]
        elif state == DAF_HEADER and name == 'view':
            view_name = value
            view_attributes['view'] = value
            state = DAF_VIEW
        elif state == DAF_HEADER:
            attributes[name] = value
        elif state == DAF_VIEW:
            view_attributes[name] = value

    # save last block
    if view_name is not None:
        attributes['views'][view_name] = view_attributes

    return attributes
+
+def _consume_whitespace(line, start=0):
+ for i in xrange(start, len(line)):
+ if line[i] not in string.whitespace:
+ return i
+
+ return len(line)
+
+def _extract_name_index(line, start=0):
+ for i in xrange(start, len(line)):
+ if line[i] in string.whitespace:
+ return i
+
+ return len(line)
+
+def _extract_value_index(line, start=0):
+ shortline = line.rstrip()
+ return len(shortline)
+
def convert_to_rdf_statements(attributes, name):
    """Convert a parsed DAF attribute dictionary into RDF statements.

    name is the short submission name used to build the subject URI.
    """
    submission_uri = get_submission_uri(name)
    subject = RDF.Node(submission_uri)

    statements = []
    for daf_key in attributes:
        predicate = dafTermOntology[daf_key]
        if daf_key == 'views':
            # view blocks get their own subjects under .../view/
            statements.extend(_views_to_statements(name,
                                                   dafTermOntology,
                                                   attributes[daf_key]))
        elif daf_key == 'variables':
            #predicate = ddfNS['variables']
            # one statement per declared variable name
            for var in attributes.get('variables', []):
                obj = toTypedNode(var)
                statements.append(RDF.Statement(subject, predicate, obj))
        else:
            # plain header attribute: a single typed literal
            value = attributes[daf_key]
            obj = toTypedNode(value)
            statements.append(RDF.Statement(subject,predicate,obj))

    return statements
+
def _views_to_statements(name, dafNS, views):
    """Build the RDF statements describing each view of submission `name`.

    dafNS supplies the predicate namespace; views is the parsed
    {view_name: {attribute: value}} dictionary from parse_stream.
    """
    subject = RDF.Node(get_submission_uri(name))
    viewNS = get_view_namespace(name)

    statements = []
    for view_name in views:
        view_attributes = views[view_name]
        viewSubject = viewNS[view_name]
        # link the submission to the view, then record the view's name
        statements.append(RDF.Statement(subject, dafNS['views'], viewSubject))
        statements.append(
            RDF.Statement(viewSubject, dafNS['name'], toTypedNode(view_name)))
        for view_attribute_name in view_attributes:
            predicate = dafNS[view_attribute_name]
            obj = toTypedNode(view_attributes[view_attribute_name])
            statements.append(RDF.Statement(viewSubject, predicate, obj))

    #statements.extend(convert_to_rdf_statements(view, viewNode))
    return statements
+
def add_to_model(model, attributes, name):
    """Add the RDF statements for a parsed DAF to an RDF model."""
    statements = convert_to_rdf_statements(attributes, name)
    for statement in statements:
        model.add_statement(statement)
+
def get_submission_uri(name):
    # URI identifying submission `name` within the submission log namespace
    return submissionLog[name].uri
+
def get_view_namespace(name):
    """Return an RDF namespace for the views of submission `name`
    (the submission URI with '/view/' appended)."""
    submission_uri = get_submission_uri(name)
    viewNS = RDF.NS(str(submission_uri) + '/view/')
    return viewNS
+
+class DAFMapper(object):
+ """Convert filenames to views in the UCSC Daf
+ """
+ def __init__(self, name, daf_file=None, model=None):
+ """Construct a RDF backed model of a UCSC DAF
+
+ :args:
+ name (str): the name of this submission (used to construct DAF url)
+ daf_file (str, stream, or None):
+ if str, use as filename
+ if stream, parse as stream
+ if none, don't attempt to load the DAF into our model
+ model (RDF.Model or None):
+ if None, construct a memory backed model
+ otherwise specifies model to use
+ """
+ if daf_file is None and model is None:
+ logger.error("We need a DAF or Model containing a DAF to work")
+
+ self.name = name
+ if model is not None:
+ self.model = model
+ else:
+ self.model = get_model()
+
+ if hasattr(daf_file, 'next'):
+ # its some kind of stream
+ fromstream_into_model(self.model, name, daf_file)
+ else:
+ # file
+ parse_into_model(self.model, name, daf_file)
+
+ self.libraryNS = RDF.NS('http://jumpgate.caltech.edu/library/')
+ self.submissionSet = get_submission_uri(self.name)
+ self.submissionSetNS = RDF.NS(str(self.submissionSet)+'/')
+ self.__view_map = None
+
+
+ def add_pattern(self, view_name, filename_pattern):
+ """Map a filename regular expression to a view name
+ """
+ viewNS = get_view_namespace(self.name)
+
+ obj = toTypedNode(filename_pattern)
+ self.model.add_statement(
+ RDF.Statement(viewNS[view_name],
+ dafTermOntology['filename_re'],
+ obj))
+
+
+ def import_submission_dir(self, submission_dir, library_id):
+ """Import a submission directories and update our model as needed
+ """
+ #attributes = get_filename_attribute_map(paired)
+ libNode = self.libraryNS[library_id + "/"]
+
+ submission_files = os.listdir(submission_dir)
+ for f in submission_files:
+ self.construct_file_attributes(submission_dir, libNode, f)
+
+
+ def construct_file_attributes(self, submission_dir, libNode, pathname):
+ """Looking for the best extension
+ The 'best' is the longest match
+
+ :Args:
+ filename (str): the filename whose extention we are about to examine
+ """
+ path, filename = os.path.split(pathname)
+
+ view = self.find_view(filename)
+ if view is None:
+ logger.warn("Unrecognized file: %s" % (pathname,))
+ return None
+ if str(view) == str(libraryOntology['ignore']):
+ return None
+
+ submission_name = self.make_submission_name(submission_dir)
+ submissionNode = self.get_submission_node(submission_dir)
+ submission_uri = str(submissionNode.uri)
+ view_name = fromTypedNode(self.model.get_target(view, dafTermOntology['name']))
+ submissionView = RDF.Node(RDF.Uri(submission_uri + '/' + view_name))
+
+ self.model.add_statement(
+ RDF.Statement(self.submissionSet, dafTermOntology['has_submission'], submissionNode))
+
+ self.model.add_statement(RDF.Statement(submissionNode, submissionOntology['has_view'], submissionView))
+ self.model.add_statement(RDF.Statement(submissionNode, submissionOntology['name'], toTypedNode(submission_name)))
+ self.model.add_statement(RDF.Statement(submissionNode, rdfNS['type'], submissionOntology['submission']))
+ self.model.add_statement(RDF.Statement(submissionNode, submissionOntology['library'], libNode))
+
+ # add trac specific information
+ self.model.add_statement(
+ RDF.Statement(submissionView, dafTermOntology['view'], view))
+ self.model.add_statement(
+ RDF.Statement(submissionView, dafTermOntology['paired'], toTypedNode(self._is_paired(libNode))))
+ self.model.add_statement(
+ RDF.Statement(submissionView, dafTermOntology['submission'], submissionNode))
+
+ # extra information
+ terms = [dafTermOntology['type'],
+ dafTermOntology['filename_re'],
+ ]
+ terms.extend((dafTermOntology[v] for v in self.get_daf_variables()))
+
+ # Add everything I can find
+ for term in terms:
+ value = self._get_library_attribute(libNode, term)
+ if value is not None:
+ self.model.add_statement(RDF.Statement(submissionView, term, value))
+
+ # add file specific information
+ fileNode = RDF.Node(RDF.Uri(submission_uri + '/' + filename))
+ submission_pathname = os.path.join(submission_dir, filename)
+ md5 = make_md5sum(submission_pathname)
+ self.model.add_statement(
+ RDF.Statement(submissionView, dafTermOntology['has_file'], fileNode))
+ self.model.add_statement(
+ RDF.Statement(fileNode, dafTermOntology['filename'], filename))
+
+ if md5 is None:
+ logging.warning("Unable to produce md5sum for %s" % ( submission_pathname))
+ else:
+ self.model.add_statement(
+ RDF.Statement(fileNode, dafTermOntology['md5sum'], md5))
+
+
+ def _add_library_details_to_model(self, libNode):
+ parser = RDF.Parser(name='rdfa')
+ new_statements = parser.parse_as_stream(libNode.uri)
+ for s in new_statements:
+ # don't override things we already have in the model
+ q = RDF.Statement(s.subject, s.predicate, None)
+ if len(list(self.model.find_statements(q))) == 0:
+ self.model.append(s)
+
+ statements = list(self.model.find_statements(q))
+ if len(statements) == 0:
+ logger.warning("Nothing known about %s" % (str(libNode),))
+
+ def get_daf_variables(self):
+ """Returns simple variables names that to include in the ddf
+ """
+ variableTerm = dafTermOntology['variables']
+ results = ['view']
+ for obj in self.model.get_targets(self.submissionSet, variableTerm):
+ value = str(fromTypedNode(obj))
+ results.append(value)
+ results.append('labVersion')
+ return results
+
+ def make_submission_name(self, submission_dir):
+ submission_dir = os.path.normpath(submission_dir)
+ submission_dir_name = os.path.split(submission_dir)[1]
+ if len(submission_dir_name) == 0:
+ raise RuntimeError(
+ "Submission dir name too short: %s" %(submission_dir,))
+ return submission_dir_name
+
+ def get_submission_node(self, submission_dir):
+ """Convert a submission directory name to a submission node
+ """
+ submission_name = self.make_submission_name(submission_dir)
+ return self.submissionSetNS[submission_name]
+
+    def _get_library_attribute(self, libNode, attribute):
+        """Look up a library attribute, fetching remote data on a miss.
+
+        attribute may be a plain string (resolved in the library
+        ontology) or an RDF.Node predicate. Returns a single value, a
+        list of values, or None when nothing is known even after
+        loading the library's page into the model.
+        """
+        if not isinstance(attribute, RDF.Node):
+            attribute = libraryOntology[attribute]
+
+        # search through the model twice (adding in data from website)
+        for i in xrange(2):
+            targets = list(self.model.get_targets(libNode, attribute))
+            if len(targets) > 0:
+                return self._format_library_attribute(targets)
+
+            # also try predicates declared owl:sameAs the requested one
+            targets = self._search_same_as(libNode, attribute)
+            if targets is not None:
+                return self._format_library_attribute(targets)
+
+            # we don't know anything about this attribute; pull the
+            # library's RDFa page into the model and retry once
+            self._add_library_details_to_model(libNode)
+
+        return None
+
+ def _format_library_attribute(self, targets):
+ if len(targets) == 0:
+ return None
+ elif len(targets) == 1:
+ return fromTypedNode(targets[0])
+ elif len(targets) > 1:
+ return [fromTypedNode(t) for t in targets]
+
+ def _search_same_as(self, subject, predicate):
+ # look for alternate names
+ other_predicates = self.model.get_targets(predicate, owlNS['sameAs'])
+ for other in other_predicates:
+ targets = list(self.model.get_targets(subject, other))
+ if len(targets) > 0:
+ return targets
+ return None
+
+ def find_view(self, filename):
+ """Search through potential DAF filename patterns
+ """
+ if self.__view_map is None:
+ self.__view_map = self._get_filename_view_map()
+
+ results = []
+ for pattern, view in self.__view_map.items():
+ if re.match(pattern, filename):
+ results.append(view)
+
+ if len(results) > 1:
+ msg = "%s matched multiple views %s" % (
+ filename,
+ [str(x) for x in results])
+ raise ModelException(msg)
+ elif len(results) == 1:
+ return results[0]
+ else:
+ return None
+
+
+ def _get_filename_view_map(self):
+ """Query our model for filename patterns
+
+ return a dictionary of compiled regular expressions to view names
+ """
+ filename_query = RDF.Statement(
+ None, dafTermOntology['filename_re'], None)
+
+ patterns = {}
+ for s in self.model.find_statements(filename_query):
+ view_name = s.subject
+ literal_re = s.object.literal_value['string']
+ logger.debug("Found: %s" % (literal_re,))
+ try:
+ filename_re = re.compile(literal_re)
+ except re.error, e:
+ logger.error("Unable to compile: %s" % (literal_re,))
+ patterns[literal_re] = view_name
+ return patterns
+
+ def _is_paired(self, libNode):
+ """Determine if a library is paired end"""
+ library_type = self._get_library_attribute(libNode, 'library_type')
+ if library_type is None:
+ raise ModelException("%s doesn't have a library type" % (str(libNode),))
+
+ #single = (1,3,6)
+ single = ['Single End', 'Small RNA', 'CSHL (lacking last nt)']
+ paired = ['Paired End', 'Multiplexing', 'Barcoded']
+ if library_type in single:
+ return False
+ elif library_type in paired:
+ return True
+ else:
+ raise MetadataLookupException(
+ "Unrecognized library type %s for %s" % \
+ (library_type, str(libNode)))
+
+    def _get_library_url(self):
+        # base URL string for the library namespace
+        return str(self.libraryNS[''].uri)
+
+    def _set_library_url(self, value):
+        # rebuilding the namespace keeps node construction consistent
+        self.libraryNS = RDF.NS(str(value))
+
+    # expose the library namespace base as a plain string property
+    library_url = property(_get_library_url, _set_library_url)
--- /dev/null
+from contextlib import contextmanager
+import os
+from StringIO import StringIO
+import shutil
+import tempfile
+import unittest
+
+from htsworkflow.submission import daf
+from htsworkflow.util.rdfhelp import \
+ dafTermOntology, \
+ fromTypedNode, \
+ rdfNS, \
+ submissionLog, \
+ submissionOntology, \
+ get_model, \
+ get_serializer
+
+import RDF
+
+test_daf = """# Lab and general info
+grant Hardison
+lab Caltech-m
+dataType ChipSeq
+variables cell, antibody,sex,age,strain,control
+compositeSuffix CaltechHistone
+assembly mm9
+dafVersion 2.0
+validationSettings validateFiles.bam:mismatches=2,bamPercent=99.9;validateFiles.fastq:quick=1000
+
+# Track/view definition
+view FastqRd1
+longLabelPrefix Caltech Fastq Read 1
+type fastq
+hasReplicates yes
+required no
+
+view Signal
+longLabelPrefix Caltech Histone Signal
+type bigWig
+hasReplicates yes
+required no
+"""
+
+class TestDAF(unittest.TestCase):
+ """Tests for parsing the example DAF and converting it to RDF."""
+ # check the parsed dictionary has the expected keys and view details
+ def test_parse(self):
+
+ parsed = daf.fromstring(test_daf)
+
+ self.failUnlessEqual(parsed['assembly'], 'mm9')
+ self.failUnlessEqual(parsed['grant'], 'Hardison')
+ self.failUnlessEqual(len(parsed['variables']), 6)
+ self.failUnlessEqual(len(parsed['views']), 2)
+ self.failUnlessEqual(len(parsed['views']['FastqRd1']), 5)
+ self.failUnlessEqual(len(parsed['views']['Signal']), 5)
+ signal = parsed['views']['Signal']
+ self.failUnlessEqual(signal['required'], False)
+ self.failUnlessEqual(signal['longLabelPrefix'],
+ 'Caltech Histone Signal')
+
+ # check add_to_model produces the expected statements for the Signal view
+ def test_rdf(self):
+
+ parsed = daf.fromstring(test_daf)
+ #mem = RDF.Storage(storage_name='hashes',
+ # options_string='hash-type="memory"'),
+ mem = RDF.MemoryStorage()
+ model = RDF.Model(mem)
+
+ name = 'cursub'
+ subNS = RDF.NS(str(submissionLog[name].uri))
+ daf.add_to_model(model, parsed, name)
+
+ signal_view_node = RDF.Node(subNS['/view/Signal'].uri)
+
+ writer = get_serializer()
+ turtle = writer.serialize_model_to_string(model)
+
+ self.failUnless(str(signal_view_node.uri) in turtle)
+
+ statements = list(model.find_statements(
+ RDF.Statement(
+ signal_view_node, None, None)))
+ self.failUnlessEqual(len(statements), 6)
+ name = model.get_target(signal_view_node, dafTermOntology['name'])
+ self.failUnlessEqual(fromTypedNode(name), u'Signal')
+
+def load_daf_mapper(name, extra_statements=None):
+ """Load test model in
+ """
+ model = get_model()
+ if extra_statements is not None:
+ parser = RDF.Parser(name='turtle')
+ parser.parse_string_into_model(model, extra_statements,
+ 'http://extra.extra')
+
+ test_daf_stream = StringIO(test_daf)
+ mapper = daf.DAFMapper(name, daf_file = test_daf_stream, model=model)
+ return mapper
+
+def dump_model(model):
+    # debugging helper: print the whole model as turtle
+    writer = get_serializer()
+    turtle = writer.serialize_model_to_string(model)
+    print turtle
+
+class TestDAFMapper(unittest.TestCase):
+ """Tests for DAFMapper pattern registration, view lookup and attributes."""
+ def test_create_mapper_add_pattern(self):
+ name = 'testsub'
+ mapper = load_daf_mapper(name)
+ pattern = '.bam\Z(?ms)'
+ mapper.add_pattern('Signal', pattern)
+
+ s = RDF.Statement(daf.get_view_namespace(name)['Signal'],
+ dafTermOntology['filename_re'],
+ None)
+ search = list(mapper.model.find_statements(s))
+ self.failUnlessEqual(len(search), 1)
+ self.failUnlessEqual(str(search[0].subject),
+ str(submissionLog['testsub/view/Signal']))
+ self.failUnlessEqual(str(search[0].predicate),
+ str(dafTermOntology['filename_re']))
+ #self.failUnlessEqual(search[0].object.literal_value['string'], pattern)
+
+ # only one of the two registered patterns should match this name
+ def test_find_one_view(self):
+ extra = '''@prefix dafTerm:<http://jumpgate.caltech.edu/wiki/UcscDaf#> .
+
+<%(submissionLog)s/testfind/view/Signal> dafTerm:filename_re ".*\\\\.bam" .
+<%(submissionLog)s/testfind/view/FastqRd1> dafTerm:filename_re ".*_r1\\\\.fastq" .
+''' % {'submissionLog': 'http://jumpgate.caltech.edu/wiki/SubmissionsLog'}
+
+ daf_mapper = load_daf_mapper('testfind', extra_statements = extra)
+
+ view = daf_mapper.find_view('filename_r1.fastq')
+ self.failUnlessEqual(str(view),
+ str(submissionLog['testfind/view/FastqRd1']))
+
+ #writer = get_serializer()
+ #turtle = writer.serialize_model_to_string(model)
+ #print turtle
+
+ # overlapping patterns must raise rather than pick one arbitrarily
+ def test_find_overlapping_view(self):
+ extra = '''@prefix dafTerm:<http://jumpgate.caltech.edu/wiki/UcscDaf#> .
+
+<%(submissionLog)s/testfind/view/fastq> dafTerm:filename_re ".*\\\\.fastq" .
+<%(submissionLog)s/testfind/view/FastqRd1> dafTerm:filename_re ".*_r1\\\\.fastq" .
+''' % {'submissionLog': 'http://jumpgate.caltech.edu/wiki/SubmissionsLog'}
+
+ daf_mapper = load_daf_mapper('testfind', extra_statements = extra)
+
+ self.failUnlessRaises(daf.ModelException,
+ daf_mapper.find_view,
+ 'filename_r1.fastq')
+
+ # NOTE(review): _add_library_details_to_model parses the library URL,
+ # so this test appears to depend on network access — confirm
+ def test_find_attributes(self):
+ lib_id = '11204'
+ lib_url = 'http://jumpgate.caltech.edu/library/%s' %(lib_id)
+ extra = '''@prefix dafTerm: <http://jumpgate.caltech.edu/wiki/UcscDaf#> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+
+<%(submissionLog)s/testfind/view/Signal> dafTerm:filename_re ".*\\\\.bam" .
+<%(submissionLog)s/testfind/view/FastqRd1> dafTerm:filename_re ".*\\\\.fastq" .
+<%(libUrl)s> <%(libraryOntology)sgel_cut> "100"^^xsd:decimal .
+''' % {'submissionLog': 'http://jumpgate.caltech.edu/wiki/SubmissionsLog',
+ 'libraryOntology': 'http://jumpgate.caltech.edu/wiki/LibraryOntology#',
+ 'libUrl': lib_url}
+
+ daf_mapper = load_daf_mapper('testfind', extra)
+ libNode = RDF.Node(RDF.Uri(lib_url))
+ daf_mapper._add_library_details_to_model(libNode)
+ gel_cut = daf_mapper._get_library_attribute(libNode, 'gel_cut')
+ # make sure we can override attributes, the value in our
+ # server is 500 for this library
+ self.failUnlessEqual(gel_cut, 100)
+
+ species = daf_mapper._get_library_attribute(libNode, 'species')
+ self.failUnlessEqual(species, "Homo sapiens")
+
+ with mktempdir('analysis') as analysis_dir:
+ path, analysis_name = os.path.split(analysis_dir)
+ with mktempfile('.bam', dir=analysis_dir) as filename:
+ print 'dir', os.listdir(analysis_dir)
+ daf_mapper.construct_file_attributes(analysis_dir,
+ libNode,
+ filename)
+
+ sub_root = "http://jumpgate.caltech.edu/wiki/SubmissionsLog/testfind/"
+ submission_name = sub_root + analysis_name
+ source = daf_mapper.model.get_source(rdfNS['type'], submissionOntology['submission'])
+
+ self.failUnlessEqual(str(source.uri), submission_name)
+
+ view_name = submission_name + '/Signal'
+ view = daf_mapper.model.get_target(source, submissionOntology['has_view'])
+ self.failUnlessEqual(str(view.uri), view_name)
+
+ # the library_url property should round-trip get and set
+ def test_library_url(self):
+ daf_mapper = load_daf_mapper('urltest')
+
+ self.failUnlessEqual(daf_mapper.library_url,
+ 'http://jumpgate.caltech.edu/library/')
+ daf_mapper.library_url = 'http://google.com'
+ self.failUnlessEqual(daf_mapper.library_url, 'http://google.com' )
+
+@contextmanager
+def mktempdir(prefix='tmp'):
+ d = tempfile.mkdtemp(prefix=prefix)
+ print "made", d
+ yield d
+ shutil.rmtree(d)
+ print "unmade", d
+
+@contextmanager
+def mktempfile(suffix='', prefix='tmp', dir=None):
+ fd, pathname = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)
+ yield pathname
+ print "made", pathname
+ os.close(fd)
+ os.unlink(pathname)
+ print "unmade", pathname
+
+def suite():
+ suite = unittest.makeSuite(TestDAF, 'test')
+ suite.addTest(unittest.makeSuite(TestDAFMapper, 'test'))
+ return suite
+
+if __name__ == "__main__":
+ unittest.main(defaultTest='suite')
--- /dev/null
+import urlparse
+
+UCSCEncodePipeline = "http://encodesubmit.ucsc.edu/pipeline/"
+
+def ddf_download_url(submission_id):
+ """Return url to download a DDF for a submission
+
+ >>> ddf_download_url(1234)
+ 'http://encodesubmit.ucsc.edu/pipeline/download_ddf/1234'
+ """
+ fragment = 'download_ddf/%s' % (submission_id,)
+ return urlparse.urljoin(UCSCEncodePipeline, fragment)
+
+def daf_download_url(submission_id):
+ """Return url to download a DAF for a submission
+
+ >>> daf_download_url(1234)
+ 'http://encodesubmit.ucsc.edu/pipeline/download_daf/1234'
+ """
+ fragment = 'download_daf/%s' % (submission_id,)
+ return urlparse.urljoin(UCSCEncodePipeline, fragment)
+
+def submission_view_url(submission_id):
+    """Return url for viewing a submission's status page
+
+    >>> submission_view_url(1234)
+    'http://encodesubmit.ucsc.edu/pipeline/show/1234'
+    """
+    fragment = 'show/%s' % (submission_id,)
+    return urlparse.urljoin(UCSCEncodePipeline, fragment)
+"""Common functions for accessing the HTS Workflow REST API
"""
-Common functions for accessing the HTS Workflow REST API
-
-"""
+from ConfigParser import SafeConfigParser
import logging
# try to deal with python <2.6
except ImportError:
import simplejson as json
+import os
+from optparse import OptionGroup
import urllib
import urllib2
import urlparse
+
+def add_auth_options(parser):
+ """Add options OptParser configure authentication options
+ """
+ # Load defaults from the config files
+ config = SafeConfigParser()
+ config.read([os.path.expanduser('~/.htsworkflow.ini'),
+ '/etc/htsworkflow.ini'
+ ])
+
+ sequence_archive = None
+ apiid = None
+ apikey = None
+ apihost = None
+ SECTION = 'sequence_archive'
+ if config.has_section(SECTION):
+ sequence_archive = config.get(SECTION, 'sequence_archive',sequence_archive)
+ sequence_archive = os.path.expanduser(sequence_archive)
+ apiid = config.get(SECTION, 'apiid', apiid)
+ apikey = config.get(SECTION, 'apikey', apikey)
+ apihost = config.get(SECTION, 'host', apihost)
+
+ # configuration options
+ group = OptionGroup(parser, "htsw api authentication")
+ group.add_option('--apiid', default=apiid, help="Specify API ID")
+ group.add_option('--apikey', default=apikey, help="Specify API KEY")
+ group.add_option('--host', default=apihost,
+ help="specify HTSWorkflow host",)
+ group.add_option('--sequence', default=sequence_archive,
+ help="sequence repository")
+ parser.add_option_group(group)
+
+def make_auth_from_opts(opts, parser):
+ """Create htsw auth info dictionary from optparse info
+ """
+ if opts.host is None or opts.apiid is None or opts.apikey is None:
+ parser.error("Please specify host url, apiid, apikey")
+
+ return {'apiid': opts.apiid, 'apikey': opts.apikey }
+
+
def library_url(root_url, library_id):
"""
Return the url for retrieving information about a specific library.
headers = web.info()
return json.loads(contents)
+
+class HtswApi(object):
+    """Thin client binding the REST helper functions to one server/auth pair."""
+    def __init__(self, root_url, authdata):
+        # root_url: base URL of the htsworkflow server
+        # authdata: dict carrying 'apiid'/'apikey' passed to retrieve_info
+        self.root_url = root_url
+        self.authdata = authdata
+
+    def get_flowcell(self, flowcellId):
+        """Fetch the JSON description of a flowcell."""
+        url = flowcell_url(self.root_url, flowcellId)
+        return retrieve_info(url, self.authdata)
+
+    def get_library(self, libraryId):
+        """Fetch the JSON description of a library."""
+        url = library_url(self.root_url, libraryId)
+        return retrieve_info(url, self.authdata)
+
+    def get_lanes_for_user(self, user):
+        """Fetch the lanes associated with a user."""
+        url = lanes_for_user(self.root_url, user)
+        return retrieve_info(url, self.authdata)
+
+    def get_url(self, url):
+        """Fetch an arbitrary URL with our auth data attached."""
+        return retrieve_info(url, self.authdata)
+
return None
else:
return unicode(value)
+
+def parse_flowcell_id(flowcell_id):
+ """
+ Return flowcell id and any status encoded in the id
+
+ We stored the status information in the flowcell id name.
+ this was dumb, but database schemas are hard to update.
+ """
+ fields = flowcell_id.split()
+ fcid = None
+ status = None
+ if len(fields) > 0:
+ fcid = fields[0]
+ if len(fields) > 1:
+ status = fields[1]
+ return fcid, status
+
--- /dev/null
+"""Utility to make md5sums of a file caching as a parallel file
+"""
+import logging
+import os
+from subprocess import Popen, PIPE
+
+logger = logging.getLogger(__name__)
+
+def make_md5sum(filename):
+ """Quickly find the md5sum of a file
+ """
+ md5_cache = os.path.join(filename+".md5")
+ print md5_cache
+ if os.path.exists(md5_cache):
+ logger.debug("Found md5sum in {0}".format(md5_cache))
+ stream = open(md5_cache,'r')
+ lines = stream.readlines()
+ md5sum = parse_md5sum_line(lines, filename)
+ else:
+ md5sum = make_md5sum_unix(filename, md5_cache)
+ return md5sum
+
+def make_md5sum_unix(filename, md5_cache):
+ cmd = ["md5sum", filename]
+ logger.debug("Running {0}".format(" ".join(cmd)))
+ p = Popen(cmd, stdout=PIPE)
+ stdin, stdout = p.communicate()
+ retcode = p.wait()
+ logger.debug("Finished {0} retcode {1}".format(" ".join(cmd), retcode))
+ if retcode != 0:
+ logger.error("Trouble with md5sum for {0}".format(filename))
+ return None
+ lines = stdin.split(os.linesep)
+ md5sum = parse_md5sum_line(lines, filename)
+ if md5sum is not None:
+ logger.debug("Caching sum in {0}".format(md5_cache))
+ stream = open(md5_cache, "w")
+ stream.write(stdin)
+ stream.close()
+ return md5sum
+
+def parse_md5sum_line(lines, filename):
+ md5sum, md5sum_filename = lines[0].split()
+ if md5sum_filename != filename:
+ errmsg = "MD5sum and I disagre about filename. {0} != {1}"
+ logger.error(errmsg.format(filename, md5sum_filename))
+ return None
+ return md5sum
+
--- /dev/null
+"""Helper features for working with librdf
+"""
+import os
+import types
+
+import RDF
+
+# standard ontology namespaces
+owlNS = RDF.NS('http://www.w3.org/2002/07/owl#')
+dublinCoreNS = RDF.NS("http://purl.org/dc/elements/1.1/")
+rdfNS = RDF.NS("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+rdfsNS= RDF.NS("http://www.w3.org/2000/01/rdf-schema#")
+xsdNS = RDF.NS("http://www.w3.org/2001/XMLSchema#")
+
+# internal ontologies
+submissionOntology = RDF.NS("http://jumpgate.caltech.edu/wiki/UcscSubmissionOntology#")
+dafTermOntology = RDF.NS("http://jumpgate.caltech.edu/wiki/UcscDaf#")
+libraryOntology = RDF.NS("http://jumpgate.caltech.edu/wiki/LibraryOntology#")
+submissionLog = RDF.NS("http://jumpgate.caltech.edu/wiki/SubmissionsLog/")
+
+def sparql_query(model, query_filename):
+ """Execute sparql query from file
+ """
+ query_body = open(query_filename,'r').read()
+ query = RDF.SPARQLQuery(query_body)
+ results = query.execute(model)
+ for row in results:
+ output = []
+ for k,v in row.items()[::-1]:
+ print "{0}: {1}".format(k,v)
+ print
+
+
+def blankOrUri(value=None):
+ node = None
+ if value is None:
+ node = RDF.Node()
+ elif type(value) in types.StringTypes:
+ node = RDF.Node(uri_string=value)
+ elif isinstance(value, RDF.Node):
+ node = value
+
+ return node
+
+
+def toTypedNode(value):
+    """Convert a python value into an xsd-typed RDF literal node.
+
+    Booleans become xsd:boolean '1'/'0', ints/longs xsd:decimal, floats
+    xsd:float, strings xsd:string; anything else is stringified with no
+    datatype.
+    """
+    if type(value) == types.BooleanType:
+        value_type = xsdNS['boolean'].uri
+        if value:
+            value = u'1'
+        else:
+            value = u'0'
+    elif type(value) in (types.IntType, types.LongType):
+        value_type = xsdNS['decimal'].uri
+        value = unicode(value)
+    elif type(value) == types.FloatType:
+        value_type = xsdNS['float'].uri
+        value = unicode(value)
+    elif type(value) in types.StringTypes:
+        value_type = xsdNS['string'].uri
+    else:
+        # fall back to an untyped literal
+        value_type = None
+        value = unicode(value)
+
+    return RDF.Node(literal=value, datatype=value_type)
+
+def fromTypedNode(node):
+ if node is None:
+ return None
+
+ value_type = str(node.literal_value['datatype'])
+ # chop off xml schema declaration
+ value_type = value_type.replace(str(xsdNS[''].uri),'')
+ literal = node.literal_value['string']
+ literal_lower = literal.lower()
+
+ if value_type == 'boolean':
+ if literal_lower in ('1', 'yes', 'true'):
+ return True
+ elif literal_lower in ('0', 'no', 'false'):
+ return False
+ else:
+ raise ValueError("Unrecognized boolean %s" % (literal,))
+ elif value_type == 'decimal' and literal.find('.') == -1:
+ return int(literal)
+ elif value_type in ('decimal', 'float', 'double'):
+ return float(literal)
+ elif value_type in ('string'):
+ return literal
+ elif value_type in ('dateTime'):
+ raise NotImplemented('need to parse isoformat date-time')
+
+ return literal
+
+
+def get_model(model_name=None, directory=None):
+ if directory is None:
+ directory = os.getcwd()
+
+ if model_name is None:
+ storage = RDF.MemoryStorage()
+ else:
+ storage = RDF.HashStorage(model_name,
+ options="hash-type='bdb',dir='{0}'".format(directory))
+ model = RDF.Model(storage)
+ return model
+
+
+def load_into_model(model, parser_name, filename, ns=None):
+ if not os.path.exists(filename):
+ raise IOError("Can't find {0}".format(filename))
+
+ data = open(filename, 'r').read()
+ rdf_parser = RDF.Parser(name=parser_name)
+ rdf_parser.parse_string_into_model(model, data, ns)
+
+
+def get_serializer(name='turtle'):
+ """Return a serializer with our standard prefixes loaded
+ """
+ writer = RDF.Serializer(name=name)
+ # really standard stuff
+ writer.set_namespace('owl', owlNS._prefix)
+ writer.set_namespace('rdf', rdfNS._prefix)
+ writer.set_namespace('rdfs', rdfsNS._prefix)
+ writer.set_namespace('xsd', xsdNS._prefix)
+
+ # should these be here, kind of specific to an application
+ writer.set_namespace('libraryOntology', libraryOntology._prefix)
+ writer.set_namespace('ucscSubmission', submissionOntology._prefix)
+ writer.set_namespace('ucscDaf', dafTermOntology._prefix)
+ return writer
+
if __name__ == "__main__":
unittest.main(defaultTest='suite')
-
-
-
-
--- /dev/null
+import unittest
+
+from htsworkflow.util.rdfhelp import toTypedNode, blankOrUri
+try:
+ import RDF
+
+    class TestRDFHelp(unittest.TestCase):
+        """Exercise the typed-node and blank/URI node helpers."""
+        def test_typed_node_boolean(self):
+            # True serializes as the literal '1' with the xsd:boolean type
+            node = toTypedNode(True)
+            self.failUnlessEqual(node.literal_value['string'], u'1')
+            self.failUnlessEqual(str(node.literal_value['datatype']),
+                                 'http://www.w3.org/2001/XMLSchema#boolean')
+
+        def test_typed_node_string(self):
+            # plain strings get the xsd:string datatype
+            node = toTypedNode('hello')
+            self.failUnlessEqual(node.literal_value['string'], u'hello')
+            self.failUnlessEqual(str(node.literal_value['datatype']),
+                                 'http://www.w3.org/2001/XMLSchema#string')
+
+        def test_blank_or_uri_blank(self):
+            # no argument yields a blank node
+            node = blankOrUri()
+            self.failUnlessEqual(node.is_blank(), True)
+
+        def test_blank_or_uri_url(self):
+            # a string yields a resource node with that URI
+            s = 'http://google.com'
+            node = blankOrUri(s)
+            self.failUnlessEqual(node.is_resource(), True)
+            self.failUnlessEqual(str(node.uri), s)
+
+        def test_blank_or_uri_node(self):
+            # an existing node is passed through unchanged
+            s = RDF.Node(RDF.Uri('http://google.com'))
+            node = blankOrUri(s)
+            self.failUnlessEqual(node.is_resource(), True)
+            self.failUnlessEqual(node, s)
+
+ def suite():
+ return unittest.makeSuite(testRdfHelp, 'test')
+except ImportError, e:
+ print "Unable to test rdfhelp"
+
+ def suite():
+ return None
+
+if __name__ == "__main__":
+ unittest.main(defaultTest='suite')
from htsworkflow.util import validate
class TestValidate(unittest.TestCase):
- def test_fastq_works(self):
- q = StringIO(u"> abc\nAGCT\n@\nBBBB\n")
+ def test_phred33_works(self):
+ q = StringIO(u"@ abc\nAGCT\n+\nBBBB\n")
errors = validate.validate_fastq(q)
self.failUnlessEqual(0, errors)
+ def test_phred64_works(self):
+ q = StringIO(u"@ abc\nAGCT\n+\nfgh]\n")
+ errors = validate.validate_fastq(q, 'phred64')
+ self.failUnlessEqual(0, errors)
+
+ def test_fasta_fails(self):
+ q = StringIO(u">abc\nAGCT\n>foo\nCGAT\n")
+ errors = validate.validate_fastq(q)
+ self.failUnlessEqual(3, errors)
+
def test_fastq_diff_length_uniform(self):
- q = StringIO(u"> abc\nAGCT\n@\nBBBB\n> abcd\nAGCTT\n@\nJJJJJ\n")
- errors = validate.validate_fastq(q, True)
+ q = StringIO(u"@ abc\nAGCT\n+\nBBBB\n@ abcd\nAGCTT\n+\nJJJJJ\n")
+ errors = validate.validate_fastq(q, 'phred33', True)
self.failUnlessEqual(2, errors)
def test_fastq_diff_length_variable(self):
- q = StringIO(u"> abc\nAGCT\n@\n@@@@\n> abcd\nAGCTT\n@\nJJJJJ\n")
- errors = validate.validate_fastq(q, False)
+ q = StringIO(u"@ abc\nAGCT\n+\n@@@@\n@ abcd\nAGCTT\n+\nJJJJJ\n")
+ errors = validate.validate_fastq(q, 'phred33', False)
self.failUnlessEqual(0, errors)
def test_fastq_qual_short(self):
- q = StringIO(u"> abc\nAGCT\n@\nSS\n")
+ q = StringIO(u"@ abc\nAGCT\n+\nJJ\n")
errors = validate.validate_fastq(q)
self.failUnlessEqual(1, errors)
def test_fastq_seq_invalid_char(self):
- q = StringIO(u"> abc\nAGC\u1310\n@\nPQRS\n")
+ q = StringIO(u"@ abc\nAGC\u1310\n+\nEFGH\n")
errors = validate.validate_fastq(q)
self.failUnlessEqual(1, errors)
def test_fastq_qual_invalid_char(self):
- q = StringIO(u"> abc\nAGC.\n@\n!@#J\n")
+ q = StringIO(u"+ abc\nAGC.\n+\n!@#J\n")
errors = validate.validate_fastq(q)
self.failUnlessEqual(1, errors)
parser = make_parser()
opts, args = parser.parse_args(cmdline)
+ error_happened = False
for filename in args[1:]:
stream = open(filename, 'r')
+
if opts.fastq:
- validate_fastq(f, opts.uniform_lengths)
+ errors = validate_fastq(stream,
+ opts.format,
+ opts.uniform_lengths,
+ opts.max_errors)
+ if errors > 0:
+ print "%s failed validation" % (filename,)
+ error_happened = True
+
stream.close()
+
+ if error_happened:
+ return 1
+
return 0
def make_parser():
help="verify arguments are valid fastq file")
parser.add_option("--uniform-lengths", action="store_true", default=False,
help="require all reads to be of the same length")
+ parser.add_option("--max-errors", type="int", default=None)
+ encodings=['phred33', 'phred64']
+ parser.add_option("--format", type="choice",
+ choices=encodings,
+ default='phred64',
+ help="choose quality encoding one of: %s" % (", ".join(encodings)))
return parser
-def validate_fastq(stream, uniform_length=False):
+def validate_fastq(stream, format='phred33', uniform_length=False, max_errors=None):
"""Validate that a fastq file isn't corrupted
uniform_length - requires that all sequence & qualities must be
FQ_SEQ = 2
FQ_H2 = 3
FQ_QUAL = 4
- h1_re = re.compile("^>[ \t\w]*$")
+ h1_re = re.compile("^@[\s\w:-]*$")
seq_re = re.compile("^[AGCT.N]+$", re.IGNORECASE)
- h2_re = re.compile("^@[ \t\w]*$")
+ h2_re = re.compile("^\+[\s\w:-]*$")
phred33 = re.compile("^[!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJ]+$")
phred64 = re.compile("^[@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefgh]+$")
+ if format == 'phred33':
+ quality_re = phred33
+ elif format == 'phred64':
+ quality_re = phred64
+ else:
+ raise ValueError("Unrecognized quality format name")
+
state = FQ_H1
length = None
line_number = 1
errors = 0
for line in stream:
line = line.rstrip()
+ len_errors = 0
if state == FQ_H1:
# reset length at start of new record for non-uniform check
if not uniform_length:
length = None
# start of record checks
- errors = validate_re(h1_re, line, line_number, errors,
- "FAIL H1")
+ errors += validate_re(h1_re, line, line_number, "FAIL H1")
state = FQ_SEQ
elif state == FQ_SEQ:
- errors = validate_re(seq_re, line, line_number, errors,
- "FAIL SEQ")
- length, errors = validate_length(line, length, line_number,
- errors,
- "FAIL SEQ LEN")
+ errors += validate_re(seq_re, line, line_number, "FAIL SEQ")
+ length, len_errors = validate_length(line, length, line_number,
+ "FAIL SEQ LEN")
+ errors += len_errors
state = FQ_H2
elif state == FQ_H2:
- errors = validate_re(h2_re, line, line_number, errors, "FAIL H2")
+ errors += validate_re(h2_re, line, line_number, "FAIL H2")
state = FQ_QUAL
elif state == FQ_QUAL:
- errors = validate_re(phred64, line, line_number, errors,
- "FAIL QUAL")
- length, errors = validate_length(line, length, line_number, errors,
- "FAIL QUAL LEN")
+ errors += validate_re(quality_re, line, line_number, "FAIL QUAL")
+ length, len_errors = validate_length(line, length, line_number,
+ "FAIL QUAL LEN")
+ errors += len_errors
state = FQ_H1
else:
raise RuntimeError("Invalid state: %d" % (state,))
line_number += 1
+ if max_errors is not None and errors > max_errors:
+ break
+
return errors
-def validate_re(pattern, line, line_number, error_count, errmsg):
+def validate_re(pattern, line, line_number, errmsg):
if pattern.match(line) is None:
print errmsg, "[%d]: %s" % (line_number, line)
- error_count += 1
- return error_count
+ return 1
+ else:
+ return 0
-def validate_length(line, line_length, line_number, error_count, errmsg):
+def validate_length(line, line_length, line_number, errmsg):
"""
if line_length is None, sets it
"""
+ error_count = 0
if line_length is None:
line_length = len(line)
elif len(line) != line_length:
print errmsg, "%d: %s" %(line_number, line)
- error_count += 1
+ error_count = 1
return line_length, error_count
--- /dev/null
+#!/usr/bin/env python
+from django.core.management import execute_manager
+# Standard django management entry point: import the project settings
+# and hand them to execute_manager.
+try:
+    import settings # Assumed to be in the same directory.
+except ImportError:
+    import sys
+    # Bail out with guidance when the project settings module is missing.
+    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
+    sys.exit(1)
+
+if __name__ == "__main__":
+    execute_manager(settings)
#!/usr/bin/env python
-from htsworkflow.util.hdquery import get_hd_serial_num
-from htsworkflow.frontend import settings
-
from optparse import OptionParser
import os
import re
import urllib2
import urlparse
+from django.conf import settings
+
+from htsworkflow.util.hdquery import get_hd_serial_num
+
runfolder_pattern = re.compile(r'[0-9]{6}_[-A-Za-z\d]+_\d+_(?P<flowcell>[A-Z\d]+)\.tgz')
def extract_flowcell(runfolder_name):
from htsworkflow.pipelines import runfolder
from htsworkflow.pipelines.runfolder import ElementTree
-
+
def make_parser():
usage = 'usage: %prog [options] runfolder_root_dir'
parser = optparse.OptionParser(usage)
' GERALD directory, and it assumes the parent '
'directories are the bustard and image processing '
'directories.')
+ parser.add_option('--raw-format', dest="raw_format", default='qseq',
+ choices=['qseq', 'srf'],
+ help='Specify which type of raw format to use. '
+ 'Currently supported options: qseq, srf')
return parser
if opt.extract_results:
if opt.dry_run:
parser.error("Dry-run is not supported for extract-results")
- runfolder.extract_results(runs,
- opt.output_dir,
- opt.site,
- opt.max_jobs)
+ runfolder.extract_results(runs,
+ opt.output_dir,
+ opt.site,
+ opt.max_jobs,
+ opt.raw_format)
command_run = True
if opt.clean:
runfolder.clean_runs(runs, opt.dry_run)
command_run = True
if command_run == False:
- print "You need to specify a command."+os.linesep
+ print "You need to specify a command." + os.linesep
parser.print_help()
else:
- print "You need to specify some run folders to process..."+os.linesep
+ print "You need to specify some run folders to process..." + os.linesep
parser.print_help()
return 0
--- /dev/null
+#
+# provide a pointer to the right settings.py file for
+# programs that assume it starts in the current directory
+from htsworkflow.settings import *
from StringIO import StringIO
from htsworkflow.automation import copier
+from htsworkflow.automation.solexa import is_runfolder
-class testCopier(unittest.TestCase):
- def test_runfolder_validate(self):
- self.failUnlessEqual(copier.runfolder_validate(""), False)
- self.failUnlessEqual(copier.runfolder_validate("1345_23"), False)
- self.failUnlessEqual(copier.runfolder_validate("123456_asdf-$23'"), False)
- self.failUnlessEqual(copier.runfolder_validate("123456_USI-EAS44"), True)
- self.failUnlessEqual(copier.runfolder_validate("123456_USI-EAS44 "), False)
-
+class testCopier(unittest.TestCase):
def test_empty_config(self):
cfg = StringIO("""[fake]
something: unrelated
_module_path, _module_name = os.path.split(__file__)
sys.path.append(os.path.join(_module_path, '..', 'scripts'))
+from htsworkflow.pipelines.test.simulate_runfolder import TESTDATA_DIR
+
from htsworkflow.pipelines import srf2fastq
class testSrf2Fastq(unittest.TestCase):
def test_is_srf(self):
cnf4_srf = 'woldlab_070829_USI-EAS44_0017_FC11055_1.srf'
- cnf4_path = os.path.join(_module_path, cnf4_srf)
+ cnf4_path = os.path.join(TESTDATA_DIR, cnf4_srf)
cnf1_srf = 'woldlab_090512_HWI-EAS229_0114_428NNAAXX_5.srf'
- cnf1_path = os.path.join(_module_path, cnf1_srf)
+ cnf1_path = os.path.join(TESTDATA_DIR, cnf1_srf)
is_srf = srf2fastq.is_srf
self.failUnlessEqual(is_srf(__file__), False)
def test_is_cnf1(self):
cnf4_srf = 'woldlab_070829_USI-EAS44_0017_FC11055_1.srf'
- cnf4_path = os.path.join(_module_path, cnf4_srf)
+ cnf4_path = os.path.join(TESTDATA_DIR, cnf4_srf)
cnf1_srf = 'woldlab_090512_HWI-EAS229_0114_428NNAAXX_5.srf'
- cnf1_path = os.path.join(_module_path, cnf1_srf)
+ cnf1_path = os.path.join(TESTDATA_DIR, cnf1_srf)
is_cnf1 = srf2fastq.is_cnf1
self.failUnlessRaises(ValueError, is_cnf1, __file__)