From: Diane Trout Date: Thu, 17 Oct 2013 22:03:43 +0000 (-0700) Subject: Merge branch 'master' into django1.4 X-Git-Url: http://woldlab.caltech.edu/gitweb/?p=htsworkflow.git;a=commitdiff_plain;h=aa47067f5a80eef8eccd50b3c0f478f79cf0caf8;hp=10b111edbe6ceb8da7d4296795857060ea831d69 Merge branch 'master' into django1.4 --- diff --git a/.gitignore b/.gitignore index 6d73f11..3eb65bb 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ .tox dist RELEASE-VERSION +docs/build/ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..6fe1342 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,153 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." 
+ +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/HTS-Workflow.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/HTS-Workflow.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/HTS-Workflow" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/HTS-Workflow" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/docs/source/api.rst b/docs/source/api.rst new file mode 100644 index 0000000..e6c6c11 --- /dev/null +++ b/docs/source/api.rst @@ -0,0 +1,134 @@ +Runfolder Processing +==================== + +Runfolder +--------- + +The PipelineRun class is designed to combine information +from the following importers. + +* Image Analysis (one of the following) + + * :class:`Firecrest` + * :class:`IPAR` + +* BaseCaller + + * :class:`Bustard` + +* Sequence Alignment + + * :class:`Gerald` + +.. automodule:: htsworkflow.pipelines.runfolder + :members: + +.. _Firecrest: + +Firecrest +--------- + +.. automodule:: htsworkflow.pipelines.firecrest + :members: + +.. _IPAR: + +IPAR +---- + +.. 
automodule:: htsworkflow.pipelines.ipar + :members: + +.. _Bustard: + +Bustard +------- +.. automodule:: htsworkflow.pipelines.bustard + :members: + +.. _Gerald: + +Gerald +------ + +.. automodule:: htsworkflow.pipelines.gerald + :members: + +.. _Eland: + +Eland +----- + +.. automodule:: htsworkflow.pipelines.eland + :members: + +.. _Summary: + +Summary +------- + +.. automodule:: htsworkflow.pipelines.summary + :members: + +Sequence Archival +================= + +srf +--- + +.. automodule:: htsworkflow.pipelines.srf + :members: + +Fastq conversion +================ + +srf2fastq +--------- + +.. automodule:: htsworkflow.pipelines.srf2fastq + :members: + +qseq2fastq +---------- + +.. automodule:: htsworkflow.pipelines.qseq2fastq + :members: + +desplit_fastq +------------- + +.. automodule:: htsworkflow.pipelines.desplit_fastq + :members: + +sequences +--------- + +.. automodule:: htsworkflow.pipelines.sequences + :members: + +Utilities +========= + +.. automodule:: htsworkflow.pipelines.genome_mapper + :members: + +.. automodule:: htsworkflow.pipelines.genomemap + :members: + +.. automodule:: htsworkflow.pipelines.samplekey + :members: + +.. automodule:: htsworkflow.pipelines.recipe_parser + :members: + +Run Automation +============== + +.. automodule:: htsworkflow.pipelines.configure_run + :members: + +.. automodule:: htsworkflow.pipelines.retrieve_config + :members: + +.. automodule:: htsworkflow.pipelines.run_status + :members: diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..08f47eb --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# +# HTS-Workflow documentation build configuration file, created by +# sphinx-quickstart on Mon Jan 14 10:18:40 2013. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath('../../htsworkflow')) + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.viewcode'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['.templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'HTS-Workflow' +copyright = u'2013, Diane Trout' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.5' +# The full version, including alpha/beta/rc tags. 
+release = '0.5.4' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['.static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'HTS-Workflowdoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('index', 'HTS-Workflow.tex', u'HTS-Workflow Documentation', + u'Diane Trout', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'hts-workflow', u'HTS-Workflow Documentation', + [u'Diane Trout'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'HTS-Workflow', u'HTS-Workflow Documentation', + u'Diane Trout', 'HTS-Workflow', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + + +# -- Options for Epub output --------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = u'HTS-Workflow' +epub_author = u'Diane Trout' +epub_publisher = u'Diane Trout' +epub_copyright = u'2013, Diane Trout' + +# The language of the text. It defaults to the language option +# or en if the language is not set. +#epub_language = '' + +# The scheme of the identifier. Typical schemes are ISBN or URL. +#epub_scheme = '' + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +#epub_identifier = '' + +# A unique identification for the text. +#epub_uid = '' + +# A tuple containing the cover image and cover page html template filenames. +#epub_cover = () + +# HTML files that should be inserted before the pages created by sphinx. +# The format is a list of tuples containing the path and title. 
+#epub_pre_files = [] + +# HTML files shat should be inserted after the pages created by sphinx. +# The format is a list of tuples containing the path and title. +#epub_post_files = [] + +# A list of files that should not be packed into the epub file. +#epub_exclude_files = [] + +# The depth of the table of contents in toc.ncx. +#epub_tocdepth = 3 + +# Allow duplicate toc entries. +#epub_tocdup = True + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'http://docs.python.org/': None} diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..9a15dbd --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,22 @@ +.. HTS-Workflow documentation master file, created by + sphinx-quickstart on Mon Jan 14 10:18:40 2013. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to HTS-Workflow's documentation! +======================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + api + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/encode_submission/encode3.py b/encode_submission/encode3.py new file mode 100644 index 0000000..875d3bd --- /dev/null +++ b/encode_submission/encode3.py @@ -0,0 +1,199 @@ +"""Create a track hub +""" + +#!/usr/bin/env python +from ConfigParser import SafeConfigParser +import fnmatch +from glob import glob +import json +import logging +import netrc +from optparse import OptionParser, OptionGroup +import os +from pprint import pprint, pformat +import shlex +from StringIO import StringIO +import stat +import sys +import time +import types +import urllib +import urllib2 +import urlparse +from zipfile import ZipFile + +import RDF + +if not 'DJANGO_SETTINGS_MODULE' in os.environ: + os.environ['DJANGO_SETTINGS_MODULE'] = 'htsworkflow.settings' + +from htsworkflow.util import api +from htsworkflow.util.rdfhelp import \ + dafTermOntology, \ + fromTypedNode, \ + get_model, \ + get_serializer, \ + load_into_model, \ + sparql_query, \ + submissionOntology +from htsworkflow.submission.daf import get_submission_uri +from htsworkflow.submission.submission import list_submissions +from htsworkflow.submission.results import ResultMap +from htsworkflow.submission.trackhub_submission import TrackHubSubmission +from htsworkflow.submission.condorfastq import CondorFastqExtract + +logger = logging.getLogger(__name__) + +INDENTED = " " + os.linesep + +def main(cmdline=None): + parser = make_parser() + opts, args = parser.parse_args(cmdline) + submission_uri = None + + if opts.debug: + logging.basicConfig(level = logging.DEBUG ) + elif opts.verbose: + logging.basicConfig(level = logging.INFO ) + else: + logging.basicConfig(level = logging.WARNING ) + + apidata = api.make_auth_from_opts(opts, parser) + + model = get_model(opts.model, opts.db_path) + + submission_names = list(list_submissions(model)) + name = opts.name + if len(submission_names) == 0 and opts.name is None: + parser.error("Please name this submission") + elif opts.name and submission_names and opts.name not in submission_names: + parser.error("{} is not in this model. 
Choose from: {}{}".format( + opts.name, + os.linesep, + INDENTED.join(submission_names))) + elif opts.name is None and len(submission_names) > 1: + parser.error("Please choose submission name from: {}{}".format( + os.linesep, + INDENTED.join(submission_names))) + elif len(submission_names) == 1: + name = submission_names[0] + + mapper = None + if opts.make_track_hub: + mapper = TrackHubSubmission(name, + model, + baseurl=opts.make_track_hub, + baseupload=opts.track_hub_upload, + host=opts.host) + submission_uri = get_submission_uri(name) + + + if opts.load_rdf is not None: + if submission_uri is None: + parser.error("Please specify the submission name") + load_into_model(model, 'turtle', opts.load_rdf, submission_uri) + + results = ResultMap() + for a in args: + if os.path.exists(a): + results.add_results_from_file(a) + else: + logger.warn("File %s doesn't exist.", a) + + if opts.make_link_tree_from is not None: + results.make_tree_from(opts.make_link_tree_from, link=True) + + if opts.copy_tree_from is not None: + results.make_tree_from(opts.copy_tree_from, link=False) + + if opts.fastq: + logger.info("Building fastq extraction scripts") + flowcells = os.path.join(opts.sequence, 'flowcells') + extractor = CondorFastqExtract(opts.host, flowcells, + model=opts.model, + force=opts.force) + extractor.create_scripts(results) + + if opts.scan_submission: + if name is None: + parser.error("Please define a submission name") + mapper.scan_submission_dirs(results) + + if opts.make_track_hub: + trackdb = mapper.make_hub(results) + + if opts.make_manifest: + make_manifest(mapper, results, opts.make_manifest) + + if opts.sparql: + sparql_query(model, opts.sparql) + + if opts.print_rdf: + writer = get_serializer() + print writer.serialize_model_to_string(model) + + +def make_manifest(mapper, results, filename=None): + manifest = mapper.make_manifest(results) + + if filename is None or filename == '-': + sys.stdout.write(manifest) + else: + with open(filename, 'w') as mainifeststream: + mainifeststream.write(manifest) + +def make_parser(): + parser = OptionParser() + + model = OptionGroup(parser, 'model') + model.add_option('--name', help="Set submission name") + model.add_option('--db-path', default=None, + help="set rdf database path") + model.add_option('--model', default=None, + help="Load model database") + model.add_option('--load-rdf', default=None, + help="load rdf statements into model") + model.add_option('--sparql', default=None, help="execute sparql query") + model.add_option('--print-rdf', action="store_true", default=False, + help="print ending model state") + parser.add_option_group(model) + # commands + commands = OptionGroup(parser, 'commands') + commands.add_option('--make-link-tree-from', + help="create directories & link data files", + default=None) + commands.add_option('--copy-tree-from', + help="create directories & copy data files", + default=None) + commands.add_option('--fastq', default=False, action="store_true", + help="generate scripts for making fastq files") + commands.add_option('--scan-submission', default=False, action="store_true", + help="Import metadata for submission into our model") + commands.add_option('--make-track-hub', default=None, + help='web root that will host the trackhub.') + commands.add_option('--track-hub-upload', default=None, + help='where to upload track hub :') + commands.add_option('--make-manifest', + help='name the manifest file name or - for stdout to create it', + default=None) + + + parser.add_option_group(commands) + + 
parser.add_option('--force', default=False, action="store_true", + help="Force regenerating fastqs") + parser.add_option('--daf', default=None, help='specify daf name') + parser.add_option('--library-url', default=None, + help="specify an alternate source for library information") + # debugging + parser.add_option('--verbose', default=False, action="store_true", + help='verbose logging') + parser.add_option('--debug', default=False, action="store_true", + help='debug logging') + + api.add_auth_options(parser) + + return parser + +if __name__ == "__main__": + main() diff --git a/htsworkflow/frontend/experiments/experiments.py b/htsworkflow/frontend/experiments/experiments.py index 1ccba52..f24d13d 100644 --- a/htsworkflow/frontend/experiments/experiments.py +++ b/htsworkflow/frontend/experiments/experiments.py @@ -9,7 +9,7 @@ import os import re from django.contrib.auth.decorators import login_required -from django.contrib.csrf.middleware import csrf_exempt +from django.views.decorators.csrf import csrf_exempt from django.core.exceptions import ObjectDoesNotExist from django.core.mail import send_mail, mail_admins from django.http import HttpResponse, Http404 diff --git a/htsworkflow/frontend/experiments/fixtures/initial_data.json b/htsworkflow/frontend/experiments/fixtures/initial_data.json index 3371978..e18b5ff 100644 --- a/htsworkflow/frontend/experiments/fixtures/initial_data.json +++ b/htsworkflow/frontend/experiments/fixtures/initial_data.json @@ -195,5 +195,9 @@ "isdefault": false, "comment": "our first sequencer" } + }, + { "model": "experiments.ClusterStation", + "pk": 3, + "fields": { "name": "new", "isdefault": false } } -] \ No newline at end of file +] diff --git a/htsworkflow/frontend/experiments/test_experiments.py b/htsworkflow/frontend/experiments/test_experiments.py index 8eb9983..5878d72 100644 --- a/htsworkflow/frontend/experiments/test_experiments.py +++ b/htsworkflow/frontend/experiments/test_experiments.py @@ -15,6 +15,8 @@ from django.core import mail from django.core.exceptions import ObjectDoesNotExist from django.test import TestCase from django.test.utils import setup_test_environment, teardown_test_environment +from django.db import connection +from django.conf import settings from htsworkflow.frontend.experiments import models from htsworkflow.frontend.experiments import experiments from htsworkflow.frontend.auth import apidata @@ -26,6 +28,18 @@ LANE_SET = range(1,9) NSMAP = {'libns':'http://jumpgate.caltech.edu/wiki/LibraryOntology#'} +from django.db import connection +OLD_DB_NAME = settings.DATABASE_NAME +VERBOSITY = 0 +def setUpModule(): + setup_test_environment() + settings.DEBUG = False + connection.creation.create_test_db(VERBOSITY) + +def tearDownModule(): + connection.creation.destroy_test_db(OLD_DB_NAME, VERBOSITY) + teardown_test_environment() + class ClusterStationTestCases(TestCase): fixtures = ['test_flowcells.json'] @@ -480,10 +494,9 @@ class TestFileType(TestCase): file_type_objects = models.FileType.objects name = 'QSEQ tarfile' file_type_object = file_type_objects.get(name=name) - self.assertEqual(u"", + self.assertEqual(u"QSEQ tarfile", unicode(file_type_object)) -class TestFileType(TestCase): def test_find_file_type(self): file_type_objects = models.FileType.objects cases = [('woldlab_090921_HWUSI-EAS627_0009_42FC3AAXX_l7_r1.tar.bz2', @@ -537,17 +550,6 @@ class TestFileType(TestCase): class TestEmailNotify(TestCase): fixtures = ['test_flowcells.json'] - @classmethod - def setUpClass(self): - # isolate django mail when running under 
unittest2 - setup_test_environment() - - @classmethod - def tearDownClass(self): - # isolate django mail when running under unittest2 - teardown_test_environment() - - def test_started_email_not_logged_in(self): response = self.client.get('/experiments/started/153/') self.assertEqual(response.status_code, 302) @@ -684,3 +686,29 @@ class TestSequencer(TestCase): errmsgs = list(inference.run_validation()) self.assertEqual(len(errmsgs), 0) + + +OLD_DB = settings.DATABASES['default']['NAME'] +def setUpModule(): + setup_test_environment() + connection.creation.create_test_db() + +def tearDownModule(): + connection.creation.destroy_test_db(OLD_DB) + teardown_test_environment() + +def suite(): + from unittest2 import TestSuite, defaultTestLoader + suite = TestSuite() + for testcase in [ClusterStationTestCases, + SequencerTestCases, + ExerimentsTestCases, + TestFileType, + TestEmailNotify, + TestSequencer]: + suite.addTests(defaultTestLoader.loadTestsFromTestCase(testcase)) + return suite + +if __name__ == "__main__": + from unittest2 import main + main(defaultTest="suite") diff --git a/htsworkflow/frontend/inventory/test_inventory.py b/htsworkflow/frontend/inventory/test_inventory.py index 118c654..86d37b7 100644 --- a/htsworkflow/frontend/inventory/test_inventory.py +++ b/htsworkflow/frontend/inventory/test_inventory.py @@ -1,6 +1,11 @@ import RDF from django.test import TestCase +from django.test.utils import setup_test_environment, \ + teardown_test_environment +from django.db import connection +from django.conf import settings + from django.contrib.auth.models import User from django.core import urlresolvers @@ -108,6 +113,15 @@ class InventoryTestCase(TestCase): flowcells = [ str(x.uri) for x in targets] return flowcells +OLD_DB = settings.DATABASES['default']['NAME'] +def setUpModule(): + setup_test_environment() + connection.creation.create_test_db() + +def tearDownModule(): + connection.creation.destroy_test_db(OLD_DB) + teardown_test_environment() + def suite(): from unittest2 import TestSuite, defaultTestLoader suite = TestSuite() diff --git a/htsworkflow/frontend/inventory/views.py b/htsworkflow/frontend/inventory/views.py index 1fb7378..265e1bc 100644 --- a/htsworkflow/frontend/inventory/views.py +++ b/htsworkflow/frontend/inventory/views.py @@ -1,5 +1,6 @@ -from htsworkflow.frontend.samples.changelist import ChangeList +from htsworkflow.frontend.samples.changelist import HTSChangeList from htsworkflow.frontend.inventory.models import Item, LongTermStorage, ItemType +from htsworkflow.frontend.inventory.admin import ItemAdmin, ItemTypeAdmin from htsworkflow.frontend.inventory.bcmagic import item_search from htsworkflow.frontend.bcmagic.plugin import register_search_plugin from htsworkflow.frontend.experiments.models import FlowCell @@ -138,11 +139,11 @@ def all_index(request): Inventory Index View """ # build changelist - item_changelist = ChangeList(request, Item, + item_changelist = HTSChangeList(request, Item, list_filter=[], search_fields=[], list_per_page=200, - queryset=Item.objects.all() + model_admin=ItemAdmin(Item, None) ) context_dict = { @@ -161,11 +162,11 @@ def index(request): Inventory Index View """ # build changelist - item_changelist = ChangeList(request, Item, + item_changelist = HTSChangeList(request, ItemType, list_filter=[], - search_fields=['name'], + search_fields=['name', 'description'], list_per_page=50, - queryset=ItemType.objects.all() + model_admin=ItemTypeAdmin(ItemType, None) ) context_dict = { @@ -173,7 +174,6 @@ def index(request): 'page_name': 
'Inventory Index' } context_dict.update(INVENTORY_CONTEXT_DEFAULTS) - return render_to_response('inventory/inventory_index.html', context_dict, context_instance=RequestContext(request)) @@ -189,11 +189,11 @@ def itemtype_index(request, name): itemtype = ItemType.objects.get(name=name) # build changelist - item_changelist = ChangeList(request, Item, + item_changelist = HTSChangeList(request, Item, list_filter=[], search_fields=[], list_per_page=200, - queryset=itemtype.item_set.all() + model_admin=ItemAdmin(Item, None) ) context_dict = { diff --git a/htsworkflow/frontend/samples/admin.py b/htsworkflow/frontend/samples/admin.py index e31f581..b97668d 100644 --- a/htsworkflow/frontend/samples/admin.py +++ b/htsworkflow/frontend/samples/admin.py @@ -1,6 +1,6 @@ from django.contrib import admin from django.contrib.admin import widgets -from django.contrib.admin.models import User +from django.contrib.auth.models import User from django.contrib.auth.admin import UserAdmin from django.contrib.auth.forms import UserCreationForm, UserChangeForm from django.template import Context, Template diff --git a/htsworkflow/frontend/samples/changelist.py b/htsworkflow/frontend/samples/changelist.py index cbbfd38..1b50418 100644 --- a/htsworkflow/frontend/samples/changelist.py +++ b/htsworkflow/frontend/samples/changelist.py @@ -1,253 +1,48 @@ -""" -Slightly modified version of the django admin component that handles filters and searches -""" -from django.contrib.admin.filterspecs import FilterSpec -from django.contrib.admin.options import IncorrectLookupParameters -from django.core.paginator import Paginator, InvalidPage, EmptyPage -from django.db import models -from django.db.models.query import QuerySet -from django.utils.encoding import force_unicode, smart_str -from django.utils.translation import ugettext -from django.utils.http import urlencode -import operator +import django +from django.contrib.admin.views.main import ChangeList + +class HTSChangeList(ChangeList): + def __init__(self, request, model, list_filter, search_fields, + list_per_page, model_admin, extra_filters=None): + """Simplification of the django model filter view + + The new parameter "extra_filter" should be a mapping + of that will be passed as keyword arguments to + queryset.filter + """ + self.extra_filters = extra_filters + + args = { + 'request': request, #request + 'model': model, #model + 'list_display': [], # list_display + 'list_display_links': None, # list_display_links + 'list_filter': list_filter, #list_filter + 'date_hierarchy': None, # date_hierarchy + 'search_fields': search_fields, #search_fields + 'list_select_related': None, # list_select_related, + 'list_per_page': list_per_page, #list_per_page + 'list_editable': None, # list_editable + 'model_admin': model_admin #model_admin + } + if django.VERSION[0] >= 1 and django.VERSION[1] >= 4: + args['list_max_show_all'] = 20000, #list_max_show_all + super(HTSChangeList, self).__init__(**args) + + self.is_popup = False + # I removed to field in the first version -try: - set -except NameError: - from sets import Set as set # Python 2.3 fallback - -# The system will display a "Show all" link on the change list only if the -# total result count is less than or equal to this setting. -MAX_SHOW_ALL_ALLOWED = 20000 - -# Changelist settings -ALL_VAR = 'all' -ORDER_VAR = 'o' -ORDER_TYPE_VAR = 'ot' -PAGE_VAR = 'p' -SEARCH_VAR = 'q' -TO_FIELD_VAR = 't' -IS_POPUP_VAR = 'pop' -ERROR_FLAG = 'e' - -# Text to display within change-list table cells if the value is blank. 
-EMPTY_CHANGELIST_VALUE = '(None)' - -class ChangeList(object): - - #def __init__(self, request, model, list_display, list_display_links, list_filter, date_hierarchy, search_fields, list_select_related, list_per_page, list_editable, model_admin): - def __init__(self, request, model, list_filter, search_fields, list_per_page, queryset=None): - self.model = model - self.opts = model._meta - self.lookup_opts = self.opts - if queryset is None: - self.root_query_set = model.objects.all() - else: - self.root_query_set = queryset - self.list_display = [] - self.list_display_links = None - self.list_filter = list_filter - #self.date_hierarchy = date_hierarchy - self.search_fields = search_fields - self.list_select_related = None - self.list_per_page = list_per_page - #self.list_editable = list_editable - self.model_admin = None - - # Get search parameters from the query string. - try: - self.page_num = int(request.GET.get(PAGE_VAR, '0')) - except ValueError: - self.page_num = 0 - self.show_all = 'all' in request.GET - #self.is_popup = IS_POPUP_VAR in request.GET - #self.to_field = request.GET.get(TO_FIELD_VAR) - self.params = dict(request.GET.items()) - if PAGE_VAR in self.params: - del self.params[PAGE_VAR] - #if TO_FIELD_VAR in self.params: - # del self.params[TO_FIELD_VAR] - if ERROR_FLAG in self.params: - del self.params[ERROR_FLAG] - self.multi_page = True self.can_show_all = False - self.order_field, self.order_type = self.get_ordering() - self.query = request.GET.get(SEARCH_VAR, '') - self.query_set = self.get_query_set() - self.get_results(request) - #self.title = (self.is_popup and ugettext('Select %s') % force_unicode(self.opts.verbose_name) or ugettext('Select %s to change') % force_unicode(self.opts.verbose_name)) - self.filter_specs, self.has_filters = self.get_filters(request) - #self.pk_attname = self.lookup_opts.pk.attname - - def get_filters(self, request): - filter_specs = [] - if self.list_filter: - filter_fields = [self.lookup_opts.get_field(field_name) for field_name in self.list_filter] - for f in filter_fields: - spec = FilterSpec.create(f, request, self.params, self.model, self.model_admin) - if spec and spec.has_output(): - filter_specs.append(spec) - return filter_specs, bool(filter_specs) - - def get_query_string(self, new_params=None, remove=None): - if new_params is None: new_params = {} - if remove is None: remove = [] - p = self.params.copy() - for r in remove: - for k in p.keys(): - if k.startswith(r): - del p[k] - for k, v in new_params.items(): - if v is None: - if k in p: - del p[k] - else: - p[k] = v - return '?%s' % urlencode(p) - - def get_results(self, request): - paginator = Paginator(self.query_set, self.list_per_page) - # Get the number of objects, with admin filters applied. - result_count = paginator.count - - # Get the total number of objects, with no admin filters applied. - # Perform a slight optimization: Check to see whether any filters were - # given. If not, use paginator.hits to calculate the number of objects, - # because we've already done paginator.hits and the value is cached. - if not self.query_set.query.where: - full_result_count = result_count - else: - full_result_count = self.root_query_set.count() - - can_show_all = result_count <= MAX_SHOW_ALL_ALLOWED - multi_page = result_count > self.list_per_page - - # Get the list of objects to display on this page. 
- if (self.show_all and can_show_all) or not multi_page: - result_list = self.query_set._clone() - else: - try: - result_list = paginator.page(self.page_num+1).object_list - except InvalidPage: - result_list = () - - self.result_count = result_count - self.full_result_count = full_result_count - self.result_list = result_list - self.can_show_all = can_show_all - self.multi_page = multi_page - self.paginator = paginator - - def get_ordering(self): - lookup_opts, params = self.lookup_opts, self.params - # For ordering, first check the "ordering" parameter in the admin - # options, then check the object's default ordering. If neither of - # those exist, order descending by ID by default. Finally, look for - # manually-specified ordering from the query string. - ordering = lookup_opts.ordering or ['-' + lookup_opts.pk.name] - - if ordering[0].startswith('-'): - order_field, order_type = ordering[0][1:], 'desc' - else: - order_field, order_type = ordering[0], 'asc' - if ORDER_VAR in params: - try: - field_name = self.list_display[int(params[ORDER_VAR])] - try: - f = lookup_opts.get_field(field_name) - except models.FieldDoesNotExist: - # See whether field_name is a name of a non-field - # that allows sorting. - try: - if callable(field_name): - attr = field_name - elif hasattr(self.model_admin, field_name): - attr = getattr(self.model_admin, field_name) - else: - attr = getattr(self.model, field_name) - order_field = attr.admin_order_field - except AttributeError: - pass - else: - order_field = f.name - except (IndexError, ValueError): - pass # Invalid ordering specified. Just use the default. - if ORDER_TYPE_VAR in params and params[ORDER_TYPE_VAR] in ('asc', 'desc'): - order_type = params[ORDER_TYPE_VAR] - return order_field, order_type - - def get_query_set(self): - qs = self.root_query_set - lookup_params = self.params.copy() # a dictionary of the query string - for i in (ALL_VAR, ORDER_VAR, ORDER_TYPE_VAR, SEARCH_VAR, IS_POPUP_VAR): - if i in lookup_params: - del lookup_params[i] - for key, value in lookup_params.items(): - if not isinstance(key, str): - # 'key' will be used as a keyword argument later, so Python - # requires it to be a string. - del lookup_params[key] - lookup_params[smart_str(key)] = value - - # if key ends with __in, split parameter into separate values - if key.endswith('__in'): - lookup_params[key] = value.split(',') - - # Apply lookup parameters from the query string. - try: - qs = qs.filter(**lookup_params) - # Naked except! Because we don't have any other way of validating "params". - # They might be invalid if the keyword arguments are incorrect, or if the - # values are not in the correct type, so we might get FieldError, ValueError, - # ValicationError, or ? from a custom field that raises yet something else - # when handed impossible data. - except: - raise IncorrectLookupParameters - - # Use select_related() if one of the list_display options is a field - # with a relationship and the provided queryset doesn't already have - # select_related defined. - if not qs.query.select_related: - if self.list_select_related: - qs = qs.select_related() - else: - for field_name in self.list_display: - try: - f = self.lookup_opts.get_field(field_name) - except models.FieldDoesNotExist: - pass - else: - if isinstance(f.rel, models.ManyToOneRel): - qs = qs.select_related() - break - - # Set ordering. - if self.order_field: - qs = qs.order_by('%s%s' % ((self.order_type == 'desc' and '-' or ''), self.order_field)) - - # Apply keyword searches. 
- def construct_search(field_name): - if field_name.startswith('^'): - return "%s__istartswith" % field_name[1:] - elif field_name.startswith('='): - return "%s__iexact" % field_name[1:] - elif field_name.startswith('@'): - return "%s__search" % field_name[1:] - else: - return "%s__icontains" % field_name - - if self.search_fields and self.query: - for bit in self.query.split(): - or_queries = [models.Q(**{construct_search(str(field_name)): bit}) for field_name in self.search_fields] - qs = qs.filter(reduce(operator.or_, or_queries)) - for field_name in self.search_fields: - if '__' in field_name: - qs = qs.distinct() - break + def get_query_set(self, request=None): + args = {} + if django.VERSION[0] >= 1 and django.VERSION[1] >= 4: + args['request'] = request #list_max_show_all + qs = super(HTSChangeList, self).get_query_set(**args) + if self.extra_filters: + new_qs = qs.filter(**self.extra_filters) + if new_qs is not None: + qs = new_qs return qs - - #def url_for_result(self, result): - # return "%s/" % quote(getattr(result, self.pk_attname)) diff --git a/htsworkflow/frontend/samples/fixtures/initial_data.json b/htsworkflow/frontend/samples/fixtures/initial_data.json index de3ad89..ae22c1d 100644 --- a/htsworkflow/frontend/samples/fixtures/initial_data.json +++ b/htsworkflow/frontend/samples/fixtures/initial_data.json @@ -23,6 +23,13 @@ "notes": "Unknown" } }, + { "pk": 1, "model": "samples.Condition", + "fields": { + "condition_name": "Unknown", + "nickname": "", + "notes": "Unknown" + } + }, { "model": "samples.LibraryType", "pk": 1, @@ -50,6 +57,24 @@ "is_paired_end": true } }, + { + "model": "samples.LibraryType", + "pk": 7, + "fields": { + "name": "Barcoded Small RNA", + "can_multiplex": true, + "is_paired_end": true + } + }, + { + "model": "samples.LibraryType", + "pk": 8, + "fields": { + "name": "Nextera", + "can_multiplex": true, + "is_paired_end": true + } + }, { "model": "samples.LibraryType", "pk": 9, diff --git a/htsworkflow/frontend/samples/test_samples.py b/htsworkflow/frontend/samples/test_samples.py index 2fb3945..f0844e5 100644 --- a/htsworkflow/frontend/samples/test_samples.py +++ b/htsworkflow/frontend/samples/test_samples.py @@ -6,6 +6,10 @@ except ImportError, e: import simplejson as json from django.test import TestCase +from django.test.utils import setup_test_environment, \ + teardown_test_environment +from django.db import connection +from django.conf import settings from htsworkflow.frontend.samples.models import \ Affiliation, \ @@ -327,3 +331,24 @@ def get_rdf_memory_model(): storage = RDF.MemoryStorage() model = RDF.Model(storage) return model + +OLD_DB = settings.DATABASES['default']['NAME'] +def setUpModule(): + setup_test_environment() + connection.creation.create_test_db() + +def tearDownModule(): + connection.creation.destroy_test_db(OLD_DB) + teardown_test_environment() + +def suite(): + from unittest2 import TestSuite, defaultTestLoader + suite = TestSuite() + suite.addTests(defaultTestLoader.loadTestsFromTestCase(LibraryTestCase)) + suite.addTests(defaultTestLoader.loadTestsFromTestCase(SampleWebTestCase)) + suite.addTests(defaultTestLoader.loadTestsFromTestCase(TestRDFaLibrary)) + return suite + +if __name__ == "__main__": + from unittest2 import main + main(defaultTest="suite") diff --git a/htsworkflow/frontend/samples/views.py b/htsworkflow/frontend/samples/views.py index 7150d08..5ab54a9 100644 --- a/htsworkflow/frontend/samples/views.py +++ b/htsworkflow/frontend/samples/views.py @@ -9,11 +9,13 @@ try: except ImportError, e: import simplejson as 
json -from django.contrib.csrf.middleware import csrf_exempt +from django.views.decorators.csrf import csrf_exempt from htsworkflow.frontend.auth import require_api_key from htsworkflow.frontend.experiments.models import FlowCell, Lane, LANE_STATUS_MAP -from htsworkflow.frontend.samples.changelist import ChangeList +from htsworkflow.frontend.experiments.admin import LaneOptions +from htsworkflow.frontend.samples.changelist import HTSChangeList from htsworkflow.frontend.samples.models import Antibody, Library, Species, HTSUser +from htsworkflow.frontend.samples.admin import LibraryOptions from htsworkflow.frontend.samples.results import get_flowcell_result_dict from htsworkflow.frontend.bcmagic.forms import BarcodeMagicForm from htsworkflow.pipelines.runfolder import load_pipeline_run_xml @@ -95,14 +97,16 @@ def create_library_context(cl): def library(request, todo_only=False): queryset = Library.objects.filter(hidden__exact=0) + filters = {'hidden__exact': 0} if todo_only: - queryset = queryset.filter(lane=None) + filters[lane] = None # build changelist - fcl = ChangeList(request, Library, + fcl = HTSChangeList(request, Library, list_filter=['affiliations', 'library_species'], search_fields=['id', 'library_name', 'amplified_from_sample__id'], list_per_page=200, - queryset=queryset + model_admin=LibraryOptions(Library, None), + extra_filters=filters ) context = { 'cl': fcl, 'title': 'Library Index', 'todo_only': todo_only} @@ -164,10 +168,11 @@ def lanes_for(request, username=None): if username is not None: user = HTSUser.objects.get(username=username) query.update({'library__affiliations__users__id':user.id}) - fcl = ChangeList(request, Lane, + fcl = HTSChangeList(request, Lane, list_filter=[], search_fields=['flowcell__flowcell_id', 'library__id', 'library__library_name'], list_per_page=200, + model_admin=LaneOptions, queryset=Lane.objects.filter(**query) ) @@ -553,5 +558,3 @@ def user_profile(request): context.update(SAMPLES_CONTEXT_DEFAULTS) return render_to_response('registration/profile.html', context, context_instance=RequestContext(request)) - - diff --git a/htsworkflow/frontend/templates/admin/index.html b/htsworkflow/frontend/templates/admin/index.html index 4a5677d..66b6942 100644 --- a/htsworkflow/frontend/templates/admin/index.html +++ b/htsworkflow/frontend/templates/admin/index.html @@ -1,7 +1,7 @@ {% extends "admin/base_site.html" %} {% load i18n %} -{% block extrastyle %}{{ block.super }}{% endblock %} +{% block extrastyle %}{{ block.super }}{% endblock %} {% block coltype %}colMS{% endblock %} diff --git a/htsworkflow/frontend/templates/base.html b/htsworkflow/frontend/templates/base.html index f8899df..3365689 100644 --- a/htsworkflow/frontend/templates/base.html +++ b/htsworkflow/frontend/templates/base.html @@ -17,8 +17,8 @@ {% block title %}{{ app_name }} - {{ page_name }}{% endblock %} {% block additional_css %} - {% load adminmedia %} - {% if LANGUAGE_BIDI %}{% endif %} + {% load staticfiles %} + {% if LANGUAGE_BIDI %}{% endif %} {% block extrastyle %}{% endblock %} {% block extrahead %}{% endblock %} {% block blockbots %}{% endblock %} @@ -39,7 +39,7 @@ {% trans 'Welcome,' %} {% firstof user.first_name user.username %}. 
{% block userlinks %} - {% url django-admindocs-docroot as docsroot %} + {% url "django.admindocs.docroot" as docsroot %} {% if docsroot %} {% trans 'Documentation' %} / {% endif %} diff --git a/htsworkflow/frontend/templates/experiments/flowcell_detail.html b/htsworkflow/frontend/templates/experiments/flowcell_detail.html index 4183c99..d958bf1 100644 --- a/htsworkflow/frontend/templates/experiments/flowcell_detail.html +++ b/htsworkflow/frontend/templates/experiments/flowcell_detail.html @@ -1,5 +1,5 @@ {% extends "base_site.html" %} -{% load adminmedia humanize i18n %} +{% load humanize i18n %} {% block extrahead %} diff --git a/htsworkflow/frontend/templates/experiments/flowcell_lane_detail.html b/htsworkflow/frontend/templates/experiments/flowcell_lane_detail.html index 7e834eb..591b5ba 100644 --- a/htsworkflow/frontend/templates/experiments/flowcell_lane_detail.html +++ b/htsworkflow/frontend/templates/experiments/flowcell_lane_detail.html @@ -1,5 +1,5 @@ {% extends "base_site.html" %} -{% load adminmedia humanize i18n %} +{% load humanize i18n %} {% block extrahead %} diff --git a/htsworkflow/frontend/templates/experiments/sequencer.html b/htsworkflow/frontend/templates/experiments/sequencer.html index e4a6e0b..b79aa38 100644 --- a/htsworkflow/frontend/templates/experiments/sequencer.html +++ b/htsworkflow/frontend/templates/experiments/sequencer.html @@ -1,5 +1,5 @@ {% extends "base_site.html" %} -{% load adminmedia humanize i18n %} +{% load humanize i18n %} {% block extrahead %} diff --git a/htsworkflow/frontend/templates/inventory/inventory_all_index.html b/htsworkflow/frontend/templates/inventory/inventory_all_index.html index 399dae1..c95a78b 100644 --- a/htsworkflow/frontend/templates/inventory/inventory_all_index.html +++ b/htsworkflow/frontend/templates/inventory/inventory_all_index.html @@ -1,5 +1,5 @@ {% extends "base_site.html" %} -{% load adminmedia admin_list i18n %} +{% load admin_list i18n %} {% block extrahead %} evil scammer" @@ -199,7 +205,7 @@ _:a owl:imports "{loc}extra.turtle" . # so it drops the stuff after the javascript link. # I suppose it could be worse hostile_result = """hi there""" - self.failUnlessEqual(str(hostile_sanitized), hostile_result) + self.assertEqual(str(hostile_sanitized), hostile_result) def test_guess_parser_from_file(self): DATA = [ diff --git a/htsworkflow/util/test/test_ucsc.py b/htsworkflow/util/test/test_ucsc.py new file mode 100644 index 0000000..05a64ba --- /dev/null +++ b/htsworkflow/util/test/test_ucsc.py @@ -0,0 +1,29 @@ +"""Test wrappers around ucsc file formats +""" +import os +from unittest2 import TestCase +from htsworkflow.util.test import TEST_DATA_DIR +from htsworkflow.util.ucsc import bigWigInfo + +from distutils.spawn import find_executable + +class TestUCSC(TestCase): + def test_bigwig_info(self): + if not find_executable('bigWigInfo'): + self.skipTest('Need bigWigInfo on path to test') + + filename = os.path.join(TEST_DATA_DIR, 'foo.bigWig') + info = bigWigInfo(filename) + self.assertEqual(info.version, 4) + self.assertEqual(info.isCompressed, True) + # what should i do for byteswapped arch? 
+ self.assertEqual(info.isSwapped, True) + self.assertEqual(info.primaryDataSize, 48) + self.assertEqual(info.primaryIndexSize, 6204) + self.assertEqual(info.zoomLevels, 2) + self.assertEqual(info.basesCovered, 30) + self.assertAlmostEqual(info.mean, 0.0) + self.assertAlmostEqual(info.min, -5.5) + self.assertAlmostEqual(info.max, 5.5) + self.assertAlmostEqual(info.std, 4.567501) + diff --git a/htsworkflow/util/test/test_url.py b/htsworkflow/util/test/test_url.py new file mode 100644 index 0000000..979e144 --- /dev/null +++ b/htsworkflow/util/test/test_url.py @@ -0,0 +1,46 @@ +from unittest2 import TestCase + +from htsworkflow.util.url import normalize_url, parse_ssh_url + +class TestURLUtilities(TestCase): + def test_normalize_url(self): + + self.assertEqual(normalize_url('caltech.edu'), + 'http://caltech.edu') + self.assertEqual(normalize_url('http://caltech.edu'), + 'http://caltech.edu') + self.assertEqual(normalize_url("foo.com/a/b/c/d/e/f.html"), + 'http://foo.com/a/b/c/d/e/f.html') + self.assertEqual(normalize_url("foo.com", "https"), + 'https://foo.com') + self.assertEqual(normalize_url(None), + None) + + def test_parse_ssh_url(self): + + u = parse_ssh_url('me@caltech.edu:/test/path') + self.assertEqual(u.user, 'me') + self.assertEqual(u.host, 'caltech.edu') + self.assertEqual(u.path, '/test/path') + + u = parse_ssh_url('caltech.edu:path@there') + self.assertEqual(u.user, None) + self.assertEqual(u.host, 'caltech.edu') + self.assertEqual(u.path, 'path@there') + + u = parse_ssh_url('caltech.edu:C:/me/@work') + self.assertEqual(u.user, None) + self.assertEqual(u.host, 'caltech.edu') + self.assertEqual(u.path, 'C:/me/@work') + + self.assertRaises(ValueError, parse_ssh_url, 'hello') + +def suite(): + from unittest2 import TestSuite, defaultTestLoader + suite = TestSuite() + suite.addTests(defaultTestLoader.loadTestsFromTestCase(TestURLUtilities)) + return suite + +if __name__ == '__main__': + from unittest2 import main + main(defaultTest="suite") diff --git a/htsworkflow/util/test/test_version.py b/htsworkflow/util/test/test_version.py new file mode 100644 index 0000000..212f124 --- /dev/null +++ b/htsworkflow/util/test/test_version.py @@ -0,0 +1,21 @@ +from unittest2 import TestCase + +from htsworkflow.util import version + +class TestVersion(TestCase): + def test_version(self): + long_version = version.version() + self.assertTrue(long_version) + self.assertEqual(long_version.project_name, 'htsworkflow') + self.assertTrue(long_version.version) + + +def suite(): + from unittest2 import TestSuite, defaultTestLoader + suite = TestSuite() + suite.addTest(defaultTestLoader.loadTestsFromTestCase(TestVersion)) + return suite + +if __name__ == "__main__": + from unittest2 import main + main(defaultTest="suite") diff --git a/htsworkflow/util/test/testdata/foo.bigWig b/htsworkflow/util/test/testdata/foo.bigWig new file mode 100644 index 0000000..98090a8 Binary files /dev/null and b/htsworkflow/util/test/testdata/foo.bigWig differ diff --git a/htsworkflow/util/ucsc.py b/htsworkflow/util/ucsc.py new file mode 100644 index 0000000..e9ff77e --- /dev/null +++ b/htsworkflow/util/ucsc.py @@ -0,0 +1,67 @@ +"""Wrap ucsc command line utilities +""" + +import logging +import os +from subprocess import Popen, PIPE + +LOGGER = logging.getLogger(__name__) + +def parseNumber(number): + buffer = [] + isFloat = False + for n in number: + if n == ',': + continue + if n == '.': + isFloat = True + buffer.append(n) + else: + buffer.append(n) + if isFloat: + return float(''.join(buffer)) + else: + return 
int(''.join(buffer)) + +def parseBoolean(value): + if value.lower() in ('yes', '1', 'true'): + return True + elif value.lower() in ('no', '0', 'false'): + return False + +class bigWigInfo: + def __init__(self, filename=None): + self.version = None + self.isCompressed = None + self.isSwapped = None + self.primaryDataSize = None + self.primaryIndexSize = None + self.zoomLevels = None + self.chromCount = None + self.basesCovered = None + self.mean = None + self.min = None + self.max = None + self.std = None + self.filename = None + if filename: + self.scan_file(filename) + self.filename = filename + + def scan_file(self, filename): + cmd = ['bigWigInfo', + filename] + p = Popen(cmd, stdout=PIPE) + stdout, _ = p.communicate() + for line in stdout.split(os.linesep): + if len(line) > 0: + term, value = line.split(': ') + if term in ('isCompressed', 'isSwapped'): + value = parseBoolean(value) + else: + value = parseNumber(value) + LOGGER.debug('%s: %s', term, str(value)) + setattr(self, term, value) + + + diff --git a/htsworkflow/util/url.py b/htsworkflow/util/url.py index 4e49c2d..503e9e3 100644 --- a/htsworkflow/util/url.py +++ b/htsworkflow/util/url.py @@ -1,20 +1,11 @@ """ Utilities to help handle urls """ +import collections def normalize_url(url, scheme='http'): """ Make sure there is a http at the head of what should be a url - - >>> normalize_url("google.com") - 'http://google.com' - >>> normalize_url("http://google.com") - 'http://google.com' - >>> normalize_url("foo.com/a/b/c/d/e/f.html") - 'http://foo.com/a/b/c/d/e/f.html' - >>> normalize_url("foo.com", "https") - 'https://foo.com' - >>> normalize_url(None) """ # not much to do with None except avoid an exception if url is None: @@ -25,3 +16,30 @@ def normalize_url(url, scheme='http'): return url else: return scheme + scheme_sep + url + +SSHURL = collections.namedtuple("SSHURL", "user host path") + +def parse_ssh_url(url): + """Parse scp-style username, host and path. 
+    """
+    # simple initialization
+    user = None
+    host = None
+    path = None
+
+    colon = url.find(':')
+    if colon == -1:
+        raise ValueError("Invalid SSH URL: need :")
+
+    path = url[colon+1:]
+
+    user_host = url[:colon]
+    atsign = user_host.find('@')
+    if atsign != -1:
+        user = user_host[:atsign]
+        host = user_host[atsign+1:]
+    else:
+        host = user_host
+
+    return SSHURL(user, host, path)
+
diff --git a/htsworkflow/util/version.py b/htsworkflow/util/version.py
new file mode 100644
index 0000000..8097edb
--- /dev/null
+++ b/htsworkflow/util/version.py
@@ -0,0 +1,21 @@
+import logging
+
+LOGGER = logging.getLogger(__name__)
+
+def version():
+    """Return version number
+    """
+    version = None
+    try:
+        import pkg_resources
+    except ImportError, e:
+        LOGGER.error("Can't find version number, please install setuptools")
+        raise e
+
+    try:
+        version = pkg_resources.get_distribution("htsworkflow")
+    except pkg_resources.DistributionNotFound, e:
+        LOGGER.error("Package not installed")
+
+    return version
+
diff --git a/htsworkflow/version.py b/htsworkflow/version.py
deleted file mode 100644
index 8097edb..0000000
--- a/htsworkflow/version.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import logging
-
-LOGGER = logging.getLogger(__name__)
-
-def version():
-    """Return version number
-    """
-    version = None
-    try:
-        import pkg_resources
-    except ImportError, e:
-        LOGGER.error("Can't find version number, please install setuptools")
-        raise e
-
-    try:
-        version = pkg_resources.get_distribution("htsworkflow")
-    except pkg_resources.DistributionNotFound, e:
-        LOGGER.error("Package not installed")
-
-    return version
-
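The merge above introduces several new modules; the short sketches that follow are illustrative only and are not part of the commit. First, the encode_submission/encode3.py driver combines a named submission, an RDF model and a set of result files, and all of its commands (--load-rdf, --scan-submission, --make-track-hub, --make-manifest) are routed through main(). A minimal, hypothetical invocation could look like the following, assuming the encode_submission directory is importable, librdf's Python bindings are installed, and whatever authentication options api.add_auth_options() registers are supplied as well; every file name, submission name and URL here is invented.

# Hypothetical invocation sketch -- not from the commit.
from encode_submission.encode3 import main

main(['--name', 'example-submission',        # invented submission name
      '--model', 'example-model.db',         # invented RDF store path
      '--load-rdf', 'submission.turtle',     # invented turtle file
      '--scan-submission',                   # import metadata into the model
      '--make-track-hub', 'http://example.org/hubs/',
      'results.txt'])                        # invented ResultMap description file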
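Second, htsworkflow/frontend/samples/changelist.py no longer carries a copied-and-trimmed admin ChangeList; HTSChangeList now subclasses django.contrib.admin.views.main.ChangeList and accepts an extra_filters mapping that get_query_set() passes straight to queryset.filter(), while callers supply a model_admin instance instead of a queryset. Below is a hedged sketch of the new calling convention, modelled on the library() view in samples/views.py (note that the view in the diff writes filters[lane] = None; the quoted key 'lane' used here is presumably what was intended).

# Sketch of the new HTSChangeList calling convention -- needs a real
# Django request and the LibraryOptions admin class from samples/admin.py.
from htsworkflow.frontend.samples.changelist import HTSChangeList
from htsworkflow.frontend.samples.models import Library
from htsworkflow.frontend.samples.admin import LibraryOptions

def todo_library_changelist(request):
    # extra_filters replaces the old queryset= argument: the mapping is
    # applied with queryset.filter() inside HTSChangeList.get_query_set().
    fcl = HTSChangeList(request, Library,
                        list_filter=['affiliations', 'library_species'],
                        search_fields=['id', 'library_name'],
                        list_per_page=200,
                        model_admin=LibraryOptions(Library, None),
                        extra_filters={'hidden__exact': 0,
                                       'lane': None})   # "to do" libraries only
    return fcl   # templates read fcl.result_list, fcl.paginator, etc.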
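Finally, htsworkflow/util/ucsc.py wraps the UCSC bigWigInfo command-line tool: parseNumber() drops thousands separators and returns an int or a float, parseBoolean() maps yes/no style strings, and bigWigInfo.scan_file() runs the binary and copies each "key: value" line of its output onto the object as an attribute. The expected behaviour, using the same values test_ucsc.py checks, looks roughly like this; it assumes the bigWigInfo executable is on the PATH and that the foo.bigWig file from htsworkflow/util/test/testdata is in the working directory.

# Expected-behaviour sketch, not part of the commit.
from htsworkflow.util.ucsc import bigWigInfo, parseNumber, parseBoolean

assert parseNumber('6,204') == 6204           # commas stripped -> int
assert parseNumber('4.567501') == 4.567501    # a '.' switches to float
assert parseBoolean('yes') is True

info = bigWigInfo('foo.bigWig')   # runs: bigWigInfo foo.bigWig
print info.version                # 4 for the bundled test file
print info.basesCovered, info.mean, info.std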