Django provides django.utils.timezone.now to return a timezone-aware timestamp if...
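
For context, a minimal sketch (not part of this commit) of the behavior the message refers to: with USE_TZ = True, django.utils.timezone.now() returns an aware datetime, so it can be subtracted from aware DateTimeField values such as flowcell.run_date, whereas a naive datetime.datetime.now() would raise TypeError. The USE_TZ setting and the run_date stand-in below are assumptions about the deployment, not taken from this repository.

# Sketch only: assumes a configured Django project with USE_TZ = True.
from datetime import datetime
from django.utils import timezone

now = timezone.now()
print(timezone.is_aware(now))         # True when USE_TZ = True, False otherwise

run_date = timezone.now()             # hypothetical stand-in for flowcell.run_date
elapsed = timezone.now() - run_date   # fine: both datetimes are aware
# datetime.now() - run_date           # mixing naive and aware datetimes raises TypeError
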
[htsworkflow.git] / htsworkflow / frontend / experiments / experiments.py
old mode 100755 (executable)
new mode 100644 (file)
index d0dd764..9493765
@@ -4,44 +4,68 @@ try:
     import json
 except ImportError, e:
     import simplejson as json
-    
+
 import os
 import re
 
 from django.contrib.auth.decorators import login_required
+from django.views.decorators.csrf import csrf_exempt
 from django.core.exceptions import ObjectDoesNotExist
 from django.core.mail import send_mail, mail_admins
 from django.http import HttpResponse, Http404
+from django.conf import settings
+from django.utils import timezone
 
-from htsworkflow.frontend import settings
-from htsworkflow.frontend.experiments.models import FlowCell, DataRun
-from htsworkflow.frontend.samples.models import Library
+from htsworkflow.frontend.auth import require_api_key
+from htsworkflow.frontend.experiments.models import \
+    FlowCell, \
+    DataRun, \
+    Lane, \
+    LANE_STATUS_MAP
+from htsworkflow.frontend.samples.models import Library, MultiplexIndex, HTSUser
 
 def flowcell_information(flowcell_id):
     """
     Return a dictionary describing a flowcell
     """
     try:
-        fc = FlowCell.objects.get(flowcell_id=flowcell_id)
+        fc = FlowCell.objects.get(flowcell_id__startswith=flowcell_id)
     except FlowCell.DoesNotExist, e:
         return None
 
     lane_set = {}
     for lane in fc.lane_set.all():
-        lane_set[lane.lane_number] = {
+        lane_item = {
             'cluster_estimate': lane.cluster_estimate,
             'comment': lane.comment,
+            'experiment_type': lane.library.experiment_type.name,
+            'experiment_type_id': lane.library.experiment_type_id,
             'flowcell': lane.flowcell.flowcell_id,
-            'lane_number': int(lane.lane_number),
+            'lane_number': lane.lane_number,
             'library_name': lane.library.library_name,
-            'library_id': lane.library_id,
-            'pM': float(lane.pM),
+            'library_id': lane.library.id,
+            'library_species': lane.library.library_species.scientific_name,
+            'pM': unicode(lane.pM),
+            'read_length': lane.flowcell.read_length,
+            'status_code': lane.status,
+            'status': LANE_STATUS_MAP[lane.status]
         }
+        sequences = lane.library.index_sequences()
+        if sequences is not None:
+            lane_item['index_sequence'] = sequences
+
+        lane_set.setdefault(lane.lane_number,[]).append(lane_item)
+
+    if fc.control_lane is None:
+        control_lane = None
+    else:
+        control_lane = int(fc.control_lane)
+
     info = {
         'advanced_run': fc.advanced_run,
         'cluster_station_id': fc.cluster_station_id,
         'cluster_station': fc.cluster_station.name,
-        'control_lane': int(fc.control_lane),
+        'control_lane': control_lane,
         # 'datarun_set': how should this be represented?,
         'flowcell_id': fc.flowcell_id,
         'id': fc.id,
@@ -53,22 +77,67 @@ def flowcell_information(flowcell_id):
         'sequencer_id': fc.sequencer_id,
         'sequencer': fc.sequencer.name,
     }
-    
+
     return info
 
-@login_required    
+@csrf_exempt
 def flowcell_json(request, fc_id):
     """
     Return a JSON blob containing enough information to generate a config file.
     """
+    require_api_key(request)
+
     fc_dict = flowcell_information(fc_id)
 
     if fc_dict is None:
         raise Http404
-    
+
     fc_json = json.dumps(fc_dict)
     return HttpResponse(fc_json, mimetype = 'application/json')
-    
+
+def lanes_for(username=None):
+    """
+    Given a user id, try to return recent lanes as a list of dictionaries
+    """
+    query = {}
+    if username is not None:
+        user = HTSUser.objects.get(username=username)
+        query.update({'library__affiliations__users__id': user.id})
+
+    lanes = Lane.objects.filter(**query).order_by('-flowcell__run_date')
+
+
+    result = []
+    for l in lanes:
+        affiliations = l.library.affiliations.all()
+        affiliations_list = [(a.id, a.name) for a in affiliations]
+        result.append({ 'flowcell': l.flowcell.flowcell_id,
+                        'run_date': l.flowcell.run_date.isoformat(),
+                        'lane_number': l.lane_number,
+                        'library': l.library.id,
+                        'library_name': l.library.library_name,
+                        'comment': l.comment,
+                        'affiliations': affiliations_list})
+    return result
+
+@csrf_exempt
+def lanes_for_json(request, username):
+    """
+    Format lanes for a user
+    """
+    require_api_key(request)
+
+    try:
+        result = lanes_for(username)
+    except ObjectDoesNotExist, e:
+        raise Http404
+
+    #convert query set to python structure
+
+    result_json = json.dumps(result)
+    return HttpResponse(result_json, mimetype='application/json')
+
+
 def updStatus(request):
     output=''
     user = 'none'
@@ -82,7 +151,7 @@ def updStatus(request):
       user = request.user
 
     #Check access permission
-    if not (user.is_superuser and settings.ALLOWED_IPS.has_key(ClIP)): 
+    if not (user.is_superuser and settings.ALLOWED_IPS.has_key(ClIP)):
         return HttpResponse("%s access denied from %s." % (user, ClIP))
 
     # ~~~~~~Parameters for the job ~~~~
@@ -90,28 +159,28 @@ def updStatus(request):
       fcid = request.REQUEST['fcid']
     else:
       return HttpResponse('missing fcid')
-    
+
     if request.REQUEST.has_key('runf'):
       runfolder = request.REQUEST['runf']
     else:
       return HttpResponse('missing runf')
 
-    
+
     if request.REQUEST.has_key('updst'):
       UpdatedStatus = request.REQUEST['updst']
     else:
       return HttpResponse('missing status')
-    
-    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 
+
+    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
     # Update Data Run status in DB
-    # Try get rec. If not found return 'entry not found + <fcid><runfolder>', if found try update and return updated 
+    # Try get rec. If not found return 'entry not found + <fcid><runfolder>', if found try update and return updated
     try:
       rec = DataRun.objects.get(run_folder=runfolder)
       rec.run_status = UpdatedStatus
 
       #if there's a message update that too
-      mytimestamp = datetime.now().__str__()
+      mytimestamp = timezone.now().__str__()
       mytimestamp = re.sub(pattern=":[^:]*$",repl="",string=mytimestamp)
       if request.REQUEST.has_key('msg'):
         rec.run_note += ", "+request.REQUEST['msg']+" ("+mytimestamp+")"
@@ -145,16 +214,15 @@ def generateConfile(request,fcid):
     config += ['ELAND_MULTIPLE_INSTANCES 8']
     genome_dir = 'GENOME_DIR /Volumes/Genomes/'
     eland_genome = 'ELAND_GENOME /Volumes/Genomes/'
-    
-    try:                                                                                                                                              
+
+    try:
       fc = FlowCell.objects.get(flowcell_id=fcid)
       for lane in fc.lane_set.all():
-          print dir(lane.library.library_species)
           config += [ str(lane.lane_number) +":" + \
                       genome_dir + lane.library.library_species.scientific_name ]
           config += [ str(lane.lane_number) +":" + \
                       eland_genome + lane.library.library_species.scientific_name ]
-      
+
     except ObjectDoesNotExist:
       config = 'Entry not found for fcid  = '+fcid
 
@@ -171,9 +239,6 @@ def getConfile(req):
     cnfgfile = 'Nothing found'
     runfolder = 'unknown'
     request = req.REQUEST
-    print request, dir(request)
-    print request['fcid'], request.has_key('fcid')
-    print request['runf']
     if request.has_key('fcid'):
       fcid = request['fcid']
       if request.has_key('runf'):
@@ -190,8 +255,8 @@ def getConfile(req):
               rec.config_params = cnfgfile
               rec.save()
             else:
-              cnfgfile = 'Failed generating config params for RunFolder = '+runfolder +', Flowcell id = '+ fcid+ ' Config Text:\n'+cnfgfile  
-            
+              cnfgfile = 'Failed generating config params for RunFolder = '+runfolder +', Flowcell id = '+ fcid+ ' Config Text:\n'+cnfgfile
+
         except ObjectDoesNotExist:
           cnfgfile = 'Entry not found for RunFolder = '+runfolder
 
@@ -209,28 +274,28 @@ def getLaneLibs(req):
     outputfile = ''
     if request.has_key('fcid'):
       fcid = request['fcid']
-      try:                                
+      try:
         rec = FlowCell.objects.get(flowcell_id=fcid)
         #Ex: 071211
         year = datetime.today().year.__str__()
         year = replace(year,'20','')
         month = datetime.today().month
         if month < 10: month = "0"+month.__str__()
-        else: month = month.__str__() 
+        else: month = month.__str__()
         day = datetime.today().day
         if day < 10: day = "0"+day.__str__()
         else: day = day.__str__()
         mydate = year+month+day
         outputfile = '<?xml version="1.0" ?>'
         outputfile += '\n<SolexaResult Date="'+mydate+'" Flowcell="'+fcid+'" Client="'+settings.ALLOWED_IPS[ClIP]+'">'
-        outputfile += '\n<Lane Index="1" Name="'+rec.lane_1_library.library_name+'" Library="'+rec.lane_1_library.library_id+'" Genome="'+rec.lane_1_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
-        outputfile += '\n<Lane Index="2" Name="'+rec.lane_2_library.library_name+'" Library="'+rec.lane_2_library.library_id+'" Genome="'+rec.lane_2_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
-        outputfile += '\n<Lane Index="3" Name="'+rec.lane_3_library.library_name+'" Library="'+rec.lane_3_library.library_id+'" Genome="'+rec.lane_3_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
-        outputfile += '\n<Lane Index="4" Name="'+rec.lane_4_library.library_name+'" Library="'+rec.lane_4_library.library_id+'" Genome="'+rec.lane_4_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
-        outputfile += '\n<Lane Index="5" Name="'+rec.lane_5_library.library_name+'" Library="'+rec.lane_5_library.library_id+'" Genome="'+rec.lane_5_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
-        outputfile += '\n<Lane Index="6" Name="'+rec.lane_6_library.library_name+'" Library="'+rec.lane_6_library.library_id+'" Genome="'+rec.lane_6_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
-        outputfile += '\n<Lane Index="7" Name="'+rec.lane_7_library.library_name+'" Library="'+rec.lane_7_library.library_id+'" Genome="'+rec.lane_7_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
-        outputfile += '\n<Lane Index="8" Name="'+rec.lane_8_library.library_name+'" Library="'+rec.lane_8_library.library_id+'" Genome="'+rec.lane_8_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="1" Name="'+rec.lane_1_library.library_name+'" Library="'+rec.lane_1_library.id+'" Genome="'+rec.lane_1_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="2" Name="'+rec.lane_2_library.library_name+'" Library="'+rec.lane_2_library.id+'" Genome="'+rec.lane_2_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="3" Name="'+rec.lane_3_library.library_name+'" Library="'+rec.lane_3_library.id+'" Genome="'+rec.lane_3_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="4" Name="'+rec.lane_4_library.library_name+'" Library="'+rec.lane_4_library.id+'" Genome="'+rec.lane_4_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="5" Name="'+rec.lane_5_library.library_name+'" Library="'+rec.lane_5_library.id+'" Genome="'+rec.lane_5_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="6" Name="'+rec.lane_6_library.library_name+'" Library="'+rec.lane_6_library.id+'" Genome="'+rec.lane_6_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="7" Name="'+rec.lane_7_library.library_name+'" Library="'+rec.lane_7_library.id+'" Genome="'+rec.lane_7_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
+        outputfile += '\n<Lane Index="8" Name="'+rec.lane_8_library.library_name+'" Library="'+rec.lane_8_library.id+'" Genome="'+rec.lane_8_library.library_species.use_genome_build+'" PrimerName="" PrimerSeq=""/>'
         outputfile += '\n</SolexaResult>'
       except ObjectDoesNotExist:
         outputfile = 'Flowcell entry not found for: '+fcid
@@ -247,20 +312,36 @@ def estimateFlowcellDuration(flowcell):
     sequencing_seconds_per_cycle= 3600 * 1.5
     # 800 is a rough guess
     pipeline_seconds_per_cycle = 800
-    
+
     cycles = flowcell.read_length
     if flowcell.paired_end:
         cycles *= 2
     sequencing_time = timedelta(0, cycles * sequencing_seconds_per_cycle)
     analysis_time = timedelta(0, cycles * pipeline_seconds_per_cycle)
     estimate_mid = sequencing_time + analysis_time
+
+    return estimate_mid
+
+def estimateFlowcellTimeRemaining(flowcell):
+    estimate_mid = estimateFlowcellDuration(flowcell)
+
+    # offset for how long we've been running
+    running_time = timezone.now() - flowcell.run_date
+    estimate_mid -= running_time
+
+    return estimate_mid
+
+def roundToDays(estimate):
+    """
+    Given a time estimate, round it down and up to whole days
+    """
     # floor estimate_mid
-    estimate_low = timedelta(estimate_mid.days, 0)
+    estimate_low = timedelta(estimate.days, 0)
     # floor estimate_mid and add a day
-    estimate_high = timedelta(estimate_mid.days+1, 0)
-    
+    estimate_high = timedelta(estimate.days+1, 0)
+
     return (estimate_low, estimate_high)
-    
+
 
 def makeUserLaneMap(flowcell):
     """
@@ -278,26 +359,26 @@ def makeUserLaneMap(flowcell):
 
 def getUsersForFlowcell(flowcell):
     users = set()
-    
+
     for lane in flowcell.lane_set.all():
         for affiliation in lane.library.affiliations.all():
             for user in affiliation.users.all():
                 users.add(user)
-                
+
     return users
-    
+
 def makeUserLibraryMap(libraries):
     """
     Given an iterable set of libraries return a mapping of
     users interested in those libraries.
     """
     users = {}
-    
+
     for library in libraries:
         for affiliation in library.affiliations.all():
             for user in affiliation.users.all():
                 users.setdefault(user,[]).append(library)
-                
+
     return users
 
 def makeAffiliationLaneMap(flowcell):
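
For illustration, a standalone restatement of the roundToDays helper added in this commit, with a hypothetical usage example (not from the repository) showing how it brackets a timedelta estimate between its whole-day floor and that floor plus one day:

# Restated from the diff above; the sample input is hypothetical.
from datetime import timedelta

def roundToDays(estimate):
    estimate_low = timedelta(estimate.days, 0)       # floor to whole days
    estimate_high = timedelta(estimate.days + 1, 0)  # floor plus one day
    return (estimate_low, estimate_high)

low, high = roundToDays(timedelta(days=2, hours=7))
assert (low, high) == (timedelta(days=2), timedelta(days=3))
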