htsworkflow/automation/runner.py

#!/usr/bin/env python
from glob import glob
import logging
import os
import re
import sys
import time
import threading

from benderjab import rpc

from htsworkflow.pipelines.configure_run import *

# matches the trailing _<flowcell id> component of a run directory name
#s_fc = re.compile('FC[0-9]+')
s_fc = re.compile('_[0-9a-zA-Z]*$')


def _get_flowcell_from_rundir(run_dir):
    """
    Returns flowcell string based on run_dir.
    Returns None and logs error if flowcell can't be found.
    """
    junk, dirname = os.path.split(run_dir)
    mo = s_fc.search(dirname)
    if not mo:
        logging.error('Could not determine flowcell from run dir: %s' % (run_dir))
        return None

    return dirname[mo.start()+1:]

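# Example (the run directory name below is hypothetical, following the common
# YYMMDD_<machine>_<run number>_<flowcell> convention; the regex simply takes
# the last '_'-delimited token of the directory name):
#   _get_flowcell_from_rundir('/data/090220_HWI-EAS229_0093_30VR0AAXX')
#   -> '30VR0AAXX'

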
class Runner(rpc.XmlRpcBot):
    """
    Manage running pipeline jobs.
    """
    def __init__(self, section=None, configfile=None):
        #if configfile is None:
        #    self.configfile = "~/.htsworkflow"
        super(Runner, self).__init__(section, configfile)

        self.cfg['notify_users'] = None
        self.cfg['genome_dir'] = None
        self.cfg['base_analysis_dir'] = None
        self.cfg['notify_postanalysis'] = None

        self.conf_info_dict = {}

        self.register_function(self.sequencingFinished)
        #self.eventTasks.append(self.update)

    def read_config(self, section=None, configfile=None):
        super(Runner, self).read_config(section, configfile)

        self.genome_dir = self._check_required_option('genome_dir')
        self.base_analysis_dir = self._check_required_option('base_analysis_dir')

        self.notify_users = self._parse_user_list(self.cfg['notify_users'])
        # FIXME: process notify_postpipeline cfg

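    # A sketch of the configuration this bot reads (the option names come from
    # __init__ and read_config above; the section name, paths, and addresses
    # below are hypothetical examples, and the file is the benderjab-style
    # config, commonly ~/.htsworkflow as hinted in __init__):
    #
    #   [runner]
    #   genome_dir = /data/genomes
    #   base_analysis_dir = /data/analysis
    #   notify_users = user1@example.org, user2@example.org
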
    def _parser(self, msg, who):
        """
        Parse XMPP chat messages
        """
        help = u"I can send [start] a run, or report [status]"
        if re.match(u"help", msg):
            reply = help
        elif re.match(u"status", msg):
            words = msg.split()
            if len(words) == 2:
                reply = self.getStatusReport(words[1])
            else:
                reply = u"Status available for: %s" \
                        % (', '.join(self.conf_info_dict.keys()))
        elif re.match(u"start", msg):
            words = msg.split()
            if len(words) == 2:
                self.sequencingFinished(words[1])
                reply = u"starting run for %s" % (words[1])
            else:
                reply = u"need runfolder name"
        elif re.match(u"path", msg):
            reply = u"My path is: " + unicode(os.environ['PATH'])
        else:
            reply = u"I didn't understand '%s'" % (msg)

        logging.debug("reply: " + str(reply))
        return reply

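    # Example chat exchanges handled by _parser (the flowcell and run folder
    # names here are hypothetical):
    #   "help"             -> usage summary
    #   "status 30VR0AAXX" -> status report for that flowcell
    #   "start 090220_HWI-EAS229_0093_30VR0AAXX" -> calls sequencingFinished()
    #   "path"             -> reports the bot's PATH environment variable
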
    def getStatusReport(self, fc_num):
        """
        Returns text status report for flow cell number
        """
        if fc_num not in self.conf_info_dict:
            return "No record of a %s run." % (fc_num)

        status = self.conf_info_dict[fc_num].status

        if status is None:
            return "No status information for %s yet." \
                   " Probably still in configure step. Try again later." % (fc_num)

        output = status.statusReport()

        return '\n'.join(output)

    def sequencingFinished(self, run_dir):
        """
        Sequencing (and copying) is finished, time to start pipeline
        """
        logging.debug("received sequencing finished message")

        # Setup config info object
        ci = ConfigInfo()
        ci.base_analysis_dir = self.base_analysis_dir
        ci.analysis_dir = os.path.join(self.base_analysis_dir, run_dir)

        # get flowcell from run_dir name
        flowcell = _get_flowcell_from_rundir(run_dir)

        # Store ci object in dictionary
        self.conf_info_dict[flowcell] = ci

        # Launch the job in its own thread and return.
        self.launchJob(run_dir, flowcell, ci)
        return "started"

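    # sequencingFinished is registered with benderjab's XML-RPC layer (see
    # register_function in __init__), so a peer bot (e.g. the copier) can
    # trigger it remotely with something like
    #   self.rpc_send(runner_jid, (run_dir,), 'sequencingFinished')
    # where runner_jid is a hypothetical JID for this bot (compare the
    # commented-out call in pipelineFinished below). It can also be started
    # by hand with the "start <runfolder>" chat command handled in _parser.
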
    def pipelineFinished(self, run_dir):
        # need to strip off self.watch_dir from rundir I suspect.
        logging.info("pipeline finished in " + str(run_dir))
        #pattern = self.watch_dir
        #if pattern[-1] != os.path.sep:
        #    pattern += os.path.sep
        #stripped_run_dir = re.sub(pattern, "", run_dir)
        #logging.debug("stripped to " + stripped_run_dir)

        # Notify each user that the run has finished.
        if self.notify_users is not None:
            for u in self.notify_users:
                self.send(u, 'Pipeline run %s finished' % (run_dir))

        #if self.notify_runner is not None:
        #    for r in self.notify_runner:
        #        self.rpc_send(r, (stripped_run_dir,), 'sequencingFinished')

    def reportMsg(self, msg):
        """
        Send a message to every user in notify_users.
        """
        if self.notify_users is not None:
            for u in self.notify_users:
                self.send(u, msg)

    def _runner(self, run_dir, flowcell, conf_info):
        """
        Run the pipeline for one flowcell: retrieve the config, configure
        the run, then launch the pipeline, reporting progress to notify_users.
        """
        # retrieve config step
        cfg_filepath = os.path.join(conf_info.analysis_dir, 'config-auto.txt')
        status_retrieve_cfg = retrieve_config(
            conf_info, flowcell, cfg_filepath, self.genome_dir)
        if status_retrieve_cfg:
            logging.info("Runner: Retrieve config: success")
            self.reportMsg("Retrieve config (%s): success" % (run_dir))
        else:
            logging.error("Runner: Retrieve config: failed")
            self.reportMsg("Retrieve config (%s): FAILED" % (run_dir))

        # configure step
        if status_retrieve_cfg:
            status = configure(conf_info)
            if status:
                logging.info("Runner: Configure: success")
                self.reportMsg("Configure (%s): success" % (run_dir))
                self.reportMsg(
                    os.linesep.join(glob(os.path.join(run_dir, 'Data', 'C*')))
                )
            else:
                logging.error("Runner: Configure: failed")
                self.reportMsg("Configure (%s): FAILED" % (run_dir))

            # if successful, continue
            if status:
                # Set up the command-line status monitor
                #startCmdLineStatusMonitor(ci)

                # running step
                print 'Running pipeline now!'
                run_status = run_pipeline(conf_info)
                if run_status is True:
                    logging.info('Runner: Pipeline: success')
                    self.reportMsg("Pipeline run (%s): Finished" % (run_dir,))
                else:
                    logging.error('Runner: Pipeline: failed')
                    self.reportMsg("Pipeline run (%s): FAILED" % (run_dir))

    def launchJob(self, run_dir, flowcell, conf_info):
        """
        Starts up a thread for running the pipeline
        """
        t = threading.Thread(target=self._runner,
                             args=[run_dir, flowcell, conf_info])
        # daemon thread, so a stuck pipeline never blocks the bot from exiting
        t.setDaemon(True)
        t.start()


def main(args=None):
    bot = Runner()
    return bot.main(args)


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
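
# Typical invocation (a sketch; any command-line arguments are handled by
# benderjab's XmlRpcBot.main, so consult that library for supported options):
#   python htsworkflow/automation/runner.py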