8 from gaworkflow.pipeline.retrieve_config import getCombinedOptions, saveConfigFile
9 from gaworkflow.pipeline.retrieve_config import FlowCellNotFound, WebError404
10 from gaworkflow.pipeline.genome_mapper import DuplicateGenome, getAvailableGenomes, constructMapperDict
11 from gaworkflow.pipeline.run_status import GARunStatus
13 from pyinotify import WatchManager, ThreadedNotifier
14 from pyinotify import EventsCodes, ProcessEvent
# Configure the root logger: everything at DEBUG and above is appended to
# pipeline_main.log with a timestamped, level-tagged format.
16 logging.basicConfig(level=logging.DEBUG,
17 format='%(asctime)s %(levelname)-8s %(message)s',
18 datefmt='%a, %d %b %Y %H:%M:%S',
19 filename='pipeline_main.log',
# NOTE(review): attribute-initialisation fragment of the pipeline config-info
# object; the enclosing class and __init__ header are not visible in this view.
25 #run_path = firecrest analysis directory to run analysis from
# Path to the Bustard base-call directory; populated later by
# config_stdout_handler once goat_pipeline.py reports the sequence folder.
27 self.bustard_path = None
# Path of the GERALD config file; populated by retrieve_config().
28 self.config_filepath = None
31 #top level directory where all analyses are placed
32 self.base_analysis_dir = None
33 #analysis_dir, top level analysis dir...
34 # base_analysis_dir + '/070924_USI-EAS44_0022_FC12150'
35 self.analysis_dir = None
38 def createStatusObject(self):
40 Creates a status object which can be queried for
41 status of running the pipeline
43 returns True if object created
44 returns False if object cannot be created
# Guard: a status object cannot be built before retrieve_config() has
# stored a config file path on this object.
46 if self.config_filepath is None:
# GARunStatus presumably parses the config file and tracks
# firecrest/bustard/gerald progress — confirm against run_status module.
49 self.status = GARunStatus(self.config_filepath)
54 ####################################
55 # inotify event processor
# Patterns for the per-stage "finished.txt" marker files created under the
# Firecrest/Bustard/GERALD output directories; matched against full paths
# of newly created files in RunEvent.process_IN_CREATE.
57 s_firecrest_finished = re.compile('Firecrest[0-9\._\-A-Za-z]+/finished.txt')
58 s_bustard_finished = re.compile('Bustard[0-9\._\-A-Za-z]+/finished.txt')
59 s_gerald_finished = re.compile('GERALD[0-9\._\-A-Za-z]+/finished.txt')
# Broader patterns matching *any* file created within each stage's directory
# tree (used for progress updates, not completion detection). Note each
# shorter pattern is a prefix of the longer ones, so match order matters.
61 s_gerald_all = re.compile('Firecrest[0-9\._\-A-Za-z]+/Bustard[0-9\._\-A-Za-z]+/GERALD[0-9\._\-A-Za-z]+/')
62 s_bustard_all = re.compile('Firecrest[0-9\._\-A-Za-z]+/Bustard[0-9\._\-A-Za-z]+/')
63 s_firecrest_all = re.compile('Firecrest[0-9\._\-A-Za-z]+/')
# pyinotify event handler that watches the analysis run directory and records
# which pipeline stages (firecrest/bustard/gerald) have written their
# finished.txt marker.
65 class RunEvent(ProcessEvent):
67 def __init__(self, conf_info):
# Per-stage completion flags; each flips to True when that stage's
# finished.txt appears. (Remaining dict entries not visible in this view.)
69 self.run_status_dict = {'firecrest': False,
75 ProcessEvent.__init__(self)
# Called by pyinotify for every file/directory created under the watched run
# path; updates completion flags and the shared status object.
78 def process_IN_CREATE(self, event):
79 fullpath = os.path.join(event.path, event.name)
# Any path containing "finished" is a stage-completion marker.
80 if s_finished.search(fullpath):
81 logging.info("File Found: %s" % (fullpath))
# self._ci is presumably conf_info saved in __init__ (assignment not
# visible in this view) — its .status is the GARunStatus object.
82 if s_firecrest_finished.search(fullpath):
84 self.run_status_dict['firecrest'] = True
85 self._ci.status.updateFirecrest(event.name)
86 elif s_bustard_finished.search(fullpath):
87 self.run_status_dict['bustard'] = True
88 self._ci.status.updateBustard(event.name)
89 elif s_gerald_finished.search(fullpath):
90 self.run_status_dict['gerald'] = True
91 self._ci.status.updateGerald(event.name)
93 #WARNING: The following order is important!!
94 # Firecrest regex will catch all gerald, bustard, and firecrest
95 # Bustard regex will catch all gerald and bustard
96 # Gerald regex will catch all gerald
97 # So, order needs to be Gerald, Bustard, Firecrest, or this
98 # won't work properly.
99 elif s_gerald_all.search(fullpath):
100 self._ci.status.updateGerald(event.name)
101 elif s_bustard_all.search(fullpath):
102 self._ci.status.updateBustard(event.name)
103 elif s_firecrest_all.search(fullpath):
104 self._ci.status.updateFirecrest(event.name)
106 #print "Create: %s" % (os.path.join(event.path, event.name))
# Deletion events are currently ignored (debug print left commented out).
108 def process_IN_DELETE(self, event):
109 #print "Remove %s" % (os.path.join(event.path, event.name))
# Sentinel returned by pipeline_stderr_handler when make output indicates
# the run failed. (A companion RUN_ABORT constant is referenced elsewhere in
# this file but its definition is not visible in this view.)
119 RUN_FAILED = 'failed'
122 #####################################
123 # Configure Step (goat_pipeline.py)
# Patterns recognising goat_pipeline.py stdout milestones.
125 s_start = re.compile('Starting Genome Analyzer Pipeline')
126 s_gerald = re.compile("[\S\s]+--GERALD[\S\s]+--make[\S\s]+")
127 s_generating = re.compile('^Generating journals, Makefiles')
128 s_seq_folder = re.compile('^Sequence folder: ')
129 s_seq_folder_sub = re.compile('want to make ')
130 s_stderr_taskcomplete = re.compile('^Task complete, exiting')
# Patterns recognising goat_pipeline.py error conditions on stderr.
133 s_invalid_cmdline = re.compile('Usage:[\S\s]*goat_pipeline.py')
134 s_species_dir_err = re.compile('Error: Lane [1-8]:')
135 s_goat_traceb = re.compile("^Traceback \(most recent call last\):")
136 s_missing_cycles = re.compile('^Error: Tile s_[1-8]_[0-9]+: Different number of cycles: [0-9]+ instead of [0-9]+')
# Module-level latch so the missing-cycles error is only reported once
# (set by config_stderr_handler).
138 SUPPRESS_MISSING_CYCLES = False
141 ##Ignore - Example of out above each ignore regex.
142 #NOTE: Commenting out an ignore will cause it to be
143 # logged as DEBUG with the logging module.
144 #CF_STDERR_IGNORE_LIST = []
# Per-tile chatter (s_<lane>_<tile>) skipped without logging.
145 s_skip = re.compile('s_[0-8]_[0-9]+')
148 ##########################################
149 # Pipeline Run Step (make -j8 recursive)
# Matches the finished.txt stage markers; used by RunEvent.process_IN_CREATE.
152 s_finished = re.compile('finished')
# Failure signatures in the make run's stderr.
155 s_make_error = re.compile('^make[\S\s]+Error')
156 s_no_gnuplot = re.compile('gnuplot: command not found')
157 s_no_convert = re.compile('^Can\'t exec "convert"')
158 s_no_ghostscript = re.compile('gs: command not found')
160 ##Ignore - Example of out above each ignore regex.
161 #NOTE: Commenting out an ignore will cause it to be
162 # logged as DEBUG with the logging module.
# Known-benign stderr lines from the pipeline run; each example line above
# its regex shows the real output it matches.
164 PL_STDERR_IGNORE_LIST = []
166 PL_STDERR_IGNORE_LIST.append( re.compile('^Info: PF') )
167 # About to analyse intensity file s_4_0101_sig2.txt
168 PL_STDERR_IGNORE_LIST.append( re.compile('^About to analyse intensity file') )
169 # Will send output to standard output
170 PL_STDERR_IGNORE_LIST.append( re.compile('^Will send output to standard output') )
171 # Found 31877 clusters
172 PL_STDERR_IGNORE_LIST.append( re.compile('^Found [0-9]+ clusters') )
173 # Will use quality criterion ((CHASTITY>=0.6)
174 PL_STDERR_IGNORE_LIST.append( re.compile('^Will use quality criterion') )
175 # Quality criterion translated to (($F[5]>=0.6))
176 PL_STDERR_IGNORE_LIST.append( re.compile('^Quality criterion translated to') )
177 # opened /woldlab/trog/data1/king/070924_USI-EAS44_0022_FC12150/Data/C1-36_Firecrest1.9.1_14-11-2007_king.4/Bustard1.9.1_14-11-2007_king/s_4_0101_qhg.txt
179 # opened s_4_0103_qhg.txt
180 PL_STDERR_IGNORE_LIST.append( re.compile('^opened[\S\s]+qhg.txt') )
181 # 81129 sequences out of 157651 passed filter criteria
182 PL_STDERR_IGNORE_LIST.append( re.compile('^[0-9]+ sequences out of [0-9]+ passed filter criteria') )
# Predicate used by pipeline_stderr_handler to drop known-benign stderr lines.
185 def pl_stderr_ignore(line):
187 Searches lines for lines to ignore (i.e. not to log)
189 returns True if line should be ignored
190 returns False if line should NOT be ignored
# Linear scan over the module-level ignore patterns.
192 for s in PL_STDERR_IGNORE_LIST:
198 def config_stdout_handler(line, conf_info):
200 Processes each line of output from GOAT
201 and stores useful information using the logging module
203 Loads useful information into conf_info as well, for future
204 use outside the function.
206 returns True if found condition that signifies success.
209 # Skip irrelevant line (without logging)
210 if s_skip.search(line):
213 # Detect invalid command-line arguments
214 elif s_invalid_cmdline.search(line):
215 logging.error("Invalid commandline options!")
217 # Detect starting of configuration
218 elif s_start.search(line):
219 logging.info('START: Configuring pipeline')
221 # Detect it made it past invalid arguments
222 elif s_gerald.search(line):
223 logging.info('Running make now')
225 # Detect that make files have been generated (based on output)
226 elif s_generating.search(line):
# NOTE(review): typo in the log message below ("generted") — left
# unchanged here since this is a runtime string.
227 logging.info('Make files generted')
230 # Capture run directory
231 elif s_seq_folder.search(line):
232 mo = s_seq_folder_sub.search(line)
233 #Output changed when using --tiles=<tiles>
234 # at least in pipeline v0.3.0b2
# --tiles form: line ends with the path to the GERALD Makefile; peel
# off path components to recover the gerald, bustard and firecrest dirs.
236 firecrest_bustard_gerald_makefile = line[mo.end():]
237 firecrest_bustard_gerald, junk = \
238 os.path.split(firecrest_bustard_gerald_makefile)
239 firecrest_bustard, junk = os.path.split(firecrest_bustard_gerald)
240 firecrest, junk = os.path.split(firecrest_bustard)
242 conf_info.bustard_path = firecrest_bustard
243 conf_info.run_path = firecrest
245 #Standard output handling
# Non---tiles form: everything after "Sequence folder: " is the bustard
# path; its parent directory is the run path.
247 print 'Sequence line:', line
248 mo = s_seq_folder.search(line)
249 conf_info.bustard_path = line[mo.end():]
250 conf_info.run_path, temp = os.path.split(conf_info.bustard_path)
252 # Log all other output for debugging purposes
254 logging.warning('CONF:?: %s' % (line))
260 def config_stderr_handler(line, conf_info):
262 Processes each line of output from GOAT
263 and stores useful information using the logging module
265 Loads useful information into conf_info as well, for future
266 use outside the function.
268 returns RUN_ABORT upon detecting failure;
269 True on success message;
270 False if neutral message
271 (i.e. doesn't signify failure or success)
# Needed because the missing-cycles branch below writes the module-level
# SUPPRESS_MISSING_CYCLES latch.
273 global SUPPRESS_MISSING_CYCLES
275 # Detect invalid species directory error
276 if s_species_dir_err.search(line):
279 # Detect goat_pipeline.py traceback
280 elif s_goat_traceb.search(line):
281 logging.error("Goat config script died, traceback in debug output")
283 # Detect indication of successful configuration (from stderr; odd, but ok)
284 elif s_stderr_taskcomplete.search(line):
285 logging.info('Configure step successful (from: stderr)')
287 # Detect missing cycles
288 elif s_missing_cycles.search(line):
290 # Only display error once
291 if not SUPPRESS_MISSING_CYCLES:
292 logging.error("Missing cycles detected; Not all cycles copied?")
293 logging.debug("CONF:STDERR:MISSING_CYCLES: %s" % (line))
294 SUPPRESS_MISSING_CYCLES = True
297 # Log all other output as debug output
299 logging.debug('CONF:STDERR:?: %s' % (line))
301 # Neutral (not failure; nor success)
305 #def pipeline_stdout_handler(line, conf_info):
307 # Processes each line of output from running the pipeline
308 # and stores useful information using the logging module
310 # Loads useful information into conf_info as well, for future
311 # use outside the function.
313 # returns True if found condition that signifies success.
316 # #f.write(line + '\n')
322 def pipeline_stderr_handler(line, conf_info):
324 Processes each line of stderr from pipeline run
325 and stores useful information using the logging module
327 ##FIXME: Future feature (doesn't actually do this yet)
328 #Loads useful information into conf_info as well, for future
329 #use outside the function.
331 returns RUN_FAILED upon detecting failure;
332 #True on success message; (no clear success state)
333 False if neutral message
334 (i.e. doesn't signify failure or success)
# Drop known-benign chatter first so it is never logged.
337 if pl_stderr_ignore(line):
339 elif s_make_error.search(line):
340 logging.error("make error detected; run failed")
342 elif s_no_gnuplot.search(line):
343 logging.error("gnuplot not found")
345 elif s_no_convert.search(line):
346 logging.error("imagemagick's convert command not found")
348 elif s_no_ghostscript.search(line):
349 logging.error("ghostscript not found")
# Anything unrecognised is kept at debug level for later diagnosis.
352 logging.debug('PIPE:STDERR:?: %s' % (line))
357 def retrieve_config(conf_info, flowcell, cfg_filepath, genome_dir):
359 Gets the config file from server...
360 requires config file in:
361 /etc/ga_frontend/ga_frontend.conf
367 base_host_url: http://host:port
369 return True if successful, False if failure
# Merge command-line/.conf options; url comes from base_host_url.
371 options = getCombinedOptions()
373 if options.url is None:
374 logging.error("~/.ga_frontend.conf or /etc/ga_frontend/ga_frontend.conf" \
375 " missing base_host_url option")
# Download the flowcell's config from the frontend server and record
# where it was saved. NOTE(review): Python 2 "except X, e" syntax.
379 saveConfigFile(flowcell, options.url, cfg_filepath)
380 conf_info.config_filepath = cfg_filepath
381 except FlowCellNotFound, e:
384 except WebError404, e:
# Re-read the saved config and substitute local genome-mapper paths into
# its %(...)s placeholders, rewriting the file in place. ("data" is
# presumably f.read() on a line not visible here — confirm.)
394 f = open(cfg_filepath, 'r')
398 genome_dict = getAvailableGenomes(genome_dir)
399 mapper_dict = constructMapperDict(genome_dict)
401 f = open(cfg_filepath, 'w')
402 f.write(data % (mapper_dict))
409 def configure(conf_info):
411 Attempts to configure the GA pipeline using goat.
413 Uses logging module to store information about status.
415 returns True if configuration successful, otherwise False.
418 #pipe = subprocess.Popen(['goat_pipeline.py',
419 # '--GERALD=config32bk.txt',
422 # stdout=subprocess.PIPE,
423 # stderr=subprocess.PIPE)
425 #ERROR Test (2), causes goat_pipeline.py traceback
426 #pipe = subprocess.Popen(['goat_pipeline.py',
427 # '--GERALD=%s' % (conf_info.config_filepath),
428 # '--tiles=s_4_100,s_4_101,s_4_102,s_4_103,s_4_104',
431 # stdout=subprocess.PIPE,
432 # stderr=subprocess.PIPE)
434 ##########################
435 # Run configuration step
436 # Not a test; actual configure attempt.
437 #pipe = subprocess.Popen(['goat_pipeline.py',
438 # '--GERALD=%s' % (conf_info.config_filepath),
441 # stdout=subprocess.PIPE,
442 # stderr=subprocess.PIPE)
# goat's stdout/stderr are captured to files so they can be re-read and
# parsed line-by-line after the process exits.
445 stdout_filepath = os.path.join(conf_info.analysis_dir,
446 "pipeline_configure_stdout.txt")
447 stderr_filepath = os.path.join(conf_info.analysis_dir,
448 "pipeline_configure_stderr.txt")
450 fout = open(stdout_filepath, 'w')
451 ferr = open(stderr_filepath, 'w')
453 pipe = subprocess.Popen(['goat_pipeline.py',
454 '--GERALD=%s' % (conf_info.config_filepath),
455 #'--tiles=s_4_0100,s_4_0101,s_4_0102,s_4_0103,s_4_0104',
457 conf_info.analysis_dir],
461 print "Configuring pipeline: %s" % (time.ctime())
# Block until goat_pipeline.py finishes; 0 means clean exit.
462 error_code = pipe.wait()
# Re-open the captured stdout and feed each line to the stdout parser,
# which fills in conf_info.bustard_path / run_path.
471 fout = open(stdout_filepath, 'r')
473 stdout_line = fout.readline()
476 while stdout_line != '':
478 if config_stdout_handler(stdout_line, conf_info):
480 stdout_line = fout.readline()
485 #error_code = pipe.wait()
# NOTE(review): typo in the log message below ("Recieved") — left
# unchanged here since this is a runtime string.
487 logging.error('Recieved error_code: %s' % (error_code))
489 logging.info('We are go for launch!')
# Same pass over captured stderr; RUN_ABORT / True / False come from
# config_stderr_handler.
492 ferr = open(stderr_filepath, 'r')
493 stderr_line = ferr.readline()
496 stderr_success = False
497 while stderr_line != '':
498 stderr_status = config_stderr_handler(stderr_line, conf_info)
499 if stderr_status == RUN_ABORT:
501 elif stderr_status is True:
502 stderr_success = True
503 stderr_line = ferr.readline()
508 #Success requirements:
509 # 1) The stdout completed without error
510 # 2) The program exited with status 0
511 # 3) No errors found in stdout
# "complete" and "abort" are presumably set on lines not visible in this
# view — confirm before modifying this function.
512 print '#Expect: True, False, True, True'
513 print complete, bool(error_code), abort != RUN_ABORT, stderr_success is True
514 status = complete is True and \
515 bool(error_code) is False and \
516 abort != RUN_ABORT and \
517 stderr_success is True
519 # If everything was successful, but for some reason
520 # we didn't retrieve the path info, log it.
522 if conf_info.bustard_path is None or conf_info.run_path is None:
523 logging.error("Failed to retrieve run_path")
529 def run_pipeline(conf_info):
531 Run the pipeline and monitor status.
533 # Fail if the run_path doesn't actually exist
534 if not os.path.exists(conf_info.run_path):
535 logging.error('Run path does not exist: %s' \
536 % (conf_info.run_path))
539 # Change cwd to run_path
540 stdout_filepath = os.path.join(conf_info.analysis_dir, 'pipeline_run_stdout.txt')
541 stderr_filepath = os.path.join(conf_info.analysis_dir, 'pipeline_run_stderr.txt')
543 # Create status object
544 conf_info.createStatusObject()
546 # Monitor file creation
# Watch the run tree recursively for create/delete events; RunEvent marks
# each stage complete when its finished.txt appears. ("wm" is presumably a
# pyinotify WatchManager created on a line not visible here — confirm.)
548 mask = EventsCodes.IN_DELETE | EventsCodes.IN_CREATE
549 event = RunEvent(conf_info)
550 notifier = ThreadedNotifier(wm, event)
552 wdd = wm.add_watch(conf_info.run_path, mask, rec=True)
554 # Log pipeline starting
555 logging.info('STARTING PIPELINE @ %s' % (time.ctime()))
557 # Start the pipeline (and hide!)
558 #pipe = subprocess.Popen(['make',
561 # stdout=subprocess.PIPE,
562 # stderr=subprocess.PIPE)
# Run make in the firecrest directory; its output is captured to files
# for post-run parsing.
564 fout = open(stdout_filepath, 'w')
565 ferr = open(stderr_filepath, 'w')
567 pipe = subprocess.Popen(['make',
568 '--directory=%s' % (conf_info.run_path),
574 # Wait for run to finish
575 retcode = pipe.wait()
# Scan captured stderr for failure signatures.
584 ferr = open(stderr_filepath, 'r')
586 run_failed_stderr = False
588 err_status = pipeline_stderr_handler(line, conf_info)
589 if err_status == RUN_FAILED:
590 run_failed_stderr = True
594 # Finished file check!
595 print 'RUN SUCCESS CHECK:'
596 for key, value in event.run_status_dict.items():
597 print '  %s: %s' % (key, value)
599 dstatus = event.run_status_dict
601 # Success or failure check
# Success requires a zero exit code, no stderr failure signature, and all
# three stage finished.txt markers observed by the inotify watcher.
602 status = (retcode == 0) and \
603 run_failed_stderr is False and \
604 dstatus['firecrest'] is True and \
605 dstatus['bustard'] is True and \
606 dstatus['gerald'] is True