OB.DAAC Logo
NASA Logo
Ocean Color Science Software

ocssw V2022
multilevel_processor2.py
Go to the documentation of this file.
1 #!/usr/bin/env python3
2 
3 """
4 Program to perform multilevel processing (previously known as the
5 seadas_processor and sometimes referred to as the 'uber' processor).
6 """
7 
8 try:
9  import configparser
10 except ImportError:
11  import ConfigParser as configparser
12 
13 import datetime
14 import logging
15 import optparse
16 import os
17 import re
18 import subprocess
19 import sys
20 import tarfile
21 import time
22 import traceback
23 
24 import get_output_name
26 import mlp.mlp_utils as mlp_utils
27 import mlp.benchmark_timer as benchmark_timer
28 import seadasutils.MetaUtils as MetaUtils
29 import mlp.name_finder_utils as name_finder_utils
30 import mlp.obpg_data_file as obpg_data_file
31 import seadasutils.ProcUtils as ProcUtils
32 import mlp.processor as processor
33 import mlp.processing_rules as processing_rules
34 import mlp.uber_par_file_reader as uber_par_file_reader
35 #import product
36 
37 __version__ = '1.0.6'
38 
39 __author__ = 'melliott'
40 
class ProcessorConfig(object):
    """
    Configuration data for the program which needs to be widely available.

    Holds command-line/par-file level settings (verbosity, overwrite flags,
    output directory) plus options persisted in the hidden configuration
    file ``seadas_ocssw.cfg``.
    """
    SECS_PER_DAY = 86400

    def __init__(self, hidden_dir, ori_dir, verbose, overwrite, use_existing,
                 deletefiles, out_dir=None):
        """
        Create the configuration object.

        hidden_dir   -- directory holding seadas_ocssw.cfg (created if absent)
        ori_dir      -- directory the program was started from
        verbose      -- emit extra progress output when True
        overwrite    -- overwrite existing output files when True
        use_existing -- reuse already-existing intermediate files when True
        deletefiles  -- delete intermediate files when True
        out_dir      -- fixed output directory; when None, defaults to '.'
                        and may be changed later by par file or command line
        """
        self.prog_name = os.path.basename(sys.argv[0])

        if not os.path.exists(hidden_dir):
            try:
                os.mkdir(hidden_dir)
            except OSError as ose:
                # Bug fix: the original called .find() on the exception
                # object itself, which raised AttributeError.  Inspect the
                # exception's message instead.  Other OSErrors (e.g. a
                # concurrent mkdir) are deliberately ignored, as before.
                if 'Permission denied:' in str(ose):
                    log_and_exit('Error! Unable to create directory {0}'.\
                                 format(hidden_dir))
        self.hidden_dir = hidden_dir
        self.original_dir = ori_dir
        self.verbose = verbose
        self.deletefiles = deletefiles
        self.overwrite = overwrite
        self.use_existing = use_existing
        self.get_anc = True   # ancillary data retrieval is always enabled
        if out_dir:
            self.output_dir = out_dir
            self.output_dir_is_settable = False
        else:
            self.output_dir = '.'   # default to current dir, change later if
                                    # specified in par file or command line
            self.output_dir_is_settable = True
        cfg_file_path = os.path.join(self.hidden_dir, 'seadas_ocssw.cfg')
        if os.path.exists(cfg_file_path):
            self._read_saved_options(cfg_file_path)
        else:
            self.max_file_age = 2592000    # number of seconds in 30 days
            self._write_default_cfg_file(cfg_file_path)
        ProcessorConfig._instance = self

    def _read_saved_options(self, cfg_path):
        """
        Gets options stored in the program's configuration file.

        Sets self.max_file_age (seconds) from the 'par_file_age' option
        (stored in days) in the '[main]' section; exits on a malformed file.
        """
        try:
            cfg_parser = configparser.ConfigParser()
            cfg_parser.read(cfg_path)
            try:
                # The stored value looks like "30 # units are days"; the
                # split() keeps only the leading number of days.
                self.max_file_age = ProcessorConfig.SECS_PER_DAY * \
                                    int(cfg_parser.get('main',
                                                       'par_file_age').\
                                        split(' ', 2)[0])
            except configparser.NoSectionError as nse:
                print ('nse: ' + str(nse))
                print ('sys.exc_info(): ')
                for msg in sys.exc_info():
                    print ('  ' + str(msg))
                log_and_exit('Error!  Configuration file has no "main" ' +
                             'section.')
            except configparser.NoOptionError:
                log_and_exit('Error! The "main" section of the configuration ' +
                             'file does not specify a "par_file_age".')
        except configparser.MissingSectionHeaderError:
            log_and_exit('Error! Bad configuration file, no section headers ' +
                         'found.')

    def _set_temp_dir(self):
        """
        Returns a writable temporary directory: /tmp when usable, otherwise
        the current working directory; exits if neither is writable.
        """
        if os.path.exists('/tmp') and os.path.isdir('/tmp') and \
           os.access('/tmp', os.W_OK):
            return '/tmp'
        else:
            cwd = os.getcwd()
            if os.path.exists(cwd) and os.path.isdir(cwd) and \
               os.access(cwd, os.W_OK):
                return cwd
            else:
                log_and_exit('Error! Unable to establish a temporary ' +
                             'directory.')

    def _write_default_cfg_file(self, cfg_path):
        """
        Writes out a configuration file using default values.
        """
        with open(cfg_path, 'wt') as cfg_file:
            cfg_file.write('[main]\n')
            cfg_file.write('par_file_age=30  # units are days\n')
130 
class Sensor(object):
    """
    Sensor contains the recipe and processing methods for general sensors.
    """
    def __init__(self):
        """
        Build the generic rule set: each product type maps to its source
        type(s) and the method that creates it, plus a batch-processing flag.
        """
        self.name = 'general'
        self.rules_dict = {
            'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
                                                    self.run_bottom_error, False),
            'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
                                                    self.run_l1brsgen, False),
            'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
                                                    self.run_l2brsgen, False),
            'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
                                                    self.run_l1mapgen, False),
            'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
                                                    self.run_l2mapgen, False),
            'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
                                                    self.run_l1b, False),
            'l2gen': processing_rules.build_rule('l2gen', ['l1'], self.run_l2gen,
                                                 False),
            'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
                                                     self.run_l2extract, False),
            'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
                                                 True),
            'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
                                                 True),
            'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
                                                    self.run_l3mapgen, False),
            'smigen': processing_rules.build_rule('smigen', ['l3bin'],
                                                  self.run_smigen, False)
        }
        self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
                            'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
                            'l3mapgen', 'smigen']
        self.require_geo = False
        self.recipe = processing_rules.RuleSet('General rules', self.rules_dict,
                                               self.rules_order)

    def run_bottom_error(self, proc):
        """
        Exits with an error message when there is an attempt to process a source
        file at the lowest level of a rule chain.
        """
        err_msg = 'Error!  Attempting to create {0} product, but no creation program is known.'.format(proc.target_type)
        log_and_exit(err_msg)

    def run_l1b(self, proc):
        """
        Sets up and runs the l1bgen_generic executable; returns its exit status.
        """
        prog = os.path.join(proc.ocssw_bin, 'l1bgen_generic')
        args = 'ifile=' + proc.input_file + ' '
        args += 'ofile=' + proc.output_file + ' '
        if proc.geo_file is not None:
            args += proc.geo_file + ' '
        args += get_options(proc.par_data)
        cmd = ' '.join([prog, args])
        return execute_command(cmd)

    def run_l1brsgen(self, proc):
        """
        Runs the l1brsgen executable, with the geo file when one is available,
        and returns its exit status.
        """
        prog = os.path.join(proc.ocssw_bin, 'l1brsgen')
        opts = get_options(proc.par_data)
        # Note: a large block of commented-out outmode/oformat suffix
        # handling and a dead initial cmd assignment were removed here.
        if proc.geo_file is not None:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'geofile=' + proc.geo_file,
                            'ofile=' + proc.output_file])
        else:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'ofile=' + proc.output_file])
        logging.debug('Executing: "%s"', cmd)
        status = execute_command(cmd)
        return status

    def run_l1mapgen(self, proc):
        """
        Runs the l1mapgen executable, handling the range of successful return
        values.
        """
        # Instead of a 0 for a successful exit code, the l1mapgen program returns
        # the percentage of pixels mapped, so a range of possible successful values
        # must be accepted.
        # It should be noted that an exit code of 1 is still an error.
        acceptable_min = 2
        acceptable_max = 100
        prog = os.path.join(proc.ocssw_bin, 'l1mapgen')
        opts = get_options(proc.par_data)
        if proc.geo_file is not None:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'geofile=' + proc.geo_file,
                            'ofile=' + proc.output_file])
        else:
            cmd = ' '.join([prog, opts, ' ifile=' + proc.input_file,
                            'ofile=' + proc.output_file])
        logging.debug('Executing: "%s"', cmd)
        lvl_nm = execute_command(cmd)
        logging.debug('l1mapgen run complete!  Return value: "%s"', lvl_nm)
        if (lvl_nm >= acceptable_min) and (lvl_nm <= acceptable_max):
            return 0
        else:
            return lvl_nm

    def run_l2bin(self, proc):
        """
        Set up for and perform L2 binning.

        A nonzero exit status is tolerated when an output file was still
        produced; otherwise processing stops.
        """
        prog = os.path.join(proc.ocssw_bin, 'l2bin')
        if not os.path.exists(prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        args = 'infile=' + proc.input_file
        args += ' ofile=' + proc.output_file
        args += ' ' + get_options(proc.par_data)
        cmd = ' '.join([prog, args])
        logging.debug('Running l2bin cmd: ' + cmd)
        if cfg_data.verbose:
            print ('l2bin cmd: ' + cmd)
        ret_val = execute_command(cmd)
        if ret_val != 0:
            if os.path.exists(proc.output_file):
                msg = '-I- The l2bin program returned a status value of {0}. Proceeding with processing, using the output l2 bin file {1}'.format(ret_val, proc.output_file)
                logging.info(msg)
                ret_val = 0
            else:
                msg = '-I- The l2bin program produced a bin file with no data. No further processing will be done.'
                sys.exit(msg)
        return ret_val

    def run_l2brsgen(self, proc):
        """
        Runs the l2brsgen executable and returns its exit status.
        """
        logging.debug("In run_l2brsgen")
        prog = os.path.join(proc.ocssw_bin, 'l2brsgen')
        opts = get_options(proc.par_data)
        cmd = ' '.join([prog, opts, 'ifile='+proc.input_file,
                        'ofile=' + proc.output_file])
        logging.debug('Executing: "%s"', cmd)
        status = execute_command(cmd)
        return status

    def run_l2extract(self, proc):
        """
        Set up and run l2extract.

        Requires SWlon/SWlat/NElon/NElat geographic corners in proc.par_data;
        exits with an error when they are missing or cannot be converted to
        line/pixel bounds.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and \
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
            start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
            if (start_line is None) or (end_line is None) or (start_pixel is None) \
               or (end_pixel is None):
                err_msg = 'Error! Could not compute coordinates for l2extract.'
                log_and_exit(err_msg)
            l2extract_prog = os.path.join(proc.ocssw_bin, 'l2extract')
            l2extract_cmd = ' '.join([l2extract_prog, proc.input_file,
                                      str(start_pixel), str(end_pixel),
                                      str(start_line), str(end_line), '1', '1',
                                      proc.output_file])
            logging.debug('Executing l2extract command: "%s"', l2extract_cmd)
            status = execute_command(l2extract_cmd)
            return status
        else:
            err_msg = 'Error! Geographical coordinates not specified for l2extract.'
            log_and_exit(err_msg)

    def run_l2gen(self, proc):
        """
        Set up for and perform L2 processing.

        Fetches ancillary data first (when enabled), then runs l2gen with a
        generated par file.
        """
        if cfg_data.get_anc:
            getanc_prog = build_executable_path('getanc')
            getanc_cmd = ' '.join([getanc_prog, proc.input_file])
            logging.debug('running getanc command: ' + getanc_cmd)
            execute_command(getanc_cmd)
        l2gen_prog = os.path.join(proc.ocssw_bin, 'l2gen')
        if not os.path.exists(l2gen_prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        par_name = build_l2gen_par_file(proc.par_data, proc.input_file,
                                        proc.geo_file, proc.output_file)
        logging.debug('L2GEN_FILE=' + proc.output_file)

        args = 'par=' + par_name
        l2gen_cmd = ' '.join([l2gen_prog, args])
        if cfg_data.verbose or DEBUG:
            logging.debug('l2gen cmd: %s', l2gen_cmd)
        return execute_command(l2gen_cmd)

    def run_l2mapgen(self, proc):
        """
        Runs the l2mapgen executable; a 110 status (insufficient data to
        plot) is treated as success.
        """
        prog = os.path.join(proc.ocssw_bin, 'l2mapgen')
        args = 'ifile=' + proc.input_file
        for key in proc.par_data:
            if (key != 'odir') and (key != 'ofile') and not key.lower() in FILE_USE_OPTS:
                args += ' ' + key + '=' + proc.par_data[key]
        args += ' ofile=' + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug('Executing: "%s"', cmd)
        status = execute_command(cmd)
        logging.debug("l2mapgen run complete with status " + str(status))
        if status == 110:
            # A return status of 110 indicates that there was insufficient data
            # to plot. That status should be handled as a normal condition here.
            return 0
        return status

    def run_l3bin(self, proc):
        """
        Set up and run the l3bin program, returning its (possibly adjusted)
        exit status.
        """
        prog = os.path.join(proc.ocssw_bin, 'l3bin')
        if not os.path.exists(prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        # l3bin is invoked with in=/out= only; par-file options are not
        # passed through.  (The original option pass-through loop was dead
        # code - its result was overwritten immediately - and was removed.)
        args = 'in=' + proc.input_file
        args += ' ' + "out=" + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug('Executing l3bin command: "%s"', cmd)
        ret_val = execute_command(cmd)
        if ret_val != 0:
            if os.path.exists(proc.output_file):
                msg = '-I- The l3bin program returned a status value of {0}. Proceeding with processing, using the output l2 bin file {1}'.format(
                    ret_val, proc.output_file)
                logging.info(msg)
                ret_val = 0
            else:
                msg = "-I- The l3bin program produced a bin file with no data. No further processing will be done."
                sys.exit(msg)
        return ret_val

    def run_l3mapgen(self, proc):
        """
        Set up and run the l3mapgen program; returns its exit status.
        """
        prog = os.path.join(proc.ocssw_bin, 'l3mapgen')
        if not os.path.exists(prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        args = 'ifile=' + proc.input_file
        for key in proc.par_data:
            if (key != 'odir') and (key != 'ofile') and not key.lower() in FILE_USE_OPTS:
                args += ' ' + key + '=' + proc.par_data[key]
        args += ' ofile=' + proc.output_file
        cmd = ' '.join([prog, args])
        logging.debug('Executing l3mapgen command: "%s"', cmd)
        return execute_command(cmd)

    def run_smigen(self, proc):
        """
        Set up for and perform SMI (Standard Mapped Image) generation.

        Exits with an error when no 'prod' option is present in the par data.
        """
        status = None
        prog = os.path.join(proc.ocssw_bin, 'smigen')
        if not os.path.exists(prog):
            print ("Error!  Cannot find executable needed for {0}".\
                   format(proc.rule_set.rules[proc.target_type].action))
        if 'prod' in proc.par_data:
            args = 'ifile=' + proc.input_file + ' ofile=' + proc.output_file + \
                   ' prod=' + proc.par_data['prod']
            for key in proc.par_data:
                if (key != 'prod') and not (key.lower() in FILE_USE_OPTS):
                    args += ' ' + key + '=' + proc.par_data[key]
            # Bug fix: the command is assembled *after* the option loop, so
            # the extra par-file options are actually part of the command.
            # (Previously cmd was built first and the loop had no effect.)
            cmd = ' '.join([prog, args])
            logging.debug('\nRunning smigen command: ' + cmd)
            status = execute_command(cmd)
        else:
            err_msg = 'Error! No product specified for smigen.'
            log_and_exit(err_msg)
        return status
443 
445  """
446  Sensor GOCI contains the GOCI recipe.
447  """
448  def __init__(self):
449  self.name = 'goci'
450  self.rules_dict = {
451  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
452  self.run_bottom_error, False),
453  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
454  self.run_l1brsgen, False),
455  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
456  self.run_l2brsgen, False),
457  'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
458  self.run_l1mapgen, False),
459  'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
460  self.run_l2mapgen, False),
461  'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
462  self.run_l1b, False),
463  'l2gen': processing_rules.build_rule('l2gen', ['level 1b'], self.run_l2gen,
464  False),
465  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
466  self.run_l2extract, False),
467  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
468  True),
469  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
470  True),
471  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
472  self.run_l3mapgen, False),
473  'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
474  False)
475  }
476  self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
477  'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
478  'l3mapgen', 'smigen']
479  self.require_geo = False
481  self.recipe = processing_rules.RuleSet('GOCI rules', self.rules_dict, self.rules_order)
482 
    """
    Sensor Hawkeye contains the Hawkeye recipe and processing methods.
    """
487  def __init__(self):
488  self.name = 'hawkeye'
489  self.rules_dict = {
490  'level 1a': processing_rules.build_rule('level 1a', ['nothing lower'],
491  self.run_bottom_error, False),
492  'l1brsgen': processing_rules.build_rule('l1brsgen', ['level 1a', 'geo'],
493  self.run_l1brsgen, False),
494  'l1mapgen': processing_rules.build_rule('l1mapgen', ['level 1a', 'geo'],
495  self.run_l1mapgen, False),
496  'geo': processing_rules.build_rule('geo', ['level 1a'],
497  self.run_geo, False),
498  'l2gen': processing_rules.build_rule('l2gen', ['level 1a', 'geo'],
499  self.run_l2gen, False),
500  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
501  self.run_l2extract, False),
502  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
503  self.run_l2brsgen, False),
504  'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
505  self.run_l2mapgen, False),
506  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
507  True),
508  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
509  True),
510  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
511  self.run_l3mapgen, False, False),
512  'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
513  False)
514  }
515  self.rules_order = ['level 1a', 'geo', 'l1brsgen',
516  'l1mapgen','l2gen', 'l2extract', 'l2bin',
517  'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen', 'smigen']
518  self.require_geo = True
520  self.recipe = processing_rules.RuleSet('HAWKEYE Rules', self.rules_dict, self.rules_order)
521 
522  def run_geo(self, proc):
523  """
524  Set up and run the geolocate_hawkeye program, returning the exit status of the run.
525  """
526  logging.debug('In run_geolocate_hawkeye')
527  prog = build_executable_path('geolocate_hawkeye')
528 
531  if not prog:
532  err_msg = 'Error! Cannot find program geolocate_hawkeye.'
533  logging.info(err_msg)
534  sys.exit(err_msg)
535  args = ''.join([proc.input_file, ' ', proc.output_file])
536  args += get_options(proc.par_data)
537  cmd = ' '.join([prog, args])
538  logging.debug("\nRunning: " + cmd)
539  return execute_command(cmd)
540 
542  """
543  Sensor MERIS contains MERIS specific recipe.
544 
545  Rule format:
546  target type (string), source types (list of strings), batch processing
547  flag (Boolean), action to take (function name)
548  """
549  def __init__(self):
550  self.name = 'meris'
551  self.rules_dict = {
552  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
553  self.run_bottom_error, False),
554  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
555  self.run_l1brsgen, False),
556  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
557  self.run_l2brsgen, False),
558  'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
559  self.run_l1mapgen, False),
560  'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
561  self.run_l2mapgen, False),
562  'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
563  self.run_l1b, False),
564  'l2gen': processing_rules.build_rule('l2gen', ['level 1b'], self.run_l2gen,
565  False),
566  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
567  self.run_l2extract, False),
568  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
569  True),
570  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
571  True),
572  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
573  self.run_l3mapgen, False),
574  'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
575  False)
576  }
577  self.rules_order = ['level 1a', 'l1brsgen', 'l1mapgen', 'level 1b', 'l2gen',
578  'l2extract', 'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
579  'l3mapgen', 'smigen']
580  self.require_geo = False
582  self.recipe = processing_rules.RuleSet('MERIS rules', self.rules_dict, self.rules_order)
583 
    """
    Sensor MODIS contains the MODIS-specific recipe and processing methods.
    """
588  def __init__(self):
589  self.name = 'modis'
590  self.rules_dict = {
591  'level 0': processing_rules.build_rule('level 0', ['nothing lower'],
592  self.run_bottom_error, False),
593  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
594  self.run_l1a, False),
595  'l1brsgen': processing_rules.build_rule('l1brsgen', ['level 1b', 'geo'],
596  self.run_l1brsgen, False),
597  'l1mapgen': processing_rules.build_rule('l1mapgen', ['level 1b', 'geo'],
598  self.run_l1mapgen, False),
599  'geo': processing_rules.build_rule('geo', ['level 1a'], self.run_geo,
600  False),
601  'l1aextract': processing_rules.build_rule('l1aextract',
602  ['level 1a', 'geo'],
603  self.run_l1aextract,
604  False),
605  'level 1b': processing_rules.build_rule('level 1b',
606  ['level 1a', 'geo'],
607  self.run_l1b, False),
608  'l2gen': processing_rules.build_rule('l2gen', ['level 1b', 'geo'],
609  self.run_l2gen, False),
610  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
611  self.run_l2extract, False),
612  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
613  self.run_l2brsgen, False),
614  'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
615  self.run_l2mapgen, False),
616  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
617  True),
618  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
619  True),
620  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
621  self.run_l3mapgen, False, False),
622  'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
623  False)
624  }
625  self.rules_order = ['level 0', 'level 1a', 'geo', 'l1aextract',
626  'level 1b', 'l1brsgen', 'l1mapgen', 'l2gen', 'l2extract',
627  'l2bin', 'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen',
628  'smigen']
629  self.require_geo = True
631  self.recipe = processing_rules.RuleSet('MODIS Rules', self.rules_dict, self.rules_order)
632 
    def run_l1aextract(self, proc):
        """
        Set up and run l1aextract_modis.

        Requires the SWlon/SWlat/NElon/NElat corner coordinates in
        proc.par_data.  NOTE(review): when any corner is missing, the method
        silently does nothing and returns None - confirm callers expect that.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and\
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
            # Convert the geographic corners into line/pixel extraction bounds.
            start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
            if (start_line is None) or (end_line is None) or (start_pixel is None)\
               or (end_pixel is None):
                err_msg = 'Error! Cannot find l1aextract_modis coordinates.'
                log_and_exit(err_msg)
            l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_modis')
            l1aextract_cmd = ' '.join([l1aextract_prog, proc.input_file,
                                       str(start_pixel), str(end_pixel),
                                       str(start_line), str(end_line),
                                       proc.output_file])
            logging.debug('Executing l1aextract_modis command: "%s"',
                          l1aextract_cmd)
            status = execute_command(l1aextract_cmd)
            return status
653 
654  def run_geo(self, proc):
655  """
656  Sets up and runs the MODIS GEO script.
657  """
658  prog = build_executable_path('modis_GEO')
659  # os.path.join(proc.ocssw_root, 'run', 'scripts', 'modis_GEO')
660  args = proc.input_file + ' --output=' + proc.output_file
661  args += get_options(proc.par_data)
662  cmd = ' '.join([prog, args])
663  logging.debug("\nRunning: " + cmd)
664  return execute_command(cmd)
665 
666  def run_l1a(self, proc):
667  """
668  Sets up and runs the MODIS L1A script.
669  """
670  prog = build_executable_path('modis_L1A')
671  args = proc.input_file
672  args += ' --output=' + proc.output_file
673  args += get_options(proc.par_data)
674  cmd = ' '.join([prog, args])
675  logging.debug("\nRunning: " + cmd)
676  return execute_command(cmd)
677 
678  def run_l1b(self, proc):
679  """
680  Runs the L1B script.
681  """
682  prog = build_executable_path('modis_L1B')
683  args = ' -o ' + proc.output_file
684  args += get_options(proc.par_data)
685  # The following is no longer needed, but kept for reference.
686  # args += ' --lutdir $OCSSWROOT/run/var/modisa/cal/EVAL --lutver=6.1.15.1z'
687  args += ' ' + proc.input_file
688  if not proc.geo_file is None:
689  args += ' ' + proc.geo_file
690  cmd = ' '.join([prog, args])
691  logging.debug("\nRunning: " + cmd)
692  return execute_command(cmd)
693 
695  """
696  Sensor SeaWiFS contains SeaWiFS sepcific recipe and processing method.
697  """
698  def __init__(self):
699  self.name = 'seawifs'
700  self.rules_dict = {
701  'level 1a': processing_rules.build_rule('level 1a', ['level 0'],
702  self.run_bottom_error, False),
703  'l1aextract': processing_rules.build_rule('l1aextract',
704  ['level 1a'],
705  self.run_l1aextract,
706  False),
707  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1'],
708  self.run_l1brsgen, False),
709  'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1'],
710  self.run_l1mapgen, False),
711  'level 1b': processing_rules.build_rule('level 1b', ['level 1a'],
712  self.run_l1b, False),
713  'l2gen': processing_rules.build_rule('l2gen', ['l1'], self.run_l2gen,
714  False),
715  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
716  self.run_l2extract, False),
717  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
718  self.run_l2brsgen, False),
719  'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
720  self.run_l2mapgen, False),
721  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
722  True),
723  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
724  True, False),
725  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
726  self.run_l3mapgen, False, False),
727  'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
728  False)
729  }
730  self.rules_order = ['level 1a', 'l1aextract', 'l1brsgen',
731  'l1mapgen', 'level 1b', 'l2gen', 'l2extract',
732  'l2brsgen', 'l2mapgen', 'l2bin', 'l3bin',
733  'l3mapgen', 'smigen']
734  self.require_geo = False
736  self.recipe = processing_rules.RuleSet("SeaWiFS Rules", self.rules_dict, self.rules_order)
737 
    def run_l1aextract(self, proc):
        """
        Set up and run l1aextract_seawifs.

        Requires the SWlon/SWlat/NElon/NElat corner coordinates in
        proc.par_data.  NOTE(review): when any corner is missing, the method
        silently does nothing and returns None - confirm callers expect that.
        """
        if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and\
           'NElon' in proc.par_data and 'NElat' in proc.par_data:
            # Convert the geographic corners into line/pixel extraction bounds.
            start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
            if (start_line is None) or (end_line is None) or (start_pixel is None)\
               or (end_pixel is None):
                err_msg = 'Error! Cannot compute l1aextract_seawifs coordinates.'
                log_and_exit(err_msg)
            l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_seawifs')
            l1aextract_cmd = ' '.join([l1aextract_prog, proc.input_file,
                                       str(start_pixel), str(end_pixel),
                                       str(start_line), str(end_line), '1', '1',
                                       proc.output_file])
            logging.debug('Executing l1aextract_seawifs command: "%s"',
                          l1aextract_cmd)
            status = execute_command(l1aextract_cmd)
            return status
758 
760  """
761  Viirs object.
762  """
763  def __init__(self):
764  self.name = 'viirs'
765  self.rules_dict = {
766  'level 1a': processing_rules.build_rule('level 1a', ['nothing lower'],
767  self.run_bottom_error, False),
768  'l1brsgen': processing_rules.build_rule('l1brsgen', ['l1', 'geo'],
769  self.run_l1brsgen, False),
770  'l1mapgen': processing_rules.build_rule('l1mapgen', ['l1', 'geo'],
771  self.run_l1mapgen, False),
772  'geo': processing_rules.build_rule('geo', ['level 1a'],
773  self.run_geo, False),
774  'l1aextract': processing_rules.build_rule('l1aextract',
775  ['level 1a', 'geo'],
776  self.run_l1aextract,
777  False),
778  'level 1b': processing_rules.build_rule('level 1b', ['level 1a', 'geo'],
779  self.run_l1b, False),
780  'l2gen': processing_rules.build_rule('l2gen', ['l1', 'geo'],
781  self.run_l2gen, False),
782  'l2extract': processing_rules.build_rule('l2extract', ['l2gen'],
783  self.run_l2extract, False),
784  'l2brsgen': processing_rules.build_rule('l2brsgen', ['l2gen'],
785  self.run_l2brsgen, False),
786  'l2mapgen': processing_rules.build_rule('l2mapgen', ['l2gen'],
787  self.run_l2mapgen, False),
788  'l2bin': processing_rules.build_rule('l2bin', ['l2gen'], self.run_l2bin,
789  True),
790  'l3bin': processing_rules.build_rule('l3bin', ['l2bin'], self.run_l3bin,
791  True),
792  'l3mapgen': processing_rules.build_rule('l3mapgen', ['l2bin'],
793  self.run_l3mapgen, False, False),
794  'smigen': processing_rules.build_rule('smigen', ['l3bin'], self.run_smigen,
795  False)
796  }
797  self.rules_order = ['level 1a', 'geo', 'l1aextract', 'level 1b', 'l1brsgen',
798  'l1mapgen','l2gen', 'l2extract', 'l2bin',
799  'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen', 'smigen']
800  self.require_geo = True
802  self.recipe = processing_rules.RuleSet('VIIRS Rules', self.rules_dict, self.rules_order)
803 
804  def run_geo(self, proc):
805  """
806  Set up and run the geolocate_viirs program, returning the exit status of the run.
807  """
808  logging.debug('In run_geolocate_viirs')
809  prog = build_executable_path('geolocate_viirs')
810 
813  if not prog:
814  err_msg = 'Error! Cannot find program geolocate_viirs.'
815  logging.info(err_msg)
816  sys.exit(err_msg)
817  args = ''.join(['-ifile=', proc.input_file, ' -geofile_mod=', proc.output_file])
818  args += get_options(proc.par_data)
819  cmd = ' '.join([prog, args])
820  logging.debug("\nRunning: " + cmd)
821  return execute_command(cmd)
822 
823  def run_l1b(self, proc):
824  logging.debug('In run_viirs_l1b')
825  prog = build_executable_path('calibrate_viirs')
826  # prog='/accounts/melliott/seadas/ocssw/bin/calibrate_viirs'
827 
828  args = ''.join(['ifile=', proc.input_file, ' l1bfile_mod=', proc.output_file])
829  args += get_options(proc.par_data)
830  # The following is no longer needed, but kept for reference.
831  # args += ' --lutdir $OCSSWROOT/run/var/modisa/cal/EVAL --lutver=6.1.15.1z'
832  # args += ' ' + proc.input_file
833  if proc.geo_file:
834  pass
835  # args += ' geofile=' + proc.geo_file
836  cmd = ' '.join([prog, args])
837  logging.debug("\nRunning: " + cmd)
838  return execute_command(cmd)
839 
840  def run_l1aextract(self, proc):
841  """
842  Set up and run l1aextract_viirs.
843  """
844  if 'SWlon' in proc.par_data and 'SWlat' in proc.par_data and\
845  'NElon' in proc.par_data and 'NElat' in proc.par_data:
846  start_line, end_line, start_pixel, end_pixel = get_extract_params(proc)
847  elif 'sline' in proc.par_data and 'eline' in proc.par_data and\
848  'spixl' in proc.par_data and 'epixl' in proc.par_data:
849  start_line = proc.par_data['sline']
850  end_line = proc.par_data['eline']
851  start_pixel = proc.par_data['spixl']
852  end_pixel = proc.par_data['epixl']
853 
854  if (start_line is None) or (end_line is None) or (start_pixel is None)\
855  or (end_pixel is None):
856  err_msg = 'Error! Cannot find l1aextract_viirs coordinates.'
857  log_and_exit(err_msg)
858  l1aextract_prog = os.path.join(proc.ocssw_bin, 'l1aextract_viirs')
859  l1aextract_cmd = ' '.join([l1aextract_prog, proc.input_file,
860  str(start_pixel), str(end_pixel),
861  str(start_line), str(end_line),
862  proc.output_file])
863  logging.debug('Executing l1aextract_viirs command: "%s"',
864  l1aextract_cmd)
865  status = execute_command(l1aextract_cmd)
866  return status
867 
def get_obpg_data_file_object(file_specification):
    """
    Create and return an ObpgDataFile object describing the file named
    in file_specification.
    """
    typer = mlp.get_obpg_file_type.ObpgFileTyper(file_specification)
    file_type, sensor = typer.get_file_type()
    start_time, end_time = typer.get_file_times()
    return obpg_data_file.ObpgDataFile(file_specification, file_type,
                                       sensor, start_time, end_time,
                                       typer.attributes)
879 
def build_executable_path(prog_name):
    """
    Return the full path of prog_name inside OCSSWROOT_DIR (searching the
    'bin' and 'scripts' subdirectories, in that order), or None if the
    program is not found.
    """
    for subdir in ('bin', 'scripts'):
        candidate = os.path.join(OCSSWROOT_DIR, subdir, prog_name)
        if os.path.exists(candidate):
            return candidate
    return None
893 
def build_file_list_file(filename, file_list):
    """
    Write the names in file_list, one per line, to the file named
    filename.
    """
    with open(filename, 'wt') as list_file:
        list_file.writelines(fname + '\n' for fname in file_list)
901 
def build_l2gen_par_file(par_contents, input_file, geo_file, output_file):
    """
    Write a parameter file for l2gen processing into the hidden
    directory and return its path.
    """
    timestamp = datetime.datetime.today().strftime('%Y%m%d%H%M%S')
    par_path = os.path.join(cfg_data.hidden_dir,
                            ''.join(['L2_', timestamp, '.par']))
    # ifile/geofile/ofile are written explicitly; odir and the file-use
    # options are handled elsewhere and must not leak into the par file.
    skip_keys = ('ifile', 'geofile', 'ofile', 'odir')
    with open(par_path, 'wt') as par_file:
        par_file.write('# Automatically generated par file for l2gen\n')
        par_file.write('ifile=' + input_file + '\n')
        if geo_file is not None:
            par_file.write('geofile=' + geo_file + '\n')
        par_file.write('ofile=' + output_file + '\n')
        for l2_opt in par_contents:
            if l2_opt not in skip_keys and l2_opt not in FILE_USE_OPTS:
                par_file.write(l2_opt + '=' + par_contents[l2_opt] + '\n')
    return par_path
921 
def check_options(options):
    """
    Validate the command line options, exiting via log_and_exit if the
    specified input file does not exist.
    """
    if options.ifile and not os.path.exists(options.ifile):
        err_msg = 'Error! The specified input file, {0}, does not exist.'. \
                  format(options.ifile)
        log_and_exit(err_msg)
936 
def clean_files(delete_list):
    """
    Delete unwanted files created during processing: every file in
    delete_list, plus hidden par files older than cfg_data.max_file_age.
    """
    if cfg_data.verbose:
        print ("Cleaning up files")
        sys.stdout.flush()
    deleted_count = 0
    # Remove the "intermediate" files which were needed to complete
    # processing but which weren't explicitly requested as output targets.
    for filepath in delete_list:
        if cfg_data.verbose:
            print ('Deleting {0}'.format(filepath))
            sys.stdout.flush()
        os.remove(filepath)
        deleted_count += 1
    # Expire old par files left behind in the hidden directory.
    for entry in os.listdir(cfg_data.hidden_dir):
        if not entry.endswith('.par'):
            continue
        par_path = os.path.join(cfg_data.hidden_dir, entry)
        age = round(time.time()) - os.path.getmtime(par_path)
        if age > cfg_data.max_file_age:
            if cfg_data.verbose:
                print ('Deleting {0}'.format(par_path))
                sys.stdout.flush()
            os.remove(par_path)
            deleted_count += 1
    if cfg_data.verbose:
        if not deleted_count:
            print ('No files were found for deletion.')
        elif deleted_count == 1:
            print ('One file was deleted.')
        else:
            print ('A total of {0} files were deleted.'.format(deleted_count))
        sys.stdout.flush()
976 
def create_levels_list(rules_sets):
    """
    Return a list of (level_name, [rules_set_names]) pairs covering every
    level (except the first) of every rules set, preserving each set's
    rule ordering.
    """
    first_set = list(rules_sets.keys())[0]
    logging.debug('set_key = %s', (first_set))
    levels = [(lvl, [first_set])
              for lvl in rules_sets[first_set].rules_order[1:]]
    for set_name in list(rules_sets.keys())[1:]:
        for lvl_name in rules_sets[set_name].rules_order[1:]:
            known_names = [entry[0] for entry in levels]
            if lvl_name in known_names:
                # Level already listed; just record this rules set too.
                levels[known_names.index(lvl_name)][1].append(set_name)
            else:
                # Insert a new level right after its predecessor in this
                # rules set's ordering, or at the front if the
                # predecessor is not yet known.
                order = rules_sets[set_name].rules_order
                predecessor = order[order.index(lvl_name) - 1]
                if predecessor in known_names:
                    insert_at = known_names.index(predecessor) + 1
                else:
                    insert_at = 0
                levels.insert(insert_at, (lvl_name, [set_name]))
    return levels
997 
998 
def create_help_message(rules_sets):
    """
    Creates the message to be displayed when help is provided.

    The table of allowed par file section names is built dynamically from
    the levels found in rules_sets, so the help stays in sync with the
    configured rule sets.
    """
    level_names = create_levels_list(rules_sets)
    message = """
  %prog [options] parameter_file

  The parameter_file is similar to, but not exactly like, parameter
  files for OCSSW processing programs:
  - It has sections separated by headers which are denoted by "["
  and "]".
  The section named "main" is required. Its allowed options are:
  ifile - Required entry naming the input file(s) to be processed.
  use_nrt_anc - use near real time ancillary data
  deletefiles - delete all the intermediate data files genereated
  overwrite - overwrite any data files which already exist
  use_existing - use any data files which already exist

  Simultaneous use of both the overwrite and use_existing options
  is not permitted.

  The names for other sections are the programs for which that section's
  entries are to be applied. Intermediate sections which are required for the
  final level of processing do not need to be defined if their default options
  are acceptable. A section can be empty. The final level of processing
  must have a section header, even if no entries appear within that section.
  - Entries within a section appear as key=value. Comma separated lists of
  values can be used when appropriate.
  - Comments are marked by "#"; anything appearing on a line after that
  character is ignored. A line beginning with a "#" is completely ignored.

  In addition to the main section, the following sections are allowed:
  Section name: Applicable Instrument(s):
  ------------- -------------------------\n"""

    # One row per level: the level name padded to 24 columns, then the
    # comma-separated rules sets that provide it.
    lvl_name_help = ''
    for lname in level_names:
        lvl_name_help += ' {0:24s}{1}\n'.\
                         format(lname[0] + ':', ', '.join(lname[1]))

    message += lvl_name_help
    message += """
  Example:

  # Sample par file for %prog.
  [main]
  ifile=2010345034027.L1A_LAC
  [l2gen]
  l2prod=chlor_a
  # final processing level
  """
    return message
1052 
def do_processing(sensors_sets, par_file, cmd_line_ifile=None):
    """
    Perform the processing for each step (element of processor_list) needed.

    Reads the par file, reconciles its file-handling options with those
    already set in cfg_data (command line wins), determines the input
    files, and drives get_processors; intermediate files collected in
    files_to_delete are cleaned up even if processing fails.
    """
    global input_file_data
    #todo: Break this up into smaller parts!
    files_to_delete = []
    input_files_list = []
    (par_contnts, input_files_list) = get_par_file_contents(par_file,
                                                           FILE_USE_OPTS)
    # An ifile given on the command line overrides any ifile in the par file.
    if cmd_line_ifile:
        skip_par_ifile = True
        if os.path.exists(cmd_line_ifile):
            input_files_list = [cmd_line_ifile]
        else:
            msg = 'Error! Specified ifile {0} does not exist.'.\
                  format(cmd_line_ifile)
            sys.exit(msg)
    else:
        skip_par_ifile = False
    if par_contnts['main']:
        if (not skip_par_ifile) and (not 'ifile' in par_contnts['main']):
            msg = 'Error! No ifile specified in the main section of {0}.'.\
                  format(par_file)
            sys.exit(msg)
        # Avoid overwriting file options that are already turned on in cfg_data
        # (from command line input).
        deletefiles, use_existing, overwrite = get_file_handling_opts(par_contnts)
        if deletefiles:
            cfg_data.deletefiles = True
        if use_existing:
            cfg_data.use_existing = True
        if overwrite:
            cfg_data.overwrite = True
        if 'use_nrt_anc' in par_contnts['main'] and \
           int(par_contnts['main']['use_nrt_anc']) == 0:
            cfg_data.get_anc = False
        # An output directory from the command line similarly wins over
        # one named in the par file.
        if 'odir' in par_contnts['main']:
            dname = par_contnts['main']['odir']
            if os.path.exists(dname):
                if os.path.isdir(dname):
                    if cfg_data.output_dir_is_settable:
                        cfg_data.output_dir = os.path.realpath(dname)
                    else:
                        log_msg = 'Ignoring par file specification for output directory, {0}; using command line value, {1}.'.format(par_contnts['main']['odir'], cfg_data.output_dir)
                        logging.info(log_msg)
                else:
                    msg = 'Error! {0} is not a directory.'.format(dname)
                    sys.exit(msg)
            else:
                msg = 'Error! {0} does not exist.'.format(dname)
                sys.exit(msg)

    logging.debug('cfg_data.overwrite: ' + str(cfg_data.overwrite))
    logging.debug('cfg_data.use_existing: ' + str(cfg_data.use_existing))
    logging.debug('cfg_data.deletefiles: ' + str(cfg_data.deletefiles))
    if cfg_data.overwrite and cfg_data.use_existing:
        err_msg = 'Error! Incompatible options overwrite and use_existing were found in {0}.'.format(par_file)
        log_and_exit(err_msg)
    # A single plain-text (non-metadata) input is treated as a file list
    # file naming the real inputs.
    if len(input_files_list) == 1:
        if MetaUtils.is_ascii_file(input_files_list[0]) and not MetaUtils.is_metadata_file(input_files_list[0]):
            input_files_list = read_file_list_file(input_files_list[0])
    input_file_data = get_input_files_type_data(input_files_list)
    if not input_file_data:
        log_and_exit('No valid data files were specified for processing.')
    logging.debug("input_file_data: " + str(input_file_data))
    src_files = get_source_files(input_file_data)
    sys.stdout.flush()
    try:
        get_processors(src_files, input_file_data, par_contnts, files_to_delete)
    except Exception:
        if DEBUG:
            err_msg = get_traceback_message()
            log_and_exit(err_msg)
        else:
            # NOTE(review): the original traceback is discarded here unless
            # DEBUG is set.
            err_msg = "Unrecoverable error encountered in processing."
            log_and_exit(err_msg)
    finally:
        # Intermediate files are removed even when processing fails.
        clean_files(files_to_delete)
    if cfg_data.verbose:
        print ("Processing complete.")
        sys.stdout.flush()
    logging.debug("Processing complete.")
    return
1137 
def execute_command(command):
    """
    Execute what is contained in command and then output the results to log
    files and the console, as appropriate. Returns the command's exit
    status.
    """
    if DEBUG:
        print ("Entering execute_command, cfg_data.verbose =",
               cfg_data.verbose)
    log_msg = 'Executing command:\n {0}'.format(command)
    logging.debug(log_msg)
    subproc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    std_out, err_out = subproc.communicate()
    status = subproc.returncode
    # Fix: communicate() returns bytes here (no text mode requested), so
    # decode before logging/printing to avoid b'...' reprs in the output.
    if isinstance(std_out, bytes):
        std_out = std_out.decode(errors='replace')
    if isinstance(err_out, bytes):
        err_out = err_out.decode(errors='replace')
    logging.info(std_out)
    logging.info(err_out)
    if cfg_data.verbose:
        print (std_out)
    return status
1157 
def extract_par_section(par_contents, section):
    """
    Return a shallow copy of a single section (e.g. L1a, GEO, L1B, L2,
    etc.) of the "par" file contents as a plain dict.
    """
    return dict(par_contents[section])
1166 
def find_geo_file(inp_file):
    """
    Return the name of the GEO file corresponding to inp_file if that
    file exists on disk; otherwise return None.
    """
    base = os.path.basename(inp_file).replace("L1B", "GEO").replace("_LAC", "")
    candidate = os.path.join(os.path.dirname(inp_file), base)
    return candidate if os.path.exists(candidate) else None
1181 
def find_geo_file2(inp_file, instrument, lvl):
    """
    Search for a GEO file corresponding to inp_file for the given
    instrument ('hawkeye', 'modis', or 'viirs') and level ('level 1a' or
    'level 1b'). Return the GEO file name if it exists; otherwise None.
    """
    src_dir = os.path.dirname(inp_file)
    src_base = os.path.basename(inp_file)
    # Fix: default to None so an unrecognized instrument/level combination
    # reports "no GEO file" instead of raising a NameError on geo_base.
    geo_base = None
    if instrument.find('hawkeye') != -1:
        geo_base = src_base.replace("L1A", "GEO")
    elif instrument.find('modis') != -1:
        if lvl.find('level 1a') != -1:
            geo_base = src_base.replace("L1A", "GEO").replace("_LAC", "")
        elif lvl.find('level 1b') != -1:
            geo_base = src_base.replace("L1B", "GEO").replace("_LAC", "")
    elif instrument.find('viirs') != -1:
        if lvl.find('level 1a') != -1:
            geo_base = src_base.replace("L1A", "GEO-M")
        elif lvl.find('level 1b') != -1:
            geo_base = src_base.replace("L1B", "GEO")
    if geo_base is None:
        return None
    geo_file = os.path.join(src_dir, geo_base)
    if not os.path.exists(geo_file):
        geo_file = None
    return geo_file
1208 
def find_viirs_geo_file(proc, first_svm_file):
    """
    Return the GMTCO geolocation file name corresponding to
    first_svm_file if it exists on disk; otherwise return None.
    """
    candidate = first_svm_file.replace('SVM01', 'GMTCO').rstrip()
    return candidate if os.path.exists(candidate) else None
1218 
def get_batch_output_name(file_set, suffix):
    """
    Return the output file name for a "batch" run, i.e. a process that
    can accept multiple inputs, such as l2bin or l3bin.
    """
    mission_prefixes = ['A', 'C', 'O', 'S', 'T']
    stem = 'out'
    if not len(file_set):
        err_msg = "Error! An output file name could not be determined."
        log_and_exit(err_msg)
    elif len(file_set) == 1:
        stem = os.path.splitext(file_set[0])[0]
    else:
        # Locate the chronologically first and last files in the set.
        earliest_file = latest_file = file_set[0]
        earliest_date = latest_date = get_file_date(file_set[0])
        for cur_file in file_set[1:]:
            cur_date = get_file_date(cur_file)
            if cur_date < earliest_date:
                earliest_file, earliest_date = cur_file, cur_date
            elif cur_date > latest_date:
                latest_file, latest_date = cur_file, cur_date
        # Keep the mission letter only when both endpoints share it.
        if (earliest_file[0] == latest_file[0]) and \
           (earliest_file[0] in mission_prefixes):
            stem = earliest_file[0]
        else:
            stem = ''
        first_stamp = earliest_date.strftime('%Y%j')
        last_stamp = latest_date.strftime('%Y%j')
        if first_stamp == last_stamp:
            stem += first_stamp
        else:
            stem += first_stamp + last_stamp
    return ''.join([stem, '.', suffix])
1256 
def get_data_file_option(par_contents, opt_text):
    """
    Return True when the option named opt_text appears in the main
    section of par_contents with a "true" value; otherwise False.
    """
    if opt_text not in par_contents['main']:
        return False
    return mlp_utils.is_option_value_true(par_contents['main'][opt_text].upper())
1267 
1269  """
1270  Run the lonlat2pixline program and return the parameters found.
1271  """
1272  if proc.geo_file:
1273  # MODIS
1274  in_file = proc.geo_file
1275  else:
1276  # SeaWiFS
1277  in_file = proc.input_file
1278  args = ' '.join([in_file, proc.par_data['SWlon'],
1279  proc.par_data['SWlat'], proc.par_data['NElon'],
1280  proc.par_data['NElat']])
1281  lonlat_prog = os.path.join(proc.ocssw_bin, 'lonlat2pixline')
1282  lonlat_cmd = ' '.join([lonlat_prog, args])
1283  logging.debug('Executing lonlat2pixline command: "%s"', lonlat_cmd)
1284  process_output = subprocess.Popen(lonlat_cmd, shell=True,
1285  stdout=subprocess.PIPE).communicate()[0]
1286  lonlat_output = process_output.splitlines()
1287  start_line = None
1288  end_line = None
1289  start_pixel = None
1290  end_pixel = None
1291  for line in lonlat_output:
1292  line_text = str(line).strip("'")
1293  if 'sline' in line_text:
1294  start_line = int(line_text.split('=')[1])
1295  if 'eline' in line_text:
1296  end_line = int(line_text.split('=')[1])
1297  if 'spixl' in line_text:
1298  start_pixel = int(line_text.split('=')[1])
1299  if 'epixl' in line_text:
1300  end_pixel = int(line_text.split('=')[1])
1301  return start_line, end_line, start_pixel, end_pixel
1302 
def get_file_date(filename):
    """
    Get a Python datetime object from a recognized file name's year and
    day of year. Exits via log_and_exit for unrecognized names.
    """
    base_filename = os.path.basename(filename)
    if re.match(r'[ACMOQSTV]\d\d\d\d\d\d\d.*', base_filename):
        year = int(base_filename[1:5])
        doy = int(base_filename[5:8])
    elif re.match(r'\d\d\d\d\d\d\d.*', base_filename):
        # Some Aquarius
        year = int(base_filename[0:4])
        doy = int(base_filename[4:7])
    elif re.match(r'\w*_npp_d\d\d\d\d\d\d\d_.*', base_filename):
        # NPP: after stripping the prefix the name starts with YYYYDDD.
        prefix_removed_name = re.sub(r'\w*_npp_d', '', base_filename)
        year = int(prefix_removed_name[0:4])
        # Fix: day of year is characters 4-7; the previous [5:7] slice
        # dropped the leading digit of the day.
        doy = int(prefix_removed_name[4:7])
    else:
        err_msg = 'Unable to determine date for {0}'.format(filename)
        log_and_exit(err_msg)
    file_date = datetime.datetime(year, 1, 1) + datetime.timedelta(doy - 1)
    return file_date
1325 
def get_file_handling_opts(par_contents):
    """
    Return the (deletefiles, use_existing, overwrite) file handling flags
    found in par_contents.
    """
    return (get_data_file_option(par_contents, 'deletefiles'),
            get_data_file_option(par_contents, 'use_existing'),
            get_data_file_option(par_contents, 'overwrite'))
1334 
def get_input_files(par_data):
    """
    Return the input files named on the uber par file's ifile line, in a
    file list file, or both, with duplicates removed; None if no input
    files were specified at all.
    """
    from_ifiles = []
    from_infilelist = []
    main_section = par_data['main']
    if 'ifile' in main_section:
        # Drop any trailing comment, then treat common separators as
        # whitespace so comma/colon/bracketed lists all work.
        ifile_text = main_section['ifile'].split('#', 2)[0]
        from_ifiles = re.sub(r'[\t,:\[\]()"\']', ' ', ifile_text).split()
    if 'infilelist' in main_section:
        list_name = main_section['infilelist']
        if os.path.isfile(list_name) and os.access(list_name, os.R_OK):
            with open(list_name, 'rt') as list_file:
                from_infilelist = [ln.rstrip() for ln in list_file
                                   if not re.match(r'^\s*#', ln)]
    if not from_ifiles and not from_infilelist:
        return None
    # Make sure there are no duplicates. Tests with timeit showed that
    # list(set()) is much faster than a "uniqify" function.
    return list(set(from_ifiles + from_infilelist))
1361 
def get_input_files_type_data(input_files_list):
    """
    Return a dictionary mapping each input file to a (file_type,
    instrument) tuple, where file_type is normalized to a processing
    level name (L0, L1A, L2, etc.). Files whose type cannot be
    determined are reported and omitted.
    """
    converter = {
        'geo': 'geo',
        'level 0': 'level 0',
        'level 1 browse data': 'l1brsgen',
        'level 1a': 'level 1a',
        'level 1b': 'level 1b',
        'sdr': 'level 1b',
        'level 2': 'l2gen',
        'level 3 binned': 'l3bin',
        'level 3 smi': 'smigen'
    }
    input_file_type_data = {}
    for inp_file in input_files_list:
        file_typer = mlp.get_obpg_file_type.ObpgFileTyper(inp_file)
        file_type, file_instr = file_typer.get_file_type()
        type_key = file_type.lower()
        if type_key in converter:
            input_file_type_data[inp_file] = (converter[type_key],
                                              file_instr.lower())
        else:
            warn_msg = "Warning: Unable to determine a type for file {0}. It will not be processed.".format(inp_file)
            print (warn_msg)
            logging.info(warn_msg)
    return input_file_type_data
1402 
def get_intermediate_processors(sensor, existing_procs, rules, lowest_source_level):
    """
    Create processor objects for products which are needed, but not
    explicitly specified in the par file.
    """
    existing_products = [proc.target_type for proc in existing_procs]
    needed_products = get_intermediate_products(existing_products, rules,
                                                lowest_source_level)
    return [processor.Processor(sensor, rules, prod, {},
                                cfg_data.hidden_dir)
            for prod in needed_products if prod not in existing_products]
1420 
def get_intermediate_products(existing_prod_names, ruleset,
                              lowest_source_level):
    """
    Return a sorted, duplicate-free list of the programs needed, but not
    explicitly specified by the par file, to produce existing_prod_names.
    """
    required_progs = []
    for prog in existing_prod_names:
        candidates = get_required_programs(prog, ruleset,
                                           lowest_source_level)
        if candidates is not None:
            required_progs.extend(candidates)
    required_progs = uniqify_list(required_progs)
    required_progs.sort()
    return required_progs
1437 
1439  """
1440  Returns the extension for an L2 file. For the time being, this is
1441  just '.L2'; however, different extensions may be wanted in the future, thus
1442  this function is in place.
1443  """
1444  return '.L2'
1445 
1447  """
1448  Returns the extension for an L3 Binned file. For the time being, this is
1449  just '.L3bin'; however, different extensions may be wanted in the future,
1450  thus this function is in place.
1451  """
1452  return '.L3b'
1453 
def get_lowest_source_level(source_files):
    """
    Find the level of the lowest level source file to be processed,
    according to the fixed processing-level ordering.
    """
    order = ['level 1a', 'geo', 'level 1b', 'l2gen',
             'l2bin', 'l3bin', 'l3mapgen']
    levels = list(source_files.keys())
    if len(levels) == 1:
        return levels[0]
    return min(levels, key=order.index)
1469 
def get_options(par_data):
    """
    Build the command line option string for a program from the
    corresponding section of the uber par file. The ofile/odir entries
    and file-use options are handled elsewhere and are skipped here.
    """
    options = ''
    for key in par_data:
        if key in ('ofile', 'odir') or key.lower() in FILE_USE_OPTS:
            continue
        if par_data[key]:
            options += ' ' + key + '=' + par_data[key]
        else:
            # A bare flag with no value.
            options += ' ' + key
    return options
1486 
def get_output_name2(inp_files, targ_prog, suite=None, oformt=None, res=None):
    """
    Determine what the output name would be if targ_prog is run on
    inp_files.
    """
    cl_opts = optparse.Values()
    cl_opts.suite = suite
    cl_opts.oformat = oformt
    cl_opts.resolution = res
    if isinstance(inp_files, list):
        file_objs = inp_files
    else:
        file_objs = [get_obpg_data_file_object(inp_files)]
    return get_output_name.get_output_name(file_objs, targ_prog, cl_opts)
1503 
def get_output_name3(input_name, input_files, suffix):
    """
    Determine the output name for a program to be run.

    MODIS level 0 inputs get a mission-letter + timestamp name derived
    from a constructor (.const) file or a MOD00 file name; every other
    input just has its extension replaced by suffix.
    """
    # Todo: rename to get_output_name and delete other get_output_name
    output_name = None
    if input_name in input_files:
        if input_files[input_name][0] == 'level 0' and \
           input_files[input_name][1].find('modis') != -1:
            if input_files[input_name][1].find('aqua') != -1:
                first_char = 'A'
            else:
                first_char = 'T'
            time_stamp = ''
            if os.path.exists(input_name + '.const'):
                with open(input_name + '.const') as constructor_file:
                    constructor_data = constructor_file.readlines()
                for line in constructor_data:
                    if line.find('starttime=') != -1:
                        # Fix: take the remainder of the line after '=';
                        # the previous line[...find('=') + 1] grabbed
                        # only a single character.
                        start_time = line[line.find('=') + 1:].strip()
                        break
                time_stamp = ProcUtils.date_convert(start_time, 't', 'j')
            else:
                if re.match(r'MOD00.P\d\d\d\d\d\d\d\.\d\d\d\d', input_name):
                    time_stamp = input_name[7:14] + input_name[15:19] + '00'
                else:
                    err_msg = "Cannot determine time stamp for input file {0}".\
                              format(input_name)
                    log_and_exit(err_msg)
            output_name = first_char + time_stamp + '.L1A'
        else:
            (dirname, basename) = os.path.split(input_name)
            basename_parts = basename.rsplit('.', 2)
            output_name = os.path.join(dirname, basename_parts[0] + '.' +
                                       suffix)
    else:
        (dirname, basename) = os.path.split(input_name)
        basename_parts = basename.rsplit('.', 2)
        output_name = os.path.join(dirname, basename_parts[0] + '.' + suffix)
    return output_name
1545 
def get_par_file_contents(par_file, acceptable_single_keys):
    """
    Read the input "par" file and return (par_contents, input_files_list).

    Section names are normalized to their canonical form via
    acceptable_par_keys (e.g. 'l1a' -> 'level 1a'). An unrecognized
    section name, or a missing 'main' section, is a fatal error.
    """
    # Fix: the original dict literal listed 'l1brsgen' twice; the
    # duplicate (silently ignored by Python) has been removed.
    acceptable_par_keys = {
        'level 0' : 'level 0', 'l0' : 'level 0',
        'level 1a' : 'level 1a', 'l1a' : 'level 1a', 'l1agen': 'level 1a',
        'modis_L1A': 'level 1a',
        'l1brsgen': 'l1brsgen',
        'l1mapgen': 'l1mapgen',
        'l1aextract': 'l1aextract',
        'l1aextract_modis': 'l1aextract_modis',
        'l1aextract_seawifs' : 'l1aextract_seawifs',
        'l1aextract_viirs' : 'l1aextract_viirs',
        'geo' : 'geo', 'modis_GEO': 'geo', 'geolocate_viirs': 'geo',
        'geolocate_hawkeye': 'geo',
        'level 1b' : 'level 1b', 'l1b' : 'level 1b', 'l1bgen' : 'level 1b',
        'modis_L1B': 'level 1b', 'calibrate_viirs': 'level 1b',
        'level 2' : 'l2gen',
        'l2gen' : 'l2gen',
        'l2bin' : 'l2bin',
        'l2brsgen' : 'l2brsgen',
        'l2extract' : 'l2extract',
        'l2mapgen' : 'l2mapgen',
        'l3bin' : 'l3bin',
        'l3mapgen' : 'l3mapgen',
        'smigen' : 'smigen',
        'main' : 'main'
    }
    if cfg_data.verbose:
        print ("Processing %s" % par_file)
    par_reader = uber_par_file_reader.ParReader(par_file,
                                                acceptable_single_keys,
                                                acceptable_par_keys)
    par_contents = par_reader.read_par_file()
    ori_keys = list(par_contents.keys())
    for key in ori_keys:
        if key in acceptable_par_keys:
            # Rename the section to its canonical name, if needed.
            if key != acceptable_par_keys[key]:
                par_contents[acceptable_par_keys[key]] = par_contents[key]
                del par_contents[key]
        else:
            acc_key_str = ', '.join(list(acceptable_par_keys.keys()))
            err_msg = """Error! Parameter file {0} contains a section titled "{1}", which is not a recognized program.
The recognized programs are: {2}""".format(par_file, key, acc_key_str)

            log_and_exit(err_msg)
    if 'main' in par_contents:
        input_files_list = get_input_files(par_contents)
    else:
        err_msg = 'Error! Could not find section "main" in {0}'.format(par_file)
        log_and_exit(err_msg)
    return par_contents, input_files_list
1600 
def get_processors2(sensor, par_contents, rules, lowest_source_level):
    """
    Determine the processors which are needed: one per non-main par file
    section, plus any intermediate processors they require.
    """
    processors = []
    for key in par_contents:
        if key == 'main':
            continue
        section_contents = extract_par_section(par_contents, key)
        processors.append(processor.Processor(sensor, rules, key,
                                              section_contents,
                                              cfg_data.hidden_dir))
    if processors:
        # get_intermediate_processors needs a sorted list.
        processors.sort()
        processors += get_intermediate_processors(sensor, processors, rules,
                                                  lowest_source_level)
        processors.sort()
    return processors
1618 
def exe_processor(proc, src_files, src_lvl):
    """
    Execute a single processor (batch or nonbatch) and return the name
    of the output file it produced (None when nothing was produced or no
    rule action exists for the target type).
    """
    if proc.out_directory == cfg_data.hidden_dir:
        proc.out_directory = cfg_data.output_dir
    if proc.requires_batch_processing():
        logging.debug('Performing batch processing for ' + str(proc))
        return run_batch_processor(proc, src_files[src_lvl])
    if proc.rule_set.rules[proc.target_type].action:
        logging.debug('Performing nonbatch processing for ' + str(proc))
        out_file = run_nonbatch_processor(proc)
        if not out_file:
            msg = 'The {0} processor produced no output files.'.format(proc.target_type)
            logging.info(msg)
        return out_file
    msg = '-I- There is no way to create {0} files for {1}.'.format(proc.target_type, proc.instrument)
    logging.info(msg)
1645 
def get_processors(src_files, input_files, par_contents, files_to_delete):
    """
    Determine the processors which are needed and run them.

    Walks the non-'main' sections of the par file in order.  Sections at or
    below l2gen are run per input file (creating intermediate level 1a/geo/
    level 1b processors as needed); sections above l2gen are run through
    their required_types chain.  Output files are appended to src_files[key]
    and, when cfg_data.deletefiles is set, recorded in files_to_delete for
    later cleanup.  Mutates src_files in place; returns None.
    """
    # Canonical ordering of processing levels/programs, lowest first.
    order = ['level 0', 'level 1a', 'geo', 'l1aextract',
             'level 1b', 'l1brsgen', 'l1mapgen', 'l2gen', 'l2extract',
             'l2bin', 'l2brsgen', 'l2mapgen', 'l3bin', 'l3mapgen',
             'smigen']
    key_list = list(par_contents.keys())
    last_key = key_list[-1]
    for key in key_list:
        if key != 'main':
            section_contents = extract_par_section(par_contents, key)
            if not order.index(key) > order.index('l2gen'):
                # Target is at or below l2gen: process each source file
                # individually, building geo/level 1a/level 1b steps on
                # demand.
                src_lvls = list(src_files.keys())
                if not key in src_files:
                    src_files[key] = []
                for src_lvl in src_lvls:
                    if order.index(src_lvl) < order.index('l2gen'):
                        for file in src_files[src_lvl]:
                            file_typer = mlp.get_obpg_file_type.ObpgFileTyper(file)
                            instrument = file_typer.get_file_type()[1].lower().split()[0]
                            # instrument = input_files[file][1].split()[0]
                            logging.debug("instrument: " + instrument)
                            # Fall back to the 'general' sensor recipe for
                            # unrecognized instruments.
                            if instrument in sensors_sets:
                                rules = sensors_sets[instrument].recipe
                                sensor = sensors_sets[instrument]
                            else:
                                rules = sensors_sets['general'].recipe
                                sensor = sensors_sets['general']
                            proc = processor.Processor(sensor, rules, key, section_contents,
                                                       cfg_data.hidden_dir)
                            proc.input_file = file
                            if file_typer.get_file_type()[0].lower().find('level 0') == -1:
                                # Non-level-0 input: create a GEO file first
                                # if the sensor needs one.
                                if sensor.require_geo and key != 'geo':
                                    proc.geo_file = find_geo_file2(proc.input_file, instrument, src_lvl)

                                    if not proc.geo_file:
                                        if src_lvl.find('level 1b') != -1:
                                            err_msg = 'Error! Need level 1a file for GEO'
                                            log_and_exit(err_msg)
                                        proc_geo = processor.Processor(sensor, rules, 'geo', {},
                                                                       cfg_data.hidden_dir)
                                        proc_geo.input_file = file
                                        print ('Running geo on file {0}.'.format(file))
                                        logging.debug('')
                                        log_msg = 'Processing for geo:'
                                        logging.debug(log_msg)
                                        proc.geo_file = exe_processor(proc_geo, src_files, src_lvl)
                                        if cfg_data.deletefiles:
                                            if proc.geo_file:
                                                files_to_delete.append(proc.geo_file)
                                # l2gen may need an intermediate level 1b.
                                if key == 'l2gen' and sensor.require_l1b_for_l2gen and src_lvl.find('level 1b') == -1:
                                    proc_l1b = processor.Processor(sensor, rules, 'level 1b', {},
                                                                   cfg_data.hidden_dir)
                                    if sensor.require_geo:
                                        proc_l1b.input_file = file
                                        proc_l1b.geo_file = proc.geo_file
                                    print ('Running level 1b on file {0}.'.format(file))
                                    logging.debug('')
                                    log_msg = 'Processing for level 1b:'
                                    logging.debug(log_msg)
                                    proc.input_file = exe_processor(proc_l1b, src_files, src_lvl)
                                    if cfg_data.deletefiles:
                                        if proc.input_file:
                                            files_to_delete.append(proc.input_file)
                                print ('Running {0} on file {1}.'.format(proc.target_type, proc.input_file))
                                logging.debug('')
                                log_msg = 'Processing for {0}:'.format(proc.target_type)
                                logging.debug(log_msg)
                                out_file = exe_processor(proc, src_files, src_lvl)
                                src_files[key].append(out_file)
                                if cfg_data.deletefiles and key != last_key:
                                    if out_file:
                                        files_to_delete.append(out_file)
                                # Extracted L1A output replaces the level 1a
                                # source list for downstream steps.
                                if key.find('l1aextract') != -1:
                                    src_files['level 1a'] = src_files[key]
                                    del src_files['l1aextract']
                                    # input_files[src_files[src_lvl][0]] = input_files[file]
                            else:
                                # Level 0 input: run level 1a first, then
                                # geo/level 1b as required.
                                if key != 'level 1a':
                                    proc_l1a = processor.Processor(sensor, rules, 'level 1a', {},
                                                                   cfg_data.hidden_dir)
                                    proc_l1a.input_file = file
                                    print ('Running level 1a on file {0}.'.format(file))
                                    logging.debug('')
                                    log_msg = 'Processing for level 1a:'
                                    logging.debug(log_msg)
                                    proc.input_file = exe_processor(proc_l1a, src_files, src_lvl)
                                    if cfg_data.deletefiles:
                                        if proc.input_file:
                                            files_to_delete.append(proc.input_file)
                                if sensor.require_geo and key != 'geo' and key != 'level 1a':
                                    proc.geo_file = find_geo_file2(proc.input_file, instrument, 'level 1a')
                                    if not proc.geo_file:
                                        proc_geo = processor.Processor(sensor, rules, 'geo', {},
                                                                       cfg_data.hidden_dir)
                                        proc_geo.input_file = proc.input_file
                                        print ('Running geo on file {0}.'.format(proc.input_file))
                                        logging.debug('')
                                        log_msg = 'Processing for geo:'
                                        logging.debug(log_msg)
                                        proc.geo_file = exe_processor(proc_geo, src_files, src_lvl)
                                        if cfg_data.deletefiles:
                                            if proc.geo_file:
                                                files_to_delete.append(proc.geo_file)
                                if key == 'l2gen' and sensor.require_l1b_for_l2gen:
                                    proc_l1b = processor.Processor(sensor, rules, 'level 1b', {},
                                                                   cfg_data.hidden_dir)
                                    if sensor.require_geo:
                                        proc_l1b.input_file = proc.input_file
                                        proc_l1b.geo_file = proc.geo_file
                                    print ('Running level 1b on file {0}.'.format(proc.input_file))
                                    logging.debug('')
                                    log_msg = 'Processing for level 1b:'
                                    logging.debug(log_msg)
                                    proc.input_file = exe_processor(proc_l1b, src_files, src_lvl)
                                    if cfg_data.deletefiles:
                                        if proc.input_file:
                                            files_to_delete.append(proc.input_file)
                                print ('Running {0} on file {1}.'.format(proc.target_type, proc.input_file))
                                logging.debug('')
                                log_msg = 'Processing for {0}:'.format(proc.target_type)
                                logging.debug(log_msg)
                                out_file = exe_processor(proc, src_files, src_lvl)
                                src_files[key].append(out_file)
                                if cfg_data.deletefiles and key != last_key:
                                    if out_file:
                                        files_to_delete.append(out_file)
                                if key.find('l1aextract') != -1:
                                    src_files['level 1a'] = src_files[key]
                                    del src_files['l1aextract']
                                    # input_files[src_files[src_lvl][0]] = input_files[file]
                        if len(src_files) > 1:
                            del src_files[src_lvl]
            else:
                # Target is above l2gen: use the general recipe and satisfy
                # each processor's required_types chain first.
                src_lvls = list(src_files.keys())
                rules = sensors_sets['general'].recipe
                sensor = sensors_sets['general']
                if not key in src_files:
                    src_files[key] = []
                for src_lvl in src_lvls:
                    if not order.index(src_lvl) < order.index('l2gen'):
                        for file in src_files[src_lvl]:
                            proc = processor.Processor(sensor, rules, key, section_contents,
                                                       cfg_data.hidden_dir)
                            proc.input_file = file
                            for program in proc.required_types:
                                if not program in src_files:
                                    proc1 = processor.Processor(sensor, rules, program, {},
                                                                cfg_data.hidden_dir)
                                    proc1.input_file = file
                                    # proc1.deletefiles = cfg_data.deletefiles
                                    for program2 in proc1.required_types:
                                        if program2.find(src_lvl) == -1:
                                            proc2 = processor.Processor(sensor, rules, program2, {},
                                                                        cfg_data.hidden_dir)
                                            proc2.input_file = file
                                            # proc2.deletefiles = cfg_data.deletefiles
                                            print ('Running {0} on file {1}.'.format(proc2.target_type, proc2.input_file))
                                            logging.debug('')
                                            log_msg = 'Processing for {0}:'.format(proc2.target_type)
                                            logging.debug(log_msg)
                                            proc1.input_file = exe_processor(proc2, src_files, src_lvl)
                                            if cfg_data.deletefiles:
                                                if proc1.input_file:
                                                    files_to_delete.append(proc1.input_file)
                                            print ('Running {0} on file {1}.'.format(proc1.target_type, proc1.input_file))
                                            logging.debug('')
                                            log_msg = 'Processing for {0}:'.format(proc1.target_type)
                                            logging.debug(log_msg)
                                            proc.input_file = exe_processor(proc1, src_files, src_lvl)
                                            if cfg_data.deletefiles:
                                                if proc.input_file:
                                                    files_to_delete.append(proc.input_file)
                                            del src_files[src_lvl]
                                print ('Running {0} on file {1}.'.format(proc.target_type, proc.input_file))
                                logging.debug('')
                                log_msg = 'Processing for {0}:'.format(proc.target_type)
                                logging.debug(log_msg)
                                out_file = exe_processor(proc, src_files, program)
                                src_files[key].append(out_file)
                                if cfg_data.deletefiles and key != last_key:
                                    if out_file:
                                        files_to_delete.append(out_file)
                                if program in src_files:
                                    del src_files[program]
                                if key.find('l2extract') != -1:
                                    src_files['l2gen'] = src_files[key]
                                    del src_files['l2extract']
                            # Bug fix: requires_batch_processing is a method
                            # (exe_processor calls it with parentheses); the
                            # bare attribute was always truthy, so the break
                            # fired unconditionally.  Call it instead.
                            if proc.requires_batch_processing():
                                break

    return
1840 
def get_required_programs(target_program, ruleset, lowest_source_level):
    """
    Returns the programs required to produce the desired final output.

    Recursively walks ruleset.rules from target_program back toward
    lowest_source_level, prepending each prerequisite so the returned list
    is ordered lowest-level first.
    """
    programs_to_run = []
    cur_rule = ruleset.rules[target_program]
    src_types = cur_rule.src_file_types
    # Base case: a rule whose first source type equals its own target type
    # is terminal; the target itself is the only program needed.
    if src_types[0] == cur_rule.target_type:
        programs_to_run = [target_program]
    else:
        for src_type in src_types:
            if src_type in ruleset.rules:
                # Only schedule source types above the lowest level we
                # actually have files for.
                if ruleset.order.index(src_type) > \
                   ruleset.order.index(lowest_source_level):
                    programs_to_run.insert(0, src_type)
                    # NOTE(review): when a rule has two source types, the
                    # second is prepended here as well, which can duplicate
                    # entries — presumably deduplicated downstream; confirm.
                    if len(src_types) > 1:
                        programs_to_run.insert(0, src_types[1])
                # Recurse for this source's own prerequisites; inserting
                # each at index 0 keeps lowest-level programs first.
                programs_to_add = get_required_programs(src_type, ruleset,
                                                        lowest_source_level)
                for prog in programs_to_add:
                    programs_to_run.insert(0, prog)
    return programs_to_run
1863 
def get_source_geo_files(source_files, proc_src_types, proc_src_ndx):
    """
    :param source_files: list of source files
    :param proc_src_types: list of source types for the processor
    :param proc_src_ndx: index into the proc_src_types list pointing to the
        source type to use to get the input files
    :return: list of GEO files that correspond to the files in source_files

    Exits via log_and_exit if any input file has no corresponding GEO file.
    """
    inp_files = source_files[proc_src_types[proc_src_ndx]]
    geo_files = []
    for inp_file in inp_files:
        geo_file = find_geo_file(inp_file)
        if geo_file:
            geo_files.append(geo_file)
        else:
            # Bug fix: the original message formatted geo_file, which is
            # None/empty in this branch; report the input file we failed
            # to find a GEO file for instead.
            err_msg = 'Error! Cannot find GEO ' \
                      'file for {0}.'.format(inp_file)
            log_and_exit(err_msg)
    return geo_files
1883 
def get_source_file_sets(proc_src_types, source_files, src_key, requires_all_sources):
    """
    Returns the set of source files needed.

    For a single source type, returns source_files[src_key] directly.  For
    two source types with requires_all_sources set, returns a list of
    (file, file) pairs, resolving a missing 'geo' side via
    get_source_geo_files.  Exits via log_and_exit on unsatisfiable inputs.
    """
    if len(proc_src_types) == 1:
        try:
            src_file_sets = source_files[src_key]
        except Exception:
            # print "Exception encountered: "
            # e_info = sys.exc_info()
            # err_msg = ''
            # for info in e_info:
            #     err_msg += "  " + str(info)
            if DEBUG:
                err_msg = get_traceback_message()
                log_and_exit(err_msg)
            else:
                err_msg = 'Error! Unable to determine what source files are required for the specified output files.'
                log_and_exit(err_msg)
    else:
        if requires_all_sources:
            if len(proc_src_types) == 2:
                if proc_src_types[0] in source_files \
                        and proc_src_types[1] in source_files:
                    # Both source types present: pair them positionally.
                    src_file_sets = list(zip(source_files[proc_src_types[0]],
                                             source_files[proc_src_types[1]]))
                else:
                    # Only one side present; a missing 'geo' side can be
                    # located on disk, anything else is fatal.
                    if proc_src_types[0] in source_files:
                        if proc_src_types[1] == 'geo':
                            geo_files = get_source_geo_files(source_files, proc_src_types, 0)
                            src_file_sets = list(zip(source_files[proc_src_types[0]],
                                                     geo_files))
                        else:
                            err_msg = 'Error! Cannot find all {0} and' \
                                      ' {1} source files.'.format(proc_src_types[0],
                                                                  proc_src_types[1])
                            log_and_exit(err_msg)
                    elif proc_src_types[1] in source_files:
                        if proc_src_types[0] == 'geo':
                            geo_files = get_source_geo_files(source_files, proc_src_types, 1)
                            src_file_sets = list(zip(source_files[proc_src_types[1]],
                                                     geo_files))
                        else:
                            err_msg = 'Error! Cannot find all {0} and' \
                                      ' {1} source files.'.format(proc_src_types[0],
                                                                  proc_src_types[1])
                            log_and_exit(err_msg)
                    else:
                        err_msg = 'Error! Cannot find all source files.'
                        log_and_exit(err_msg)
            else:
                err_msg = 'Error! Encountered too many source file types.'
                log_and_exit(err_msg)
        else:
            # Any one matching source type will do; the LAST match in
            # proc_src_types wins.  NOTE(review): if no type matches,
            # src_file_sets is unbound and the return raises NameError —
            # presumably callers guarantee at least one match; confirm.
            for proc_src_type in proc_src_types:
                if proc_src_type in source_files:
                    src_file_sets = source_files[proc_src_type]
    return src_file_sets
1942 
def get_source_files(input_files):
    """
    Returns a dictionary mapping each file type found in input_files to the
    list of file paths of that type, preserving encounter order.
    """
    grouped = {}
    for path in input_files:
        # input_files maps path -> metadata tuple; element 0 is the type.
        grouped.setdefault(input_files[path][0], []).append(path)
    return grouped
1956 
def get_source_products_types(targt_prod, ruleset):
    """
    Return the list of source product types needed to produce the final product.

    Walks ruleset.order backward from targt_prod, collecting the source file
    types of every rule whose target is already in the needed list.
    """
    src_prod_names = [targt_prod]
    targt_pos = ruleset.order.index(targt_prod)
    new_prod_names = []
    # NOTE(review): range(targt_pos, 1, -1) stops before indices 1 and 0,
    # so the two lowest-ordered rules are never examined — presumably they
    # are terminal source levels; confirm against the rule sets.
    for pos in range(targt_pos, 1, -1):
        for prod_name in src_prod_names:
            if ruleset.rules[ruleset.order[pos]].target_type == prod_name:
                for src_typ in ruleset.rules[ruleset.order[pos]].src_file_types:
                    new_prod_names.append(src_typ)
        # new_prod_names is cumulative, so earlier finds are re-appended on
        # later passes; duplicates can appear in the result.
        src_prod_names += new_prod_names
    return src_prod_names
1971 
def get_traceback_message():
    """
    Returns an error message built from traceback data.

    Must be called from inside an except block so sys.exc_info() and
    traceback.format_exc() describe the active exception.
    """
    exc_parts = [str(l) for l in sys.exc_info()]
    # Reduce e.g. "<class 'module.SomeError'>" to "SomeError".
    err_type_parts = str(exc_parts[0]).strip().split('.')
    err_type = err_type_parts[-1].strip("'>")
    tb_data = traceback.format_exc()
    # NOTE(review): picking the third-from-last formatted line and its
    # second comma-separated field assumes a fixed traceback layout;
    # fragile if the traceback depth or formatting changes — confirm.
    tb_line = tb_data.splitlines()[-3]
    line_num = tb_line.split(',')[1]
    st_data = traceback.extract_stack()
    err_file = os.path.basename(st_data[-1][0])
    msg = 'Error! The {0} program encountered an unrecoverable {1}, {2}, at {3} of {4}!'.\
          format(cfg_data.prog_name,
                 err_type, exc_parts[1], line_num.strip(), err_file)
    return msg
1988 
def initialze_sensors():
    """
    Initialize sensors.

    Builds the lookup table mapping instrument name to its Sensor object;
    'general' is the fallback entry for unrecognized instruments.
    """
    return {
        'general': Sensor(),
        'goci': Sensor_goci(),
        'hawkeye': Sensor_hawkeye(),
        'meris': Sensor_meris(),
        'modis': Sensor_modis(),
        'seawifs': Sensor_seawifs(),
        'viirs': Sensor_viirs(),
    }
2001 
def log_and_exit(error_msg):
    """
    Record error_msg in the debug log, then exit with error_msg going to stderr
    and an exit code of 1; see:
    http://docs.python.org/library/sys.html#exit.

    Never returns: sys.exit raises SystemExit.
    """
    logging.info(error_msg)
    sys.exit(error_msg)
2010 
def main():
    """
    main processing function.

    Parses the command line, builds the global ProcessorConfig (cfg_data),
    starts logging, and hands the par file to do_processing().  Returns 0;
    fatal errors exit via log_and_exit()/sys.exit() instead.
    """
    global cfg_data
    global DEBUG
    # rules_sets = build_rules()
    global sensors_sets
    sensors_sets = initialze_sensors()
    cl_parser = optparse.OptionParser(usage=create_help_message(sensors_sets),
                                      version=' '.join(['%prog', __version__]))
    (options, args) = process_command_line(cl_parser)

    if len(args) < 1:
        print ("\nError! No file specified for processing.\n")
        cl_parser.print_help()
    else:
        if options.debug:
            # Don't just set DEBUG = options.debug, as that would override the
            # in-program setting.
            DEBUG = True
        check_options(options)
        # cfg_data = ProcessorConfig('.seadas_data', os.getcwd(),
        #                            options.verbose, options.overwrite,
        #                            options.use_existing, options.tar_file,
        #                            options.timing, options.odir)
        cfg_data = ProcessorConfig('.seadas_data', os.getcwd(),
                                   options.verbose, options.overwrite,
                                   options.use_existing,
                                   options.deletefiles, options.odir)
        if not os.access(cfg_data.hidden_dir, os.R_OK):
            log_and_exit("Error! The working directory is not readable!")
        # args[0] is the par file naming the processing to perform.
        if os.path.exists(args[0]):
            log_timestamp = datetime.datetime.today().strftime('%Y%m%d%H%M%S')
            start_logging(log_timestamp)
            try:
                # if cfg_data.timing:
                #     main_timer = benchmark_timer.BenchmarkTimer()
                #     main_timer.start()
                #     do_processing(sensors_sets, args[0])
                #     main_timer.end()
                #     timing_msg = 'Total processing time: {0}'.format(
                #         str(main_timer.get_total_time_str()))
                #     print (timing_msg)
                #     logging.info(timing_msg)
                # else:
                if options.ifile:
                    do_processing(sensors_sets, args[0], options.ifile)
                else:
                    do_processing(sensors_sets, args[0])
            except Exception:
                # Top-level boundary: report a traceback in debug mode,
                # otherwise a generic message.
                if DEBUG:
                    err_msg = get_traceback_message()
                    log_and_exit(err_msg)
                else:
                    # todo: make a friendlier error message
                    err_msg = 'Unanticipated error encountered during processing!'
                    log_and_exit(err_msg)
        else:
            err_msg = 'Error! Parameter file {0} does not exist.'.\
                      format(args[0])
            sys.exit(err_msg)
    logging.shutdown()
    return 0
2075 
def process_command_line(cl_parser):
    """
    Get arguments and options from the calling command line.
    To be consistent with other OBPG programs, an underscore ('_') is used for
    multiword options, instead of a dash ('-').

    Returns an (options, args) pair; any 'par=' prefix is stripped from the
    positional arguments.  Exits via log_and_exit when --overwrite and
    --use_existing are both given.
    """
    cl_parser.add_option('--debug', action='store_true', dest='debug',
                         default=False, help=optparse.SUPPRESS_HELP)
    cl_parser.add_option('-d', '--deletefiles', action='store_true',
                         dest='deletefiles', default=False,
                         help='delete files created during processing')
    cl_parser.add_option('--ifile', action='store', type='string',
                         dest='ifile', help="input file")
    cl_parser.add_option('--output_dir', '--odir',
                         action='store', type='string', dest='odir',
                         help="user specified directory for output")
    cl_parser.add_option('--overwrite', action='store_true',
                         dest='overwrite', default=False,
                         help='overwrite files which already exist (default = stop processing if file already exists)')
    # cl_parser.add_option('-t', '--tar', type=str, dest='tar_file',
    #                      help=optparse.SUPPRESS_HELP)
    # cl_parser.add_option('--timing', dest='timing', action='store_true',
    #                      default=False,
    #                      help='report time required to run each program and total')
    cl_parser.add_option('--use_existing', action='store_true',
                         dest='use_existing', default=False,
                         help='use files which already exist (default = stop processing if file already exists)')
    cl_parser.add_option('-v', '--verbose',
                         action='store_true', dest='verbose', default=False,
                         help='print status messages to stdout')

    (options, args) = cl_parser.parse_args()
    for ndx, cl_arg in enumerate(args):
        if cl_arg.startswith('par='):
            # Bug fix: lstrip('par=') removed any leading run of the
            # characters 'p', 'a', 'r', '=', mangling names such as
            # 'par=april.par' -> 'il.par'.  Slice off only the literal
            # 'par=' prefix.
            args[ndx] = cl_arg[len('par='):]
    if options.overwrite and options.use_existing:
        log_and_exit('Error! Options overwrite and use_existing cannot be ' + \
                     'used simultaneously.')
    return options, args
2115 
def read_file_list_file(flf_name):
    """
    Reads flf_name and returns the list of files to be processed.

    Blank lines and '#' comments are ignored.  If any listed file does not
    exist, processing stops via log_and_exit with the offending names.
    """
    found = []
    missing = []
    with open(flf_name, 'rt') as flf:
        for raw_line in flf.readlines():
            # Text after '#' is a comment; surrounding whitespace is noise.
            candidate = raw_line.split('#')[0].strip()
            if not candidate:
                continue
            if os.path.exists(candidate):
                found.append(candidate)
            else:
                missing.append(candidate)
    if missing:
        err_msg = 'Error! File {0} specified the following input files which could not be located:\n   {1}'.\
                  format(flf_name, ', '.join([bl for bl in missing]))
        log_and_exit(err_msg)
    return found
2136 
def run_batch_processor(processor, file_set):
    """
    Run a processor, e.g. l2bin, which processes batches of files.

    Writes the batch input as a .lis file in the hidden directory (unless
    file_set[0] is a tar file, which is used directly), fills in
    processor.output_file if not already set, executes the processor, and
    returns the output file name.

    NOTE(review): the parameter name 'processor' shadows the imported
    mlp.processor module within this function.
    """
    # logging.debug('in run_batch_processor, ndx = %d', ndx)
    if os.path.exists((file_set[0])) and tarfile.is_tarfile(file_set[0]):
        processor.input_file = file_set[0]
    else:
        # Write the file names into a timestamped .lis list file, which
        # becomes the processor's single input.
        timestamp = time.strftime('%Y%m%d_%H%M%S', time.gmtime(time.time()))
        file_list_name = cfg_data.hidden_dir + os.sep + 'files_' + \
                         processor.target_type + '_' + timestamp + '.lis'
        with open(file_list_name, 'wt') as file_list:
            for fname in file_set:
                file_list.write(fname + '\n')
        processor.input_file = file_list_name
    data_file_list = []
    finder_opts = {}
    for fspec in file_set:
        dfile = get_obpg_data_file_object(fspec)
        data_file_list.append(dfile)
    # 'suite' wins over 'prod' when both appear in the par section.
    if 'suite' in processor.par_data:
        finder_opts['suite'] = processor.par_data['suite']
    elif 'prod' in processor.par_data:
        finder_opts['suite'] = processor.par_data['prod']
    if 'resolution' in processor.par_data:
        finder_opts['resolution'] = processor.par_data['resolution']
    if 'oformat' in processor.par_data:
        finder_opts['oformat'] = processor.par_data['oformat']
    # name_finder = name_finder_utils.get_level_finder(data_file_list,
    #                                                  processors[ndx].target_type,
    #                                                  finder_opts)
    # An explicit output name from the par file is honored; otherwise one is
    # derived from the input data files.
    if processor.output_file:
        processor.output_file = os.path.join(processor.out_directory,
                                             processor.output_file )
    else:
        processor.output_file = os.path.join(processor.out_directory,
                                             get_output_name.get_output_name(data_file_list,
                                                                             processor.target_type,
                                                                             finder_opts))
    if DEBUG:
        log_msg = "Running {0} with input file {1} to generate {2} ".\
                  format(processor.target_type,
                         processor.input_file,
                         processor.output_file)
        logging.debug(log_msg)
    processor.execute()
    return processor.output_file
2184 
def run_nonbatch_processor(processor):
    """
    Run a processor which deals with single input files (or pairs of files in
    the case of MODIS L1B processing in which GEO files are also needed).

    Determines the output file name (explicit, or derived via
    get_output_name), executes the processor unless the output already exists
    and neither overwrite nor use_existing is set, and returns the output
    file name (None when execution returned a nonzero status).

    NOTE(review): the parameter name 'processor' shadows the imported
    mlp.processor module within this function.
    """
    # if isinstance(file_set, tuple):
    #     input_file = file_set[0]
    #     geo_file = file_set[1]
    # else:
    #     input_file = file_set
    #     geo_file = None
    dfile = get_obpg_data_file_object(processor.input_file)

    # Build an optparse.Values carrying the naming options for
    # get_output_name; 'suite' wins over 'prod' when both are present.
    cl_opts = optparse.Values()
    if 'suite' in processor.par_data:
        cl_opts.suite = processor.par_data['suite']
    elif 'prod' in processor.par_data:
        cl_opts.suite = processor.par_data['prod']
    else:
        cl_opts.suite = None
    if 'resolution' in processor.par_data:
        cl_opts.resolution = processor.par_data['resolution']
    else:
        cl_opts.resolution = None
    if 'oformat' in processor.par_data:
        cl_opts.oformat = processor.par_data['oformat']
    else:
        cl_opts.oformat = None
    # name_finder = name_finder_utils.get_level_finder([dfile],
    #                                                  processors[ndx].target_type,
    #                                                  cl_opts)
    if processor.output_file:
        output_file = os.path.join(processor.out_directory, processor.output_file)
    else:
        output_file = os.path.join(processor.out_directory,
                                   get_output_name.get_output_name([dfile], processor.target_type, cl_opts))
    if DEBUG:
        print ('in run_nonbatch_processor, output_file = ' + output_file)
    # processor.input_file = input_file
    processor.output_file = output_file
    # processor.geo_file = geo_file
    # if 'deletefiles' in processor.par_data:
    #     if processor.par_data['deletefiles']:  # != 0:
    #         if processor.par_data['deletefiles'] == 1:
    #             processor.deletefiles = True
    #         else:
    #             processor.deletefiles = False
    # Only run when the target does not yet exist or overwriting is allowed.
    if (not os.path.exists(output_file)) or cfg_data.overwrite:
        if cfg_data.verbose:
            print ()
            print ('\nRunning ' + str(processor))
            sys.stdout.flush()
        proc_status = processor.execute()

        if proc_status:
            # Nonzero status: log it and signal failure with a None result
            # rather than aborting the whole run.
            output_file = None
            msg = "Error! Status {0} was returned during {1} {2} processing.".\
                  format(proc_status, processor.instrument,
                         processor.target_type)
            # log_and_exit(msg)
            logging.info(msg)
            # Todo: remove the failed file from future processing
    elif not cfg_data.use_existing:
        # Output exists and neither overwrite nor use_existing was given.
        log_and_exit('Error! Target file {0} already exists.'.\
                     format(output_file))
    # Reset the processor's file slots so it can be reused.
    processor.input_file = ''
    processor.output_file = ''
    return output_file
2253 
def run_script(proc, script_name):
    """
    Build the command to run the processing script which is passed in,
    then execute it and return the resulting status.
    """
    executable = build_executable_path(script_name)
    # Assemble 'ifile=... ofile=...' plus the remaining par-file options.
    arg_text = ' ifile=' + proc.input_file + ' ofile=' + proc.output_file
    arg_text += get_options(proc.par_data)
    command = ' '.join([executable, arg_text])
    logging.debug("\nRunning: " + command)
    return execute_command(command)
2265 
2266 # def run_smigen(proc):
2267 # """
2268 # Set up for and perform SMI (Standard Mapped Image) generation.
2269 # """
2270 # status = None
2271 # prog = os.path.join(proc.ocssw_bin, 'smigen')
2272 # if not os.path.exists(prog):
2273 # print ("Error! Cannot find executable needed for {0}".\
2274 # format(proc.rule_set.rules[proc.target_type].action))
2275 # if 'prod' in proc.par_data:
2276 # args = 'ifile=' + proc.input_file + ' ofile=' + proc.output_file + \
2277 # ' prod=' + proc.par_data['prod']
2278 # cmd = ' '.join([prog, args])
2279 # for key in proc.par_data:
2280 # if (key != 'prod') and not (key.lower() in FILE_USE_OPTS):
2281 # args += ' ' + key + '=' + proc.par_data[key]
2282 # logging.debug('\nRunning smigen command: ' + cmd)
2283 # status = execute_command(cmd)
2284 # else:
2285 # err_msg = 'Error! No product specified for smigen.'
2286 # log_and_exit(err_msg)
2287 # return status
2288 
def start_logging(time_stamp):
    """
    Opens log file(s) for debugging.

    Attaches an INFO-level file handler (and, when DEBUG is set, a second
    DEBUG-level handler) to the root logger, writing into
    cfg_data.output_dir with the given timestamp in the file names.
    """
    info_log_path = os.path.join(cfg_data.output_dir,
                                 ''.join(['Processor_', time_stamp, '.log']))
    debug_log_path = os.path.join(cfg_data.output_dir,
                                  ''.join(['multilevel_processor_debug_',
                                           time_stamp, '.log']))
    root_logger = logging.getLogger()
    # mlp_logger.setLevel(logging.DEBUG)

    info_handler = logging.FileHandler(info_log_path)
    info_handler.setLevel(logging.INFO)
    root_logger.addHandler(info_handler)

    if DEBUG:
        debug_handler = logging.FileHandler(debug_log_path)
        debug_handler.setLevel(logging.DEBUG)
        root_logger.addHandler(debug_handler)
    logging.debug('Starting ' + os.path.basename(sys.argv[0]) + ' at ' +
                  datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
2311 
def uniqify_list(orig_list):
    """
    Returns a list with no duplicates, preserving first-seen order.
    Somewhat borrowed from:
    http://www.peterbe.com/plog/uniqifiers-benchmark (example f5)
    """
    seen = {}
    deduped = []
    for element in orig_list:
        if element in seen:
            continue
        seen[element] = 1
        deduped.append(element)
    return deduped
2324 
2325 
2326 
2327 DEBUG = False
2328 #DEBUG = True
2329 
2330 cfg_data = None
2331 FILE_USE_OPTS = ['deletefiles', 'overwrite', 'use_existing']
2332 SUFFIXES = {
2333  'geo': 'GEO',
2334  'l1brsgen': 'L1B_BRS',
2335  'l1aextract': 'L1A.sub',
2336  'l1aextract_viirs': 'L1A.sub',
2337  'l1aextract_seawifs': 'L1A.sub',
2338  'l1aextract_modis': 'L1A.sub',
2339  'l1mapgen': 'L1B_MAP',
2340  'l2bin': 'L3b',
2341  'l2brsgen': 'L2_BRS',
2342  'l2extract': 'L2.sub',
2343  'l2gen': 'L2',
2344  'l2mapgen': 'L2B_MAP',
2345  'l3bin': 'L3b',
2346  'l3mapgen': 'L3m',
2347  'level 1a': 'L1A',
2348  'level 1b': 'L1B_LAC',
2349  'smigen': 'SMI'
2350 }
2351 input_file_data = {}
2352 #verbose = False
2353 
2354 if os.environ['OCSSWROOT']:
2355  OCSSWROOT_DIR = os.environ['OCSSWROOT']
2356  logging.debug('OCSSWROOT -> %s', OCSSWROOT_DIR)
2357 else:
2358  sys.exit('Error! Cannot find OCSSWROOT environment variable.')
2359 
if __name__ == "__main__":
    # Script entry point: propagate main()'s return value as the exit status.
    sys.exit(main())
def get_source_files(input_files)
def get_batch_output_name(file_set, suffix)
def get_output_name2(inp_files, targ_prog, suite=None, oformt=None, res=None)
list(APPEND LIBS ${PGSTK_LIBRARIES}) add_executable(atteph_info_modis atteph_info_modis.c) target_link_libraries(atteph_info_modis $
Definition: CMakeLists.txt:7
def run_nonbatch_processor(processor)
def find_geo_file2(inp_file, instrument, lvl)
def get_source_file_sets(proc_src_types, source_files, src_key, requires_all_sources)
def build_executable_path(prog_name)
def exe_processor(proc, src_files, src_lvl)
def get_lowest_source_level(source_files)
def get_input_files_type_data(input_files_list)
def get_data_file_option(par_contents, opt_text)
def get_intermediate_products(existing_prod_names, ruleset, lowest_source_level)
def get_source_products_types(targt_prod, ruleset)
def get_required_programs(target_program, ruleset, lowest_source_level)
def build_l2gen_par_file(par_contents, input_file, geo_file, output_file)
def find_viirs_geo_file(proc, first_svm_file)
def run_script(proc, script_name)
def get_processors(src_files, input_files, par_contents, files_to_delete)
def get_par_file_contents(par_file, acceptable_single_keys)
def extract_par_section(par_contents, section)
def run_batch_processor(processor, file_set)
const char * str
Definition: l1c_msi.cpp:35
def get_output_name3(input_name, input_files, suffix)
def get_intermediate_processors(sensor, existing_procs, rules, lowest_source_level)
def create_levels_list(rules_sets)
def get_obpg_data_file_object(file_specification)
def get_source_geo_files(source_files, proc_src_types, proc_src_ndx)
def get_file_handling_opts(par_contents)
def get_processors2(sensor, par_contents, rules, lowest_source_level)
def create_help_message(rules_sets)
def __init__(self, hidden_dir, ori_dir, verbose, overwrite, use_existing, deletefiles, out_dir=None)
def do_processing(sensors_sets, par_file, cmd_line_ifile=None)
def build_file_list_file(filename, file_list)