NASA Logo
Ocean Color Science Software

ocssw V2022
hktgen_pace.py
Go to the documentation of this file.
1 #! /usr/bin/env python3
2 
3 import sys
4 import numpy as np
5 import datetime
6 import os
7 import argparse
8 import netCDF4
9 import logging
10 from io import BytesIO
11 
12 from telemetry.PacketUtils import readFileHeader, pad_packet
13 from telemetry.timestamp import *
14 from telemetry.derive_orbitparams import derive_orbitparams
15 from telemetry import ccsdspy
16 
17 __version__ = '1.5.4 (2024-05-28)'
18 
19 ignored_apids = [
20  636, # OCI Ancillary packet
21  700, # OCI science packet with spin number but no time field
22  720, # OCI SWIR science packet
23  751, # HARP science packet
24  848, # SPEX science packet
25 ] # probably don't need this.
26 
27 # define required packets
28 att = 108
29 orb = 128
30 tilt = 198
31 good_apids = [att, orb, tilt]
32 
33 # define packet categories
34 # keep these in APID order
35 pktypes = {}
36 pktypes['SC'] = {'maxapid': 549, 'maxlen': 2048, 'name': 'Spacecraft'}
37 pktypes['OCI'] = {'maxapid': 749, 'maxlen': 1618, 'name': 'OCI'}
38 pktypes['HARP'] = {'maxapid': 799, 'maxlen': 40, 'name': 'HARP2'}
39 pktypes['SPEX'] = {'maxapid': 849, 'maxlen': 298, 'name': 'SPEXone'}
40 
41 # constant valid max for seconds of day
42 MAX_SECONDS_OF_DAY = 172800.0 # covers 2 days
43 
44 
def main():
    """Convert PACE S-band housekeeping (HSK) telemetry to a NetCDF4 HKT file.

    Workflow:
      1. Parse command-line options and load CCSDS packet definitions for
         the required APIDs from $OCDATAROOT/telemetry/pace/hkt.
      2. Read each input file (a single HSK file, or a text list of files,
         one per line) and iterate over its CCSDS packets.
      3. Decode attitude (APID 108), orbit ephemeris (128) and tilt (198)
         packets; optionally accumulate every raw packet by instrument
         category, padded to that category's fixed record length.
      4. Write navigation data, raw packets, and CF/ACDD global metadata
         to the output NetCDF4 file.

    Returns:
        int: process exit status (see the argparse epilog for the codes).
    """
    print("hktgen_pace", __version__)

    # Read command line options
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description='Convert S-band housekeeping telemetry to NetCDF4',
        epilog='''
EXIT Status:
0 : Success
1 : Fatal error
101 : Non-fatal file open error
102 : Invalid file or instrument from CFE header
103 : Invalid packet header
104 : Invalid packet [header/datatype/length]
110 : No valid packets found
120 : Multiple warnings; see log
'''
    )
    parser.add_argument('ifile', type=str,
                        help='path to S-band telemetry (HSK) file OR list of input files, '
                        'one per line, in chronological order')
    parser.add_argument("-o", "--output", metavar="ofile", dest="ofile",
                        help="output NetCDF4 file; defaults to PACE.yyyymmddThhmmss.HKT.nc")
    parser.add_argument('-s', '--skip_raw', action='store_true',
                        default=False, help="don't save raw housekeeping data")
    parser.add_argument('-v', '--verbose', action='store_true',
                        default=False, help="print status messages")
    parser.add_argument("--pversion", metavar="pversion", dest="pversion",
                        help="Processing version of the program")
    parser.add_argument("--doi", metavar="doi", dest="doi",
                        help="Identifier_product_doi")

    args = parser.parse_args()
    status = 0

    loglevel = logging.INFO if args.verbose else logging.WARNING
    logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)

    # read packet definitions (one CSV per required APID)
    pktDir = os.path.join(os.getenv('OCDATAROOT'), 'telemetry', 'pace', 'hkt')
    if not os.path.exists(pktDir):
        logging.error(f'ERROR: The directory {pktDir} does not exist.')
        return 1
    pktDict = {}
    for apid in good_apids:
        pktDict[apid] = {}
        pktDict[apid]['recordlist'] = []
        csvfile = os.path.join(f'{pktDir}', f'APID{apid}.csv')
        if not os.path.exists(csvfile):
            logging.error(f'ERROR: The file {csvfile} does not exist.')
            return 1
        pktDict[apid]['packetdef'] = ccsdspy.FixedLength.from_file(csvfile)

    # add conversions (TODO: specify conversions in csv for ccsdspy)
    # tilt encoder counts -> degrees: slope 0.0075, offset -245.76
    pace_tsade_enc_tilt = ccsdspy.converters.LinearConverter(0.0075, -245.76)
    pktDict[tilt]['packetdef'].add_converted_field('TILT_ENCPOS', 'TILT_ENCPOS',
                                                   pace_tsade_enc_tilt)

    # Is input file tlm or list of files?
    filelist = []
    infile = os.path.expandvars(args.ifile)

    try:
        with open(infile, mode='rt') as flist:  # try to read as list of files
            try:
                input_list = True
                for ifile in flist:
                    filelist.append(os.path.expandvars(ifile.rstrip()))
            except UnicodeDecodeError:
                input_list = False  # contains invalid byte - infile is binary

    except IOError as e:
        logging.error(f'{e}; exiting')
        return 1

    if not filelist:  # if that didn't work, it's a single HSK file
        filelist.append(infile)

    # Initialize record and packet lists
    for val in pktypes.values():
        val['data'] = []
    oversize_packets = bytearray()
    alltimes = []

    # Step through all input files
    for filename in filelist:
        logging.info(f'Reading {filename}')

        try:
            ifile = open(filename, mode='rb')
        except IOError as e:
            status = 120 if status > 0 else 101
            logging.warning(f'{e}; continuing')
            continue  # input_list errors already caught above

        # "with" guarantees the file is closed on every exit path
        # (the continue/return branches below previously leaked it)
        with ifile:

            # Read any file header(s)
            filehdr = readFileHeader(ifile)
            if filehdr:
                logging.info(filehdr)
                logging.info('')

                # Is it the right kind of file?
                desired = (filehdr['subtype'] == 101 and
                           filehdr['length'] == 64 and
                           filehdr['SCID'] == b'PACE' and
                           filehdr['processorid'] in (1, 2, 30))

            # short-circuit: `desired` is only evaluated when filehdr is truthy
            if not filehdr or not desired:
                status = 120 if status > 0 else 102
                if input_list:
                    logging.warning(f'File {filename} has invalid header; continuing')
                    continue  # go to next file
                else:
                    logging.error(f'File {filename} has invalid header; returning')
                    return status

            # read CCSDS packets
            for packet in ccsdspy.utils.iter_packet_bytes(ifile, include_primary_header=True):

                data = packet[6:]  # packet body after the 6-byte primary header
                header = ccsdspy.utils.read_primary_headers(BytesIO(packet))
                for k, v in header.items():
                    header[k] = v[0]  # remove outer array
                logging.info(header)

                # check for invalid header
                if header['CCSDS_PACKET_LENGTH'] > 16378:
                    status = 120 if status > 0 else 103
                    logging.warning(
                        f'File {filename} contains invalid CCSDS packet header: {header}')
                    continue  # go to next packet

                # check for truncated data
                if len(data) < header['CCSDS_PACKET_LENGTH'] + 1:
                    status = 120 if status > 0 else 104
                    logging.warning(f'File {filename} has unexpected EOF: '
                                    f'expected {header["CCSDS_PACKET_LENGTH"]+1} more bytes, '
                                    f'got {len(data)}')
                    break  # done with this file

                # check for invalid timestamp: first byte of the secondary
                # header bounds the plausible TAI58 epoch range
                if (header['CCSDS_SECONDARY_FLAG'] == 1
                        and data[0] > 112    # after 2017-07-18T05:49:15Z
                        and data[0] < 192):  # before 2060-01-28T16:50:35Z
                    ts = readTimestamp(data)
                    alltimes.append(ts)
                    header['timestamp'] = ts
                # keep going to save packet, regardless of timestamp

                # parse useful fields
                apid = header['CCSDS_APID']
                if apid in good_apids:
                    myDict = (pktDict[apid]['packetdef']).load(BytesIO(packet))
                    for key, val in myDict.items():
                        myDict[key] = val[0]  # remove outer array
                    myDict['timestamp'] = decode_timestamp(myDict['seconds'],
                                                           myDict['subsecs'])
                    # TODO: implement custom converter in CCSDSPy
                    pktDict[apid]['recordlist'].append(myDict)
                    logging.info(f"taiTime={myDict['taiTime']} seconds={myDict['seconds']} "
                                 f"subsecs={myDict['subsecs']} timestamp={myDict['timestamp']}")

                # accumulate raw packets by category; oversized packets go
                # into a shared byte buffer instead
                if not args.skip_raw and apid not in ignored_apids:
                    for val in pktypes.values():
                        if apid <= val['maxapid']:
                            if len(packet) > val['maxlen']:
                                oversize_packets += packet
                            else:
                                packet = pad_packet(packet, val['maxlen'])
                                val['data'].append(packet)
                            break  # add packet to only one category

            # end (for packet in packets)

    # end (for filename in filelist)

    # get start and end times (assumes inputs were in chronological order)
    if len(alltimes) == 0:
        logging.warning('No input packets with valid times')
        return 110

    stime = tai58_as_datetime(alltimes[0])
    etime = tai58_as_datetime(alltimes[-1])
    daystart = stime.replace(hour=0, minute=0, second=0, microsecond=0)
    timeunits = f"seconds since {daystart.strftime('%Y-%m-%d')}"

    # construct product name
    prod_name = stime.strftime('PACE.%Y%m%dT%H%M%S.HKT.nc')
    if args.ofile is None:
        args.ofile = prod_name

    # create new netcdf file
    try:
        logging.info(f'Writing {args.ofile}')
        ofile = netCDF4.Dataset(args.ofile, 'w')
    except Exception:  # narrowed from BaseException: don't swallow Ctrl-C
        logging.error("Cannot write file \"%s\": exiting." % args.ofile)
        return 1

    # define dimensions
    ofile.createDimension('vector_elements', 3)
    ofile.createDimension('quaternion_elements', 4)

    # write raw housekeeping data to file
    if not args.skip_raw:
        group = ofile.createGroup('housekeeping_data')
        for key, val in pktypes.items():
            npkts = len(val['data'])
            if npkts > 0:
                maxlen = val['maxlen']
                logging.info(
                    f"{npkts}\t{key}_HKT_packets\tx {maxlen}\t= {npkts*maxlen}\tbytes")
                ofile.createDimension(f'{key}_hkt_pkts', None)
                ofile.createDimension(f'max_{key}_packet', maxlen)
                var = group.createVariable(f'{key}_HKT_packets', 'u1',
                                           (f'{key}_hkt_pkts', f'max_{key}_packet'))
                var.long_name = f"{val['name']} housekeeping telemetry packets"
                var[:] = val['data']
        if len(oversize_packets) > 0:
            logging.info(f'{len(oversize_packets)}\toversize_packets')
            ofile.createDimension('os_pkts', None)
            var = group.createVariable('oversize_packets', 'u1', ('os_pkts',))
            var.long_name = 'Buffer for packets exceeding maximum size'
            var[:] = oversize_packets

    # write navigation data to file
    group = ofile.createGroup('navigation_data')

    if len(pktDict[att]['recordlist']) > 0:  # ATTITUDE
        ofile.createDimension('att_records', None)

        var = group.createVariable(  # att_time
            'att_time', 'f8', ('att_records',), fill_value=-32767.)
        var.long_name = "Attitude sample time (seconds of day)"
        var.valid_min = 0
        var.valid_max = MAX_SECONDS_OF_DAY
        var.units = timeunits
        var[:] = [seconds_since(rec['Est_Time'], basetime=daystart)
                  for rec in pktDict[att]['recordlist']]

        var = group.createVariable(  # att_quat
            'att_quat', 'f4', ('att_records', 'quaternion_elements'), fill_value=-32767.)
        var.long_name = "Attitude quaternions (J2000 to spacecraft)"
        var.valid_min = -1
        var.valid_max = 1
        var.units = "seconds"
        var[:] = [rec['q_EciToBrf_Est'] for rec in pktDict[att]['recordlist']]

        var = group.createVariable(  # att_rate
            'att_rate', 'f4', ('att_records', 'vector_elements'), fill_value=-32767.)
        var.long_name = "Attitude angular rates in spacecraft frame"
        var.valid_min = np.array((-0.004), 'f4')
        var.valid_max = np.array(( 0.004), 'f4')
        var.units = "radians/second"
        var[:] = [rec['w_EciToBrf_Brf_Est']
                  for rec in pktDict[att]['recordlist']]

    if len(pktDict[orb]['recordlist']) > 0:  # EPHEMERIS (orbit)
        ofile.createDimension('orb_records', None)

        # rotate J2000 position/velocity into ECR, then derive lon/lat/alt
        posr = [np.matmul(rec['DCM_ecef2eci'], rec['scPosJ2000'])
                for rec in pktDict[orb]['recordlist']]
        velr = [np.matmul(rec['DCM_ecef2eci'], rec['scVelJ2000'])
                for rec in pktDict[orb]['recordlist']]
        orbitparams = derive_orbitparams(posr, velr)

        var = group.createVariable(  # orb_time
            'orb_time', 'f8', ('orb_records',), fill_value=-32767.)
        var.long_name = "Orbit vector time (seconds of day)"
        var.valid_min = 0
        var.valid_max = MAX_SECONDS_OF_DAY
        var.units = timeunits
        var[:] = [seconds_since(rec['FSWTime'], basetime=daystart)
                  for rec in pktDict[orb]['recordlist']]

        var = group.createVariable(  # orb_pos
            'orb_pos', 'f4', ('orb_records', 'vector_elements'), fill_value=-9999999)
        var.long_name = "Orbit position vectors (ECR)"
        var.valid_min = -7200000
        var.valid_max = 7200000
        var.units = "meters"
        var[:] = orbitparams['posr']

        var = group.createVariable(  # orb_vel
            'orb_vel', 'f4', ('orb_records', 'vector_elements'), fill_value=-32767.)
        var.long_name = "Orbit velocity vectors (ECR)"
        var.valid_min = -7600
        var.valid_max = 7600
        var.units = "meters/second"
        var[:] = orbitparams['velr']

        var = group.createVariable(  # orb_lon
            'orb_lon', 'f8', ('orb_records',), fill_value=-32767.)
        var.long_name = "Orbit longitude (degrees East)"
        var.valid_min = -180
        var.valid_max = 180
        var.units = "degrees_east"
        var[:] = orbitparams['lon']

        var = group.createVariable(  # orb_lat
            'orb_lat', 'f8', ('orb_records',), fill_value=-32767.)
        var.long_name = "Orbit latitude (degrees North)"
        var.valid_min = -90
        var.valid_max = 90
        var.units = "degrees_north"
        var[:] = orbitparams['lat']

        var = group.createVariable(  # orb_alt
            'orb_alt', 'f8', ('orb_records',), fill_value=-32767.)
        var.long_name = "Orbit altitude"
        var.valid_min = 670000
        var.valid_max = 710000
        var.units = "meters"
        var[:] = orbitparams['alt']

    if len(pktDict[tilt]['recordlist']) > 0:  # TILT
        ofile.createDimension('tilt_records', None)

        var = group.createVariable(  # tilt_time
            'tilt_time', 'f8', ('tilt_records',), fill_value=-32767.)
        var.long_name = "Tilt sample time (seconds of day)"
        var.valid_min = 0
        var.valid_max = MAX_SECONDS_OF_DAY
        var.units = timeunits
        var[:] = [seconds_since(rec['timestamp'], basetime=daystart)
                  for rec in pktDict[tilt]['recordlist']]

        var = group.createVariable(  # tilt
            'tilt', 'f4', ('tilt_records',), fill_value=-32767.)
        var.long_name = "Tilt angle"
        var.valid_min = np.array((-20.1), 'f4')
        var.valid_max = np.array(( 20.1), 'f4')
        var.units = "degrees"
        var[:] = [rec['TILT_ENCPOS'] for rec in pktDict[tilt]['recordlist']]

        var = group.createVariable(  # tilt_flag
            'tilt_flag', 'u1', ('tilt_records',), fill_value=255)
        var.long_name = "Tilt quality flag"
        var.flag_values = np.array([0, 1], 'u1')
        var.flag_meanings = "Valid Not_initialized"
        # TILT_HOME_INIT==1 means initialized, so invert for the flag
        var[:] = [1 - rec['TILT_HOME_INIT']
                  for rec in pktDict[tilt]['recordlist']]

    # write global metadata

    # static
    ofile.title = "PACE HKT Data"
    ofile.instrument = "Observatory"
    ofile.processing_version = "V1.0"
    ofile.institution = "NASA Goddard Space Flight Center, Ocean Biology Processing Group"
    ofile.license = "https://science.nasa.gov/earth-science/earth-science-data/data-information-policy/"
    ofile.naming_authority = "gov.nasa.gsfc.oceancolor"
    ofile.creator_name = "NASA/GSFC/OBPG"
    ofile.creator_email = "data@oceancolor.gsfc.nasa.gov"
    ofile.creator_url = "https://oceancolor.gsfc.nasa.gov"
    ofile.project = "Ocean Biology Processing Group"
    ofile.publisher_name = "NASA/GSFC/OB.DAAC"
    ofile.publisher_email = "data@oceancolor.gsfc.nasa.gov"
    ofile.publisher_url = "https://oceancolor.gsfc.nasa.gov"
    ofile.processing_level = "L0"
    ofile.Conventions = "CF-1.8 ACDD-1.3"
    ofile.standard_name_vocabulary = 'CF Standard Name Table v79'
    ofile.keywords_vocabulary = "NASA Global Change Master Directory (GCMD) Science Keywords"
    # dynamic
    ofile.product_name = args.ofile
    ofile.time_coverage_start = datetime_repr(stime)
    ofile.time_coverage_end = datetime_repr(etime)
    ofile.history = ' '.join(sys.argv)
    if input_list:
        ofile.source = ','.join([os.path.basename(f) for f in filelist])
    ofile.date_created = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

    # if pversion and doi are specified, write them to the global attributes
    if args.pversion is not None:
        ofile.processing_version = args.pversion

    if args.doi is not None:
        ofile.identifier_product_doi_authority = "https://dx.doi.org"
        ofile.identifier_product_doi = args.doi

    # write processing control data into file
    group = ofile.createGroup('processing_control')
    group.setncattr("software_name", "hktgen_pace")
    group.setncattr("software_version", __version__)
    group.setncattr("hsk_files", ",".join(filelist))

    # write input parameters into file, including default files that the
    # program used and not specified by the user
    group1 = group.createGroup("input_parameters")
    group1.setncattr("ifile", args.ifile)
    group1.setncattr("ofile", args.ofile)
    group1.setncattr("skip_raw", "True" if args.skip_raw else "False")

    # close file
    ofile.close()

    if status:
        logging.warning('Exiting with status code %s' % status)
    return status
450 
451 
if __name__ == '__main__':
    # propagate main()'s integer status code to the shell
    sys.exit(main())
def main()
Definition: hktgen_pace.py:45
def seconds_since(tai58, basetime=None)
Definition: PacketUtils.py:29
def read_primary_headers(file)
Definition: utils.py:109
void load(float x1, float v[], float y[])
def iter_packet_bytes(file, include_primary_header=True)
Definition: utils.py:33
void print(std::ostream &stream, const char *format)
Definition: PrintDebug.hpp:38
def readTimestamp(data)
Definition: PacketUtils.py:19
def decode_timestamp(seconds, subseconds)
Definition: PacketUtils.py:8
def pad_packet(data, length)
Definition: PacketUtils.py:55
def tai58_as_datetime(tai58)
Definition: PacketUtils.py:24
def datetime_repr(dt)
Definition: PacketUtils.py:36