Ocean Color Science Software

ocssw V2022
merge_met_reanalysis.py
#! /usr/bin/env python3

'''
G. Fireman May 2017; adapted from Wayne Robinson's met_reanal_full.pro
'''

import re
import sys
from datetime import datetime
from shutil import copyfile

import numpy as np
from scipy import ndimage
from pyhdf.SD import SD, SDC

NY_OUT = 181  # 'Number of Rows'
NX_OUT = 360  # 'Number of Columns'

PARAM_RANGES = {
    'z_wind' : [-50, 50],
    'm_wind' : [-50, 50],
    'press'  : [850, 1084],
    'rel_hum': [  0, 100],
    'p_water': [  0, 200]
}
params = PARAM_RANGES.keys()

def enforce_range(values, param):
    """Clip values in place to the valid range for the named parameter."""
    try:
        limits = PARAM_RANGES[param]
        np.clip(values, limits[0], limits[1], out=values)
    except KeyError:
        print('Unknown parameter referenced in enforce_range:', param)
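
# A minimal illustration (not part of the original module) of the in-place
# clipping above; np.clip writes through the `out` argument, so the caller's
# array is modified directly:
#
#   press = np.array([800., 1000., 1100.])
#   enforce_range(press, 'press')   # press is now [850., 1000., 1084.]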


def smooth_latlon(old_arr, width=3):
    """Boxcar-smooth a lat/lon grid, with boundary handling per axis."""
    # y axis = latitude: truncate at poles
    new_arr = ndimage.uniform_filter1d(old_arr, width, axis=0, mode='nearest')
    # x axis = longitude: wrap around 180
    new_arr = ndimage.uniform_filter1d(new_arr, width, axis=1, mode='wrap')
    return new_arr
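
# Illustrative sketch (not from the original source): mode='wrap' makes the
# longitude pass treat the first and last columns as neighbors, so the boxcar
# average crosses the date line instead of truncating there:
#
#   grid = np.arange(12.).reshape(3, 4)
#   smooth_latlon(grid, width=3)   # column 0 averages columns 3, 0 and 1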


def fix_bias(old_dat, new_dat, width=3):
    """Correct old_dat for the large-scale bias between it and new_dat."""
    # resize new data to the same resolution as the old
    zoom = np.divide(old_dat.shape, new_dat.shape)
    new_dat2 = ndimage.zoom(new_dat, zoom, order=1, mode='nearest')

    # difference the smoothed fields to find the bias
    bias = smooth_latlon(old_dat, width) - smooth_latlon(new_dat2, width)

    # return the original data corrected with the bias
    return old_dat - bias
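
# Rough numerical sketch (hypothetical data, not from the original source):
# if the old field carries a uniform +2 offset relative to the new one, the
# smoothed difference recovers that offset and fix_bias removes it:
#
#   truth = np.random.rand(181, 360)
#   corrected = fix_bias(truth + 2.0, truth[::2, ::2])
#   # corrected approximates truth, up to interpolation error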


def merge_met_reanalysis(file_upd, file_nrt, file_tpl, file_out=None):

    h4_upd = h4_nrt = h4_out = None
    try:
        h4_upd = SD(file_upd, SDC.READ)
        h4_nrt = SD(file_nrt, SDC.READ)

        # get metadata from updated file
        start_time = getattr(h4_upd, 'Start Time')[:9]  # yyyyjjjhh
        data_source = getattr(h4_upd, 'Data Source')
        match = re.search(r'NCEP Reanalysis (\d)', data_source)
        if match:
            reanalysis_type = match.group(1)
        else:
            print('Reanalysis file has non-standard Data Source: "'
                  + data_source + '"')
            reanalysis_type = 'X'

        # define output filename
        if file_out is None:
            file_out = ('N' + start_time + '_MET_NCEPR'
                        + reanalysis_type + '_6h.hdf')

        # start with realtime file if it's full-size
        nx = getattr(h4_nrt, 'Number of Columns')
        ny = getattr(h4_nrt, 'Number of Rows')

        if (nx == NX_OUT) and (ny == NY_OUT):
            print('Calling apply_reanalysis_bias to make file:', file_out)
            copyfile(file_nrt, file_out)
            h4_out = SD(file_out, SDC.WRITE)

            apply_reanalysis_bias(h4_nrt, h4_upd, h4_out)
            infiles = file_nrt + ' ' + file_upd
            copy_atts = ('Title', 'Data Source', 'Data Source Desc')

        # otherwise, start with a file size appropriate to the observation date
        else:
            print('Calling expand_reanalysis_file to make file:', file_out)
            is_modern = (int(start_time[:4]) > 1995)  # expand post-CZCS era only
            if is_modern:
                copyfile(file_tpl, file_out)  # start with large dummy file
            else:
                copyfile(file_nrt, file_out)  # start with small realtime file
            h4_out = SD(file_out, SDC.WRITE)
            expand_reanalysis_file(h4_nrt, h4_upd, h4_out, expand_array=is_modern)
            infiles = file_upd
            copy_atts = ('Title', 'Data Source', 'Data Source Desc',
                         'Start Time', 'Start Year', 'Start Day', 'Start Millisec',
                         'End Time', 'End Year', 'End Day', 'End Millisec')

        # update output file attributes
        setattr(h4_out, 'Product Name', file_out)
        setattr(h4_out, 'Data Center', 'NASA/GSFC Ocean Biology Processing Group')
        setattr(h4_out, 'Mission', 'General')
        setattr(h4_out, 'Satellite Platform', ' ')
        setattr(h4_out, 'Software ID', 'merge_met_reanalysis')
        setattr(h4_out, 'Processing Time',
                datetime.utcnow().strftime('%Y%j%H%M%S%f'))
        setattr(h4_out, 'Input Files', infiles)
        setattr(h4_out, 'Processing Control', ' ')
        for att in copy_atts:  # copy the rest from the reanalysis file
            setattr(h4_out, att, getattr(h4_upd, att))

        print('Success!')

    except Exception as e:
        print(e)
        sys.exit(1)

    finally:
        # close whichever HDF files were successfully opened
        for h4 in (h4_upd, h4_nrt, h4_out):
            if h4 is not None:
                h4.end()
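
# Illustrative call (all filenames hypothetical, not from the original source):
#
#   merge_met_reanalysis('R2_met_upd.hdf',    # updated NCEP Reanalysis file
#                        'met_nrt.hdf',       # near-real-time (predicted) file
#                        'r2_template.hdf')   # full-size template file
#
# When file_out is omitted, the name is built from the reanalysis metadata,
# e.g. 'N<yyyyjjjhh>_MET_NCEPR<n>_6h.hdf'.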


def apply_reanalysis_bias(h4_nrt, h4_upd, h4_out, width=11):
    """Adjust non-wind realtime fields by their bias against reanalysis."""
    try:
        for sds in params:

            # for wind params, use realtime data as-is
            if sds in ('z_wind', 'm_wind'):
                data_out = h4_nrt.select(sds).get()

            # otherwise, adjust according to reanalysis data
            else:
                data_upd = h4_upd.select(sds).get()
                data_nrt = h4_nrt.select(sds).get()
                enforce_range(data_upd, sds)
                enforce_range(data_nrt, sds)

                # derive bias and apply to realtime data
                data_out = fix_bias(data_nrt, data_upd, width=width)

            # write data to the existing file
            enforce_range(data_out, sds)
            h4_out.select(sds).set(data_out)

    except Exception as e:
        print(e)
        raise


def expand_reanalysis_file(h4_nrt, h4_upd, h4_out, expand_array=False):
    """Fill the output file, optionally resampling to operational resolution."""
    try:
        for sds in params:

            # for wind params, use realtime data
            if sds in ('z_wind', 'm_wind'):
                data_in = h4_nrt.select(sds).get()

            # otherwise, use reanalysis data
            else:
                data_in = h4_upd.select(sds).get()

            # resize reanalysis data to operational resolution
            if expand_array:
                zoom = (NY_OUT / data_in.shape[0], NX_OUT / data_in.shape[1])
                data_out = ndimage.zoom(data_in, zoom, order=1, mode='nearest')
            else:
                data_out = data_in

            # write data to the existing file
            enforce_range(data_out, sds)
            h4_out.select(sds).set(data_out)

    except Exception as e:
        print(e)
        raise
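
# Sketch of the resampling step (assumes the standard 2.5-degree NCEP
# Reanalysis grid of 73 x 144 points; the grid size is not stated in this
# file, so treat it as an assumption):
#
#   zoom = (NY_OUT / 73, NX_OUT / 144)        # about (2.48, 2.5)
#   big = ndimage.zoom(np.zeros((73, 144)), zoom, order=1, mode='nearest')
#   big.shape                                 # (181, 360), 1-degree grid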


if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(
        description='Adjust predicted met files with bias derived from NCEP Reanalysis.')
    parser.add_argument('file_upd', metavar='prer2file',
                        help='updated NCEP Reanalysis file')
    parser.add_argument('file_nrt', metavar='metfile',
                        help='predicted (near-real-time) met file')
    parser.add_argument('file_tpl', metavar='r2_template',
                        help='full-size file template', nargs='?')
    parser.add_argument('file_out', metavar='ofile',
                        help='output file', nargs='?')
    args = parser.parse_args()

    merge_met_reanalysis(args.file_upd, args.file_nrt,
                         args.file_tpl, args.file_out)
    sys.exit(0)