Ocean Color Science Software

ocssw V2022
geo_eval.py
1 #!/usr/bin/env python3
2 import numpy as np
3 import netCDF4 as nc
4 import os
5 import csv
6 import sys
7 import argparse
8 import pyproj
9 from scipy.interpolate import RegularGridInterpolator, griddata
10 from shapely.geometry import Point
11 from shapely.geometry.polygon import Polygon
12 from scipy.ndimage import uniform_filter
13 import time
14 from copy import deepcopy
15 from inspect import currentframe, getframeinfo
16 import datetime
17 from scipy import optimize
18 from typing import List, Dict
19 
20 PROGRAM_NAME = 'geo_eval'
21 VERSION = "1.3.1"
22 bad_float = -32767.0
23 running_time = datetime.datetime.now()
24 RUNNING_TIME = running_time.strftime("%Y-%m-%dT%H%M%S")
25 subpixels_default_max = 30
26 subpixels_default = 15
27 window_size_default = 5
28 min_number_of_points = 100
29 cf = currentframe()
30 filename_script = getframeinfo(cf).filename
31 
32 def check_for_bad_geolocation(coord):
33  if coord[0] == bad_float or np.isnan(coord[0]):
34  return True
35  if coord[1] == bad_float or np.isnan(coord[1]):
36  return True
37  return False
38 
39 def get_cloud_mask(cld_file: str):
40  if not os.path.exists(cld_file):
41  print(
42  f"File {cld_file} does not exist. See line {get_linenumber()} in {filename_script}. Exiting")
43  sys.exit(1)
44  with nc.Dataset(cld_file) as f:
45  return f["geophysical_data/cloud_flag"][:]
46 
47 
48 def read_file_list(path_chips: str) -> List[str]:
49  with open(path_chips) as file_txt:
50  file_list_chip = file_txt.read().splitlines()
51  for file in file_list_chip:
52  if not os.path.exists(file):
53  print(
54  f"File {file} does not exist. See line {get_linenumber()} in {filename_script}. Exiting")
55  sys.exit(1)
56  return file_list_chip
57 
58 
59 def get_scantime(year: List[int], day: List[int], msec: List[int]) -> np.ndarray:
60  scantime = np.zeros(len(year))
61  for i, (iyear, iday, imsec) in enumerate(zip(year, day, msec)):
62  date_scan = datetime.datetime.strptime(f'{iyear} {iday}', '%Y %j')
63  scantime[i] = time.mktime(date_scan.timetuple(
64  )) + date_scan.microsecond / 1.0e6 + float(imsec) / 1.0e3
65  return scantime
66 
67 
68 def get_viirs_modis_lat_lon_scantime(f: nc.Dataset):
69  lat = np.array(f["navigation_data/latitude"][:], dtype=np.float64)
70  lon = np.array(f["navigation_data/longitude"][:], dtype=np.float64)
71  year: list[int] = list(
72  np.array(f["scan_line_attributes/year"][:], dtype=np.int32))
73  day: list[int] = list(
74  np.array(f["scan_line_attributes/day"][:], dtype=np.int32))
75  msec: list[int] = list(
76  np.array(f["scan_line_attributes/msec"][:], dtype=np.int32))
77  scantime = get_scantime(year, day, msec)
78  return lat, lon, scantime
79 
80 
81 def get_linenumber():
82  cf = currentframe()
83  return cf.f_back.f_lineno
84 
85 
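# Read the red-band image, latitude/longitude arrays and per-line scan times from the
# input granule. MODIS uses Lt_645; VIIRS uses Lt_667 (Lt_671 on Suomi-NPP) with
# BOWTIEDEL pixels set to NaN; OCI uses the rhot_red band nearest 650 nm and also
# returns the tilt angle (None for the other instruments).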
86 def read_file_l1b(path: str):
87  with nc.Dataset(path) as f:
88  if f.instrument == "MODIS":
89  data = np.array(
90  f["geophysical_data/Lt_645"][:], dtype=np.float64)
91  lat, lon, scantime = get_viirs_modis_lat_lon_scantime(f)
92  return lat, lon, data, scantime, None
93  if f.instrument == "VIIRS":
94  if f.platform == "Suomi-NPP":
95  wave = 671
96  else:
97  wave = 667
98  data = np.array(
99  f[f"geophysical_data/Lt_{wave}"][:], dtype=np.float64)
100  l2_flags_var = f["geophysical_data/l2_flags"]
101  flag_masks = l2_flags_var.flag_masks
102  flag_meanings = l2_flags_var.flag_meanings.split(sep=' ')
103  bit_bowtie = 1
104  for bit, name in zip(flag_masks, flag_meanings):
105  if name == "BOWTIEDEL":
106  bit_bowtie = bit
107  mask = (l2_flags_var[:] & bit_bowtie) > 0
108  data[mask] = np.NaN
109  lat, lon, scantime = get_viirs_modis_lat_lon_scantime(f)
110  return lat, lon, data, scantime, None
111  if f.instrument == "OCI" or f.instrument == "OCIS":
112  red_wv = 650.0
113  red_wavelength = f["sensor_band_parameters/red_wavelength"][:]
114  index = np.abs(red_wavelength - red_wv).argmin()
115  data = np.array(
116  f["observation_data/rhot_red"][:][index, :, :], dtype=np.float64)
117  lat = np.array(f["geolocation_data/latitude"][:], dtype=np.float64)
118  lon = np.array(f["geolocation_data/longitude"]
119  [:], dtype=np.float64)
120  scantime = np.array(f["scan_line_attributes/time"]
121  [:], dtype=np.float64)
122  time_coverage_start = f.time_coverage_start
123  if time_coverage_start[-1] == 'Z':
124  time_coverage_start = time_coverage_start[:-1]
125  date_scan = datetime.datetime.strptime(
126  time_coverage_start, '%Y-%m-%dT%H:%M:%S.%f')
127  scantime += time.mktime(datetime.datetime(date_scan.year, date_scan.month, date_scan.day).timetuple(
128  ))
129  tilt = f["navigation_data/tilt_angle"][:]
130  return lat, lon, data, scantime, tilt
131  print(
132  f"-Error- : instrument {f.instrument} not found. See line {get_linenumber()} in {filename_script}")
133  sys.exit(1)
134 
135 
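# Locate the chip's feature point within the granule: start from the cell whose
# (lat, lon) is closest to the feature coordinates, then test the surrounding
# quadrilaterals, doubling the search half-width (w = 1, 2, 4, 8, 16) until the
# enclosing cell is found. Returns None on bad geolocation or if the point does not
# fall inside the granule.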
136 def get_position(lat: np.ndarray, lon: np.ndarray, feature_lat, feature_lon):
137  diff = np.abs(lat - feature_lat) + np.abs(lon - feature_lon)
138  (x, y) = np.unravel_index(diff.argmin(), diff.shape)
139  point = Point(feature_lat, feature_lon)
140  w = 1
141  while w < 32:
142  i_min = max(0, x - w)
143  i_max = min(lat.shape[0] - 1, x + w)
144  j_min = max(0, y - w)
145  j_max = min(lat.shape[1] - 1, y + w)
146  for i in range(i_min, i_max):
147  for j in range(j_min, j_max):
148  p_0 = (lat[i, j], lon[i, j])
149  p_1 = (lat[i, j + 1], lon[i, j + 1])
150  p_2 = (lat[i + 1, j + 1], lon[i + 1, j + 1])
151  p_3 = (lat[i + 1, j], lon[i + 1, j])
152  if check_for_bad_geolocation(p_0) or check_for_bad_geolocation(p_1) or check_for_bad_geolocation(p_2) or check_for_bad_geolocation(p_3):
153  print(f"Bad geolocation in lat, lon array for line {i}, pixel {j}")
154  return None
155  polygon = Polygon([p_0, p_1, p_2, p_3, p_0])
156  # a bit more accurate determination of the scan and pixel number
157  if polygon.contains(point):
158  dist_dict = {}
159  min_dist = point.distance(Point(p_0))
160  i_min = i
161  j_min = j
162  dist_dict[(i, j + 1)] = point.distance(Point(p_1))
163  dist_dict[(i + 1, j + 1)] = point.distance(Point(p_2))
164  dist_dict[(i + 1, j)] = point.distance(Point(p_3))
165  for (i_k, j_k), dist in dist_dict.items():
166  if dist < min_dist:
167  min_dist = dist
168  i_min = i_k
169  j_min = j_k
170  return i_min, j_min, p_0, p_1, p_2, p_3, point
171  w = w * 2
172  print(
173  f"-Warning- : Could not locate the chip with lat={feature_lat} and lon={feature_lon} within the granule. See line {get_linenumber()} in {filename_script}")
174  return None
175 
176 
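# Rotated two-dimensional Lorentzian surface used to model the peak of the NCC
# (the NCC is remapped to 1 / (1 - NCC) before fitting, see get_lat_lon_fit_shifts);
# alpha rotates the GammaX/GammaY axes and the result is flattened for curve_fit.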
177 def lorentzian(latlon, x0, y0, GammaX, GammaY, Factor, alpha, Amplitude, offset):
178  lat, lon = latlon
179  lat = lat - x0
180  lon = lon - y0
181  x = lat * np.cos(alpha) + lon * np.sin(alpha)
182  y = - lat * np.sin(alpha) + lon * np.cos(alpha)
183  val: np.ndarray = offset + Amplitude / \
184  ((x - x0) ** 2 / GammaX ** 2 + (y - y0) ** 2 / GammaY ** 2 + Factor)
185  return val.ravel()
186 
187 
188 def read_chip_file(path: str):
189  with nc.Dataset(path) as f:
190  ecrx = f["ECR_x_coordinate_array"][:]
191  ecry = f["ECR_y_coordinate_array"][:]
192  ecrz = f["ECR_z_coordinate_array"][:]
193  ecef = {'proj': 'geocent', 'ellps': 'WGS84', 'datum': 'WGS84'}
194  lla = {'proj': 'latlong', 'ellps': 'WGS84', 'datum': 'WGS84'}
195  transformer = pyproj.Transformer.from_crs(ecef, lla)
196  lon, lat, _ = transformer.transform(ecrx, ecry, ecrz)
197  return f["Band_1"][:], lat, lon, f.FEATURELATITUDE, f.FEATURELONGITUDE
198 
199 
200 def parse_command_line():
201  parser = argparse.ArgumentParser(
202  prog=PROGRAM_NAME,
203  description='''\
204  Output:
205  scantime - scantime
206  granule - input L1B granules
207  chip - input chip file
208  feature_latitude - latitude of the center of the chip
209  feature_longitude - longitude of the center of the chip
210  delta_latitude - latitude shift, latitude coordinate of the peak of NCC
211  delta_longitude - longitude shift, longitude coordinate of the peak of NCC
212  pixel_number - pixel number between 0 and pixels_per_line-1 (or ccd_pixels-1 for OCI)
213  confidence_level - the maximum value of the normalized cross correlation (NCC) within a sliding lat/lon window
214  tilt - tilt angle
215  fit_delta_latitude - latitude shift from the Lorentzian fit of NCC
216  fit_delta_longitude - longitude shift from the Lorentzian fit of NCC
217  r_squared - R_squared of the Lorentzian fit
218  ''',
219  formatter_class=argparse.RawDescriptionHelpFormatter,
220  epilog='''\
221  Additional output info:
222  scantime is the scan time of the line where the feature pixel was found.
223  delta_latitude and delta_longitude mean that a point in the chip with (lat,lon) coordinates corresponds to a point in the granule with (lat + delta_latitude, lon + delta_longitude).
224  The maximum possible value of confidence_level is 1, which means a 100% match. Higher values are better. Values lower than 0.5 indicate a poor match.
225  fit_delta_latitude, fit_delta_longitude and r_squared can be NaN if the fit does not converge.
226  ''')
227  parser.add_argument("-v", "--version", action="version", version=VERSION)
228  parser.add_argument(
229  "--ifile", help="input L1B netcdf file", type=str, required=True)
230  parser.add_argument(
231  "--ilist",
232  help="input chip file list, one filename per line",
233  type=str, required=True)
234  parser.add_argument("--ofile", help="output csv", type=str, required=True)
235  parser.add_argument("--cloudmask", help="input cloudmask file", type=str)
236  parser.add_argument("--odebugfile", help="output debug file", type=str)
237  parser.add_argument("--subpixels", help="number of subpixels", type=int)
238  parser.add_argument(
239  "--window_size", help="the size of sliding window in sensor pixels", type=int)
240  parser.add_argument(
241  "--confidence", help="minimum confidence level to be included. Default is 0.5", type=float, default=0.5)
242  parser.add_argument('--debug', action='store_true')
243  parser.add_argument('--no-debug', dest='debug', action='store_false')
244  parser.set_defaults(debug=False)
245  if len(sys.argv) == 1:
246  parser.print_help()
247  sys.exit(0)
248  args = parser.parse_args()
249  return args
250 
251 
252 def ini_input_data(lat, lon, path_chip):
253  tm_band, lat_chip, lon_chip, feature_lat, feature_lon = read_chip_file(
254  path_chip)
255  return get_position(lat, lon, feature_lat, feature_lon), tm_band, lat_chip, lon_chip
256 
257 
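# Resample the chip band from its curvilinear lat/lon arrays onto a regular lat/lon
# grid with nearest-neighbour griddata, preserving the direction (ascending or
# descending) of the original latitude and longitude axes.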
258 def make_rectangular_grid(lat: np.ndarray, lon: np.ndarray, data: np.ndarray):
259  len_x, len_y = lat.shape
260 
261  lat_dir = lat[1, 0] - lat[0, 0] > 0
262  lon_dir = lon[0, 1] - lon[0, 0] > 0
263  # find max and min
264  lat_max = np.min(lat.max(axis=0))
265  lat_min = np.max(lat.min(axis=0))
266  lon_max = np.min((lon.max(axis=1)))
267  lon_min = np.max((lon.min(axis=1)))
268  lat_st = lat_min
269  lat_end = lat_max
270  lon_st = lon_min
271  lon_end = lon_max
272  if not lat_dir:
273  lat_st = lat_max
274  lat_end = lat_min
275  if not lon_dir:
276  lon_st = lon_max
277  lon_end = lon_min
278 
279  lat_lin = np.linspace(lat_st, lat_end, len_x, endpoint=True)
280  lon_lin = np.linspace(lon_st, lon_end, len_y, endpoint=True)
281 
282  lon_grid, lat_grid = np.meshgrid(lon_lin, lat_lin)
283  data_gridded = griddata((lat.flatten(), lon.flatten()), data.flatten(),
284  (lat_grid.flatten(), lon_grid.flatten()), method="nearest")
285  return data_gridded.reshape(len_x, len_y), lat_lin, lon_lin
286 
287 
288 def extract_scene(lat, lon, i, j, w, h, image=None):
289  sc_lat = deepcopy(lat[i:i + w, j:j + h])
290  sc_lon = deepcopy(lon[i:i + w, j:j + h])
291  if image is not None:
292  sc_image = deepcopy(image[i:i + w, j:j + h])
293  return sc_lat, sc_lon, sc_image
294  else:
295  return sc_lat, sc_lon
296 
297 
298 def get_match_layers(rbs, point, sc_lon, sc_lat, sc_image, shift_lon,
299  shift_lat):
300  sc_lon_shift = sc_lon - point.y - shift_lon
301  sc_lat_shift = sc_lat - point.x - shift_lat
302  sc_image_copy = deepcopy(sc_image)
303  tm_sc_band_n = rbs(np.column_stack(
304  [sc_lat_shift.flatten(), sc_lon_shift.flatten()]))
305  tm_sc_band_n = tm_sc_band_n.reshape(sc_image_copy.shape)
306  sc_image_copy[np.isnan(tm_sc_band_n)] = np.NaN
307  tm_sc_band_n[np.isnan(sc_image_copy)] = np.NaN
308  return sc_image_copy, tm_sc_band_n
309 
310 
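# Normalized cross correlation for a single (shift_lat, shift_lon) offset: the chip is
# resampled onto the shifted scene grid, both layers are standardized (zero mean, unit
# standard deviation over valid pixels), and the mean of their product is returned.
# Returns -1.0 when fewer than min_number_of_points pixels are valid.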
311 def get_cross_normalized_correlation(rbs, point, sc_lon, sc_lat, sc_image, shift_lon,
312  shift_lat):
313  sc_image_copy, tm_sc_band_n = get_match_layers(rbs, point, sc_lon, sc_lat, sc_image, shift_lon,
314  shift_lat)
315  # check if it makes sense to compute it :
316  valid_pnts = np.sum(np.isfinite(tm_sc_band_n))
317  if valid_pnts < min_number_of_points:
318  return -1.0
319  # shift mean
320  sc_image_copy = sc_image_copy - np.nanmean(sc_image_copy)
321  tm_sc_band_n = tm_sc_band_n - np.nanmean(tm_sc_band_n)
322  sc_image_copy /= np.nanstd(sc_image_copy)
323  tm_sc_band_n /= np.nanstd(tm_sc_band_n)
324  ncc = np.nansum(sc_image_copy * tm_sc_band_n) / \
325  np.sum(~np.isnan(tm_sc_band_n))
326  return ncc
327 
328 
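# Sub-pixel refinement of the NCC peak: transform the NCC surface to 1 / (1 - NCC),
# fit it with the rotated Lorentzian, and return the fitted parameters, the fitted
# surface and the R squared of the fit; returns None if curve_fit does not converge.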
329 def get_lat_lon_fit_shifts(ncc_original, lat_grid, lon_grid, delta_latitude, delta_longitude):
330  ncc_fit = 1 / (1 - ncc_original)
331  gamma_lat = np.abs(lat_grid[1] - lat_grid[0]) * 100
332  gamma_lon = np.abs(lon_grid[1] - lon_grid[0]) * 100
333  latlon = np.meshgrid(lat_grid, lon_grid)
334  try:
335  p0 = delta_latitude, delta_longitude, gamma_lat, gamma_lon, 1, 0, 100, 0
336  popt, pcov = optimize.curve_fit(lorentzian, latlon, ncc_fit.ravel(),
337  p0=p0, bounds=((-0.1, -0.1, -1, -1, -np.inf, -np.pi / 2, 0, -np.inf),
338  (0.1, 0.1, 1, 1, np.inf, np.pi / 2, np.inf, np.inf)))
339  x0, y0, GammaX, GammaY, Factor, alpha, Amplitude, offset = popt
340  fit = lorentzian(latlon, x0, y0, GammaX, GammaY,
341  Factor, alpha, Amplitude, offset)
342  fit = fit.reshape(ncc_fit.shape)
343  residual = ncc_fit - fit
344  residual *= residual
345  ss_res = np.sum(residual)
346  ss_data = np.sum((ncc_fit - np.mean(ncc_fit)) ** 2)
347  r_squared = 1 - ss_res / ss_data
348  return x0, y0, GammaX, GammaY, Factor, alpha, Amplitude, offset, fit, r_squared
349  except Exception as e:
350  print(e)
351  return None
352 
353 
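# Main driver: for each chip, locate it in the granule, grid the chip band and smooth
# it with a uniform filter (sensor view), sweep the NCC over a len_ncc x len_ncc grid
# of lat/lon shifts, refine the peak with the Lorentzian fit, optionally write debug
# layers to a netCDF file, and collect one CSV row per chip whose confidence exceeds
# the threshold.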
354 def get_lat_lon_shift(l1b_path: str, chips_list: List[str], output_file_name: str, debug_mode=False, **kwargs):
355  lat, lon, image, scantime, tilt_angle = read_file_l1b(l1b_path)
356  subpixels_set = subpixels_default  # can't be more than subpixels_default_max (30, roughly 1 km sensor pixel / 30 m chip pixel)
357  # the sliding window size in 1km resolution
358  window_size_set = window_size_default
359  coeff_scale = 1
360  min_included_ncc = -1
361  if "confidence" in kwargs:
362  if not kwargs["confidence"] is None:
363  min_included_ncc = kwargs["confidence"]
364  w = 30  # half width of the crop
365  h = w
366 
367  if "subpixels" in kwargs:
368  if not kwargs["subpixels"] is None:
369  if kwargs["subpixels"] > subpixels_default_max:
370  print("Warning: the subpixel count can't exceed {}".format(
371  subpixels_default_max))
372  else:
373  subpixels_set = kwargs["subpixels"]
374  if "window_size" in kwargs:
375  if not kwargs["window_size"] is None:
376  if kwargs["window_size"] > w:
377  print("Warning: window_size can't exceed {}".format(w))
378  else:
379  window_size_set = kwargs["window_size"]
380  # set debug mode if odebug
381  if "odebugfile" in kwargs:
382  if not kwargs["odebugfile"] is None:
383  debug_mode = True
384 
385  len_ncc = ((subpixels_set * window_size_set) // 2) * 2 + \
386  1 # number of steps, window size
387  coeff_scale = subpixels_default_max / subpixels_set
388  wide = len_ncc // 2 # half width
389 
390  # setting the cloud mask
391  if "cloud_mask_file" in kwargs:
392  if not kwargs["cloud_mask_file"] is None:
393  cld_mask = get_cloud_mask(kwargs["cloud_mask_file"])
394  image[cld_mask == 1] = np.NaN
395  # data in the output file, csv format
396  data_out: List[Dict] = []
397 
398  for chip_path in chips_list:
399  start_time = time.time()
400  # getting chip position within the scene
401  position_of_the_chip, tm_band, lat_chip, lon_chip = ini_input_data(lat, lon,
402  chip_path)
403  if position_of_the_chip is None:
404  print(f"Skipping the chip {chip_path} ...")
405  continue
406  i_cell, j_cell, p0, p1, p2, p3, point = position_of_the_chip
407  if i_cell < 2 * w or (lat.shape[0] - i_cell) < 2 * w:
408  print(
409  f"The chip {chip_path} is too close to the swath edge : line_number={i_cell}. Skipping ...")
410  continue
411  if j_cell < 2 * h or (lat.shape[1] - j_cell) < 2 * h:
412  print(
413  f"The chip {chip_path} is too close to the swath edge : pixel_number={j_cell}. Skipping ...")
414  continue
415  tb_band_gridded, lat_lin, lon_lin = make_rectangular_grid(
416  lat=lat_chip, lon=lon_chip, data=tm_band)
417  if debug_mode:
418  tb_band_gridded_original = deepcopy(tb_band_gridded)
419  # apply sensor view (uniform filter)
420  tb_band_gridded = uniform_filter(tb_band_gridded, 33)
421  step_size_lat = np.abs(lat_lin[1] - lat_lin[0]) * coeff_scale
422  step_size_lon = np.abs(lon_lin[1] - lon_lin[0]) * coeff_scale
423  shift_lon_array = np.linspace(
424  step_size_lon * (-wide), step_size_lon * wide, len_ncc)
425  shift_lat_array = np.linspace(
426  step_size_lat * (-wide), step_size_lat * wide, len_ncc)
427  rbs = RegularGridInterpolator(
428  (lat_lin - point.x, lon_lin - point.y), tb_band_gridded, method="linear", bounds_error=False)
429  sc_lat, sc_lon, sc_image = extract_scene(
430  lat, lon, i_cell - w, j_cell - h, w * 2 + 1, h * 2 + 1, image)
431  ncc_array = np.ones((len_ncc, len_ncc)) * -1.0
432  # check if sc_image has enough valid pixels
433  valid_pnts = np.sum(np.isfinite(sc_image))
434  if valid_pnts < min_number_of_points:
435  print(
436  f"Number of valid (cloud free) points in the scene {valid_pnts} is too low. Skipping the chip {chip_path}")
437  continue
438  for ind_lat in range(len_ncc):
439  for ind_lon in range(len_ncc):
440  shift_lat = shift_lat_array[ind_lat]
441  shift_lon = shift_lon_array[ind_lon]
442  ncc = get_cross_normalized_correlation(rbs, point, sc_lon, sc_lat, sc_image,
443  shift_lon,
444  shift_lat)
445  ncc_array[ind_lat, ind_lon] = ncc
446 
447  print("--- Execution time: {} seconds ---".format(time.time() - start_time))
448  print(f"Chip {chip_path} has been processed.")
449  (ilat, ilon) = np.unravel_index(ncc_array.argmax(), ncc_array.shape)
450  delta_latitude = shift_lat_array[ilat]
451  delta_longitude = shift_lon_array[ilon]
452  ncc_array[np.isnan(ncc_array)] = -1.0
453  max_ncc = np.nanmax(ncc_array)
454  if max_ncc < min_included_ncc:
455  print(
456  f"The confidence level is {max_ncc} which is lower than the threshold = {min_included_ncc}. Skipping ..")
457  continue
458  if tilt_angle is None:
459  tilt = 0.0
460  else:
461  tilt = tilt_angle[i_cell]
462  # computing the Lorentzian fit parameters
463  result_out = get_lat_lon_fit_shifts(
464  ncc_array, shift_lat_array, shift_lon_array, delta_latitude, delta_longitude)
465  if result_out:
466  x0_fit, y0_fit, GammaX, GammaY, Factor, alpha, Amplitude, offset, fit, r_squared = result_out
467  else:
468  print(f"Warning: lorentzian fit for {chip_path} has not converged")
469  x0_fit = np.NAN
470  y0_fit = np.NAN
471  r_squared = np.NAN
472  fit = np.ones(ncc_array.shape) * np.NAN
473  alpha = np.NAN
474  Amplitude = np.NAN
475  offset = np.NAN
476  Factor = np.NAN
477  GammaX = np.NAN
478  GammaY = np.NAN
479  if debug_mode:
480  mode = "a"
481  out_ncc_filename = f"ncc_{RUNNING_TIME}_{os.path.basename(l1b_path)}.nc"
482  out_ncc_filename = os.path.join(
483  os.path.dirname(output_file_name), out_ncc_filename)
484  if "odebugfile" in kwargs:
485  if not kwargs["odebugfile"] is None:
486  out_ncc_filename = kwargs["odebugfile"]
487  if not os.path.exists(out_ncc_filename):
488  mode = "w"
489  with nc.Dataset(out_ncc_filename, mode=mode) as out_ncc:
490  out_ncc: nc.Dataset = out_ncc
491  if mode == "w":
492  out_ncc.createDimension("x", len_ncc)
493  out_ncc.createDimension("y", len_ncc)
494  out_ncc.createDimension("lat", sc_image.shape[1])
495  out_ncc.createDimension("lon", sc_image.shape[0])
496  out_ncc.version = VERSION
497  out_ncc.runtime = RUNNING_TIME
498  out_ncc.setncattr("l1b_file", l1b_path)
499  out_ncc.setncattr("chips",",".join(chips_list))
500  for key,val in kwargs.items():
501  if val is not None:
502  out_ncc.setncattr(key,val)
503  else:
504  out_ncc.setncattr(key,"")
505  grp: nc.Group = out_ncc.createGroup(
506  os.path.basename(chip_path))
507  grp.delta_latitude = delta_latitude
508  grp.delta_longitude = delta_longitude
509  grp.line = i_cell
510  grp.pixel = j_cell
511  wkt_polygon_str : str = Polygon([p0, p1, p2, p3]).wkt
512  wkt_point_str : str = point.wkt
513  grp.setncattr("Point",wkt_point_str)
514  grp.setncattr("Bounding_polygon",wkt_polygon_str)
515  grp.lat_fit = x0_fit
516  grp.lon_fit = y0_fit
517  grp.alpha = alpha
518  grp.Amplitude = Amplitude
519  grp.offset = offset
520  grp.Factor = Factor
521  grp.GammaX = GammaX
522  grp.GammaY = GammaY
523  grp.r_squared = r_squared
524  grp.tilt = tilt
525  grp.feature_latitude = point.x
526  grp.feature_longitude = point.y
527  grp.createDimension("chip_lat_len", lat_lin.size)
528  grp.createDimension("chip_lon_len", lon_lin.size)
529  var_grp_local = grp.createVariable(varname="lat_lin", dimensions='chip_lat_len',
530  datatype='f4', zlib=True)
531  var_grp_local[:] = lat_lin
532  var_grp_local = grp.createVariable(varname="lon_lin", dimensions='chip_lon_len',
533  datatype='f4', zlib=True)
534  var_grp_local[:] = lon_lin
535  var_grp_local = grp.createVariable(varname="tm_band", dimensions=('chip_lat_len', 'chip_lon_len'),
536  datatype='f4', zlib=True)
537  var_grp_local[:] = tb_band_gridded_original
538  mat_ncc = grp.createVariable(varname="ncc", dimensions=('x', 'y'),
539  datatype='f4', zlib=True)
540  mat_ncc[:] = ncc_array
541  mat_fit = grp.createVariable(varname="fit", dimensions=('x', 'y'),
542  datatype='f4', zlib=True)
543  mat_fit[:] = fit
544  lat_array = grp.createVariable(varname="latGrid", dimensions=('x'),
545  datatype='f4', zlib=True)
546  lat_array[:] = shift_lat_array
547 
548  lon_array = grp.createVariable(varname="lonGrid", dimensions=('y'),
549  datatype='f4', zlib=True)
550  lon_array[:] = shift_lon_array
551 
552  sc_image_crop, chip_downsample = get_match_layers(rbs, point, sc_lon, sc_lat, sc_image,
553  delta_longitude,
554  delta_latitude)
555  chip = grp.createVariable(varname="chip", dimensions=('lon', 'lat'),
556  datatype='f4', zlib=True)
557  chip[:] = chip_downsample
558  crop = grp.createVariable(varname="crop", dimensions=('lon', 'lat'),
559  datatype='f4', zlib=True)
560  crop[:] = sc_image_crop
561  scene = grp.createVariable(varname="scene",
562  dimensions=('lon', 'lat'),
563  datatype='f4', zlib=True)
564  scene[:] = sc_image
565  lat_crop = grp.createVariable(varname="lat",
566  dimensions=('lon', 'lat'),
567  datatype='f4', zlib=True)
568  lat_crop[:] = sc_lat
569  lon_crop = grp.createVariable(varname="lon",
570  dimensions=('lon', 'lat'),
571  datatype='f4', zlib=True)
572  lon_crop[:] = sc_lon
573  granule_name_len = len(os.path.basename(l1b_path))
574  chip_name_len = len(os.path.basename(chip_path))
575  data_out.append({'scantime'.ljust(14): f"{scantime[i_cell]:14.3f}"})
576  data_out[-1]["granule".ljust(granule_name_len)
577  ] = os.path.basename(l1b_path)
578  data_out[-1]['chip'.ljust(chip_name_len)] = os.path.basename(chip_path)
579  data_out[-1]['feature_latitude'] = f"{point.x:16.7f}"
580  data_out[-1]['feature_longitude'] = f"{point.y:17.7f}"
581  data_out[-1]['delta_latitude'] = f"{delta_latitude:14.7f}"
582  data_out[-1]['delta_longitude'] = f"{delta_longitude:15.7f}"
583  data_out[-1]['pixel_number'] = f"{j_cell:12d}"
584  data_out[-1]['confidence_level'] = f"{max_ncc:16.7f}"
585  data_out[-1]['tilt'.rjust(10)] = f"{tilt:10.5f}"
586  if abs(x0_fit) > 1.0 or abs(y0_fit) > 1.0:
587  print(f"Fit did not converge for {x0_fit} and {y0_fit}")
588  r_squared = np.NAN
589  if not np.isnan(r_squared):
590  data_out[-1]['fit_delta_latitude'] = f"{x0_fit:18.7f}"
591  data_out[-1]['fit_delta_longitude'] = f"{y0_fit:18.7f}"
592  data_out[-1]['r_squared'.rjust(15)] = f"{r_squared:15.7f}"
593  else:
594  data_out[-1]['fit_delta_latitude'] = "NaN".rjust(18)
595  data_out[-1]['fit_delta_longitude'] = "NaN".rjust(18)
596  data_out[-1]['r_squared'.rjust(15)] = "NaN"
597  if len(data_out) > 0:
598  fields = []
599  for key in data_out[0]:
600  fields.append(key)
601  with open(output_file_name, 'w', newline='') as output_csv:
602  fields = []
603  for key in data_out[0]:
604  fields.append(key)
605  recorder = csv.DictWriter(output_csv, fieldnames=fields)
606  recorder.writeheader()
607  recorder.writerows(data_out)
608  return 0
609  else:
610  return 110
611 
612 
613 if __name__ == "__main__":
614  print(PROGRAM_NAME, VERSION)
615  args = parse_command_line()
616  print("You are running {}, version {}. Current time is {}".format(
617  PROGRAM_NAME, VERSION, running_time))
618  l1b_path = args.ifile
619  chips_list = read_file_list(args.ilist)
620  ofile = args.ofile
621  cloud_mask_file = args.cloudmask
622  window_size = args.window_size
623  subpixels = args.subpixels
624  ret_code = get_lat_lon_shift(
625  l1b_path, chips_list, ofile, debug_mode=args.debug, cloud_mask_file=cloud_mask_file, window_size=window_size,
626  subpixels=subpixels, confidence=args.confidence, odebugfile=args.odebugfile)
627  sys.exit(ret_code)
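
For reference, get_lat_lon_shift can also be called directly from Python rather than through the command line. The sketch below is illustrative only: the file names are placeholders, the keyword arguments mirror the command-line options parsed by parse_command_line, and it assumes geo_eval.py is importable from the current Python path.

 from geo_eval import get_lat_lon_shift

 # Hypothetical inputs: one L1B granule and a single chip file (placeholder names).
 ret_code = get_lat_lon_shift("L1B_granule.nc", ["chip_0001.nc"], "shifts.csv",
                              debug_mode=False,
                              cloud_mask_file=None,  # optional cloud mask (cloudmask option)
                              window_size=5,         # sliding window size in sensor pixels
                              subpixels=15,          # sub-pixel sampling, at most 30
                              confidence=0.5,        # minimum NCC to keep a match
                              odebugfile=None)       # set to a path to force debug output
 # get_lat_lon_shift returns 0 on success and 110 if no chip produced a usable match.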