from os.path import basename
from shutil import copyfile as cp
import xml.etree.ElementTree as ET

versionStr = "1.3 (2024-09-23)"
north=None, south=None, west=None, east=None,
spixl=None, epixl=None, sline=None, eline=None,

self.ifile = pathlib.Path(ifile)
self.ofile = pathlib.Path(ofile)
print('Extracting', srcfile)

infile = netCDF4.Dataset(srcfile, 'r')
outfile = netCDF4.Dataset(dstfile, 'r+')
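# Carry the nadir_bin attribute into the extract's pixel coordinate frame;
# if the nadir bin lies beyond the requested end pixel, flag it as -1.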
nadir_bin = np.dtype('int32').type(infile.nadir_bin)
if (nadir_bin > self.epixl):
    outfile.nadir_bin = np.dtype('int32').type(-1)
else:
    outfile.nadir_bin = np.dtype('int32').type(nadir_bin - (self.spixl + 1))
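# Record the extract boundaries in the extract_pixel/line_start/stop global attributes;
# if the input already carries extract_* attributes (i.e., it is itself an extract),
# offset by them, otherwise use the requested pixel/line range directly.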
if 'extract_pixel_start' in infile.ncattrs():
    outfile.extract_pixel_start = np.dtype('int32').type(infile.extract_pixel_start + self.spixl + 1)
    outfile.extract_pixel_stop = np.dtype('int32').type(infile.extract_pixel_stop + self.epixl + 1)
    outfile.extract_line_start = np.dtype('int32').type(infile.extract_line_start + self.sline + 1)
    outfile.extract_line_stop = np.dtype('int32').type(infile.extract_line_stop + self.eline + 1)
else:
    outfile.extract_pixel_start = np.dtype('int32').type(self.spixl + 1)
    outfile.extract_pixel_stop = np.dtype('int32').type(self.epixl + 1)

    outfile.extract_line_start = np.dtype('int32').type(self.sline + 1)
    outfile.extract_line_stop = np.dtype('int32').type(self.eline + 1)
infile = netCDF4.Dataset(srcfile, 'r')
outfile = netCDF4.Dataset(dstfile, 'r+')
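# Recompute time_coverage_start/end for the extract. Per-bin times come from
# bin_attributes/nadir_view_time when present; otherwise the AttributeError
# fallback below reads geolocation_data/time_nadir instead.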
if 'nadir_view_time' in infile.groups['bin_attributes'].variables:
    infile_start_sec = infile.groups['bin_attributes'].variables['nadir_view_time'][0]
    if (np.ma.is_masked(infile_start_sec)):
        raise ValueError("iFile contains a negative start time for the variable nadir_view_time.")
    infile_end_sec = infile.groups['bin_attributes'].variables['nadir_view_time'][infile.dimensions['bins_along_track'].size - 1]

    outfile_start_sec = outfile.groups['bin_attributes'].variables['nadir_view_time'][0]
    outfile_end_sec = outfile.groups['bin_attributes'].variables['nadir_view_time'][outfile.dimensions['bins_along_track'].size - 1]

    infile_start_time = infile.time_coverage_start
    infile_end_time = infile.time_coverage_end

    start_form = datetime.datetime.strptime(infile_start_time[0:19], '%Y-%m-%dT%H:%M:%S')
    end_form = datetime.datetime.strptime(infile_end_time[0:19], '%Y-%m-%dT%H:%M:%S')
print("iFile contains a negative start time for the variable nadir_view_time.")
except AttributeError:
    if 'time_nadir' in infile.groups['geolocation_data'].variables:
        infile_start_sec: float = infile.groups['geolocation_data'].variables['time_nadir'][0]
        infile_end_sec = infile.groups['geolocation_data'].variables['time_nadir'][infile.dimensions['bins_along_track'].size - 1]

        outfile_start_sec = outfile.groups['geolocation_data'].variables['time_nadir'][0]
        outfile_end_sec = outfile.groups['geolocation_data'].variables['time_nadir'][outfile.dimensions['bins_along_track'].size - 1]

        infile_start_time = infile.time_coverage_start
        infile_end_time = infile.time_coverage_end

        start_form = datetime.datetime.strptime(infile_start_time[0:19], '%Y-%m-%dT%H:%M:%S')
        end_form = datetime.datetime.strptime(infile_end_time[0:19], '%Y-%m-%dT%H:%M:%S')
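# Shift the original coverage timestamps by the offset between the first/last
# bin times of the extract and those of the full input granule.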
diff_sec_start = start_form.timestamp()
diff_sec_end = end_form.timestamp()

diff_infile_outfile_start = outfile_start_sec - infile_start_sec
diff_infile_outfile_end = outfile_end_sec - infile_end_sec

outfile_tot_start_sec = diff_sec_start + diff_infile_outfile_start
outfile_tot_end_sec = diff_sec_end + diff_infile_outfile_end

outfile_start_time_since = time.gmtime(outfile_tot_start_sec)
outfile_end_time_since = time.gmtime(outfile_tot_end_sec)
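# time.gmtime() returns a UTC struct_time; the fields are zero-padded below so the
# rebuilt attributes keep the ISO 8601 form of the input, e.g. "2024-09-23T01:02:03Z".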
ostart_y = outfile_start_time_since.tm_year
ostart_mon = "{0:0=2d}".format(outfile_start_time_since.tm_mon)
ostart_d = "{0:0=2d}".format(outfile_start_time_since.tm_mday)
ostart_h = "{0:0=2d}".format(outfile_start_time_since.tm_hour)
ostart_min = "{0:0=2d}".format(outfile_start_time_since.tm_min)
ostart_s = "{0:0=2d}".format(outfile_start_time_since.tm_sec)

oend_y = outfile_end_time_since.tm_year
oend_mon = "{0:0=2d}".format(outfile_end_time_since.tm_mon)
oend_d = "{0:0=2d}".format(outfile_end_time_since.tm_mday)
oend_h = "{0:0=2d}".format(outfile_end_time_since.tm_hour)
oend_min = "{0:0=2d}".format(outfile_end_time_since.tm_min)
oend_s = "{0:0=2d}".format(outfile_end_time_since.tm_sec)

outfile.time_coverage_start = (str(ostart_y) + '-' + str(ostart_mon) + '-' + str(ostart_d) + 'T' +
                               str(ostart_h) + ':' + str(ostart_min) + ':' + str(ostart_s) + 'Z')
outfile.time_coverage_end = (str(oend_y) + '-' + str(oend_mon) + '-' + str(oend_d) + 'T' +
                             str(oend_h) + ':' + str(oend_min) + ':' + str(oend_s) + 'Z')
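# Rederive the geospatial metadata (corner lat/lon, WKT bounding polygon, and
# gring* attributes) from the extracted geolocation arrays.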
outfile.set_auto_mask(False)

if 'latitude' in outfile.groups['geolocation_data'].variables:
    bins_along_track = outfile.dimensions['bins_along_track'].size - 1
    bins_across_track = outfile.dimensions['bins_across_track'].size - 1

    latitude = outfile.groups['geolocation_data'].variables['latitude']
    longitude = outfile.groups['geolocation_data'].variables['longitude']

    lon_min = longitude[0, 0]
    lon_max = longitude[bins_along_track, bins_across_track]
    lat_min = latitude[0, 0]
    lat_max = latitude[bins_along_track, bins_across_track]
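    # Sample points along the edges of the extracted grid to outline it; lat_add
    # (the latitude span divided by 20) sets the along-track sampling step.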
    lat_add = float((lat_max - lat_min) / 20)

    for i in range(0, bins_along_track - 1, int(bins_along_track / lat_add)):
        lon_r.append(bins_across_track)

    lat_l = list(reversed(lat_r))

    lon_u = [bins_across_track, (bins_across_track / 2), 0]
    lat_u = [bins_along_track, bins_along_track, bins_along_track]
    lon_d = list(reversed(lon_u))
    for i in range(len(lat_u)):
        lat_values_u.append(float(lat_u[i]))
        lon_values_u.append(float(lon_u[i]))

    for i in range(len(lat_l)):
        lat_values_l.append(float(lat_l[i]))
        lon_values_l.append(float(lon_l[i]))

    for i in range(len(lat_d)):
        lat_values_d.append(float(lat_d[i]))
        lon_values_d.append(float(lon_d[i]))

    for i in range(len(lat_r)):
        lat_values_r.append(float(lat_r[i]))
        lon_values_r.append(float(lon_r[i]))

    p_seq.append(np.dtype('int32').type(i + 1))
    args_lat = (lat_values_u, lat_values_l, lat_values_d, lat_values_r)
    args_lon = (lon_values_u, lon_values_l, lon_values_d, lon_values_r)
    lat_values = np.concatenate(args_lat)
    lon_values = np.concatenate(args_lon)

    for i in range(0, len(lat_values)):
        g_lat.append(latitude[int(lat_values[i])][int(lon_values[i])])
        g_lon.append(longitude[int(lat_values[i])][int(lon_values[i])])
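    # Build the WKT polygon string, e.g. "POLYGON((lon1 lat1, lon2 lat2, ...))",
    # and store it along with the min/max lat/lon and gring point attributes.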
    geospatial_bounds_str = "POLYGON((" + ', '.join([f"{lon} {lat}" for lon, lat in zip(g_lon, g_lat)]) + '))'
    outfile.setncattr('geospatial_bounds', geospatial_bounds_str)

    outfile.setncattr('geospatial_lat_max', str(max(g_lat)))
    outfile.setncattr('geospatial_lat_min', str(min(g_lat)))
    outfile.setncattr('geospatial_lon_max', str(max(g_lon)))
    outfile.setncattr('geospatial_lon_min', str(min(g_lon)))

    outfile.setncattr('gringpointlatitude', ', '.join(map(str, g_lat[:-1])))
    outfile.setncattr('gringpointlongitude', ', '.join(map(str, g_lon[:-1])))
    outfile.setncattr('gringpointsequence', ', '.join(map(str, p_seq[:-1])))
print("north={} south={} west={} east={}".

pl.lonlat2pixline(zero=False)

(pl.spixl, pl.epixl, pl.sline, pl.eline)
subset = {'bins_across_track': [self.spixl, self.epixl],

args.epixl = infile.dimensions['bins_across_track'].size

args.eline = infile.dimensions['bins_along_track'].size
return args.spixl, args.epixl, args.sline, args.eline

args.eline = infile.dimensions['bins_along_track'].size
return args.spixl, args.epixl, args.sline, args.eline
if __name__ == "__main__":
    print("l1cextract", versionStr)

    parser = argparse.ArgumentParser(
        description='Extract specified area from OLCI Level 1C files.',
        epilog='Specify either geographic limits or pixel/line ranges, not both.')
    parser.add_argument('-v', '--verbose', help='print status messages',
    parser.add_argument('ifile', help='Level 1C input file')
    parser.add_argument('ofile',
    group1 = parser.add_argument_group('geographic limits')
    group1.add_argument('-n', '--north', type=float, help='northernmost latitude')
    group1.add_argument('-s', '--south', type=float, help='southernmost latitude')
    group1.add_argument('-w', '--west', type=float, help='westernmost longitude')
    group1.add_argument('-e', '--east', type=float, help='easternmost longitude')

    group2 = parser.add_argument_group('pixel/line ranges (1-based)')
    group2.add_argument('--spixl', type=int, help='start pixel', default=1)
    group2.add_argument('--epixl', type=int, help='end pixel', default=-1)
    group2.add_argument('--sline', type=int, help='start line', default=1)
    group2.add_argument('--eline', type=int, help='end line', default=-1)

    group3 = parser.add_argument_group('spex width (overwrites any pixel ranges or geographic limits)')
    group3.add_argument('--spex_width', help='spex width', action='store_true', default=None)
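    # Example invocations (file names here are hypothetical):
    #   l1cextract.py -n 45.0 -s 40.0 -w -80.0 -e -70.0 input_L1C.nc extract.nc
    #   l1cextract.py --spixl 100 --epixl 200 --sline 500 --eline 900 input_L1C.nc extract.nc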
    if len(sys.argv) == 1:

    args = parser.parse_args()

    infile = netCDF4.Dataset(args.ifile, 'r')

    if args.spex_width is None:

    if args.spex_width is not None:
    args.spixl, args.epixl, args.sline, args.eline = chk_pixl(args, infile)
    verbose=args.verbose)
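    # Exactly one selection mode may be given: geographic limits (north/south/west/east)
    # or explicit pixel/line indices; supplying both is reported as an error below.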
    goodlatlons = None not in (this.north, this.south, this.west, this.east)
    goodindices = None not in (this.spixl, this.epixl, this.sline, this.eline)
    if (goodlatlons and goodindices):
        print("ERROR: Specify either geographic limits or pixel/line ranges, not both.")

    status = this.getpixlin()
    if status not in (0, 110):
        print("No extract; lonlat2pixline status =", status)