import argparse
import datetime
import pathlib
import sys
import time
from os.path import basename
from shutil import copyfile as cp

import netCDF4
import numpy as np

versionStr = "1.1 (2024-03-15)"
                 north=None, south=None, west=None, east=None,
                 spixl=None, epixl=None, sline=None, eline=None,

        self.ifile = pathlib.Path(ifile)
        self.ofile = pathlib.Path(ofile)
        print('Extracting', srcfile)

        # source granule opened read-only; the extracted copy opened for in-place edits
        infile = netCDF4.Dataset(srcfile, 'r')
        outfile = netCDF4.Dataset(dstfile, 'r+')

        # re-opened here ahead of the metadata updates below
        infile = netCDF4.Dataset(srcfile, 'r')
        outfile = netCDF4.Dataset(dstfile, 'r+')
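        # the extracted copy still carries the source granule's time_coverage_* attributes,
        # so the block below rebuilds them from the subset's own scan times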
        # first and last scan times in the source granule and in the extracted copy
        infile_start_sec = infile.groups['scan_line_attributes'].variables['time'][0]
        infile_end_sec = infile.groups['scan_line_attributes'].variables['time'][
            infile.dimensions['number_of_scans'].size - 1]
        outfile_start_sec = outfile.groups['scan_line_attributes'].variables['time'][0]
        outfile_end_sec = outfile.groups['scan_line_attributes'].variables['time'][
            outfile.dimensions['number_of_scans'].size - 1]
        infile_start_time = infile.time_coverage_start
        infile_end_time = infile.time_coverage_end

        start_form = datetime.datetime.strptime(infile_start_time[0:20] + '000',
                                                '%Y-%m-%dT%H:%M:%S.%f')
        end_form = datetime.datetime.strptime(infile_end_time[0:20] + '000',
                                              '%Y-%m-%dT%H:%M:%S.%f')
        epoch = datetime.datetime.strptime('1970 01 01 00 00 00.000',
                                           '%Y %m %d %H %M %S.%f')
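        # e.g. "2024-03-15T12:34:56.123456Z"[0:20] + '000' -> "2024-03-15T12:34:56.000",
        # which matches the '%Y-%m-%dT%H:%M:%S.%f' pattern; the fractional seconds are dropped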
        diff_start = start_form - epoch
        diff_end = end_form - epoch
        diff_sec_start = diff_start.total_seconds()
        diff_sec_end = diff_end.total_seconds()

        diff_infile_outfile_start = outfile_start_sec - infile_start_sec
        diff_infile_outfile_end = outfile_end_sec - infile_end_sec

        outfile_tot_start_sec = diff_sec_start + diff_infile_outfile_start
        outfile_tot_end_sec = diff_sec_end + diff_infile_outfile_end

        outfile_start_time_since = time.gmtime(outfile_tot_start_sec)
        outfile_end_time_since = time.gmtime(outfile_tot_end_sec)
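        # outfile_tot_*_sec are the subset's coverage start/end as seconds since the Unix
        # epoch: the source granule's coverage times shifted by the difference between the
        # subset's and the source's first/last scan times. time.gmtime() converts them back
        # to UTC calendar fields for the attribute strings built below.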
        ostart_y = outfile_start_time_since.tm_year
        ostart_mon = "{0:0=2d}".format(outfile_start_time_since.tm_mon)
        ostart_d = "{0:0=2d}".format(outfile_start_time_since.tm_mday)
        ostart_h = "{0:0=2d}".format(outfile_start_time_since.tm_hour)
        ostart_min = "{0:0=2d}".format(outfile_start_time_since.tm_min)
        ostart_s = "{0:0=2d}".format(outfile_start_time_since.tm_sec)

        oend_y = outfile_end_time_since.tm_year
        oend_mon = "{0:0=2d}".format(outfile_end_time_since.tm_mon)
        oend_d = "{0:0=2d}".format(outfile_end_time_since.tm_mday)
        oend_h = "{0:0=2d}".format(outfile_end_time_since.tm_hour)
        oend_min = "{0:0=2d}".format(outfile_end_time_since.tm_min)
        oend_s = "{0:0=2d}".format(outfile_end_time_since.tm_sec)
        outfile.time_coverage_start = (str(ostart_y) + '-' + str(ostart_mon) + '-' + str(ostart_d) +
                                       'T' + str(ostart_h) + ':' + str(ostart_min) + ':' + str(ostart_s))
        outfile.time_coverage_end = (str(oend_y) + '-' + str(oend_mon) + '-' + str(oend_d) +
                                     'T' + str(oend_h) + ':' + str(oend_min) + ':' + str(oend_s))
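        # both attributes now read "YYYY-MM-DDTHH:MM:SS" in UTC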
        outfile.set_auto_mask(False)

        # 0-based indices of the last scan line and last SWIR pixel in the extracted file
        number_of_scans = outfile.dimensions['number_of_scans'].size - 1
        SWIR_pixels = outfile.dimensions['SWIR_pixels'].size - 1

        latitude = outfile.groups['geolocation_data'].variables['latitude']
        longitude = outfile.groups['geolocation_data'].variables['longitude']

        lon_min = longitude[0, 0]
        lon_max = longitude[number_of_scans, SWIR_pixels]
        lat_min = latitude[0, 0]
        lat_max = latitude[number_of_scans, SWIR_pixels]
        lat_add = float((lat_max - lat_min) / 20)

        if lat_add > 0 and int(number_of_scans / lat_add) != 0:
            for i in range(0, number_of_scans - 1, int(number_of_scans / lat_add)):
                lon_r.append(SWIR_pixels)
        else:
            for i in range(0, number_of_scans - 1, 1):
                lon_r.append(SWIR_pixels)

        lat_l = list(reversed(lat_r))

        lon_u = [SWIR_pixels, (SWIR_pixels / 2), 0]
        lat_u = [number_of_scans, number_of_scans, number_of_scans]
        lon_d = list(reversed(lon_u))
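        # lat_add is the latitude span of the scene divided by 20 degrees; used as the
        # divisor above it gives a sampling stride of about number_of_scans / lat_add,
        # i.e. roughly one ring point per 20 degrees of latitude along each side edge,
        # while lat_u/lon_u describe the top edge by its two corners and midpoint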
        for i in range(len(lat_u)):
            lat_values_u.append(float(lat_u[i]))
            lon_values_u.append(float(lon_u[i]))

        for i in range(len(lat_l)):
            lat_values_l.append(float(lat_l[i]))
            lon_values_l.append(float(lon_l[i]))

        for i in range(len(lat_d)):
            lat_values_d.append(float(lat_d[i]))
            lon_values_d.append(float(lon_d[i]))

        for i in range(len(lat_r)):
            lat_values_r.append(float(lat_r[i]))
            lon_values_r.append(float(lon_r[i]))
            p_seq.append(np.dtype('int32').type(i + 1))

        args_lat = (lat_values_u, lat_values_l, lat_values_d, lat_values_r)
        args_lon = (lon_values_u, lon_values_l, lon_values_d, lon_values_r)
        lat_values = np.concatenate(args_lat)
        lon_values = np.concatenate(args_lon)
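        # the (scan, pixel) ring indices are concatenated edge by edge in the order upper,
        # left, lower, right; p_seq numbers the ring points 1..N as 32-bit integers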
        for i in range(0, len(lat_values)):
            g_lat.append(latitude[int(lat_values[i])][int(lon_values[i])])
            g_lon.append(longitude[int(lat_values[i])][int(lon_values[i])])
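        # g_lat/g_lon now hold the geographic coordinates of each sampled (scan, pixel)
        # ring point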
        outfile.setncattr('GRingPointLatitude', g_lat)
        outfile.setncattr('GRingPointLongitude', g_lon)
        outfile.setncattr('GRingPointSequenceNo', p_seq)
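        # the GRingPoint* global attributes describe the bounding polygon of the extracted
        # granule, a convention commonly used in NASA Earth science granule metadata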
        # (body of a small helper returning an array's minimum and maximum)
        return arr.min(), arr.max()

        lat_min, lat_max = latitude[0, 0], latitude[number_of_scans, SWIR_pixels]
        lon_min, lon_max = longitude[0, 0], longitude[number_of_scans, SWIR_pixels]
        outfile.setncattr('geospatial_lat_min', lat_min)
        outfile.setncattr('geospatial_lat_max', lat_max)
        outfile.setncattr('geospatial_lon_min', lon_min)
        outfile.setncattr('geospatial_lon_max', lon_max)
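        # note: the geospatial_* bounds are taken from the subset's corner pixels rather
        # than a full min/max scan of the geolocation arrays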
        print("north={} south={} west={} east={}".
              format(self.north, self.south, self.west, self.east))

        pl.lonlat2pixline(zero=False)
        self.spixl, self.epixl, self.sline, self.eline = \
            (pl.spixl, pl.epixl, pl.sline, pl.eline)
        subset = {'SWIR_pixels': [self.spixl, self.epixl],
                  'number_of_scans': [self.sline, self.eline]}
    args.epixl = infile.dimensions['SWIR_pixels'].size
    args.eline = infile.dimensions['number_of_scans'].size
    return args.spixl, args.epixl, args.sline, args.eline
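    # chk_pixl() hands back the pixel/line bounds with the end values set to the full
    # dimension sizes (the command-line defaults of -1 mean "extract to the end")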
if __name__ == "__main__":
    print("l1bextract_oci", versionStr)
    parser = argparse.ArgumentParser(
        description='Extract specified area from OCI Level 1B files.',
        epilog='Specify either geographic limits or pixel/line ranges, not both.')

    parser.add_argument('-v', '--verbose', help='print status messages',
                        action='store_true')
    parser.add_argument('ifile', help='Level 1B input file')
    parser.add_argument('ofile', nargs='?',
    group1 = parser.add_argument_group('geographic limits')
    group1.add_argument('-n', '--north', type=float, help='northernmost latitude')
    group1.add_argument('-s', '--south', type=float, help='southernmost latitude')
    group1.add_argument('-w', '--west', type=float, help='westernmost longitude')
    group1.add_argument('-e', '--east', type=float, help='easternmost longitude')
    group2 = parser.add_argument_group('pixel/line ranges (1-based)')
    group2.add_argument('--spixl', type=int, help='start pixel', default=1)
    group2.add_argument('--epixl', type=int, help='end pixel', default=-1)
    group2.add_argument('--sline', type=int, help='start line', default=1)
    group2.add_argument('--eline', type=int, help='end line', default=-1)
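    # example invocations (file names are hypothetical):
    #   l1bextract_oci -n 45 -s 40 -w -80 -e -70 oci_l1b_granule.nc subset.nc
    #   l1bextract_oci --spixl 100 --epixl 400 --sline 200 --eline 900 oci_l1b_granule.nc subset.nc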
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    # True when any pixel/line option was changed from its default
    pixel_bounds_specified = not (args.eline == -1 and args.sline == 1 and
                                  args.epixl == -1 and args.spixl == 1)
    infile = netCDF4.Dataset(args.ifile, 'r')

    args.spixl, args.epixl, args.sline, args.eline = chk_pixl(args, infile)
                   verbose=args.verbose)

    goodlatlons = None not in (this.north, this.south, this.west, this.east)
    if goodlatlons and pixel_bounds_specified:
        print("ERROR: Specify either geographic limits or pixel/line ranges, not both.")

    elif goodlatlons and not pixel_bounds_specified:
        status = this.getpixlin()
        if status not in (0, 110):
            print("No extract; lonlat2pixline status =", status)

    elif pixel_bounds_specified and not goodlatlons:
        pass
    else:
        print("No extract; subset not specified")