
Commit a59e73a
modifications to read SWIR spectra
1 parent e5dec9a

File tree

12 files changed: +872, -37 lines

12 files changed

+872
-37
lines changed
2 binary files not shown.

hypernets_processor/calibration/calibrate.py

Lines changed: 10 additions & 0 deletions
@@ -231,10 +231,20 @@ def preprocess_l0(self, datasetl0, datasetl1a):
 
         flagval = 2 ** (self.context.get_config_value("outliers"))
 
+        print(np.where(mask > 0))
+
+        break
+        datasetl0["quality_flag"].values = [
+            flagval + datasetl0["quality_flag"].values[i] if mask[i] == 1 else
+            datasetl0["quality_flag"].values[i] for i in range(len(mask))]
+
+
+
         datasetl0["quality_flag"].values = [
             flagval + datasetl0["quality_flag"].values[i] if mask[i] == 1 else
             datasetl0["quality_flag"].values[i] for i in range(len(mask))]
 
+
         datasetl1a["quality_flag"].values = [
             flagval + datasetl1a["quality_flag"].values[i] if mask[i] == 1 else
             datasetl1a["quality_flag"].values[i] for i in range(len(mask))]

hypernets_processor/data_io/dataset_util.py

Lines changed: 132 additions & 0 deletions
@@ -220,6 +220,138 @@ def get_default_fill_value(dtype):
         elif dtype == np.float64:
             return np.float64(9.969209968386869E36)
 
+    @staticmethod
+    def _get_flag_encoding(da):
+        """
+        Returns flag encoding for flag type data array
+        :type da: xarray.DataArray
+        :param da: data array
+        :return: flag meanings
+        :rtype: list
+        :return: flag masks
+        :rtype: list
+        """
+
+        try:
+            flag_meanings = da.attrs["flag_meanings"].split()
+            flag_masks = [int(fm) for fm in da.attrs["flag_masks"].split(",")]
+        except KeyError:
+            raise KeyError(da.name + " not a flag variable")
+
+        return flag_meanings, flag_masks
+
+    @staticmethod
+    def unpack_flags(da):
+        """
+        Breaks down flag data array into dataset of boolean masks for each flag
+        :type da: xarray.DataArray
+        :param da: dataset
+        :return: flag masks
+        :rtype: xarray.Dataset
+        """
+
+        flag_meanings, flag_masks = DatasetUtil._get_flag_encoding(da)
+
+        ds = Dataset()
+        for flag_meaning, flag_mask in zip(flag_meanings, flag_masks):
+            ds[flag_meaning] = DatasetUtil.create_variable(list(da.shape), bool, dim_names=list(da.dims))
+            ds[flag_meaning] = (da & flag_mask).astype(bool)
+
+        return ds
+
+    @staticmethod
+    def set_flag(da, flag_name, error_if_set=False):
+        """
+        Sets named flag for elements in data array
+        :type da: xarray.DataArray
+        :param da: dataset
+        :type flag_name: str
+        :param flag_name: name of flag to set
+        :type error_if_set: bool
+        :param error_if_set: raises error if chosen flag is already set for any element
+        """
+
+        set_flags = DatasetUtil.unpack_flags(da)[flag_name]
+
+        if np.any(set_flags == True) and error_if_set:
+            raise ValueError("Flag " + flag_name + " already set for variable " + da.name)
+
+        # Find flag mask
+        flag_meanings, flag_masks = DatasetUtil._get_flag_encoding(da)
+        flag_bit = flag_meanings.index(flag_name)
+        flag_mask = flag_masks[flag_bit]
+
+        return da | flag_mask
+
+    @staticmethod
+    def unset_flag(da, flag_name, error_if_unset=False):
+        """
+        Unsets named flag for specified index of dataset variable
+        :type da: xarray.DataArray
+        :param da: data array
+        :type flag_name: str
+        :param flag_name: name of flag to unset
+        :type error_if_unset: bool
+        :param error_if_unset: raises error if chosen flag is already set at specified index
+        """
+
+        set_flags = DatasetUtil.unpack_flags(da)[flag_name]
+
+        if np.any(set_flags == False) and error_if_unset:
+            raise ValueError("Flag " + flag_name + " already set for variable " + da.name)
+
+        # Find flag mask
+        flag_meanings, flag_masks = DatasetUtil._get_flag_encoding(da)
+        flag_bit = flag_meanings.index(flag_name)
+        flag_mask = flag_masks[flag_bit]
+
+        return da & ~flag_mask
+
+    @staticmethod
+    def get_set_flags(da):
+        """
+        Return list of set flags for single element data array
+        :type da: xarray.DataArray
+        :param da: single element data array
+        :return: set flags
+        :rtype: list
+        """
+
+        if da.shape != ():
+            raise ValueError("Must pass single element data array")
+
+        flag_meanings, flag_masks = DatasetUtil._get_flag_encoding(da)
+
+        set_flags = []
+        for flag_meaning, flag_mask in zip(flag_meanings, flag_masks):
+            if (da & flag_mask):
+                set_flags.append(flag_meaning)
+
+        return set_flags
+
+    @staticmethod
+    def check_flag_set(da, flag_name):
+        """
+        Returns if flag for single element data array
+        :type da: xarray.DataArray
+        :param da: single element data array
+        :type flag_name: str
+        :param flag_name: name of flag to set
+        :return: set flags
+        :rtype: list
+        """
+
+        if da.shape != ():
+            raise ValueError("Must pass single element data array")
+
+        set_flags = DatasetUtil.get_set_flags(da)
+
+        if flag_name in set_flags:
+            return True
+        return False
+
+
+
 
 if __name__ == "__main__":
     pass
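The helpers added above work from the CF-style flag_meanings / flag_masks attributes on a flag variable. A minimal usage sketch follows; the two-flag variable is invented for illustration and is not one of the processor's datasets.

    import numpy as np
    import xarray as xr
    from hypernets_processor.data_io.dataset_util import DatasetUtil

    # Hypothetical flag variable with two flag bits declared via attributes.
    flags = xr.DataArray(np.array([0, 1, 3], dtype=np.uint32),
                         dims=["scan"], name="quality_flag")
    flags.attrs["flag_meanings"] = "lat_default lon_default"
    flags.attrs["flag_masks"] = "1,2"

    # One boolean mask per flag meaning.
    masks = DatasetUtil.unpack_flags(flags)
    print(masks["lat_default"].values)    # [False  True  True]
    print(masks["lon_default"].values)    # [False False  True]

    # Flags set on a single element, and a yes/no check for one flag.
    print(DatasetUtil.get_set_flags(flags[2]))                   # ['lat_default', 'lon_default']
    print(DatasetUtil.check_flag_set(flags[2], "lon_default"))   # True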

hypernets_processor/data_io/format/variables.py

Lines changed: 1 addition & 1 deletion
@@ -204,7 +204,7 @@
                                      "long_name": "",
                                      "units": "-"},
                       "encoding": {'dtype': np.uint32, "scale_factor": 0.01, "offset": 0.0}},
-    "digital_number": {"dim": [SCAN_DIM],
+    "digital_number": {"dim": [WL_DIM, SCAN_DIM],
                        "dtype": np.float32,
                        "attributes": {"standard_name": "digital_number",
                                       "long_name": "Digital number, raw data",

hypernets_processor/data_io/hypernets_reader.py

Lines changed: 34 additions & 31 deletions
@@ -16,7 +16,7 @@
 from hypernets_processor.data_io.format.header import HEADER_DEF
 from hypernets_processor.data_io.hypernets_ds_builder import HypernetsDSBuilder
 from hypernets_processor.version import __version__
-
+from hypernets_processor.data_io.dataset_util import DatasetUtil as du
 '''___Authorship___'''
 __author__ = "Clémence Goyens"
 __created__ = "12/2/2020"
@@ -187,10 +187,10 @@ def read_series(self, seq_dir, series, lat, lon, metadata, flag, fileformat):
         f = open(FOLDER_NAME + series[1], "rb")
         # Header definition with length, description and decoding format
 
-        #header = self.read_header(f, HEADER_DEF)
-        #print(header)
+        header = self.read_header(f, HEADER_DEF)
+        print(header)
 
-        pixCount= 2048 #header['Pixel Count']
+        pixCount= header['Pixel Count']
 
         # if bool(header) == False:
         #     print("Data corrupt go to next line")
@@ -251,14 +251,14 @@ def read_series(self, seq_dir, series, lat, lon, metadata, flag, fileformat):
 
         nextLine = True
         while nextLine:
-            # # if no reader comment those lines
-            # header = self.read_header(f, HEADER_DEF)
-            # print(header)
-            # if bool(header) == False:
-            #     print("Data corrupt go to next line")
-            #     break
-            #     continue
-            pixCount= 2048#header['Pixel Count']
+            # if no header comment those lines
+            header = self.read_header(f, HEADER_DEF)
+            print(header)
+            if bool(header) == False:
+                print("Data corrupt go to next line")
+                break
+                continue
+            pixCount= header['Pixel Count']
             scan = self.read_data(f, pixCount)
             # should include this back again when crc32 is in the headers!
             #crc32 = self.read_footer(f, 4)
@@ -300,8 +300,8 @@ def read_series(self, seq_dir, series, lat, lon, metadata, flag, fileformat):
             ds["solar_azimuth_angle"][scan_number] = get_azimuth(float(lat), float(lon), acquisitionTime)
 
             ds['quality_flag'][scan_number] = flag
-            ds['integration_time'][scan_number] = 1000#header['integration_time']
-            ds['temperature'][scan_number] = 1000#header['temperature']
+            ds['integration_time'][scan_number] = header['integration_time']
+            ds['temperature'][scan_number] = header['temperature']
 
             # accelaration:
             # Reference acceleration data contains 3x 16 bit signed integers with X, Y and Z
@@ -314,13 +314,16 @@ def read_series(self, seq_dir, series, lat, lon, metadata, flag, fileformat):
 
             a = 19.6
             b = 2 ** 15
-            # ds['acceleration_x_mean'][scan_number] = header['acceleration_x_mean'] * a / b
-            # ds['acceleration_x_std'][scan_number] = header['acceleration_x_std'] * a / b
-            # ds['acceleration_y_mean'][scan_number] = header['acceleration_y_mean'] * a / b
-            # ds['acceleration_y_std'][scan_number] = header['acceleration_y_std'] * a / b
-            # ds['acceleration_z_mean'][scan_number] = header['acceleration_z_mean'] * a / b
-            # ds['acceleration_z_std'][scan_number] = header['acceleration_z_std'] * a / b
-
+            ds['acceleration_x_mean'][scan_number] = header['acceleration_x_mean'] * a / b
+            ds['acceleration_x_std'][scan_number] = header['acceleration_x_std'] * a / b
+            ds['acceleration_y_mean'][scan_number] = header['acceleration_y_mean'] * a / b
+            ds['acceleration_y_std'][scan_number] = header['acceleration_y_std'] * a / b
+            ds['acceleration_z_mean'][scan_number] = header['acceleration_z_mean'] * a / b
+            ds['acceleration_z_std'][scan_number] = header['acceleration_z_std'] * a / b
+
+            print(header['Pixel Count'])
+            print(pixCount)
+            print(len(scan))
             ds['digital_number'][0:pixCount, scan_number] = scan
 
             scan_number += 1
@@ -388,7 +391,7 @@ def read_metadata(self, seq_dir):
             else:
                 print("Latitude is not given, use default")
                 lat = self.context.get_config_value("lat")
-                flag = flag + 2 ** self.context.get_config_value("lat_default")
+                flag = flag + 2 ** self.context.get_config_value("lat_default")  #du.set_flag(flag, "lat_default") #
 
             if 'longitude' in (globalattr.keys()):
                 lon = float(globalattr['longitude'])
@@ -397,7 +400,7 @@
             else:
                 print("Longitude is not given, use default")
                 lon = self.context.get_config_value("lon")
-                flag = flag + 2 ** self.context.get_config_value("lon_default")
+                flag = flag + 2 ** self.context.get_config_value("lon_default")  #du.set_flag(flag, "lon_default") #
 
             # 2. Estimate wavelengths - NEED TO CHANGE HERE!!!!!!
             # ----------------------
@@ -477,14 +480,14 @@ def read_sequence(self, seq_dir, setfile=None):
         else:
             print("No irradiance data for this sequence")
 
-        # if seriesRad:
-        #     L0_RAD = self.read_series(seq_dir, seriesRad, lat, lon, metadata,flag, "L0_RAD")
-        #     # if all([os.path.isfile(os.path.join(seq_dir,"RADIOMETER/",f)) for f in seriesRad]):
-        #     #     L0_RAD=read_series(seriesRad,cc, lat, lon, metadata, "L0_RAD")
-        #     # else:
-        #     #     print("Radiance files listed but don't exist")
-        # else:
-        #     print("No radiance data for this sequence")
+        if seriesRad:
+            L0_RAD = self.read_series(seq_dir, seriesRad, lat, lon, metadata,flag, "L0_RAD")
+            # if all([os.path.isfile(os.path.join(seq_dir,"RADIOMETER/",f)) for f in seriesRad]):
+            #     L0_RAD=read_series(seriesRad,cc, lat, lon, metadata, "L0_RAD")
+            # else:
+            #     print("Radiance files listed but don't exist")
+        else:
+            print("No radiance data for this sequence")
 
         if seriesBlack:
             L0_BLA = self.read_series(seq_dir, seriesBlack, lat, lon, metadata,flag, "L0_BLA")
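The acceleration handling enabled above scales the raw signed 16-bit readings by a / b, i.e. onto a full-scale range of about ±19.6 m/s² (roughly ±2 g). A stand-alone sketch of that conversion, with an invented raw value:

    # Same conversion as in read_series: raw 16-bit signed acceleration -> m/s^2.
    a = 19.6                     # full-scale acceleration in m/s^2 (about 2 g)
    b = 2 ** 15                  # full-scale count of a signed 16-bit integer

    raw_acceleration_x_mean = 16384          # invented raw reading, half scale
    acceleration_x_mean = raw_acceleration_x_mean * a / b
    print(acceleration_x_mean)               # 9.8 m/s^2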

hypernets_processor/data_io/tests/reader/SEQ20200506T130443/metadata.txt

Lines changed: 78 additions & 0 deletions
@@ -41,10 +41,88 @@ pt_ask=90.00; 180.00
 pt_abs=90.00; 180.00
 pt_ref=90.13; 179.93
 
+[01_002_0090_2_0180]
+01_002_0090_2_0180_192_00_0000_03_0000.spe=20200506T130519
+pt_ask=90.00; 180.00
+pt_abs=90.00; 180.00
+pt_ref=89.78; 179.77
+
+[01_003_0090_2_0180]
+01_003_0090_2_0180.jpg=20200506T130519
+pt_ask=90.00; 180.00
+pt_abs=90.00; 180.00
+pt_ref=89.87; 180.20
+
+[01_004_0090_2_0140]
+01_004_0090_2_0140_128_16_0000_03_0000.spe=20200506T130519
+pt_ask=90.00; 140.00
+pt_abs=90.00; 140.00
+pt_ref=90.09; 139.76
+
+[01_005_0090_2_0180]
+01_005_0090_2_0180_192_00_0000_03_0000.spe=20200506T130527
+pt_ask=90.00; 180.00
+pt_abs=90.00; 180.00
+pt_ref=90.09; 179.96
+
+[01_006_0090_2_0140]
+01_006_0090_2_0140.jpg=20200506T130535
+pt_ask=90.00; 140.00
+pt_abs=90.00; 140.00
+pt_ref=89.89; 140.04
+
+[01_007_0090_2_0040]
+01_007_0090_2_0040_128_16_0000_06_0000.spe=20200506T130543
+pt_ask=90.00; 40.00
+pt_abs=90.00; 40.00
+pt_ref=90.12; 40.24
+
+[01_008_0090_2_0180]
+01_008_0090_2_0180_192_00_0000_03_0000.spe=20200506T130603
+pt_ask=90.00; 180.00
+pt_abs=90.00; 180.00
+pt_ref=89.78; 180.12
+
+[01_009_0090_2_0040]
+01_009_0090_2_0040.jpg=20200506T130631
+pt_ask=90.00; 40.00
+pt_abs=90.00; 40.00
+pt_ref=89.98; 39.92
+
+[01_010_0090_2_0140]
+01_010_0090_2_0140_128_16_0000_03_0000.spe=20200506T130659
+pt_ask=90.00; 140.00
+pt_abs=90.00; 140.00
+pt_ref=90.05; 139.82
+
+[01_011_0090_2_0180]
+01_011_0090_2_0180_192_00_0000_03_0000.spe=20200506T130719
+pt_ask=90.00; 180.00
+pt_abs=90.00; 180.00
+pt_ref=90.05; 179.88
+
+[01_012_0090_2_0140]
+01_012_0090_2_0140.jpg=20200506T130727
+pt_ask=90.00; 140.00
+pt_abs=90.00; 140.00
+pt_ref=89.94; 139.99
+
 [01_013_0090_2_0180]
 01_013_0090_2_0180_128_08_0000_03_0000.spe=20200506T130735
 pt_ask=90.00; 180.00
 pt_abs=90.00; 180.00
 pt_ref=90.19; 180.05
 
+[01_014_0090_2_0180]
+01_014_0090_2_0180_192_00_0000_03_0000.spe=20200506T130743
+pt_ask=90.00; 180.00
+pt_abs=90.00; 180.00
+pt_ref=89.87; 180.10
+
+[01_015_0090_2_0180]
+01_015_0090_2_0180.jpg=20200506T130743
+pt_ask=90.00; 180.00
+pt_abs=90.00; 180.00
+pt_ref=90.19; 179.80
+
 
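The sections added above follow the INI-style layout of the sequence metadata: one section per pointing, a .spe or .jpg timestamp, and pt_ask / pt_abs / pt_ref pan-tilt pairs. Below is a minimal sketch of parsing that layout with Python's configparser; it assumes the file opens with a section header for the global attributes, which is not visible in this hunk.

    import configparser

    # Parse the sequence metadata and list asked vs. reached pan/tilt angles.
    meta = configparser.ConfigParser()
    meta.read("SEQ20200506T130443/metadata.txt")

    for section in meta.sections():
        if section.startswith("01_"):
            print(section,
                  "asked:", meta[section].get("pt_ask"),
                  "reached:", meta[section].get("pt_ref"))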

hypernets_processor/data_io/tests/reader/SEQ20200821T154756/metadata.txt

Lines changed: 0 additions & 2 deletions
@@ -3,8 +3,6 @@ PyxisVersion=PYXIS_V000.50
 Datetime=20200821T154756
 PI=CPE2
 Site_name=Villefranche-sur-mer
-Lat=43.69862
-Lon=7.30692
 
 [01_001_0090_2_0180]
 pt_ask=90.00;180.00

hypernets_processor/etc/job.config

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@ verbosity: 0
 #raw_data_directory: None
 #metadata_db_url: None
 
+#SEQ20201016T141413
 sequence_id: SEQ20200821T154756
 Lat: 43.69886
 Lon: 7.30720
