Fix AAPP L1b reader not to up-cast data to float64 #2893

Merged
merged 3 commits into from
Sep 19, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
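The change hinges on NumPy's type-promotion rules: the AAPP l1b records store angles, positions and calibration coefficients as integers, and multiplying an integer array by a Python float scale factor promotes the result to float64, while an array cast to float32 first keeps its dtype when scaled by the same Python scalar. A minimal sketch of that behaviour, with invented values standing in for a tie-point field:

import numpy as np

raw = np.array([[4711, 1234]], dtype=np.int16)  # e.g. angles stored in hundredths of a degree

print((raw * 1e-2).dtype)                       # float64 -- the unwanted up-cast

scaled = raw.astype(np.float32) * 1e-2          # cast first, then apply the scale factor
print(scaled.dtype)                             # float32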
44 changes: 23 additions & 21 deletions satpy/readers/aapp_l1b.py
@@ -38,7 +38,7 @@
from satpy.readers.file_handlers import BaseFileHandler
from satpy.utils import get_chunk_size_limit

CHANNEL_DTYPE = np.float64
CHANNEL_DTYPE = np.float32


def get_avhrr_lac_chunks(shape, dtype):
@@ -239,7 +239,6 @@ def available_datasets(self, configured_datasets=None):
def get_angles(self, angle_id):
"""Get sun-satellite viewing angles."""
sunz, satz, azidiff = self._get_all_interpolated_angles()

name_to_variable = dict(zip(self._angle_names, (satz, sunz, azidiff)))
return create_xarray(name_to_variable[angle_id])

@@ -248,9 +247,10 @@ def _get_all_interpolated_angles_uncached(self):
return self._interpolate_arrays(sunz40km, satz40km, azidiff40km)

def _get_tiepoint_angles_in_degrees(self):
sunz40km = self._data["ang"][:, :, 0] * 1e-2
satz40km = self._data["ang"][:, :, 1] * 1e-2
azidiff40km = self._data["ang"][:, :, 2] * 1e-2
angles = self._data["ang"].astype(np.float32)
sunz40km = angles[:, :, 0] * 1e-2
satz40km = angles[:, :, 1] * 1e-2
azidiff40km = angles[:, :, 2] * 1e-2
return sunz40km, satz40km, azidiff40km

def _interpolate_arrays(self, *input_arrays, geolocation=False):
@@ -299,8 +299,10 @@ def _get_all_interpolated_coordinates_uncached(self):
return self._interpolate_arrays(lons40km, lats40km, geolocation=True)

def _get_coordinates_in_degrees(self):
lons40km = self._data["pos"][:, :, 1] * 1e-4
lats40km = self._data["pos"][:, :, 0] * 1e-4
position_data = self._data["pos"].astype(np.float32)
lons40km = position_data[:, :, 1] * 1e-4
lats40km = position_data[:, :, 0] * 1e-4

return lons40km, lats40km

def calibrate(self,
@@ -586,14 +588,11 @@ def _vis_calibrate(data,
slope2 = da.from_array(calib_coeffs[2], chunks=line_chunks)
intercept2 = da.from_array(calib_coeffs[3], chunks=line_chunks)
else:
slope1 = da.from_array(data["calvis"][:, chn, coeff_idx, 0],
chunks=line_chunks) * 1e-10
intercept1 = da.from_array(data["calvis"][:, chn, coeff_idx, 1],
chunks=line_chunks) * 1e-7
slope2 = da.from_array(data["calvis"][:, chn, coeff_idx, 2],
chunks=line_chunks) * 1e-10
intercept2 = da.from_array(data["calvis"][:, chn, coeff_idx, 3],
chunks=line_chunks) * 1e-7
calvis = data["calvis"]
slope1 = da.from_array(calvis[:, chn, coeff_idx, 0], chunks=line_chunks).astype(np.float32) * 1e-10
intercept1 = da.from_array(calvis[:, chn, coeff_idx, 1], chunks=line_chunks).astype(np.float32) * 1e-7
slope2 = da.from_array(calvis[:, chn, coeff_idx, 2], chunks=line_chunks).astype(np.float32) * 1e-10
intercept2 = da.from_array(calvis[:, chn, coeff_idx, 3], chunks=line_chunks).astype(np.float32) * 1e-7

# In the level 1b file, the visible coefficients are stored as 4-byte integers. Scaling factors then convert
# them to real numbers which are applied to the measured counts. The coefficient is different depending on
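The comment above (cut off by the collapsed diff) notes that the visible-channel coefficients are stored as 4-byte integers and turned into real numbers by scale factors. A small sketch of the pattern used in the diff, with invented calvis values; casting to float32 before applying the Python float scale factor keeps the slope in single precision:

import dask.array as da
import numpy as np

stored_slope = np.array([1234567890, 987654321], dtype=np.int32)  # hypothetical calvis entries

slope1 = da.from_array(stored_slope, chunks=1).astype(np.float32) * 1e-10
print(slope1.dtype)      # float32
print(slope1.compute())  # approximately [0.12345679 0.09876543]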
@@ -632,9 +631,10 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True):
mask &= count != 0
count = count.astype(CHANNEL_DTYPE)

k1_ = da.from_array(data["calir"][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9
k2_ = da.from_array(data["calir"][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6
k3_ = da.from_array(data["calir"][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6
calir = data["calir"]
k1_ = da.from_array(calir[:, irchn, 0, 0], chunks=line_chunks).astype(np.float32) * 1.0e-9
k2_ = da.from_array(calir[:, irchn, 0, 1], chunks=line_chunks).astype(np.float32) * 1.0e-6
k3_ = da.from_array(calir[:, irchn, 0, 2], chunks=line_chunks).astype(np.float32) * 1.0e-6

# Count to radiance conversion:
rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None]
@@ -646,15 +646,17 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True):
mask &= rad > 0.0
return da.where(mask, rad, np.nan)

radtempcnv = header["radtempcnv"].astype(np.float32)

# Central wavenumber:
cwnum = header["radtempcnv"][0, irchn, 0]
cwnum = radtempcnv[0, irchn, 0]
if irchn == 0:
cwnum = cwnum / 1.0e2
else:
cwnum = cwnum / 1.0e3

bandcor_2 = header["radtempcnv"][0, irchn, 1] / 1e5
bandcor_3 = header["radtempcnv"][0, irchn, 2] / 1e6
bandcor_2 = radtempcnv[0, irchn, 1] / 1e5
bandcor_3 = radtempcnv[0, irchn, 2] / 1e6

ir_const_1 = 1.1910659e-5
ir_const_2 = 1.438833
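For the IR path, the count-to-radiance polynomial stays in single precision once the counts and the scaled coefficients are float32. A rough sketch of that step with made-up coefficient values (in the reader, k1_, k2_ and k3_ come from the calir record, scaled by 1e-9 and 1e-6 as shown above):

import numpy as np

count = np.array([500.0, 600.0], dtype=np.float32)  # earth-view counts, already cast to CHANNEL_DTYPE
k1 = np.float32(1.6e-6)                             # quadratic term (hypothetical value)
k2 = np.float32(-0.18)                              # linear term (hypothetical value)
k3 = np.float32(110.0)                              # offset (hypothetical value)

rad = k1 * count * count + k2 * count + k3          # same form as the reader's conversion
print(rad.dtype)                                    # float32 -- no float64 temporaries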
25 changes: 16 additions & 9 deletions satpy/tests/reader_tests/test_aapp_l1b.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python

Check notice on line 1 in satpy/tests/reader_tests/test_aapp_l1b.py (CodeScene Delta Analysis / CodeScene Cloud Delta Analysis (main)): ✅ Getting better: Code Duplication. Reduced similar code in: TestAAPPL1BAllChannelsPresent.test_angles, TestAAPPL1BAllChannelsPresent.test_navigation, TestAAPPL1BChannel3AMissing.test_loading_missing_channels_returns_none.
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Satpy developers
#
@@ -106,6 +106,7 @@
for name in ["1", "2", "3a"]:
key = make_dataid(name=name, calibration="reflectance")
res = fh.get_dataset(key, info)
assert res.dtype == np.float32
assert res.min() == 0
assert res.max() >= 100
mins.append(res.min().values)
@@ -116,14 +117,13 @@
for name in ["3b", "4", "5"]:
key = make_dataid(name=name, calibration="brightness_temperature")
res = fh.get_dataset(key, info)
assert res.dtype == np.float32
mins.append(res.min().values)
maxs.append(res.max().values)
if name == "3b":
assert np.all(np.isnan(res[2:, :]))

np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758])
np.testing.assert_allclose(maxs, [108.40391775, 107.68545158, 106.80061233,
337.71416096, 355.15898219, 350.87182166])
np.testing.assert_allclose(mins, [0., 0., 0., 204.1018, 103.24155, 106.426704])
np.testing.assert_allclose(maxs, [108.40393, 107.68546, 106.80061, 337.71414, 355.15897, 350.87186])

def test_angles(self):
"""Test reading the angles."""
@@ -136,6 +136,7 @@
info = {}
key = make_dataid(name="solar_zenith_angle")
res = fh.get_dataset(key, info)
assert res.dtype == np.float32
assert np.all(res == 0)

def test_navigation(self):
@@ -149,9 +150,11 @@
info = {}
key = make_dataid(name="longitude")
res = fh.get_dataset(key, info)
assert res.dtype == np.float32
assert np.all(res == 0)
key = make_dataid(name="latitude")
res = fh.get_dataset(key, info)
assert res.dtype == np.float32
assert np.all(res == 0)

def test_interpolation(self):
@@ -188,7 +191,7 @@
-176.7503, -177.5758, -178.3968, -179.2157, 179.9646, 179.1416,
178.3124, 177.4742, 176.6238, 175.7577, 174.8724, 173.9635,
173.0263, 172.0552, 171.0436, 169.9833, 168.8643, 167.6734,
166.3931, 164.9982, 163.4507]])
166.3931, 164.9982, 163.4507]], dtype=np.float32)
lats40km = np.array([
[78.6613, 78.9471, 79.0802, 79.1163, 79.0889, 79.019, 78.9202,
78.8016, 78.6695, 78.528, 78.38, 78.2276, 78.0721, 77.9145,
@@ -213,11 +216,12 @@
75.3844, 75.1911, 74.9921, 74.7864, 74.5734, 74.3518, 74.1207,
73.8786, 73.624, 73.3552, 73.0699, 72.7658, 72.4398, 72.0882,
71.7065, 71.2891, 70.8286, 70.3158, 69.7381, 69.0782, 68.3116,
67.4012, 66.2872]])
67.4012, 66.2872]], dtype=np.float32)
fh._get_coordinates_in_degrees = mock.MagicMock()
fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km)
(lons, lats) = fh._get_all_interpolated_coordinates()
lon_data = lons.compute()
assert lon_data.dtype == np.float32
assert (np.max(lon_data) <= 180)
# No longitudes between -110 and 110 in indata
assert np.all(np.abs(lon_data) > 110)
@@ -242,7 +246,7 @@
116.14, 115.96, 115.78, 115.6, 115.43, 115.25, 115.08, 114.9, 114.72, 114.54,
114.36, 114.17, 113.98, 113.78, 113.57, 113.36, 113.14, 112.91, 112.67, 112.42,
112.15, 111.86, 111.55, 111.21, 110.84, 110.43, 109.98, 109.46, 108.87, 108.17,
107.32]])
107.32]], dtype=np.float32)
satz40km = np.array(
[[6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01,
4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01,
@@ -259,7 +263,7 @@
7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01,
2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01,
4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01,
6.290e+01, 6.633e+01]])
6.290e+01, 6.633e+01]], dtype=np.float32)
azidiff40km = np.array([
[56.9, 56.24, 55.71, 55.27, 54.9, 54.57, 54.29, 54.03, 53.8, 53.59,
53.4, 53.22, 53.05, 52.89, 52.74, 52.6, 52.47, 52.34, 52.22, 52.1,
@@ -272,10 +276,13 @@
51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.75, 128.86, 128.96,
129.06, 129.17, 129.27, 129.38, 129.49, 129.6, 129.71, 129.83, 129.95, 130.08,
130.21, 130.35, 130.49, 130.65, 130.81, 130.99, 131.18, 131.39, 131.62, 131.89,
132.19]])
132.19]], dtype=np.float32)
fh._get_tiepoint_angles_in_degrees = mock.MagicMock()
fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km)
(sunz, satz, azidiff) = fh._get_all_interpolated_angles()
assert sunz.dtype == np.float32
assert satz.dtype == np.float32
assert azidiff.dtype == np.float32
assert (np.max(sunz) <= 123)
assert (np.max(satz) <= 70)
