
Commit

Merge pull request #93 from ACTRIS-CCRES/various_problems
Various problems
gritya authored Oct 8, 2024
2 parents a8ec045 + 613a5a2 commit b296ee8
Showing 37 changed files with 494 additions and 363 deletions.
19 changes: 2 additions & 17 deletions .github/workflows/ci.yaml
@@ -52,23 +52,8 @@ jobs:
python -m pip install numpy wheel
python -m pip install -U .[dev]
- name: Cache pre-commits
id: cache-pre-commits
# https://tobiasmcnulty.com/posts/caching-pre-commit/
# This restore and/or save in the same actions
uses: actions/cache@v3
with:
path: ~/.cache/pre-commit/
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}

- name: Install pre-commits
# https://github.com/actions/cache#restoring-and-saving-cache-using-a-single-action
if: steps.cache-pre-commits.outputs.cache-hit != 'true'
run: pre-commit install

- name: Compute pre-commit cache key
run: |
pre-commit run --all-files
- name: run pre-commits
uses: pre-commit/action@v3.0.1

multi-os-tests:
if: ( github.ref == format('refs/heads/{0}', github.event.repository.default_branch) ) || startsWith(github.ref, 'refs/tags')
14 changes: 7 additions & 7 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v5.0.0
hooks:
- id: check-case-conflict
- id: check-docstring-first
@@ -18,14 +18,14 @@ repos:
- id: check-yaml
- id: detect-private-key
- id: requirements-txt-fixer
- repo: https://github.com/PyCQA/docformatter
rev: v1.5.0
hooks:
- id: docformatter
args: ["--in-place", "--config", "./pyproject.toml"]
# - repo: https://github.com/PyCQA/docformatter
# rev: v1.7.5
# hooks:
# - id: docformatter
# args: ["--in-place", "--config", "./pyproject.toml"]
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.15
rev: v0.6.9
hooks:
# Run the linter.
- id: ruff
1 change: 1 addition & 0 deletions ccres_disdrometer_processing/__init__.py
@@ -1,4 +1,5 @@
"""Top-level package for CCRES Disdrometer processing."""

version_str = "0.1.0"
version_tuple = version_str.split(".")

2 changes: 2 additions & 0 deletions ccres_disdrometer_processing/cli/cli.py
@@ -112,6 +112,7 @@ def preprocess(disdro_file, ws_file, radar_file, config_file, output_file, verbo
Returns
-------
None
""" # noqa
return preprocess_cli.preprocess(
disdro_file, ws_file, radar_file, config_file, output_file, verbosity
@@ -268,6 +269,7 @@ def process(
Returns
-------
None
""" # noqa
processing.process(
yesterday, today, tomorrow, config_file, output_file, no_meteo, verbosity
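The single added line in each of these two hunks appears to be a blank line inserted just before the closing triple quotes of the docstring (blank lines are collapsed in this extracted view); the same pattern repeats across most files in this commit. A minimal, hypothetical sketch of the resulting layout, not taken from the repository:

# Hypothetical stub used only to illustrate the docstring layout adopted in this commit.
def preprocess_stub(disdro_file):
    """Run the preprocessing step on one daily disdrometer file.

    Parameters
    ----------
    disdro_file : str
        Path to the daily disdrometer netCDF file.

    Returns
    -------
    None

    """
    return None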
76 changes: 38 additions & 38 deletions ccres_disdrometer_processing/cli/preprocess_cli.py
@@ -94,15 +94,15 @@ def preprocess(disdro_file, ws_file, radar_file, config_file, output_file, verbo
lgr.info("Merge : OK")

final_data["weather_data_avail"] = np.array(weather_avail).astype("i2")
final_data["weather_data_avail"].attrs[
"long_name"
] = "Availability of weather data at the station"
final_data["weather_data_avail"].attrs["long_name"] = (
"Availability of weather data at the station"
)
final_data["weather_data_avail"].attrs["flag_values"] = np.array([0, 1]).astype(
"i2"
)
final_data["weather_data_avail"].attrs[
"flag_meanings"
] = "no_weather_file_available weather_file_provided"
final_data["weather_data_avail"].attrs["flag_meanings"] = (
"no_weather_file_available weather_file_provided"
)

lgr.info("Add netCDF missing global attributes")

@@ -112,31 +112,31 @@ def preprocess(disdro_file, ws_file, radar_file, config_file, output_file, verbo
final_data.attrs["fallspeedFormula"] = strMethod

# Add global attributes specified in the file format
final_data.attrs[
"title"
] = f"CCRES pre-processing file for Doppler cloud radar stability monitoring with disdrometer at {final_data.attrs['location']} site" # noqa E501
final_data.attrs[
"summary"
] = f"Disdrometer ({final_data.attrs['disdrometer_source']}) data are processed to derive the equivalent reflectivity factor at {len(computed_frequencies)} frequencies ({', '.join(str(round(freq*1e-9,0)) for freq in computed_frequencies[:])} GHz). Doppler cloud radar ({final_data.attrs['radar_source']}) data (reflectivity and Doppler velocity) are extracted up to some hundreds of meters, and weather station data (temperature, humidity, wind and precipitation rate) are added to the dataset if provided. The resulting pre-processing netCDF file has a 1-minute sampling for all the collocated sensors." # noqa E501
final_data.attrs[
"keywords"
] = "GCMD:EARTH SCIENCE, GCMD:ATMOSPHERE, GCMD:CLOUDS, GCMD:CLOUD DROPLET DISTRIBUTION, GCMD:CLOUD RADIATIVE TRANSFER, GCMD:CLOUD REFLECTANCE, GCMD:SCATTERING, GCMD:PRECIPITATION, GCMD:ATMOSPHERIC PRECIPITATION INDICES, GCMD:DROPLET SIZE, GCMD:HYDROMETEORS, GCMD:LIQUID PRECIPITATION, GCMD:RAIN, GCMD:LIQUID WATER EQUIVALENT, GCMD:PRECIPITATION AMOUNT, GCMD:PRECIPITATION RATE, GCMD:SURFACE PRECIPITATION" # noqa
final_data.attrs[
"keywords_vocabulary"
] = "GCMD:GCMD Keywords, CF:NetCDF COARDS Climate and Forecast Standard Names"
final_data.attrs["title"] = (
f"CCRES pre-processing file for Doppler cloud radar stability monitoring with disdrometer at {final_data.attrs['location']} site" # noqa E501
)
final_data.attrs["summary"] = (
f"Disdrometer ({final_data.attrs['disdrometer_source']}) data are processed to derive the equivalent reflectivity factor at {len(computed_frequencies)} frequencies ({', '.join(str(round(freq*1e-9,0)) for freq in computed_frequencies[:])} GHz). Doppler cloud radar ({final_data.attrs['radar_source']}) data (reflectivity and Doppler velocity) are extracted up to some hundreds of meters, and weather station data (temperature, humidity, wind and precipitation rate) are added to the dataset if provided. The resulting pre-processing netCDF file has a 1-minute sampling for all the collocated sensors." # noqa E501
)
final_data.attrs["keywords"] = (
"GCMD:EARTH SCIENCE, GCMD:ATMOSPHERE, GCMD:CLOUDS, GCMD:CLOUD DROPLET DISTRIBUTION, GCMD:CLOUD RADIATIVE TRANSFER, GCMD:CLOUD REFLECTANCE, GCMD:SCATTERING, GCMD:PRECIPITATION, GCMD:ATMOSPHERIC PRECIPITATION INDICES, GCMD:DROPLET SIZE, GCMD:HYDROMETEORS, GCMD:LIQUID PRECIPITATION, GCMD:RAIN, GCMD:LIQUID WATER EQUIVALENT, GCMD:PRECIPITATION AMOUNT, GCMD:PRECIPITATION RATE, GCMD:SURFACE PRECIPITATION" # noqa
)
final_data.attrs["keywords_vocabulary"] = (
"GCMD:GCMD Keywords, CF:NetCDF COARDS Climate and Forecast Standard Names"
)
final_data.attrs["Conventions"] = "CF-1.8, ACDD-1.3, GEOMS"
final_data.attrs["id"] = config["nc_meta"]["id"]
final_data.attrs["naming_authority"] = config["nc_meta"]["naming_authority"]
date_created = datetime.datetime.utcnow().strftime(ISO_DATE_FORMAT)
final_data.attrs["history"] = "created on {} by {}, v{}".format(
date_created, script_name, __version__
final_data.attrs["history"] = (
f"created on {date_created} by {script_name}, v{__version__}"
)
weather_str = ""
if weather_avail:
weather_str = " and AMS"
final_data.attrs[
"source"
] = f"surface observation from {final_data.radar_source} DCR, {final_data.disdrometer_source} disdrometer{weather_str}, processed by CloudNet" # noqa
final_data.attrs["source"] = (
f"surface observation from {final_data.radar_source} DCR, {final_data.disdrometer_source} disdrometer{weather_str}, processed by CloudNet" # noqa
)
final_data.attrs["processing_level"] = "2a"
final_data.attrs["comment"] = config["nc_meta"]["comment"]
final_data.attrs["acknowledgement"] = ""
@@ -288,9 +288,9 @@ def precision(nb):
precision(final_data.radar_altitude.values),
)
)
final_data.attrs[
"geospatial_bounds"
] = f"POLYGON (({geospatial_lat_min}, {geospatial_lon_min}), ({geospatial_lat_min}, {geospatial_lon_max}), ({geospatial_lat_max}, {geospatial_lon_max}), ({geospatial_lat_max}, {geospatial_lon_min}))" # noqa
final_data.attrs["geospatial_bounds"] = (
f"POLYGON (({geospatial_lat_min}, {geospatial_lon_min}), ({geospatial_lat_min}, {geospatial_lon_max}), ({geospatial_lat_max}, {geospatial_lon_max}), ({geospatial_lat_max}, {geospatial_lon_min}))" # noqa
)
final_data.attrs["geospatial_bounds_crs"] = "EPSG:4326" # WGS84
final_data.attrs["geospatial_bounds_vertical_crs"] = "EPSG:5829"
final_data.attrs["geospatial_lat_min"] = geospatial_lat_min
@@ -321,20 +321,20 @@ def precision(nb):
).isoformat() # PT60S here
final_data.attrs["program"] = "ACTRIS, CloudNet, CCRES"
final_data.attrs["date_modified"] = date_created
final_data.attrs[
"date_issued"
] = date_created # made available immediately to the users after creation
final_data.attrs[
"date_metadata_modified"
] = "" # will be set when everything will be of ; modify it if some fields evolve
final_data.attrs["date_issued"] = (
date_created # made available immediately to the users after creation
)
final_data.attrs["date_metadata_modified"] = (
"" # will be set when everything will be of ; modify it if some fields evolve
)
final_data.attrs["product_version"] = __version__
final_data.attrs[
"platform"
] = "GCMD:In Situ Land-based Platforms, GCMD:OBSERVATORIES"
final_data.attrs["platform"] = (
"GCMD:In Situ Land-based Platforms, GCMD:OBSERVATORIES"
)
final_data.attrs["platform_vocabulary"] = "GCMD:GCMD Keywords"
final_data.attrs[
"instrument"
] = "GCMD:Earth Remote Sensing Instruments, GCMD:Active Remote Sensing, GCMD:Profilers/Sounders, GCMD:Radar Sounders, GCMD:DOPPLER RADAR, GCMD:FMCWR, GCMD:VERTICAL POINTING RADAR, GCMD:In Situ/Laboratory Instruments, GCMD:Gauges, GCMD:RAIN GAUGES, GCMD:Recorders/Loggers, GCMD:DISDROMETERS, GCMD:Temperature/Humidity Sensors, GCMD:TEMPERATURE SENSORS, GCMD:HUMIDITY SENSORS, GCMD:Current/Wind Meters, GCMD:WIND MONITOR, GCMD:Pressure/Height Meters, GCMD:BAROMETERS" # noqa
final_data.attrs["instrument"] = (
"GCMD:Earth Remote Sensing Instruments, GCMD:Active Remote Sensing, GCMD:Profilers/Sounders, GCMD:Radar Sounders, GCMD:DOPPLER RADAR, GCMD:FMCWR, GCMD:VERTICAL POINTING RADAR, GCMD:In Situ/Laboratory Instruments, GCMD:Gauges, GCMD:RAIN GAUGES, GCMD:Recorders/Loggers, GCMD:DISDROMETERS, GCMD:Temperature/Humidity Sensors, GCMD:TEMPERATURE SENSORS, GCMD:HUMIDITY SENSORS, GCMD:Current/Wind Meters, GCMD:WIND MONITOR, GCMD:Pressure/Height Meters, GCMD:BAROMETERS" # noqa
)
final_data.attrs["instrument_vocabulary"] = "GCMD:GCMD Keywords"
final_data.attrs["cdm_data_type"] = config["nc_meta"]["cdm_data_type"] # empty
final_data.attrs["metadata_link"] = config["nc_meta"]["metadata_link"] # empty
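The hunks above change formatting only: long string values previously assigned through a subscript split across lines are now assigned with the value wrapped in parentheses. This matches the layout produced by recent Black/Ruff formatter releases, although the commit does not say which tool drove the change. A small self-contained sketch of the two equivalent layouts, using a plain dict in place of final_data.attrs:

# Hypothetical stand-in for final_data.attrs; the value is shortened for illustration.
attrs = {}

# Old layout: the subscripted target is split over several lines to respect the line limit.
attrs[
    "long_name"
] = "Availability of weather data at the station"

# New layout: the right-hand side is parenthesized instead, keeping the target on one line.
attrs["long_name"] = (
    "Availability of weather data at the station"
)

# Both layouts assign exactly the same value.
assert attrs["long_name"] == "Availability of weather data at the station"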
1 change: 1 addition & 0 deletions ccres_disdrometer_processing/constants.py
@@ -1,4 +1,5 @@
"""Module containing physical constants."""

from scipy import constants

FREQ = 95.0 * 1e9 # Hz
4 changes: 1 addition & 3 deletions ccres_disdrometer_processing/dz_1event_degrade.py
@@ -821,9 +821,7 @@ def dz_plot(
plt.text(
x=pd.Timestamp((t[0].value + t[-1].value) / 2.0),
y=20,
s=r"Gate n° {} ({}m AGL) used for $\Delta Z$ computation".format(
int(gate) + 1, int(preprocessed_ds.range.values[int(gate)])
),
s=rf"Gate n° {int(gate) + 1} ({int(preprocessed_ds.range.values[int(gate)])}m AGL) used for $\Delta Z$ computation", # noqa: E501
fontsize=14,
ha="center",
)
(additional changed file; filename not shown in this view)
@@ -765,9 +765,7 @@ def dz_plot(
plt.text(
x=pd.Timestamp((t[0].value + t[-1].value) / 2.0),
y=20,
s=r"Gate n° {} ({}m AGL) used for $\Delta Z$ computation".format(
int(gate) + 1, int(preprocessed_ds.range.values[int(gate)])
),
s=rf"Gate n° {int(gate) + 1} ({int(preprocessed_ds.range.values[int(gate)])}m AGL) used for $\Delta Z$ computation", # noqa: E501
fontsize=14,
ha="center",
)
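In both files the same annotation string is rewritten from str.format() to a raw f-string (rf"..."), which keeps the backslash in the LaTeX label literal while inlining the gate index and range. A standalone check of the equivalence, with made-up values for gate and range (the real code reads them from preprocessed_ds):

# Illustrative values only; in the plotting code they come from the preprocessed dataset.
gate = 3
range_agl_m = 212

old = r"Gate n° {} ({}m AGL) used for $\Delta Z$ computation".format(
    gate + 1, range_agl_m
)
new = rf"Gate n° {gate + 1} ({range_agl_m}m AGL) used for $\Delta Z$ computation"

# The two formulations produce identical strings.
assert old == new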
2 changes: 2 additions & 0 deletions ccres_disdrometer_processing/logger.py
@@ -39,6 +39,7 @@ def get_log_level_from_count(count: int) -> LogLevels:
-------
LogLevels
Corresponding Enum from the count
"""
level = LogLevels.ERROR
if count == 1:
@@ -65,6 +66,7 @@ def init_logger(level: LogLevels) -> None:
----------
level : LogLevels
Enum corresponding to the level we want
"""
log_dict = {
"version": 1,
11 changes: 8 additions & 3 deletions ccres_disdrometer_processing/open_disdro_netcdf.py
@@ -53,6 +53,7 @@ def resample_data_perfect_timesteps(filename: Union[str, Path], config) -> xr.Da
-------
xr.Dataset
A dataset with a 1-minute regular sampling
"""
data_nc = xr.open_dataset(filename)
start_time = pd.Timestamp(data_nc.time.values[0]).replace(
@@ -113,6 +114,7 @@ def read_parsivel_cloudnet(
xr.Dataset
Dataset for Parsivel daily disdrometer data, with only
the useful variables kept (after being renamed)
"""
data = xr.Dataset(
coords=dict(
@@ -201,6 +203,7 @@ def read_thies_cloudnet(
xr.Dataset
Dataset for Thies daily disdrometer data, with only
the useful variables kept (after being renamed)
"""
data = xr.Dataset(
coords=dict(
@@ -221,9 +224,9 @@
data["F"] = data_nc["F"]
data["F"].attrs["long_name"] = "Disdrometer sampling area"
data["F"].attrs["units"] = "m^2"
data["F"].attrs[
"comment"
] = "Varies from one instrument to another for Thies LNM disdrometers"
data["F"].attrs["comment"] = (
"Varies from one instrument to another for Thies LNM disdrometers"
)
data["disdro_pr"] = xr.DataArray(
data_nc["rainfall_rate"].values * 1000 * 3600,
dims=["time"],
@@ -306,6 +309,7 @@ def read_parsivel_cloudnet_choice(
xr.Dataset
A formatted dataset with disdrometer data, with a structure independent
from the disdrometer model used for the data acquisition
"""
data_nc = resample_data_perfect_timesteps(filename=filename, config=config)
source = data_nc.disdrometer_source
@@ -383,6 +387,7 @@ def reflectivity_model_multilambda_measmodV_hvfov(
xr.Dataset
The formatted disdrometer dataset enhanced with the additional variables
(reflectivity in the different computed configurations, ...).
"""
# scatt_list : list of scattering_prop() objects :
# [(lambda1, vert), (lambda2, vert), ...(lambda1, hori), ...] -> 4 lambdas = 8 scatt objects # noqa E501
1 change: 1 addition & 0 deletions ccres_disdrometer_processing/open_radar_netcdf.py
@@ -20,6 +20,7 @@ def read_radar_cloudnet(filename, max_radar_alt=2500):
xr.Dataset
A formatted dataset for radar data (specs, Z, DV and range vector)
with a 1-minute regular sampling
"""
range_bounds = [0, max_radar_alt]
data_nc = xr.open_dataset(filename)[LIST_VARIABLES].sel(
1 change: 1 addition & 0 deletions ccres_disdrometer_processing/open_weather_netcdf.py
@@ -19,6 +19,7 @@ def read_weather_cloudnet(filename):
-------
xr.Dataset
A dataset containing weather data with a 1-minute regular sampling
"""
data_nc = xr.open_dataset(filename)

6 changes: 6 additions & 0 deletions ccres_disdrometer_processing/plot/plot.py
@@ -45,6 +45,7 @@ def divider(axe, size="5%", axis="off"):
-------
matplotlib.axes
The newly created axis.
"""
divider = make_axes_locatable(axe)
cax = divider.append_axes("right", size=size, pad=0.2)
@@ -73,6 +74,7 @@ def plot_preprocessed_ql_overview(
The data read in the toml configuration file.
version : str
Version of the code.
"""
if not isinstance(output_ql_overview, Path):
output_ql_overview = Path(output_ql_overview)
@@ -314,6 +316,7 @@ def plot_preprocessed_ql_overview_downgraded_mode(
The data read in the toml configuration file.
version : str
Version of the code.
"""
if not isinstance(output_ql_overview, Path):
output_ql_overview = Path(output_ql_overview)
@@ -497,6 +500,7 @@ def plot_preprocessed_ql_overview_zh(
The data read in the toml configuration file.
version : str
Version of the code.
"""
site = data.attrs["location"]
station = conf["location"]["STATION"]
@@ -689,6 +693,7 @@ def plot_processed_ql_summary(
The data read in the toml configuration file.
version : str
Version of the code.
"""
selected_alt = conf["instrument_parameters"]["DCR_DZ_RANGE"]

@@ -909,6 +914,7 @@ def plot_processed_ql_detailled(
The data read in the toml configuration file.
version : str
Version of the code.
"""
# TODO: properly
selected_alt = conf["instrument_parameters"]["DCR_DZ_RANGE"]
(diffs for the remaining changed files were not loaded in this view)