diff --git a/copernicusmarine/__init__.py b/copernicusmarine/__init__.py index 24eb448c..9458327d 100644 --- a/copernicusmarine/__init__.py +++ b/copernicusmarine/__init__.py @@ -10,6 +10,17 @@ logging.Formatter.converter = time.gmtime from copernicusmarine.catalogue_parser.models import ( + CopernicusMarineCatalogue, + CopernicusMarineCoordinate, + CopernicusMarineDataset, + CopernicusMarinePart, + CopernicusMarineProduct, + CopernicusMarineService, + CopernicusMarineServiceFormat, + CopernicusMarineServiceNames, + CopernicusMarineVariable, + CopernicusMarineVersion, + CoperniusMarineServiceShortNames, DatasetNotFound, DatasetVersionNotFound, DatasetVersionPartNotFound, diff --git a/copernicusmarine/catalogue_parser/catalogue_parser.py b/copernicusmarine/catalogue_parser/catalogue_parser.py index 8bc91cfb..2112ca17 100644 --- a/copernicusmarine/catalogue_parser/catalogue_parser.py +++ b/copernicusmarine/catalogue_parser/catalogue_parser.py @@ -1,16 +1,17 @@ import logging -from dataclasses import dataclass from enum import Enum from itertools import groupby -from typing import Any, Optional +from typing import Any, Optional, Union import pystac +from pydantic import BaseModel from tqdm import tqdm from copernicusmarine.catalogue_parser.models import ( CopernicusMarineCatalogue, + CopernicusMarineDataset, CopernicusMarineProduct, - CopernicusMarineProductDataset, + DatasetIsNotPartOfTheProduct, DatasetNotFound, get_version_and_part_from_full_dataset_id, ) @@ -19,7 +20,6 @@ ) from copernicusmarine.core_functions.utils import ( construct_query_params_for_marine_data_store_monitoring, - map_reject_none, run_concurrently, ) @@ -67,7 +67,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def get_dataset_metadata( dataset_id: str, staging: bool -) -> Optional[CopernicusMarineProductDataset]: +) -> Optional[CopernicusMarineDataset]: with CatalogParserConnection() as connection: stac_url = ( MARINE_DATA_STORE_STAC_URL @@ -145,7 +145,7 @@ def _parse_product_json_to_pystac_collection( def _parse_and_sort_dataset_items( dataset_items: list[pystac.Item], -) -> Optional[CopernicusMarineProductDataset]: +) -> Optional[CopernicusMarineDataset]: """ Return all dataset metadata parsed and sorted. The first version and part are the default. 
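A minimal sketch (illustrative, not part of the patch) of how the metadata returned by the updated get_dataset_metadata is meant to be traversed, using the accessors defined on the renamed pydantic models in models.py below; the datasetID shown is an example only:

from copernicusmarine.catalogue_parser.catalogue_parser import (
    get_dataset_metadata,
)
from copernicusmarine.catalogue_parser.models import (
    CopernicusMarineServiceNames,
)

# Illustrative datasetID; get_dataset_metadata now returns
# Optional[CopernicusMarineDataset].
dataset = get_dataset_metadata(
    "cmems_mod_glo_phy_my_0.083deg_P1D-m", staging=False
)
if dataset:
    version = dataset.get_version(None)  # None selects the default version
    part = version.get_part(None)  # None selects the default part
    service = part.get_service_by_service_name(
        CopernicusMarineServiceNames.GEOSERIES
    )
    print(service.uri)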
@@ -154,7 +154,7 @@ def _parse_and_sort_dataset_items( dataset_id, _, _ = get_version_and_part_from_full_dataset_id( dataset_item_example.id ) - dataset_part_version_merged = CopernicusMarineProductDataset( + dataset_part_version_merged = CopernicusMarineDataset( dataset_id=dataset_id, dataset_name=dataset_item_example.properties.get("title", dataset_id), versions=[], @@ -241,12 +241,15 @@ def fetch_dataset_items( root_url: str, connection: CatalogParserConnection, collection: pystac.Collection, + force_dataset_id: Optional[str], ) -> list[pystac.Item]: items = [] for link in collection.get_item_links(): if not link.owner: logger.warning(f"Invalid Item, no owner for: {link.href}") continue + if force_dataset_id and force_dataset_id not in link.href: + continue url = root_url + "/" + link.owner.id + "/" + link.href item_json = connection.get_json_file(url) item = _parse_dataset_json_to_pystac_item(item_json) @@ -259,11 +262,14 @@ def fetch_collection( root_url: str, connection: CatalogParserConnection, url: str, + force_dataset_id: Optional[str], ) -> Optional[tuple[pystac.Collection, list[pystac.Item]]]: json_collection = connection.get_json_file(url) collection = _parse_product_json_to_pystac_collection(json_collection) if collection: - items = fetch_dataset_items(root_url, connection, collection) + items = fetch_dataset_items( + root_url, connection, collection, force_dataset_id + ) return (collection, items) return None @@ -272,12 +278,18 @@ def fetch_product_items( root_url: str, connection: CatalogParserConnection, child_links: list[pystac.Link], + force_product_id: Optional[str], + force_dataset_id: Optional[str], max_concurrent_requests: int, disable_progress_bar: bool, ) -> list[Optional[tuple[pystac.Collection, list[pystac.Item]]]]: tasks = [] for link in child_links: - tasks.append((root_url, connection, link.absolute_href)) + if force_product_id and force_product_id not in link.href: + continue + tasks.append( + (root_url, connection, link.absolute_href, force_dataset_id) + ) tdqm_bar_configuration = { "desc": "Fetching products", "disable": disable_progress_bar, @@ -297,6 +309,8 @@ def fetch_product_items( def fetch_all_products_items( connection: CatalogParserConnection, + force_product_id: Optional[str], + force_dataset_id: Optional[str], max_concurrent_requests: int, staging: bool, disable_progress_bar: bool, @@ -319,6 +333,8 @@ def fetch_all_products_items( root_url, connection, child_links, + force_product_id, + force_dataset_id, max_concurrent_requests, disable_progress_bar, ) @@ -326,18 +342,43 @@ def fetch_all_products_items( def parse_catalogue( + force_product_id: Optional[str], + force_dataset_id: Optional[str], max_concurrent_requests: int, disable_progress_bar: bool, staging: bool = False, ) -> CopernicusMarineCatalogue: logger.debug("Parsing catalogue...") progress_bar = tqdm( - total=2, desc="Fetching catalog", disable=disable_progress_bar + total=2, desc="Fetching catalogue", disable=disable_progress_bar ) - with CatalogParserConnection() as connection: + if force_dataset_id: + root_url = ( + MARINE_DATA_STORE_ROOT_METADATA_URL + if not staging + else MARINE_DATA_STORE_ROOT_METADATA_URL_STAGING + ) + dataset_product_mapping_url = ( + f"{root_url}/dataset_product_id_mapping.json" + ) + product_id_from_mapping = connection.get_json_file( + dataset_product_mapping_url + ).get(force_dataset_id) + if not product_id_from_mapping: + raise DatasetNotFound(force_dataset_id) + if ( + force_product_id + and product_id_from_mapping != force_product_id + ): + raise 
DatasetIsNotPartOfTheProduct( + force_dataset_id, force_product_id + ) + force_product_id = product_id_from_mapping marine_data_store_root_collections = fetch_all_products_items( connection=connection, + force_product_id=force_product_id, + force_dataset_id=force_dataset_id, max_concurrent_requests=max_concurrent_requests, staging=staging, disable_progress_bar=disable_progress_bar, @@ -365,75 +406,74 @@ def parse_catalogue( return full_catalog -@dataclass -class DistinctDatasetVersionPart: - dataset_id: str - dataset_version: str - dataset_part: str - layer_elements: list - raw_services: dict - stac_items_values: Optional[dict] - - # --------------------------------------- -# --- Utils function on any catalogue --- +# --- Utils functions # --------------------------------------- -def filter_catalogue_with_strings( - catalogue: CopernicusMarineCatalogue, tokens: list[str] -) -> dict[str, Any]: - return find_match_object(catalogue, tokens) or {} - - -def find_match_object(value: Any, tokens: list[str]) -> Any: - match: Any - if isinstance(value, str): - match = find_match_string(value, tokens) - elif isinstance(value, Enum): - match = find_match_enum(value, tokens) - elif isinstance(value, tuple): - match = find_match_tuple(value, tokens) - elif isinstance(value, list): - match = find_match_list(value, tokens) - elif hasattr(value, "__dict__"): - match = find_match_dict(value, tokens) - else: - match = None - return match - - -def find_match_string(string: str, tokens: list[str]) -> Optional[str]: - return string if any(token in string for token in tokens) else None - - -def find_match_enum(enum: Enum, tokens: list[str]) -> Any: - return find_match_object(enum.value, tokens) - - -def find_match_tuple(tuple: tuple, tokens: list[str]) -> Optional[list[Any]]: - return find_match_list(list(tuple), tokens) - - -def find_match_list(object_list: list[Any], tokens) -> Optional[list[Any]]: - def find_match(element: Any) -> Optional[Any]: - return find_match_object(element, tokens) - - filtered_list: list[Any] = list(map_reject_none(find_match, object_list)) - return filtered_list if filtered_list else None +def search_and_filter( + model: BaseModel, search_str: set[str] +) -> Union[BaseModel, None]: + filtered_fields = {} + search_str = {s.lower() for s in search_str} + for field, value in model: + if isinstance(value, BaseModel): + filtered_value = search_and_filter(value, search_str) + if filtered_value: + filtered_fields[field] = filtered_value + + elif isinstance(value, list) or isinstance(value, tuple): + filtered_list = [] + for item in value: + if isinstance(item, BaseModel): + filtered_item = search_and_filter(item, search_str) + if filtered_item: + filtered_list.append(filtered_item) + elif isinstance(item, str) and any( + s in item.lower() for s in search_str + ): + filtered_list.append(item) + + if filtered_list and isinstance(value, list): + filtered_fields[field] = filtered_list + + if filtered_list and isinstance(value, tuple): + filtered_fields[field] = tuple(filtered_list) + + elif isinstance(value, dict): + filtered_dict = {} + for key, val in value.items(): + if isinstance(val, BaseModel): + filtered_val = search_and_filter(val, search_str) + if filtered_val: + filtered_dict[key] = filtered_val + elif isinstance(val, str) and any( + s in val.lower() for s in search_str + ): + filtered_dict[key] = val + + if filtered_dict: + filtered_fields[field] = filtered_dict + + elif isinstance(value, Enum): + if any(s in value.name.lower() for s in search_str): + filtered_fields[field] = value 
+ + elif isinstance(value, str) and any( + s in value.lower() for s in search_str + ): + filtered_fields[field] = value + if filtered_fields: + return model.model_copy(update=filtered_fields) + return None -def find_match_dict( - structure: dict[str, Any], tokens -) -> Optional[dict[str, Any]]: - filtered_dict = { - key: find_match_object(value, tokens) - for key, value in structure.__dict__.items() - if find_match_object(value, tokens) - } - - found_match = any(filtered_dict.values()) - if found_match: - new_dict = dict(structure.__dict__, **filtered_dict) - structure.__dict__ = new_dict - return structure if found_match else None +def filter_catalogue_with_strings( + catalogue: CopernicusMarineCatalogue, search_str: set[str] +) -> CopernicusMarineCatalogue: + filtered_models = [] + for model in catalogue.products: + filtered_model = search_and_filter(model, search_str) + if filtered_model: + filtered_models.append(filtered_model) + return CopernicusMarineCatalogue(products=filtered_models) diff --git a/copernicusmarine/catalogue_parser/fields_query_builder.py b/copernicusmarine/catalogue_parser/fields_query_builder.py new file mode 100644 index 00000000..0f7465b2 --- /dev/null +++ b/copernicusmarine/catalogue_parser/fields_query_builder.py @@ -0,0 +1,80 @@ +from typing import Optional, Type, get_args, get_origin, get_type_hints + +from pydantic import BaseModel + + +def check_type_is_base_model(type_to_check: Type) -> bool: + try: + return issubclass(type_to_check, BaseModel) + except TypeError: + return False + + +class QueryBuilder: + fields_to_include_or_exclude: set[str] + + def __init__(self, fields_to_include_or_exclude: set[str]): + self.fields_to_include_or_exclude = fields_to_include_or_exclude + assert ( + self.fields_to_include_or_exclude + ), "fields_to_include_or_exclude is empty" + + def build_query( + self, + type_to_check: Type, + query: Optional[dict] = None, + ): + """ + Recursively builds a query to include or exclude fields from a Pydantic model. + Handles nested models, lists, and dictionaries.
+ + Example of resulting query: + input: + QueryBuilder({"product_id", "dataset_id"}).build_query(CopernicusMarineCatalogue) + + output: + { + "products": { + "__all__": { + "product_id": True, + "datasets": { + "__all__": { + "dataset_id": True + } + } + }, + } + } + """ # noqa: E501 + if query is None: + query = {} + + for ( + field_name, + field_type, + ) in get_type_hints(type_to_check).items(): + if field_name in self.fields_to_include_or_exclude: + query[field_name] = True + elif get_origin(field_type) is list: + if field_name not in query: + query[field_name] = {"__all__": {}} + result = self.build_query( + get_args(field_type)[0], + query[field_name]["__all__"], + ) + if not result: + del query[field_name] + elif get_origin(field_type) is dict: + if field_name not in query: + query[field_name] = {"__all__": {}} + result = self.build_query( + get_args(field_type)[1], + query[field_name]["__all__"], + ) + if not result: + del query[field_name] + elif check_type_is_base_model(field_type): + if field_name not in query: + query[field_name] = {} + self.build_query(field_type, query) + return query diff --git a/copernicusmarine/catalogue_parser/models.py b/copernicusmarine/catalogue_parser/models.py index 5f435482..282fb116 100644 --- a/copernicusmarine/catalogue_parser/models.py +++ b/copernicusmarine/catalogue_parser/models.py @@ -1,11 +1,9 @@ import re - -# TODO: change to pydantic -from dataclasses import dataclass from enum import Enum from typing import Optional, Type, TypeVar, Union import pystac +from pydantic import BaseModel, ConfigDict from copernicusmarine.command_line_interface.exception_handler import ( log_exception_debug, ) @@ -15,14 +13,15 @@ next_or_raise_exception, ) -# Output Types definitions - VERSION_DEFAULT = "default" PART_DEFAULT = "default" -# Service types -class _ServiceName(str, Enum): +class CopernicusMarineServiceNames(str, Enum): + """ + Services parsed by the Copernicus Marine toolbox. + """ + GEOSERIES = "arco-geo-series" TIMESERIES = "arco-time-series" FILES = "original-files" @@ -31,7 +30,12 @@ class _ServiceName(str, Enum): STATIC_ARCO = "static-arco" -class _ServiceShortName(str, Enum): +class CoperniusMarineServiceShortNames(str, Enum): + """ + Short names of the services parsed by the Copernicus Marine toolbox. + Also accepted when a service is requested.
+ """ + GEOSERIES = "geoseries" TIMESERIES = "timeseries" FILES = "files" @@ -40,40 +44,23 @@ class _ServiceShortName(str, Enum): STATIC_ARCO = "static-arco" -@dataclass(frozen=True) -class _Service: - service_name: _ServiceName - short_name: _ServiceShortName - - def aliases(self) -> list[str]: - return ( - [self.service_name.value, self.short_name.value] - if self.short_name.value != self.service_name.value - else [self.service_name.value] - ) - - def to_json_dict(self): - return { - "service_name": self.service_name.value, - "short_name": self.short_name.value, - } - - -class CopernicusMarineDatasetServiceType(_Service, Enum): - GEOSERIES = _ServiceName.GEOSERIES, _ServiceShortName.GEOSERIES - TIMESERIES = ( - _ServiceName.TIMESERIES, - _ServiceShortName.TIMESERIES, - ) - FILES = _ServiceName.FILES, _ServiceShortName.FILES - WMTS = _ServiceName.WMTS, _ServiceShortName.WMTS - OMI_ARCO = _ServiceName.OMI_ARCO, _ServiceShortName.OMI_ARCO - STATIC_ARCO = _ServiceName.STATIC_ARCO, _ServiceShortName.STATIC_ARCO +def short_name_from_service_name( + service_name: CopernicusMarineServiceNames, +) -> CoperniusMarineServiceShortNames: + mapping = { + CopernicusMarineServiceNames.GEOSERIES: CoperniusMarineServiceShortNames.GEOSERIES, # noqa + CopernicusMarineServiceNames.TIMESERIES: CoperniusMarineServiceShortNames.TIMESERIES, # noqa + CopernicusMarineServiceNames.FILES: CoperniusMarineServiceShortNames.FILES, # noqa + CopernicusMarineServiceNames.WMTS: CoperniusMarineServiceShortNames.WMTS, # noqa + CopernicusMarineServiceNames.OMI_ARCO: CoperniusMarineServiceShortNames.OMI_ARCO, # noqa + CopernicusMarineServiceNames.STATIC_ARCO: CoperniusMarineServiceShortNames.STATIC_ARCO, # noqa + } + return mapping[service_name] def _service_type_from_web_api_string( name: str, -) -> CopernicusMarineDatasetServiceType: +) -> CopernicusMarineServiceNames: class WebApi(Enum): GEOSERIES = "timeChunked" TIMESERIES = "geoChunked" @@ -82,13 +69,13 @@ class WebApi(Enum): OMI_ARCO = "omi" STATIC_ARCO = "static" - web_api_mapping = { - WebApi.GEOSERIES: CopernicusMarineDatasetServiceType.GEOSERIES, - WebApi.TIMESERIES: CopernicusMarineDatasetServiceType.TIMESERIES, - WebApi.FILES: CopernicusMarineDatasetServiceType.FILES, - WebApi.WMTS: CopernicusMarineDatasetServiceType.WMTS, - WebApi.OMI_ARCO: CopernicusMarineDatasetServiceType.OMI_ARCO, - WebApi.STATIC_ARCO: CopernicusMarineDatasetServiceType.STATIC_ARCO, + web_api_mapping: dict[WebApi, CopernicusMarineServiceNames] = { + WebApi.GEOSERIES: CopernicusMarineServiceNames.GEOSERIES, + WebApi.TIMESERIES: CopernicusMarineServiceNames.TIMESERIES, + WebApi.FILES: CopernicusMarineServiceNames.FILES, + WebApi.WMTS: CopernicusMarineServiceNames.WMTS, + WebApi.OMI_ARCO: CopernicusMarineServiceNames.OMI_ARCO, + WebApi.STATIC_ARCO: CopernicusMarineServiceNames.STATIC_ARCO, } return next_or_raise_exception( @@ -115,24 +102,43 @@ class ServiceNotHandled(Exception): # service formats class CopernicusMarineServiceFormat(str, Enum): + """ + Format of the data for a service. + For example, "arco-geo-series" and "arco-time-series" can be "zarr" or "sqlite" + """ + ZARR = "zarr" SQLITE = "sqlite" -@dataclass -class CopernicusMarineCoordinate: +Coordinate = TypeVar("Coordinate", bound="CopernicusMarineCoordinate") + + +class CopernicusMarineCoordinate(BaseModel): + """ + Coordinate for a variable. 
+ """ + + #: Coordinate id coordinate_id: str - units: str - minimum_value: Optional[float] - maximum_value: Optional[float] + #: Coordinate units + coordinate_unit: str + #: Minimum value of the coordinate + minimum_value: Optional[Union[float, str]] + #: Maximum value of the coordinate + maximum_value: Optional[Union[float, str]] + #: Step of the coordinate step: Optional[float] - values: Optional[list[Union[float, int]]] - chunking_length: Optional[int] + #: Values of the coordinate + values: Optional[list[Union[float, int, str]]] + #: Chunking length of the coordinate + chunking_length: Optional[Union[float, int]] + #: Chunk type of the coordinate chunk_type: Optional[str] - chunk_reference_coordinate: Optional[int] - chunk_geometric_factor: Optional[int] - - Coordinate = TypeVar("Coordinate", bound="CopernicusMarineCoordinate") + #: Chunk reference coordinate of the coordinate + chunk_reference_coordinate: Optional[Union[float, int]] + #: Chunk geometric factor of the coordinate + chunk_geometric_factor: Optional[Union[float, int]] @classmethod def from_metadata_item( @@ -171,7 +177,7 @@ def from_metadata_item( coordinate = cls( coordinate_id=dimension, - units=dimension_metadata.get("units") or "", + coordinate_unit=dimension_metadata.get("units") or "", minimum_value=minimum_value or coordinates_info.get("min"), # type: ignore maximum_value=coordinates_info.get("max"), step=coordinates_info.get("step"), @@ -205,28 +211,42 @@ def _convert_elevation_to_depth(self): self.coordinate_id = "depth" minimum_elevation = self.minimum_value maximum_elevation = self.maximum_value - if minimum_elevation is not None: + if minimum_elevation is not None and isinstance( + minimum_elevation, (int, float) + ): self.maximum_value = -minimum_elevation else: self.maximum_value = None - if maximum_elevation is not None: + if maximum_elevation is not None and isinstance( + maximum_elevation, (int, float) + ): self.minimum_value = -maximum_elevation else: self.minimum_value = None if self.values is not None: - self.values = [-value for value in self.values] + self.values = [-value for value in self.values] # type: ignore -@dataclass -class CopernicusMarineVariable: +Variable = TypeVar("Variable", bound="CopernicusMarineVariable") + + +class CopernicusMarineVariable(BaseModel): + """ + Variable of the dataset. + Contains the variable metadata and a list of coordinates. + """ + + #: Short name of the variable short_name: str - standard_name: str - units: str + #: Standard name of the variable + standard_name: Optional[str] + #: Units of the variable + units: Optional[str] + #: Bounding box of the variable bbox: Optional[list[float]] + #: List of coordinates of the variable coordinates: list[CopernicusMarineCoordinate] - Variable = TypeVar("Variable", bound="CopernicusMarineVariable") - @classmethod def from_metadata_item( cls: Type[Variable], @@ -259,15 +279,32 @@ def from_metadata_item( ) -@dataclass -class CopernicusMarineService: - service_type: CopernicusMarineDatasetServiceType +Service = TypeVar("Service", bound="CopernicusMarineService") + + +class CopernicusMarineService(BaseModel): + """ + Service available for a dataset. + Contains the service metadata and a list of variables. + For original files service, there are no variables. 
+ """ + + model_config = ConfigDict(use_enum_values=True) + + #: Service name + service_name: CopernicusMarineServiceNames + + #: Service short name + service_short_name: Optional[CoperniusMarineServiceShortNames] + + #: Service format: format of the service + #: (eg:"arco-geo-series" can be "zarr", "sqlite") service_format: Optional[CopernicusMarineServiceFormat] + #: Service uri: uri of the service uri: str + #: List of variables of the service variables: list[CopernicusMarineVariable] - Service = TypeVar("Service", bound="CopernicusMarineService") - @classmethod def from_metadata_item( cls: Type[Service], @@ -279,7 +316,12 @@ def from_metadata_item( service_uri = asset.get_absolute_href() if not service_uri: raise ServiceNotHandled(service_name) - service_type = _service_type_from_web_api_string(service_name) + service_name_parsed = _service_type_from_web_api_string( + service_name + ) + service_short_name = short_name_from_service_name( + service_name_parsed + ) service_format = None admp_in_preparation = metadata_item.properties.get( "admp_in_preparation" @@ -291,16 +333,15 @@ def from_metadata_item( if not service_uri.endswith("/"): if admp_in_preparation and ( - service_type - == CopernicusMarineDatasetServiceType.GEOSERIES - or service_type - == CopernicusMarineDatasetServiceType.TIMESERIES + service_name == CopernicusMarineServiceNames.GEOSERIES + or service_name == CopernicusMarineServiceNames.TIMESERIES ): return None else: bbox = metadata_item.bbox return cls( - service_type=service_type, + service_name=service_name_parsed, + service_short_name=service_short_name, uri=service_uri, variables=[ CopernicusMarineVariable.from_metadata_item( @@ -318,15 +359,24 @@ def from_metadata_item( return None -@dataclass -class CopernicusMarineVersionPart: +VersionPart = TypeVar("VersionPart", bound="CopernicusMarinePart") + + +class CopernicusMarinePart(BaseModel): + """ + Part of a dataset. Datasets can have multiple parts. + Each part contains a distinct list of services and distinct data. + """ + + #: Name of the part name: str + #: List of services available for the part services: list[CopernicusMarineService] + #: Date when the part will be retired retired_date: Optional[str] + #: Date when the part will be/was released released_date: Optional[str] - VersionPart = TypeVar("VersionPart", bound="CopernicusMarineVersionPart") - @classmethod def from_metadata_item( cls: Type[VersionPart], metadata_item: pystac.Item, part_name: str @@ -358,24 +408,28 @@ def from_metadata_item( released_date=released_date, ) - def get_service_by_service_type( - self, service_type: CopernicusMarineDatasetServiceType - ): + def get_service_by_service_name( + self, service_name: CopernicusMarineServiceNames + ) -> CopernicusMarineService: return next( service for service in self.services - if service.service_type == service_type + if service.service_name == service_name ) -@dataclass -class CopernicusMarineDatasetVersion: +class CopernicusMarineVersion(BaseModel): + """ + Version of a dataset. Datasets can have multiple versions. + Usually around data releases. 
+ """ + + #: Label of the version (eg: "latest", "202101") label: str - parts: list[CopernicusMarineVersionPart] + #: List of parts of the version + parts: list[CopernicusMarinePart] - def get_part( - self, force_part: Optional[str] - ) -> CopernicusMarineVersionPart: + def get_part(self, force_part: Optional[str]) -> CopernicusMarinePart: wanted_part = force_part or PART_DEFAULT for part in self.parts: if part.name == wanted_part: @@ -414,15 +468,22 @@ def sort_parts(self) -> tuple[Optional[str], Optional[str]]: return self.parts[0].released_date, self.parts[0].retired_date -@dataclass -class CopernicusMarineProductDataset: +class CopernicusMarineDataset(BaseModel): + """ + Dataset of a product. + Contains the dataset metadata and a list of versions. + """ + + #: The datasetID dataset_id: str + #: The dataset name dataset_name: str - versions: list[CopernicusMarineDatasetVersion] + #: List of versions of the dataset + versions: list[CopernicusMarineVersion] def get_version( self, force_version: Optional[str] - ) -> CopernicusMarineDatasetVersion: + ) -> CopernicusMarineVersion: wanted_version = force_version or VERSION_DEFAULT for version in self.versions: if version.label == wanted_version: @@ -464,7 +525,7 @@ def parse_dataset_metadata_items( dataset_version, dataset_part, ) = get_version_and_part_from_full_dataset_id(metadata_item.id) - part = CopernicusMarineVersionPart.from_metadata_item( + part = CopernicusMarinePart.from_metadata_item( metadata_item, dataset_part ) if not part: @@ -476,28 +537,47 @@ def parse_dataset_metadata_items( break else: all_versions.add(dataset_version) - version = CopernicusMarineDatasetVersion( + version = CopernicusMarineVersion( label=dataset_version, parts=[part] ) self.versions.append(version) -@dataclass -class CopernicusMarineProduct: +class CopernicusMarineProduct(BaseModel): + """ + Product of the catalogue. + Contains the product metadata and a list of datasets. + """ + + #: Title of the product title: str + #: ProductID product_id: str + #: Thumbnail url of the product thumbnail_url: str - description: str + #: Description of the product + description: Optional[str] + #: Digital object identifier of the product digital_object_identifier: Optional[str] + #: Sources of the product sources: list[str] + #: Processing level of the product processing_level: Optional[str] + #: Production center of the product production_center: str + #: Keywords of the product keywords: Optional[list[str]] - datasets: list[CopernicusMarineProductDataset] + #: List of datasets of the product + datasets: list[CopernicusMarineDataset] -@dataclass -class CopernicusMarineCatalogue: +class CopernicusMarineCatalogue(BaseModel): + """ + Catalogue of the Copernicus Marine service. + You can find here the products of the catalogue and their metadata as the response of the describe command/function. + """ # noqa + + #: List of products in the catalogue products: list[CopernicusMarineProduct] def filter_only_official_versions_and_parts(self): @@ -537,7 +617,7 @@ class DatasetVersionPartNotFound(Exception): If yes, please contact user support. """ - def __init__(self, version: CopernicusMarineDatasetVersion): + def __init__(self, version: CopernicusMarineVersion): message = f"No part found for version {version.label}" super().__init__(message) @@ -552,7 +632,7 @@ class DatasetVersionNotFound(Exception): If yes, please contact user support. 
""" - def __init__(self, dataset: CopernicusMarineProductDataset): + def __init__(self, dataset: CopernicusMarineDataset): message = f"No version found for dataset {dataset.dataset_id}" super().__init__(message) @@ -580,6 +660,24 @@ def __init__(self, dataset_id: str): super().__init__(message) +class DatasetIsNotPartOfTheProduct(Exception): + """ + Exception raised when the dataset is not part of the product. + + If you request a datasetID and a productID + at the same time with the describe command, + please verify that the dataset is part of the product. + """ + + def __init__(self, dataset_id: str, product_id: str): + message = ( + f"{dataset_id} not part of {product_id} " + f"Please check that the dataset is part of the product and " + f"the input datasetID is correct." + ) + super().__init__(message) + + REGEX_PATTERN_DATE_YYYYMM = r"[12]\d{3}(0[1-9]|1[0-2])" PART_SEPARATOR = "--ext--" diff --git a/copernicusmarine/command_line_interface/group_describe.py b/copernicusmarine/command_line_interface/group_describe.py index 7290c128..1fc766e3 100644 --- a/copernicusmarine/command_line_interface/group_describe.py +++ b/copernicusmarine/command_line_interface/group_describe.py @@ -1,7 +1,10 @@ import logging +from typing import Optional import click +from copernicusmarine.catalogue_parser.fields_query_builder import QueryBuilder +from copernicusmarine.catalogue_parser.models import CopernicusMarineCatalogue from copernicusmarine.command_line_interface.exception_handler import ( log_exception_and_exit, ) @@ -9,6 +12,7 @@ from copernicusmarine.core_functions import documentation_utils from copernicusmarine.core_functions.click_custom_class import ( CustomClickOptionsCommand, + DeprecatedClickOption, ) from copernicusmarine.core_functions.describe import describe_function @@ -33,7 +37,7 @@ def cli_describe() -> None: .. code-block:: bash - copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --include-datasets + copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --returned-fields datasets .. 
code-block:: bash @@ -42,6 +46,9 @@ def cli_describe() -> None: ) @click.option( "--include-description", + cls=DeprecatedClickOption, + deprecated=["--include-description"], + preferred="--returned-fields description", type=bool, is_flag=True, default=False, @@ -49,6 +56,9 @@ def cli_describe() -> None: ) @click.option( "--include-datasets", + cls=DeprecatedClickOption, + deprecated=["--include-datasets"], + preferred="--returned-fields datasets", type=bool, is_flag=True, default=False, @@ -56,6 +66,9 @@ def cli_describe() -> None: ) @click.option( "--include-keywords", + cls=DeprecatedClickOption, + deprecated=["--include-keywords"], + preferred="--returned-fields keywords", type=bool, is_flag=True, default=False, @@ -71,11 +84,28 @@ def cli_describe() -> None: @click.option( "-a", "--include-all", + cls=DeprecatedClickOption, + deprecated=["--include-all"], + preferred="--returned-fields all", type=bool, is_flag=True, default=False, help=documentation_utils.DESCRIBE["INCLUDE_ALL_HELP"], ) +@click.option( + "--returned-fields", + "-r", + type=str, + default=None, + help=documentation_utils.DESCRIBE["RETURNED_FIELDS_HELP"], +) +@click.option( + "--returned-fields-exclude", + "-e", + type=str, + default=None, + help=documentation_utils.DESCRIBE["RETURNED_FIELDS_EXCLUDE_HELP"], +) @click.option( "--contains", "-c", @@ -83,6 +113,20 @@ def cli_describe() -> None: multiple=True, help=documentation_utils.DESCRIBE["CONTAINS_HELP"], ) +@click.option( + "--product-id", + "-p", + type=str, + default=None, + help=documentation_utils.DESCRIBE["PRODUCT_ID_HELP"], +) +@click.option( + "--dataset-id", + "-i", + type=str, + default=None, + help=documentation_utils.DESCRIBE["DATASET_ID_HELP"], +) @click.option( "--max-concurrent-requests", type=int, @@ -110,7 +154,11 @@ def describe( include_keywords: bool, include_versions: bool, include_all: bool, + returned_fields: Optional[str], + returned_fields_exclude: Optional[str], contains: list[str], + product_id: Optional[str], + dataset_id: Optional[str], max_concurrent_requests: int, disable_progress_bar: bool, log_level: str, @@ -125,20 +173,87 @@ def describe( if logger.isEnabledFor(logging.DEBUG): logger.debug("DEBUG mode activated") + if include_all: + include_versions = True + + response_catalogue = describe_function( + include_versions, + contains, + product_id, + dataset_id, + max_concurrent_requests, + disable_progress_bar, + staging, + ) + include_query, exclude_query = _create_include_and_exclude( + returned_fields, + returned_fields_exclude, + include_datasets, + include_keywords, + include_description, + include_all, + ) + blank_logger.info( + response_catalogue.model_dump_json( + exclude_unset=True, + exclude_none=True, + exclude=exclude_query, + include=include_query, + indent=2, + context={"sort_keys": False}, + ) + ) + + +def _create_include_and_exclude( + returned_fields: Optional[str], + returned_fields_exclude: Optional[str], + include_datasets: bool, + include_keywords: bool, + include_description: bool, + include_all: bool, +) -> tuple[Optional[dict], Optional[dict]]: + if include_all: include_description = True include_datasets = True include_keywords = True - include_versions = True + include_in_output = set() + if returned_fields: + include_in_output = set(returned_fields.replace(" ", "").split(",")) + exclude_from_output = set() + if returned_fields_exclude: + exclude_from_output = set( + returned_fields_exclude.replace(" ", "").split(",") + ) + if ( + not include_datasets + and not exclude_from_output + and not include_in_output + 
and ("datasets" not in include_in_output) + ): + exclude_from_output.add("datasets") + if ( + not include_keywords + and not exclude_from_output + and not include_in_output + and ("keywords" not in include_in_output) + ): + exclude_from_output.add("keywords") + if ( + not include_description + and not exclude_from_output + and not include_in_output + and ("description" not in include_in_output) + ): + exclude_from_output.add("description") - json_dump = describe_function( - include_description=include_description, - include_datasets=include_datasets, - include_keywords=include_keywords, - include_versions=include_versions, - contains=contains, - max_concurrent_requests=max_concurrent_requests, - disable_progress_bar=disable_progress_bar, - staging=staging, - ) - blank_logger.info(json_dump) + include_query = None + if include_in_output and "all" not in include_in_output: + query_builder = QueryBuilder(include_in_output) + include_query = query_builder.build_query(CopernicusMarineCatalogue) + exclude_query = None + if exclude_from_output: + query_builder = QueryBuilder(exclude_from_output) + exclude_query = query_builder.build_query(CopernicusMarineCatalogue) + return include_query, exclude_query diff --git a/copernicusmarine/core_functions/click_custom_class.py b/copernicusmarine/core_functions/click_custom_class.py index 880f9178..34f5c55b 100644 --- a/copernicusmarine/core_functions/click_custom_class.py +++ b/copernicusmarine/core_functions/click_custom_class.py @@ -4,9 +4,8 @@ import click from copernicusmarine.core_functions.deprecated_options import ( - DEPRECATED_OPTIONS, + log_deprecated_message, ) -from copernicusmarine.core_functions.utils import log_deprecated_message logger = logging.getLogger("copernicusmarine") @@ -26,15 +25,16 @@ def make_parser(self, ctx): options = set(parser._short_opt.values()) options |= set(parser._long_opt.values()) - # get name of the command - command_name = ctx.command.name - for option in options: + if not isinstance(option.obj, DeprecatedClickOption): + continue def make_process(an_option): orig_process = an_option.process - deprecated = getattr(an_option.obj, "deprecated", []) - preferred = getattr(an_option.obj, "preferred", []) + deprecated = getattr(an_option.obj, "deprecated", None) + preferred = getattr(an_option.obj, "preferred", None) + msg = "Expected `deprecated` value for `{}`" + assert deprecated is not None, msg.format(an_option.obj.name) def process(value, state): frame = inspect.currentframe() @@ -42,25 +42,9 @@ def process(value, state): opt = frame.f_back.f_locals.get("opt") finally: del frame - old_alias = opt.replace("--", "").replace("-", "_") # type: ignore - if ( - opt in deprecated - or old_alias - in DEPRECATED_OPTIONS.deprecated_options_by_old_names - ): - alias_info = ( - DEPRECATED_OPTIONS.deprecated_options_by_old_names[ - old_alias - ] - ) - if command_name in alias_info.targeted_functions: - log_deprecated_message( - opt, - preferred, - alias_info.deleted_for_v2, - alias_info.deprecated_for_v2, - alias_info.only_for_v2, - ) + + if opt in deprecated: + log_deprecated_message(opt, preferred) return orig_process(value, state) return process diff --git a/copernicusmarine/core_functions/deprecated_options.py b/copernicusmarine/core_functions/deprecated_options.py index 9d80a8de..0baa6ff1 100644 --- a/copernicusmarine/core_functions/deprecated_options.py +++ b/copernicusmarine/core_functions/deprecated_options.py @@ -1,40 +1,24 @@ +import functools +import logging from collections.abc import Iterator, Mapping -from typing 
import Dict, List, Optional +from typing import Any, Callable + +logger = logging.getLogger("copernicusmarine") class DeprecatedOption: def __init__( - self, - old_name, - new_name, - replace=True, - deprecated_for_v2=False, - deleted_for_v2=True, - only_for_v2=False, - targeted_functions: Optional[list[str]] = None, + self, old_name, new_name, replace=True, do_not_pass=False ) -> None: self.old_name = old_name self.new_name = new_name self.replace = replace - self.deprecated_for_v2 = deprecated_for_v2 - self.deleted_for_v2 = deleted_for_v2 - self.only_for_v2 = only_for_v2 - if not targeted_functions: - self.targeted_functions = [ - "describe", - "get", - "subset", - "login", - "open_dataset", - "read_dataframe", - ] - else: - self.targeted_functions = targeted_functions + self.do_not_pass = do_not_pass class DeprecatedOptionMapping(Mapping): - def __init__(self, deprecated_options: List[DeprecatedOption]) -> None: - self.deprecated_options_by_old_names: Dict[str, DeprecatedOption] = {} + def __init__(self, deprecated_options: list[DeprecatedOption]) -> None: + self.deprecated_options_by_old_names: dict[str, DeprecatedOption] = {} for value in deprecated_options: if value not in self.deprecated_options_by_old_names: self.deprecated_options_by_old_names[value.old_name] = value @@ -48,16 +32,78 @@ def __iter__(self) -> Iterator: def __len__(self) -> int: return self.deprecated_options_by_old_names.__len__() - @property - def dict_old_names_to_new_names(self): - result_dict = {} - for ( - old_name, - deprecated_option, - ) in self.deprecated_options_by_old_names.items(): - if deprecated_option.replace: - result_dict[old_name] = deprecated_option.new_name - return result_dict + +DEPRECATED_OPTIONS: DeprecatedOptionMapping = DeprecatedOptionMapping( + [ + DeprecatedOption( + old_name="include_datasets", + new_name="include_datasets", + do_not_pass=True, + ), + DeprecatedOption( + old_name="include_keywords", + new_name="include_keywords", + do_not_pass=True, + ), + DeprecatedOption( + old_name="include_all", + new_name="include_all", + do_not_pass=True, + ), + DeprecatedOption( + old_name="include_description", + new_name="include_description", + do_not_pass=True, + ), + ] +) + + +def get_deprecated_message(old_value, preferred_value): + return ( + f"'{old_value}' has been deprecated, use '{preferred_value}' instead" + ) + + +def log_deprecated_message(old_value, preferred_value): + if preferred_value is None: + logger.warning(f"'{old_value}' has been deprecated") + else: + logger.warning(get_deprecated_message(old_value, preferred_value)) + + +def raise_both_old_and_new_value_error(old_value, new_value): + raise TypeError( + f"Received both {old_value} and {new_value} as arguments! 
" + f"{get_deprecated_message(old_value, new_value)}" + ) + + +def deprecated_python_option(aliases: DeprecatedOptionMapping) -> Callable: + def deco(f: Callable): + @functools.wraps(f) + def wrapper(*args, **kwargs): + rename_kwargs(f.__name__, kwargs, aliases) + return f(*args, **kwargs) + + return wrapper + + return deco -DEPRECATED_OPTIONS: DeprecatedOptionMapping = DeprecatedOptionMapping([]) +def rename_kwargs( + func_name: str, kwargs: dict[str, Any], aliases: DeprecatedOptionMapping +): + for old, alias_info in aliases.deprecated_options_by_old_names.items(): + new = alias_info.new_name + if old in kwargs: + if new in kwargs and old != new: + raise_both_old_and_new_value_error(old, new) + if old == new: + log_deprecated_message(old, None) + else: + log_deprecated_message(old, new) + if alias_info.replace: + kwargs[new] = kwargs.pop(old) + if alias_info.do_not_pass: + del kwargs[old] diff --git a/copernicusmarine/core_functions/describe.py b/copernicusmarine/core_functions/describe.py index e0759fbc..77398cb5 100644 --- a/copernicusmarine/core_functions/describe.py +++ b/copernicusmarine/core_functions/describe.py @@ -1,29 +1,25 @@ -import json import logging +from typing import Optional from copernicusmarine.catalogue_parser.catalogue_parser import ( filter_catalogue_with_strings, parse_catalogue, ) -from copernicusmarine.catalogue_parser.models import ( - CopernicusMarineCatalogue, - CopernicusMarineDatasetServiceType, -) +from copernicusmarine.catalogue_parser.models import CopernicusMarineCatalogue from copernicusmarine.core_functions.versions_verifier import VersionVerifier logger = logging.getLogger("copernicusmarine") def describe_function( - include_description: bool, - include_datasets: bool, - include_keywords: bool, include_versions: bool, contains: list[str], + force_product_id: Optional[str], + force_dataset_id: Optional[str], max_concurrent_requests: int, disable_progress_bar: bool, staging: bool, -) -> str: +) -> CopernicusMarineCatalogue: VersionVerifier.check_version_describe(staging) if staging: @@ -33,6 +29,8 @@ def describe_function( ) base_catalogue: CopernicusMarineCatalogue = parse_catalogue( + force_product_id=force_product_id, + force_dataset_id=force_dataset_id, max_concurrent_requests=max_concurrent_requests, disable_progress_bar=disable_progress_bar, staging=staging, @@ -40,27 +38,9 @@ def describe_function( if not include_versions: base_catalogue.filter_only_official_versions_and_parts() - catalogue_dict = ( - filter_catalogue_with_strings(base_catalogue, contains) + response_catalogue = ( + filter_catalogue_with_strings(base_catalogue, set(contains)) if contains - else base_catalogue.__dict__ - ) - - def default_filter(obj): - if isinstance(obj, CopernicusMarineDatasetServiceType): - return obj.to_json_dict() - - attributes = obj.__dict__ - attributes.pop("__objclass__", None) - if not include_description: - attributes.pop("description", None) - if not include_datasets: - attributes.pop("datasets", None) - if not include_keywords: - attributes.pop("keywords", None) - return obj.__dict__ - - json_dump = json.dumps( - catalogue_dict, default=default_filter, sort_keys=False, indent=2 + else base_catalogue ) - return json_dump + return response_catalogue diff --git a/copernicusmarine/core_functions/documentation_utils.py b/copernicusmarine/core_functions/documentation_utils.py index 798b2f5e..8ba3785b 100644 --- a/copernicusmarine/core_functions/documentation_utils.py +++ b/copernicusmarine/core_functions/documentation_utils.py @@ -82,25 +82,42 @@ 
"DESCRIBE_RESPONSE_HELP": ( "JSON\n A dictionary containing the retrieved metadata information." ), - "MAX_CONCURRENT_REQUESTS_HELP": ( - "Maximum number of concurrent requests (>=1). Default 15. The command uses " - "a thread pool executor to manage concurrent requests." - ), - "INCLUDE_DESCRIPTION_HELP": "Include product description in output.", - "INCLUDE_DATASETS_HELP": "Include product dataset details in output.", - "INCLUDE_KEYWORDS_HELP": "Include product keyword details in output.", + "INCLUDE_DESCRIPTION_HELP": "Deprecated. Include product description in output.", + "INCLUDE_DATASETS_HELP": "Deprecated. Include product dataset details in output.", + "INCLUDE_KEYWORDS_HELP": "Deprecated. Include product keyword details in output.", "INCLUDE_VERSIONS_HELP": ( "Include dataset versions in output. By default, shows only the default " "version." ), "INCLUDE_ALL_HELP": ( - "Include all the possible data in output: description, datasets, keywords, " + "Deprecated. Include all the possible data in output: " + "description, datasets, keywords, " "and versions." ), + "RETURNED_FIELDS_HELP": ( + "Option to specify the fields to return in the output. " + "The fields are separated by a comma. You can use 'all' to return all fields." + ), + "RETURNED_FIELDS_EXCLUDE_HELP": ( + "Option to specify the fields to exclude from the output. " + "The fields are separated by a comma." + ), "CONTAINS_HELP": ( "Filter catalogue output. Returns products with attributes matching a string " "token." ), + "PRODUCT_ID_HELP": ( + "Force the productID to be used for the describe command. Will not parse the " + "whole catalogue, but only the product with the given productID." + ), + "DATASET_ID_HELP": ( + "Force the datasetID to be used for the describe command. Will not " + "parse the whole catalogue, but only the dataset with the given datasetID." + ), + "MAX_CONCURRENT_REQUESTS_HELP": ( + "Maximum number of concurrent requests (>=1). Default 15. The command uses " + "a thread pool executor to manage concurrent requests." + ), } SUBSET: dict[str, str] = { @@ -115,7 +132,7 @@ "SERVICE_HELP": ( f"Force download through one of the available services using the service name " f"among {CommandType.SUBSET.service_names()} or " - f"its short name among {CommandType.SUBSET.service_names()}." + f"its short name among {CommandType.SUBSET.short_names_services()}." ), "VARIABLES_HELP": "Specify dataset variable. Can be used multiple times.", "MINIMUM_LONGITUDE_HELP": ( @@ -243,4 +260,4 @@ SUBSET.update(SHARED) GET.update(SHARED) LOGIN.update(SHARED) -DESCRIBE.update(SHARED) +DESCRIBE.update({k: v for k, v in SHARED.items() if k not in DESCRIBE}) diff --git a/copernicusmarine/core_functions/get.py b/copernicusmarine/core_functions/get.py index aa1b4405..ee0d4adc 100644 --- a/copernicusmarine/core_functions/get.py +++ b/copernicusmarine/core_functions/get.py @@ -157,8 +157,7 @@ def _run_get_request( ) get_request.dataset_url = retrieval_service.uri logger.info( - "Downloading using service " - f"{retrieval_service.service_type.service_name.value}..." + "Downloading using service " f"{retrieval_service.service_name}..." 
) downloaded_files = download_original_files( username, diff --git a/copernicusmarine/core_functions/services_utils.py b/copernicusmarine/core_functions/services_utils.py index 6678d99e..8a0dc924 100644 --- a/copernicusmarine/core_functions/services_utils.py +++ b/copernicusmarine/core_functions/services_utils.py @@ -1,19 +1,19 @@ import logging from dataclasses import dataclass from enum import Enum -from itertools import chain from typing import List, Literal, Optional, Union from copernicusmarine.catalogue_parser.catalogue_parser import ( get_dataset_metadata, ) from copernicusmarine.catalogue_parser.models import ( - CopernicusMarineDatasetServiceType, - CopernicusMarineDatasetVersion, - CopernicusMarineProductDataset, + CopernicusMarineDataset, + CopernicusMarinePart, CopernicusMarineService, CopernicusMarineServiceFormat, - CopernicusMarineVersionPart, + CopernicusMarineServiceNames, + CopernicusMarineVersion, + short_name_from_service_name, ) from copernicusmarine.catalogue_parser.request_structure import ( DatasetTimeAndSpaceSubset, @@ -40,74 +40,59 @@ class _Command(Enum): @dataclass(frozen=True) class Command: command_name: _Command - service_types_by_priority: List[CopernicusMarineDatasetServiceType] + service_names_by_priority: List[CopernicusMarineServiceNames] def service_names(self) -> List[str]: - return list( - map( - lambda service_type: service_type.service_name.value, - self.service_types_by_priority, - ) - ) - - def service_short_names(self) -> List[str]: - return list( - map( - lambda service_type: service_type.short_name.value, - self.service_types_by_priority, - ) - ) - - def service_aliases(self) -> List[str]: - return list( - chain( - *map( - lambda service_type: service_type.aliases(), - self.service_types_by_priority, + return [ + service_name.value + for service_name in self.service_names_by_priority + ] + + def short_names_services(self) -> List[str]: + return [ + short_name_from_service_name(service_name).value + for service_name in self.service_names_by_priority + ] + + def get_available_service_for_command(self) -> list[str]: + available_services = [] + for service_name in self.service_names_by_priority: + available_services.append(service_name.value) + short_name = short_name_from_service_name(service_name) + if short_name != service_name: + available_services.append( + short_name_from_service_name(service_name).value ) - ) - ) + return available_services class CommandType(Command, Enum): SUBSET = ( _Command.SUBSET, [ - CopernicusMarineDatasetServiceType.GEOSERIES, - CopernicusMarineDatasetServiceType.TIMESERIES, - CopernicusMarineDatasetServiceType.OMI_ARCO, - CopernicusMarineDatasetServiceType.STATIC_ARCO, + CopernicusMarineServiceNames.GEOSERIES, + CopernicusMarineServiceNames.TIMESERIES, + CopernicusMarineServiceNames.OMI_ARCO, + CopernicusMarineServiceNames.STATIC_ARCO, ], ) GET = ( _Command.GET, [ - CopernicusMarineDatasetServiceType.FILES, + CopernicusMarineServiceNames.FILES, ], ) LOAD = ( _Command.LOAD, [ - CopernicusMarineDatasetServiceType.GEOSERIES, - CopernicusMarineDatasetServiceType.TIMESERIES, - CopernicusMarineDatasetServiceType.OMI_ARCO, - CopernicusMarineDatasetServiceType.STATIC_ARCO, + CopernicusMarineServiceNames.GEOSERIES, + CopernicusMarineServiceNames.TIMESERIES, + CopernicusMarineServiceNames.OMI_ARCO, + CopernicusMarineServiceNames.STATIC_ARCO, ], ) -def assert_service_type_for_command( - service_type: CopernicusMarineDatasetServiceType, command_type: CommandType -) -> CopernicusMarineDatasetServiceType: - return 
next_or_raise_exception( - ( - service_type - for service_type in command_type.service_types_by_priority - ), - _service_type_does_not_exist_for_command(service_type, command_type), - ) - - class ServiceDoesNotExistForCommand(Exception): """ Exception raised when the service does not exist for the command. @@ -115,45 +100,43 @@ class ServiceDoesNotExistForCommand(Exception): Please make sure the service exists for the command. """ # TODO: list available services per command - def __init__(self, service_name, command_name, available_services): + def __init__( + self, + requested_service_name: str, + command_name: str, + available_services: list[str], + ): super().__init__() self.__setattr__( "custom_exception_message", - f"Service {service_name} " + f"Service {requested_service_name} " f"does not exist for command {command_name}. " f"Possible service{'s' if len(available_services) > 1 else ''}: " f"{available_services}", ) -def _service_type_does_not_exist_for_command( - service_type: CopernicusMarineDatasetServiceType, command_type: CommandType -) -> ServiceDoesNotExistForCommand: - return _service_does_not_exist_for_command( - service_type.service_name.value, command_type - ) - - def _service_does_not_exist_for_command( - service_name: str, command_type: CommandType + requested_service_name: str, + command_type: CommandType, ) -> ServiceDoesNotExistForCommand: return ServiceDoesNotExistForCommand( - service_name, + requested_service_name, command_type.command_name.value, - command_type.service_aliases(), + command_type.get_available_service_for_command(), ) def _select_forced_service( - dataset_version_part: CopernicusMarineVersionPart, - force_service_type: CopernicusMarineDatasetServiceType, + dataset_version_part: CopernicusMarinePart, + force_service_name: CopernicusMarineServiceNames, command_type: CommandType, ) -> CopernicusMarineService: return next_or_raise_exception( ( service for service in dataset_version_part.services - if service.service_type == force_service_type + if service.service_name == force_service_name ), _service_not_available_error(dataset_version_part, command_type), ) @@ -164,8 +147,8 @@ def _get_best_arco_service_type( dataset_url: str, username: Optional[str], ) -> Literal[ - CopernicusMarineDatasetServiceType.TIMESERIES, - CopernicusMarineDatasetServiceType.GEOSERIES, + CopernicusMarineServiceNames.TIMESERIES, + CopernicusMarineServiceNames.GEOSERIES, ]: dataset = custom_open_zarr.open_zarr( dataset_url, copernicus_marine_username=username @@ -213,46 +196,46 @@ def _get_best_arco_service_type( temporal_coverage = subset_temporal_dimensions / temporal_dimensions if geographical_coverage >= temporal_coverage: - return CopernicusMarineDatasetServiceType.GEOSERIES - return CopernicusMarineDatasetServiceType.TIMESERIES + return CopernicusMarineServiceNames.GEOSERIES + return CopernicusMarineServiceNames.TIMESERIES -def _get_first_available_service_type( +def _get_first_available_service_name( command_type: CommandType, - dataset_available_service_types: list[CopernicusMarineDatasetServiceType], -) -> CopernicusMarineDatasetServiceType: - available_service_types = command_type.service_types_by_priority + dataset_available_service_names: list[CopernicusMarineServiceNames], +) -> CopernicusMarineServiceNames: + available_service_names = command_type.service_names_by_priority return next_or_raise_exception( ( - service_type - for service_type in available_service_types - if service_type in dataset_available_service_types + service_name + for service_name in 
available_service_names + if service_name in dataset_available_service_names ), _no_service_available_for_command(command_type), ) def _select_service_by_priority( - dataset_version_part: CopernicusMarineVersionPart, + dataset_version_part: CopernicusMarinePart, command_type: CommandType, dataset_subset: Optional[DatasetTimeAndSpaceSubset], username: Optional[str], ) -> CopernicusMarineService: - dataset_available_service_types = [ - service.service_type for service in dataset_version_part.services + dataset_available_service_names = [ + service.service_name for service in dataset_version_part.services ] - first_available_service_type = _get_first_available_service_type( + first_available_service_name = _get_first_available_service_name( command_type=command_type, - dataset_available_service_types=dataset_available_service_types, + dataset_available_service_names=dataset_available_service_names, ) - first_available_service = dataset_version_part.get_service_by_service_type( - service_type=first_available_service_type + first_available_service = dataset_version_part.get_service_by_service_name( + service_name=first_available_service_name ) if ( - CopernicusMarineDatasetServiceType.GEOSERIES - in dataset_available_service_types - and CopernicusMarineDatasetServiceType.TIMESERIES - in dataset_available_service_types + CopernicusMarineServiceNames.GEOSERIES + in dataset_available_service_names + and CopernicusMarineServiceNames.TIMESERIES + in dataset_available_service_names and command_type in [CommandType.SUBSET, CommandType.LOAD] and dataset_subset is not None ): @@ -260,15 +243,13 @@ def _select_service_by_priority( first_available_service.service_format == CopernicusMarineServiceFormat.SQLITE ): - raise FormatNotSupported( - first_available_service.service_format.value - ) - best_arco_service_type: CopernicusMarineDatasetServiceType = ( + raise FormatNotSupported(first_available_service.service_format) + best_arco_service_type: CopernicusMarineServiceNames = ( _get_best_arco_service_type( dataset_subset, first_available_service.uri, username ) ) - return dataset_version_part.get_service_by_service_type( + return dataset_version_part.get_service_by_service_name( best_arco_service_type ) return first_available_service @@ -277,7 +258,7 @@ def _select_service_by_priority( @dataclass class RetrievalService: dataset_id: str - service_type: CopernicusMarineDatasetServiceType + service_name: CopernicusMarineServiceNames service_format: Optional[CopernicusMarineServiceFormat] uri: str dataset_valid_start_date: Optional[Union[str, int, float]] @@ -288,7 +269,7 @@ def get_retrieval_service( dataset_id: str, force_dataset_version_label: Optional[str], force_dataset_part_label: Optional[str], - force_service_type_string: Optional[str], + force_service_name_or_short_name: Optional[str], command_type: CommandType, index_parts: bool = False, dataset_subset: Optional[DatasetTimeAndSpaceSubset] = None, @@ -304,9 +285,11 @@ def get_retrieval_service( " you can use 'copernicusmarine describe --include-datasets " "--contains ' to find datasets" ) - force_service_type: Optional[CopernicusMarineDatasetServiceType] = ( - _service_type_from_string(force_service_type_string, command_type) - if force_service_type_string + force_service_name: Optional[CopernicusMarineServiceNames] = ( + _service_name_from_string( + force_service_name_or_short_name, command_type + ) + if force_service_name_or_short_name else None ) @@ -314,7 +297,7 @@ def get_retrieval_service( dataset=dataset_metadata, 
force_dataset_version_label=force_dataset_version_label, force_dataset_part_label=force_dataset_part_label, - force_service_type=force_service_type, + force_service_name=force_service_name, command_type=command_type, index_parts=index_parts, dataset_subset=dataset_subset, @@ -324,10 +307,10 @@ def get_retrieval_service( def _get_retrieval_service_from_dataset( - dataset: CopernicusMarineProductDataset, + dataset: CopernicusMarineDataset, force_dataset_version_label: Optional[str], force_dataset_part_label: Optional[str], - force_service_type: Optional[CopernicusMarineDatasetServiceType], + force_service_name: Optional[CopernicusMarineServiceNames], command_type: CommandType, index_parts: bool, dataset_subset: Optional[DatasetTimeAndSpaceSubset], @@ -349,7 +332,7 @@ def _get_retrieval_service_from_dataset( dataset_id=dataset.dataset_id, dataset_version=dataset_version, force_dataset_part_label=force_dataset_part_label, - force_service_type=force_service_type, + force_service_name=force_service_name, command_type=command_type, index_parts=index_parts, dataset_subset=dataset_subset, @@ -360,9 +343,9 @@ def _get_retrieval_service_from_dataset( def _get_retrieval_service_from_dataset_version( dataset_id: str, - dataset_version: CopernicusMarineDatasetVersion, + dataset_version: CopernicusMarineVersion, force_dataset_part_label: Optional[str], - force_service_type: Optional[CopernicusMarineDatasetServiceType], + force_service_name: Optional[CopernicusMarineServiceNames], command_type: CommandType, index_parts: bool, dataset_subset: Optional[DatasetTimeAndSpaceSubset], @@ -395,18 +378,17 @@ def _get_retrieval_service_from_dataset_version( dataset_id, dataset_version, dataset_part ) - if force_service_type: + if force_service_name: logger.info( - f"You forced selection of service: " - f"{force_service_type.service_name.value}" + f"You forced selection of service: " f"{force_service_name.value}" ) service = _select_forced_service( dataset_version_part=dataset_part, - force_service_type=force_service_type, + force_service_name=force_service_name, command_type=command_type, ) if service.service_format == CopernicusMarineServiceFormat.SQLITE: - raise FormatNotSupported(service.service_format.value) + raise FormatNotSupported(service.service_format) else: service = _select_service_by_priority( dataset_version_part=dataset_part, @@ -416,12 +398,12 @@ def _get_retrieval_service_from_dataset_version( ) logger.info( "Service was not specified, the default one was " - f'selected: "{service.service_type.service_name.value}"' + f'selected: "{service.service_name}"' ) dataset_start_date = _get_dataset_start_date_from_service(service) return RetrievalService( dataset_id=dataset_id, - service_type=service.service_type, + service_name=service.service_name, uri=service.uri, dataset_valid_start_date=dataset_start_date, service_format=service.service_format, @@ -454,8 +436,8 @@ class ServiceNotAvailable(Exception): def _warning_dataset_will_be_deprecated( dataset_id: str, - dataset_version: CopernicusMarineDatasetVersion, - dataset_part: CopernicusMarineVersionPart, + dataset_version: CopernicusMarineVersion, + dataset_part: CopernicusMarinePart, ): logger.warning( f"""The dataset {dataset_id}""" @@ -471,8 +453,8 @@ def _warning_dataset_will_be_deprecated( def _warning_dataset_not_yet_released( dataset_id: str, - dataset_version: CopernicusMarineDatasetVersion, - dataset_part: CopernicusMarineVersionPart, + dataset_version: CopernicusMarineVersion, + dataset_part: CopernicusMarinePart, ): logger.warning( f"""The 
dataset {dataset_id}""" @@ -488,17 +470,17 @@ def _warning_dataset_not_yet_released( def _service_not_available_error( - dataset_version_part: CopernicusMarineVersionPart, + dataset_version_part: CopernicusMarinePart, command_type: CommandType, ) -> ServiceNotAvailable: - dataset_available_service_types = [ - service.service_type.short_name.value + dataset_available_service_names = [ + service.service_short_name for service in dataset_version_part.services - if service.service_type in command_type.service_types_by_priority + if service.service_name in command_type.service_names_by_priority ] return ServiceNotAvailable( f"Available services for dataset: " - f"{dataset_available_service_types}" + f"{dataset_available_service_names}" ) @@ -522,14 +504,15 @@ def _no_service_available_for_command( ) -def _service_type_from_string( +def _service_name_from_string( string: str, command_type: CommandType -) -> CopernicusMarineDatasetServiceType: +) -> CopernicusMarineServiceNames: return next_or_raise_exception( ( - service_type - for service_type in command_type.service_types_by_priority - if string in service_type.aliases() + service_name + for service_name in command_type.service_names_by_priority + if string + in {service_name, short_name_from_service_name(service_name)} ), _service_does_not_exist_for_command(string, command_type), ) diff --git a/copernicusmarine/core_functions/subset.py b/copernicusmarine/core_functions/subset.py index 8085594c..a3a21013 100644 --- a/copernicusmarine/core_functions/subset.py +++ b/copernicusmarine/core_functions/subset.py @@ -6,8 +6,8 @@ from pendulum import DateTime from copernicusmarine.catalogue_parser.models import ( - CopernicusMarineDatasetServiceType, CopernicusMarineServiceFormat, + CopernicusMarineServiceNames, ) from copernicusmarine.catalogue_parser.request_structure import ( SubsetRequest, @@ -158,20 +158,19 @@ def subset_function( username=username, password=password, dataset_url=subset_request.dataset_url, - service_type=retrieval_service.service_type, + service_name=retrieval_service.service_name, dataset_subset=subset_request.get_time_and_space_subset(), coordinates_selection_method=subset_request.coordinates_selection_method, dataset_valid_date=retrieval_service.dataset_valid_start_date, ) logger.info( - "Downloading using service " - f"{retrieval_service.service_type.service_name.value}..." + "Downloading using service " f"{retrieval_service.service_name}..." 
) - if retrieval_service.service_type in [ - CopernicusMarineDatasetServiceType.GEOSERIES, - CopernicusMarineDatasetServiceType.TIMESERIES, - CopernicusMarineDatasetServiceType.OMI_ARCO, - CopernicusMarineDatasetServiceType.STATIC_ARCO, + if retrieval_service.service_name in [ + CopernicusMarineServiceNames.GEOSERIES, + CopernicusMarineServiceNames.TIMESERIES, + CopernicusMarineServiceNames.OMI_ARCO, + CopernicusMarineServiceNames.STATIC_ARCO, ]: if ( retrieval_service.service_format @@ -187,7 +186,7 @@ def subset_function( service=retrieval_service.service, ) else: - raise ServiceNotSupported(retrieval_service.service_type) + raise ServiceNotSupported(retrieval_service.service_name) return response diff --git a/copernicusmarine/core_functions/utils.py b/copernicusmarine/core_functions/utils.py index fa190e45..f189af7c 100644 --- a/copernicusmarine/core_functions/utils.py +++ b/copernicusmarine/core_functions/utils.py @@ -1,5 +1,4 @@ import concurrent.futures -import functools import logging import pathlib import re @@ -7,7 +6,6 @@ from typing import ( Any, Callable, - Iterable, Iterator, Literal, Optional, @@ -27,9 +25,6 @@ from tqdm import tqdm from copernicusmarine import __version__ as copernicusmarine_version -from copernicusmarine.core_functions.deprecated_options import ( - DeprecatedOptionMapping, -) from copernicusmarine.core_functions.exceptions import WrongDatetimeFormat logger = logging.getLogger("copernicusmarine") @@ -71,12 +66,6 @@ def get_unique_filename( _S = TypeVar("_S") -def map_reject_none( - function: Callable[[_S], Optional[_T]], iterable: Iterable[_S] -) -> Iterable[_T]: - return (element for element in map(function, iterable) if element) - - def next_or_raise_exception( iterator: Iterator[_T], exception_to_raise: Exception ) -> _T: @@ -219,93 +208,3 @@ def _add_custom_query_param(params, context, **kwargs): ) return _add_custom_query_param - - -# Deprecation utils -def get_deprecated_message( - old_value, - preferred_value, - deleted_for_v2: bool = False, - deprecated_for_v2: bool = False, - only_for_v2: bool = False, -): - message = "" - if only_for_v2: - message = f"Deprecation warning for option '{old_value}'. " - else: - message = f"'{old_value}' has been deprecated. " - if old_value != preferred_value and not only_for_v2: - message += f"Use '{preferred_value}' instead. " - if deleted_for_v2: - message += ( - "This option will no longer be " - + "available in copernicusmarine>=2.0.0. " - + "Please refer to the documentation when the new major " - + "version is released for more information." - ) - if deprecated_for_v2: - message += ( - "This option will be deprecated in copernicusmarine>=2.0.0 i.e. " - + "it will not break but it might have an unexpected effect." - ) - return message - - -def log_deprecated_message( - old_value, - preferred_value, - deleted_for_v2: bool, - deprecated_for_v2: bool, - only_for_v2: bool, -): - logger.warning( - get_deprecated_message( - old_value, - preferred_value, - deleted_for_v2=deleted_for_v2, - deprecated_for_v2=deprecated_for_v2, - only_for_v2=only_for_v2, - ) - ) - - -def raise_both_old_and_new_value_error(old_value, new_value): - raise TypeError( - f"Received both {old_value} and {new_value} as arguments! 
" - f"{get_deprecated_message(old_value, new_value)}" - ) - - -def deprecated_python_option( - deprecated_option: DeprecatedOptionMapping, -) -> Callable: - def deco(f: Callable): - @functools.wraps(f) - def wrapper(*args, **kwargs): - rename_kwargs(f.__name__, kwargs, deprecated_option) - return f(*args, **kwargs) - - return wrapper - - return deco - - -def rename_kwargs( - func_name: str, kwargs: dict[str, Any], aliases: DeprecatedOptionMapping -): - for old, alias_info in aliases.deprecated_options_by_old_names.items(): - if func_name not in alias_info.targeted_functions: - continue - new = alias_info.new_name - if old in kwargs: - if new in kwargs and old != new: - raise_both_old_and_new_value_error(old, new) - log_deprecated_message( - old, - new, - alias_info.deleted_for_v2, - alias_info.deprecated_for_v2, - alias_info.only_for_v2, - ) - if alias_info.replace: - kwargs[new] = kwargs.pop(old) diff --git a/copernicusmarine/download_functions/subset_xarray.py b/copernicusmarine/download_functions/subset_xarray.py index fd1ad02d..443cdccf 100644 --- a/copernicusmarine/download_functions/subset_xarray.py +++ b/copernicusmarine/download_functions/subset_xarray.py @@ -8,7 +8,7 @@ from pendulum import DateTime from copernicusmarine.catalogue_parser.models import ( - CopernicusMarineDatasetServiceType, + CopernicusMarineServiceNames, ) from copernicusmarine.catalogue_parser.request_structure import ( DatasetTimeAndSpaceSubset, @@ -566,23 +566,23 @@ def check_dataset_subset_bounds( username: str, password: str, dataset_url: str, - service_type: CopernicusMarineDatasetServiceType, + service_name: CopernicusMarineServiceNames, dataset_subset: DatasetTimeAndSpaceSubset, coordinates_selection_method: CoordinatesSelectionMethod, dataset_valid_date: Optional[Union[str, int, float]], ) -> None: - if service_type in [ - CopernicusMarineDatasetServiceType.GEOSERIES, - CopernicusMarineDatasetServiceType.TIMESERIES, - CopernicusMarineDatasetServiceType.OMI_ARCO, - CopernicusMarineDatasetServiceType.STATIC_ARCO, + if service_name in [ + CopernicusMarineServiceNames.GEOSERIES, + CopernicusMarineServiceNames.TIMESERIES, + CopernicusMarineServiceNames.OMI_ARCO, + CopernicusMarineServiceNames.STATIC_ARCO, ]: dataset = custom_open_zarr.open_zarr( dataset_url, copernicus_marine_username=username ) dataset_coordinates = dataset.coords else: - raise ServiceNotSupported(service_type) + raise ServiceNotSupported(service_name) for coordinate_label in COORDINATES_LABEL["latitude"]: if coordinate_label in dataset.sizes: latitudes = dataset_coordinates[coordinate_label].values diff --git a/copernicusmarine/download_functions/utils.py b/copernicusmarine/download_functions/utils.py index 986f6cb1..327efb3e 100644 --- a/copernicusmarine/download_functions/utils.py +++ b/copernicusmarine/download_functions/utils.py @@ -2,7 +2,7 @@ import logging import math from pathlib import Path -from typing import Any, Optional +from typing import Any, Optional, Union import xarray from pendulum import DateTime @@ -260,7 +260,7 @@ def get_approximation_size_data_downloaded( download_estimated_size = 0 for variable_name in temp_dataset.data_vars: - coordinates_size = 1 + coordinates_size = 1.0 variable = [ var for var in service.variables if var.short_name == variable_name ][0] @@ -302,7 +302,7 @@ def get_approximation_size_data_downloaded( def get_number_of_chunks_for_coordinate( dataset: xarray.Dataset, coordinate: CopernicusMarineCoordinate, - chunking_length: int, + chunking_length: Union[int, float], ) -> Optional[int]: 
maximum_value = coordinate.maximum_value minimum_value = coordinate.minimum_value @@ -315,9 +315,9 @@ def get_number_of_chunks_for_coordinate( ): values = [minimum_value] for _ in range( - 0, math.ceil((maximum_value - minimum_value) / step_value) + 0, math.ceil((maximum_value - minimum_value) / step_value) # type: ignore ): - values.append(values[-1] + step_value) + values.append(values[-1] + step_value) # type: ignore elif not values: return None @@ -346,8 +346,8 @@ def get_number_of_chunks_for_coordinate( index_left = bisect.bisect_left(values, requested_minimum) if index_left == len(values) - 1: chunk_of_requested_minimum = math.floor((index_left) / chunking_length) - elif abs(values[index_left] - requested_minimum) <= abs( - values[index_left + 1] - requested_minimum + elif abs(values[index_left] - requested_minimum) <= abs( # type: ignore + values[index_left + 1] - requested_minimum # type: ignore ): chunk_of_requested_minimum = math.floor(index_left / chunking_length) else: @@ -358,8 +358,8 @@ def get_number_of_chunks_for_coordinate( index_left = bisect.bisect_left(values, requested_maximum) if index_left == len(values) - 1 or index_left == len(values): chunk_of_requested_maximum = math.floor((index_left) / chunking_length) - elif abs(values[index_left] - requested_maximum) <= abs( - values[index_left + 1] - requested_maximum + elif abs(values[index_left] - requested_maximum) <= abs( # type: ignore + values[index_left + 1] - requested_maximum # type: ignore ): chunk_of_requested_maximum = math.floor(index_left / chunking_length) else: diff --git a/copernicusmarine/python_interface/describe.py b/copernicusmarine/python_interface/describe.py index 968bff28..410107a3 100644 --- a/copernicusmarine/python_interface/describe.py +++ b/copernicusmarine/python_interface/describe.py @@ -1,11 +1,11 @@ -import json -from typing import Any +from typing import Optional +from copernicusmarine.catalogue_parser.models import CopernicusMarineCatalogue from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, + deprecated_python_option, ) from copernicusmarine.core_functions.describe import describe_function -from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.python_interface.exception_handler import ( log_exception_and_exit, ) @@ -14,33 +14,27 @@ @deprecated_python_option(DEPRECATED_OPTIONS) @log_exception_and_exit def describe( - include_description: bool = False, - include_datasets: bool = False, - include_keywords: bool = False, include_versions: bool = False, - include_all: bool = False, contains: list[str] = [], + product_id: Optional[str] = None, + dataset_id: Optional[str] = None, max_concurrent_requests: int = 15, disable_progress_bar: bool = False, staging: bool = False, -) -> dict[str, Any]: +) -> CopernicusMarineCatalogue: """ Retrieve and parse the metadata information from the Copernicus Marine catalogue. Parameters ---------- - include_description : bool, optional - Include product description in output. - include_datasets : bool, optional - Include product dataset details in output. - include_keywords : bool, optional - Include product keyword details in output. include_versions : bool, optional Include dataset versions in output. By default, shows only the default version. - include_all : bool, optional - Include all the possible data in output: description, datasets, keywords, and versions. contains : list[str], optional Filter catalogue output. Returns products with attributes matching a string token. 
+ product_id : str, optional + Force the productID to be used for the describe command. Instead of parsing the whole catalogue, only the product with the given productID will be parsed. + dataset_id : str, optional + Force the datasetID to be used for the describe command. Instead of parsing the whole catalogue, only the dataset with the given datasetID will be parsed. max_concurrent_requests : int, optional Maximum number of concurrent requests (>=1). Default 15. The command uses a thread pool executor to manage concurrent requests. disable_progress_bar : bool, optional @@ -48,29 +42,20 @@ def describe( Returns ------- - dict[str, Any] - A dictionary containing the retrieved metadata information. + copernicusmarine.CopernicusMarineCatalogue + An object containing the retrieved metadata information. """ # noqa if not isinstance(contains, list): raise ValueError("contains must be of list type") - if include_all: - include_description = True - include_datasets = True - include_keywords = True - include_versions = True - - catalogue_json = describe_function( - include_description, - include_datasets, - include_keywords, + return describe_function( include_versions, contains, + product_id, + dataset_id, max_concurrent_requests, disable_progress_bar, staging=staging, ) - catalogue = json.loads(catalogue_json) - return catalogue diff --git a/copernicusmarine/python_interface/get.py b/copernicusmarine/python_interface/get.py index 2317ade5..10d87d34 100644 --- a/copernicusmarine/python_interface/get.py +++ b/copernicusmarine/python_interface/get.py @@ -3,10 +3,10 @@ from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, + deprecated_python_option, ) from copernicusmarine.core_functions.get import get_function from copernicusmarine.core_functions.models import ResponseGet -from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.python_interface.exception_handler import ( log_exception_and_exit, ) diff --git a/copernicusmarine/python_interface/load_utils.py b/copernicusmarine/python_interface/load_utils.py index e9f4df4d..36903ad6 100644 --- a/copernicusmarine/python_interface/load_utils.py +++ b/copernicusmarine/python_interface/load_utils.py @@ -4,7 +4,7 @@ import xarray from copernicusmarine.catalogue_parser.models import ( - CopernicusMarineDatasetServiceType, + CopernicusMarineServiceNames, ) from copernicusmarine.catalogue_parser.request_structure import LoadRequest from copernicusmarine.core_functions.credentials_utils import ( @@ -30,7 +30,7 @@ def load_data_object_from_load_request( dataset_id=load_request.dataset_id, force_dataset_version_label=load_request.force_dataset_version, force_dataset_part_label=load_request.force_dataset_part, - force_service_type_string=load_request.force_service, + force_service_name_or_short_name=load_request.force_service, command_type=CommandType.LOAD, dataset_subset=load_request.get_time_and_space_subset(), ) @@ -44,16 +44,16 @@ def load_data_object_from_load_request( username=username, password=password, dataset_url=load_request.dataset_url, - service_type=retrieval_service.service_type, + service_name=retrieval_service.service_name, dataset_subset=load_request.get_time_and_space_subset(), coordinates_selection_method=load_request.coordinates_selection_method, dataset_valid_date=retrieval_service.dataset_valid_start_date, ) - if retrieval_service.service_type in [ - CopernicusMarineDatasetServiceType.GEOSERIES, - CopernicusMarineDatasetServiceType.TIMESERIES, - CopernicusMarineDatasetServiceType.OMI_ARCO, -
CopernicusMarineDatasetServiceType.STATIC_ARCO, + if retrieval_service.service_name in [ + CopernicusMarineServiceNames.GEOSERIES, + CopernicusMarineServiceNames.TIMESERIES, + CopernicusMarineServiceNames.OMI_ARCO, + CopernicusMarineServiceNames.STATIC_ARCO, ]: if retrieval_service.dataset_valid_start_date: parsed_start_datetime = timestamp_or_datestring_to_datetime( @@ -79,5 +79,5 @@ def load_data_object_from_load_request( chunks=None, ) else: - raise ServiceNotSupported(retrieval_service.service_type) + raise ServiceNotSupported(retrieval_service.service_name) return dataset diff --git a/copernicusmarine/python_interface/open_dataset.py b/copernicusmarine/python_interface/open_dataset.py index 35bf8788..594e28d1 100644 --- a/copernicusmarine/python_interface/open_dataset.py +++ b/copernicusmarine/python_interface/open_dataset.py @@ -7,6 +7,7 @@ from copernicusmarine.catalogue_parser.request_structure import LoadRequest from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, + deprecated_python_option, ) from copernicusmarine.core_functions.models import ( DEFAULT_COORDINATES_SELECTION_METHOD, @@ -14,7 +15,6 @@ CoordinatesSelectionMethod, VerticalDimensionOutput, ) -from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.download_functions.download_arco_series import ( open_dataset_from_arco_series, ) @@ -100,7 +100,7 @@ def open_dataset( end_datetime : Union[datetime, str], optional The end datetime of the temporal subset. Supports common format parsed by pendulum (https://pendulum.eustace.io/docs/#parsing). service : str, optional - Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco']. + Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['geoseries', 'timeseries', 'omi-arco', 'static-arco']. credentials_file : Union[pathlib.Path, str], optional Path to a credentials file if not in its default directory (``$HOME/.copernicusmarine``). Accepts .copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini files. diff --git a/copernicusmarine/python_interface/read_dataframe.py b/copernicusmarine/python_interface/read_dataframe.py index 8105a67a..fbbacebd 100644 --- a/copernicusmarine/python_interface/read_dataframe.py +++ b/copernicusmarine/python_interface/read_dataframe.py @@ -7,6 +7,7 @@ from copernicusmarine.catalogue_parser.request_structure import LoadRequest from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, + deprecated_python_option, ) from copernicusmarine.core_functions.models import ( DEFAULT_COORDINATES_SELECTION_METHOD, @@ -14,7 +15,6 @@ CoordinatesSelectionMethod, VerticalDimensionOutput, ) -from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.download_functions.download_arco_series import ( read_dataframe_from_arco_series, ) @@ -99,7 +99,7 @@ def read_dataframe( coordinates_selection_method : str, optional If ``inside``, the selection retrieved will be inside the requested range. If ``strict-inside``, the selection retrieved will be inside the requested range, and an error will be raised if the values don't exist. If ``nearest``, the extremes closest to the requested values will be returned. 
If ``outside``, the extremes will be taken to contain all the requested interval. The methods ``inside``, ``nearest`` and ``outside`` will display a warning if the request is out of bounds. service : str, optional - Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco']. + Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['geoseries', 'timeseries', 'omi-arco', 'static-arco']. credentials_file : Union[pathlib.Path, str], optional Path to a credentials file if not in its default directory (``$HOME/.copernicusmarine``). Accepts .copernicusmarine-credentials / .netrc or _netrc / motuclient-python.ini files. diff --git a/copernicusmarine/python_interface/subset.py b/copernicusmarine/python_interface/subset.py index 43dfb9bd..0ad34de5 100644 --- a/copernicusmarine/python_interface/subset.py +++ b/copernicusmarine/python_interface/subset.py @@ -4,6 +4,7 @@ from copernicusmarine.core_functions.deprecated_options import ( DEPRECATED_OPTIONS, + deprecated_python_option, ) from copernicusmarine.core_functions.models import ( DEFAULT_COORDINATES_SELECTION_METHOD, @@ -15,7 +16,6 @@ VerticalDimensionOutput, ) from copernicusmarine.core_functions.subset import subset_function -from copernicusmarine.core_functions.utils import deprecated_python_option from copernicusmarine.python_interface.exception_handler import ( log_exception_and_exit, ) @@ -86,7 +86,7 @@ def subset( request_file : Union[pathlib.Path, str], optional Option to pass a file containing the arguments. For more information please refer to the documentation or use option ``--create-template`` from the command line interface for an example template. service : str, optional - Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco']. + Force download through one of the available services using the service name among ['arco-geo-series', 'arco-time-series', 'omi-arco', 'static-arco'] or its short name among ['geoseries', 'timeseries', 'omi-arco', 'static-arco']. variables : List[str], optional List of variable names to extract. minimum_longitude : float, optional diff --git a/doc/conf.py b/doc/conf.py index f6510af4..44009802 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -25,7 +25,7 @@ numpydoc_show_class_members = False templates_path = ["_templates"] -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "__pycache__"] pygments_style = "sphinx" pygments_dark_style = "monokai" diff --git a/doc/response-types.rst b/doc/response-types.rst index 5e658eab..fe61ed73 100644 --- a/doc/response-types.rst +++ b/doc/response-types.rst @@ -7,7 +7,8 @@ Response types The Copernicus Marine toolbox commands return some information when downloading. It can contain useful metadata for the user. -For the :ref:`command line interface `, all the returned data will be in a form of a json sent to stdout. +For the :ref:`command line interface `, all the returned data will +be in the form of JSON sent to stdout (whereas the logs are sent to stderr).
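The same metadata is also reachable from the Python interface as pydantic objects; a minimal sketch (the dataset ID is illustrative, and ``dry_run`` is the option described in the note below):

.. code-block:: python

    import copernicusmarine

    # With dry_run=True the toolbox only returns the response metadata,
    # without downloading any file.
    response = copernicusmarine.get(
        dataset_id="cmems_mod_glo_phy_my_0.083deg_P1D-m",
        dry_run=True,
    )
    # The response is a pydantic model, so it can be dumped as JSON.
    print(response.model_dump_json(indent=2))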
You can easily save it locally by doing, for example: .. code-block:: bash @@ -17,7 +18,7 @@ You can easily save it locally by doing, for example: .. note:: For the "get" and "subset" commands you can get those metadata without - downloading anything by using the ``dry_run`` option (or ``--dry-run`` flag for the CLI). + downloading anything by using the ``dry_run`` option (or ``--dry-run`` flag for the command line interface). ------------------- Commands Response ------------------- @@ -35,6 +36,12 @@ Commands Response :exclude-members: model_computed_fields, model_config, model_fields :member-order: bysource +.. autoclass:: copernicusmarine.CopernicusMarineCatalogue() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, filter_only_official_versions_and_parts + :member-order: bysource + -------------- Subtypes -------------- @@ -63,3 +70,63 @@ Subtypes :undoc-members: :exclude-members: model_computed_fields, model_config, model_fields :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineProduct() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, sort_versions, parse_dataset_metadata_items + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineDataset() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, get_version, sort_versions, parse_dataset_metadata_items + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineVersion() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, get_part, sort_parts + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarinePart() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, from_metadata_item, get_service_by_service_name + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineService() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, from_metadata_item + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineVariable() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, from_metadata_item + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineCoordinate() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields, from_metadata_item + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineServiceNames() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource + +.. autoclass:: copernicusmarine.CoperniusMarineServiceShortNames() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource + +.. autoclass:: copernicusmarine.CopernicusMarineServiceFormat() + :members: + :undoc-members: + :exclude-members: model_computed_fields, model_config, model_fields + :member-order: bysource diff --git a/doc/usage/describe-usage.rst b/doc/usage/describe-usage.rst index 06a03d34..e3a2b0b1 100644 --- a/doc/usage/describe-usage.rst +++ b/doc/usage/describe-usage.rst @@ -44,19 +44,76 @@ Here the first 2 products are shown: } -By default, the command only shows the products. To include the datasets, you can use the ``--returned-fields datasets`` option.
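For instance, the following call (an illustrative sketch using the option just mentioned) returns the products together with their datasets:

.. code-block:: bash

    copernicusmarine describe --returned-fields datasets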
-**Example:** +To save the JSON output to a file, you can use the following command: .. code-block:: bash - copernicusmarine describe --include-datasets + copernicusmarine describe > all_products_copernicus_marine_service.json -To save the JSON output to a file, you can use the following command: +.. note:: + At some point in the future, the command will be updated to include everything by default. + Then the user will be able to use the ``--returned-fields`` or ``--returned-fields-exclude`` options to filter the output. + +.. note:: + In the case of the Python interface, ``describe`` directly returns the :class:`copernicusmarine.CopernicusMarineCatalogue` object. + The catalogue will be complete unless you use the ``dataset_id``, ``product_id`` or ``contains`` arguments. + +``--returned-fields`` and ``--returned-fields-exclude`` options +---------------------------------------------------------------- + +You can use the ``--returned-fields`` (``-r``) and ``--returned-fields-exclude`` (``-e``) options to select the fields you want to see in the output. +These options let you select, respectively, the fields to include in or exclude from the output. +You just need to pass them as a comma-separated list. + +For example, if you want only the URIs of the services, you can use the following command: + +**Example:** .. code-block:: bash - copernicusmarine describe --include-datasets > all_datasets_copernicusmarine.json + copernicusmarine describe --returned-fields uri,product_id,dataset_id,service_name + +The output will be something like this (only the first product is shown): + +.. code-block:: json + + { + "products": [ + { + "product_id": "ANTARCTIC_OMI_SI_extent", + "datasets": [ + { + "dataset_id": "antarctic_omi_si_extent", + "versions": [ + { + "parts": [ + { + "services": [ + { + "service_name": "original-files", + "uri": "https://s3.waw3-1.cloudferro.com/mdl-native-10/native/ANTARCTIC_OMI_SI_extent/antarctic_omi_si_extent_202207/antarctic_omi_si_extent_19930115_P20220328.nc" + }, + { + "service_name": "omi-arco", + "uri": "https://s3.waw3-1.cloudferro.com/mdl-arco-time-001/arco/ANTARCTIC_OMI_SI_extent/antarctic_omi_si_extent_202207/omi.zarr" + } + ] + } + ] + } + ] + } + ] + } + + +.. note:: + You can use ``--returned-fields all`` to show all fields. + ``--contains`` option ---------------------- @@ -120,6 +177,51 @@ The output will be something like this: ] } +
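The same filter is available from the Python interface through the ``contains`` argument; a minimal sketch (the token is illustrative):

.. code-block:: python

    import copernicusmarine

    # Returns a CopernicusMarineCatalogue restricted to the products whose
    # attributes match the given token.
    catalogue = copernicusmarine.describe(contains=["IBI_ANALYSISFORECAST"])
    print([product.product_id for product in catalogue.products])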
+``dataset_id`` and ``product_id`` options +----------------------------------------- + +Fetching the whole catalogue can be time-consuming. If you know the dataset or product you are looking for, you can use the ``dataset_id`` or ``product_id`` options. +It will drastically reduce the time to get the information you need. +You can use either the ``--dataset-id`` option, the ``--product-id`` option, or both. + +**Example:** + +Let's filter the returned values for simplicity. + +.. code-block:: bash + + copernicusmarine describe -i cmems_mod_glo_phy_my_0.083deg_P1D-m -e services -r datasets,product_id + +The output will be something like this: + +.. code-block:: json + + { + "products": [ + { + "product_id": "GLOBAL_MULTIYEAR_PHY_001_030", + "datasets": [ + { + "dataset_id": "cmems_mod_glo_phy_my_0.083deg_P1D-m", + "dataset_name": "daily mean fields from Global Ocean Physics Analysis and Forecast updated Daily", + "versions": [ + { + "label": "202311", + "parts": [ + { + "name": "default", + "released_date": "2023-11-30T11:00:00.000Z" + } + ] + } + ] + } + ] + } + ] + } + ``--include-versions`` option ----------------------------- diff --git a/doc/usage/quickoverview.ipynb b/doc/usage/quickoverview.ipynb index bffd4dcb..b419aea5 100644 --- a/doc/usage/quickoverview.ipynb +++ b/doc/usage/quickoverview.ipynb @@ -150,21 +150,22 @@ "source": [ "## Copernicus Marine toolbox - Describe\n", "\n", - "To explore the catalogue of products and datasets available in Copernicus Marine service.\n", + "To explore the catalogue of products and datasets available in Copernicus Marine service. \n", + "In the Python interface, it returns the {class}`copernicusmarine.CopernicusMarineCatalogue` object.\n", "\n", "For more information, see the [page about describe](describe-page) of the documentation. You can also check the dedicated pages for the [command line interface](cli-describe) or the {func}`Python interface `." ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Fetching catalog: 100%|██████████| 2/2 [00:11<00:00, 5.85s/it]\n" + "Fetching catalogue: 100%|██████████| 2/2 [00:11<00:00, 5.58s/it]\n" ] }, { @@ -175,42 +176,40 @@ " 'thumbnail_url': 'https://catalogue.marine.copernicus.eu/documents/IMG/ANTARCTIC_OMI_SI_extent.png',\n", " 'digital_object_identifier': '10.48670/moi-00186',\n", " 'sources': ['Numerical models'],\n", - " 'processing_level': None,\n", " 'production_center': 'Mercator Océan International'}" ] }, - "execution_count": 3, + "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# by default, we only get information about the products\n", - "catalogue_products = copernicusmarine.describe()\n", - "catalogue_products[\"products\"][0]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# We can get information about the datasets\n", - "catalogue_datasets = copernicusmarine.describe(include_datasets=True)\n", - "# catalogue_datasets[\"products\"][0][\"datasets\"][0] not shown because it is too long" + "catalogue = copernicusmarine.describe()\n", + "\n", + "# Let's show only the product by doing some filtering\n", + "# the copernicusmarine.CopernicusMarineCatalogue object is a pydantic BaseModel\n", + "# see https://docs.pydantic.dev/latest/concepts/models/\n", + "# you can easily dump the object to json or dict\n", + "catalogue_dict = catalogue.model_dump(\n", + " exclude_none=True, \n", + " exclude_unset=True, \n", + " exclude={\"products\": {\"__all__\": {\"datasets\": True, \"description\": True, \"keywords\": True}}}\n", + " )\n", + "catalogue_dict[\"products\"][0]" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Fetching catalog: 100%|██████████| 2/2 [00:12<00:00, 6.34s/it]\n" + "Fetching catalogue: 100%|██████████| 2/2 [00:11<00:00, 5.71s/it]\n" ] }, { "data": { "text/plain": [ "{'products': [{'title': 'Atlantic-Iberian Biscay Irish- Ocean Biogeochemical Analysis and Forecast',\n", " 'product_id': 
'IBI_ANALYSISFORECAST_BGC_005_004',\n", - " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_ANALYSISFORECAST_BGC_005_004.jpg',\n", - " 'description': 'The IBI-MFC provides a high-resolution biogeochemical analysis and forecast product covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates) as well as daily averaged forecasts with a horizon of 10 days (updated on a weekly basis) are available on the catalogue.\\nTo this aim, an online coupled physical-biogeochemical operational system is based on NEMO-PISCES at 1/36° and adapted to the IBI area, being Mercator-Ocean in charge of the model code development. PISCES is a model of intermediate complexity, with 24 prognostic variables. It simulates marine biological productivity of the lower trophic levels and describes the biogeochemical cycles of carbon and of the main nutrients (P, N, Si, Fe).\\nThe product provides daily and monthly averages of the main biogeochemical variables: chlorophyll, oxygen, nitrate, phosphate, silicate, iron, ammonium, net primary production, euphotic zone depth, phytoplankton carbon, pH, dissolved inorganic carbon, surface partial pressure of carbon dioxide, and zooplankton.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00026\\n\\n**References:**\\n\\n* Gutknecht, E. and Reffray, G. and Mignot, A. and Dabrowski, T. and Sotillo, M. G. Modelling the marine ecosystem of Iberia-Biscay-Ireland (IBI) European waters for CMEMS operational applications. Ocean Sci., 15, 1489–1516, 2019. https://doi.org/10.5194/os-15-1489-2019\\n',\n", - " 'digital_object_identifier': '10.48670/moi-00026',\n", - " 'sources': ['Numerical models'],\n", - " 'processing_level': 'Level 4',\n", - " 'production_center': 'NOLOGIN'},\n", + " 'description': 'The IBI-MFC provides a high-resolution biogeochemical analysis and forecast product covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates) as well as daily averaged forecasts with a horizon of 10 days (updated on a weekly basis) are available on the catalogue.\\nTo this aim, an online coupled physical-biogeochemical operational system is based on NEMO-PISCES at 1/36° and adapted to the IBI area, being Mercator-Ocean in charge of the model code development. PISCES is a model of intermediate complexity, with 24 prognostic variables. It simulates marine biological productivity of the lower trophic levels and describes the biogeochemical cycles of carbon and of the main nutrients (P, N, Si, Fe).\\nThe product provides daily and monthly averages of the main biogeochemical variables: chlorophyll, oxygen, nitrate, phosphate, silicate, iron, ammonium, net primary production, euphotic zone depth, phytoplankton carbon, pH, dissolved inorganic carbon, surface partial pressure of carbon dioxide, and zooplankton.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00026\\n\\n**References:**\\n\\n* Gutknecht, E. and Reffray, G. and Mignot, A. and Dabrowski, T. and Sotillo, M. G. 
Modelling the marine ecosystem of Iberia-Biscay-Ireland (IBI) European waters for CMEMS operational applications. Ocean Sci., 15, 1489–1516, 2019. https://doi.org/10.5194/os-15-1489-2019\\n'},\n", " {'title': 'Atlantic-Iberian Biscay Irish- Ocean Physics Analysis and Forecast',\n", " 'product_id': 'IBI_ANALYSISFORECAST_PHY_005_001',\n", - " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_ANALYSISFORECAST_PHY_005_001.jpg',\n", - " 'description': 'The IBI-MFC provides a high-resolution ocean analysis and forecast product (daily run by Nologin with the support of CESGA in terms of supercomputing resources), covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates) as well as forecasts of different temporal resolutions with a horizon of 5 days (updated on a daily basis) are available on the catalogue.\\nThe system is based on a eddy-resolving NEMO model application at 1/36º horizontal resolution, being Mercator-Ocean in charge of the model code development. The hydrodynamic forecast includes high frequency processes of paramount importance to characterize regional scale marine processes: tidal forcing, surges and high frequency atmospheric forcing, fresh water river discharge, wave forcing in forecast, etc. A weekly update of IBI downscaled analysis is also delivered as historic IBI best estimates.\\nThe product offers 3D daily and monthly ocean fields, as well as hourly mean and 15-minute instantaneous values for some surface variables. Daily and monthly averages of 3D Temperature, 3D Salinity, 3D Zonal and Meridional Velocity components, Mix Layer Depth, Sea Bottom Temperature and Sea Surface Height are provided. Additionally, hourly means of surface fields for variables such as Sea Surface Height, Mix Layer Depth, Surface Temperature and Currents, together with Barotropic Velocities are delivered. Finally, 15-minute instantaneous values of Sea Surface Height and Currents are also given.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00027\\n\\n**References:**\\n\\n* Sotillo, M.G.; Campuzano, F.; Guihou, K.; Lorente, P.; Olmedo, E.; Matulka, A.; Santos, F.; Amo-Baladrón, M.A.; Novellino, A. River Freshwater Contribution in Operational Ocean Models along the European Atlantic Façade: Impact of a New River Discharge Forcing Data on the CMEMS IBI Regional Model Solution. J. Mar. Sci. Eng. 2021, 9, 401. https://doi.org/10.3390/jmse9040401\\n* Mason, E. and Ruiz, S. and Bourdalle-Badie, R. and Reffray, G. and García-Sotillo, M. and Pascual, A. New insight into 3-D mesoscale eddy properties from CMEMS operational models in the western Mediterranean. Ocean Sci., 15, 1111–1131, 2019. https://doi.org/10.5194/os-15-1111-2019\\n* Lorente, P. and García-Sotillo, M. and Amo-Baladrón, A. and Aznar, R. and Levier, B. and Sánchez-Garrido, J. C. and Sammartino, S. and de Pascual-Collar, Á. and Reffray, G. and Toledano, C. and Álvarez-Fanjul, E. Skill assessment of global, regional, and coastal circulation forecast models: evaluating the benefits of dynamical downscaling in IBI (Iberia-Biscay-Ireland) surface waters. Ocean Sci., 15, 967–996, 2019. https://doi.org/10.5194/os-15-967-2019\\n* Aznar, R., Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Amo-Baladrón, A., Reffray, G., and Alvarez Fanjul, E. 
Strengths and weaknesses of the CMEMS forecasted and reanalyzed solutions for the Iberia-Biscay-Ireland (IBI) waters. J. Mar. Syst., 159, 1–14, https://doi.org/10.1016/j.jmarsys.2016.02.007, 2016\\n* Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Reffray, G., Amo-Baladrón, A., Benkiran, M., and Alvarez Fanjul, E.: The MyOcean IBI Ocean Forecast and Reanalysis Systems: operational products and roadmap to the future Copernicus Service, J. Oper. Oceanogr., 8, 63–79, https://doi.org/10.1080/1755876X.2015.1014663, 2015.\\n',\n", - " 'digital_object_identifier': '10.48670/moi-00027',\n", - " 'sources': ['Numerical models'],\n", - " 'processing_level': 'Level 4',\n", - " 'production_center': 'NOLOGIN'},\n", + " 'description': 'The IBI-MFC provides a high-resolution ocean analysis and forecast product (daily run by Nologin with the support of CESGA in terms of supercomputing resources), covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates) as well as forecasts of different temporal resolutions with a horizon of 5 days (updated on a daily basis) are available on the catalogue.\\nThe system is based on a eddy-resolving NEMO model application at 1/36º horizontal resolution, being Mercator-Ocean in charge of the model code development. The hydrodynamic forecast includes high frequency processes of paramount importance to characterize regional scale marine processes: tidal forcing, surges and high frequency atmospheric forcing, fresh water river discharge, wave forcing in forecast, etc. A weekly update of IBI downscaled analysis is also delivered as historic IBI best estimates.\\nThe product offers 3D daily and monthly ocean fields, as well as hourly mean and 15-minute instantaneous values for some surface variables. Daily and monthly averages of 3D Temperature, 3D Salinity, 3D Zonal and Meridional Velocity components, Mix Layer Depth, Sea Bottom Temperature and Sea Surface Height are provided. Additionally, hourly means of surface fields for variables such as Sea Surface Height, Mix Layer Depth, Surface Temperature and Currents, together with Barotropic Velocities are delivered. Finally, 15-minute instantaneous values of Sea Surface Height and Currents are also given.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00027\\n\\n**References:**\\n\\n* Sotillo, M.G.; Campuzano, F.; Guihou, K.; Lorente, P.; Olmedo, E.; Matulka, A.; Santos, F.; Amo-Baladrón, M.A.; Novellino, A. River Freshwater Contribution in Operational Ocean Models along the European Atlantic Façade: Impact of a New River Discharge Forcing Data on the CMEMS IBI Regional Model Solution. J. Mar. Sci. Eng. 2021, 9, 401. https://doi.org/10.3390/jmse9040401\\n* Mason, E. and Ruiz, S. and Bourdalle-Badie, R. and Reffray, G. and García-Sotillo, M. and Pascual, A. New insight into 3-D mesoscale eddy properties from CMEMS operational models in the western Mediterranean. Ocean Sci., 15, 1111–1131, 2019. https://doi.org/10.5194/os-15-1111-2019\\n* Lorente, P. and García-Sotillo, M. and Amo-Baladrón, A. and Aznar, R. and Levier, B. and Sánchez-Garrido, J. C. and Sammartino, S. and de Pascual-Collar, Á. and Reffray, G. and Toledano, C. and Álvarez-Fanjul, E. 
Skill assessment of global, regional, and coastal circulation forecast models: evaluating the benefits of dynamical downscaling in IBI (Iberia-Biscay-Ireland) surface waters. Ocean Sci., 15, 967–996, 2019. https://doi.org/10.5194/os-15-967-2019\\n* Aznar, R., Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Amo-Baladrón, A., Reffray, G., and Alvarez Fanjul, E. Strengths and weaknesses of the CMEMS forecasted and reanalyzed solutions for the Iberia-Biscay-Ireland (IBI) waters. J. Mar. Syst., 159, 1–14, https://doi.org/10.1016/j.jmarsys.2016.02.007, 2016\\n* Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Reffray, G., Amo-Baladrón, A., Benkiran, M., and Alvarez Fanjul, E.: The MyOcean IBI Ocean Forecast and Reanalysis Systems: operational products and roadmap to the future Copernicus Service, J. Oper. Oceanogr., 8, 63–79, https://doi.org/10.1080/1755876X.2015.1014663, 2015.\\n'},\n", " {'title': 'Atlantic-Iberian Biscay Irish- Ocean Wave Analysis and Forecast',\n", " 'product_id': 'IBI_ANALYSISFORECAST_WAV_005_005',\n", - " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_ANALYSISFORECAST_WAV_005_005.jpg',\n", - " 'description': 'The IBI-MFC provides a high-resolution wave analysis and forecast product (run twice a day by Nologin with the support of CESGA in terms of supercomputing resources), covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. The last 2 years before now (historic best estimates), as well as hourly instantaneous forecasts with a horizon of up to 10 days (updated on a daily basis) are available on the catalogue.\\nThe IBI wave model system is based on the MFWAM model and runs on a grid of 5 km of horizontal resolution forced with the ECMWF hourly wind data. The system assimilates significant wave height (SWH) altimeter data and CFOSAT wave spectral data (supplied by Météo-France), and it is forced by currents provided by the IBI ocean circulation system. \\nThe product offers hourly instantaneous fields of different wave parameters, including Wave Height, Period and Direction for total spectrum; fields of Wind Wave (or wind sea), Primary Swell Wave and Secondary Swell for partitioned wave spectra; and the highest wave variables, such as maximum crest height and maximum crest-to-trough height. Additionally, the IBI wave system is set up to provide internally some key parameters adequate to be used as forcing in the IBI NEMO ocean model forecast run.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00025\\n\\n**References:**\\n\\n* Toledano, C.; Ghantous, M.; Lorente, P.; Dalphinet, A.; Aouf, L.; Sotillo, M.G. Impacts of an Altimetric Wave Data Assimilation Scheme and Currents-Wave Coupling in an Operational Wave System: The New Copernicus Marine IBI Wave Forecast Service. J. Mar. Sci. Eng. 2022, 10, 457. https://doi.org/10.3390/jmse10040457\\n',\n", - " 'digital_object_identifier': '10.48670/moi-00025',\n", - " 'sources': ['Numerical models'],\n", - " 'processing_level': 'Level 4',\n", - " 'production_center': 'NOLOGIN'},\n", + " 'description': 'The IBI-MFC provides a high-resolution wave analysis and forecast product (run twice a day by Nologin with the support of CESGA in terms of supercomputing resources), covering the European waters, and more specifically the Iberia–Biscay–Ireland (IBI) area. 
The last 2 years before now (historic best estimates), as well as hourly instantaneous forecasts with a horizon of up to 10 days (updated on a daily basis) are available on the catalogue.\\nThe IBI wave model system is based on the MFWAM model and runs on a grid of 5 km of horizontal resolution forced with the ECMWF hourly wind data. The system assimilates significant wave height (SWH) altimeter data and CFOSAT wave spectral data (supplied by Météo-France), and it is forced by currents provided by the IBI ocean circulation system. \\nThe product offers hourly instantaneous fields of different wave parameters, including Wave Height, Period and Direction for total spectrum; fields of Wind Wave (or wind sea), Primary Swell Wave and Secondary Swell for partitioned wave spectra; and the highest wave variables, such as maximum crest height and maximum crest-to-trough height. Additionally, the IBI wave system is set up to provide internally some key parameters adequate to be used as forcing in the IBI NEMO ocean model forecast run.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00025\\n\\n**References:**\\n\\n* Toledano, C.; Ghantous, M.; Lorente, P.; Dalphinet, A.; Aouf, L.; Sotillo, M.G. Impacts of an Altimetric Wave Data Assimilation Scheme and Currents-Wave Coupling in an Operational Wave System: The New Copernicus Marine IBI Wave Forecast Service. J. Mar. Sci. Eng. 2022, 10, 457. https://doi.org/10.3390/jmse10040457\\n'},\n", " {'title': 'Atlantic-Iberian Biscay Irish- Ocean BioGeoChemistry NON ASSIMILATIVE Hindcast',\n", " 'product_id': 'IBI_MULTIYEAR_BGC_005_003',\n", - " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_MULTIYEAR_BGC_005_003.jpg',\n", - " 'description': 'The IBI-MFC provides a biogeochemical reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly updated on a yearly basis. The model system is run by Mercator-Ocean, being the product post-processed to the user’s format by Nologin with the support of CESGA in terms of supercomputing resources.\\nTo this aim, an application of the biogeochemical model PISCES is run simultaneously with the ocean physical IBI reanalysis, generating both products at the same 1/12° horizontal resolution. The PISCES model is able to simulate the first levels of the marine food web, from nutrients up to mesozooplankton and it has 24 state variables.\\nThe product provides daily, monthly and yearly averages of the main biogeochemical variables: chlorophyll, oxygen, nitrate, phosphate, silicate, iron, ammonium, net primary production, euphotic zone depth, phytoplankton carbon, pH, dissolved inorganic carbon and surface partial pressure of carbon dioxide. Additionally, climatological parameters (monthly mean and standard deviation) of these variables for the period 1993-2016 are delivered.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00028\\n\\n**References:**\\n\\n* Aznar, R., Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Amo-Baladrón, A., Reffray, G., and Alvarez Fanjul, E. Strengths and weaknesses of the CMEMS forecasted and reanalyzed solutions for the Iberia-Biscay-Ireland (IBI) waters. J. Mar. 
Syst., 159, 1–14, https://doi.org/10.1016/j.jmarsys.2016.02.007, 2016\\n',\n", - " 'digital_object_identifier': '10.48670/moi-00028',\n", - " 'sources': ['Numerical models'],\n", - " 'processing_level': 'Level 4',\n", - " 'production_center': 'NOLOGIN'},\n", + " 'description': 'The IBI-MFC provides a biogeochemical reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly updated on a yearly basis. The model system is run by Mercator-Ocean, being the product post-processed to the user’s format by Nologin with the support of CESGA in terms of supercomputing resources.\\nTo this aim, an application of the biogeochemical model PISCES is run simultaneously with the ocean physical IBI reanalysis, generating both products at the same 1/12° horizontal resolution. The PISCES model is able to simulate the first levels of the marine food web, from nutrients up to mesozooplankton and it has 24 state variables.\\nThe product provides daily, monthly and yearly averages of the main biogeochemical variables: chlorophyll, oxygen, nitrate, phosphate, silicate, iron, ammonium, net primary production, euphotic zone depth, phytoplankton carbon, pH, dissolved inorganic carbon and surface partial pressure of carbon dioxide. Additionally, climatological parameters (monthly mean and standard deviation) of these variables for the period 1993-2016 are delivered.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00028\\n\\n**References:**\\n\\n* Aznar, R., Sotillo, M. G., Cailleau, S., Lorente, P., Levier, B., Amo-Baladrón, A., Reffray, G., and Alvarez Fanjul, E. Strengths and weaknesses of the CMEMS forecasted and reanalyzed solutions for the Iberia-Biscay-Ireland (IBI) waters. J. Mar. Syst., 159, 1–14, https://doi.org/10.1016/j.jmarsys.2016.02.007, 2016\\n'},\n", " {'title': 'Atlantic-Iberian Biscay Irish- Ocean Physics Reanalysis',\n", " 'product_id': 'IBI_MULTIYEAR_PHY_005_002',\n", - " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_MULTIYEAR_PHY_005_002.jpg',\n", - " 'description': 'The IBI-MFC provides a ocean physical reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly updated on a yearly basis. The model system is run by Mercator-Ocean, being the product post-processed to the user’s format by Nologin with the support of CESGA in terms of supercomputing resources. \\nThe IBI model numerical core is based on the NEMO v3.6 ocean general circulation model run at 1/12° horizontal resolution. Altimeter data, in situ temperature and salinity vertical profiles and satellite sea surface temperature are assimilated.\\nThe product offers 3D daily, monthly and yearly ocean fields, as well as hourly mean fields for surface variables. Daily, monthly and yearly averages of 3D Temperature, 3D Salinity, 3D Zonal and Meridional Velocity components, Mix Layer Depth, Sea Bottom Temperature and Sea Surface Height are provided. Additionally, hourly means of surface fields for variables such as Sea Surface Height, Mix Layer Depth, Surface Temperature and Currents, together with Barotropic Velocities are distributed. 
Additionally, climatological parameters (monthly mean and standard deviation) of these variables for the period 1993-2016 are delivered.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00029',\n", - " 'digital_object_identifier': '10.48670/moi-00029',\n", - " 'sources': ['Numerical models'],\n", - " 'processing_level': 'Level 4',\n", - " 'production_center': 'NOLOGIN'},\n", + " 'description': 'The IBI-MFC provides a ocean physical reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly updated on a yearly basis. The model system is run by Mercator-Ocean, being the product post-processed to the user’s format by Nologin with the support of CESGA in terms of supercomputing resources. \\nThe IBI model numerical core is based on the NEMO v3.6 ocean general circulation model run at 1/12° horizontal resolution. Altimeter data, in situ temperature and salinity vertical profiles and satellite sea surface temperature are assimilated.\\nThe product offers 3D daily, monthly and yearly ocean fields, as well as hourly mean fields for surface variables. Daily, monthly and yearly averages of 3D Temperature, 3D Salinity, 3D Zonal and Meridional Velocity components, Mix Layer Depth, Sea Bottom Temperature and Sea Surface Height are provided. Additionally, hourly means of surface fields for variables such as Sea Surface Height, Mix Layer Depth, Surface Temperature and Currents, together with Barotropic Velocities are distributed. Additionally, climatological parameters (monthly mean and standard deviation) of these variables for the period 1993-2016 are delivered.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00029'},\n", " {'title': 'Atlantic -Iberian Biscay Irish- Ocean Wave Reanalysis',\n", " 'product_id': 'IBI_MULTIYEAR_WAV_005_006',\n", - " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_MULTIYEAR_WAV_005_006.jpg',\n", - " 'description': 'The IBI-MFC provides a high-resolution wave reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly extended on a yearly basis. The model system is run by Nologin with the support of CESGA in terms of supercomputing resources. \\nThe Multi-Year model configuration is based on the MFWAM model developed by Météo-France (MF), covering the same region as the IBI-MFC Near Real Time (NRT) analysis and forecasting product, but with an enhanced horizontal resolution (1/36º instead of 1/20º). The system assimilates significant wave height (SWH) altimeter data and wave spectral data (Envisat and CFOSAT), supplied by MF. Both, the MY and the NRT products, are fed by ECMWF hourly winds. Specifically, the MY system is forced by the ERA5 reanalysis wind data. 
As boundary conditions, the NRT system uses the 2D wave spectra from the Copernicus Marine GLOBAL forecast system, whereas the MY system is nested to the GLOBAL reanalysis.\\nThe product offers hourly instantaneous fields of different wave parameters, including Wave Height, Period and Direction for total spectrum; fields of Wind Wave (or wind sea), Primary Swell Wave and Secondary Swell for partitioned wave spectra; and the highest wave variables, such as maximum crest height and maximum crest-to-trough height. Additionally, climatological parameters of significant wave height (VHM0) and zero -crossing wave period (VTM02) are delivered for the time interval 1993-2016.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00030',\n", - " 'digital_object_identifier': '10.48670/moi-00030',\n", - " 'sources': ['Numerical models'],\n", - " 'processing_level': 'Level 4',\n", - " 'production_center': 'NOLOGIN'},\n", + " 'description': 'The IBI-MFC provides a high-resolution wave reanalysis product for the Iberia-Biscay-Ireland (IBI) area starting in 01/01/1993 and being regularly extended on a yearly basis. The model system is run by Nologin with the support of CESGA in terms of supercomputing resources. \\nThe Multi-Year model configuration is based on the MFWAM model developed by Météo-France (MF), covering the same region as the IBI-MFC Near Real Time (NRT) analysis and forecasting product, but with an enhanced horizontal resolution (1/36º instead of 1/20º). The system assimilates significant wave height (SWH) altimeter data and wave spectral data (Envisat and CFOSAT), supplied by MF. Both, the MY and the NRT products, are fed by ECMWF hourly winds. Specifically, the MY system is forced by the ERA5 reanalysis wind data. As boundary conditions, the NRT system uses the 2D wave spectra from the Copernicus Marine GLOBAL forecast system, whereas the MY system is nested to the GLOBAL reanalysis.\\nThe product offers hourly instantaneous fields of different wave parameters, including Wave Height, Period and Direction for total spectrum; fields of Wind Wave (or wind sea), Primary Swell Wave and Secondary Swell for partitioned wave spectra; and the highest wave variables, such as maximum crest height and maximum crest-to-trough height. 
Additionally, climatological parameters of significant wave height (VHM0) and zero -crossing wave period (VTM02) are delivered for the time interval 1993-2016.\\n\\n**Product Citation**: \\nPlease refer to our Technical FAQ for citing [products.](http://marine.copernicus.eu/faq/cite-cmems-products-cmems-credit/?idpage=169)\\n\\n**DOI (Product)**: \\nhttps://doi.org/10.48670/moi-00030'},\n", " {'title': 'Atlantic Iberian Biscay Irish Ocean- In-Situ Near Real Time Observations',\n", " 'product_id': 'INSITU_IBI_PHYBGCWAV_DISCRETE_MYNRT_013_033',\n", - " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/INSITU_IBI_PHYBGCWAV_DISCRETE_MYNRT_013_033.jpg',\n", - " 'description': 'IBI Seas - near real-time (NRT) in situ quality controlled observations, hourly updated and distributed by INSTAC within 24-48 hours from acquisition in average\\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00043',\n", - " 'digital_object_identifier': '10.48670/moi-00043',\n", - " 'sources': ['In-situ observations'],\n", - " 'processing_level': 'Level 2',\n", - " 'production_center': 'Puertos del Estado (Spain)'},\n", + " 'description': 'IBI Seas - near real-time (NRT) in situ quality controlled observations, hourly updated and distributed by INSTAC within 24-48 hours from acquisition in average\\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00043'},\n", " {'title': 'Atlantic Iberian Biscay Mean Sea Level time series and trend from Observations Reprocessing',\n", " 'product_id': 'OMI_CLIMATE_SL_IBI_area_averaged_anomalies',\n", - " 'thumbnail_url': 'https://catalogue.marine.copernicus.eu/documents/IMG/OMI_CLIMATE_SL_IBI_area_averaged_anomalies.png',\n", - " 'description': '**DEFINITION**\\n\\nThe ocean monitoring indicator on regional mean sea level is derived from the DUACS delayed-time (DT-2021 version, “my” (multi-year) dataset used when available, “myint” (multi-year interim) used after) sea level anomaly maps from satellite altimetry based on a stable number of altimeters (two) in the satellite constellation. These products are distributed by the Copernicus Climate Change Service and the Copernicus Marine Service (SEALEVEL_GLO_PHY_CLIMATE_L4_MY_008_057).\\nThe time series of area averaged anomalies correspond to the area average of the maps in the Irish-Biscay-Iberian (IBI) Sea weighted by the cosine of the latitude (to consider the changing area in each grid with latitude) and by the proportion of ocean in each grid (to consider the coastal areas). The time series are corrected from global TOPEX-A instrumental drift (WCRP Global Sea Level Budget Group, 2018) and regional mean GIA correction (weighted GIA mean of a 27 ensemble model following Spada et Melini, 2019). The time series are adjusted for seasonal annual and semi-annual signals and low-pass filtered at 6 months. Then, the trends/accelerations are estimated on the time series using ordinary least square fit.The trend uncertainty is provided in a 90% confidence interval. It is calculated as the weighted mean uncertainties in the region from Prandi et al., 2021. This estimate only considers errors related to the altimeter observation system (i.e., orbit determination errors, geophysical correction errors and inter-mission bias correction errors). The presence of the interannual signal can strongly influence the trend estimation considering to the altimeter period considered (Wang et al., 2021; Cazenave et al., 2014). 
The uncertainty linked to this effect is not considered.\\n\\n**CONTEXT **\\n\\nChange in mean sea level is an essential indicator of our evolving climate, as it reflects both the thermal expansion of the ocean in response to its warming and the increase in ocean mass due to the melting of ice sheets and glaciers (WCRP Global Sea Level Budget Group, 2018). At regional scale, sea level does not change homogenously. It is influenced by various other processes, with different spatial and temporal scales, such as local ocean dynamic, atmospheric forcing, Earth gravity and vertical land motion changes (IPCC WGI, 2021). The adverse effects of floods, storms and tropical cyclones, and the resulting losses and damage, have increased as a result of rising sea levels, increasing people and infrastructure vulnerability and food security risks, particularly in low-lying areas and island states (IPCC, 2022a). Adaptation and mitigation measures such as the restoration of mangroves and coastal wetlands, reduce the risks from sea level rise (IPCC, 2022b). \\nIn IBI region, the RMSL trend is modulated by decadal variations. As observed over the global ocean, the main actors of the long-term RMSL trend are associated with anthropogenic global/regional warming. Decadal variability is mainly linked to the strengthening or weakening of the Atlantic Meridional Overturning Circulation (AMOC) (e.g. Chafik et al., 2019). The latest is driven by the North Atlantic Oscillation (NAO) for decadal (20-30y) timescales (e.g. Delworth and Zeng, 2016). Along the European coast, the NAO also influences the along-slope winds dynamic which in return significantly contributes to the local sea level variability observed (Chafik et al., 2019).\\n\\n**KEY FINDINGS**\\n\\nOver the [1993/01/01, 2023/07/06] period, the area-averaged sea level in the IBI area rises at a rate of 4.00 \\uf0b1 0.80 mm/year with an acceleration of 0.14 \\uf0b1\\uf0200.06 mm/year2. This trend estimation is based on the altimeter measurements corrected from the Topex-A drift at the beginning of the time series (Legeais et al., 2020) and global GIA correction (Spada et Melini, 2019) to consider the ongoing movement of land. \\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00252\\n\\n**References:**\\n\\n* Cazenave, A., Dieng, H.-B., Meyssignac, B., von Schuckmann, K., Decharme, B., and Berthier, E.: The rate of sea-level rise, Nat. Clim. Change, 4, 358–361, https://doi.org/10.1038/nclimate2159, 2014.\\n* Chafik, L., Nilsen, J. E. Ø., Dangendorf, S., Reverdin, G., and Frederikse, T.: North Atlantic Ocean Circulation and Decadal Sea Level Change During the Altimetry Era, Sci. Rep., 9, 1041, https://doi.org/10.1038/s41598-018-37603-6, 2019.\\n* Delworth, T. L. and Zeng, F.: The Impact of the North Atlantic Oscillation on Climate through Its Influence on the Atlantic Meridional Overturning Circulation, J. Clim., 29, 941–962, https://doi.org/10.1175/JCLI-D-15-0396.1, 2016.\\n* Horwath, M., Gutknecht, B. D., Cazenave, A., Palanisamy, H. K., Marti, F., Marzeion, B., Paul, F., Le Bris, R., Hogg, A. E., Otosaka, I., Shepherd, A., Döll, P., Cáceres, D., Müller Schmied, H., Johannessen, J. A., Nilsen, J. E. Ø., Raj, R. P., Forsberg, R., Sandberg Sørensen, L., Barletta, V. R., Simonsen, S. B., Knudsen, P., Andersen, O. B., Ranndal, H., Rose, S. K., Merchant, C. J., Macintosh, C. 
R., von Schuckmann, K., Novotny, K., Groh, A., Restano, M., and Benveniste, J.: Global sea-level budget and ocean-mass budget, with a focus on advanced data products and uncertainty characterisation, Earth Syst. Sci. Data, 14, 411–447, https://doi.org/10.5194/essd-14-411-2022, 2022.\\n* IPCC: AR6 Synthesis Report: Climate Change 2022, 2022a.\\n* IPCC: Summary for Policymakers [H.-O. Pörtner, D.C. Roberts, E.S. Poloczanska, K. Mintenbeck, M. Tignor, A. Alegría, M. Craig, S. Langsdorf, S. Löschke, V. Möller, A. Okem (eds.)]. In: Climate Change 2022: Impacts, Adaptation, and Vulnerability. Contribution of Working Group II to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change [H.-O. Pörtner, D.C. Roberts, M. Tignor, E.S. Poloczanska, K. Mintenbeck, A. Alegría, M. Craig, S. Langsdorf, S. Löschke, V. Möller, A. Okem, B. Rama (eds.)], 2022b.\\n* IPCC: Summary for Policymakers. In: Climate Change 2022: Mitigation of Climate Change. Contribution of Working Group III to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change [P.R. Shukla, J. Skea, R. Slade, A. Al Khourdajie, R. van Diemen, D. McCollum, M. Pathak, S. Some, P. Vyas, R. Fradera, M. Belkacemi, A. Hasija, G. Lisboa, S. Luz, J. Malley, (eds.)], , https://doi.org/10.1017/9781009157926.001, 2022c.\\n* IPCC WGI: Climate Change 2021: The Physical Science Basis. Contribution of Working Group I to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change, 2021.\\n* IPCC WGII: Climate Change 2021: Impacts, Adaptation and Vulnerability; Summary for Policemakers. Contribution of Working Group II to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change, 2021.\\n* Legeais, J. F., Llowel, W., Melet, A., and Meyssignac, B.: Evidence of the TOPEX-A altimeter instrumental anomaly and acceleration of the global mean sea level, Copernic. Mar. Serv. Ocean State Rep. Issue 4, 13, s77–s82, https://doi.org/10.1080/1755876X.2021.1946240, 2020.\\n* Peltier, W. R.: GLOBAL GLACIAL ISOSTASY AND THE SURFACE OF THE ICE-AGE EARTH: The ICE-5G (VM2) Model and GRACE, Annu. Rev. Earth Planet. Sci., 32, 111–149, https://doi.org/10.1146/annurev.earth.32.082503.144359, 2004.\\n* Prandi, P., Meyssignac, B., Ablain, M., Spada, G., Ribes, A., and Benveniste, J.: Local sea level trends, accelerations and uncertainties over 1993–2019, Sci. Data, 8, 1, https://doi.org/10.1038/s41597-020-00786-7, 2021.\\n* Wang, J., Church, J. A., Zhang, X., and Chen, X.: Reconciling global mean and regional sea level change in projections and observations, Nat. Commun., 12, 990, https://doi.org/10.1038/s41467-021-21265-6, 2021.\\n* WCRP Global Sea Level Budget Group: Global sea-level budget 1993–present, Earth Syst. Sci. Data, 10, 1551–1590, https://doi.org/10.5194/essd-10-1551-2018, 2018.\\n',\n", - " 'digital_object_identifier': '10.48670/moi-00252',\n", - " 'sources': ['Satellite observations'],\n", - " 'processing_level': None,\n", - " 'production_center': 'CLS (France)'},\n", + " 'description': '**DEFINITION**\\n\\nThe ocean monitoring indicator on regional mean sea level is derived from the DUACS delayed-time (DT-2021 version, “my” (multi-year) dataset used when available, “myint” (multi-year interim) used after) sea level anomaly maps from satellite altimetry based on a stable number of altimeters (two) in the satellite constellation. 
These products are distributed by the Copernicus Climate Change Service and the Copernicus Marine Service (SEALEVEL_GLO_PHY_CLIMATE_L4_MY_008_057).\\nThe time series of area averaged anomalies correspond to the area average of the maps in the Irish-Biscay-Iberian (IBI) Sea weighted by the cosine of the latitude (to consider the changing area in each grid with latitude) and by the proportion of ocean in each grid (to consider the coastal areas). The time series are corrected from global TOPEX-A instrumental drift (WCRP Global Sea Level Budget Group, 2018) and regional mean GIA correction (weighted GIA mean of a 27 ensemble model following Spada et Melini, 2019). The time series are adjusted for seasonal annual and semi-annual signals and low-pass filtered at 6 months. Then, the trends/accelerations are estimated on the time series using ordinary least square fit.The trend uncertainty is provided in a 90% confidence interval. It is calculated as the weighted mean uncertainties in the region from Prandi et al., 2021. This estimate only considers errors related to the altimeter observation system (i.e., orbit determination errors, geophysical correction errors and inter-mission bias correction errors). The presence of the interannual signal can strongly influence the trend estimation considering to the altimeter period considered (Wang et al., 2021; Cazenave et al., 2014). The uncertainty linked to this effect is not considered.\\n\\n**CONTEXT **\\n\\nChange in mean sea level is an essential indicator of our evolving climate, as it reflects both the thermal expansion of the ocean in response to its warming and the increase in ocean mass due to the melting of ice sheets and glaciers (WCRP Global Sea Level Budget Group, 2018). At regional scale, sea level does not change homogenously. It is influenced by various other processes, with different spatial and temporal scales, such as local ocean dynamic, atmospheric forcing, Earth gravity and vertical land motion changes (IPCC WGI, 2021). The adverse effects of floods, storms and tropical cyclones, and the resulting losses and damage, have increased as a result of rising sea levels, increasing people and infrastructure vulnerability and food security risks, particularly in low-lying areas and island states (IPCC, 2022a). Adaptation and mitigation measures such as the restoration of mangroves and coastal wetlands, reduce the risks from sea level rise (IPCC, 2022b). \\nIn IBI region, the RMSL trend is modulated by decadal variations. As observed over the global ocean, the main actors of the long-term RMSL trend are associated with anthropogenic global/regional warming. Decadal variability is mainly linked to the strengthening or weakening of the Atlantic Meridional Overturning Circulation (AMOC) (e.g. Chafik et al., 2019). The latest is driven by the North Atlantic Oscillation (NAO) for decadal (20-30y) timescales (e.g. Delworth and Zeng, 2016). Along the European coast, the NAO also influences the along-slope winds dynamic which in return significantly contributes to the local sea level variability observed (Chafik et al., 2019).\\n\\n**KEY FINDINGS**\\n\\nOver the [1993/01/01, 2023/07/06] period, the area-averaged sea level in the IBI area rises at a rate of 4.00 \\uf0b1 0.80 mm/year with an acceleration of 0.14 \\uf0b1\\uf0200.06 mm/year2. 
This trend estimation is based on the altimeter measurements corrected from the Topex-A drift at the beginning of the time series (Legeais et al., 2020) and global GIA correction (Spada et Melini, 2019) to consider the ongoing movement of land. \\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00252\\n\\n**References:**\\n\\n* Cazenave, A., Dieng, H.-B., Meyssignac, B., von Schuckmann, K., Decharme, B., and Berthier, E.: The rate of sea-level rise, Nat. Clim. Change, 4, 358–361, https://doi.org/10.1038/nclimate2159, 2014.\\n* Chafik, L., Nilsen, J. E. Ø., Dangendorf, S., Reverdin, G., and Frederikse, T.: North Atlantic Ocean Circulation and Decadal Sea Level Change During the Altimetry Era, Sci. Rep., 9, 1041, https://doi.org/10.1038/s41598-018-37603-6, 2019.\\n* Delworth, T. L. and Zeng, F.: The Impact of the North Atlantic Oscillation on Climate through Its Influence on the Atlantic Meridional Overturning Circulation, J. Clim., 29, 941–962, https://doi.org/10.1175/JCLI-D-15-0396.1, 2016.\\n* Horwath, M., Gutknecht, B. D., Cazenave, A., Palanisamy, H. K., Marti, F., Marzeion, B., Paul, F., Le Bris, R., Hogg, A. E., Otosaka, I., Shepherd, A., Döll, P., Cáceres, D., Müller Schmied, H., Johannessen, J. A., Nilsen, J. E. Ø., Raj, R. P., Forsberg, R., Sandberg Sørensen, L., Barletta, V. R., Simonsen, S. B., Knudsen, P., Andersen, O. B., Ranndal, H., Rose, S. K., Merchant, C. J., Macintosh, C. R., von Schuckmann, K., Novotny, K., Groh, A., Restano, M., and Benveniste, J.: Global sea-level budget and ocean-mass budget, with a focus on advanced data products and uncertainty characterisation, Earth Syst. Sci. Data, 14, 411–447, https://doi.org/10.5194/essd-14-411-2022, 2022.\\n* IPCC: AR6 Synthesis Report: Climate Change 2022, 2022a.\\n* IPCC: Summary for Policymakers [H.-O. Pörtner, D.C. Roberts, E.S. Poloczanska, K. Mintenbeck, M. Tignor, A. Alegría, M. Craig, S. Langsdorf, S. Löschke, V. Möller, A. Okem (eds.)]. In: Climate Change 2022: Impacts, Adaptation, and Vulnerability. Contribution of Working Group II to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change [H.-O. Pörtner, D.C. Roberts, M. Tignor, E.S. Poloczanska, K. Mintenbeck, A. Alegría, M. Craig, S. Langsdorf, S. Löschke, V. Möller, A. Okem, B. Rama (eds.)], 2022b.\\n* IPCC: Summary for Policymakers. In: Climate Change 2022: Mitigation of Climate Change. Contribution of Working Group III to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change [P.R. Shukla, J. Skea, R. Slade, A. Al Khourdajie, R. van Diemen, D. McCollum, M. Pathak, S. Some, P. Vyas, R. Fradera, M. Belkacemi, A. Hasija, G. Lisboa, S. Luz, J. Malley, (eds.)], , https://doi.org/10.1017/9781009157926.001, 2022c.\\n* IPCC WGI: Climate Change 2021: The Physical Science Basis. Contribution of Working Group I to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change, 2021.\\n* IPCC WGII: Climate Change 2021: Impacts, Adaptation and Vulnerability; Summary for Policemakers. Contribution of Working Group II to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change, 2021.\\n* Legeais, J. F., Llowel, W., Melet, A., and Meyssignac, B.: Evidence of the TOPEX-A altimeter instrumental anomaly and acceleration of the global mean sea level, Copernic. Mar. Serv. Ocean State Rep. Issue 4, 13, s77–s82, https://doi.org/10.1080/1755876X.2021.1946240, 2020.\\n* Peltier, W. R.: GLOBAL GLACIAL ISOSTASY AND THE SURFACE OF THE ICE-AGE EARTH: The ICE-5G (VM2) Model and GRACE, Annu. Rev. Earth Planet. 
Sci., 32, 111–149, https://doi.org/10.1146/annurev.earth.32.082503.144359, 2004.\\n* Prandi, P., Meyssignac, B., Ablain, M., Spada, G., Ribes, A., and Benveniste, J.: Local sea level trends, accelerations and uncertainties over 1993–2019, Sci. Data, 8, 1, https://doi.org/10.1038/s41597-020-00786-7, 2021.\\n* Wang, J., Church, J. A., Zhang, X., and Chen, X.: Reconciling global mean and regional sea level change in projections and observations, Nat. Commun., 12, 990, https://doi.org/10.1038/s41467-021-21265-6, 2021.\\n* WCRP Global Sea Level Budget Group: Global sea-level budget 1993–present, Earth Syst. Sci. Data, 10, 1551–1590, https://doi.org/10.5194/essd-10-1551-2018, 2018.\\n'},\n", " {'title': 'Iberia Biscay Ireland sea level extreme variability mean and anomaly (observations)',\n", " 'product_id': 'OMI_EXTREME_SL_IBI_slev_mean_and_anomaly_obs',\n", - " 'thumbnail_url': 'https://catalogue.marine.copernicus.eu/documents/IMG/OMI_EXTREME_SL_IBI_slev_mean_and_anomaly_obs.png',\n", - " 'description': '**DEFINITION**\\n\\nThe OMI_EXTREME_SL_IBI_slev_mean_and_anomaly_obs indicator is based on the computation of the 99th and the 1st percentiles from in situ data (observations). It is computed for the variable sea level measured by tide gauges along the coast. The use of percentiles instead of annual maximum and minimum values, makes this extremes study less affected by individual data measurement errors. The annual percentiles referred to annual mean sea level are temporally averaged and their spatial evolution is displayed in the dataset ibi_omi_sl_extreme_var_slev_mean_and_anomaly_obs, jointly with the anomaly in the target year. This study of extreme variability was first applied to sea level variable (Pérez Gómez et al 2016) and then extended to other essential variables, sea surface temperature and significant wave height (Pérez Gómez et al 2018).\\n\\n**CONTEXT**\\nSea level (SLEV) is one of the Essential Ocean Variables most affected by climate change. Global mean sea level rise has accelerated since the 1990’s (Abram et al., 2019, Legeais et al., 2020), due to the increase of ocean temperature and mass volume caused by land ice melting (WCRP, 2018). Basin scale oceanographic and meteorological features lead to regional variations of this trend that combined with changes in the frequency and intensity of storms could also rise extreme sea levels up to one metre by the end of the century (Vousdoukas et al., 2020). This will significantly increase coastal vulnerability to storms, with important consequences on the extent of flooding events, coastal erosion and damage to infrastructures caused by waves.\\nThe Iberian Biscay Ireland region shows positive sea level trend modulated by decadal-to-multidecadal variations driven by ocean dynamics and superposed to the long-term trend (Chafik et al., 2019).\\n\\n** KEY FINDINGS**\\nThe completeness index criteria is fulfilled by 55 stations in 2021, three more than those available in 2020 (52), recently added to the multi-year product INSITU_GLO_PHY_SSH_DISCRETE_MY_013_053. The mean 99th percentiles reflect the great tide spatial variability around the UK and the north of France. Minimum values are observed in the Irish coast (e.g.: 0.66 m above mean sea level in Arklow Harbour), South of England (e.g.: 0.70 m above mean sea level in Bournemouth), and the Canary Islands (e.g.: 0.96 m above mean sea level in Hierro). Maximum values are observed in the Bristol and English Channels (e.g.: 6.26 m and 5.17 m above mean sea level in Newport and St. 
Helier, respectively). The standard deviation reflects the south-north increase of storminess, ranging between 2 cm in the Canary Islands to 12 cm in Newport (Bristol Channel). Negative or close to zero anomalies of 2021 99th percentile are observed this year for most of the stations in the region, reaching up to -17.8 cm in Newport, or -15 cm in St.Helier (Jersey Island, Channel Islands).\\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00253\\n\\n**References:**\\n\\n* Abram, N., Gattuso, J.-P., Prakash, A., Cheng, L., Chidichimo, M. P., Crate, S., Enomoto, H., Garschagen, M., Gruber, N., Harper, S., Holland, E., Kudela, R. M., Rice, J., Steffen, K., & von Schuckmann, K. (2019). Framing and Context of the Report. In H. O. Pörtner, D. C. Roberts, V. Masson-Delmotte, P. Zhai, M. Tignor, E. Poloczanska, K. Mintenbeck, A. Alegría, M. Nicolai, A. Okem, J. Petzold, B. Rama, & N. M. Weyer (Eds.), IPCC Special Report on the Ocean and Cryosphere in a Changing Climate (pp. 73–129). in press. https://www.ipcc.ch/srocc/\\n* Legeais J-F, W. Llowel, A. Melet and B. Meyssignac: Evidence of the TOPEX-A Altimeter Instrumental Anomaly and Acceleration of the Global Mean Sea Level, in Copernicus Marine Service Ocean State Report, Issue 4, Journal of Operational Oceanography, 2020, accepted.\\n* Pérez-Gómez B, Álvarez-Fanjul E, She J, Pérez-González I, Manzano F. 2016. Extreme sea level events, Section 4.4, p:300. In: Von Schuckmann K, Le Traon PY, Alvarez-Fanjul E, Axell L, Balmaseda M, Breivik LA, Brewin RJW, Bricaud C, Drevillon M, Drillet Y, Dubois C , Embury O, Etienne H, García-Sotillo M, Garric G, Gasparin F, Gutknecht E, Guinehut S, Hernandez F, Juza M, Karlson B, Korres G, Legeais JF, Levier B, Lien VS, Morrow R, Notarstefano G, Parent L, Pascual A, Pérez-Gómez B, Perruche C, Pinardi N, Pisano A, Poulain PM , Pujol IM, Raj RP, Raudsepp U, Roquet H, Samuelsen A, Sathyendranath S, She J, Simoncelli S, Solidoro C, Tinker J, Tintoré J, Viktorsson L, Ablain M, Almroth-Rosell E, Bonaduce A, Clementi E, Cossarini G, Dagneaux Q, Desportes C, Dye S, Fratianni C, Good S, Greiner E, Gourrion J, Hamon M, Holt J, Hyder P, Kennedy J, Manzano-Muñoz F, Melet A, Meyssignac B, Mulet S, Nardelli BB, O’Dea E, Olason E, Paulmier A, Pérez-González I, Reid R, Racault MF, Raitsos DE, Ramos A, Sykes P, Szekely T, Verbrugge N. 2016. The Copernicus Marine Environment Monitoring Service Ocean State Report, Journal of Operational Oceanography. 9 (sup2): 235-320. http://dx.doi.org/10.1080/1755876X.2016.1273446\\n* Pérez Gómez B, De Alfonso M, Zacharioudaki A, Pérez González I, Álvarez Fanjul E, Müller M, Marcos M, Manzano F, Korres G, Ravdas M, Tamm S. 2018. Sea level, SST and waves: extremes variability. In: Copernicus Marine Service Ocean State Report, Issue 2, Journal of Operational Oceanography, 11:sup1, Chap. 3.1, s79–s88, DOI: https://doi.org/10.1080/1755876X.2018.1489208.\\n* WCRP Global Sea Level Budget Group: Global sea-level budget 1993–present. 2018. Earth Syst. Sci. Data, 10, 1551-1590, https://doi.org/10.5194/essd-10-1551-2018.\\n* Vousdoukas MI, Mentaschi L, Hinkel J, et al. 2020. Economic motivation for raising coastal flood defenses in Europe. Nat Commun 11, 2119 (2020). 
https://doi.org/10.1038/s41467-020-15665-3.\\n',\n", - " 'digital_object_identifier': '10.48670/moi-00253',\n", - " 'sources': ['In-situ observations'],\n", - " 'processing_level': None,\n", - " 'production_center': 'Puertos del Estado (Spain)'}]}" + " 'description': '**DEFINITION**\\n\\nThe OMI_EXTREME_SL_IBI_slev_mean_and_anomaly_obs indicator is based on the computation of the 99th and the 1st percentiles from in situ data (observations). It is computed for the variable sea level measured by tide gauges along the coast. The use of percentiles instead of annual maximum and minimum values, makes this extremes study less affected by individual data measurement errors. The annual percentiles referred to annual mean sea level are temporally averaged and their spatial evolution is displayed in the dataset ibi_omi_sl_extreme_var_slev_mean_and_anomaly_obs, jointly with the anomaly in the target year. This study of extreme variability was first applied to sea level variable (Pérez Gómez et al 2016) and then extended to other essential variables, sea surface temperature and significant wave height (Pérez Gómez et al 2018).\\n\\n**CONTEXT**\\nSea level (SLEV) is one of the Essential Ocean Variables most affected by climate change. Global mean sea level rise has accelerated since the 1990’s (Abram et al., 2019, Legeais et al., 2020), due to the increase of ocean temperature and mass volume caused by land ice melting (WCRP, 2018). Basin scale oceanographic and meteorological features lead to regional variations of this trend that combined with changes in the frequency and intensity of storms could also rise extreme sea levels up to one metre by the end of the century (Vousdoukas et al., 2020). This will significantly increase coastal vulnerability to storms, with important consequences on the extent of flooding events, coastal erosion and damage to infrastructures caused by waves.\\nThe Iberian Biscay Ireland region shows positive sea level trend modulated by decadal-to-multidecadal variations driven by ocean dynamics and superposed to the long-term trend (Chafik et al., 2019).\\n\\n** KEY FINDINGS**\\nThe completeness index criteria is fulfilled by 55 stations in 2021, three more than those available in 2020 (52), recently added to the multi-year product INSITU_GLO_PHY_SSH_DISCRETE_MY_013_053. The mean 99th percentiles reflect the great tide spatial variability around the UK and the north of France. Minimum values are observed in the Irish coast (e.g.: 0.66 m above mean sea level in Arklow Harbour), South of England (e.g.: 0.70 m above mean sea level in Bournemouth), and the Canary Islands (e.g.: 0.96 m above mean sea level in Hierro). Maximum values are observed in the Bristol and English Channels (e.g.: 6.26 m and 5.17 m above mean sea level in Newport and St. Helier, respectively). The standard deviation reflects the south-north increase of storminess, ranging between 2 cm in the Canary Islands to 12 cm in Newport (Bristol Channel). Negative or close to zero anomalies of 2021 99th percentile are observed this year for most of the stations in the region, reaching up to -17.8 cm in Newport, or -15 cm in St.Helier (Jersey Island, Channel Islands).\\n\\n**DOI (product):** \\nhttps://doi.org/10.48670/moi-00253\\n\\n**References:**\\n\\n* Abram, N., Gattuso, J.-P., Prakash, A., Cheng, L., Chidichimo, M. P., Crate, S., Enomoto, H., Garschagen, M., Gruber, N., Harper, S., Holland, E., Kudela, R. M., Rice, J., Steffen, K., & von Schuckmann, K. (2019). Framing and Context of the Report. In H. O. 
Pörtner, D. C. Roberts, V. Masson-Delmotte, P. Zhai, M. Tignor, E. Poloczanska, K. Mintenbeck, A. Alegría, M. Nicolai, A. Okem, J. Petzold, B. Rama, & N. M. Weyer (Eds.), IPCC Special Report on the Ocean and Cryosphere in a Changing Climate (pp. 73–129). in press. https://www.ipcc.ch/srocc/\\n* Legeais J-F, W. Llowel, A. Melet and B. Meyssignac: Evidence of the TOPEX-A Altimeter Instrumental Anomaly and Acceleration of the Global Mean Sea Level, in Copernicus Marine Service Ocean State Report, Issue 4, Journal of Operational Oceanography, 2020, accepted.\\n* Pérez-Gómez B, Álvarez-Fanjul E, She J, Pérez-González I, Manzano F. 2016. Extreme sea level events, Section 4.4, p:300. In: Von Schuckmann K, Le Traon PY, Alvarez-Fanjul E, Axell L, Balmaseda M, Breivik LA, Brewin RJW, Bricaud C, Drevillon M, Drillet Y, Dubois C , Embury O, Etienne H, García-Sotillo M, Garric G, Gasparin F, Gutknecht E, Guinehut S, Hernandez F, Juza M, Karlson B, Korres G, Legeais JF, Levier B, Lien VS, Morrow R, Notarstefano G, Parent L, Pascual A, Pérez-Gómez B, Perruche C, Pinardi N, Pisano A, Poulain PM , Pujol IM, Raj RP, Raudsepp U, Roquet H, Samuelsen A, Sathyendranath S, She J, Simoncelli S, Solidoro C, Tinker J, Tintoré J, Viktorsson L, Ablain M, Almroth-Rosell E, Bonaduce A, Clementi E, Cossarini G, Dagneaux Q, Desportes C, Dye S, Fratianni C, Good S, Greiner E, Gourrion J, Hamon M, Holt J, Hyder P, Kennedy J, Manzano-Muñoz F, Melet A, Meyssignac B, Mulet S, Nardelli BB, O’Dea E, Olason E, Paulmier A, Pérez-González I, Reid R, Racault MF, Raitsos DE, Ramos A, Sykes P, Szekely T, Verbrugge N. 2016. The Copernicus Marine Environment Monitoring Service Ocean State Report, Journal of Operational Oceanography. 9 (sup2): 235-320. http://dx.doi.org/10.1080/1755876X.2016.1273446\\n* Pérez Gómez B, De Alfonso M, Zacharioudaki A, Pérez González I, Álvarez Fanjul E, Müller M, Marcos M, Manzano F, Korres G, Ravdas M, Tamm S. 2018. Sea level, SST and waves: extremes variability. In: Copernicus Marine Service Ocean State Report, Issue 2, Journal of Operational Oceanography, 11:sup1, Chap. 3.1, s79–s88, DOI: https://doi.org/10.1080/1755876X.2018.1489208.\\n* WCRP Global Sea Level Budget Group: Global sea-level budget 1993–present. 2018. Earth Syst. Sci. Data, 10, 1551-1590, https://doi.org/10.5194/essd-10-1551-2018.\\n* Vousdoukas MI, Mentaschi L, Hinkel J, et al. 2020. Economic motivation for raising coastal flood defenses in Europe. Nat Commun 11, 2119 (2020). 
https://doi.org/10.1038/s41467-020-15665-3.\\n'}]}" ] }, - "execution_count": 10, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# We can also filter based on the strings found by the describe method\n", - "copernicusmarine.describe(include_description=True, contains=[\"Iberian Biscay\"])" + "catalogue = copernicusmarine.describe(contains=[\"Iberian Biscay\"])\n", + "catalogue.model_dump(\n", + " exclude_none=True, \n", + " exclude_unset=True, \n", + " include={\"products\": {\"__all__\": {\"title\": True, \"product_id\": True, \"description\": True}}}\n", + " )" ] }, { @@ -309,28 +268,28 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Fetching catalog: 100%|██████████| 2/2 [00:11<00:00, 5.85s/it]\n" + "Fetching catalogue: 100%|██████████| 2/2 [00:10<00:00, 5.48s/it]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "Products that offer the variables ['chl', 'o2']: 85\n" + "Products that offer the variables ['chl', 'o2']: 93\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "Fetching catalog: 100%|██████████| 2/2 [00:11<00:00, 5.57s/it]\n" + "Fetching catalogue: 100%|██████████| 2/2 [00:11<00:00, 5.58s/it]\n" ] }, { @@ -373,15 +332,15 @@ "# We find the products that offer the variables we are interested in:\n", "describe_var = copernicusmarine.describe(contains=[var_name[0], var_name[1]])\n", "prod_var = []\n", - "for product in describe_var['products']:\n", - " prod_var.append(product['product_id'])\n", + "for product in describe_var.products:\n", + " prod_var.append(product.product_id)\n", "print(f\"Products that offer the variables {var_name}: {len(prod_var)}\")\n", "\n", "# We save the products that offer the region we are interested in:\n", - "describe_loc = copernicusmarine.describe(contains=[regions[0]], include_all=True)\n", + "describe_loc = copernicusmarine.describe(contains=[regions[0]])\n", "prod_loc = []\n", - "for product in describe_loc['products']:\n", - " prod_loc.append(product['product_id'])\n", + "for product in describe_loc.products:\n", + " prod_loc.append(product.product_id)\n", "print(f\"Products in the region {regions}: {len(prod_loc)}\")\n", "\n", "# And we search the intersection of both lists:\n", @@ -390,19 +349,19 @@ "\n", "pairs_dataset_step = {}\n", "\n", - "for product in describe_loc['products']:\n", + "for product in describe_loc.products:\n", " # We add a filter to clarify specific products, in this case the \"OMI\" (Ocean Monitoring Indicators)\n", - " if product['product_id'] in final_selected_products and \"OMI_\" not in product[\"product_id\"]:\n", - " for dataset in product['datasets']:\n", - " for version in dataset['versions']:\n", - " for part in version['parts']:\n", - " for part in version.get('parts', []):\n", - " for service in part['services'][:-1]:\n", + " if product.product_id in final_selected_products and \"OMI_\" not in product.product_id:\n", + " for dataset in product.datasets:\n", + " for version in dataset.versions:\n", + " for part in version.parts:\n", + " # Loop over the services available for this part:\n", + " for service in part.services[:-1]:\n", " # And we filter the datasets that can be subsetted:\n", - " if 'arco-' in service['service_type']['service_name'] and 'zarr' in service['service_format']:\n", - " for variable in service['variables']:\n", - " if (variable['short_name'] == var_name[0], variable['short_name'] == var_name[1]) and variable['coordinates'] 
!= []:\n", - " pairs_dataset_step[dataset['dataset_id']] = (product['product_id'], variable['coordinates'][2]['step'])\n", + " if 'arco-' in service.service_name and service.service_format == 'zarr':\n", + " for variable in service.variables:\n", + " if variable.short_name in var_name and variable.coordinates != []:\n", + " pairs_dataset_step[dataset.dataset_id] = (product.product_id, variable.coordinates[2].step)\n", "\n", "for key, value in pairs_dataset_step.items():\n", " print(\"______________________\")\n", @@ -419,7 +378,49 @@ "- [IBI_ANALYSISFORECAST_BGC_005_004](https://data.marine.copernicus.eu/product/IBI_ANALYSISFORECAST_BGC_005_004/description) with recent data and a few days of forecast\n", "- [IBI_MULTIYEAR_BGC_005_003](https://data.marine.copernicus.eu/product/IBI_MULTIYEAR_BGC_005_003/description) with several years of data covering the last decades\n", "\n", - "Both products offer daily (`P1D`) and monthly (`P1M`) data." + "Both products offer daily (`P1D`) and monthly (`P1M`) data.\n", + "\n", + "Now that we know the product ID we are interested in, we can call describe directly on that product. This is much faster, since we do not have to parse the whole catalogue." ] }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Fetching catalogue: 100%|██████████| 2/2 [00:00<00:00, 2.35it/s]\n" + ] + }, + { + "data": { + "text/plain": [ + "{'products': [{'title': 'Atlantic-Iberian Biscay Irish- Ocean BioGeoChemistry NON ASSIMILATIVE Hindcast',\n", + " 'product_id': 'IBI_MULTIYEAR_BGC_005_003',\n", + " 'thumbnail_url': 'https://mdl-metadata.s3.waw3-1.cloudferro.com/metadata/thumbnails/IBI_MULTIYEAR_BGC_005_003.jpg',\n", + " 'digital_object_identifier': '10.48670/moi-00028',\n", + " 'sources': ['Numerical models'],\n", + " 'processing_level': 'Level 4',\n", + " 'production_center': 'NOLOGIN'}]}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "product_describe = copernicusmarine.describe(product_id=\"IBI_MULTIYEAR_BGC_005_003\")\n", + "\n", + "# Let's only show the basic product information\n", + "product_describe.model_dump(\n", + " exclude_none=True, \n", + " exclude_unset=True, \n", + " exclude={\"products\": {\"__all__\": {\"datasets\": True, \"description\": True, \"keywords\": True}}}\n", + " )" ] }, { @@ -435,27 +436,27 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "INFO - 2024-10-18T15:50:02Z - Dataset version was not specified, the latest one was selected: \"202211\"\n", - "INFO - 2024-10-18T15:50:02Z - Dataset part was not specified, the first one was selected: \"default\"\n", - "INFO - 2024-10-18T15:50:03Z - Service was not specified, the default one was selected: \"arco-time-series\"\n", - "INFO - 2024-10-18T15:50:05Z - Downloading using service arco-time-series...\n", - "INFO - 2024-10-18T15:50:06Z - Estimated size of the dataset file is 61.855 MB\n", + "INFO - 2024-10-23T08:34:04Z - Dataset version was not specified, the latest one was selected: \"202211\"\n", + "INFO - 2024-10-23T08:34:04Z - Dataset part was not specified, the first one was selected: \"default\"\n", + "INFO - 2024-10-23T08:34:06Z - Service was not specified, the default one was selected: \"arco-time-series\"\n", + "INFO - 2024-10-23T08:34:07Z - Downloading using service 
arco-time-series...\n", + "INFO - 2024-10-23T08:34:09Z - Estimated size of the dataset file is 61.855 MB\n", "Estimated size of the data that needs to be downloaded to obtain the result: 2814 MB\n", "This is a very rough estimate that is generally higher than the actual size of the data that needs to be downloaded.\n", - "INFO - 2024-10-18T15:50:06Z - Writing to local storage. Please wait...\n" + "INFO - 2024-10-23T08:34:09Z - Writing to local storage. Please wait...\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "d062ca80f47149f481764a07113437fc", + "model_id": "75e8924847e84564b05b12724c9701a6", "version_major": 2, "version_minor": 0 }, @@ -470,7 +471,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "INFO - 2024-10-18T15:50:17Z - Successfully downloaded to data/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_chl-o2_5.00W-3.00W_43.00N-44.00N_0.49-5727.92m_2023-09-01-2023-09-30_(1).nc\n" + "INFO - 2024-10-23T08:34:19Z - Successfully downloaded to data/cmems_mod_ibi_bgc_anfc_0.027deg-3D_P1D-m_chl-o2_5.00W-3.00W_43.00N-44.00N_0.49-5727.92m_2023-09-01-2023-09-30_(2).nc\n" ] } ], diff --git a/tests/__snapshots__/test_command_line_interface.ambr b/tests/__snapshots__/test_describe.ambr similarity index 66% rename from tests/__snapshots__/test_command_line_interface.ambr rename to tests/__snapshots__/test_describe.ambr index 793ddafa..b1ad2453 100644 --- a/tests/__snapshots__/test_command_line_interface.ambr +++ b/tests/__snapshots__/test_describe.ambr @@ -1,20 +1,17 @@ # serializer version: 1 -# name: TestCommandLineInterface.test_describe_including_datasets +# name: TestDescribe.test_describe_returned_fields_datasets list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-14/native/GLOBAL_MULTIYEAR_PHY_ENS_001_031/cmems_mod_glo_phy-all_my_0.25deg_P1D-m_202311', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -24,10 +21,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -37,10 +34,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -50,10 +47,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -63,10 +60,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -76,10 +73,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -89,10 +86,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -102,10 +99,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -115,10 +112,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -128,10 +125,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -141,10 +138,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -154,10 +151,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + 
-80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -167,10 +164,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -180,10 +177,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -193,10 +190,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -206,10 +203,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -219,10 +216,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -232,10 +229,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -245,10 +242,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -258,10 +255,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -271,10 +268,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -284,10 +281,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -297,10 +294,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -310,10 +307,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -325,55 +322,41 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-geo-series', - 'short_name': 'geoseries', - }), + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-066/arco/GLOBAL_MULTIYEAR_PHY_ENS_001_031/cmems_mod_glo_phy-all_my_0.25deg_P1D-m_202311/timeChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'mlotst_cglo', @@ -382,47 +365,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 
'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'mlotst_glor', @@ -431,47 +402,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'mlotst_oras', @@ -480,47 +439,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 
1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'siconc_cglo', @@ -529,47 +476,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'siconc_glor', @@ -578,47 +513,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'siconc_oras', @@ -627,47 +550,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': 
None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'sithick_cglo', @@ -676,47 +587,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'sithick_glor', @@ -725,47 +624,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, 
}), ]), 'short_name': 'sithick_oras', @@ -774,22 +661,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -869,40 +750,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so_cglo', @@ -911,22 +780,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1006,40 +869,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so_glor', @@ -1048,22 +899,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 
'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1143,40 +988,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so_oras', @@ -1185,22 +1018,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1280,40 +1107,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'thetao_cglo', @@ -1322,22 +1137,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 
'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1417,40 +1226,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'thetao_glor', @@ -1459,22 +1256,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1554,40 +1345,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'thetao_oras', @@ -1596,22 +1375,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1691,40 +1464,28 @@ ]), }), 
dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo_cglo', @@ -1733,22 +1494,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1828,40 +1583,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo_glor', @@ -1870,22 +1613,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -1965,40 +1702,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 
90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo_oras', @@ -2007,22 +1732,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -2102,40 +1821,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo_cglo', @@ -2144,22 +1851,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -2239,40 +1940,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': 
None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo_glor', @@ -2281,22 +1970,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -2376,40 +2059,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo_oras', @@ -2418,47 +2089,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 
1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'zos_cglo', @@ -2467,47 +2126,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'zos_glor', @@ -2516,47 +2163,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 681, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1440, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'zos_oras', @@ -2567,55 +2202,41 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-time-series', - 'short_name': 'timeseries', - }), + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-066/arco/GLOBAL_MULTIYEAR_PHY_ENS_001_031/cmems_mod_glo_phy-all_my_0.25deg_P1D-m_202311/geoChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 
90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'mlotst_cglo', @@ -2624,47 +2245,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'mlotst_glor', @@ -2673,47 +2282,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 
1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'mlotst_oras', @@ -2722,47 +2319,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'siconc_cglo', @@ -2771,47 +2356,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'siconc_glor', @@ -2820,47 +2393,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 
'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'siconc_oras', @@ -2869,47 +2430,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'sithick_cglo', @@ -2918,47 +2467,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, 
+ 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'sithick_glor', @@ -2967,47 +2504,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'sithick_oras', @@ -3016,22 +2541,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -3111,40 +2630,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so_cglo', @@ -3153,22 +2660,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 
5902.0576171875, 5698.060546875, @@ -3248,40 +2749,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so_glor', @@ -3290,22 +2779,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -3385,40 +2868,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so_oras', @@ -3427,22 +2898,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -3522,40 +2987,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 
'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'thetao_cglo', @@ -3564,22 +3017,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -3659,40 +3106,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'thetao_glor', @@ -3701,22 +3136,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -3796,40 +3225,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 
'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'thetao_oras', @@ -3838,22 +3255,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -3933,40 +3344,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo_cglo', @@ -3975,22 +3374,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -4070,40 +3463,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 
'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo_glor', @@ -4112,22 +3493,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -4207,40 +3582,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo_oras', @@ -4249,22 +3612,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -4344,40 +3701,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 
'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo_cglo', @@ -4386,22 +3731,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -4481,40 +3820,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo_glor', @@ -4523,22 +3850,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5902.0576171875, 5698.060546875, @@ -4618,40 +3939,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 
'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo_oras', @@ -4660,47 +3969,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'zos_cglo', @@ -4709,47 +4006,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'zos_glor', @@ -4758,47 +4043,35 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.75, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 
90.0, + 'minimum_value': -80.0, 'step': 0.25, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.75, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.25, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1826, 'coordinate_id': 'time', - 'maximum_value': 1703980800000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1703980800000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), ]), 'short_name': 'zos_oras', diff --git a/tests/__snapshots__/test_describe_released_date.ambr b/tests/__snapshots__/test_describe_released_date.ambr index 5f2d9af9..6d88475b 100644 --- a/tests/__snapshots__/test_describe_released_date.ambr +++ b/tests/__snapshots__/test_describe_released_date.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: TestDescribe.test_describe_all_versions +# name: TestDescribeReleaseDate.test_describe_all_versions dict({ 'products': list([ dict({ @@ -14,22 +14,18 @@ dict({ 'name': 'default', 'released_date': '2022-04-23T04:23:46.808754Z', - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-14/native/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_202211', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -39,10 +35,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -54,30 +50,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-geo-series', - 'short_name': 'geoseries', - }), + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-007/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_202211/timeChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -132,40 +120,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 512, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2048, 
'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo', @@ -174,22 +150,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -244,40 +214,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 512, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2048, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo', @@ -288,30 +246,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-time-series', - 'short_name': 'timeseries', - }), + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-007/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_202211/geoChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -366,40 +316,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 
'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 177, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo', @@ -408,22 +346,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -478,40 +410,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 177, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo', @@ -530,22 +450,18 @@ dict({ 'name': 'default', 'released_date': '2060-04-23T04:23:46.808754Z', - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-14/native/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_206011', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -555,10 +471,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 
90.0, ]), 'coordinates': list([ ]), @@ -570,30 +486,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-geo-series', - 'short_name': 'geoseries', - }), + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-007/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_206011/timeChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -648,40 +556,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 512, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2048, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo', @@ -690,22 +586,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -760,40 +650,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 512, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2048, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 
1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo', @@ -804,30 +682,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-time-series', - 'short_name': 'timeseries', - }), + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-007/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_206011/geoChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -882,40 +752,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 177, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo', @@ -924,22 +782,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -994,40 +846,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 
179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 177, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo', @@ -1052,22 +892,18 @@ dict({ 'name': 'default', 'released_date': '2060-04-23T04:23:46.808754Z', - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-14/native/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-so_anfc_0.083deg_P1D-m_202211', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -1079,30 +915,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-geo-series', - 'short_name': 'geoseries', - }), + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-010/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-so_anfc_0.083deg_P1D-m_202211/timeChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -1157,40 +985,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 512, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2048, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so', @@ -1201,30 +1017,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 
'arco-time-series', - 'short_name': 'timeseries', - }), + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-010/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-so_anfc_0.083deg_P1D-m_202211/geoChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -1279,40 +1087,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 177, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'so', @@ -1336,15 +1132,55 @@ 'parts': list([ dict({ 'name': 'default', - 'released_date': None, - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_format': 'zarr', + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', + 'uri': 'https://s3.waw3-1.cloudferro.com:443/mdl-arco-time-005-dta/arco/OCEANCOLOUR_GLO_BGC_L3_MY_009_103/cmems_obs-oc_glo_bgc-plankton_my_l3-olci-300m_P1D_202211/timeChunked.zarr', + 'variables': list([ + dict({ + 'bbox': list([ + -179.99722290039062, + -89.99722290039062, + 179.9972381591797, + 89.99722290039062, + ]), + 'coordinates': list([ + dict({ + 'chunking_length': 1, + 'coordinate_id': 'time', + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1487376000000.0, + 'minimum_value': 1461542400000.0, + 'step': 86400000.0, + }), + dict({ + 'chunking_length': 400, + 'coordinate_id': 'latitude', + 'coordinate_unit': 'degrees_north', + 'maximum_value': 89.99722290039062, + 'minimum_value': -89.99722290039062, + 'step': 0.0055555555974190944, + }), + dict({ + 'chunking_length': 1200, + 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', + 'maximum_value': 179.9972381591797, + 'minimum_value': -179.99722290039062, + 'step': 0.0055555555764870024, + }), + ]), + 'short_name': 'CHL', + 'standard_name': 'mass_concentration_of_chlorophyll_a_in_sea_water', + 'units': 'milligram m-3', + 
}), + ]), + }), + dict({ + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-dta/native/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_obs-oc_glo_bgc-plankton_my_l3-olci-300m_P1D_202211', 'variables': list([ dict({ @@ -1362,6 +1198,51 @@ }), ]), }), + dict({ + 'service_format': 'zarr', + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', + 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-005-dta/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_obs-oc_glo_bgc-plankton_my_l3-olci-300m_P1D_202211/geoChunked.zarr', + 'variables': list([ + dict({ + 'bbox': list([ + -179.99722290039062, + -89.99722290039062, + 179.9972381591797, + 89.99722290039062, + ]), + 'coordinates': list([ + dict({ + 'chunking_length': 300, + 'coordinate_id': 'time', + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1487376000000.0, + 'minimum_value': 1461542400000.0, + 'step': 86400000.0, + }), + dict({ + 'chunking_length': 60, + 'coordinate_id': 'latitude', + 'coordinate_unit': 'degrees_north', + 'maximum_value': 89.99722290039062, + 'minimum_value': -89.99722290039062, + 'step': 0.0055555555974190944, + }), + dict({ + 'chunking_length': 60, + 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', + 'maximum_value': 179.9972381591797, + 'minimum_value': -179.99722290039062, + 'step': 0.0055555555764870024, + }), + ]), + 'short_name': 'CHL', + 'standard_name': 'mass_concentration_of_chlorophyll_a_in_sea_water', + 'units': 'milligram m-3', + }), + ]), + }), ]), }), ]), @@ -1391,14 +1272,10 @@ dict({ 'name': 'default', 'released_date': '2060-02-28T19:49:29.817792Z', - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-13/native/NWSHELF_MULTIYEAR_BGC_004_011/cmems_mod_nws_bgc-chl_my_7km-3D_P1D-m_202012', 'variables': list([ dict({ @@ -1418,10 +1295,8 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-geo-series', - 'short_name': 'geoseries', - }), + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-041/arco/NWSHELF_MULTIYEAR_BGC_004_011/cmems_mod_nws_bgc-chl_my_7km-3D_P1D-m_202012/timeChunked.zarr', 'variables': list([ dict({ @@ -1433,27 +1308,17 @@ ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 4, 'coordinate_id': 'time', - 'maximum_value': 1693440000000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1693440000000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5000, 4000, @@ -1482,28 +1347,20 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 375, 'coordinate_id': 
'latitude', + 'coordinate_unit': 'degrees_north', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 297, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, - 'units': 'degrees_east', - 'values': None, }), ]), 'short_name': 'chl', @@ -1514,10 +1371,8 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-time-series', - 'short_name': 'timeseries', - }), + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-041/arco/NWSHELF_MULTIYEAR_BGC_004_011/cmems_mod_nws_bgc-chl_my_7km-3D_P1D-m_202012/geoChunked.zarr', 'variables': list([ dict({ @@ -1529,27 +1384,17 @@ ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2156, 'coordinate_id': 'time', - 'maximum_value': 1693440000000, - 'minimum_value': 725846400000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1693440000000.0, + 'minimum_value': 725846400000.0, + 'step': 86400000.0, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5000, 4000, @@ -1578,28 +1423,20 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'latitude', + 'coordinate_unit': 'degrees_north', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 16, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, - 'units': 'degrees_east', - 'values': None, }), ]), 'short_name': 'chl', @@ -1624,14 +1461,10 @@ dict({ 'name': 'default', 'released_date': '2060-02-28T19:49:29.817792Z', - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-13/native/NWSHELF_MULTIYEAR_BGC_004_011/cmems_mod_nws_bgc-chl_my_7km-3D_P1M-m_202012', 'variables': list([ dict({ @@ -1651,10 +1484,8 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-geo-series', - 'short_name': 'geoseries', - }), + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-041/arco/NWSHELF_MULTIYEAR_BGC_004_011/cmems_mod_nws_bgc-chl_my_7km-3D_P1M-m_202012/timeChunked.zarr', 'variables': list([ dict({ @@ -1666,15 +1497,9 @@ ]), 'coordinates': list([ dict({ - 
'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 4, 'coordinate_id': 'time', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', 'values': list([ 725846400000, 728524800000, @@ -2047,15 +1872,9 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5000, 4000, @@ -2084,28 +1903,20 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 375, 'coordinate_id': 'latitude', + 'coordinate_unit': 'degrees_north', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 297, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, - 'units': 'degrees_east', - 'values': None, }), ]), 'short_name': 'chl', @@ -2116,10 +1927,8 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-time-series', - 'short_name': 'timeseries', - }), + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-041/arco/NWSHELF_MULTIYEAR_BGC_004_011/cmems_mod_nws_bgc-chl_my_7km-3D_P1M-m_202012/geoChunked.zarr', 'variables': list([ dict({ @@ -2131,15 +1940,9 @@ ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 72, 'coordinate_id': 'time', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', 'values': list([ 725846400000, 728524800000, @@ -2512,15 +2315,9 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5000, 4000, @@ -2549,28 +2346,20 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'latitude', + 'coordinate_unit': 'degrees_north', 'maximum_value': 65.00125122070312, 'minimum_value': 40.06666946411133, 'step': 0.06667000469676951, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 12.999670028686523, 'minimum_value': -19.88888931274414, 'step': 0.11110999133135821, - 'units': 'degrees_east', - 'values': None, }), ]), 'short_name': 'chl', @@ -2599,7 +2388,7 @@ ]), }) # --- -# name: TestDescribe.test_only_released_dataset_by_default +# name: 
TestDescribeReleaseDate.test_only_released_dataset_by_default dict({ 'products': list([ dict({ @@ -2614,22 +2403,18 @@ dict({ 'name': 'default', 'released_date': '2022-04-23T04:23:46.808754Z', - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-14/native/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_202211', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -2639,10 +2424,10 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ ]), @@ -2654,30 +2439,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-geo-series', - 'short_name': 'geoseries', - }), + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-time-007/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_202211/timeChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -2732,40 +2509,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 512, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2048, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo', @@ -2774,22 +2539,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -2844,40 +2603,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 
'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 512, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2048, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 1, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo', @@ -2888,30 +2635,22 @@ }), dict({ 'service_format': 'zarr', - 'service_type': dict({ - 'service_name': 'arco-time-series', - 'short_name': 'timeseries', - }), + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-007/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_mod_glo_phy-cur_anfc_0.083deg_P1D-m_202211/geoChunked.zarr', 'variables': list([ dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -2966,40 +2705,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 177, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'uo', @@ -3008,22 +2735,16 @@ }), dict({ 'bbox': list([ - -180, - -80, + -180.0, + -80.0, 179.91668701171875, - 90, + 90.0, ]), 'coordinates': list([ dict({ - 'chunk_geometric_factor': None, - 
'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 2, 'coordinate_id': 'depth', - 'maximum_value': None, - 'minimum_value': None, - 'step': None, - 'units': 'm', + 'coordinate_unit': 'm', 'values': list([ 5727.9169921875, 5274.7841796875, @@ -3078,40 +2799,28 @@ ]), }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 32, 'coordinate_id': 'latitude', - 'maximum_value': 90, - 'minimum_value': -80, + 'coordinate_unit': 'degrees_north', + 'maximum_value': 90.0, + 'minimum_value': -80.0, 'step': 0.08333333333333333, - 'units': 'degrees_north', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 64, 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', 'maximum_value': 179.91668701171875, - 'minimum_value': -180, + 'minimum_value': -180.0, 'step': 0.08333333804392655, - 'units': 'degrees_east', - 'values': None, }), dict({ - 'chunk_geometric_factor': None, - 'chunk_reference_coordinate': None, - 'chunk_type': None, 'chunking_length': 177, 'coordinate_id': 'time', - 'maximum_value': 1714608000000, - 'minimum_value': 1604188800000, - 'step': 86400000, - 'units': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', - 'values': None, + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1714608000000.0, + 'minimum_value': 1604188800000.0, + 'step': 86400000.0, }), ]), 'short_name': 'vo', @@ -3135,15 +2844,55 @@ 'parts': list([ dict({ 'name': 'default', - 'released_date': None, - 'retired_date': None, 'services': list([ dict({ - 'service_format': None, - 'service_type': dict({ - 'service_name': 'original-files', - 'short_name': 'files', - }), + 'service_format': 'zarr', + 'service_name': 'arco-geo-series', + 'service_short_name': 'geoseries', + 'uri': 'https://s3.waw3-1.cloudferro.com:443/mdl-arco-time-005-dta/arco/OCEANCOLOUR_GLO_BGC_L3_MY_009_103/cmems_obs-oc_glo_bgc-plankton_my_l3-olci-300m_P1D_202211/timeChunked.zarr', + 'variables': list([ + dict({ + 'bbox': list([ + -179.99722290039062, + -89.99722290039062, + 179.9972381591797, + 89.99722290039062, + ]), + 'coordinates': list([ + dict({ + 'chunking_length': 1, + 'coordinate_id': 'time', + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1487376000000.0, + 'minimum_value': 1461542400000.0, + 'step': 86400000.0, + }), + dict({ + 'chunking_length': 400, + 'coordinate_id': 'latitude', + 'coordinate_unit': 'degrees_north', + 'maximum_value': 89.99722290039062, + 'minimum_value': -89.99722290039062, + 'step': 0.0055555555974190944, + }), + dict({ + 'chunking_length': 1200, + 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', + 'maximum_value': 179.9972381591797, + 'minimum_value': -179.99722290039062, + 'step': 0.0055555555764870024, + }), + ]), + 'short_name': 'CHL', + 'standard_name': 'mass_concentration_of_chlorophyll_a_in_sea_water', + 'units': 'milligram m-3', + }), + ]), + }), + dict({ + 'service_name': 'original-files', + 'service_short_name': 'files', 'uri': 'https://s3.waw3-1.cloudferro.com/mdl-native-dta/native/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_obs-oc_glo_bgc-plankton_my_l3-olci-300m_P1D_202211', 'variables': list([ dict({ @@ -3161,6 +2910,51 @@ }), ]), }), + dict({ + 'service_format': 'zarr', + 'service_name': 'arco-time-series', + 'service_short_name': 'timeseries', + 'uri': 
'https://s3.waw3-1.cloudferro.com/mdl-arco-geo-005-dta/arco/GLOBAL_ANALYSISFORECAST_PHY_001_024/cmems_obs-oc_glo_bgc-plankton_my_l3-olci-300m_P1D_202211/geoChunked.zarr', + 'variables': list([ + dict({ + 'bbox': list([ + -179.99722290039062, + -89.99722290039062, + 179.9972381591797, + 89.99722290039062, + ]), + 'coordinates': list([ + dict({ + 'chunking_length': 300, + 'coordinate_id': 'time', + 'coordinate_unit': 'milliseconds since 1970-01-01 00:00:00Z (no leap seconds)', + 'maximum_value': 1487376000000.0, + 'minimum_value': 1461542400000.0, + 'step': 86400000.0, + }), + dict({ + 'chunking_length': 60, + 'coordinate_id': 'latitude', + 'coordinate_unit': 'degrees_north', + 'maximum_value': 89.99722290039062, + 'minimum_value': -89.99722290039062, + 'step': 0.0055555555974190944, + }), + dict({ + 'chunking_length': 60, + 'coordinate_id': 'longitude', + 'coordinate_unit': 'degrees_east', + 'maximum_value': 179.9972381591797, + 'minimum_value': -179.99722290039062, + 'step': 0.0055555555764870024, + }), + ]), + 'short_name': 'CHL', + 'standard_name': 'mass_concentration_of_chlorophyll_a_in_sea_water', + 'units': 'milligram m-3', + }), + ]), + }), ]), }), ]), diff --git a/tests/__snapshots__/test_help_command_interface.ambr b/tests/__snapshots__/test_help_command_interface.ambr index 629e69a7..d484f488 100644 --- a/tests/__snapshots__/test_help_command_interface.ambr +++ b/tests/__snapshots__/test_help_command_interface.ambr @@ -18,7 +18,7 @@ # --- # name: TestHelpCommandLineInterface.test_help_from_describe_is_as_expected list([ - "CompletedProcess(args=['copernicusmarine', 'describe', '--help'], returncode=0, stdout=b'Usage: copernicusmarine describe [OPTIONS]", + 'CompletedProcess(args=[\'copernicusmarine\', \'describe\', \'--help\'], returncode=0, stdout=b"Usage: copernicusmarine describe [OPTIONS]', '', ' Retrieve and parse the metadata information from the Copernicus Marine', ' catalogue.', @@ -26,16 +26,35 @@ ' Returns JSON A dictionary containing the retrieved metadata information.', '', 'Options:', - ' --include-description Include product description in output.', - ' --include-datasets Include product dataset details in output.', - ' --include-keywords Include product keyword details in output.', + ' --include-description Deprecated. Include product description in', + ' output.', + ' --include-datasets Deprecated. Include product dataset details', + ' in output.', + ' --include-keywords Deprecated. Include product keyword details', + ' in output.', ' --include-versions Include dataset versions in output. By', ' default, shows only the default version.', - ' -a, --include-all Include all the possible data in output:', - ' description, datasets, keywords, and', + ' -a, --include-all Deprecated. Include all the possible data in', + ' output: description, datasets, keywords, and', ' versions.', + ' -r, --returned-fields TEXT Option to specify the fields to return in', + ' the output. The fields are separated by a', + " comma. You can use 'all' to return all", + ' fields.', + ' -e, --returned-fields-exclude TEXT', + ' Option to specify the fields to exclude from', + ' the output. The fields are separated by a', + ' comma.', ' -c, --contains TEXT Filter catalogue output. Returns products', ' with attributes matching a string token.', + ' -p, --product-id TEXT Force the productID to be used for the', + ' describe command. 
Will not parse the whole', + ' catalogue, but only the product with the', + ' given productID.', + ' -i, --dataset-id TEXT Force the datasetID to be used for the', + ' describe command. Will not parse the whole', + ' catalogue, but only the dataset with the', + ' given datasetID.', ' --max-concurrent-requests INTEGER', ' Maximum number of concurrent requests (>=1).', ' Default 15. The command uses a thread pool', @@ -50,13 +69,13 @@ ' Examples:', '', '', - ' copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --include-datasets', + ' copernicusmarine describe --contains METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 --returned-fields datasets', '', '', ' copernicusmarine describe -c METOFFICE-GLO-SST-L4-NRT-OBS-SST-V2 ', '', ' ', - "', stderr=b'')", + '", stderr=b\'\')', ]) # --- # name: TestHelpCommandLineInterface.test_help_from_get_is_as_expected @@ -315,8 +334,8 @@ ' services using the service name among', " ['arco-geo-series', 'arco-time-series',", " 'omi-arco', 'static-arco'] or its short name", - " among ['arco-geo-series', 'arco-time-", - " series', 'omi-arco', 'static-arco'].", + " among ['geoseries', 'timeseries', 'omi-", + " arco', 'static-arco'].", ' --create-template Option to create a file', ' _template.json in your current', ' directory containing the arguments. If', diff --git a/tests/test_command_line_interface.py b/tests/test_command_line_interface.py index 38ad44a0..acc06bfc 100644 --- a/tests/test_command_line_interface.py +++ b/tests/test_command_line_interface.py @@ -12,12 +12,6 @@ import xarray -from copernicusmarine.catalogue_parser.models import ( - PART_DEFAULT, - REGEX_PATTERN_DATE_YYYYMM, - VERSION_DEFAULT, - CopernicusMarineDatasetServiceType, -) from tests.test_utils import ( execute_in_terminal, remove_extra_logging_prefix_info, @@ -41,366 +35,6 @@ def get_file_size(filepath): class TestCommandLineInterface: - def test_describe_default(self): - self.when_I_run_copernicus_marine_describe_with_default_arguments() - self.then_stdout_can_be_load_as_json() - self.then_I_can_read_the_default_json() - self.and_there_are_no_warnings_about_backend_versions() - - def test_describe_including_datasets(self, snapshot): - self.when_I_run_copernicus_marine_describe_including_datasets() - self.then_I_can_read_it_does_not_contain_weird_symbols() - self.then_I_can_read_the_json_including_datasets() - self.then_omi_services_are_not_in_the_catalog() - self.then_products_from_marine_data_store_catalog_are_available() - self.then_datasets_variables_are_correct(snapshot) - self.then_all_dataset_parts_are_filled() - - def test_describe_contains_option(self): - self.when_I_run_copernicus_marine_describe_with_contains_option() - self.then_I_can_read_the_filtered_json() - - def test_describe_with_staging_flag(self): - self.when_I_use_staging_environment_in_debug_logging_level() - self.then_I_check_that_the_urls_contains_only_dta() - - def when_I_run_copernicus_marine_describe_with_default_arguments(self): - command = ["copernicusmarine", "describe"] - self.output = execute_in_terminal(command, timeout_second=30) - - def then_stdout_can_be_load_as_json(self): - loads(self.output.stdout.decode("utf-8")) - - def then_I_can_read_the_default_json(self): - json_result = loads(self.output.stdout.decode("utf-8")) - assert len(json_result["products"]) >= 270 - for product in json_result["products"]: - assert product["title"] is not None - assert product["product_id"] is not None - assert product["thumbnail_url"] is not None - assert "digital_object_identifier" in product - assert 
product["sources"] is not None - assert "processing_level" in product - assert product["production_center"] is not None - - def and_there_are_no_warnings_about_backend_versions(self): - assert ( - b"Please update to the latest client version." - not in self.output.stderr - ) - - def then_omi_services_are_not_in_the_catalog(self): - json_result = loads(self.output.stdout) - for product in json_result["products"]: - for dataset in product["datasets"]: - for version in dataset["versions"]: - for part in version["parts"]: - assert "omi" not in list( - map( - lambda x: x["service_type"]["service_name"], - part["services"], - ) - ) - - def then_products_from_marine_data_store_catalog_are_available(self): - expected_product_id = "NWSHELF_ANALYSISFORECAST_PHY_004_013" - expected_dataset_id = "cmems_mod_nws_phy_anfc_0.027deg-2D_PT15M-i" - expected_services = [ - "original-files", - "arco-geo-series", - "arco-time-series", - "wmts", - ] - - json_result = loads(self.output.stdout) - expected_product = list( - filter( - lambda product: product["product_id"] == expected_product_id, - json_result["products"], - ) - ) - assert len(expected_product) == 1 - product = expected_product[0] - product_datasets = product["datasets"] - expected_dataset = list( - filter( - lambda product: product["dataset_id"] == expected_dataset_id, - product_datasets, - ) - ) - assert len(expected_dataset) == 1 - dataset = expected_dataset[0] - expected_dataset_services = list( - map( - lambda x: x["service_type"]["service_name"], - dataset["versions"][0]["parts"][0]["services"], - ) - ) - assert all( - map(lambda x: x in expected_services, expected_dataset_services) - ) - - def then_datasets_variables_are_correct(self, snapshot): - expected_product_id = "GLOBAL_MULTIYEAR_PHY_ENS_001_031" - expected_dataset_id = "cmems_mod_glo_phy-all_my_0.25deg_P1D-m" - wanted_services = [ - "original-files", - "arco-geo-series", - "arco-time-series", - ] - json_result = loads(self.output.stdout) - expected_product = list( - filter( - lambda product: product["product_id"] == expected_product_id, - json_result["products"], - ) - ) - product = expected_product[0] - product_datasets = product["datasets"] - expected_dataset = list( - filter( - lambda product: product["dataset_id"] == expected_dataset_id, - product_datasets, - ) - ) - dataset = expected_dataset[0] - wanted_services_in_dataset = list( - filter( - lambda x: x["service_type"]["service_name"] in wanted_services, - dataset["versions"][0]["parts"][0]["services"], - ) - ) - assert snapshot == wanted_services_in_dataset - - def then_all_dataset_parts_are_filled(self): - expected_product_id = "BALTICSEA_ANALYSISFORECAST_BGC_003_007" - expected_dataset_id = "cmems_mod_bal_bgc_anfc_static" - - json_result = loads(self.output.stdout) - expected_product = list( - filter( - lambda product: product["product_id"] == expected_product_id, - json_result["products"], - ) - ) - assert len(expected_product) == 1 - product = expected_product[0] - - expected_dataset = list( - filter( - lambda product: product["dataset_id"] == expected_dataset_id, - product["datasets"], - ) - ) - - assert len(expected_dataset) == 1 - dataset = expected_dataset[0] - - for version in dataset["versions"]: - non_default_parts = list( - filter( - lambda part: part["name"] != PART_DEFAULT, version["parts"] - ) - ) - - assert len(non_default_parts) > 0 - - version_ordered = sorted( - dataset["versions"], - key=lambda x: ( - x["label"] if x["label"] != VERSION_DEFAULT else "110001" - ), - reverse=True, - ) - - latest_version = 
version_ordered[0] - maybe_default_part = list( - filter( - lambda part: part["name"] == PART_DEFAULT, - latest_version["parts"], - ) - ) - assert len(maybe_default_part) == 0 - - def when_I_run_copernicus_marine_describe_with_contains_option(self): - filter_token = "OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_n" - command = [ - "copernicusmarine", - "describe", - "--contains", - f"{filter_token}", - ] - self.output = execute_in_terminal(command, timeout_second=30) - - def then_I_can_read_the_filtered_json(self): - json_result = loads(self.output.stdout) - assert len(json_result["products"]) == 2 - assert ( - json_result["products"][0]["product_id"] - == "OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_nag_area_mean" - ) - assert json_result["products"][0]["production_center"] == "PML (UK)" - assert ( - json_result["products"][0]["thumbnail_url"] - == "https://catalogue.marine.copernicus.eu/documents/IMG/OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_nag_area_mean.png" # noqa - ) - assert ( - json_result["products"][0]["title"] - == "North Atlantic Gyre Area Chlorophyll-a time series and trend from Observations Reprocessing" # noqa - ) - - assert ( - json_result["products"][1]["product_id"] - == "OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_npg_area_mean" - ) - assert json_result["products"][1]["production_center"] == "PML (UK)" - assert ( - json_result["products"][1]["thumbnail_url"] - == "https://catalogue.marine.copernicus.eu/documents/IMG/OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_npg_area_mean.png" # noqa - ) - assert ( - json_result["products"][1]["title"] - == "North Pacific Gyre Area Chlorophyll-a time series and trend from Observations Reprocessing" # noqa - ) - - def when_I_run_copernicus_marine_describe_including_datasets(self): - command = [ - "copernicusmarine", - "describe", - "--include-datasets", - ] - self.output = execute_in_terminal(command, timeout_second=30) - - def then_I_can_read_it_does_not_contain_weird_symbols(self): - assert b"__" not in self.output.stdout - assert b" _" not in self.output.stdout - # assert b"_ " not in self.output - assert b'"_' not in self.output.stdout - assert b'_"' not in self.output.stdout - - def then_I_can_read_the_json_including_datasets(self): - json_result = loads(self.output.stdout) - assert len(json_result["products"]) >= 270 - for product in json_result["products"]: - assert product["title"] is not None - assert product["product_id"] is not None - assert product["thumbnail_url"] is not None - assert "digital_object_identifier" in product - assert product["sources"] is not None - assert "processing_level" in product - assert product["production_center"] is not None - assert "datasets" in product - assert product[ - "datasets" - ], f"No datasets found for product {product['product_id']}" - for dataset in product["datasets"]: - assert dataset["dataset_id"] is not None - assert dataset["dataset_name"] is not None - version_labels = list( - map(lambda x: x["label"], dataset["versions"]) - ) - assert len(version_labels) == len(set(version_labels)) - for version in dataset["versions"]: - assert re.match( - rf"({VERSION_DEFAULT}|{REGEX_PATTERN_DATE_YYYYMM})", - version["label"], - ) - parts = version["parts"] - assert len(parts) != 0 - has_default_part = ( - len( - list( - filter( - lambda x: x["name"] == PART_DEFAULT, parts - ) - ) - ) - > 0 - ) - if has_default_part: - # If there is a "default" part, then it is the only one - assert len(parts) == 1 - else: - # Else, there is no "default" part at all - assert all( - map(lambda x: x["name"] != PART_DEFAULT, parts) - 
-                        )
-                    part_names = list(
-                        map(lambda x: x["name"], version["parts"])
-                    )
-                    assert len(part_names) == len(set(part_names))
-                    for part in parts:
-                        assert part["name"] is not None
-                        assert part["name"] != ""
-                        services = part["services"]
-                        assert len(services) != 0, dataset["dataset_id"]
-                        service_names = list(
-                            map(
-                                lambda x: x["service_type"]["service_name"],
-                                services,
-                            )
-                        )
-                        assert len(service_names) == len(set(service_names))
-                        if (
-                            CopernicusMarineDatasetServiceType.OMI_ARCO.service_name.value  # noqa
-                            in service_names
-                        ):
-                            assert (
-                                CopernicusMarineDatasetServiceType.GEOSERIES.service_name.value  # noqa
-                                not in service_names
-                            )
-                            assert (
-                                CopernicusMarineDatasetServiceType.TIMESERIES.service_name.value  # noqa
-                                not in service_names
-                            )
-                            assert (
-                                CopernicusMarineDatasetServiceType.STATIC_ARCO.service_name.value  # noqa
-                                not in service_names
-                            )
-                        if (
-                            CopernicusMarineDatasetServiceType.STATIC_ARCO.service_name.value  # noqa
-                            in service_names
-                        ):
-                            assert (
-                                CopernicusMarineDatasetServiceType.GEOSERIES.service_name.value  # noqa
-                                not in service_names
-                            )
-                            assert (
-                                CopernicusMarineDatasetServiceType.TIMESERIES.service_name.value  # noqa
-                                not in service_names
-                            )
-                            assert (
-                                CopernicusMarineDatasetServiceType.OMI_ARCO.service_name.value  # noqa
-                                not in service_names
-                            )
-                        if service_names in (
-                            CopernicusMarineDatasetServiceType.GEOSERIES,
-                            CopernicusMarineDatasetServiceType.TIMESERIES,
-                        ):
-                            assert (
-                                CopernicusMarineDatasetServiceType.OMI_ARCO.service_name.value  # noqa
-                                not in service_names
-                            )
-                            assert (
-                                CopernicusMarineDatasetServiceType.STATIC_ARCO.service_name.value  # noqa
-                                not in service_names
-                            )
-
-    def when_I_use_staging_environment_in_debug_logging_level(self):
-        command = [
-            "copernicusmarine",
-            "describe",
-            "--staging",
-            "--log-level",
-            "DEBUG",
-        ]
-        self.output = execute_in_terminal(command)
-
-    def then_I_check_that_the_urls_contains_only_dta(self):
-        assert (
-            b"https://s3.waw3-1.cloudferro.com/mdl-metadata/"
-            not in self.output.stdout
-        )

 # -------------------------#
 # Test on subset requests #
diff --git a/tests/test_deprecated_options.py b/tests/test_deprecated_options.py
index cdc8b582..aeb55406 100644
--- a/tests/test_deprecated_options.py
+++ b/tests/test_deprecated_options.py
@@ -1,2 +1,49 @@
+from copernicusmarine import describe
+from tests.test_utils import execute_in_terminal
+
+
 class TestDeprecatedOptions:
-    pass
+    def test_describe_include_options_are_deprecated_cli(self):
+        command = [
+            "copernicusmarine",
+            "describe",
+            "--include-description",
+            "--include-datasets",
+            "--include-keywords",
+            "--include-all",
+            "--dataset-id",
+            "cmems_mod_glo_phy_my_0.083deg_P1D-m",
+        ]
+        self.output = execute_in_terminal(command)
+
+        assert (
+            b"'--include-datasets' has been deprecated, use "
+            b"'--returned-fields datasets' instead" in self.output.stderr
+        )
+        assert (
+            b"'--include-keywords' has been deprecated, use "
+            b"'--returned-fields keywords' instead" in self.output.stderr
+        )
+        assert (
+            b"'--include-all' has been deprecated, use "
+            b"'--returned-fields all' instead" in self.output.stderr
+        )
+        assert (
+            b"'--include-description' has been deprecated, use "
+            b"'--returned-fields description' instead" in self.output.stderr
+        )
+        assert self.output.returncode == 0
+
+    def test_describe_include_options_are_deprecated_python_api(self, caplog):
+        describe(
+            dataset_id="cmems_mod_glo_phy_my_0.083deg_P1D-m",
+            include_description=True,
+            include_datasets=True,
+            include_keywords=True,
+            include_all=True,
+        )
+
"'include_datasets' has been deprecated" in caplog.text + assert "'include_keywords' has been deprecated" in caplog.text + assert "'include_all' has been deprecated" in caplog.text + assert "'include_description' has been deprecated" in caplog.text diff --git a/tests/test_describe.py b/tests/test_describe.py new file mode 100644 index 00000000..0ebc597f --- /dev/null +++ b/tests/test_describe.py @@ -0,0 +1,506 @@ +import re +from json import loads + +from copernicusmarine import ( + CopernicusMarineCatalogue, + CopernicusMarineServiceNames, + describe, +) +from copernicusmarine.catalogue_parser.models import ( + PART_DEFAULT, + REGEX_PATTERN_DATE_YYYYMM, + VERSION_DEFAULT, +) +from tests.test_utils import execute_in_terminal + + +class TestDescribe: + # CLI (Command Line Interface) tests + def test_describe_default(self): + self.when_I_run_copernicus_marine_describe_with_default_arguments() + self.then_stdout_can_be_load_as_json() + self.then_I_can_read_the_default_json() + self.and_there_are_no_warnings_about_backend_versions() + + def test_describe_returned_fields_datasets(self, snapshot): + self.when_I_run_copernicus_marine_describe_including_datasets() + self.then_I_can_read_it_does_not_contain_weird_symbols() + self.then_I_can_read_the_json_including_datasets() + self.then_omi_services_are_not_in_the_catalog() + self.then_products_from_marine_data_store_catalog_are_available() + self.then_datasets_variables_are_correct(snapshot) + self.then_all_dataset_parts_are_filled() + + def test_describe_product_id_dataset_id(self): + dataset_id = "cmems_mod_glo_phy_my_0.083deg_P1D-m" + product_id = "GLOBAL_MULTIYEAR_PHY_001_030" + different_product_id = "ANTARCTIC_OMI_SI_extent" + self.when_I_run_copernicus_marine_describe_with_product_id_and_dataset_id( + product_id, None + ) + self.then_stdout_can_be_load_as_json() + self.then_I_have_only_one_product() + self.when_I_run_copernicus_marine_describe_with_product_id_and_dataset_id( + None, dataset_id + ) + self.then_stdout_can_be_load_as_json() + self.then_I_have_only_one_product_and_one_dataset() + self.when_I_run_copernicus_marine_describe_with_product_id_and_dataset_id( + product_id, dataset_id + ) + self.then_stdout_can_be_load_as_json() + self.then_I_have_only_one_product_and_one_dataset() + self.when_I_run_copernicus_marine_describe_with_product_id_and_dataset_id( + different_product_id, dataset_id + ) + self.then_I_have_an_error_message_about_dataset_id_and_product_id() + + def test_describe_contains_option(self): + self.when_I_run_copernicus_marine_describe_with_contains_option() + self.then_I_can_read_the_filtered_json() + + def test_describe_with_staging_flag(self): + self.when_I_use_staging_environment_in_debug_logging_level() + self.then_I_check_that_the_urls_contains_only_dta() + + def test_describe_function_with_returned_fields(self): + self.when_I_run_copernicus_marine_describe_with_returned_fields() + self.then_stdout_can_be_load_as_json() + self.then_only_the_queried_fields_are_returned() + + def test_describe_exclude_datasets(self): + product_id = "GLOBAL_MULTIYEAR_PHY_001_030" + self.when_I_run_copernicus_marine_describe_with_product_id_and_dataset_id( + product_id, None, exclude="services" + ) + json_result = loads(self.output.stdout.decode("utf-8")) + for product in json_result["products"]: + for dataset in product["datasets"]: + for version in dataset["versions"]: + for part in version["parts"]: + assert "services" not in part + + def when_I_run_copernicus_marine_describe_with_default_arguments(self): + command = 
["copernicusmarine", "describe"] + self.output = execute_in_terminal(command, timeout_second=30) + + def then_stdout_can_be_load_as_json(self): + loads(self.output.stdout.decode("utf-8")) + + def then_I_can_read_the_default_json(self): + json_result = loads(self.output.stdout.decode("utf-8")) + # TODO: increase number after November release + assert len(json_result["products"]) >= 270 + seen_processing_level = False + seen_digital_object_identifier = False + for product in json_result["products"]: + assert product["title"] is not None + assert product["product_id"] is not None + assert product["thumbnail_url"] is not None + seen_digital_object_identifier = ( + seen_digital_object_identifier + or ("digital_object_identifier" in product) + ) + assert product["sources"] is not None + seen_processing_level = ( + seen_processing_level or "processing_level" in product + ) + assert product["production_center"] is not None + assert seen_processing_level + assert seen_digital_object_identifier + + def and_there_are_no_warnings_about_backend_versions(self): + assert ( + b"Please update to the latest client version." + not in self.output.stderr + ) + + def then_omi_services_are_not_in_the_catalog(self): + json_result = loads(self.output.stdout) + for product in json_result["products"]: + for dataset in product["datasets"]: + for version in dataset["versions"]: + for part in version["parts"]: + assert "omi" not in list( + map( + lambda x: x["service_name"], + part["services"], + ) + ) + + def then_products_from_marine_data_store_catalog_are_available(self): + expected_product_id = "NWSHELF_ANALYSISFORECAST_PHY_004_013" + expected_dataset_id = "cmems_mod_nws_phy_anfc_0.027deg-2D_PT15M-i" + expected_services = [ + "original-files", + "arco-geo-series", + "arco-time-series", + "wmts", + ] + + json_result = loads(self.output.stdout) + expected_product = list( + filter( + lambda product: product["product_id"] == expected_product_id, + json_result["products"], + ) + ) + assert len(expected_product) == 1 + product = expected_product[0] + product_datasets = product["datasets"] + expected_dataset = list( + filter( + lambda product: product["dataset_id"] == expected_dataset_id, + product_datasets, + ) + ) + assert len(expected_dataset) == 1 + dataset = expected_dataset[0] + expected_dataset_services = list( + map( + lambda x: x["service_name"], + dataset["versions"][0]["parts"][0]["services"], + ) + ) + assert all( + map(lambda x: x in expected_services, expected_dataset_services) + ) + + def then_datasets_variables_are_correct(self, snapshot): + expected_product_id = "GLOBAL_MULTIYEAR_PHY_ENS_001_031" + expected_dataset_id = "cmems_mod_glo_phy-all_my_0.25deg_P1D-m" + wanted_services = [ + "original-files", + "arco-geo-series", + "arco-time-series", + ] + json_result = loads(self.output.stdout) + expected_product = list( + filter( + lambda product: product["product_id"] == expected_product_id, + json_result["products"], + ) + ) + product = expected_product[0] + product_datasets = product["datasets"] + expected_dataset = list( + filter( + lambda product: product["dataset_id"] == expected_dataset_id, + product_datasets, + ) + ) + dataset = expected_dataset[0] + wanted_services_in_dataset = list( + filter( + lambda x: x["service_name"] in wanted_services, + dataset["versions"][0]["parts"][0]["services"], + ) + ) + assert snapshot == wanted_services_in_dataset + + def then_all_dataset_parts_are_filled(self): + expected_product_id = "BALTICSEA_ANALYSISFORECAST_BGC_003_007" + expected_dataset_id = 
"cmems_mod_bal_bgc_anfc_static" + + json_result = loads(self.output.stdout) + expected_product = list( + filter( + lambda product: product["product_id"] == expected_product_id, + json_result["products"], + ) + ) + assert len(expected_product) == 1 + product = expected_product[0] + + expected_dataset = list( + filter( + lambda product: product["dataset_id"] == expected_dataset_id, + product["datasets"], + ) + ) + + assert len(expected_dataset) == 1 + dataset = expected_dataset[0] + + for version in dataset["versions"]: + non_default_parts = list( + filter( + lambda part: part["name"] != PART_DEFAULT, version["parts"] + ) + ) + + assert len(non_default_parts) > 0 + + version_ordered = sorted( + dataset["versions"], + key=lambda x: ( + x["label"] if x["label"] != VERSION_DEFAULT else "110001" + ), + reverse=True, + ) + + latest_version = version_ordered[0] + maybe_default_part = list( + filter( + lambda part: part["name"] == PART_DEFAULT, + latest_version["parts"], + ) + ) + assert len(maybe_default_part) == 0 + + def when_I_run_copernicus_marine_describe_with_contains_option(self): + filter_token = "OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_n" + command = [ + "copernicusmarine", + "describe", + "--contains", + f"{filter_token}", + ] + self.output = execute_in_terminal(command, timeout_second=30) + + def then_I_can_read_the_filtered_json(self): + json_result = loads(self.output.stdout) + assert len(json_result["products"]) == 2 + assert ( + json_result["products"][0]["product_id"] + == "OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_nag_area_mean" + ) + assert json_result["products"][0]["production_center"] == "PML (UK)" + assert ( + json_result["products"][0]["thumbnail_url"] + == "https://catalogue.marine.copernicus.eu/documents/IMG/OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_nag_area_mean.png" # noqa + ) + assert ( + json_result["products"][0]["title"] + == "North Atlantic Gyre Area Chlorophyll-a time series and trend from Observations Reprocessing" # noqa + ) + + assert ( + json_result["products"][1]["product_id"] + == "OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_npg_area_mean" + ) + assert json_result["products"][1]["production_center"] == "PML (UK)" + assert ( + json_result["products"][1]["thumbnail_url"] + == "https://catalogue.marine.copernicus.eu/documents/IMG/OMI_HEALTH_CHL_GLOBAL_OCEANCOLOUR_oligo_npg_area_mean.png" # noqa + ) + assert ( + json_result["products"][1]["title"] + == "North Pacific Gyre Area Chlorophyll-a time series and trend from Observations Reprocessing" # noqa + ) + + def when_I_run_copernicus_marine_describe_including_datasets(self): + command = [ + "copernicusmarine", + "describe", + "--returned-fields", + "all", + "--returned-fields-exclude", + "keywords,description", + ] + self.output = execute_in_terminal(command, timeout_second=30) + + def then_I_can_read_it_does_not_contain_weird_symbols(self): + assert b"__" not in self.output.stdout + assert b" _" not in self.output.stdout + assert b"_ " not in self.output.stdout + assert b'"_' not in self.output.stdout + assert b'_"' not in self.output.stdout + + def then_I_can_read_the_json_including_datasets(self): + json_result = loads(self.output.stdout) + assert len(json_result["products"]) >= 270 + seen_processing_level = False + seen_digital_object_identifier = False + for product in json_result["products"]: + assert product["title"] is not None + assert product["product_id"] is not None + assert product["thumbnail_url"] is not None + seen_digital_object_identifier = ( + seen_digital_object_identifier + or 
("digital_object_identifier" in product) + ) + assert product["sources"] is not None + seen_processing_level = ( + seen_processing_level or "processing_level" in product + ) + assert product["production_center"] is not None + assert "datasets" in product + assert product[ + "datasets" + ], f"No datasets found for product {product['product_id']}" + for dataset in product["datasets"]: + assert dataset["dataset_id"] is not None + assert dataset["dataset_name"] is not None + version_labels = list( + map(lambda x: x["label"], dataset["versions"]) + ) + assert len(version_labels) == len(set(version_labels)) + for version in dataset["versions"]: + assert re.match( + rf"({VERSION_DEFAULT}|{REGEX_PATTERN_DATE_YYYYMM})", + version["label"], + ) + parts = version["parts"] + assert len(parts) != 0 + has_default_part = ( + len( + list( + filter( + lambda x: x["name"] == PART_DEFAULT, parts + ) + ) + ) + > 0 + ) + if has_default_part: + # If there is a "default" part, then it is the only one + assert len(parts) == 1 + else: + # Else, there is no "default" part at all + assert all( + map(lambda x: x["name"] != PART_DEFAULT, parts) + ) + part_names = list( + map(lambda x: x["name"], version["parts"]) + ) + assert len(part_names) == len(set(part_names)) + for part in parts: + assert part["name"] is not None + assert part["name"] != "" + services = part["services"] + assert len(services) != 0, dataset["dataset_id"] + service_names = list( + map( + lambda x: x["service_name"], + services, + ) + ) + assert len(service_names) == len(set(service_names)) + if ( + CopernicusMarineServiceNames.OMI_ARCO.value # noqa + in service_names + ): + assert ( + CopernicusMarineServiceNames.GEOSERIES.value # noqa + not in service_names + ) + assert ( + CopernicusMarineServiceNames.TIMESERIES.value # noqa + not in service_names + ) + assert ( + CopernicusMarineServiceNames.STATIC_ARCO.value # noqa + not in service_names + ) + if ( + CopernicusMarineServiceNames.STATIC_ARCO.value # noqa + in service_names + ): + assert ( + CopernicusMarineServiceNames.GEOSERIES.value # noqa + not in service_names + ) + assert ( + CopernicusMarineServiceNames.TIMESERIES.value # noqa + not in service_names + ) + assert ( + CopernicusMarineServiceNames.OMI_ARCO.value # noqa + not in service_names + ) + if service_names in ( + CopernicusMarineServiceNames.GEOSERIES, + CopernicusMarineServiceNames.TIMESERIES, + ): + assert ( + CopernicusMarineServiceNames.OMI_ARCO.value # noqa + not in service_names + ) + assert ( + CopernicusMarineServiceNames.STATIC_ARCO.value # noqa + not in service_names + ) + assert seen_processing_level + assert seen_digital_object_identifier + + def when_I_run_copernicus_marine_describe_with_product_id_and_dataset_id( + self, product_id, dataset_id, exclude=None + ): + command = ["copernicusmarine", "describe", "--returned-fields", "all"] + if product_id: + command.extend(["--product-id", product_id]) + if dataset_id: + command.extend(["--dataset-id", dataset_id]) + if exclude: + command.extend(["--returned-fields-exclude", exclude]) + self.output = execute_in_terminal(command, timeout_second=10) + + def then_I_have_only_one_product(self): + json_result = loads(self.output.stdout) + assert len(json_result["products"]) == 1 + + def then_I_have_only_one_product_and_one_dataset(self): + json_result = loads(self.output.stdout) + assert len(json_result["products"]) == 1 + assert len(json_result["products"][0]["datasets"]) == 1 + + def then_I_have_an_error_message_about_dataset_id_and_product_id(self): + assert 
+
+    def when_I_run_copernicus_marine_describe_with_product_id_and_dataset_id(
+        self, product_id, dataset_id, exclude=None
+    ):
+        command = ["copernicusmarine", "describe", "--returned-fields", "all"]
+        if product_id:
+            command.extend(["--product-id", product_id])
+        if dataset_id:
+            command.extend(["--dataset-id", dataset_id])
+        if exclude:
+            command.extend(["--returned-fields-exclude", exclude])
+        self.output = execute_in_terminal(command, timeout_second=10)
+
+    def then_I_have_only_one_product(self):
+        json_result = loads(self.output.stdout)
+        assert len(json_result["products"]) == 1
+
+    def then_I_have_only_one_product_and_one_dataset(self):
+        json_result = loads(self.output.stdout)
+        assert len(json_result["products"]) == 1
+        assert len(json_result["products"][0]["datasets"]) == 1
+
+    def then_I_have_an_error_message_about_dataset_id_and_product_id(self):
+        assert self.output.returncode == 1
+        assert b"Dataset is not part of the product" in self.output.stderr
+
+    def when_I_use_staging_environment_in_debug_logging_level(self):
+        command = [
+            "copernicusmarine",
+            "describe",
+            "--staging",
+            "--log-level",
+            "DEBUG",
+        ]
+        self.output = execute_in_terminal(command)
+
+    def then_I_check_that_the_urls_contains_only_dta(self):
+        assert (
+            b"https://s3.waw3-1.cloudferro.com/mdl-metadata/"
+            not in self.output.stdout
+        )
+
+    def when_I_run_copernicus_marine_describe_with_returned_fields(self):
+        command = [
+            "copernicusmarine",
+            "describe",
+            "-i",
+            "cmems_mod_glo_phy_my_0.083deg_P1D-m",
+            "--returned-fields",
+            "product_id,datasets",
+            "--returned-fields-exclude",
+            "services",
+        ]
+        self.output = execute_in_terminal(command, timeout_second=10)
+
+    def then_only_the_queried_fields_are_returned(self):
+        json_result = loads(self.output.stdout)
+        for product in json_result["products"]:
+            assert set(product.keys()) == {"product_id", "datasets"}
+            for dataset in product["datasets"]:
+                assert set(dataset.keys()) == {
+                    "dataset_id",
+                    "versions",
+                    "dataset_name",
+                }
+                for version in dataset["versions"]:
+                    assert set(version.keys()) == {"parts", "label"}
+                    for part in version["parts"]:
+                        assert "services" not in set(part.keys())
+
+    # ######################
+    # Python API tests
+    # ######################
+    def test_describe_function(self):
+        describe_result = describe()
+        assert describe_result is not None
+        assert isinstance(describe_result, CopernicusMarineCatalogue)
+
+    def test_describe_function_with_contains(self):
+        nwshelf_catalog = describe(contains=["NWSHELF"])
+        assert len(nwshelf_catalog.products) == 7
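As the Python API tests above show, `describe` now returns a `CopernicusMarineCatalogue` model rather than a dict. A minimal usage sketch (attribute names are inferred from the JSON fields asserted throughout these tests):

    from copernicusmarine import describe

    catalogue = describe(contains=["NWSHELF"])
    for product in catalogue.products:
        # Attribute access replaces the old dict-style lookups.
        print(product.product_id)

    # A plain-dict view, as used by the snapshot tests below:
    as_dict = catalogue.model_dump(exclude_none=True)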
diff --git a/tests/test_describe_released_date.py b/tests/test_describe_released_date.py
index 2f507cb9..8195ba56 100644
--- a/tests/test_describe_released_date.py
+++ b/tests/test_describe_released_date.py
@@ -1,12 +1,16 @@
 from unittest import mock

-from copernicusmarine import describe
+from copernicusmarine import CopernicusMarineCatalogue, describe
+from copernicusmarine.catalogue_parser.fields_query_builder import QueryBuilder
 from tests.resources.mock_stac_catalog.marine_data_store_stac_metadata_mock import (
     mocked_stac_requests_get,
 )

+query_builder = QueryBuilder({"description", "keywords"})
+exclude_query = query_builder.build_query(CopernicusMarineCatalogue)

-class TestDescribe:
+
+class TestDescribeReleaseDate:
     @mock.patch(
         "requests.Session.get",
         side_effect=mocked_stac_requests_get,
@@ -18,7 +22,6 @@ def when_I_describe_the_marine_data_store(
     ):
         return describe(
             include_versions=include_versions,
-            include_datasets=True,
         )

     def test_only_released_dataset_by_default(self, snapshot):
@@ -28,10 +31,15 @@ def test_only_released_dataset_by_default(self, snapshot):
         )

     def then_I_dont_get_the_not_released_products_version_and_datasets(
-        self, describe_result, snapshot
+        self, describe_result: CopernicusMarineCatalogue, snapshot
     ):
-        assert 1 == len(describe_result["products"])
-        assert describe_result == snapshot
+        assert 1 == len(describe_result.products)
+        assert (
+            describe_result.model_dump(
+                exclude_none=True, exclude_unset=True, exclude=exclude_query
+            )
+            == snapshot
+        )

     def test_describe_all_versions(self, snapshot):
         describe_result = self.when_I_describe_the_marine_data_store(
@@ -42,7 +50,12 @@ def test_describe_all_versions(self, snapshot):
         )

     def then_I_get_all_products_versions_and_datasets(
-        self, describe_result, snapshot
+        self, describe_result: CopernicusMarineCatalogue, snapshot
     ):
-        assert 2 == len(describe_result["products"])
-        assert describe_result == snapshot
+        assert 2 == len(describe_result.products)
+        assert (
+            describe_result.model_dump(
+                exclude_none=True, exclude_unset=True, exclude=exclude_query
+            )
+            == snapshot
+        )
diff --git a/tests/test_python_interface.py b/tests/test_python_interface.py
index 7115c8fd..ea50f10d 100644
--- a/tests/test_python_interface.py
+++ b/tests/test_python_interface.py
@@ -20,17 +20,6 @@
 class TestPythonInterface:
-    def test_describe_function(self):
-        describe_result = describe()
-        assert describe_result is not None
-        assert isinstance(describe_result, dict)
-
-    def test_describe_function_with_filter_twice_in_a_row(self):
-        nwshelf_catalog = describe(contains=["NWSHELF"], include_datasets=True)
-        assert len(nwshelf_catalog["products"]) == 7
-        nwshelf_catalog = describe(contains=["NWSHELF"], include_datasets=True)
-        assert len(nwshelf_catalog["products"]) == 7
-
     def test_get_function(self, tmp_path):
         get_result = get(
             username=os.getenv("COPERNICUSMARINE_SERVICE_USERNAME"),
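The fixture updates below track the catalogue model renames (`CopernicusMarineProductDataset` is now `CopernicusMarineDataset`, `CopernicusMarineDatasetVersion` is now `CopernicusMarineVersion`, and `CopernicusMarineVersionPart` is now `CopernicusMarinePart`). A self-contained construction sketch using the renamed models, mirroring the fixtures that follow:

    from copernicusmarine.catalogue_parser.models import (
        PART_DEFAULT,
        CopernicusMarineDataset,
        CopernicusMarinePart,
        CopernicusMarineVersion,
    )

    # One version with a single default part; the keyword arguments match
    # those used by the fixtures below.
    dataset = CopernicusMarineDataset(
        dataset_id="example_dataset",
        dataset_name="Example Dataset",
        versions=[
            CopernicusMarineVersion(
                label="202201",
                parts=[
                    CopernicusMarinePart(
                        name=PART_DEFAULT,
                        services=[],
                        retired_date=None,
                        released_date="2022-01-01",
                    )
                ],
            )
        ],
    )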
retired_date="2060-01-01", @@ -84,16 +84,16 @@ ], ) -newly_released_version = CopernicusMarineDatasetVersion( +newly_released_version = CopernicusMarineVersion( label="202201", parts=[ - CopernicusMarineVersionPart( + CopernicusMarinePart( name=PART_DEFAULT, services=[], retired_date=None, released_date="2022-01-01", ), - CopernicusMarineVersionPart( + CopernicusMarinePart( name="to_be_released_part", services=[], retired_date=None, @@ -102,10 +102,10 @@ ], ) -old_version = CopernicusMarineDatasetVersion( +old_version = CopernicusMarineVersion( label="201901", parts=[ - CopernicusMarineVersionPart( + CopernicusMarinePart( name=PART_DEFAULT, services=[], retired_date=None, @@ -115,7 +115,7 @@ ) -example_dataset = CopernicusMarineProductDataset( +example_dataset = CopernicusMarineDataset( dataset_id="example_dataset", dataset_name="Example Dataset", versions=[