diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 970fef79f..91f65494c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: - --fixable=E,W,F,I,T,RUF,TID,UP - --target-version=py39 - id: ruff-format - rev: v0.8.1 + rev: v0.8.2 - repo: https://github.com/igorshubovych/markdownlint-cli rev: v0.43.0 diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index a65ebeb64..9263e9594 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -15,6 +15,32 @@ Changes are grouped as follows: - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. +## [0.3.19] - 2024-12-09 + +### Added + +- [alpha feature] `cdf purge dataset` now supports purging resources with internal IDs. + +### Fixed + +- Replacing variables in an inline SQL query no longer removes the quotes around the variable. +- Running `cdf build` on an older module will no longer raise a `KeyError` if the `module.toml` does + not have a `package` key. +- [alpha feature] `cdf purge dataset` no longer deletes `LocationFilters`. +- [alpha feature] `GraphQL` resources with views that specify a `rawFilter` no longer raise an error when + running `cdf deploy`. +- In the `cdf dump datamodel` command, properties that are overridden in a view are now correctly dumped. + +### Changed + +- [alpha feature] `cdf purge` now requires a confirmation before deleting resources. +- Building a `Transformation` will store the `.sql` file in the build directory instead of inlining it in the + resource YAML file. + +### Improved + +- Consistent display names of resources in the output tables of `cdf deploy` and `cdf clean`. + ## [0.3.18] - 2024-12-03 ### Fixed diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 531f73a87..f0ef4da9a 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -15,6 +15,10 @@ Changes are grouped as follows: - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. +## [0.3.19] - 2024-12-09 + +No changes to templates. + ## [0.3.18] - 2024-12-03 No changes to templates. diff --git a/README.md b/README.md index dc0ce039c..723097e66 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ More details about the tool can be found at [docs.cognite.com](https://docs.cognite.com/cdf/deploy/cdf_toolkit/). You can find an overview of the modules and packages in the -[module and package documentation](https://docs.cognite.com/cdf/deploy/cdf_toolkit/references/module_reference). +[module and package documentation](https://docs.cognite.com/cdf/deploy/cdf_toolkit/references/resource_library). See [./CONTRIBUTING.md](./CONTRIBUTING.md) for information about how to contribute to the `cdf-tk` tool or templates. diff --git a/cdf.toml b/cdf.toml index 98b1fe94b..fb6901421 100644 --- a/cdf.toml +++ b/cdf.toml @@ -23,4 +23,4 @@ dump = true [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. -version = "0.3.18" +version = "0.3.19" diff --git a/cognite_toolkit/_builtin_modules/cdf.toml b/cognite_toolkit/_builtin_modules/cdf.toml index 75a52b199..f2a25192c 100644 --- a/cognite_toolkit/_builtin_modules/cdf.toml +++ b/cognite_toolkit/_builtin_modules/cdf.toml @@ -4,7 +4,7 @@ default_env = "" [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command.
-version = "0.3.18" +version = "0.3.19" [plugins] diff --git a/cognite_toolkit/_cdf_tk/apps/_purge.py b/cognite_toolkit/_cdf_tk/apps/_purge.py index 2c21918ad..ba4572295 100644 --- a/cognite_toolkit/_cdf_tk/apps/_purge.py +++ b/cognite_toolkit/_cdf_tk/apps/_purge.py @@ -17,7 +17,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: def main(self, ctx: typer.Context) -> None: """Commands purge functionality""" if ctx.invoked_subcommand is None: - print("Use [bold yellow]cdf pull --help[/] for more information.") + print("Use [bold yellow]cdf purge --help[/] for more information.") def purge_dataset( self, @@ -44,6 +44,14 @@ def purge_dataset( help="Whether to do a dry-run, do dry-run if present.", ), ] = False, + auto_yes: Annotated[ + bool, + typer.Option( + "--yes", + "-y", + help="Automatically confirm that you are sure you want to purge the dataset.", + ), + ] = False, verbose: Annotated[ bool, typer.Option( @@ -61,6 +69,7 @@ def purge_dataset( external_id, include_dataset, dry_run, + auto_yes, verbose, ) ) @@ -90,6 +99,14 @@ def purge_space( help="Whether to do a dry-run, do dry-run if present.", ), ] = False, + auto_yes: Annotated[ + bool, + typer.Option( + "--yes", + "-y", + help="Automatically confirm that you are sure you want to purge the space.", + ), + ] = False, verbose: Annotated[ bool, typer.Option( @@ -109,6 +126,7 @@ def purge_space( space, include_space, dry_run, + auto_yes, verbose, ) ) diff --git a/cognite_toolkit/_cdf_tk/builders/_base.py b/cognite_toolkit/_cdf_tk/builders/_base.py index e7dc34dbc..9201f0c97 100644 --- a/cognite_toolkit/_cdf_tk/builders/_base.py +++ b/cognite_toolkit/_cdf_tk/builders/_base.py @@ -63,7 +63,7 @@ def validate_directory( return WarningList[ToolkitWarning]() # Helper methods - def _create_destination_path(self, source_path: Path, module_dir: Path, kind: str) -> Path: + def _create_destination_path(self, source_path: Path, kind: str) -> Path: """Creates the filepath in the build directory for the given source path. Note that this is a complex operation as the modules in the source are nested while the build directory is flat. 
@@ -153,7 +153,7 @@ def build( if warning is not None: yield [warning] continue - destination_path = self._create_destination_path(source_file.source.path, module.dir, loader.kind) + destination_path = self._create_destination_path(source_file.source.path, loader.kind) destination = BuildDestinationFile( path=destination_path, diff --git a/cognite_toolkit/_cdf_tk/builders/_datamodels.py b/cognite_toolkit/_cdf_tk/builders/_datamodels.py index b91f9e635..359361b24 100644 --- a/cognite_toolkit/_cdf_tk/builders/_datamodels.py +++ b/cognite_toolkit/_cdf_tk/builders/_datamodels.py @@ -39,7 +39,7 @@ def build( yield [warning] continue - destination_path = self._create_destination_path(source_file.source.path, module.dir, loader.kind) + destination_path = self._create_destination_path(source_file.source.path, loader.kind) extra_sources: list[SourceLocation] | None = None if loader is GraphQLLoader: diff --git a/cognite_toolkit/_cdf_tk/builders/_file.py b/cognite_toolkit/_cdf_tk/builders/_file.py index f6b717bf6..39bf00bdb 100644 --- a/cognite_toolkit/_cdf_tk/builders/_file.py +++ b/cognite_toolkit/_cdf_tk/builders/_file.py @@ -32,7 +32,7 @@ def build( continue if loader in {FileMetadataLoader, CogniteFileLoader}: loaded = self._expand_file_metadata(loaded, module, console) - destination_path = self._create_destination_path(source_file.source.path, module.dir, loader.kind) + destination_path = self._create_destination_path(source_file.source.path, loader.kind) yield BuildDestinationFile( path=destination_path, diff --git a/cognite_toolkit/_cdf_tk/builders/_function.py b/cognite_toolkit/_cdf_tk/builders/_function.py index eb8a077f1..f6a7d9797 100644 --- a/cognite_toolkit/_cdf_tk/builders/_function.py +++ b/cognite_toolkit/_cdf_tk/builders/_function.py @@ -43,7 +43,7 @@ def build( if loader is FunctionLoader: warnings = self.copy_function_directory_to_build(source_file) - destination_path = self._create_destination_path(source_file.source.path, module.dir, loader.kind) + destination_path = self._create_destination_path(source_file.source.path, loader.kind) yield BuildDestinationFile( path=destination_path, diff --git a/cognite_toolkit/_cdf_tk/builders/_raw.py b/cognite_toolkit/_cdf_tk/builders/_raw.py index dde8f2b05..8e5631261 100644 --- a/cognite_toolkit/_cdf_tk/builders/_raw.py +++ b/cognite_toolkit/_cdf_tk/builders/_raw.py @@ -51,7 +51,7 @@ def build( for loader, entries in entry_by_loader.items(): if not entries: continue - destination_path = self._create_destination_path(source_file.source.path, module.dir, loader.kind) + destination_path = self._create_destination_path(source_file.source.path, loader.kind) if loader is RawDatabaseLoader and has_split_table_and_database: # We have inferred the database from a Table file, so we need to recalculate the hash diff --git a/cognite_toolkit/_cdf_tk/builders/_streamlit.py b/cognite_toolkit/_cdf_tk/builders/_streamlit.py index fb52cad2c..62256090a 100644 --- a/cognite_toolkit/_cdf_tk/builders/_streamlit.py +++ b/cognite_toolkit/_cdf_tk/builders/_streamlit.py @@ -40,7 +40,7 @@ def build( if loader is StreamlitLoader: warnings = self.copy_app_directory_to_build(source_file) - destination_path = self._create_destination_path(source_file.source.path, module.dir, loader.kind) + destination_path = self._create_destination_path(source_file.source.path, loader.kind) yield BuildDestinationFile( path=destination_path, diff --git a/cognite_toolkit/_cdf_tk/builders/_transformation.py b/cognite_toolkit/_cdf_tk/builders/_transformation.py index c0957db1d..23075e236 
100644 --- a/cognite_toolkit/_cdf_tk/builders/_transformation.py +++ b/cognite_toolkit/_cdf_tk/builders/_transformation.py @@ -37,11 +37,11 @@ def build( yield [warning] continue + destination_path = self._create_destination_path(source_file.source.path, loader.kind) + extra_sources: list[SourceLocation] | None = None if loader is TransformationLoader: - extra_sources = self._add_query(loaded, source_file, query_files) - - destination_path = self._create_destination_path(source_file.source.path, module.dir, loader.kind) + extra_sources = self._add_query(loaded, source_file, query_files, destination_path) destination = BuildDestinationFile( path=destination_path, @@ -57,6 +57,7 @@ def _add_query( loaded: dict[str, Any] | list[dict[str, Any]], source_file: BuildSourceFile, query_files: dict[Path, BuildSourceFile], + transformation_destination_path: Path, ) -> list[SourceLocation]: loaded_list = loaded if isinstance(loaded, list) else [loaded] extra_sources: list[SourceLocation] = [] @@ -80,7 +81,10 @@ def _add_query( filepath, ) elif query_file is not None: - entry["query"] = query_file.content + destination_path = self._create_destination_path(query_file.source.path, "Query") + destination_path.write_text(query_file.content) + relative = destination_path.relative_to(transformation_destination_path.parent) + entry["query"] = relative.as_posix() extra_sources.append(query_file.source) return extra_sources diff --git a/cognite_toolkit/_cdf_tk/commands/_purge.py b/cognite_toolkit/_cdf_tk/commands/_purge.py index 2e7a9e4d3..60e5bfcfd 100644 --- a/cognite_toolkit/_cdf_tk/commands/_purge.py +++ b/cognite_toolkit/_cdf_tk/commands/_purge.py @@ -44,15 +44,23 @@ def space( space: str | None = None, include_space: bool = False, dry_run: bool = False, + auto_yes: bool = False, verbose: bool = False, ) -> None: """Purge a space and all its content""" selected_space = self._get_selected_space(space, ToolGlobals.toolkit_client) - self._print_panel("space", selected_space) if space is None: # Interactive mode include_space = questionary.confirm("Do you also want to delete the space itself?", default=False).ask() dry_run = questionary.confirm("Dry run?", default=True).ask() + if not dry_run: + self._print_panel("space", selected_space) + if not auto_yes: + confirm = questionary.confirm( + f"Are you really sure you want to purge the {selected_space!r} space?", default=False + ).ask() + if not confirm: + return loaders = self._get_dependencies( SpaceLoader, @@ -118,17 +126,25 @@ def dataset( external_id: str | None = None, include_dataset: bool = False, dry_run: bool = False, + auto_yes: bool = False, verbose: bool = False, ) -> None: """Purge a dataset and all its content""" selected_dataset = self._get_selected_dataset(external_id, ToolGlobals.toolkit_client) - self._print_panel("dataset", selected_dataset) if external_id is None: # Interactive mode include_dataset = questionary.confirm( "Do you want to archive the dataset itself after the purge?", default=False ).ask() dry_run = questionary.confirm("Dry run?", default=True).ask() + if not dry_run: + self._print_panel("dataset", selected_dataset) + if not auto_yes: + confirm = questionary.confirm( + f"Are you really sure you want to purge the {selected_dataset!r} dataset?", default=False + ).ask() + if not confirm: + return loaders = self._get_dependencies( DataSetsLoader, @@ -139,6 +155,7 @@ def dataset( StreamlitLoader, HostedExtractorDestinationLoader, FunctionLoader, + LocationFilterLoader, }, ) is_purged = self._purge( @@ -245,9 +262,14 @@ def _purge( 
try: batch_ids.append(loader.get_id(resource)) except ToolkitRequiredValueError as e: - self.warn(HighSeverityWarning(f"Cannot delete {resource.dump()!r}. Failed to obtain ID: {e}")) - is_purged = False - continue + try: + batch_ids.append(loader.get_internal_id(resource)) + except (AttributeError, NotImplementedError): + self.warn( + HighSeverityWarning(f"Cannot delete {type(resource).__name__}. Failed to obtain ID: {e}") + ) + is_purged = False + continue if len(batch_ids) >= batch_size: child_deletion = self._delete_children(batch_ids, child_loaders, dry_run, verbose) diff --git a/cognite_toolkit/_cdf_tk/commands/build.py b/cognite_toolkit/_cdf_tk/commands/build.py index 1b520ebad..c3bc4a091 100644 --- a/cognite_toolkit/_cdf_tk/commands/build.py +++ b/cognite_toolkit/_cdf_tk/commands/build.py @@ -105,6 +105,7 @@ def __init__(self, print_warning: bool = True, skip_tracking: bool = False, sile defaultdict(list) ) self._has_built = False + self._printed_variable_tree_structure_hint = False def execute( self, @@ -485,14 +486,16 @@ def _check_variables_replaced(self, content: str, module: Path, source_path: Pat if len(module_names) == 1 else (", ".join(module_names[:-1]) + f" or {module_names[-1]}") ) - self.console( - f"The variables in 'config.[ENV].yaml' need to be organised in a tree structure following" - f"\n the folder structure of the modules, but can also be moved up the config hierarchy to be shared between modules." - f"\n The variable {variable!r} is defined in the variable section{'s' if len(module_names) > 1 else ''} {module_str}." - f"\n Check that {'these paths reflect' if len(module_names) > 1 else 'this path reflects'} " - f"the location of {module.as_posix()}.", - prefix=" [bold green]Hint:[/] ", - ) + if not self._printed_variable_tree_structure_hint: + self._printed_variable_tree_structure_hint = True + self.console( + f"The variables in 'config.[ENV].yaml' need to be organised in a tree structure following" + f"\n the folder structure of the modules, but can also be moved up the config hierarchy to be shared between modules." + f"\n The variable {variable!r} is defined in the variable section{'s' if len(module_names) > 1 else ''} {module_str}." 
+ f"\n Check that {'these paths reflect' if len(module_names) > 1 else 'this path reflects'} " + f"the location of {module.as_posix()}.", + prefix=" [bold green]Hint:[/] ", + ) self.warning_list.extend(warning_list) if self.print_warning and warning_list: print(str(warning_list)) diff --git a/cognite_toolkit/_cdf_tk/commands/dump.py b/cognite_toolkit/_cdf_tk/commands/dump.py index 61ac4ca99..83a1c6d18 100644 --- a/cognite_toolkit/_cdf_tk/commands/dump.py +++ b/cognite_toolkit/_cdf_tk/commands/dump.py @@ -14,8 +14,9 @@ from rich.panel import Panel from cognite_toolkit._cdf_tk.exceptions import ToolkitMissingResourceError +from cognite_toolkit._cdf_tk.loaders import ViewLoader from cognite_toolkit._cdf_tk.tk_warnings import MediumSeverityWarning -from cognite_toolkit._cdf_tk.utils import CDFToolConfig, retrieve_view_ancestors +from cognite_toolkit._cdf_tk.utils import CDFToolConfig from ._base import ToolkitCommand @@ -53,8 +54,6 @@ def execute( space_ids = {item.space for item in itertools.chain(containers, views, [data_model])} spaces = client.data_modeling.spaces.retrieve(list(space_ids)) - views_by_id = {view.as_id(): view for view in views} - is_populated = output_dir.exists() and any(output_dir.iterdir()) if is_populated and clean: shutil.rmtree(output_dir) @@ -89,6 +88,7 @@ def execute( suffix_version = len(views) != len({f"{view.space}{view.external_id}" for view in views}) view_folder = resource_folder / "views" view_folder.mkdir(exist_ok=True) + view_loader = ViewLoader.create_loader(ToolGlobals, None) for view in views: file_name = f"{view.external_id}.view.yaml" if prefix_space: @@ -96,15 +96,7 @@ def execute( if suffix_version: file_name = f"{file_name.removesuffix('.view.yaml')}_{view.version}.view.yaml" view_file = view_folder / file_name - view_write = view.as_write().dump() - parents = retrieve_view_ancestors(client, view.implements or [], views_by_id) - for parent in parents: - for prop_name in parent.properties.keys(): - view_write["properties"].pop(prop_name, None) - if not view_write["properties"]: - # All properties were removed, so we remove the properties key. 
- view_write.pop("properties", None) - + view_write = view_loader.dump_as_write(view) view_file.write_text(yaml.safe_dump(view_write, sort_keys=False)) if verbose: print(f" [bold green]INFO:[/] Dumped view {view.as_id()} to {view_file!s}.") diff --git a/cognite_toolkit/_cdf_tk/data_classes/_build_variables.py b/cognite_toolkit/_cdf_tk/data_classes/_build_variables.py index 3f54d0a35..1f54c6e21 100644 --- a/cognite_toolkit/_cdf_tk/data_classes/_build_variables.py +++ b/cognite_toolkit/_cdf_tk/data_classes/_build_variables.py @@ -154,16 +154,18 @@ def get_module_variables(self, module: ModuleLocation) -> list[BuildVariables]: def replace(self, content: str, file_suffix: str = ".yaml") -> str: for variable in self: replace = variable.value_variable - _core_patter = rf"{{{{\s*{variable.key}\s*}}}}" + _core_pattern = rf"{{{{\s*{variable.key}\s*}}}}" if file_suffix in {".yaml", ".yml", ".json"}: # Preserve data types + pattern = _core_pattern if isinstance(replace, str) and (replace.isdigit() or replace.endswith(":")): replace = f'"{replace}"' + pattern = rf"'{_core_pattern}'|{_core_pattern}|" + rf'"{_core_pattern}"' elif replace is None: replace = "null" - content = re.sub(rf"'{_core_patter}'|{_core_patter}|" + rf'"{_core_patter}"', str(replace), content) + content = re.sub(pattern, str(replace), content) else: - content = re.sub(_core_patter, str(replace), content) + content = re.sub(_core_pattern, str(replace), content) return content diff --git a/cognite_toolkit/_cdf_tk/data_classes/_module_toml.py b/cognite_toolkit/_cdf_tk/data_classes/_module_toml.py index 578b58ce7..361ef88a7 100644 --- a/cognite_toolkit/_cdf_tk/data_classes/_module_toml.py +++ b/cognite_toolkit/_cdf_tk/data_classes/_module_toml.py @@ -63,11 +63,21 @@ def load(cls, data: dict[str, Any] | Path) -> ModuleToml: if "extra_resources" in data and isinstance(data["extra_resources"], list): extra_resources = [Path(item["location"]) for item in data["extra_resources"] if "location" in item] + tags: frozenset[str] = frozenset() + if "packages" in data: + if "tags" in data["packages"]: + tags = frozenset(data["packages"]["tags"]) + title: str | None = None + is_selected_by_default: bool = False + if "module" in data: + title = data["module"].get("title") + is_selected_by_default = data["module"].get("is_selected_by_default", False) + return cls( - title=data["module"].get("title"), - tags=frozenset(data["packages"].get("tags", set())), + title=title, + tags=tags, dependencies=dependencies, - is_selected_by_default=data["module"].get("is_selected_by_default", False), + is_selected_by_default=is_selected_by_default, data=example_data, extra_resources=extra_resources, ) diff --git a/cognite_toolkit/_cdf_tk/loaders/_base_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_base_loaders.py index 6e94506c0..59db5566c 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_base_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_base_loaders.py @@ -249,6 +249,23 @@ def get_dependent_items(cls, item: dict) -> Iterable[tuple[type[ResourceLoader], return yield + @classmethod + def get_internal_id(cls, item: T_WritableCogniteResource | dict) -> int: + raise NotImplementedError(f"{cls.__name__} does not have an internal id.") + + @classmethod + def _split_ids(cls, ids: T_ID | int | SequenceNotStr[T_ID | int] | None) -> tuple[list[int], list[str]]: + # Used by subclasses to split the ids into external and internal ids + if ids is None: + return [], [] + if isinstance(ids, int): + return [ids], [] + if isinstance(ids, str): + return [], [ids] + if isinstance(ids, 
Sequence): + return [id for id in ids if isinstance(id, int)], [id for id in ids if isinstance(id, str)] + raise ValueError(f"Invalid ids: {ids}") + def load_resource( self, filepath: Path, ToolGlobals: CDFToolConfig, skip_validation: bool ) -> T_WriteClass | T_CogniteResourceList | None: diff --git a/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py index c3c1962d3..653fc5ff3 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py @@ -32,7 +32,7 @@ class DatapointsLoader(DataLoader): @property def display_name(self) -> str: - return "timeseries.datapoints" + return "timeseries datapoints" def upload(self, state: BuildEnvironment, ToolGlobals: CDFToolConfig, dry_run: bool) -> Iterable[tuple[str, int]]: if self.folder_name not in state.built_resources: @@ -92,7 +92,7 @@ class FileLoader(DataLoader): @property def display_name(self) -> str: - return "file contents" + return "file content" def upload(self, state: BuildEnvironment, ToolGlobals: CDFToolConfig, dry_run: bool) -> Iterable[tuple[str, int]]: if self.folder_name not in state.built_resources: @@ -157,7 +157,7 @@ class RawFileLoader(DataLoader): @property def display_name(self) -> str: - return "raw.rows" + return "raw rows" def upload(self, state: BuildEnvironment, ToolGlobals: CDFToolConfig, dry_run: bool) -> Iterable[tuple[str, int]]: if self.folder_name not in state.built_resources: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/auth_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/auth_loaders.py index 426511796..fb1a2b631 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/auth_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/auth_loaders.py @@ -100,7 +100,7 @@ def __init__( @property def display_name(self) -> str: - return f"iam.groups({self.target_scopes.removesuffix('_only')})" + return f"groups({self.target_scopes.removesuffix('_only')})" @classmethod def create_loader( @@ -478,6 +478,10 @@ class GroupAllScopedLoader(GroupLoader): def __init__(self, client: ToolkitClient, build_dir: Path | None): super().__init__(client, build_dir, "all_scoped_only") + @property + def display_name(self) -> str: + return "all-scoped groups" + @final class SecurityCategoryLoader( @@ -495,7 +499,7 @@ class SecurityCategoryLoader( @property def display_name(self) -> str: - return "security.categories" + return "security categories" @classmethod def get_id(cls, item: SecurityCategoryWrite | SecurityCategory | dict) -> str: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/classic_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/classic_loaders.py index 6070c918b..4370a2928 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/classic_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/classic_loaders.py @@ -48,7 +48,7 @@ class AssetLoader(ResourceLoader[str, AssetWrite, Asset, AssetWriteList, AssetLi @property def display_name(self) -> str: - return self.kind + return "assets" @classmethod def get_id(cls, item: Asset | AssetWrite | dict) -> str: @@ -58,6 +58,14 @@ def get_id(cls, item: Asset | AssetWrite | dict) -> str: raise KeyError("Asset must have external_id") return item.external_id + @classmethod + def get_internal_id(cls, item: Asset | dict) -> int: + if isinstance(item, dict): + return item["id"] + if not item.id: + raise KeyError("Asset must have id") + return item.id + @classmethod def dump_id(cls, id: str) -> 
dict[str, Any]: return {"externalId": id} @@ -94,13 +102,15 @@ def retrieve(self, ids: SequenceNotStr[str]) -> AssetList: def update(self, items: AssetWriteList) -> AssetList: return self.client.assets.update(items, mode="replace") - def delete(self, ids: SequenceNotStr[str]) -> int: + def delete(self, ids: SequenceNotStr[str | int]) -> int: + internal_ids, external_ids = self._split_ids(ids) try: - self.client.assets.delete(external_id=ids) + self.client.assets.delete(id=internal_ids, external_id=external_ids) except (CogniteAPIError, CogniteNotFoundError) as e: non_existing = set(e.failed or []) if existing := [id_ for id_ in ids if id_ not in non_existing]: - self.client.assets.delete(external_id=existing) + internal_ids, external_ids = self._split_ids(existing) + self.client.assets.delete(id=internal_ids, external_id=external_ids) return len(existing) else: return len(ids) @@ -230,7 +240,7 @@ class SequenceLoader(ResourceLoader[str, SequenceWrite, Sequence, SequenceWriteL @property def display_name(self) -> str: - return self.kind + return "sequences" @classmethod def get_id(cls, item: Sequence | SequenceWrite | dict) -> str: @@ -240,6 +250,14 @@ def get_id(cls, item: Sequence | SequenceWrite | dict) -> str: raise KeyError("Sequence must have external_id") return item.external_id + @classmethod + def get_internal_id(cls, item: Sequence | dict) -> int: + if isinstance(item, dict): + return item["id"] + if not item.id: + raise KeyError("Sequence must have id") + return item.id + @classmethod def dump_id(cls, id: str) -> dict[str, Any]: return {"externalId": id} @@ -273,13 +291,15 @@ def retrieve(self, ids: SequenceNotStr[str]) -> SequenceList: def update(self, items: SequenceWriteList) -> SequenceList: return self.client.sequences.update(items, mode="replace") - def delete(self, ids: SequenceNotStr[str]) -> int: + def delete(self, ids: SequenceNotStr[str | int]) -> int: + internal_ids, external_ids = self._split_ids(ids) try: - self.client.sequences.delete(external_id=ids) + self.client.sequences.delete(id=internal_ids, external_id=external_ids) except (CogniteAPIError, CogniteNotFoundError) as e: non_existing = set(e.failed or []) if existing := [id_ for id_ in ids if id_ not in non_existing]: - self.client.sequences.delete(external_id=existing) + internal_ids, external_ids = self._split_ids(existing) + self.client.sequences.delete(id=internal_ids, external_id=external_ids) return len(existing) else: return len(ids) @@ -346,7 +366,7 @@ class EventLoader(ResourceLoader[str, EventWrite, Event, EventWriteList, EventLi @property def display_name(self) -> str: - return self.kind + return "events" @classmethod def get_id(cls, item: Event | EventWrite | dict) -> str: @@ -356,6 +376,14 @@ def get_id(cls, item: Event | EventWrite | dict) -> str: raise KeyError("Event must have external_id") return item.external_id + @classmethod + def get_internal_id(cls, item: Event | dict) -> int: + if isinstance(item, dict): + return item["id"] + if not item.id: + raise KeyError("Event must have id") + return item.id + @classmethod def dump_id(cls, id: str) -> dict[str, Any]: return {"externalId": id} @@ -392,13 +420,15 @@ def retrieve(self, ids: SequenceNotStr[str]) -> EventList: def update(self, items: EventWriteList) -> EventList: return self.client.events.update(items, mode="replace") - def delete(self, ids: SequenceNotStr[str]) -> int: + def delete(self, ids: SequenceNotStr[str | int]) -> int: + internal_ids, external_ids = self._split_ids(ids) try: - self.client.events.delete(external_id=ids) + 
self.client.events.delete(id=internal_ids, external_id=external_ids) except (CogniteAPIError, CogniteNotFoundError) as e: non_existing = set(e.failed or []) if existing := [id_ for id_ in ids if id_ not in non_existing]: - self.client.events.delete(external_id=existing) + internal_ids, external_ids = self._split_ids(existing) + self.client.events.delete(id=internal_ids, external_id=external_ids) return len(existing) else: return len(ids) diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/data_organization_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/data_organization_loaders.py index d448509ee..a24e5f165 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/data_organization_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/data_organization_loaders.py @@ -63,6 +63,10 @@ class DataSetsLoader(ResourceLoader[str, DataSetWrite, DataSet, DataSetWriteList dependencies = frozenset({GroupAllScopedLoader}) _doc_url = "Data-sets/operation/createDataSets" + @property + def display_name(self) -> str: + return "data sets" + @classmethod def get_required_capability(cls, items: DataSetWriteList | None, read_only: bool) -> Capability | list[Capability]: if not items and items is not None: @@ -177,7 +181,7 @@ class LabelLoader( @property def display_name(self) -> str: - return self.kind + return "labels" @classmethod def get_id(cls, item: LabelDefinition | LabelDefinitionWrite | dict) -> str: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/datamodel_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/datamodel_loaders.py index 51115535d..407a61f10 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/datamodel_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/datamodel_loaders.py @@ -526,18 +526,13 @@ def get_dependent_items(cls, item: dict) -> Iterable[tuple[type[ResourceLoader], elif source.get("type") == "container" and in_dict(("space", "externalId"), source): yield ContainerLoader, ContainerId(source["space"], source["externalId"]) - def _are_equal( - self, local: ViewApply, cdf_resource: View, return_dumped: bool = False - ) -> bool | tuple[bool, dict[str, Any], dict[str, Any]]: - local_dumped = local.dump() + def dump_as_write(self, cdf_resource: View) -> dict[str, Any]: + """Views are special in that they include all parent properties. This + method looks up all parent views and removes the properties that are + not overridden to get the true write view.""" cdf_dumped = cdf_resource.as_write().dump() - - # The version is always a string from the API, but can be an int when reading from YAML. - local_dumped["version"] = str(local_dumped["version"]) - if not cdf_resource.implements: - return self._return_are_equal(local_dumped, cdf_dumped, return_dumped) - + return cdf_dumped if cdf_resource.properties: # All read version of views have all the properties of their parent views. # We need to remove these properties to compare with the local view. @@ -550,6 +545,16 @@ def _are_equal( if is_overidden: continue cdf_properties.pop(prop_name, None) + return cdf_dumped + + def _are_equal( + self, local: ViewApply, cdf_resource: View, return_dumped: bool = False + ) -> bool | tuple[bool, dict[str, Any], dict[str, Any]]: + local_dumped = local.dump() + cdf_dumped = self.dump_as_write(cdf_resource) + + # The version is always a string from the API, but can be an int when reading from YAML.
+ local_dumped["version"] = str(local_dumped["version"]) if not cdf_dumped.get("properties"): # All properties were removed, so we remove the properties key. @@ -1060,7 +1065,7 @@ def __init__(self, client: ToolkitClient, build_dir: Path) -> None: @property def display_name(self) -> str: - return "GraphQL schemas" + return "graph QL schemas" @classmethod def get_id(cls, item: GraphQLDataModelWrite | GraphQLDataModel | dict) -> DataModelId: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/extraction_pipeline_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/extraction_pipeline_loaders.py index 299d41def..57a76d745 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/extraction_pipeline_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/extraction_pipeline_loaders.py @@ -17,7 +17,7 @@ from collections.abc import Hashable, Iterable, Sequence from functools import lru_cache from pathlib import Path -from typing import Any, cast, final +from typing import Any, final import yaml from cognite.client.data_classes import ( @@ -78,6 +78,10 @@ class ExtractionPipelineLoader( dependencies = frozenset({DataSetsLoader, RawDatabaseLoader, RawTableLoader, GroupAllScopedLoader}) _doc_url = "Extraction-Pipelines/operation/createExtPipes" + @property + def display_name(self) -> str: + return "extraction pipelines" + @classmethod def get_required_capability( cls, items: ExtractionPipelineWriteList | None, read_only: bool @@ -111,6 +115,14 @@ def get_id(cls, item: ExtractionPipeline | ExtractionPipelineWrite | dict) -> st raise ToolkitRequiredValueError("ExtractionPipeline must have external_id set.") return item.external_id + @classmethod + def get_internal_id(cls, item: ExtractionPipeline | dict) -> int: + if isinstance(item, dict): + return item["id"] + if item.id is None: + raise ToolkitRequiredValueError("ExtractionPipeline must have id set.") + return item.id + @classmethod def dump_id(cls, id: str) -> dict[str, Any]: return {"externalId": id} @@ -189,18 +201,19 @@ def update(self, items: ExtractionPipelineWriteList) -> ExtractionPipelineList: # Bug in SDK overload so need the ignore. 
return self.client.extraction_pipelines.update(items, mode="replace") # type: ignore[call-overload] - def delete(self, ids: SequenceNotStr[str]) -> int: - id_list = list(ids) + def delete(self, ids: SequenceNotStr[str | int]) -> int: + internal_ids, external_ids = self._split_ids(ids) try: - self.client.extraction_pipelines.delete(external_id=id_list) + self.client.extraction_pipelines.delete(id=internal_ids, external_id=external_ids) except CogniteNotFoundError as e: not_existing = {external_id for dup in e.not_found if (external_id := dup.get("externalId", None))} - if id_list := [id_ for id_ in id_list if id_ not in not_existing]: - self.client.extraction_pipelines.delete(external_id=id_list) + if id_list := [id_ for id_ in ids if id_ not in not_existing]: + internal_ids, external_ids = self._split_ids(id_list) + self.client.extraction_pipelines.delete(id=internal_ids, external_id=external_ids) except CogniteAPIError as e: if e.code == 403 and "not found" in e.message and "extraction pipeline" in e.message.lower(): return 0 - return len(id_list) + return len(ids) def _iterate( self, @@ -251,7 +264,7 @@ class ExtractionPipelineConfigLoader( @property def display_name(self) -> str: - return "extraction_pipeline.config" + return "extraction pipeline configs" @classmethod def get_required_capability( @@ -382,12 +395,16 @@ def _iterate( ) -> Iterable[ExtractionPipelineConfig]: parent_iterable = parent_ids or iter(self.client.extraction_pipelines) for parent_id in parent_iterable or []: + pipeline_id: str | None = None if isinstance(parent_id, ExtractionPipeline): - pipeline_id = cast(str, parent_id.external_id) + if parent_id.external_id: + pipeline_id = parent_id.external_id elif isinstance(parent_id, str): pipeline_id = parent_id - else: + + if pipeline_id is None: continue + try: yield self.client.extraction_pipelines.config.retrieve(external_id=pipeline_id) except CogniteAPIError as e: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/file_loader.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/file_loader.py index e8126b2c6..9d516d633 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/file_loader.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/file_loader.py @@ -78,7 +78,7 @@ class FileMetadataLoader( @property def display_name(self) -> str: - return "file_metadata" + return "file metadata" @classmethod def get_required_capability( @@ -104,6 +104,12 @@ def get_id(cls, item: FileMetadata | FileMetadataWrite | dict) -> str: raise ToolkitRequiredValueError("FileMetadata must have external_id set.") return item.external_id + @classmethod + def get_internal_id(cls, item: FileMetadata | dict) -> int: + if isinstance(item, dict): + return item["id"] + return item.id + @classmethod def dump_id(cls, id: str) -> dict[str, Any]: return {"externalId": id} @@ -193,8 +199,9 @@ def retrieve(self, ids: SequenceNotStr[str]) -> FileMetadataList: def update(self, items: FileMetadataWriteList) -> FileMetadataList: return self.client.files.update(items, mode="replace") - def delete(self, ids: str | SequenceNotStr[str] | None) -> int: - self.client.files.delete(external_id=cast(SequenceNotStr[str], ids)) + def delete(self, ids: str | int | SequenceNotStr[str | int] | None) -> int: + internal_ids, external_ids = self._split_ids(ids) + self.client.files.delete(id=internal_ids, external_id=external_ids) return len(cast(SequenceNotStr[str], ids)) def _iterate( @@ -266,7 +273,7 @@ class CogniteFileLoader( @property def display_name(self) -> str: - return "cognite_file" + 
return "cognite files" @classmethod def get_id(cls, item: ExtendableCogniteFile | ExtendableCogniteFileApply | dict) -> NodeId: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py index ddf061197..6d7019c8b 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py @@ -63,6 +63,10 @@ class _MetadataKey: function_hash = "cdf-toolkit-function-hash" secret_hash = "cdf-toolkit-secret-hash" + @property + def display_name(self) -> str: + return "functions" + @classmethod def get_required_capability( cls, items: FunctionWriteList | None, read_only: bool @@ -283,7 +287,7 @@ class FunctionScheduleLoader( @property def display_name(self) -> str: - return "function.schedules" + return "function schedules" @classmethod def get_required_capability(cls, items: FunctionScheduleWriteList | None, read_only: bool) -> list[Capability]: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/group_scoped_loader.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/group_scoped_loader.py index aff9fccb7..0fc039336 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/group_scoped_loader.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/group_scoped_loader.py @@ -46,3 +46,7 @@ class GroupResourceScopedLoader(GroupLoader): def __init__(self, client: ToolkitClient, build_dir: Path | None): super().__init__(client, build_dir, "resource_scoped_only") + + @property + def display_name(self) -> str: + return "resource-scoped groups" diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/hosted_extractors.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/hosted_extractors.py index 0e581c1bb..cb2d59921 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/hosted_extractors.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/hosted_extractors.py @@ -50,7 +50,7 @@ class HostedExtractorSourceLoader(ResourceLoader[str, SourceWrite, Source, Sourc @property def display_name(self) -> str: - return "Hosted Extractor Source" + return "hosted extractor sources" @classmethod def get_id(cls, item: SourceWrite | Source | dict) -> str: @@ -143,7 +143,7 @@ def __init__(self, client: ToolkitClient, build_dir: Path | None): @property def display_name(self) -> str: - return "Hosted Extractor Destination" + return "hosted extractor destinations" @classmethod def get_id(cls, item: DestinationWrite | Destination | dict) -> str: @@ -265,7 +265,7 @@ class HostedExtractorJobLoader(ResourceLoader[str, JobWrite, Job, JobWriteList, @property def display_name(self) -> str: - return "Hosted Extractor Job" + return "hosted extractor jobs" @classmethod def get_id(cls, item: JobWrite | Job | dict) -> str: @@ -364,7 +364,7 @@ class HostedExtractorMappingLoader(ResourceLoader[str, MappingWrite, Mapping, Ma @property def display_name(self) -> str: - return "Hosted Extractor Mapping" + return "hosted extractor mappings" @classmethod def get_id(cls, item: MappingWrite | Mapping | dict) -> str: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/industrial_tool_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/industrial_tool_loaders.py index e9b881624..3f6a740e8 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/industrial_tool_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/industrial_tool_loaders.py @@ -44,6 +44,10 @@ class 
StreamlitLoader(ResourceLoader[str, StreamlitWrite, Streamlit, StreamlitWr _doc_url = "Files/operation/initFileUpload" _metadata_hash_key = "cdf-toolkit-app-hash" + @property + def display_name(self) -> str: + return "Streamlit apps" + def __init__(self, client: ToolkitClient, build_dir: Path | None): super().__init__(client, build_dir) self._source_file_by_external_id: dict[str, Path] = {} diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/location_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/location_loaders.py index 03c4f9cb2..1bed06892 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/location_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/location_loaders.py @@ -54,6 +54,10 @@ class LocationFilterLoader( subfilter_names = ("assets", "events", "files", "timeseries", "sequences") + @property + def display_name(self) -> str: + return "location filters" + @classmethod def get_required_capability( cls, items: LocationFilterWriteList | None, read_only: bool diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/raw_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/raw_loaders.py index c3ccf24b2..b3c810735 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/raw_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/raw_loaders.py @@ -55,7 +55,7 @@ def __init__(self, client: ToolkitClient, build_dir: Path): @property def display_name(self) -> str: - return "raw.databases" + return "raw databases" @classmethod def get_required_capability(cls, items: RawDatabaseList | None, read_only: bool) -> Capability | list[Capability]: @@ -169,7 +169,7 @@ def __init__(self, client: ToolkitClient, build_dir: Path): @property def display_name(self) -> str: - return "raw.tables" + return "raw tables" @classmethod def get_required_capability(cls, items: RawTableList | None, read_only: bool) -> Capability | list[Capability]: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/relationship_loader.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/relationship_loader.py index 5a8e38c65..f85ae3378 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/relationship_loader.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/relationship_loader.py @@ -41,7 +41,7 @@ class RelationshipLoader(ResourceLoader[str, RelationshipWrite, Relationship, Re @property def display_name(self) -> str: - return self.kind + return "relationships" @classmethod def get_id(cls, item: Relationship | RelationshipWrite | dict) -> str: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/robotics_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/robotics_loaders.py index 0ee0c91fc..6914ba932 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/robotics_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/robotics_loaders.py @@ -48,7 +48,7 @@ class RoboticFrameLoader(ResourceLoader[str, FrameWrite, Frame, FrameWriteList, @property def display_name(self) -> str: - return "robotics.frame" + return "robotics frames" @classmethod def get_id(cls, item: Frame | FrameWrite | dict) -> str: @@ -114,7 +114,7 @@ class RoboticLocationLoader(ResourceLoader[str, LocationWrite, Location, Locatio @property def display_name(self) -> str: - return "robotics.location" + return "robotics locations" @classmethod def get_id(cls, item: Location | LocationWrite | dict) -> str: @@ -189,7 +189,7 @@ class RoboticsDataPostProcessingLoader( @property def display_name(self) -> str: - return 
"robotics.data_postprocessing" + return "robotics data postprocessing" @classmethod def get_id(cls, item: DataPostProcessing | DataPostProcessingWrite | dict) -> str: @@ -276,7 +276,7 @@ class RobotCapabilityLoader( @property def display_name(self) -> str: - return "robotics.robot_capability" + return "robotics robot capabilities" @classmethod def get_id(cls, item: RobotCapability | RobotCapabilityWrite | dict) -> str: @@ -365,7 +365,7 @@ class RoboticMapLoader(ResourceLoader[str, MapWrite, Map, MapWriteList, MapList] @property def display_name(self) -> str: - return "robotics.map" + return "robotics maps" @classmethod def get_id(cls, item: Map | MapWrite | dict) -> str: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/three_d_model_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/three_d_model_loaders.py index 4a9afd1f6..7e32ed4d6 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/three_d_model_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/three_d_model_loaders.py @@ -39,6 +39,10 @@ class ThreeDModelLoader( _doc_url = "3D-Models/operation/create3DModels" item_name = "revisions" + @property + def display_name(self) -> str: + return "3D models" + @classmethod def get_id(cls, item: ThreeDModel | ThreeDModelWrite | dict) -> str: if isinstance(item, dict): @@ -47,6 +51,14 @@ def get_id(cls, item: ThreeDModel | ThreeDModelWrite | dict) -> str: raise KeyError("3DModel must have name") return item.name + @classmethod + def get_internal_id(cls, item: ThreeDModel | dict) -> int: + if isinstance(item, dict): + return item["id"] + if not item.id: + raise KeyError("3DModel must have id") + return item.id + @classmethod def dump_id(cls, id: str) -> dict[str, Any]: return {"name": id} @@ -87,14 +99,18 @@ def create(self, items: ThreeDModelWriteList) -> ThreeDModelList: created.append(new_item) return created - def retrieve(self, ids: SequenceNotStr[str]) -> ThreeDModelList: + def retrieve(self, ids: SequenceNotStr[str | int]) -> ThreeDModelList: output = ThreeDModelList([]) - to_find = set(ids) + selected_names = {id_ for id_ in ids if isinstance(id_, str)} + selected_ids = {id_ for id_ in ids if isinstance(id_, int)} for model in self.client.three_d.models: - if model.name in to_find: + if model.name in selected_names or model.id in selected_ids: output.append(model) - to_find.remove(model.name) - if not to_find: + if model.name: + selected_names.discard(model.name) + if model.id: + selected_ids.discard(model.id) + if (not selected_names) and (not selected_ids): break return output @@ -117,7 +133,7 @@ def update(self, items: ThreeDModelWriteList) -> ThreeDModelList: updates.append(update) return self.client.three_d.models.update(updates, mode="replace") - def delete(self, ids: SequenceNotStr[str]) -> int: + def delete(self, ids: SequenceNotStr[str | int]) -> int: models = self.retrieve(ids) self.client.three_d.models.delete(models.as_ids()) return len(models) diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py index 70c45d481..70673f16f 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py @@ -51,6 +51,10 @@ class TimeSeriesLoader(ResourceContainerLoader[str, TimeSeriesWrite, TimeSeries, dependencies = frozenset({DataSetsLoader, GroupAllScopedLoader, AssetLoader}) _doc_url = "Time-series/operation/postTimeSeries" + @property + def 
display_name(self) -> str: + return "time series" + @classmethod def get_required_capability( cls, items: TimeSeriesWriteList | None, read_only: bool @@ -78,6 +82,12 @@ def get_id(cls, item: TimeSeries | TimeSeriesWrite | dict) -> str: raise ToolkitRequiredValueError("TimeSeries must have external_id set.") return item.external_id + @classmethod + def get_internal_id(cls, item: TimeSeries | dict) -> int: + if isinstance(item, dict): + return item["id"] + return item.id + @classmethod def dump_id(cls, id: str) -> dict[str, Any]: return {"externalId": id} @@ -147,18 +157,19 @@ def _are_equal( def create(self, items: TimeSeriesWriteList) -> TimeSeriesList: return self.client.time_series.create(items) - def retrieve(self, ids: SequenceNotStr[str]) -> TimeSeriesList: + def retrieve(self, ids: SequenceNotStr[str | int]) -> TimeSeriesList: + internal_ids, external_ids = self._split_ids(ids) return self.client.time_series.retrieve_multiple( - external_ids=cast(SequenceNotStr[str], ids), ignore_unknown_ids=True + ids=internal_ids, external_ids=external_ids, ignore_unknown_ids=True ) def update(self, items: TimeSeriesWriteList) -> TimeSeriesList: return self.client.time_series.update(items, mode="replace") - def delete(self, ids: SequenceNotStr[str]) -> int: - existing = self.retrieve(ids).as_external_ids() + def delete(self, ids: SequenceNotStr[str | int]) -> int: + existing = self.retrieve(ids) if existing: - self.client.time_series.delete(external_id=existing, ignore_unknown_ids=True) + self.client.time_series.delete(id=existing.as_ids(), ignore_unknown_ids=True) return len(existing) def _iterate( @@ -240,7 +251,7 @@ class DatapointSubscriptionLoader( @property def display_name(self) -> str: - return "timeseries.subscription" + return "timeseries subscriptions" @classmethod def get_id(cls, item: DataPointSubscriptionWrite | DatapointSubscription | dict) -> str: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/transformation_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/transformation_loaders.py index f43d61656..efb112cc3 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/transformation_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/transformation_loaders.py @@ -111,6 +111,10 @@ class TransformationLoader( _doc_url = "Transformations/operation/createTransformations" do_environment_variable_injection = True + @property + def display_name(self) -> str: + return "transformations" + @classmethod def get_required_capability( cls, items: TransformationWriteList | None, read_only: bool @@ -141,6 +145,14 @@ def get_id(cls, item: Transformation | TransformationWrite | dict) -> str: raise ToolkitRequiredValueError("Transformation must have external_id set.") return item.external_id + @classmethod + def get_internal_id(cls, item: Transformation | dict) -> int: + if isinstance(item, dict): + return item["id"] + if item.id is None: + raise ToolkitRequiredValueError("Transformation must have id set.") + return item.id + @classmethod def dump_id(cls, id: str) -> dict[str, Any]: return {"externalId": id} @@ -182,23 +194,6 @@ def _are_equal( return self._return_are_equal(local_dumped, cdf_dumped, return_dumped) - @staticmethod - def _get_query_file(filepath: Path, transformation_external_id: str | None) -> Path | None: - query_file = filepath.parent / f"{filepath.stem}.sql" - if not query_file.exists() and transformation_external_id: - found_query_file = next( - ( - f - for f in filepath.parent.iterdir() - if f.is_file() and 
f.name.endswith(f"{transformation_external_id}.sql") - ), - None, - ) - if found_query_file is None: - return None - query_file = found_query_file - return query_file - def load_resource( self, filepath: Path, ToolGlobals: CDFToolConfig, skip_validation: bool ) -> TransformationWrite | TransformationWriteList: @@ -258,7 +253,11 @@ def load_resource( except KeyError as e: raise ToolkitYAMLFormatError("authentication property is missing required fields", filepath, e) - query_file = self._get_query_file(filepath, transformation.external_id) + query_file: Path | None = filepath.parent / Path(transformation.query or "") + if query_file and query_file.exists(): + transformation.query = None + else: + query_file = None if transformation.query is None: if query_file is None: @@ -267,11 +266,6 @@ def load_resource( filepath, ) transformation.query = safe_read(query_file) - elif transformation.query is not None and query_file is not None: - raise ToolkitYAMLFormatError( - f"query property is ambiguously defined in both the yaml file and a separate file named {query_file}\n" - f"Please remove one of the definitions, either the query property in {filepath} or the file {query_file}", - ) transformations.append(transformation) @@ -293,16 +287,19 @@ def dump_resource( def create(self, items: Sequence[TransformationWrite]) -> TransformationList: return self.client.transformations.create(items) - def retrieve(self, ids: SequenceNotStr[str]) -> TransformationList: - return self.client.transformations.retrieve_multiple(external_ids=ids, ignore_unknown_ids=True) + def retrieve(self, ids: SequenceNotStr[str | int]) -> TransformationList: + internal_ids, external_ids = self._split_ids(ids) + return self.client.transformations.retrieve_multiple( + ids=internal_ids, external_ids=external_ids, ignore_unknown_ids=True + ) def update(self, items: TransformationWriteList) -> TransformationList: return self.client.transformations.update(items, mode="replace") - def delete(self, ids: SequenceNotStr[str]) -> int: - existing = self.retrieve(ids).as_external_ids() + def delete(self, ids: SequenceNotStr[str | int]) -> int: + existing = self.retrieve(ids).as_ids() if existing: - self.client.transformations.delete(external_id=existing, ignore_unknown_ids=True) + self.client.transformations.delete(id=existing, ignore_unknown_ids=True) return len(existing) def _iterate( @@ -376,7 +373,7 @@ class TransformationScheduleLoader( @property def display_name(self) -> str: - return "transformation.schedules" + return "transformation schedules" @classmethod def get_required_capability( @@ -452,11 +449,14 @@ def _iterate( yield from iter(self.client.transformations.schedules) else: for transformation_id in parent_ids: - if not isinstance(transformation_id, str): - continue - res = self.client.transformations.schedules.retrieve(external_id=transformation_id) - if res: - yield res + if isinstance(transformation_id, str): + res = self.client.transformations.schedules.retrieve(external_id=transformation_id) + if res: + yield res + elif isinstance(transformation_id, int): + res = self.client.transformations.schedules.retrieve(id=transformation_id) + if res: + yield res @final @@ -484,7 +484,7 @@ class TransformationNotificationLoader( @property def display_name(self) -> str: - return "transformation.notifications" + return "transformation notifications" @classmethod def get_id(cls, item: TransformationNotification | TransformationNotificationWrite | dict) -> str: @@ -603,9 +603,10 @@ def _iterate( yield from 
iter(self.client.transformations.notifications) else: for transformation_id in parent_ids: - if not isinstance(transformation_id, str): - continue - yield from self.client.transformations.notifications(transformation_external_id=transformation_id) + if isinstance(transformation_id, str): + yield from self.client.transformations.notifications(transformation_external_id=transformation_id) + elif isinstance(transformation_id, int): + yield from self.client.transformations.notifications(transformation_id=transformation_id) @classmethod def get_dependent_items(cls, item: dict) -> Iterable[tuple[type[ResourceLoader], Hashable]]: diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/workflow_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/workflow_loaders.py index 7c08bfac7..3202b55f6 100644 --- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/workflow_loaders.py +++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders/workflow_loaders.py @@ -81,6 +81,10 @@ class WorkflowLoader(ResourceLoader[str, WorkflowUpsert, Workflow, WorkflowUpser _doc_base_url = "https://api-docs.cognite.com/20230101-beta/tag/" _doc_url = "Workflows/operation/CreateOrUpdateWorkflow" + @property + def display_name(self) -> str: + return "workflows" + @classmethod def get_required_capability( cls, items: WorkflowUpsertList | None, read_only: bool @@ -234,7 +238,7 @@ class WorkflowVersionLoader( @property def display_name(self) -> str: - return "workflow.versions" + return "workflow versions" @classmethod def get_required_capability( @@ -376,7 +380,7 @@ def __init__(self, client: ToolkitClient, build_dir: Path | None): @property def display_name(self) -> str: - return "workflow.triggers" + return "workflow triggers" @classmethod def get_id(cls, item: WorkflowTriggerUpsert | WorkflowTrigger | dict) -> str: diff --git a/cognite_toolkit/_cdf_tk/utils/graphql_parser.py b/cognite_toolkit/_cdf_tk/utils/graphql_parser.py index 68b0b74ea..6fac6de72 100644 --- a/cognite_toolkit/_cdf_tk/utils/graphql_parser.py +++ b/cognite_toolkit/_cdf_tk/utils/graphql_parser.py @@ -115,22 +115,20 @@ def _parse(self) -> "list[_Entity]": if token == "@": is_directive_start = True elif directive_tokens: - if token == "\n" and "{" not in parentheses: - # Throw away. 
- continue # Gather the content of the directive directive_tokens.append(token) elif token == "@": is_directive_start = True - elif is_directive_start and token in ("import", "view"): - directive_tokens = _DirectiveTokens([token]) - is_directive_start = False elif is_directive_start: - # Not a directive we care about + if token in ("import", "view"): + # We only care about import and view directives + directive_tokens = _DirectiveTokens([token]) is_directive_start = False - elif token in ("type", "interface"): + elif token in ("type", "interface") and not parentheses: # Next token starts a new entity definition + # Note that we cannot be inside a parenthesis as that could be a + # property of the entity last_class = token elif last_class is not None and token != "\n": # Start of a new entity definition @@ -152,8 +150,10 @@ class _Directive: @classmethod def load(cls, content: list[str]) -> "_Directive | None": key, *content = content - raw_string = "".join(content).removeprefix("(").removesuffix(")").replace("\n", ",") + raw_string = cls._standardize(content) data = cls._create_args(raw_string) + if isinstance(data, list): + return None if key == "import": return _Import._load(data) if key == "view": @@ -161,23 +161,67 @@ return None @classmethod - def _create_args(cls, string: str) -> dict[str, Any] | str: + def _standardize(cls, content: list[str]) -> str: + """We standardize to use commas as separators instead of newlines. + However, if we are inside a parenthesis we remove the newlines instead. + """ + if not content: + return "" + # Ensure that the content is wrapped in parentheses + # so that we can safely drop the first and last character + if content[0] != "(": + content.insert(0, "(") + if content[-1] != ")": + content.append(")") + + standardized: list[str] = [] + for last, current, next_ in zip(content, content[1:], content[2:]): + if current == "\n" and last in ")}]" and next_ in "({[": + standardized.append(",") + elif current == "\n" and last in "({[": + continue + elif current == "\n" and next_ in ")}]": + continue + elif current == "\n": + standardized.append(",") + else: + standardized.append(current) + return "".join(standardized) + + @classmethod + def _create_args(cls, string: str) -> dict[str, Any] | str | list[Any]: if "," not in string and ":" not in string: return string - output: dict[str, Any] = {} if string[0] == "{" and string[-1] == "}": string = string[1:-1] - if string[0] == ",": - string = string[1:] - if string[-1] == ",": - string = string[:-1] + is_list = False + if string[0] == "[" and string[-1] == "]": + string = string[1:-1] + is_list = True + items: list[Any] = [] + obj: dict[str, Any] = {} + last_pair = "" for pair in cls.SPLIT_ON_COMMA_PATTERN.split(string): stripped = pair.strip() - if not stripped or ":" not in stripped: + if (not stripped or (not is_list and ":" not in stripped)) and not last_pair: continue - key, value = cls._clean(*stripped.split(":", maxsplit=1)) - output[key] = cls._create_args(value) - return output + if last_pair: + stripped = f"{last_pair},{stripped}" + last_pair = "" + # Regex does not deal with nested parentheses + left_count = sum(stripped.count(char) for char in "{[(") + right_count = sum(stripped.count(char) for char in "}])") + if left_count != right_count: + last_pair = stripped + continue + if is_list: + items.append(cls._create_args(stripped)) + else: + key, value = cls._clean(*stripped.split(":", maxsplit=1)) + if set("{[()}]") & set(key): + 
raise ValueError(f"Invalid value {value}") + obj[key] = cls._create_args(value) + return items if is_list else obj @classmethod def _clean(cls, *args: Any) -> Any: @@ -198,7 +242,13 @@ class _ViewDirective(_Directive): def _load(cls, data: dict[str, Any] | str) -> "_ViewDirective": if isinstance(data, str): return _ViewDirective() - return _ViewDirective(space=data.get("space"), external_id=data.get("externalId"), version=data.get("version")) + space = data.get("space") + external_id = data.get("externalId") + version = data.get("version") + for variable in (space, external_id, version): + if variable and not isinstance(variable, str): + raise ValueError(f"Invalid variable {variable}") + return _ViewDirective(space=space, external_id=external_id, version=version) @dataclass diff --git a/cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml b/cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml index dca09f331..b8c4fd595 100644 --- a/cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml +++ b/cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml @@ -14,7 +14,7 @@ jobs: pool: vmImage: 'ubuntu-latest' container: - image: 'cognite/toolkit:0.3.18' + image: 'cognite/toolkit:0.3.19' env: CDF_CLUSTER: $(CDF_CLUSTER) CDF_PROJECT: $(CDF_PROJECT) diff --git a/cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml b/cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml index 811331d1d..e62e2b420 100644 --- a/cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml +++ b/cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml @@ -16,7 +16,7 @@ jobs: pool: vmImage: 'ubuntu-latest' container: - image: 'cognite/toolkit:0.3.18' + image: 'cognite/toolkit:0.3.19' env: CDF_CLUSTER: $(CDF_CLUSTER) CDF_PROJECT: $(CDF_PROJECT) diff --git a/cognite_toolkit/_repo_files/GitHub/.github/workflows/build.yaml b/cognite_toolkit/_repo_files/GitHub/.github/workflows/build.yaml index 2bad0f877..633f733ec 100644 --- a/cognite_toolkit/_repo_files/GitHub/.github/workflows/build.yaml +++ b/cognite_toolkit/_repo_files/GitHub/.github/workflows/build.yaml @@ -10,7 +10,7 @@ jobs: environment: dev name: Deploy Dry Run container: - image: cognite/toolkit:0.3.18 + image: cognite/toolkit:0.3.19 env: CDF_CLUSTER: ${{ vars.CDF_CLUSTER }} CDF_PROJECT: ${{ vars.CDF_PROJECT }} diff --git a/cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml b/cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml index 3ba3f19ba..3beb9b21f 100644 --- a/cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +++ b/cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml @@ -12,7 +12,7 @@ jobs: environment: dev name: Deploy container: - image: cognite/toolkit:0.3.18 + image: cognite/toolkit:0.3.19 env: CDF_CLUSTER: ${{ vars.CDF_CLUSTER }} CDF_PROJECT: ${{ vars.CDF_PROJECT }} diff --git a/cognite_toolkit/_version.py b/cognite_toolkit/_version.py index 50d85c89d..08aad71ad 100644 --- a/cognite_toolkit/_version.py +++ b/cognite_toolkit/_version.py @@ -1 +1 @@ -__version__ = "0.3.18" +__version__ = "0.3.19" diff --git a/poetry.lock b/poetry.lock index f9315b51c..9e3536455 100644 --- a/poetry.lock +++ b/poetry.lock @@ -257,13 +257,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cognite-sdk" -version = "7.69.3" +version = "7.70.2" description = "Cognite Python SDK" optional = false python-versions = "<4.0,>=3.10" files = [ - {file = 
"cognite_sdk-7.69.3-py3-none-any.whl", hash = "sha256:dc73389da0880744c3659d354d4f728be4cacfcc0b8eb91d90b8d2d3ccf9721e"}, - {file = "cognite_sdk-7.69.3.tar.gz", hash = "sha256:c8872010de47c7fbd7249fa60cf01af94d0f27267ff07fb0b4bef6590b6e98ab"}, + {file = "cognite_sdk-7.70.2-py3-none-any.whl", hash = "sha256:9d20f5320a93beac4ad684f710b2eddb0980b54ccf5d6c4fe55af3654fefb93b"}, + {file = "cognite_sdk-7.70.2.tar.gz", hash = "sha256:882c28aba8cdf7c529852a9ac0a885eef7db6e2021667d5a93cfa4f0aa293636"}, ] [package.dependencies] @@ -297,73 +297,73 @@ files = [ [[package]] name = "coverage" -version = "7.6.8" +version = "7.6.9" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, - {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, - {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, - {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, - {file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, - {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, - {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, - {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, - {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, - {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, - {file = 
"coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, - {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, - {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, - {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, - {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, - {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, - {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, - {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, + 
{file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, + {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, + {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, + {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, + {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, + {file = 
"coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, + {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, + {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, + {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, + {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, + {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, + {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, + {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, + {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, + {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, + {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, ] [package.dependencies] @@ -1069,66 +1069,66 @@ files = [ [[package]] name = "numpy" -version = "2.1.3" +version = "2.2.0" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c894b4305373b9c5576d7a12b473702afdf48ce5369c074ba304cc5ad8730dff"}, - {file = "numpy-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b47fbb433d3260adcd51eb54f92a2ffbc90a4595f8970ee00e064c644ac788f5"}, - {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:825656d0743699c529c5943554d223c021ff0494ff1442152ce887ef4f7561a1"}, - {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a4825252fcc430a182ac4dee5a505053d262c807f8a924603d411f6718b88fd"}, - {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e711e02f49e176a01d0349d82cb5f05ba4db7d5e7e0defd026328e5cfb3226d3"}, - {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78574ac2d1a4a02421f25da9559850d59457bac82f2b8d7a44fe83a64f770098"}, - {file = "numpy-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c7662f0e3673fe4e832fe07b65c50342ea27d989f92c80355658c7f888fcc83c"}, - {file = "numpy-2.1.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fa2d1337dc61c8dc417fbccf20f6d1e139896a30721b7f1e832b2bb6ef4eb6c4"}, - {file = "numpy-2.1.3-cp310-cp310-win32.whl", hash = "sha256:72dcc4a35a8515d83e76b58fdf8113a5c969ccd505c8a946759b24e3182d1f23"}, - {file = "numpy-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:ecc76a9ba2911d8d37ac01de72834d8849e55473457558e12995f4cd53e778e0"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4d1167c53b93f1f5d8a139a742b3c6f4d429b54e74e6b57d0eff40045187b15d"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c80e4a09b3d95b4e1cac08643f1152fa71a0a821a2d4277334c88d54b2219a41"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:576a1c1d25e9e02ed7fa5477f30a127fe56debd53b8d2c89d5578f9857d03ca9"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:973faafebaae4c0aaa1a1ca1ce02434554d67e628b8d805e61f874b84e136b09"}, - {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:762479be47a4863e261a840e8e01608d124ee1361e48b96916f38b119cfda04a"}, - {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f24b3d1ecc1eebfbf5d6051faa49af40b03be1aaa781ebdadcbc090b4539b"}, - {file = "numpy-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:17ee83a1f4fef3c94d16dc1802b998668b5419362c8a4f4e8a491de1b41cc3ee"}, - {file = "numpy-2.1.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15cb89f39fa6d0bdfb600ea24b250e5f1a3df23f901f51c8debaa6a5d122b2f0"}, - {file = "numpy-2.1.3-cp311-cp311-win32.whl", hash = "sha256:d9beb777a78c331580705326d2367488d5bc473b49a9bc3036c154832520aca9"}, - {file = "numpy-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:d89dd2b6da69c4fff5e39c28a382199ddedc3a5be5390115608345dec660b9e2"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f55ba01150f52b1027829b50d70ef1dafd9821ea82905b63936668403c3b471e"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13138eadd4f4da03074851a698ffa7e405f41a0845a6b1ad135b81596e4e9958"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a6b46587b14b888e95e4a24d7b13ae91fa22386c199ee7b418f449032b2fa3b8"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:0fa14563cc46422e99daef53d725d0c326e99e468a9320a240affffe87852564"}, - {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8637dcd2caa676e475503d1f8fdb327bc495554e10838019651b76d17b98e512"}, - {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2312b2aa89e1f43ecea6da6ea9a810d06aae08321609d8dc0d0eda6d946a541b"}, - {file = "numpy-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a38c19106902bb19351b83802531fea19dee18e5b37b36454f27f11ff956f7fc"}, - {file = "numpy-2.1.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02135ade8b8a84011cbb67dc44e07c58f28575cf9ecf8ab304e51c05528c19f0"}, - {file = "numpy-2.1.3-cp312-cp312-win32.whl", hash = 
"sha256:e6988e90fcf617da2b5c78902fe8e668361b43b4fe26dbf2d7b0f8034d4cafb9"}, - {file = "numpy-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:0d30c543f02e84e92c4b1f415b7c6b5326cbe45ee7882b6b77db7195fb971e3a"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96fe52fcdb9345b7cd82ecd34547fca4321f7656d500eca497eb7ea5a926692f"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f653490b33e9c3a4c1c01d41bc2aef08f9475af51146e4a7710c450cf9761598"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dc258a761a16daa791081d026f0ed4399b582712e6fc887a95af09df10c5ca57"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:016d0f6f5e77b0f0d45d77387ffa4bb89816b57c835580c3ce8e099ef830befe"}, - {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c181ba05ce8299c7aa3125c27b9c2167bca4a4445b7ce73d5febc411ca692e43"}, - {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5641516794ca9e5f8a4d17bb45446998c6554704d888f86df9b200e66bdcce56"}, - {file = "numpy-2.1.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ea4dedd6e394a9c180b33c2c872b92f7ce0f8e7ad93e9585312b0c5a04777a4a"}, - {file = "numpy-2.1.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0df3635b9c8ef48bd3be5f862cf71b0a4716fa0e702155c45067c6b711ddcef"}, - {file = "numpy-2.1.3-cp313-cp313-win32.whl", hash = "sha256:50ca6aba6e163363f132b5c101ba078b8cbd3fa92c7865fd7d4d62d9779ac29f"}, - {file = "numpy-2.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:747641635d3d44bcb380d950679462fae44f54b131be347d5ec2bce47d3df9ed"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:996bb9399059c5b82f76b53ff8bb686069c05acc94656bb259b1d63d04a9506f"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:45966d859916ad02b779706bb43b954281db43e185015df6eb3323120188f9e4"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:baed7e8d7481bfe0874b566850cb0b85243e982388b7b23348c6db2ee2b2ae8e"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f7f672a3388133335589cfca93ed468509cb7b93ba3105fce780d04a6576a0"}, - {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7aac50327da5d208db2eec22eb11e491e3fe13d22653dce51b0f4109101b408"}, - {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4394bc0dbd074b7f9b52024832d16e019decebf86caf909d94f6b3f77a8ee3b6"}, - {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:50d18c4358a0a8a53f12a8ba9d772ab2d460321e6a93d6064fc22443d189853f"}, - {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:14e253bd43fc6b37af4921b10f6add6925878a42a0c5fe83daee390bca80bc17"}, - {file = "numpy-2.1.3-cp313-cp313t-win32.whl", hash = "sha256:08788d27a5fd867a663f6fc753fd7c3ad7e92747efc73c53bca2f19f8bc06f48"}, - {file = "numpy-2.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2564fbdf2b99b3f815f2107c1bbc93e2de8ee655a69c261363a1172a79a257d4"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4f2015dfe437dfebbfce7c85c7b53d81ba49e71ba7eadbf1df40c915af75979f"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3522b0dfe983a575e6a9ab3a4a4dfe156c3e428468ff08ce582b9bb6bd1d71d4"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c006b607a865b07cd981ccb218a04fc86b600411d83d6fc261357f1c0966755d"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e14e26956e6f1696070788252dcdff11b4aca4c3e8bd166e0df1bb8f315a67cb"}, - {file = "numpy-2.1.3.tar.gz", hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = 
"numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = 
"numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, ] [[package]] @@ -1246,13 +1246,13 @@ xml = ["lxml (>=4.9.2)"] [[package]] name = "pkginfo" -version = "1.10.0" +version = "1.12.0" description = "Query metadata from sdists / bdists / installed packages." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"}, - {file = "pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297"}, + {file = "pkginfo-1.12.0-py3-none-any.whl", hash = "sha256:dcd589c9be4da8973eceffa247733c144812759aa67eaf4bbf97016a02f39088"}, + {file = "pkginfo-1.12.0.tar.gz", hash = "sha256:8ad91a0445a036782b9366ef8b8c2c50291f83a553478ba8580c73d3215700cf"}, ] [package.extras] @@ -1334,22 +1334,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "5.29.0" +version = "5.29.1" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.29.0-cp310-abi3-win32.whl", hash = "sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2"}, - {file = "protobuf-5.29.0-cp310-abi3-win_amd64.whl", hash = "sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069"}, - {file = "protobuf-5.29.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20"}, - {file = "protobuf-5.29.0-cp38-cp38-win32.whl", hash = "sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a"}, - {file = "protobuf-5.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86"}, - {file = "protobuf-5.29.0-cp39-cp39-win32.whl", hash = "sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac"}, - {file = "protobuf-5.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368"}, - {file = "protobuf-5.29.0-py3-none-any.whl", 
hash = "sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f"}, - {file = "protobuf-5.29.0.tar.gz", hash = "sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001"}, + {file = "protobuf-5.29.1-cp310-abi3-win32.whl", hash = "sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110"}, + {file = "protobuf-5.29.1-cp310-abi3-win_amd64.whl", hash = "sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34"}, + {file = "protobuf-5.29.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18"}, + {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155"}, + {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d"}, + {file = "protobuf-5.29.1-cp38-cp38-win32.whl", hash = "sha256:50879eb0eb1246e3a5eabbbe566b44b10348939b7cc1b267567e8c3d07213853"}, + {file = "protobuf-5.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:027fbcc48cea65a6b17028510fdd054147057fa78f4772eb547b9274e5219331"}, + {file = "protobuf-5.29.1-cp39-cp39-win32.whl", hash = "sha256:5a41deccfa5e745cef5c65a560c76ec0ed8e70908a67cc8f4da5fce588b50d57"}, + {file = "protobuf-5.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:012ce28d862ff417fd629285aca5d9772807f15ceb1a0dbd15b88f58c776c98c"}, + {file = "protobuf-5.29.1-py3-none-any.whl", hash = "sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0"}, + {file = "protobuf-5.29.1.tar.gz", hash = "sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb"}, ] [[package]] @@ -1365,13 +1365,13 @@ files = [ [[package]] name = "pydantic" -version = "2.10.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, - {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] @@ -1929,13 +1929,13 @@ jeepney = ">=0.6" [[package]] name = "sentry-sdk" -version = "2.19.0" +version = "2.19.2" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" files = [ - {file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"}, - {file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"}, + {file = "sentry_sdk-2.19.2-py2.py3-none-any.whl", hash = "sha256:ebdc08228b4d131128e568d696c210d846e5b9d70aa0327dec6b1272d9d40b84"}, + {file = "sentry_sdk-2.19.2.tar.gz", hash = "sha256:467df6e126ba242d39952375dd816fbee0f217d119bf454a8ce74cf1e7909e8d"}, ] [package.dependencies] @@ -2014,13 +2014,13 @@ files = [ [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -2077,19 +2077,19 @@ files = [ [[package]] name = "twine" -version = "5.1.1" +version = "6.0.1" description = "Collection of utilities for publishing packages on PyPI" optional = false python-versions = ">=3.8" files = [ - {file = "twine-5.1.1-py3-none-any.whl", hash = "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997"}, - {file = "twine-5.1.1.tar.gz", hash = "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db"}, + {file = "twine-6.0.1-py3-none-any.whl", hash = "sha256:9c6025b203b51521d53e200f4a08b116dee7500a38591668c6a6033117bdc218"}, + {file = "twine-6.0.1.tar.gz", hash = "sha256:36158b09df5406e1c9c1fb8edb24fc2be387709443e7376689b938531582ee27"}, ] [package.dependencies] -importlib-metadata = ">=3.6" -keyring = ">=15.1" -pkginfo = ">=1.8.1,<1.11" +keyring = {version = ">=15.1", markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\""} +packaging = "*" +pkginfo = ">=1.8.1" readme-renderer = ">=35.0" requests = ">=2.20" requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" @@ -2097,15 +2097,18 @@ rfc3986 = ">=1.4.0" rich = ">=12.0.0" urllib3 = ">=1.26.0" +[package.extras] +keyring = ["keyring (>=15.1)"] + [[package]] name = "typer" -version = "0.14.0" +version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.14.0-py3-none-any.whl", hash = "sha256:f476233a25770ab3e7b2eebf7c68f3bc702031681a008b20167573a4b7018f09"}, - {file = "typer-0.14.0.tar.gz", hash = "sha256:af58f737f8d0c0c37b9f955a6d39000b9ff97813afcbeef56af5e37cf743b45a"}, + {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, + {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, ] [package.dependencies] @@ -2242,4 +2245,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "0a918e320e45a0ec13d81cfd58ca490e8173fc075e2028b86b33a2292ca56def" +content-hash = "fe0c196be25dd3ef1d5061f01e7d1b3e06f2fd4304ee2252a73d720b8bcc0e58" diff --git a/pyproject.toml b/pyproject.toml index 6602c8904..52b936afe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cognite_toolkit" -version = "0.3.18" +version = "0.3.19" description = "Official Cognite Data Fusion tool for project templates and configuration deployment" authors = ["Cognite AS "] license = "Apache-2" @@ -38,7 +38,7 @@ pytest-xdist = "^3.6.1" pytest-rerunfailures = "^14.0" types-PyYAML = "^6" types-toml = "^0.10.8.20240310" -twine = "^5.0.0" +twine = "^6.0.0" toml = { version = "^0.10.2", python = ">=3.11" } pytest-freezegun = "^0.4.2" pytest-cov = "^6.0.0" diff --git a/tests/data/build_group_with_unknown_acl/_build_environment.yaml b/tests/data/build_group_with_unknown_acl/_build_environment.yaml index 499f5ca3b..c49d79068 100644 --- a/tests/data/build_group_with_unknown_acl/_build_environment.yaml +++ b/tests/data/build_group_with_unknown_acl/_build_environment.yaml @@ -4,4 +4,4 @@ project: pytest-project type: dev selected: - modules -cdf_toolkit_version: 0.3.18 +cdf_toolkit_version: 0.3.19 diff --git a/tests/data/cdf_toml_data/cdf.toml b/tests/data/cdf_toml_data/cdf.toml index 0b830ffcd..9c0d8cc30 100644 --- a/tests/data/cdf_toml_data/cdf.toml +++ b/tests/data/cdf_toml_data/cdf.toml @@ -1,7 +1,7 @@ [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. -version = "0.3.18" +version = "0.3.19" [plugins] graphql = true diff --git a/tests/data/complete_org/cdf.toml b/tests/data/complete_org/cdf.toml index bbbe31ff9..4151f633c 100644 --- a/tests/data/complete_org/cdf.toml +++ b/tests/data/complete_org/cdf.toml @@ -1,4 +1,4 @@ [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. -version = "0.3.18" +version = "0.3.19" diff --git a/tests/data/complete_org_alpha_flags/cdf.toml b/tests/data/complete_org_alpha_flags/cdf.toml index bbbe31ff9..4151f633c 100644 --- a/tests/data/complete_org_alpha_flags/cdf.toml +++ b/tests/data/complete_org_alpha_flags/cdf.toml @@ -1,4 +1,4 @@ [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. -version = "0.3.18" +version = "0.3.19" diff --git a/tests/data/project_no_cognite_modules/cdf.toml b/tests/data/project_no_cognite_modules/cdf.toml index bbbe31ff9..4151f633c 100644 --- a/tests/data/project_no_cognite_modules/cdf.toml +++ b/tests/data/project_no_cognite_modules/cdf.toml @@ -1,4 +1,4 @@ [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. 
-version = "0.3.18" +version = "0.3.19" diff --git a/tests/data/project_with_bad_modules/cdf.toml b/tests/data/project_with_bad_modules/cdf.toml index bbbe31ff9..4151f633c 100644 --- a/tests/data/project_with_bad_modules/cdf.toml +++ b/tests/data/project_with_bad_modules/cdf.toml @@ -1,4 +1,4 @@ [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. -version = "0.3.18" +version = "0.3.19" diff --git a/tests/data/project_with_duplicates/cdf.toml b/tests/data/project_with_duplicates/cdf.toml index bbbe31ff9..4151f633c 100644 --- a/tests/data/project_with_duplicates/cdf.toml +++ b/tests/data/project_with_duplicates/cdf.toml @@ -1,4 +1,4 @@ [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. -version = "0.3.18" +version = "0.3.19" diff --git a/tests/data/resources_with_environment_variables/modules/example_module/transformations/first.Transformation.yaml b/tests/data/resources_with_environment_variables/modules/example_module/transformations/first.Transformation.yaml index 06e7c37fb..f39284b98 100644 --- a/tests/data/resources_with_environment_variables/modules/example_module/transformations/first.Transformation.yaml +++ b/tests/data/resources_with_environment_variables/modules/example_module/transformations/first.Transformation.yaml @@ -11,4 +11,5 @@ authentication: tokenUri: https://foo.bar cdfProjectName: foo scopes: bar - audience: baz \ No newline at end of file + audience: baz +query: first.Transformation.sql diff --git a/tests/data/run_data/cdf.toml b/tests/data/run_data/cdf.toml index cad897fed..62705d1b7 100644 --- a/tests/data/run_data/cdf.toml +++ b/tests/data/run_data/cdf.toml @@ -1,7 +1,7 @@ [modules] # This is the version of the modules. It should not be changed manually. # It will be updated by the 'cdf module upgrade' command. 
-version = "0.3.18" +version = "0.3.19" [modules.packages] cdf_infield = [ diff --git a/tests/test_unit/test_cdf_tk/test_data_classes/test_build_variables.py b/tests/test_unit/test_cdf_tk/test_data_classes/test_build_variables.py index 70c9f339b..47dae93f9 100644 --- a/tests/test_unit/test_cdf_tk/test_data_classes/test_build_variables.py +++ b/tests/test_unit/test_cdf_tk/test_data_classes/test_build_variables.py @@ -73,6 +73,34 @@ def test_replace_not_preserve_type(self) -> None: assert result == "dataset_id('ds_external_id')" + def test_replace_inline_sql_preserve_double_quotes(self) -> None: + source_yaml = """externalId: some_id +name: Some Transformation +destination: + type: nodes + view: + space: cdf_cdm + externalId: CogniteTimeSeries + version: v1 + instanceSpace: my_instance_space +query: >- + select "fpso_{{location_id}}" as externalId, "{{location_ID}}" as uid, "{{location_ID}}" as description +""" + variables = BuildVariables.load_raw( + { + "location_id": "uny", + "location_ID": "UNY", + }, + available_modules=set(), + selected_modules=set(), + ) + + result = variables.replace(source_yaml, file_suffix=".yaml") + + loaded = yaml.safe_load(result) + + assert loaded["query"] == 'select "fpso_uny" as externalId, "UNY" as uid, "UNY" as description' + def test_get_module_variables_variable_preference_order(self) -> None: source_yaml = """ modules: diff --git a/tests/test_unit/test_cdf_tk/test_loaders/test_transformation_loader.py b/tests/test_unit/test_cdf_tk/test_loaders/test_transformation_loader.py index 9cf701b6e..6ebfe3983 100644 --- a/tests/test_unit/test_cdf_tk/test_loaders/test_transformation_loader.py +++ b/tests/test_unit/test_cdf_tk/test_loaders/test_transformation_loader.py @@ -1,7 +1,6 @@ -import pathlib from collections.abc import Hashable from pathlib import Path -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock import pytest import yaml @@ -89,6 +88,7 @@ def _create_mock_file(content: str) -> MagicMock: filepath.read_text.return_value = content filepath.name = "transformation.yaml" filepath.stem = "transformation" + filepath.parent = Path("path") return filepath def test_oidc_raise_if_invalid( @@ -111,23 +111,6 @@ def test_oidc_raise_if_invalid( with pytest.raises(ToolkitYAMLFormatError): loader.load_resource(filepath, cdf_tool_real, skip_validation=False) - def test_sql_file( - self, - toolkit_client_approval: ApprovalToolkitClient, - cdf_tool_real: CDFToolConfig, - monkeypatch: MonkeyPatch, - ) -> None: - loader = TransformationLoader(toolkit_client_approval.mock_client, None) - - resource = yaml.CSafeLoader(self.trafo_yaml).get_data() - resource.pop("query") - filepath = self._create_mock_file(yaml.dump(resource)) - - with patch.object(TransformationLoader, "_get_query_file", return_value=Path("transformation.sql")): - with patch.object(pathlib.Path, "read_text", return_value=self.trafo_sql): - loaded = loader.load_resource(filepath, cdf_tool_real, skip_validation=False) - assert loaded.query == self.trafo_sql - def test_sql_inline( self, toolkit_client_approval: ApprovalToolkitClient, @@ -139,24 +122,8 @@ def test_sql_inline( filepath = self._create_mock_file(self.trafo_yaml) resource = yaml.CSafeLoader(self.trafo_yaml).get_data() - with patch.object(TransformationLoader, "_get_query_file", return_value=None): - loaded = loader.load_resource(filepath, cdf_tool_real, skip_validation=False) - assert loaded.query == resource["query"] - - def test_if_ambiguous( - self, - toolkit_client_approval: ApprovalToolkitClient, - cdf_tool_real: 
CDFToolConfig, - monkeypatch: MonkeyPatch, - ) -> None: - loader = TransformationLoader(toolkit_client_approval.mock_client, None) - - filepath = self._create_mock_file(self.trafo_yaml) - - with pytest.raises(ToolkitYAMLFormatError): - with patch.object(TransformationLoader, "_get_query_file", return_value=Path("transformation.sql")): - with patch.object(pathlib.Path, "read_text", return_value=self.trafo_sql): - loader.load_resource(filepath, cdf_tool_real, skip_validation=False) + loaded = loader.load_resource(filepath, cdf_tool_real, skip_validation=False) + assert loaded.query == resource["query"] @pytest.mark.parametrize( "item, expected", diff --git a/tests/test_unit/test_cdf_tk/test_utils/__init__.py b/tests/test_unit/test_cdf_tk/test_utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/test_unit/test_cdf_tk/test_utils/test_graphql_parser.py b/tests/test_unit/test_cdf_tk/test_utils/test_graphql_parser.py new file mode 100644 index 000000000..0fe8bd82d --- /dev/null +++ b/tests/test_unit/test_cdf_tk/test_utils/test_graphql_parser.py @@ -0,0 +1,256 @@ +import pytest +from cognite.client.data_classes.data_modeling import DataModelId, ViewId + +from cognite_toolkit._cdf_tk.utils import ( + GraphQLParser, +) +from cognite_toolkit._cdf_tk.utils.graphql_parser import _Directive, _DirectiveTokens, _ViewDirective + +SPACE = "sp_my_space" +DATA_MODEL = DataModelId(SPACE, "MyDataModel", "v1") +GraphQLTestCases = [ + pytest.param( + """interface Creatable @view(space: "cdf_apps_shared", version: "v1") @import { + visibility: String + createdBy: CDF_User + updatedBy: CDF_User + isArchived: Boolean +}""", + DATA_MODEL, + set(), + {ViewId("cdf_apps_shared", "Creatable", "v1")}, + id="Imported interface", + ), + pytest.param( + """"@code WOOL" +type WorkOrderObjectListItem @import(dataModel: {externalId: "MaintenanceDOM", version: "2_2_0", space: "EDG-COR-ALL-DMD"}) { + ... +}""", + DATA_MODEL, + set(), + {DataModelId(space="EDG-COR-ALL-DMD", external_id="MaintenanceDOM", version="2_2_0")}, + id="Imported data model", + ), + pytest.param( + ''' +""" +@name Notification +@description This is a SAP notification object. +@code NOTI +""" +type Notification @container(indexes: [{identifier: "refCostCenterIndex", indexType: BTREE, fields: ["refCostCenter"]}, {identifier: "createdDateIndex", indexType: BTREE, fields: ["createdDate"], cursorable: true}]) @import(dataModel: {externalId: "MaintenanceDOM", version: "2_2_0", space: "EDG-COR-ALL-DMD"}) { + ... + } +''', + DATA_MODEL, + set(), + {DataModelId(space="EDG-COR-ALL-DMD", external_id="MaintenanceDOM", version="2_2_0")}, + id="Imported data model with unused container directive", + ), + pytest.param( + ''' + """ +@code WKCC +@Description Work Center Category in SAP +""" +type WorkCenterCategory { + """ + @name Name + """ + name: String! + """ + @name Description + """ + description: String + """ + @name Code + """ + code: String +}''', + DATA_MODEL, + {ViewId(SPACE, "WorkCenterCategory", None)}, + set(), + id="Simple type", + ), + pytest.param( + """type Cdf3dConnectionProperties + @import + @view(space: "cdf_3d_schema", version: "1") + @edge + @container( + constraints: [ + { + identifier: "uniqueNodeRevisionConstraint" + constraintType: UNIQUENESS + fields: ["revisionId", "revisionNodeId"] + } + ] + ) { + revisionId: Int64! + revisionNodeId: Int64! 
+}""", + DATA_MODEL, + set(), + {ViewId("cdf_3d_schema", "Cdf3dConnectionProperties", "1")}, + id="Edge type", + ), + pytest.param( + """type APM_User @view (version: "7") { + name: String + email: String + lastSeen: Timestamp + preferences: JSONObject +}""", + DATA_MODEL, + {ViewId(SPACE, "APM_User", "7")}, + set(), + id="Simple type with version", + ), + pytest.param( + """type UnitOfMeasurement + @import( + dataModel: { + externalId: "CoreDOM" + version: "1_0_18" + space: "EDG-COR-ALL-DMD" + } + ) { + name: String! +}""", + DATA_MODEL, + set(), + {DataModelId(space="EDG-COR-ALL-DMD", external_id="CoreDOM", version="1_0_18")}, + id="No comma, only newline", + ), + pytest.param( + '''""" + @name Name }{ Breaks the parser +""" +type UnitOfMeasurement + @import( + dataModel: { + externalId: "CoreDOM" + version: "1_0_18" + space: "EDG-COR-ALL-DMD" + } + ) { + name: String! +}''', + DATA_MODEL, + set(), + {DataModelId(space="EDG-COR-ALL-DMD", external_id="CoreDOM", version="1_0_18")}, + id="Ignore comments", + ), + pytest.param( + """type APM_Config @view { + name: String + appDataSpaceId: String + appDataSpaceVersion: String + customerDataSpaceId: String + customerDataSpaceVersion: String + featureConfiguration: JSONObject + fieldConfiguration: JSONObject + rootLocationsConfiguration: JSONObject +}""", + DATA_MODEL, + {ViewId(SPACE, "APM_Config", None)}, + set(), + id="No version", + ), + pytest.param( + """"Navigational aid for traversing CogniteCADModel instances" +type CogniteCADModel implements CogniteDescribable & Cognite3DModel + @view( + space: "cdf_cdm" + version: "v1" + rawFilter: { + and: [ + { + hasData: [ + { + type: "container" + space: "cdf_cdm_3d" + externalId: "Cognite3DModel" + } + ] + } + { + equals: { + property: ["cdf_cdm_3d", "Cognite3DModel", "type"] + value: "CAD" + } + } + ] + } + ) + @import { + "Name of the instance" + name: String + "Description of the instance" + description: String + "Text based labels for generic use, limited to 1000" + tags: [String] + "Alternative names for the node" + aliases: [String] + "CAD, PointCloud or Image360" + type: Cognite3DModel_type + "Thumbnail of the 3D model" + thumbnail: CogniteFile + "List of revisions for this CAD model" + revisions: [CogniteCADRevision] + @reverseDirectRelation(throughProperty: "model3D") +}""", + DATA_MODEL, + set(), + {ViewId("cdf_cdm", "CogniteCADModel", "v1")}, + id="Setting custom filter on view", + ), +] + +DirectiveTestCases = [ + pytest.param( + """view( + space: "cdf_cdm" + version: "v1" + rawFilter: { + and: [ + { + hasData: [ + { + type: "container" + space: "cdf_cdm_3d" + externalId: "Cognite3DModel" + } + ] + } + { + equals: { + property: ["cdf_cdm_3d", "Cognite3DModel", "type"] + value: "CAD" + } + } + ] + } + )""", + _ViewDirective(space="cdf_cdm", version="v1"), + ) +] + + +class TestGraphQLParser: + @pytest.mark.parametrize("raw, data_model_id, expected_views, dependencies", GraphQLTestCases) + def test_parse( + self, raw: str, data_model_id: DataModelId, expected_views: set[ViewId], dependencies: set[ViewId | DataModelId] + ) -> None: + parser = GraphQLParser(raw, data_model_id) + + actual_views = parser.get_views(include_version=True) + assert expected_views == actual_views + actual_dependencies = parser.get_dependencies(include_version=True) + assert dependencies == actual_dependencies + + @pytest.mark.parametrize("string, expected", DirectiveTestCases) + def test_create_directive(self, string: str, expected: _Directive) -> None: + tokens = GraphQLParser._token_pattern.findall(string) + 
actual = _DirectiveTokens(tokens).create() + assert expected == actual diff --git a/tests/test_unit/test_cdf_tk/test_utils.py b/tests/test_unit/test_cdf_tk/test_utils/test_utils.py similarity index 82% rename from tests/test_unit/test_cdf_tk/test_utils.py rename to tests/test_unit/test_cdf_tk/test_utils/test_utils.py index de1ad457e..a22035b8a 100644 --- a/tests/test_unit/test_cdf_tk/test_utils.py +++ b/tests/test_unit/test_cdf_tk/test_utils/test_utils.py @@ -20,7 +20,6 @@ ProjectCapabilityList, ProjectsScope, ) -from cognite.client.data_classes.data_modeling import DataModelId, ViewId from cognite.client.data_classes.iam import ProjectSpec from cognite.client.exceptions import CogniteAuthError from pytest import MonkeyPatch @@ -32,7 +31,6 @@ AuthReader, AuthVariables, CDFToolConfig, - GraphQLParser, calculate_directory_hash, flatten_dict, iterate_modules, @@ -444,173 +442,6 @@ def test_flatten_dict(input_: dict[str, Any], expected: dict[str, Any]) -> None: assert actual == expected -SPACE = "sp_my_space" -DATA_MODEL = DataModelId(SPACE, "MyDataModel", "v1") -GraphQLTestCases = [ - pytest.param( - """interface Creatable @view(space: "cdf_apps_shared", version: "v1") @import { - visibility: String - createdBy: CDF_User - updatedBy: CDF_User - isArchived: Boolean -}""", - DATA_MODEL, - set(), - {ViewId("cdf_apps_shared", "Creatable", "v1")}, - id="Imported interface", - ), - pytest.param( - """"@code WOOL" -type WorkOrderObjectListItem @import(dataModel: {externalId: "MaintenanceDOM", version: "2_2_0", space: "EDG-COR-ALL-DMD"}) { - ... -}""", - DATA_MODEL, - set(), - {DataModelId(space="EDG-COR-ALL-DMD", external_id="MaintenanceDOM", version="2_2_0")}, - id="Imported data model", - ), - pytest.param( - ''' -""" -@name Notification -@description This is a SAP notification object. -@code NOTI -""" -type Notification @container(indexes: [{identifier: "refCostCenterIndex", indexType: BTREE, fields: ["refCostCenter"]}, {identifier: "createdDateIndex", indexType: BTREE, fields: ["createdDate"], cursorable: true}]) @import(dataModel: {externalId: "MaintenanceDOM", version: "2_2_0", space: "EDG-COR-ALL-DMD"}) { - ... - } -''', - DATA_MODEL, - set(), - {DataModelId(space="EDG-COR-ALL-DMD", external_id="MaintenanceDOM", version="2_2_0")}, - id="Imported data model with unused container directive", - ), - pytest.param( - ''' - """ -@code WKCC -@Description Work Center Category in SAP -""" -type WorkCenterCategory { - """ - @name Name - """ - name: String! - """ - @name Description - """ - description: String - """ - @name Code - """ - code: String -}''', - DATA_MODEL, - {ViewId(SPACE, "WorkCenterCategory", None)}, - set(), - id="Simple type", - ), - pytest.param( - """type Cdf3dConnectionProperties - @import - @view(space: "cdf_3d_schema", version: "1") - @edge - @container( - constraints: [ - { - identifier: "uniqueNodeRevisionConstraint" - constraintType: UNIQUENESS - fields: ["revisionId", "revisionNodeId"] - } - ] - ) { - revisionId: Int64! - revisionNodeId: Int64! -}""", - DATA_MODEL, - set(), - {ViewId("cdf_3d_schema", "Cdf3dConnectionProperties", "1")}, - id="Edge type", - ), - pytest.param( - """type APM_User @view (version: "7") { - name: String - email: String - lastSeen: Timestamp - preferences: JSONObject -}""", - DATA_MODEL, - {ViewId(SPACE, "APM_User", "7")}, - set(), - id="Simple type with version", - ), - pytest.param( - """type UnitOfMeasurement - @import( - dataModel: { - externalId: "CoreDOM" - version: "1_0_18" - space: "EDG-COR-ALL-DMD" - } - ) { - name: String! 
-}""", - DATA_MODEL, - set(), - {DataModelId(space="EDG-COR-ALL-DMD", external_id="CoreDOM", version="1_0_18")}, - id="No comma, only newline", - ), - pytest.param( - '''""" - @name Name }{ Breaks the parser -""" -type UnitOfMeasurement - @import( - dataModel: { - externalId: "CoreDOM" - version: "1_0_18" - space: "EDG-COR-ALL-DMD" - } - ) { - name: String! -}''', - DATA_MODEL, - set(), - {DataModelId(space="EDG-COR-ALL-DMD", external_id="CoreDOM", version="1_0_18")}, - id="Ignore comments", - ), - pytest.param( - """type APM_Config @view { - name: String - appDataSpaceId: String - appDataSpaceVersion: String - customerDataSpaceId: String - customerDataSpaceVersion: String - featureConfiguration: JSONObject - fieldConfiguration: JSONObject - rootLocationsConfiguration: JSONObject -}""", - DATA_MODEL, - {ViewId(SPACE, "APM_Config", None)}, - set(), - id="No version", - ), -] - - -class TestGraphQLParser: - @pytest.mark.parametrize("raw, data_model_id, expected_views, dependencies", GraphQLTestCases) - def test_parse( - self, raw: str, data_model_id: DataModelId, expected_views: set[ViewId], dependencies: set[ViewId | DataModelId] - ) -> None: - parser = GraphQLParser(raw, data_model_id) - - actual_views = parser.get_views(include_version=True) - assert expected_views == actual_views - actual_dependencies = parser.get_dependencies(include_version=True) - assert dependencies == actual_dependencies - - def quote_key_in_yaml_test_cases() -> Iterable[ParameterSet]: yield pytest.param( """space: my_space diff --git a/tests/test_unit/test_cli/test_behavior.py b/tests/test_unit/test_cli/test_behavior.py index 3d3e85ac9..3fe68bcdc 100644 --- a/tests/test_unit/test_cli/test_behavior.py +++ b/tests/test_unit/test_cli/test_behavior.py @@ -118,6 +118,9 @@ def load_transformation() -> TransformationWrite: content = content.replace("{{cdfProjectName}}", "123") content = content.replace("{{cicd_scopes}}", "scope") content = content.replace("{{cicd_audience}}", "123") + # The loader expects the query to have a reference to the transformation file. + # This is a workaround for this test. 
+ content += f"\nquery: {transformation_yaml.with_suffix('.sql').name}" transformation_yaml.write_text(content) transformation = loader.load_resource(transformation_yaml, cdf_tool_mock, skip_validation=True) @@ -237,7 +240,7 @@ def test_dump_datamodel( ) toolkit_client_approval.append(dm.Space, space) toolkit_client_approval.append(dm.Container, container) - toolkit_client_approval.append(dm.View, view) + toolkit_client_approval.append(dm.View, parent_view) toolkit_client_approval.append(dm.DataModel, data_model) app = DumpApp() app.dump_datamodel_cmd( diff --git a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_common.yaml b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_common.yaml index 24d742806..62f8445b5 100644 --- a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_common.yaml +++ b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_common.yaml @@ -36,8 +36,8 @@ ExtractionPipeline: name: Direct Relation Writer ExtractionPipelineConfig: - config: "state:\n rawDatabase: contextualizationState\n rawTable: diagramParsing\n\ - data:\n annotationSpace: springfield_instances\n directRelationMappings:\n \ - \ - startNodeView:\n space: cdf_cdm\n externalId: CogniteFile\n\ + data:\n annotationSpace: 'springfield_instances'\n directRelationMappings:\n\ + \ - startNodeView:\n space: cdf_cdm\n externalId: CogniteFile\n\ \ version: v1\n directRelationProperty: assets\n endNodeView:\n\ \ space: cdf_cdm\n externalId: CogniteAsset\n version: v1\n\ \ - startNodeView:\n type: view\n space: cdf_cdm\n externalId:\ diff --git a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_3d_valhall.yaml b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_3d_valhall.yaml index fd45628a8..f4da9b37d 100644 --- a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_3d_valhall.yaml +++ b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_3d_valhall.yaml @@ -80,7 +80,7 @@ ExtractionPipelineConfig: - config: "data:\n debug: False\n runAll: False\n rawdb: '3d_oid_fileshare'\n \ \ rawTableGood: 'contextualization_good'\n rawTableBad: 'contextualization_bad'\n\ \ rawTableManual: 'contextualization_manual_input'\n assetRootExtId: WMT:VAL\n\ - \ 3dDataSetExtId: 'ds_3d_oid'\n 3dModelName: Valhall\n matchThreshold: 0.75\n\ + \ 3dDataSetExtId: 'ds_3d_oid'\n 3dModelName: 'Valhall'\n matchThreshold: 0.75\n\ \ keepOldMapping: False\n" externalId: ep_ctx_3d_oid_fileshare_annotation - config: "logger:\n # Logging to console/terminal. 
Remove or comment out to disable\ diff --git a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_files_valhall.yaml b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_files_valhall.yaml index 121afa0c2..2ff93318e 100644 --- a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_files_valhall.yaml +++ b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_files_valhall.yaml @@ -34,8 +34,8 @@ ExtractionPipeline: \ documents, if False only run on document not updated since last annotation\n\ \ runAll: False\n # Number of document from each asset to annotate -1 = All, else\ \ number is used\n docLimit: -1\n # Data set to read documents / P&ID from + update\ - \ with annotated timestamp when done\n docDataSetExtId: ds_files_oid\n # In document\ - \ metadata, where is location of property for P&ID type documents\n docTypeMetaCol:\ + \ with annotated timestamp when done\n docDataSetExtId: \"ds_files_oid\"\n # In\ + \ document metadata, where is location of property for P&ID type documents\n docTypeMetaCol:\ \ \"doc_type\"\n # Document type for P&ID type documents\n pAndIdDocType: \"PNID\"\ \n # List of externalId for root assets to be used for annotation of documents.\ \ Documents are also extracted based on the root asset ID\n assetRootExtIds: [WMT:VAL]\n\ @@ -65,8 +65,8 @@ ExtractionPipeline: source: fileshare ExtractionPipelineConfig: - config: "data:\n debug: False\n runAll: False\n docLimit: -1\n docDataSetExtId:\ - \ ds_files_oid\n docTypeMetaCol: \"doc_type\"\n pAndIdDocType: \"PNID\"\n assetRootExtIds:\ - \ [WMT:VAL]\n matchThreshold: 0.85\n \n" + \ \"ds_files_oid\"\n docTypeMetaCol: \"doc_type\"\n pAndIdDocType: \"PNID\"\n\ + \ assetRootExtIds: [WMT:VAL]\n matchThreshold: 0.85\n \n" externalId: ep_ctx_files_oid_fileshare_pandid_annotation - config: "logger:\n # Logging to console/terminal. Remove or comment out to disable\ \ terminal\n # logging\n console:\n level: INFO\n\n # Logging to file. Include\ diff --git a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_timeseries_valhall.yaml b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_timeseries_valhall.yaml index 2e9f31616..c1a0ed830 100644 --- a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_timeseries_valhall.yaml +++ b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_data_pipeline_timeseries_valhall.yaml @@ -117,7 +117,7 @@ ExtractionPipelineConfig: \ matchThreshold: 0.75\n" externalId: ep_ctx_timeseries_oid_opcua_asset - config: "# Version of the config schema\nversion: 1\n\nsource:\n # The URL of\ - \ the OPC-UA server to connect to\n endpoint-url: opc.tcp://DESKTOP-18JE0L8:53530/OPCUA/SimulationServer\n\ + \ the OPC-UA server to connect to\n endpoint-url: 'opc.tcp://DESKTOP-18JE0L8:53530/OPCUA/SimulationServer'\n\ \ force-restart: true\n browse-nodes-chunk: 100\n subscription-chunk:\ \ 1000\n keep-alive-interval: 50000\n sampling-interval: 0\n queue-length:\ \ 10\n browse-throttling:\n max-node-parallelism: 1000\n retries:\n\ @@ -133,9 +133,9 @@ ExtractionPipelineConfig: \ \"opc-ua:\"\n\n # Source node in the OPC-UA server. 
Leave empty to use the\ \ top level Objects node.\n # If root-nodes is set, this is added to the list\ \ of root nodes.\n root-node:\n # Full name of the namespace of the\ - \ root node.\n namespace-uri: http://www.prosysopc.com/OPCUA/SimulationNodes/\n\ + \ root node.\n namespace-uri: 'http://www.prosysopc.com/OPCUA/SimulationNodes/'\n\ \ # Id of the root node, on the form \"i=123\" or \"s=stringid\" etc.\n\ - \ node-id: s=85/0:Simulation\n\n # Time in minutes between each call\ + \ node-id: 's=85/0:Simulation'\n\n # Time in minutes between each call\ \ to browse the OPC-UA directory, then push new nodes to destinations.\n #\ \ Note that this is a heavy operation, so this number should not be set too low.\n\ \ # Alternatively, use N[timeunit] where timeunit is w, d, h, m, s or ms.\n\ diff --git a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_entity_matching.yaml b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_entity_matching.yaml index 2221013ac..bd77717a1 100644 --- a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_entity_matching.yaml +++ b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_entity_matching.yaml @@ -33,7 +33,7 @@ ExtractionPipelineConfig: - config: "parameters:\n autoApprovalThreshold: 0.85\n autoRejectThreshold: 0.25\n\ \ featureType: \"bigram\"\nsourceSystem:\n space: springfield_instances\n externalId:\ \ entity_matcher\nstate:\n rawDatabase: contextualizationState\n rawTable: entityMatching\n\ - data:\n annotationSpace: springfield_instances\n instanceSpaces:\n ['springfield_instances']\n\ + data:\n annotationSpace: 'springfield_instances'\n instanceSpaces:\n ['springfield_instances']\n\ \ matchingJobs:\n job1:\n sourceView:\n space: cdf_cdm\n \ \ externalId: CogniteTimeSeries\n version: v1\n properties:\n\ \ - name\n targetViews:\n - space: cdf_cdm\n externalId:\ diff --git a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_p_and_id_parser.yaml b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_p_and_id_parser.yaml index b22110bf2..aa89ba289 100644 --- a/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_p_and_id_parser.yaml +++ b/tests/test_unit/test_cli/test_build_deploy_snapshots/cdf_p_and_id_parser.yaml @@ -28,7 +28,7 @@ ExtractionPipeline: ExtractionPipelineConfig: - config: "parameters:\n autoApprovalThreshold: 0.85\n autoRejectThreshold: 0.25\n\ sourceSystem:\n space: springfield_instances\n externalId: p_and_id_parser\n\ - data:\n annotationSpace: springfield_instances\n instanceSpaces:\n ['springfield_instances']\n\ + data:\n annotationSpace: 'springfield_instances'\n instanceSpaces:\n ['springfield_instances']\n\ \ annotationJobs:\n - fileView:\n space: cdf_cdm\n externalId:\ \ CogniteFile\n version: v1\n entityViews:\n - space: cdf_cdm\n\ \ externalId: CogniteEquipment\n version: v1\n searchProperty:\