From 6977bf5a66a7d9b8aba07a6aec44dad31b3889af Mon Sep 17 00:00:00 2001 From: Sherzod Karimov Date: Wed, 4 Sep 2024 13:12:03 -0400 Subject: [PATCH 1/4] add support for logs_archives --- datadog_sync/model/logs_archives.py | 67 +++++++++++++++++++ datadog_sync/models/__init__.py | 1 + ...vesResources.test_no_resource_diffs.frozen | 1 + ...ivesResources.test_resource_cleanup.frozen | 1 + ...chivesResources.test_resource_cleanup.yaml | 30 +++++++++ ...hivesResources.test_resource_import.frozen | 1 + ...rchivesResources.test_resource_import.yaml | 29 ++++++++ ...rchivesResources.test_resource_sync.frozen | 1 + ...sArchivesResources.test_resource_sync.yaml | 53 +++++++++++++++ ...Resources.test_resource_update_sync.frozen | 1 + ...esResources.test_resource_update_sync.yaml | 54 +++++++++++++++ .../resources/test_logs_archives.py | 12 ++++ 12 files changed, 251 insertions(+) create mode 100644 datadog_sync/model/logs_archives.py create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_no_resource_diffs.frozen create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.frozen create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.yaml create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.frozen create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.yaml create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.frozen create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.yaml create mode 100644 tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.frozen create mode 100644 
tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.yaml create mode 100644 tests/integration/resources/test_logs_archives.py diff --git a/datadog_sync/model/logs_archives.py b/datadog_sync/model/logs_archives.py new file mode 100644 index 00000000..5365e9d3 --- /dev/null +++ b/datadog_sync/model/logs_archives.py @@ -0,0 +1,67 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, Tuple, cast + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class LogsArchives(BaseResource): + resource_type = "logs_archives" + resource_config = ResourceConfig( + base_path="/api/v2/logs/config/archives", + excluded_attributes=["id", "attributes.state"], + ) + # Additional LogsArchives specific attributes + + async def get_resources(self, client: CustomClient) -> List[Dict]: + resp = await client.get(self.resource_config.base_path) + + return resp["data"] + + async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]: + if _id: + source_client = self.config.source_client + resource = (await source_client.get(self.resource_config.base_path + f"/{_id}"))["data"] + resource = cast(dict, resource) + + return resource["id"], resource + + async def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + async def pre_apply_hook(self) -> None: + pass + + async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = await 
destination_client.post(self.resource_config.base_path, payload) + + return _id, resp["data"] + + async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]: + destination_client = self.config.destination_client + payload = {"data": resource} + resp = await destination_client.put( + self.resource_config.base_path + f"/{self.config.state.destination[self.resource_type][_id]['id']}", + payload, + ) + + self.config.state.destination[self.resource_type][_id] = resp["data"] + return _id, resp["data"] + + async def delete_resource(self, _id: str) -> None: + destination_client = self.config.destination_client + await destination_client.delete( + self.resource_config.base_path + f"/{self.config.state.destination[self.resource_type][_id]['id']}" + ) + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + pass diff --git a/datadog_sync/models/__init__.py b/datadog_sync/models/__init__.py index 5704c377..f951c825 100644 --- a/datadog_sync/models/__init__.py +++ b/datadog_sync/models/__init__.py @@ -10,6 +10,7 @@ from datadog_sync.model.downtime_schedules import DowntimeSchedules from datadog_sync.model.downtimes import Downtimes from datadog_sync.model.host_tags import HostTags +from datadog_sync.model.logs_archives import LogsArchives from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines from datadog_sync.model.logs_indexes import LogsIndexes from datadog_sync.model.logs_indexes_order import LogsIndexesOrder diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_no_resource_diffs.frozen b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_no_resource_diffs.frozen new file mode 100644 index 00000000..443f44f0 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_no_resource_diffs.frozen @@ -0,0 +1 @@ +2024-09-04T13:11:22.480442-04:00 \ No newline at end of file 
diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.frozen b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.frozen new file mode 100644 index 00000000..2d959979 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.frozen @@ -0,0 +1 @@ +2024-09-04T13:11:22.498371-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.yaml b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.yaml new file mode 100644 index 00000000..d452a3a5 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_cleanup.yaml @@ -0,0 +1,30 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: DELETE + uri: https://api.datadoghq.eu/api/v2/logs/config/archives/Zulo48JVQXSyM97uKjGHpA + response: + body: + string: '' + headers: {} + status: + code: 204 + message: No Content +- request: + body: null + headers: + Content-Type: + - application/json + method: DELETE + uri: https://api.datadoghq.eu/api/v2/logs/config/archives/3kkgEN2qQ7i_qfP0yFZx5g + response: + body: + string: '' + headers: {} + status: + code: 204 + message: No Content +version: 1 diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.frozen b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.frozen new file mode 100644 index 00000000..c2afa9ed --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.frozen @@ -0,0 +1 @@ +2024-09-04T13:11:21.025343-04:00 \ No newline at end of file diff --git 
a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.yaml b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.yaml new file mode 100644 index 00000000..6023fe30 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_import.yaml @@ -0,0 +1,29 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/logs/config/archives + response: + body: + string: '{"data": [{"type": "archives", "id": "V49TnL93R0C3QADZQllO5Q", "attributes": + {"name": "my first s3 archive", "query": "service:tutu", "state": "UNKNOWN", + "destination": {"bucket": "my-bucket", "path": "/path/foo", "type": "s3", + "integration": {"role_name": "testacc-datadog-integration-role", "account_id": + "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], "include_tags": + true, "rehydration_max_scan_size_in_gb": 123}}, {"type": "archives", "id": + "RK1PeXaNRButwKNMn_dRJQ", "attributes": {"name": "my first azure archive", + "query": "service:toto", "state": "UNKNOWN", "destination": {"container": + "my-container", "storage_account": "storageaccount", "path": "/path/blou", + "type": "azure", "integration": {"tenant_id": "92f7df25-f9d7-4e76-a3b6-4011e64958ea", + "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, "rehydration_tags": + [], "include_tags": false, "rehydration_max_scan_size_in_gb": null}}]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.frozen b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.frozen new file mode 100644 index 00000000..10dccb31 --- /dev/null +++ 
b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.frozen @@ -0,0 +1 @@ +2024-09-04T13:11:21.284161-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.yaml b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.yaml new file mode 100644 index 00000000..3a60bc05 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_sync.yaml @@ -0,0 +1,53 @@ +interactions: +- request: + body: '{"data": {"type": "archives", "attributes": {"name": "my first azure archive", + "query": "service:toto", "destination": {"container": "my-container", "storage_account": + "storageaccount", "path": "/path/blou", "type": "azure", "integration": {"tenant_id": + "92f7df25-f9d7-4e76-a3b6-4011e64958ea", "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, + "rehydration_tags": [], "include_tags": false, "rehydration_max_scan_size_in_gb": + null}}}' + headers: + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.eu/api/v2/logs/config/archives + response: + body: + string: '{"data": {"type": "archives", "id": "Zulo48JVQXSyM97uKjGHpA", "attributes": + {"name": "my first azure archive", "query": "service:toto", "state": "UNKNOWN", + "destination": {"container": "my-container", "storage_account": "storageaccount", + "path": "/path/blou", "type": "azure", "integration": {"tenant_id": "92f7df25-f9d7-4e76-a3b6-4011e64958ea", + "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, "rehydration_tags": + [], "include_tags": false, "rehydration_max_scan_size_in_gb": null}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: '{"data": {"type": "archives", "attributes": {"name": "my first s3 archive", + "query": "service:tutu", "destination": {"bucket": "my-bucket", 
"path": "/path/foo", + "type": "s3", "integration": {"role_name": "testacc-datadog-integration-role", + "account_id": "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], + "include_tags": true, "rehydration_max_scan_size_in_gb": 123}}}' + headers: + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.eu/api/v2/logs/config/archives + response: + body: + string: '{"data": {"type": "archives", "id": "3kkgEN2qQ7i_qfP0yFZx5g", "attributes": + {"name": "my first s3 archive", "query": "service:tutu", "state": "UNKNOWN", + "destination": {"bucket": "my-bucket", "path": "/path/foo", "type": "s3", + "integration": {"role_name": "testacc-datadog-integration-role", "account_id": + "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], "include_tags": + true, "rehydration_max_scan_size_in_gb": 123}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.frozen b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.frozen new file mode 100644 index 00000000..470d66a8 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.frozen @@ -0,0 +1 @@ +2024-09-04T13:11:21.851747-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.yaml b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.yaml new file mode 100644 index 00000000..5a15dfec --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives/TestLogsArchivesResources.test_resource_update_sync.yaml @@ -0,0 +1,54 @@ +interactions: +- request: + body: '{"data": {"type": "archives", "attributes": {"name": "my first azure 
archiveupdated", + "query": "service:toto", "destination": {"container": "my-container", "storage_account": + "storageaccount", "path": "/path/blou", "type": "azure", "integration": {"tenant_id": + "92f7df25-f9d7-4e76-a3b6-4011e64958ea", "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, + "rehydration_tags": [], "include_tags": false, "rehydration_max_scan_size_in_gb": + null}}}' + headers: + Content-Type: + - application/json + method: PUT + uri: https://api.datadoghq.eu/api/v2/logs/config/archives/Zulo48JVQXSyM97uKjGHpA + response: + body: + string: '{"data": {"type": "archives", "id": "Zulo48JVQXSyM97uKjGHpA", "attributes": + {"name": "my first azure archiveupdated", "query": "service:toto", "state": + "UNKNOWN", "destination": {"container": "my-container", "storage_account": + "storageaccount", "path": "/path/blou", "type": "azure", "integration": {"tenant_id": + "92f7df25-f9d7-4e76-a3b6-4011e64958ea", "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, + "rehydration_tags": [], "include_tags": false, "rehydration_max_scan_size_in_gb": + null}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: '{"data": {"type": "archives", "attributes": {"name": "my first s3 archiveupdated", + "query": "service:tutu", "destination": {"bucket": "my-bucket", "path": "/path/foo", + "type": "s3", "integration": {"role_name": "testacc-datadog-integration-role", + "account_id": "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], + "include_tags": true, "rehydration_max_scan_size_in_gb": 123}}}' + headers: + Content-Type: + - application/json + method: PUT + uri: https://api.datadoghq.eu/api/v2/logs/config/archives/3kkgEN2qQ7i_qfP0yFZx5g + response: + body: + string: '{"data": {"type": "archives", "id": "3kkgEN2qQ7i_qfP0yFZx5g", "attributes": + {"name": "my first s3 archiveupdated", "query": "service:tutu", "state": "UNKNOWN", + "destination": {"bucket": "my-bucket", "path": "/path/foo", "type": "s3", + 
"integration": {"role_name": "testacc-datadog-integration-role", "account_id": + "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], "include_tags": + true, "rehydration_max_scan_size_in_gb": 123}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/test_logs_archives.py b/tests/integration/resources/test_logs_archives.py new file mode 100644 index 00000000..bb67f6c8 --- /dev/null +++ b/tests/integration/resources/test_logs_archives.py @@ -0,0 +1,12 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from tests.integration.helpers import BaseResourcesTestClass +from datadog_sync.models import LogsArchives + + +class TestLogsArchivesResources(BaseResourcesTestClass): + resource_type = LogsArchives.resource_type + field_to_update = "attributes.name" From 007174ca4cbe20d5d3ff1ea9b82773d328f9ba16 Mon Sep 17 00:00:00 2001 From: Sherzod Karimov Date: Wed, 4 Sep 2024 13:15:00 -0400 Subject: [PATCH 2/4] update readme --- README.md | 72 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 38 insertions(+), 34 deletions(-) diff --git a/README.md b/README.md index 7ddb570a..d9e703fc 100644 --- a/README.md +++ b/README.md @@ -206,40 +206,42 @@ When running againts multiple destination organizations, a seperate working dire #### Supported resources -| Resource | Description | -|----------------------------------------|----------------------------------------------------------| -| authn_mappings | Sync Datadog authn mappings. | -| dashboard_lists | Sync Datadog dashboard lists. | -| dashboards | Sync Datadog dashboards. | -| downtime_schedules | Sync Datadog downtimes. | -| downtimes (**deprecated**) | Sync Datadog downtimes. 
| -| host_tags | Sync Datadog host tags. | -| logs_custom_pipelines (**deprecated**) | Sync Datadog logs custom pipelines. | -| logs_indexes | Sync Datadog logs indexes. | -| logs_indexes_order | Sync Datadog logs indexes order. | -| logs_metrics | Sync Datadog logs metrics. | -| logs_pipelines | Sync Datadog logs OOTB integration and custom pipelines. | -| logs_pipelines_order | Sync Datadog logs pipelines order. | -| logs_restriction_queries | Sync Datadog logs restriction queries. | -| metric_percentiles | Sync Datadog metric percentiles. | -| metric_tag_configurations | Sync Datadog metric tags configurations. | -| metrics_metadata | Sync Datadog metric metadata. | -| monitors | Sync Datadog monitors. | -| notebooks | Sync Datadog notebooks. | -| powerpacks | Sync Datadog powerpacks. | -| restriction_policies | Sync Datadog restriction policies. | -| roles | Sync Datadog roles. | -| sensitive_data_scanner_groups | Sync SDS groups | -| sensitive_data_scanner_groups_order | Sync SDS groups order | -| sensitive_data_scanner_rules | Sync SDS rules | -| service_level_objectives | Sync Datadog SLOs. | -| slo_corrections | Sync Datadog SLO corrections. | -| spans_metrics | Sync Datadog spans metrics. | -| synthetics_global_variables | Sync Datadog synthetic global variables. | -| synthetics_private_locations | Sync Datadog synthetic private locations. | -| synthetics_tests | Sync Datadog synthetic tests. | -| teams | Sync Datadog teams (excluding users and permissions). | -| users | Sync Datadog users. | +| Resource | Description | +|----------------------------------------|----------------------------------------------------------------------| +| authn_mappings | Sync Datadog authn mappings. | +| dashboard_lists | Sync Datadog dashboard lists. | +| dashboards | Sync Datadog dashboards. | +| downtime_schedules | Sync Datadog downtimes. | +| downtimes (**deprecated**) | Sync Datadog downtimes. | +| host_tags | Sync Datadog host tags. 
| +| logs_archives | Sync Datadog logs archives. Requires GCP, Azure, or AWS integration. | +| logs_archives_order | Sync Datadog logs archives order. | +| logs_custom_pipelines (**deprecated**) | Sync Datadog logs custom pipelines. | +| logs_indexes | Sync Datadog logs indexes. | +| logs_indexes_order | Sync Datadog logs indexes order. | +| logs_metrics | Sync Datadog logs metrics. | +| logs_pipelines | Sync Datadog logs OOTB integration and custom pipelines. | +| logs_pipelines_order | Sync Datadog logs pipelines order. | +| logs_restriction_queries | Sync Datadog logs restriction queries. | +| metric_percentiles | Sync Datadog metric percentiles. | +| metric_tag_configurations | Sync Datadog metric tags configurations. | +| metrics_metadata | Sync Datadog metric metadata. | +| monitors | Sync Datadog monitors. | +| notebooks | Sync Datadog notebooks. | +| powerpacks | Sync Datadog powerpacks. | +| restriction_policies | Sync Datadog restriction policies. | +| roles | Sync Datadog roles. | +| sensitive_data_scanner_groups | Sync SDS groups | +| sensitive_data_scanner_groups_order | Sync SDS groups order | +| sensitive_data_scanner_rules | Sync SDS rules | +| service_level_objectives | Sync Datadog SLOs. | +| slo_corrections | Sync Datadog SLO corrections. | +| spans_metrics | Sync Datadog spans metrics. | +| synthetics_global_variables | Sync Datadog synthetic global variables. | +| synthetics_private_locations | Sync Datadog synthetic private locations. | +| synthetics_tests | Sync Datadog synthetic tests. | +| teams | Sync Datadog teams (excluding users and permissions). | +| users | Sync Datadog users. | ***Note:*** `logs_custom_pipelines` resource has been deprecated in favor of `logs_pipelines` resource which supports both logs OOTB integration and custom pipelines. To migrate to the new resource, rename the existing state files from `logs_custom_pipelines.json` to `logs_pipelines.json` for both source and destination files. 
@@ -259,6 +261,8 @@ See [Supported resources](#supported-resources) section below for potential reso | downtime_schedules | monitors | | downtimes (**deprecated**) | monitors | | host_tags | - | +| logs_archives | - (Requires manual setup of AWS, GCP or Azure integration) | +| logs_archives_order | logs_archives | | logs_custom_pipelines (**deprecated**) | - | | logs_indexes | - | | logs_indexes_order | logs_indexes | From cb6c996cd71b83fe50feb6e8af58c68ebf303c83 Mon Sep 17 00:00:00 2001 From: Sherzod Karimov Date: Fri, 20 Sep 2024 10:48:15 -0400 Subject: [PATCH 3/4] update readme from feedback --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index d9e703fc..05bfce54 100644 --- a/README.md +++ b/README.md @@ -215,7 +215,10 @@ When running againts multiple destination organizations, a seperate working dire | downtimes (**deprecated**) | Sync Datadog downtimes. | | host_tags | Sync Datadog host tags. | | logs_archives | Sync Datadog logs archives. Requires GCP, Azure, or AWS integration. | +<<<<<<< HEAD | logs_archives_order | Sync Datadog logs archives order. | +======= +>>>>>>> 0331d2b (update readme from feedback) | logs_custom_pipelines (**deprecated**) | Sync Datadog logs custom pipelines. | | logs_indexes | Sync Datadog logs indexes. | | logs_indexes_order | Sync Datadog logs indexes order. | @@ -231,9 +234,12 @@ When running againts multiple destination organizations, a seperate working dire | powerpacks | Sync Datadog powerpacks. | | restriction_policies | Sync Datadog restriction policies. | | roles | Sync Datadog roles. | +<<<<<<< HEAD | sensitive_data_scanner_groups | Sync SDS groups | | sensitive_data_scanner_groups_order | Sync SDS groups order | | sensitive_data_scanner_rules | Sync SDS rules | +======= +>>>>>>> 0331d2b (update readme from feedback) | service_level_objectives | Sync Datadog SLOs. | | slo_corrections | Sync Datadog SLO corrections. | | spans_metrics | Sync Datadog spans metrics. 
| @@ -261,8 +267,12 @@ See [Supported resources](#supported-resources) section below for potential reso | downtime_schedules | monitors | | downtimes (**deprecated**) | monitors | | host_tags | - | +<<<<<<< HEAD | logs_archives | - (Requires manual setup of AWS, GCP or Azure integration) | | logs_archives_order | logs_archives | +======= +| logs_archives | - (Requires manual setup of AWS, GCP or Azure integrations) | +>>>>>>> 0331d2b (update readme from feedback) | logs_custom_pipelines (**deprecated**) | - | | logs_indexes | - | | logs_indexes_order | logs_indexes | From 395eefe3b7da668616606d6c060e683a41117ae6 Mon Sep 17 00:00:00 2001 From: skarimo <40482491+skarimo@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:07:43 -0400 Subject: [PATCH 4/4] Add support for logs archive order (#278) * add support got logs archives orcer * update readme --- README.md | 10 -- datadog_sync/model/logs_archives_order.py | 115 ++++++++++++++++ datadog_sync/models/__init__.py | 1 + ...rchivesOrder.test_no_resource_diffs.frozen | 1 + ...sArchivesOrder.test_no_resource_diffs.yaml | 19 +++ ...ArchivesOrder.test_resource_cleanup.frozen | 1 + ...sArchivesOrder.test_resource_import.frozen | 1 + ...ogsArchivesOrder.test_resource_import.yaml | 35 +++++ ...ogsArchivesOrder.test_resource_sync.frozen | 1 + ...tLogsArchivesOrder.test_resource_sync.yaml | 130 ++++++++++++++++++ .../resources/test_logs_archives_order.py | 18 +++ 11 files changed, 322 insertions(+), 10 deletions(-) create mode 100644 datadog_sync/model/logs_archives_order.py create mode 100644 tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.frozen create mode 100644 tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.yaml create mode 100644 tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_cleanup.frozen create mode 100644 
tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.frozen create mode 100644 tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.yaml create mode 100644 tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.frozen create mode 100644 tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.yaml create mode 100644 tests/integration/resources/test_logs_archives_order.py diff --git a/README.md b/README.md index 05bfce54..d9e703fc 100644 --- a/README.md +++ b/README.md @@ -215,10 +215,7 @@ When running againts multiple destination organizations, a seperate working dire | downtimes (**deprecated**) | Sync Datadog downtimes. | | host_tags | Sync Datadog host tags. | | logs_archives | Sync Datadog logs archives. Requires GCP, Azure, or AWS integration. | -<<<<<<< HEAD | logs_archives_order | Sync Datadog logs archives order. | -======= ->>>>>>> 0331d2b (update readme from feedback) | logs_custom_pipelines (**deprecated**) | Sync Datadog logs custom pipelines. | | logs_indexes | Sync Datadog logs indexes. | | logs_indexes_order | Sync Datadog logs indexes order. | @@ -234,12 +231,9 @@ When running againts multiple destination organizations, a seperate working dire | powerpacks | Sync Datadog powerpacks. | | restriction_policies | Sync Datadog restriction policies. | | roles | Sync Datadog roles. | -<<<<<<< HEAD | sensitive_data_scanner_groups | Sync SDS groups | | sensitive_data_scanner_groups_order | Sync SDS groups order | | sensitive_data_scanner_rules | Sync SDS rules | -======= ->>>>>>> 0331d2b (update readme from feedback) | service_level_objectives | Sync Datadog SLOs. | | slo_corrections | Sync Datadog SLO corrections. | | spans_metrics | Sync Datadog spans metrics. 
| @@ -267,12 +261,8 @@ See [Supported resources](#supported-resources) section below for potential reso | downtime_schedules | monitors | | downtimes (**deprecated**) | monitors | | host_tags | - | -<<<<<<< HEAD | logs_archives | - (Requires manual setup of AWS, GCP or Azure integration) | | logs_archives_order | logs_archives | -======= -| logs_archives | - (Requires manual setup of AWS, GCP or Azure integrations) | ->>>>>>> 0331d2b (update readme from feedback) | logs_custom_pipelines (**deprecated**) | - | | logs_indexes | - | | logs_indexes_order | logs_indexes | diff --git a/datadog_sync/model/logs_archives_order.py b/datadog_sync/model/logs_archives_order.py new file mode 100644 index 00000000..3d48b627 --- /dev/null +++ b/datadog_sync/model/logs_archives_order.py @@ -0,0 +1,115 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, Tuple +from copy import deepcopy + +from deepdiff.operator import BaseOperator + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class LogsArchivesOrderIdsComparator(BaseOperator): + def match(self, level): + if "archive_ids" in level.t1 and "archive_ids" in level.t2: + # make copy so we do not mutate the original + level.t1 = deepcopy(level.t1) + level.t2 = deepcopy(level.t2) + + # If we are at the top level, modify the list to exclude extra archives in destination. 
+ t1 = set(level.t1["archive_ids"]) + t2 = set(level.t2["archive_ids"]) + d_ignore = t1 - t2 + + level.t1["archive_ids"] = [_id for _id in level.t1["archive_ids"] if _id not in d_ignore] + return True + + def give_up_diffing(self, level, diff_instance) -> bool: + return False + + +class LogsArchivesOrder(BaseResource): + resource_type = "logs_archives_order" + resource_config = ResourceConfig( + concurrent=False, + base_path="/api/v2/logs/config/archive-order", + resource_connections={ + "logs_archives": ["data.attributes.archive_ids"], + }, + deep_diff_config={ + "ignore_order": False, + "custom_operators": [LogsArchivesOrderIdsComparator()], + }, + ) + # Additional LogsArchivesOrder specific attributes + destination_archives_order: Dict[str, Dict] = dict() + default_id: str = "logs-archives-order" + + async def get_resources(self, client: CustomClient) -> List[Dict]: + resp = await client.get(self.resource_config.base_path) + + return [resp] + + async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]: + if _id: + source_client = self.config.source_client + resource = await source_client.get(self.resource_config.base_path) + + return self.default_id, resource + + async def pre_resource_action_hook(self, _id, resource: Dict) -> None: + self.destination_archives_order = await self.get_destination_archives_order() + + async def pre_apply_hook(self) -> None: + pass + + async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]: + if not self.destination_archives_order: + raise Exception("Failed to retrieve destination orgs logs archive order") + + self.config.state.destination[self.resource_type][_id] = self.destination_archives_order + return await self.update_resource(_id, resource) + + async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]: + destination_resources = ( + self.destination_archives_order or self.config.state.destination[self.resource_type][_id] + ) + 
ids_to_omit = set(resource["data"]["attributes"]["archive_ids"]) - set( + destination_resources["data"]["attributes"]["archive_ids"] + ) + + extra_ids_to_include = [ + _id + for _id in destination_resources["data"]["attributes"]["archive_ids"] + if _id not in resource["data"]["attributes"]["archive_ids"] + ] + + resource["data"]["attributes"]["archive_ids"] = [ + _id for _id in resource["data"]["attributes"]["archive_ids"] if _id not in ids_to_omit + ] + resource["data"]["attributes"]["archive_ids"] = ( + resource["data"]["attributes"]["archive_ids"] + extra_ids_to_include + ) + + destination_client = self.config.destination_client + resp = await destination_client.put(self.resource_config.base_path, resource) + + return _id, resp + + async def delete_resource(self, _id: str) -> None: + self.config.logger.warning("logs_archives_order cannot deleted. Removing resource from config only.") + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + return super(LogsArchivesOrder, self).connect_id(key, r_obj, resource_to_connect) + + async def get_destination_archives_order(self): + destination_client = self.config.destination_client + resp = await self.get_resources(destination_client) + + return resp[0] diff --git a/datadog_sync/models/__init__.py b/datadog_sync/models/__init__.py index f951c825..25c1a253 100644 --- a/datadog_sync/models/__init__.py +++ b/datadog_sync/models/__init__.py @@ -10,6 +10,7 @@ from datadog_sync.model.downtime_schedules import DowntimeSchedules from datadog_sync.model.downtimes import Downtimes from datadog_sync.model.host_tags import HostTags +from datadog_sync.model.logs_archives_order import LogsArchivesOrder from datadog_sync.model.logs_archives import LogsArchives from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines from datadog_sync.model.logs_indexes import LogsIndexes diff --git 
a/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.frozen b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.frozen new file mode 100644 index 00000000..afc081f5 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.frozen @@ -0,0 +1 @@ +2024-09-10T11:05:05.873233-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.yaml b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.yaml new file mode 100644 index 00000000..2c1685be --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_no_resource_diffs.yaml @@ -0,0 +1,19 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order + response: + body: + string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["hbGkZhQ-RlyB-um-Wg7NIw", + "vnoZzsiUS1mWBwIsQdKyug", "79uMvyn6SfS6uspxJUaTqQ"]}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_cleanup.frozen b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_cleanup.frozen new file mode 100644 index 00000000..a3a5b131 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_cleanup.frozen @@ -0,0 +1 @@ +2024-09-10T11:05:06.042240-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.frozen 
b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.frozen new file mode 100644 index 00000000..bf219653 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.frozen @@ -0,0 +1 @@ +2024-09-10T11:05:04.341981-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.yaml b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.yaml new file mode 100644 index 00000000..9742f0de --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_import.yaml @@ -0,0 +1,35 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/logs/config/archive-order + response: + body: + string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["RK1PeXaNRButwKNMn_dRJQ", + "V49TnL93R0C3QADZQllO5Q"]}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order + response: + body: + string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["79uMvyn6SfS6uspxJUaTqQ"]}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.frozen b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.frozen new file mode 100644 index 00000000..14fb244c --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.frozen @@ 
-0,0 +1 @@ +2024-09-10T11:05:04.676451-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.yaml b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.yaml new file mode 100644 index 00000000..55531200 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_archives_order/TestLogsArchivesOrder.test_resource_sync.yaml @@ -0,0 +1,130 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/logs/config/archives/RK1PeXaNRButwKNMn_dRJQ + response: + body: + string: '{"data": {"type": "archives", "id": "RK1PeXaNRButwKNMn_dRJQ", "attributes": + {"name": "my first azure archive", "query": "service:toto", "state": "UNKNOWN", + "destination": {"container": "my-container", "storage_account": "storageaccount", + "path": "/path/blou", "type": "azure", "integration": {"tenant_id": "92f7df25-f9d7-4e76-a3b6-4011e64958ea", + "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, "rehydration_tags": + [], "include_tags": false, "rehydration_max_scan_size_in_gb": null}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/logs/config/archives/V49TnL93R0C3QADZQllO5Q + response: + body: + string: '{"data": {"type": "archives", "id": "V49TnL93R0C3QADZQllO5Q", "attributes": + {"name": "my first s3 archive", "query": "service:tutu", "state": "UNKNOWN", + "destination": {"bucket": "my-bucket", "path": "/path/foo", "type": "s3", + "integration": {"role_name": "testacc-datadog-integration-role", "account_id": + "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], "include_tags": + true, "rehydration_max_scan_size_in_gb": 123}}}' + headers: + Content-Type: + - application/json 
+ status: + code: 200 + message: OK +- request: + body: '{"data": {"type": "archives", "attributes": {"name": "my first azure archive", + "query": "service:toto", "destination": {"container": "my-container", "storage_account": + "storageaccount", "path": "/path/blou", "type": "azure", "integration": {"tenant_id": + "92f7df25-f9d7-4e76-a3b6-4011e64958ea", "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, + "rehydration_tags": [], "include_tags": false, "rehydration_max_scan_size_in_gb": + null}}}' + headers: + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.eu/api/v2/logs/config/archives + response: + body: + string: '{"data": {"type": "archives", "id": "hbGkZhQ-RlyB-um-Wg7NIw", "attributes": + {"name": "my first azure archive", "query": "service:toto", "state": "UNKNOWN", + "destination": {"container": "my-container", "storage_account": "storageaccount", + "path": "/path/blou", "type": "azure", "integration": {"tenant_id": "92f7df25-f9d7-4e76-a3b6-4011e64958ea", + "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, "rehydration_tags": + [], "include_tags": false, "rehydration_max_scan_size_in_gb": null}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: '{"data": {"type": "archives", "attributes": {"name": "my first s3 archive", + "query": "service:tutu", "destination": {"bucket": "my-bucket", "path": "/path/foo", + "type": "s3", "integration": {"role_name": "testacc-datadog-integration-role", + "account_id": "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], + "include_tags": true, "rehydration_max_scan_size_in_gb": 123}}}' + headers: + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.eu/api/v2/logs/config/archives + response: + body: + string: '{"data": {"type": "archives", "id": "vnoZzsiUS1mWBwIsQdKyug", "attributes": + {"name": "my first s3 archive", "query": "service:tutu", "state": "UNKNOWN", + "destination": {"bucket": 
"my-bucket", "path": "/path/foo", "type": "s3", + "integration": {"role_name": "testacc-datadog-integration-role", "account_id": + "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], "include_tags": + true, "rehydration_max_scan_size_in_gb": 123}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order + response: + body: + string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["79uMvyn6SfS6uspxJUaTqQ", + "hbGkZhQ-RlyB-um-Wg7NIw", "vnoZzsiUS1mWBwIsQdKyug"]}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["hbGkZhQ-RlyB-um-Wg7NIw", + "vnoZzsiUS1mWBwIsQdKyug", "79uMvyn6SfS6uspxJUaTqQ"]}}}' + headers: + Content-Type: + - application/json + method: PUT + uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order + response: + body: + string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["hbGkZhQ-RlyB-um-Wg7NIw", + "vnoZzsiUS1mWBwIsQdKyug", "79uMvyn6SfS6uspxJUaTqQ"]}}}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/test_logs_archives_order.py b/tests/integration/resources/test_logs_archives_order.py new file mode 100644 index 00000000..a0f8ab6f --- /dev/null +++ b/tests/integration/resources/test_logs_archives_order.py @@ -0,0 +1,18 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 

import pytest

from tests.integration.helpers import BaseResourcesTestClass
from datadog_sync.models import LogsArchivesOrder


class TestLogsArchivesOrder(BaseResourcesTestClass):
    """Integration tests for syncing the logs archive order resource.

    Test behavior (import/sync/diff/cleanup) comes from BaseResourcesTestClass;
    this class only selects the resource type and tweaks flags.
    """

    resource_type = LogsArchivesOrder.resource_type
    # The order references archives by id, so the archive dependencies must be
    # pulled in even when they were not explicitly imported.
    force_missing_deps = True

    @pytest.mark.skip(reason="resource is only updated by default")
    def test_resource_update_sync(self):
        # Skipped: syncing this resource already performs an update (there is
        # no separate create path for the base-class update test to exercise).
        pass