diff --git a/README.md b/README.md index f60216ed..10175fb9 100644 --- a/README.md +++ b/README.md @@ -227,6 +227,7 @@ When running against multiple destination organizations, a separate working dire | notebooks | Sync Datadog notebooks. | | host_tags | Sync Datadog host tags. | | logs_indexes | Sync Datadog logs indexes. | +| logs_indexes_order | Sync Datadog logs indexes order. | | logs_metrics | Sync Datadog logs metrics. | | logs_restriction_queries | Sync Datadog logs restriction queries. | | metric_tag_configurations | Sync Datadog metric tags configurations. | @@ -264,6 +265,7 @@ See [Supported resources](#supported-resources) section below for potential reso | notebooks | - | | host_tags | - | | logs_indexes | - | +| logs_indexes_order | logs_indexes | | logs_metrics | - | | logs_restriction_queries | roles | | metric_tag_configurations | - | diff --git a/datadog_sync/model/logs_indexes_order.py b/datadog_sync/model/logs_indexes_order.py new file mode 100644 index 00000000..177f571b --- /dev/null +++ b/datadog_sync/model/logs_indexes_order.py @@ -0,0 +1,100 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 
+ +from __future__ import annotations +from typing import TYPE_CHECKING, Optional, List, Dict, Tuple + +from datadog_sync.utils.base_resource import BaseResource, ResourceConfig +from datadog_sync.utils.resource_utils import LogsIndexesOrderNameComparator + +if TYPE_CHECKING: + from datadog_sync.utils.custom_client import CustomClient + + +class LogsIndexesOrder(BaseResource): + resource_type = "logs_indexes_order" + resource_config = ResourceConfig( + concurrent=False, + base_path="/api/v1/logs/config/index-order", + resource_connections={ + "logs_indexes": ["index_names"], + }, + deep_diff_config={ + "ignore_order": False, + "custom_operators": [LogsIndexesOrderNameComparator()], + }, + ) + # Additional LogsIndexesOrder specific attributes + destination_indexes_order: Dict[str, Dict] = dict() + default_id: str = "logs-index-order" + + async def get_resources(self, client: CustomClient) -> List[Dict]: + resp = await client.get(self.resource_config.base_path) + + return [resp] + + async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]: + if _id: + source_client = self.config.source_client + resource = await source_client.get(self.resource_config.base_path) + + return self.default_id, resource + + async def pre_resource_action_hook(self, _id, resource: Dict) -> None: + pass + + async def pre_apply_hook(self) -> None: + self.destination_indexes_order = await self.get_destination_indexes_order() + + async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]: + if not self.destination_indexes_order: + raise Exception("Failed to retrieve destination orgs logs index order") + + self.resource_config.destination_resources[_id] = self.destination_indexes_order + return await self.update_resource(_id, resource) + + async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]: + destination_resources = self.destination_indexes_order or self.resource_config.destination_resources[_id] + 
self.handle_additional_indexes(resource, destination_resources) + + destination_client = self.config.destination_client + resp = await destination_client.put(self.resource_config.base_path, resource) + + return _id, resp + + async def delete_resource(self, _id: str) -> None: + pass + + def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]: + logs_indexes = self.config.resources["logs_indexes"].resource_config.destination_resources + + failed_connections = [] + for i, name in enumerate(r_obj[key]): + if name in logs_indexes: + r_obj[key][i] = logs_indexes[name]["name"] + else: + failed_connections.append(name) + + return failed_connections + + async def get_destination_indexes_order(self): + destination_client = self.config.destination_client + resp = await self.get_resources(destination_client) + + return resp[0] + + @staticmethod + def handle_additional_indexes(resource, destination_resource) -> None: + # Logs index order requires all logs indexes in the destination org to be included in the payload + # Additional indexes in the source org need to be removed from the payload + ids_to_omit = set(resource["index_names"]) - set(destination_resource["index_names"]) + resource["index_names"] = [_id for _id in resource["index_names"] if _id not in ids_to_omit] + + # Add back additional indexes present in the destination org while retaining the relative ordering + # of the additional indexes + extra_ids_to_include = [ + _id for _id in destination_resource["index_names"] if _id not in resource["index_names"] + ] + resource["index_names"] = resource["index_names"] + extra_ids_to_include diff --git a/datadog_sync/models/__init__.py b/datadog_sync/models/__init__.py index 86d2729c..78d2cd0d 100644 --- a/datadog_sync/models/__init__.py +++ b/datadog_sync/models/__init__.py @@ -4,27 +4,28 @@ # Copyright 2019 Datadog, Inc. 
# ruff: noqa -from datadog_sync.model.roles import Roles -from datadog_sync.model.users import Users -from datadog_sync.model.dashboards import Dashboards from datadog_sync.model.dashboard_lists import DashboardLists -from datadog_sync.model.monitors import Monitors -from datadog_sync.model.downtimes import Downtimes +from datadog_sync.model.dashboards import Dashboards from datadog_sync.model.downtime_schedules import DowntimeSchedules -from datadog_sync.model.service_level_objectives import ServiceLevelObjectives -from datadog_sync.model.slo_corrections import SLOCorrections -from datadog_sync.model.synthetics_tests import SyntheticsTests -from datadog_sync.model.synthetics_private_locations import SyntheticsPrivateLocations -from datadog_sync.model.synthetics_global_variables import SyntheticsGlobalVariables -from datadog_sync.model.logs_pipelines import LogsPipelines -from datadog_sync.model.logs_pipelines_order import LogsPipelinesOrder -from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines -from datadog_sync.model.notebooks import Notebooks -from datadog_sync.model.logs_metrics import LogsMetrics +from datadog_sync.model.downtimes import Downtimes from datadog_sync.model.host_tags import HostTags -from datadog_sync.model.metric_tag_configurations import MetricTagConfigurations +from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines from datadog_sync.model.logs_indexes import LogsIndexes +from datadog_sync.model.logs_indexes_order import LogsIndexesOrder +from datadog_sync.model.logs_metrics import LogsMetrics +from datadog_sync.model.logs_pipelines import LogsPipelines +from datadog_sync.model.logs_pipelines_order import LogsPipelinesOrder from datadog_sync.model.logs_restriction_queries import LogsRestrictionQueries -from datadog_sync.model.spans_metrics import SpansMetrics +from datadog_sync.model.metric_tag_configurations import MetricTagConfigurations +from datadog_sync.model.monitors import Monitors +from 
 datadog_sync.model.notebooks import Notebooks from datadog_sync.model.restriction_policies import RestrictionPolicies +from datadog_sync.model.roles import Roles +from datadog_sync.model.service_level_objectives import ServiceLevelObjectives +from datadog_sync.model.slo_corrections import SLOCorrections +from datadog_sync.model.spans_metrics import SpansMetrics +from datadog_sync.model.synthetics_global_variables import SyntheticsGlobalVariables +from datadog_sync.model.synthetics_private_locations import SyntheticsPrivateLocations +from datadog_sync.model.synthetics_tests import SyntheticsTests from datadog_sync.model.teams import Teams +from datadog_sync.model.users import Users diff --git a/datadog_sync/utils/resource_utils.py b/datadog_sync/utils/resource_utils.py index 2b0ba8b9..8973b5c3 100644 --- a/datadog_sync/utils/resource_utils.py +++ b/datadog_sync/utils/resource_utils.py @@ -65,6 +65,25 @@ def give_up_diffing(self, level, diff_instance) -> bool: return False +class LogsIndexesOrderNameComparator(BaseOperator): + def match(self, level): + if "index_names" in level.t1 and "index_names" in level.t2: + # make copy so we do not mutate the original + level.t1 = deepcopy(level.t1) + level.t2 = deepcopy(level.t2) + + # If we are at the top level, modify the list to exclude source indexes not present in the destination. 
+ t1 = set(level.t1["index_names"]) + t2 = set(level.t2["index_names"]) + d_ignore = t1 - t2 + + level.t1["index_names"] = [_id for _id in level.t1["index_names"] if _id not in d_ignore] + return True + + def give_up_diffing(self, level, diff_instance) -> bool: + return False + + RECURRENCE_START_ATTR_PATH_RE = r"root\['attributes'\]\['schedule'\]\['recurrences'\]\[[0-9]+\]\['start'\]" diff --git a/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_no_resource_diffs.frozen b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_no_resource_diffs.frozen new file mode 100644 index 00000000..c8cc357f --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_no_resource_diffs.frozen @@ -0,0 +1 @@ +2024-04-24T16:45:30.760472-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_no_resource_diffs.yaml b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_no_resource_diffs.yaml new file mode 100644 index 00000000..d69724ba --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_no_resource_diffs.yaml @@ -0,0 +1,18 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.eu/api/v1/logs/config/index-order + response: + body: + string: '{"index_names": ["test-index", "gcp-index-name", "main"]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_cleanup.frozen b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_cleanup.frozen new file mode 100644 index 00000000..d9e4cd07 --- /dev/null +++ 
b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_cleanup.frozen @@ -0,0 +1 @@ +2024-04-24T16:45:31.131571-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_import.frozen b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_import.frozen new file mode 100644 index 00000000..83e032af --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_import.frozen @@ -0,0 +1 @@ +2024-04-24T16:45:18.706003-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_import.yaml b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_import.yaml new file mode 100644 index 00000000..32a0b4f1 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_import.yaml @@ -0,0 +1,34 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v1/logs/config/index-order + response: + body: + string: '{"index_names": ["main", "gcp-index-name"]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.eu/api/v1/logs/config/index-order + response: + body: + string: '{"index_names": ["test-index", "gcp-index-name", "main"]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_sync.frozen b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_sync.frozen 
new file mode 100644 index 00000000..25db5d92 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_sync.frozen @@ -0,0 +1 @@ +2024-04-24T16:45:19.250067-04:00 \ No newline at end of file diff --git a/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_sync.yaml b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_sync.yaml new file mode 100644 index 00000000..28e3ba27 --- /dev/null +++ b/tests/integration/resources/cassettes/test_logs_indexes_order/TestLogsIndexesOrder.test_resource_sync.yaml @@ -0,0 +1,145 @@ +interactions: +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v1/logs/config/indexes/gcp-index-name + response: + body: + string: '{"name": "gcp-index-name", "filter": {"query": "source:gcp.*"}, "num_retention_days": + 15, "daily_limit": 200000000, "is_rate_limited": false, "daily_limit_reset": + {"reset_time": "14:00", "reset_utc_offset": "+00:00"}, "daily_limit_warning_threshold_percentage": + null, "exclusion_filters": []}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v1/logs/config/indexes/main + response: + body: + string: '{"name": "main", "filter": {"query": ""}, "num_retention_days": 15, + "daily_limit": null, "is_rate_limited": false, "daily_limit_reset": null, + "daily_limit_warning_threshold_percentage": null, "exclusion_filters": [{"name": + "gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*", "sample_rate": + 1.0}}]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: 
https://api.datadoghq.eu/api/v1/logs/config/index-order + response: + body: + string: '{"index_names": ["test-index", "gcp-index-name", "main"]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: null + headers: + Content-Type: + - application/json + method: GET + uri: https://api.datadoghq.eu/api/v1/logs/config/indexes + response: + body: + string: '{"indexes": [{"name": "test-index", "filter": {"query": "test:filter"}, + "num_retention_days": 15, "daily_limit": 200000000, "is_rate_limited": false, + "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "-04:00"}, + "daily_limit_warning_threshold_percentage": null, "exclusion_filters": []}, + {"name": "gcp-index-name", "filter": {"query": "source:gcp.*"}, "num_retention_days": + 15, "daily_limit": 200000000, "is_rate_limited": false, "daily_limit_reset": + {"reset_time": "14:00", "reset_utc_offset": "+00:00"}, "daily_limit_warning_threshold_percentage": + null, "exclusion_filters": []}, {"name": "main", "filter": {"query": ""}, + "num_retention_days": 15, "daily_limit": null, "is_rate_limited": false, "daily_limit_reset": + null, "daily_limit_warning_threshold_percentage": null, "exclusion_filters": + [{"name": "gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*", + "sample_rate": 1.0}}]}]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: '{"filter": {"query": ""}, "num_retention_days": 15, "daily_limit_reset": + null, "daily_limit_warning_threshold_percentage": null, "exclusion_filters": + [{"name": "gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*", + "sample_rate": 1.0}}], "disable_daily_limit": true}' + headers: + Content-Type: + - application/json + method: PUT + uri: https://api.datadoghq.eu/api/v1/logs/config/indexes/main + response: + body: + string: '{"name": "main", "filter": {"query": ""}, "num_retention_days": 15, + "daily_limit": null, 
"is_rate_limited": false, "daily_limit_reset": null, + "daily_limit_warning_threshold_percentage": null, "exclusion_filters": [{"name": + "gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*", "sample_rate": + 1.0}}]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: '{"filter": {"query": "source:gcp.*"}, "num_retention_days": 15, "daily_limit": + 200000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": + "+00:00"}, "daily_limit_warning_threshold_percentage": null, "exclusion_filters": + []}' + headers: + Content-Type: + - application/json + method: PUT + uri: https://api.datadoghq.eu/api/v1/logs/config/indexes/gcp-index-name + response: + body: + string: '{"name": "gcp-index-name", "filter": {"query": "source:gcp.*"}, "num_retention_days": + 15, "daily_limit": 200000000, "is_rate_limited": false, "daily_limit_reset": + {"reset_time": "14:00", "reset_utc_offset": "+00:00"}, "daily_limit_warning_threshold_percentage": + null, "exclusion_filters": []}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- request: + body: '{"index_names": ["main", "gcp-index-name", "test-index"]}' + headers: + Content-Type: + - application/json + method: PUT + uri: https://api.datadoghq.eu/api/v1/logs/config/index-order + response: + body: + string: '{"index_names": ["main", "gcp-index-name", "test-index"]}' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/resources/test_logs_indexes_order.py b/tests/integration/resources/test_logs_indexes_order.py new file mode 100644 index 00000000..e30ea27a --- /dev/null +++ b/tests/integration/resources/test_logs_indexes_order.py @@ -0,0 +1,54 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +import pytest + +from tests.integration.helpers import BaseResourcesTestClass +from datadog_sync.models import LogsIndexesOrder + + +class TestLogsIndexesOrder(BaseResourcesTestClass): + resource_type = LogsIndexesOrder.resource_type + force_missing_deps = True + + @pytest.mark.skip(reason="resource is only updated by default") + def test_resource_update_sync(self): + pass + + +@pytest.mark.parametrize( + "resource, destination_resource, expected", + [ + ( + {"index_names": ["index1", "index2", "index3"]}, + {"index_names": ["index3", "index2", "index4"]}, + {"index_names": ["index2", "index3", "index4"]}, + ), + ( + {"index_names": ["index1"]}, + {"index_names": ["index3", "index1", "index4"]}, + {"index_names": ["index1", "index3", "index4"]}, + ), + ( + {"index_names": ["index1", "index2", "index3"]}, + {"index_names": ["index3", "index1"]}, + {"index_names": ["index1", "index3"]}, + ), + ( + {"index_names": ["index1", "index2", "index3"]}, + {"index_names": ["index1"]}, + {"index_names": ["index1"]}, + ), + ( + {"index_names": ["index1"]}, + {"index_names": ["index5", "index1", "index3", "index4"]}, + {"index_names": ["index1", "index5", "index3", "index4"]}, + ), + ], +) +def test_handle_index_diff(resource, destination_resource, expected): + LogsIndexesOrder.handle_additional_indexes(resource, destination_resource) + + assert resource == expected