Skip to content

Commit

Permalink
add support for logs-indexes-order
Browse files Browse the repository at this point in the history
  • Loading branch information
skarimo committed Apr 24, 2024
1 parent 62fc172 commit 0d5a10e
Show file tree
Hide file tree
Showing 12 changed files with 351 additions and 17 deletions.
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,7 @@ When running against multiple destination organizations, a separate working dire
| notebooks | Sync Datadog notebooks. |
| host_tags | Sync Datadog host tags. |
| logs_indexes | Sync Datadog logs indexes. |
| logs_indexes_order | Sync Datadog logs indexes order. |
| logs_metrics | Sync Datadog logs metrics. |
| logs_restriction_queries | Sync Datadog logs restriction queries. |
| metric_tag_configurations | Sync Datadog metric tags configurations. |
Expand Down Expand Up @@ -264,6 +265,7 @@ See [Supported resources](#supported-resources) section below for potential reso
| notebooks | - |
| host_tags | - |
| logs_indexes | - |
| logs_indexes_order | logs_indexes |
| logs_metrics | - |
| logs_restriction_queries | roles |
| metric_tag_configurations | - |
Expand Down
93 changes: 93 additions & 0 deletions datadog_sync/model/logs_indexes_order.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the 3-clause BSD style license (see LICENSE).
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, Tuple

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig
from datadog_sync.utils.resource_utils import LogsIndexesOrderNameComparator

if TYPE_CHECKING:
from datadog_sync.utils.custom_client import CustomClient


class LogsIndexesOrder(BaseResource):
    """Sync the logs indexes order between a source and a destination org.

    The index order is a singleton resource (one per org), so a static
    ``default_id`` is used instead of an API-provided identifier. Syncing
    depends on ``logs_indexes`` having been synced first (see
    ``resource_connections``).
    """

    resource_type = "logs_indexes_order"
    resource_config = ResourceConfig(
        # Singleton resource: updates must not race each other.
        concurrent=False,
        base_path="/api/v1/logs/config/index-order",
        resource_connections={
            "logs_indexes": ["index_names"],
        },
        deep_diff_config={
            # Order is the whole point of this resource — never ignore it.
            "ignore_order": False,
            "custom_operators": [LogsIndexesOrderNameComparator()],
        },
    )
    # Additional LogsIndexesOrder specific attributes
    # Cached copy of the destination org's current index order;
    # populated by pre_apply_hook before create/update run.
    destination_indexes_order: Dict[str, Dict] = dict()
    # Static ID for the singleton order resource.
    default_id: str = "logs-index-order"

    async def get_resources(self, client: CustomClient) -> List[Dict]:
        """Return the org's index order as a single-element list."""
        resp = await client.get(self.resource_config.base_path)

        return [resp]

    async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]:
        """Import the source org's index order under the static default ID."""
        if _id:
            source_client = self.config.source_client
            resource = await source_client.get(self.resource_config.base_path)

        return self.default_id, resource

    async def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        """No per-resource preparation is needed for the index order."""
        pass

    async def pre_apply_hook(self) -> None:
        """Cache the destination org's current index order for create/update."""
        self.destination_indexes_order = await self.get_destination_indexes_order()

    async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Seed the destination state from the live order, then update it.

        Raises:
            Exception: If the destination org's index order could not be fetched.
        """
        if not self.destination_indexes_order:
            raise Exception("Failed to retrieve destination orgs logs index order")

        self.resource_config.destination_resources[_id] = self.destination_indexes_order
        return await self.update_resource(_id, resource)

    async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """PUT the source order to the destination, reconciling index sets.

        Indexes that only exist in the source are dropped (they cannot be
        ordered in the destination), and indexes that only exist in the
        destination are appended after the synced order so the payload
        stays complete.
        """
        destination_resources = self.destination_indexes_order or self.resource_config.destination_resources[_id]
        # Source-only indexes: absent from the destination, so omit them.
        names_to_omit = set(resource["index_names"]) - set(destination_resources["index_names"])

        # Destination-only indexes: keep them, appended after the synced order.
        extra_names_to_include = [
            name for name in destination_resources["index_names"] if name not in resource["index_names"]
        ]

        resource["index_names"] = [name for name in resource["index_names"] if name not in names_to_omit]
        resource["index_names"] = resource["index_names"] + extra_names_to_include

        destination_client = self.config.destination_client
        resp = await destination_client.put(self.resource_config.base_path, resource)

        return _id, resp

    async def delete_resource(self, _id: str) -> None:
        """The index order is a singleton and cannot be deleted via the API."""
        self.config.logger.warning("logs_indexes_order cannot be deleted. Removing resource from config only.")

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        """Remap source index names to destination index names in place.

        Returns:
            The list of index names that have no synced destination counterpart.
        """
        logs_indexes = self.config.resources["logs_indexes"].resource_config.destination_resources

        failed_connections = []
        for i, name in enumerate(r_obj[key]):
            if name in logs_indexes:
                r_obj[key][i] = logs_indexes[name]["name"]
            else:
                failed_connections.append(name)

        return failed_connections

    async def get_destination_indexes_order(self):
        """Fetch the destination org's current index order."""
        destination_client = self.config.destination_client
        resp = await self.get_resources(destination_client)

        return resp[0]
35 changes: 18 additions & 17 deletions datadog_sync/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,27 +4,28 @@
# Copyright 2019 Datadog, Inc.
# ruff: noqa

from datadog_sync.model.roles import Roles
from datadog_sync.model.users import Users
from datadog_sync.model.dashboards import Dashboards
from datadog_sync.model.dashboard_lists import DashboardLists
from datadog_sync.model.monitors import Monitors
from datadog_sync.model.downtimes import Downtimes
from datadog_sync.model.dashboards import Dashboards
from datadog_sync.model.downtime_schedules import DowntimeSchedules
from datadog_sync.model.service_level_objectives import ServiceLevelObjectives
from datadog_sync.model.slo_corrections import SLOCorrections
from datadog_sync.model.synthetics_tests import SyntheticsTests
from datadog_sync.model.synthetics_private_locations import SyntheticsPrivateLocations
from datadog_sync.model.synthetics_global_variables import SyntheticsGlobalVariables
from datadog_sync.model.logs_pipelines import LogsPipelines
from datadog_sync.model.logs_pipelines_order import LogsPipelinesOrder
from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines
from datadog_sync.model.notebooks import Notebooks
from datadog_sync.model.logs_metrics import LogsMetrics
from datadog_sync.model.downtimes import Downtimes
from datadog_sync.model.host_tags import HostTags
from datadog_sync.model.metric_tag_configurations import MetricTagConfigurations
from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines
from datadog_sync.model.logs_indexes import LogsIndexes
from datadog_sync.model.logs_indexes_order import LogsIndexesOrder
from datadog_sync.model.logs_metrics import LogsMetrics
from datadog_sync.model.logs_pipelines import LogsPipelines
from datadog_sync.model.logs_pipelines_order import LogsPipelinesOrder
from datadog_sync.model.logs_restriction_queries import LogsRestrictionQueries
from datadog_sync.model.spans_metrics import SpansMetrics
from datadog_sync.model.metric_tag_configurations import MetricTagConfigurations
from datadog_sync.model.monitors import Monitors
from datadog_sync.model.notebooks import Notebooks
from datadog_sync.model.restriction_policies import RestrictionPolicies
from datadog_sync.model.roles import Roles
from datadog_sync.model.service_level_objectives import ServiceLevelObjectives
from datadog_sync.model.slo_corrections import SLOCorrections
from datadog_sync.model.spans_metrics import SpansMetrics
from datadog_sync.model.synthetics_global_variables import SyntheticsGlobalVariables
from datadog_sync.model.synthetics_private_locations import SyntheticsPrivateLocations
from datadog_sync.model.synthetics_tests import SyntheticsTests
from datadog_sync.model.teams import Teams
from datadog_sync.model.users import Users
19 changes: 19 additions & 0 deletions datadog_sync/utils/resource_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,25 @@ def give_up_diffing(self, level, diff_instance) -> bool:
return False


class LogsIndexesOrderNameComparator(BaseOperator):
    """deepdiff operator that drops destination-only index names before diffing orders."""

    def match(self, level):
        if "index_names" in level.t1 and "index_names" in level.t2:
            # Copy both sides so the caller's payloads are never mutated.
            level.t1 = deepcopy(level.t1)
            level.t2 = deepcopy(level.t2)

            # At the top level: names present only in t1 are destination
            # extras — filter them out so they do not show up as a diff.
            extras = set(level.t1["index_names"]) - set(level.t2["index_names"])
            level.t1["index_names"] = [
                name for name in level.t1["index_names"] if name not in extras
            ]
            return True

    def give_up_diffing(self, level, diff_instance) -> bool:
        # Never short-circuit: let deepdiff compare the (filtered) values itself.
        return False


RECURRENCE_START_ATTR_PATH_RE = r"root\['attributes'\]\['schedule'\]\['recurrences'\]\[[0-9]+\]\['start'\]"


Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-04-24T16:45:30.760472-04:00
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
interactions:
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v1/logs/config/index-order
response:
body:
string: '{"index_names": ["test-index", "gcp-index-name", "main"]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-04-24T16:45:31.131571-04:00
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-04-24T16:45:18.706003-04:00
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
interactions:
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.com/api/v1/logs/config/index-order
response:
body:
string: '{"index_names": ["main", "gcp-index-name"]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v1/logs/config/index-order
response:
body:
string: '{"index_names": ["test-index", "gcp-index-name", "main"]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-04-24T16:45:19.250067-04:00
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
interactions:
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.com/api/v1/logs/config/indexes/gcp-index-name
response:
body:
string: '{"name": "gcp-index-name", "filter": {"query": "source:gcp.*"}, "num_retention_days":
15, "daily_limit": 200000000, "is_rate_limited": false, "daily_limit_reset":
{"reset_time": "14:00", "reset_utc_offset": "+00:00"}, "daily_limit_warning_threshold_percentage":
null, "exclusion_filters": []}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.com/api/v1/logs/config/indexes/main
response:
body:
string: '{"name": "main", "filter": {"query": ""}, "num_retention_days": 15,
"daily_limit": null, "is_rate_limited": false, "daily_limit_reset": null,
"daily_limit_warning_threshold_percentage": null, "exclusion_filters": [{"name":
"gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*", "sample_rate":
1.0}}]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v1/logs/config/index-order
response:
body:
string: '{"index_names": ["test-index", "gcp-index-name", "main"]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v1/logs/config/indexes
response:
body:
string: '{"indexes": [{"name": "test-index", "filter": {"query": "test:filter"},
"num_retention_days": 15, "daily_limit": 200000000, "is_rate_limited": false,
"daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "-04:00"},
"daily_limit_warning_threshold_percentage": null, "exclusion_filters": []},
{"name": "gcp-index-name", "filter": {"query": "source:gcp.*"}, "num_retention_days":
15, "daily_limit": 200000000, "is_rate_limited": false, "daily_limit_reset":
{"reset_time": "14:00", "reset_utc_offset": "+00:00"}, "daily_limit_warning_threshold_percentage":
null, "exclusion_filters": []}, {"name": "main", "filter": {"query": ""},
"num_retention_days": 15, "daily_limit": null, "is_rate_limited": false, "daily_limit_reset":
null, "daily_limit_warning_threshold_percentage": null, "exclusion_filters":
[{"name": "gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*",
"sample_rate": 1.0}}]}]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: '{"filter": {"query": ""}, "num_retention_days": 15, "daily_limit_reset":
null, "daily_limit_warning_threshold_percentage": null, "exclusion_filters":
[{"name": "gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*",
"sample_rate": 1.0}}], "disable_daily_limit": true}'
headers:
Content-Type:
- application/json
method: PUT
uri: https://api.datadoghq.eu/api/v1/logs/config/indexes/main
response:
body:
string: '{"name": "main", "filter": {"query": ""}, "num_retention_days": 15,
"daily_limit": null, "is_rate_limited": false, "daily_limit_reset": null,
"daily_limit_warning_threshold_percentage": null, "exclusion_filters": [{"name":
"gcp-filter", "is_enabled": true, "filter": {"query": "source:gcp.*", "sample_rate":
1.0}}]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: '{"filter": {"query": "source:gcp.*"}, "num_retention_days": 15, "daily_limit":
200000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset":
"+00:00"}, "daily_limit_warning_threshold_percentage": null, "exclusion_filters":
[]}'
headers:
Content-Type:
- application/json
method: PUT
uri: https://api.datadoghq.eu/api/v1/logs/config/indexes/gcp-index-name
response:
body:
string: '{"name": "gcp-index-name", "filter": {"query": "source:gcp.*"}, "num_retention_days":
15, "daily_limit": 200000000, "is_rate_limited": false, "daily_limit_reset":
{"reset_time": "14:00", "reset_utc_offset": "+00:00"}, "daily_limit_warning_threshold_percentage":
null, "exclusion_filters": []}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: '{"index_names": ["main", "gcp-index-name", "test-index"]}'
headers:
Content-Type:
- application/json
method: PUT
uri: https://api.datadoghq.eu/api/v1/logs/config/index-order
response:
body:
string: '{"index_names": ["main", "gcp-index-name", "test-index"]}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
version: 1
Loading

0 comments on commit 0d5a10e

Please sign in to comment.