Add support for logs archive order (#278)
* add support for logs archives order

* update readme
skarimo authored Sep 23, 2024
1 parent 0331d2b commit d31045f
Showing 11 changed files with 325 additions and 1 deletion.
4 changes: 3 additions & 1 deletion README.md
@@ -215,6 +215,7 @@ When running against multiple destination organizations, a separate working directory
| downtimes (**deprecated**) | Sync Datadog downtimes. |
| host_tags | Sync Datadog host tags. |
| logs_archives | Sync Datadog logs archives. Requires GCP, Azure, or AWS integration. |
| logs_archives_order | Sync Datadog logs archives order. |
| logs_custom_pipelines (**deprecated**) | Sync Datadog logs custom pipelines. |
| logs_indexes | Sync Datadog logs indexes. |
| logs_indexes_order | Sync Datadog logs indexes order. |
@@ -257,7 +258,8 @@ See [Supported resources](#supported-resources) section below for potential resource dependencies
| downtime_schedules | monitors |
| downtimes (**deprecated**) | monitors |
| host_tags | - |
| logs_archives | - (Requires manual setup of AWS, GCP or Azure integrations) |
| logs_archives | - (Requires manual setup of AWS, GCP and Azure integration) |
| logs_archives_order | logs_archives |
| logs_custom_pipelines (**deprecated**) | - |
| logs_indexes | - |
| logs_indexes_order | logs_indexes |
115 changes: 115 additions & 0 deletions datadog_sync/model/logs_archives_order.py
@@ -0,0 +1,115 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the 3-clause BSD style license (see LICENSE).
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, Tuple
from copy import deepcopy

from deepdiff.operator import BaseOperator

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig

if TYPE_CHECKING:
    from datadog_sync.utils.custom_client import CustomClient


class LogsArchivesOrderIdsComparator(BaseOperator):
    def match(self, level):
        if "archive_ids" in level.t1 and "archive_ids" in level.t2:
            # make copy so we do not mutate the original
            level.t1 = deepcopy(level.t1)
            level.t2 = deepcopy(level.t2)

            # If we are at the top level, modify the list to exclude extra archives in destination.
            t1 = set(level.t1["archive_ids"])
            t2 = set(level.t2["archive_ids"])
            d_ignore = t1 - t2

            level.t1["archive_ids"] = [_id for _id in level.t1["archive_ids"] if _id not in d_ignore]
        return True

    def give_up_diffing(self, level, diff_instance) -> bool:
        return False


class LogsArchivesOrder(BaseResource):
    resource_type = "logs_archives_order"
    resource_config = ResourceConfig(
        concurrent=False,
        base_path="/api/v2/logs/config/archive-order",
        resource_connections={
            "logs_archives": ["data.attributes.archive_ids"],
        },
        deep_diff_config={
            "ignore_order": False,
            "custom_operators": [LogsArchivesOrderIdsComparator()],
        },
    )
    # Additional LogsArchivesOrder specific attributes
    destination_archives_order: Dict[str, Dict] = dict()
    default_id: str = "logs-archives-order"

    async def get_resources(self, client: CustomClient) -> List[Dict]:
        resp = await client.get(self.resource_config.base_path)

        return [resp]

    async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]:
        if _id:
            source_client = self.config.source_client
            resource = await source_client.get(self.resource_config.base_path)

        return self.default_id, resource

    async def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        self.destination_archives_order = await self.get_destination_archives_order()

    async def pre_apply_hook(self) -> None:
        pass

    async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        if not self.destination_archives_order:
            raise Exception("Failed to retrieve destination org's logs archive order")

        self.config.state.destination[self.resource_type][_id] = self.destination_archives_order
        return await self.update_resource(_id, resource)

    async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        destination_resources = (
            self.destination_archives_order or self.config.state.destination[self.resource_type][_id]
        )
        # Drop archive ids from the source order that do not exist in the destination org.
        ids_to_omit = set(resource["data"]["attributes"]["archive_ids"]) - set(
            destination_resources["data"]["attributes"]["archive_ids"]
        )

        # Archives that exist only in the destination keep their place, appended after the synced ones.
        extra_ids_to_include = [
            _id
            for _id in destination_resources["data"]["attributes"]["archive_ids"]
            if _id not in resource["data"]["attributes"]["archive_ids"]
        ]

        resource["data"]["attributes"]["archive_ids"] = [
            _id for _id in resource["data"]["attributes"]["archive_ids"] if _id not in ids_to_omit
        ]
        resource["data"]["attributes"]["archive_ids"] = (
            resource["data"]["attributes"]["archive_ids"] + extra_ids_to_include
        )

        destination_client = self.config.destination_client
        resp = await destination_client.put(self.resource_config.base_path, resource)

        return _id, resp

    async def delete_resource(self, _id: str) -> None:
        self.config.logger.warning("logs_archives_order cannot be deleted. Removing resource from config only.")

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        return super(LogsArchivesOrder, self).connect_id(key, r_obj, resource_to_connect)

    async def get_destination_archives_order(self):
        destination_client = self.config.destination_client
        resp = await self.get_resources(destination_client)

        return resp[0]
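
The custom deepdiff operator above strips destination-only archives from the destination side before comparison, so only the relative order of archives managed by the source org can produce a diff. A minimal sketch of that behavior, assuming deepdiff is installed and the module is importable as datadog_sync.model.logs_archives_order; the archive ids are made up for illustration:

from deepdiff import DeepDiff

from datadog_sync.model.logs_archives_order import LogsArchivesOrderIdsComparator

# Destination has an extra archive ("dest-only") that the source org does not manage.
destination = {"archive_ids": ["dest-only", "archive-a", "archive-b"]}
source = {"archive_ids": ["archive-a", "archive-b"]}

# With the custom operator, the destination-only id is removed from the destination
# side before diffing, so no difference should be reported here.
diff = DeepDiff(
    destination,
    source,
    ignore_order=False,
    custom_operators=[LogsArchivesOrderIdsComparator()],
)
print(diff)  # expected: {}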
1 change: 1 addition & 0 deletions datadog_sync/models/__init__.py
@@ -10,6 +10,7 @@
from datadog_sync.model.downtime_schedules import DowntimeSchedules
from datadog_sync.model.downtimes import Downtimes
from datadog_sync.model.host_tags import HostTags
from datadog_sync.model.logs_archives_order import LogsArchivesOrder
from datadog_sync.model.logs_archives import LogsArchives
from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines
from datadog_sync.model.logs_indexes import LogsIndexes
@@ -0,0 +1 @@
2024-09-10T11:05:05.873233-04:00
@@ -0,0 +1,19 @@
interactions:
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order
response:
body:
string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["hbGkZhQ-RlyB-um-Wg7NIw",
"vnoZzsiUS1mWBwIsQdKyug", "79uMvyn6SfS6uspxJUaTqQ"]}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
version: 1
@@ -0,0 +1 @@
2024-09-10T11:05:06.042240-04:00
@@ -0,0 +1 @@
2024-09-10T11:05:04.341981-04:00
@@ -0,0 +1,35 @@
interactions:
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.com/api/v2/logs/config/archive-order
response:
body:
string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["RK1PeXaNRButwKNMn_dRJQ",
"V49TnL93R0C3QADZQllO5Q"]}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order
response:
body:
string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["79uMvyn6SfS6uspxJUaTqQ"]}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
version: 1
@@ -0,0 +1 @@
2024-09-10T11:05:04.676451-04:00
@@ -0,0 +1,130 @@
interactions:
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.com/api/v2/logs/config/archives/RK1PeXaNRButwKNMn_dRJQ
response:
body:
string: '{"data": {"type": "archives", "id": "RK1PeXaNRButwKNMn_dRJQ", "attributes":
{"name": "my first azure archive", "query": "service:toto", "state": "UNKNOWN",
"destination": {"container": "my-container", "storage_account": "storageaccount",
"path": "/path/blou", "type": "azure", "integration": {"tenant_id": "92f7df25-f9d7-4e76-a3b6-4011e64958ea",
"client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, "rehydration_tags":
[], "include_tags": false, "rehydration_max_scan_size_in_gb": null}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.com/api/v2/logs/config/archives/V49TnL93R0C3QADZQllO5Q
response:
body:
string: '{"data": {"type": "archives", "id": "V49TnL93R0C3QADZQllO5Q", "attributes":
{"name": "my first s3 archive", "query": "service:tutu", "state": "UNKNOWN",
"destination": {"bucket": "my-bucket", "path": "/path/foo", "type": "s3",
"integration": {"role_name": "testacc-datadog-integration-role", "account_id":
"123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], "include_tags":
true, "rehydration_max_scan_size_in_gb": 123}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: '{"data": {"type": "archives", "attributes": {"name": "my first azure archive",
"query": "service:toto", "destination": {"container": "my-container", "storage_account":
"storageaccount", "path": "/path/blou", "type": "azure", "integration": {"tenant_id":
"92f7df25-f9d7-4e76-a3b6-4011e64958ea", "client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}},
"rehydration_tags": [], "include_tags": false, "rehydration_max_scan_size_in_gb":
null}}}'
headers:
Content-Type:
- application/json
method: POST
uri: https://api.datadoghq.eu/api/v2/logs/config/archives
response:
body:
string: '{"data": {"type": "archives", "id": "hbGkZhQ-RlyB-um-Wg7NIw", "attributes":
{"name": "my first azure archive", "query": "service:toto", "state": "UNKNOWN",
"destination": {"container": "my-container", "storage_account": "storageaccount",
"path": "/path/blou", "type": "azure", "integration": {"tenant_id": "92f7df25-f9d7-4e76-a3b6-4011e64958ea",
"client_id": "a75fbdd2-ade6-43d0-a810-4d886c53871e"}}, "rehydration_tags":
[], "include_tags": false, "rehydration_max_scan_size_in_gb": null}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: '{"data": {"type": "archives", "attributes": {"name": "my first s3 archive",
"query": "service:tutu", "destination": {"bucket": "my-bucket", "path": "/path/foo",
"type": "s3", "integration": {"role_name": "testacc-datadog-integration-role",
"account_id": "123456789112"}}, "rehydration_tags": ["team:intake", "team:app"],
"include_tags": true, "rehydration_max_scan_size_in_gb": 123}}}'
headers:
Content-Type:
- application/json
method: POST
uri: https://api.datadoghq.eu/api/v2/logs/config/archives
response:
body:
string: '{"data": {"type": "archives", "id": "vnoZzsiUS1mWBwIsQdKyug", "attributes":
{"name": "my first s3 archive", "query": "service:tutu", "state": "UNKNOWN",
"destination": {"bucket": "my-bucket", "path": "/path/foo", "type": "s3",
"integration": {"role_name": "testacc-datadog-integration-role", "account_id":
"123456789112"}}, "rehydration_tags": ["team:intake", "team:app"], "include_tags":
true, "rehydration_max_scan_size_in_gb": 123}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: null
headers:
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order
response:
body:
string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["79uMvyn6SfS6uspxJUaTqQ",
"hbGkZhQ-RlyB-um-Wg7NIw", "vnoZzsiUS1mWBwIsQdKyug"]}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
- request:
body: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["hbGkZhQ-RlyB-um-Wg7NIw",
"vnoZzsiUS1mWBwIsQdKyug", "79uMvyn6SfS6uspxJUaTqQ"]}}}'
headers:
Content-Type:
- application/json
method: PUT
uri: https://api.datadoghq.eu/api/v2/logs/config/archive-order
response:
body:
string: '{"data": {"type": "archive_order", "attributes": {"archive_ids": ["hbGkZhQ-RlyB-um-Wg7NIw",
"vnoZzsiUS1mWBwIsQdKyug", "79uMvyn6SfS6uspxJUaTqQ"]}}}'
headers:
Content-Type:
- application/json
status:
code: 200
message: OK
version: 1
18 changes: 18 additions & 0 deletions tests/integration/resources/test_logs_archives_order.py
@@ -0,0 +1,18 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the 3-clause BSD style license (see LICENSE).
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

import pytest

from tests.integration.helpers import BaseResourcesTestClass
from datadog_sync.models import LogsArchivesOrder


class TestLogsArchivesOrder(BaseResourcesTestClass):
    resource_type = LogsArchivesOrder.resource_type
    force_missing_deps = True

    @pytest.mark.skip(reason="resource is only updated by default")
    def test_resource_update_sync(self):
        pass
