Skip to content

Commit

Permalink
Add support for restriction_policies (#213)
Browse files Browse the repository at this point in the history
* add support for restriction_policies

* add tests and update readme

* break out of iteration early

* add teams connection

* re-record cassettes

* Code review

* lint and re-record with tagging config

* re-record additional cassettes
  • Loading branch information
skarimo authored Feb 26, 2024
1 parent 9e3fbaf commit a945f52
Show file tree
Hide file tree
Showing 29 changed files with 7,175 additions and 97 deletions.
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ The source organization will not be modified, but the destination organization w
| logs_metrics | Sync Datadog logs metrics. |
| logs_restriction_queries | Sync Datadog logs restriction queries. |
| metric_tag_configurations | Sync Datadog metric tags configurations. |
| restriction_policies | Sync Datadog restriction policies. |
| teams | Sync Datadog teams (excluding users and permissions). |

***Note:*** `logs_custom_pipelines` resource has been deprecated in favor of `logs_pipelines` resource which supports both logs OOTB integration and custom pipelines. To migrate to the new resource, rename the existing state files from `logs_custom_pipelines.json` to `logs_pipelines.json` for both source and destination files.
Expand Down Expand Up @@ -280,4 +281,5 @@ See [Supported resources](#supported-resources) section below for potential reso
| logs_metrics | - |
| logs_restriction_queries | roles |
| metric_tag_configurations | - |
| restriction_policies | dashboards, service_level_objectives, notebooks, users, roles |
| teams | - |
179 changes: 179 additions & 0 deletions datadog_sync/model/restriction_policies.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,179 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the 3-clause BSD style license (see LICENSE).
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, Tuple

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig
from datadog_sync.utils.resource_utils import CustomClientHTTPError, SkipResource

if TYPE_CHECKING:
from datadog_sync.utils.custom_client import CustomClient


class RestrictionPolicies(BaseResource):
    """Sync Datadog restriction policies between organizations.

    Restriction policies are keyed by the resource they protect
    (``dashboard:<id>``, ``slo:<id>``, ``notebook:<id>``), so there is no
    list endpoint to enumerate them directly: ``get_resources`` instead
    builds candidate IDs from the connected resources and ``import_resource``
    probes each one, skipping 404s and policies without bindings.
    """

    resource_type = "restriction_policies"
    resource_config = ResourceConfig(
        resource_connections={
            # Primary ID connections
            "dashboards": ["id"],
            "service_level_objectives": ["id"],
            "notebooks": ["id"],
            # # TODO: Commented out until security rules are supported
            # "security_rules": ["id"],
            # Bindings connections
            "users": ["attributes.bindings.principals"],
            "roles": ["attributes.bindings.principals"],
            "teams": ["attributes.bindings.principals"],
        },
        base_path="/api/v2/restriction_policy",
        excluded_attributes=[],
    )
    # Additional RestrictionPolicies specific attributes
    orgs_path: str = "/api/v1/org"
    # Template filled in with the destination org public_id by pre_apply_hook.
    org_principal: str = "org:{}"

    def get_resources(self, client: CustomClient) -> List[Dict]:
        """Return candidate policy IDs derived from all supported resources."""
        policies: List[Dict] = []

        dashboards = self.config.resources["dashboards"].get_resources(client)
        notebooks = self.config.resources["notebooks"].get_resources(client)
        slos = self.config.resources["service_level_objectives"].get_resources(client)
        # # TODO: Commented out until security rules are supported
        # security_rules = self.config.resources["security_rules"].get_resources(client)

        for dashboard in dashboards or []:
            policies.append({"id": f"dashboard:{dashboard['id']}"})
        for notebook in notebooks or []:
            policies.append({"id": f"notebook:{notebook['id']}"})
        for slo in slos or []:
            policies.append({"id": f"slo:{slo['id']}"})
        # # TODO: Commented out until security rules are supported
        # for rule in security_rules or []:
        #     policies.append({"id": f"security-rule:{rule['id']}"})

        return policies

    def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]:
        """Fetch a single policy by ID, skipping missing or binding-less ones.

        Raises:
            SkipResource: when the policy does not exist (404) or has no bindings.
        """
        source_client = self.config.source_client
        import_id = _id or resource["id"]

        try:
            resource = source_client.get(self.resource_config.base_path + f"/{import_id}").json()
        except CustomClientHTTPError as e:
            if e.status_code == 404:
                # Not every connected resource has a policy attached.
                # Use import_id (always set) rather than _id (may be None).
                raise SkipResource(import_id, self.resource_type, "Resource does not exist.")
            else:
                raise e

        if not resource["data"]["attributes"]["bindings"]:
            raise SkipResource(import_id, self.resource_type, "Resource does not have any bindings.")

        return import_id, resource["data"]

    def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        """Rewrite `org:` principals to the destination org's principal."""
        for binding in resource["attributes"]["bindings"]:
            for i, key in enumerate(binding["principals"]):
                if key.startswith("org:"):
                    binding["principals"][i] = self.org_principal
                    # A binding can contain at most one org principal.
                    break

    def pre_apply_hook(self) -> None:
        """Resolve the destination org public ID used for `org:` principals."""
        destination_client = self.config.destination_client
        try:
            org = destination_client.get(self.orgs_path).json()["orgs"][0]
        except Exception as e:
            self.config.logger.error(f"Failed to get org details: {e}")
            # Without the org public_id we cannot translate org principals;
            # re-raise instead of falling through to an UnboundLocalError.
            raise

        self.org_principal = self.org_principal.format(org["public_id"])

    def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Create the policy on the destination (the API upserts via POST)."""
        destination_client = self.config.destination_client
        resource_id = resource["id"]
        payload = {"data": resource}
        resp = destination_client.post(self.resource_config.base_path + f"/{resource_id}", payload).json()

        return _id, resp["data"]

    def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Update the policy on the destination (same POST upsert as create)."""
        destination_client = self.config.destination_client
        resource_id = resource["id"]
        payload = {"data": resource}
        resp = destination_client.post(self.resource_config.base_path + f"/{resource_id}", payload).json()

        return _id, resp["data"]

    def delete_resource(self, _id: str) -> None:
        """Delete the policy from the destination org."""
        destination_client = self.config.destination_client
        destination_client.delete(
            self.resource_config.base_path + f"/{self.resource_config.destination_resources[_id]['id']}"
        )

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        """Translate source resource/principal IDs to destination IDs.

        Returns the list of source IDs that could not be resolved on the
        destination (empty when everything connected).
        """
        dashboards = self.config.resources["dashboards"].resource_config.destination_resources
        slos = self.config.resources["service_level_objectives"].resource_config.destination_resources
        notebooks = self.config.resources["notebooks"].resource_config.destination_resources
        users = self.config.resources["users"].resource_config.destination_resources
        roles = self.config.resources["roles"].resource_config.destination_resources
        teams = self.config.resources["teams"].resource_config.destination_resources

        failed_connections = []
        if key == "id":
            # maxsplit=1 so IDs that themselves contain ':' don't break unpacking.
            _type, _id = r_obj[key].split(":", 1)
            if resource_to_connect == "dashboards" and _type == "dashboard":
                if _id in dashboards:
                    r_obj[key] = f"dashboard:{dashboards[_id]['id']}"
                else:
                    failed_connections.append(_id)
            elif resource_to_connect == "service_level_objectives" and _type == "slo":
                if _id in slos:
                    r_obj[key] = f"slo:{slos[_id]['id']}"
                else:
                    failed_connections.append(_id)
            elif resource_to_connect == "notebooks" and _type == "notebook":
                if _id in notebooks:
                    r_obj[key] = f"notebook:{notebooks[_id]['id']}"
                else:
                    failed_connections.append(_id)

        if key == "principals":
            for i, policy_id in enumerate(r_obj[key]):
                _type, _id = policy_id.split(":", 1)

                if resource_to_connect == "users" and _type == "user":
                    if _id in users:
                        r_obj[key][i] = f"user:{users[_id]['id']}"
                    else:
                        failed_connections.append(_id)
                elif resource_to_connect == "roles" and _type == "role":
                    if _id in roles:
                        r_obj[key][i] = f"role:{roles[_id]['id']}"
                    else:
                        failed_connections.append(_id)
                elif resource_to_connect == "teams" and _type == "team":
                    if _id in teams:
                        r_obj[key][i] = f"team:{teams[_id]['id']}"
                    else:
                        failed_connections.append(_id)

        return failed_connections
1 change: 1 addition & 0 deletions datadog_sync/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,5 @@
from datadog_sync.model.logs_indexes import LogsIndexes
from datadog_sync.model.logs_restriction_queries import LogsRestrictionQueries
from datadog_sync.model.spans_metrics import SpansMetrics
from datadog_sync.model.restriction_policies import RestrictionPolicies
from datadog_sync.model.teams import Teams
6 changes: 6 additions & 0 deletions datadog_sync/utils/resources_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,12 @@ def import_resources(self) -> None:
def diffs(self) -> None:
executor = thread_pool_executor(self.config.max_workers)
futures = []

# Run pre-apply hooks
for resource_type in set(self.resources_manager.all_resources.values()):
futures.append(executor.submit(self.config.resources[resource_type]._pre_apply_hook))
wait(futures)

for _id, resource_type in self.resources_manager.all_resources.items():
futures.append(executor.submit(self._diffs_worker, _id, resource_type))

Expand Down
26 changes: 20 additions & 6 deletions tests/integration/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,17 @@ class BaseResourcesTestClass:
field_to_update = None
resources_to_preserve_filter = None
filter = ""
force_missing_deps = False

@pytest.fixture(autouse=True, scope="class")
def setup(self, tmpdir_factory):
my_tmpdir = tmpdir_factory.mktemp("tmp")
os.chdir(my_tmpdir)

def test_resource_import(self, runner):
ret = runner.invoke(cli, ["import", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"])
ret = runner.invoke(
cli, ["import", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"]
)
assert 0 == ret.exit_code

        # Assert at least one resource is imported
Expand All @@ -58,7 +61,10 @@ def test_resource_import(self, runner):

def test_resource_sync(self, runner, caplog):
caplog.set_level(logging.DEBUG)
ret = runner.invoke(cli, ["sync", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"])
cmd_list = ["sync", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"]
if self.force_missing_deps:
cmd_list.append("--force-missing-dependencies")
ret = runner.invoke(cli, cmd_list)
assert 0 == ret.exit_code

# By default, resources with failed connections are skipped. Hence, count number of skipped + success
Expand All @@ -84,17 +90,23 @@ def test_resource_update_sync(self, runner, caplog):
save_source_resources(self.resource_type, source_resources)

# assert diff is produced
ret = runner.invoke(cli, ["diffs", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"])
ret = runner.invoke(
cli, ["diffs", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"]
)
assert ret.output
assert 0 == ret.exit_code

# sync the updated resources
ret = runner.invoke(cli, ["sync", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"])
ret = runner.invoke(
cli, ["sync", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"]
)
assert 0 == ret.exit_code
caplog.clear()

# assert diff is no longer produced
ret = runner.invoke(cli, ["diffs", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"])
ret = runner.invoke(
cli, ["diffs", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"]
)
assert 0 == ret.exit_code
assert not ret.output

Expand All @@ -105,7 +117,9 @@ def test_resource_update_sync(self, runner, caplog):

def test_no_resource_diffs(self, runner, caplog):
caplog.set_level(logging.DEBUG)
ret = runner.invoke(cli, ["diffs", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"])
ret = runner.invoke(
cli, ["diffs", "--validate=false", f"--resources={self.resource_type}", f"--filter={self.filter}"]
)
assert not ret.output
assert 0 == ret.exit_code

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-02-14T17:16:40.026583-05:00
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
interactions:
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Content-Type:
- application/json
method: GET
uri: https://api.datadoghq.eu/api/v1/org
response:
body:
string: '{"orgs": [{"public_id": "3cda39a0-9e99-11ee-94d5-da7ad0900005", "name":
"datadog-sync-cli N[EU Test Org]", "description": null, "created": "2023-12-19
18:05:48", "subscription": {"id": 1000228561, "billing_plan_id": 8, "started":
1703009149, "expires": null, "finished": null, "type": "dogfood", "is_custom":
false, "is_trial": false, "is_expired": true}, "billing": {}, "settings":
{"default_landing_page": "Dashboard Lists", "saml": {"enabled": false}, "saml_can_be_enabled":
false, "saml_login_url": null, "saml_idp_metadata_uploaded": false, "saml_idp_endpoint":
null, "saml_idp_metadata_valid_until": null, "custom_landing_page": null,
"saml_idp_initiated_login": {"enabled": false}, "saml_strict_mode": {"enabled":
false}, "saml_autocreate_access_role": "st", "saml_autocreate_users_domains":
{"enabled": false, "domains": []}, "private_widget_share": false, "manage_reports":
null}}]}'
headers: {}
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-02-14T17:16:40.516435-05:00
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
interactions:
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Content-Type:
- application/json
method: DELETE
uri: https://api.datadoghq.eu/api/v2/restriction_policy/notebook:122913
response:
body:
string: ''
headers: {}
status:
code: 204
message: No Content
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Content-Type:
- application/json
method: DELETE
uri: https://api.datadoghq.eu/api/v2/restriction_policy/slo:696fc8b0f26a5ada86a7f132a061c84f
response:
body:
string: ''
headers: {}
status:
code: 204
message: No Content
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Content-Type:
- application/json
method: DELETE
uri: https://api.datadoghq.eu/api/v2/restriction_policy/dashboard:rgc-dqv-bdy
response:
body:
string: ''
headers: {}
status:
code: 204
message: No Content
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-02-14T17:16:25.724824-05:00
Loading

0 comments on commit a945f52

Please sign in to comment.