This repository has been archived by the owner on Jun 7, 2024. It is now read-only.

chore: update charm libraries
github-actions[bot] authored Jul 16, 2023
1 parent 7086d2f commit 8e386b0
Showing 2 changed files with 83 additions and 31 deletions.
65 changes: 50 additions & 15 deletions lib/charms/data_platform_libs/v0/database_requires.py
@@ -1,4 +1,4 @@
# Copyright 2022 Canonical Ltd.
# Copyright 2023 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

"""[DEPRECATED] Relation 'requires' side abstraction for database relation.
r"""[DEPRECATED] Relation 'requires' side abstraction for database relation.
This library is a uniform interface to a selection of common database
metadata, with added custom events that add convenience to database management,
@@ -160,7 +160,7 @@ def _on_cluster2_database_created(self, event: DatabaseCreatedEvent) -> None:

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version.
LIBPATCH = 5
LIBPATCH = 6

logger = logging.getLogger(__name__)

@@ -171,16 +171,25 @@ class DatabaseEvent(RelationEvent):
@property
def endpoints(self) -> Optional[str]:
"""Returns a comma separated list of read/write endpoints."""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("endpoints")

@property
def password(self) -> Optional[str]:
"""Returns the password for the created user."""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("password")

@property
def read_only_endpoints(self) -> Optional[str]:
"""Returns a comma separated list of read only endpoints."""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("read-only-endpoints")

@property
@@ -189,16 +198,25 @@ def replset(self) -> Optional[str]:
MongoDB only.
"""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("replset")

@property
def tls(self) -> Optional[str]:
"""Returns whether TLS is configured."""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("tls")

@property
def tls_ca(self) -> Optional[str]:
"""Returns TLS CA."""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("tls-ca")

@property
@@ -207,11 +225,17 @@ def uris(self) -> Optional[str]:
MongoDB, Redis, OpenSearch and Kafka only.
"""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("uris")

@property
def username(self) -> Optional[str]:
"""Returns the created username."""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("username")

@property
@@ -220,6 +244,9 @@ def version(self) -> Optional[str]:
Version as informed by the database daemon.
"""
if not self.relation.app:
return None

return self.relation.data[self.relation.app].get("version")


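Note: every payload property on DatabaseEvent now checks self.relation.app and returns None when the remote application is gone or has not yet written data, rather than attempting to index the databag with a missing application. Charm code that consumes these events should therefore treat each field as optional. A minimal handler sketch under that assumption (the enclosing charm class and the _configure_client helper are hypothetical, not part of the library; see the wiring sketch at the end of this file's diff):

from charms.data_platform_libs.v0.database_requires import DatabaseCreatedEvent

# Sketch of a handler method on a requirer charm.
def _on_database_created(self, event: DatabaseCreatedEvent) -> None:
    username = event.username    # may be None until the provider publishes data
    password = event.password
    endpoints = event.endpoints  # comma-separated "host:port" pairs, or None
    if not (username and password and endpoints):
        event.defer()            # try again once full credentials are available
        return
    self._configure_client(username, password, endpoints.split(","))  # hypothetical helper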
@@ -259,15 +286,15 @@ class DatabaseEvents(CharmEvents):
class DatabaseRequires(Object):
"""Requires-side of the database relation."""

on = DatabaseEvents()
on = DatabaseEvents() # pyright: ignore [reportGeneralTypeIssues]

def __init__(
self,
charm,
relation_name: str,
database_name: str,
extra_user_roles: str = None,
relations_aliases: List[str] = None,
extra_user_roles: Optional[str] = None,
relations_aliases: Optional[List[str]] = None,
):
"""Manager of database client relations."""
super().__init__(charm, relation_name)
@@ -352,9 +379,11 @@ def _diff(self, event: RelationChangedEvent) -> Diff:
# Retrieve the old data from the data key in the local unit relation databag.
old_data = json.loads(event.relation.data[self.local_unit].get("data", "{}"))
# Retrieve the new data from the event relation databag.
new_data = {
key: value for key, value in event.relation.data[event.app].items() if key != "data"
}
new_data = (
{key: value for key, value in event.relation.data[event.app].items() if key != "data"}
if event.app
else {}
)

# These are the keys that were added to the databag and triggered this event.
added = new_data.keys() - old_data.keys()
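The _diff helper now tolerates event.app being unset by treating the remote application databag as an empty mapping. The resulting Diff is then derived from plain set operations on the old and new key sets; a simplified, standalone illustration (the real helper reads old_data from the local unit databag, and the deleted/changed sets are computed just below the lines shown here):

old_data = {"username": "admin", "endpoints": "10.0.0.1:5432"}
new_data = {"username": "admin", "endpoints": "10.0.0.2:5432", "password": "s3cr3t"}

added = new_data.keys() - old_data.keys()      # {"password"}
deleted = old_data.keys() - new_data.keys()    # set()
changed = {
    key
    for key in old_data.keys() & new_data.keys()
    if old_data[key] != new_data[key]
}                                              # {"endpoints"}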
@@ -413,9 +442,11 @@ def fetch_relation_data(self) -> dict:
"""
data = {}
for relation in self.relations:
data[relation.id] = {
key: value for key, value in relation.data[relation.app].items() if key != "data"
}
data[relation.id] = (
{key: value for key, value in relation.data[relation.app].items() if key != "data"}
if relation.app
else {}
)
return data

def _update_relation_data(self, relation_id: int, data: dict) -> None:
@@ -461,7 +492,9 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None:
if "username" in diff.added and "password" in diff.added:
# Emit the default event (the one without an alias).
logger.info("database created at %s", datetime.now())
self.on.database_created.emit(event.relation, app=event.app, unit=event.unit)
getattr(self.on, "database_created").emit(
event.relation, app=event.app, unit=event.unit
)

# Emit the aliased event (if any).
self._emit_aliased_event(event, "database_created")
@@ -475,7 +508,9 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None:
if "endpoints" in diff.added or "endpoints" in diff.changed:
# Emit the default event (the one without an alias).
logger.info("endpoints changed on %s", datetime.now())
self.on.endpoints_changed.emit(event.relation, app=event.app, unit=event.unit)
getattr(self.on, "endpoints_changed").emit(
event.relation, app=event.app, unit=event.unit
)

# Emit the aliased event (if any).
self._emit_aliased_event(event, "endpoints_changed")
@@ -489,7 +524,7 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None:
if "read-only-endpoints" in diff.added or "read-only-endpoints" in diff.changed:
# Emit the default event (the one without an alias).
logger.info("read-only-endpoints changed on %s", datetime.now())
self.on.read_only_endpoints_changed.emit(
getattr(self.on, "read_only_endpoints_changed").emit(
event.relation, app=event.app, unit=event.unit
)

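For context, this handler is what ultimately emits database_created, endpoints_changed and read_only_endpoints_changed; the switch to getattr(self.on, ...) is functionally equivalent to the previous attribute access and mainly sidesteps static type-checker complaints about the dynamically populated events container (note the pyright ignore on the `on` attribute above). A minimal requirer wiring, following the pattern from the library docstring (charm, relation and database names are illustrative):

from charms.data_platform_libs.v0.database_requires import DatabaseRequires
from ops.charm import CharmBase


class ApplicationCharm(CharmBase):
    """Illustrative requirer charm."""

    def __init__(self, *args):
        super().__init__(*args)
        self.database = DatabaseRequires(self, relation_name="database", database_name="mydb")
        self.framework.observe(self.database.on.database_created, self._on_database_created)
        self.framework.observe(self.database.on.endpoints_changed, self._on_endpoints_changed)

    def _on_database_created(self, event) -> None:
        # Credentials may be None; see the DatabaseEvent property guards above.
        ...

    def _on_endpoints_changed(self, event) -> None:
        ...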
49 changes: 33 additions & 16 deletions lib/charms/prometheus_k8s/v0/prometheus_scrape.py
@@ -370,7 +370,7 @@ def _on_scrape_targets_changed(self, event):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 37
LIBPATCH = 38

logger = logging.getLogger(__name__)

@@ -602,15 +602,22 @@ def render_alertmanager_static_configs(alertmanagers: List[str]):
# Create a mapping from paths to netlocs
# Group alertmanager targets into a dictionary of lists:
# {path: [netloc1, netloc2]}
paths = defaultdict(list) # type: Dict[str, List[str]]
paths = defaultdict(list) # type: Dict[Tuple[str, str], List[str]]
for parsed in map(urlparse, sanitized):
path = parsed.path or "/"
paths[path].append(parsed.netloc)
paths[(parsed.scheme, path)].append(parsed.netloc)

return {
"alertmanagers": [
{"path_prefix": path_prefix, "static_configs": [{"targets": netlocs}]}
for path_prefix, netlocs in paths.items()
{
"scheme": scheme,
"path_prefix": path_prefix,
"static_configs": [{"targets": netlocs}],
# FIXME figure out how to get alertmanager's ca_file into here
# Without this, prom errors: "x509: certificate signed by unknown authority"
"tls_config": {"insecure_skip_verify": True},
}
for (scheme, path_prefix), netlocs in paths.items()
]
}

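Alertmanager targets are now grouped by (scheme, path) instead of path alone, and every rendered group carries its scheme plus an insecure_skip_verify TLS stanza (the FIXME above notes that the Alertmanager CA is not yet plumbed through). Given two illustrative, already-sanitized URLs, the rendered structure is expected to look roughly like this:

urls = ["http://am0.example.com:9093", "https://am1.example.com:9093/model-am"]

expected = {
    "alertmanagers": [
        {
            "scheme": "http",
            "path_prefix": "/",
            "static_configs": [{"targets": ["am0.example.com:9093"]}],
            "tls_config": {"insecure_skip_verify": True},
        },
        {
            "scheme": "https",
            "path_prefix": "/model-am",
            "static_configs": [{"targets": ["am1.example.com:9093"]}],
            "tls_config": {"insecure_skip_verify": True},
        },
    ]
}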
@@ -1353,29 +1360,39 @@ def _static_scrape_config(self, relation) -> list:
if not relation.units:
return []

scrape_jobs = json.loads(relation.data[relation.app].get("scrape_jobs", "[]"))
scrape_configs = json.loads(relation.data[relation.app].get("scrape_jobs", "[]"))

if not scrape_jobs:
if not scrape_configs:
return []

scrape_metadata = json.loads(relation.data[relation.app].get("scrape_metadata", "{}"))

if not scrape_metadata:
return scrape_jobs
return scrape_configs

topology = JujuTopology.from_dict(scrape_metadata)

job_name_prefix = "juju_{}_prometheus_scrape".format(topology.identifier)
scrape_jobs = PrometheusConfig.prefix_job_names(scrape_jobs, job_name_prefix)
scrape_jobs = PrometheusConfig.sanitize_scrape_configs(scrape_jobs)
scrape_configs = PrometheusConfig.prefix_job_names(scrape_configs, job_name_prefix)
scrape_configs = PrometheusConfig.sanitize_scrape_configs(scrape_configs)

hosts = self._relation_hosts(relation)

scrape_jobs = PrometheusConfig.expand_wildcard_targets_into_individual_jobs(
scrape_jobs, hosts, topology
scrape_configs = PrometheusConfig.expand_wildcard_targets_into_individual_jobs(
scrape_configs, hosts, topology
)

return scrape_jobs
# If scheme is https but no ca section present, then auto add "insecure_skip_verify",
# otherwise scraping errors out with "x509: certificate signed by unknown authority".
# https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config
for scrape_config in scrape_configs:
tls_config = scrape_config.get("tls_config", {})
ca_present = "ca" in tls_config or "ca_file" in tls_config
if scrape_config.get("scheme") == "https" and not ca_present:
tls_config["insecure_skip_verify"] = True
scrape_config["tls_config"] = tls_config

return scrape_configs

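The loop above ensures that any rendered scrape config using https without a CA still scrapes (with certificate verification disabled) rather than failing with "x509: certificate signed by unknown authority"; configs that already declare "ca" or "ca_file" in their tls_config are left untouched. A self-contained restatement with an illustrative config:

scrape_config = {
    "job_name": "juju_mymodel_prometheus_scrape",  # illustrative
    "scheme": "https",
    "static_configs": [{"targets": ["10.1.2.3:8080"]}],
}

tls_config = scrape_config.get("tls_config", {})
ca_present = "ca" in tls_config or "ca_file" in tls_config
if scrape_config.get("scheme") == "https" and not ca_present:
    tls_config["insecure_skip_verify"] = True
    scrape_config["tls_config"] = tls_config

# scrape_config["tls_config"] is now {"insecure_skip_verify": True}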
def _relation_hosts(self, relation: Relation) -> Dict[str, Tuple[str, str]]:
"""Returns a mapping from unit names to (address, path) tuples, for the given relation."""
Expand Down Expand Up @@ -1793,10 +1810,10 @@ def _scrape_jobs(self) -> list:
A list of dictionaries, where each dictionary specifies a
single scrape job for Prometheus.
"""
jobs = self._jobs if self._jobs else [DEFAULT_JOB]
jobs = self._jobs or []
if callable(self._lookaside_jobs):
return jobs + PrometheusConfig.sanitize_scrape_configs(self._lookaside_jobs())
return jobs
jobs.extend(PrometheusConfig.sanitize_scrape_configs(self._lookaside_jobs()))
return jobs or [DEFAULT_JOB]

@property
def _scrape_metadata(self) -> dict:
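The _scrape_jobs change appends lookaside jobs to the statically configured jobs and only falls back to DEFAULT_JOB when both are empty; previously DEFAULT_JOB was substituted whenever no static jobs were set, even if a lookaside callable would have produced some. A condensed restatement with stand-in values (the DEFAULT_JOB shape shown here is an assumption, not copied from the library):

DEFAULT_JOB = {"metrics_path": "/metrics", "static_configs": [{"targets": ["*:80"]}]}  # assumed shape


def lookaside_jobs():
    # Stands in for self._lookaside_jobs, an optional callable returning extra jobs.
    return [{"job_name": "extra", "static_configs": [{"targets": ["10.0.0.5:9100"]}]}]


static_jobs = []  # stands in for self._jobs

jobs = static_jobs or []
if callable(lookaside_jobs):
    jobs.extend(lookaside_jobs())  # sanitized via PrometheusConfig in the real code
jobs = jobs or [DEFAULT_JOB]       # catch-all only when both sources were empty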
