From 951040c8edafee3516517c9b75eb5a38cd3bbb79 Mon Sep 17 00:00:00 2001 From: Dragomir Penev <6687393+dragomirp@users.noreply.github.com> Date: Fri, 11 Oct 2024 20:04:21 +0300 Subject: [PATCH] [MISC] Switch to typed charm and fixes (#435) * Switch to typed charm * Tweak int tests * Add config hash * Syncing the new relation * Sync legacy rel * Tweak hashing * Don't flipflop the legacy flag of the wildcard database * Don't override the db cache * Maintain password * Unit tests * Ignore errors during deploy and relate * Ignore docs for spelling * Bump libs --- .../data_platform_libs/v0/data_models.py | 354 ++++++++++++++++++ .../tempo_coordinator_k8s/v0/charm_tracing.py | 15 +- .../tempo_coordinator_k8s/v0/tracing.py | 17 +- src/charm.py | 68 +++- src/config.py | 21 ++ src/relations/backend_database.py | 10 +- src/relations/db.py | 27 +- src/relations/peers.py | 11 +- src/relations/pgbouncer_provider.py | 35 +- tests/integration/helpers/helpers.py | 1 + tests/integration/relations/test_db_admin.py | 10 +- tests/integration/test_charm.py | 9 +- tests/integration/test_config.py | 70 ++++ tests/integration/test_trust.py | 10 +- tests/unit/relations/test_backend_database.py | 13 +- tests/unit/relations/test_db.py | 9 +- tests/unit/relations/test_peers.py | 4 +- .../unit/relations/test_pgbouncer_provider.py | 5 +- tests/unit/test_charm.py | 11 +- 19 files changed, 620 insertions(+), 80 deletions(-) create mode 100644 lib/charms/data_platform_libs/v0/data_models.py create mode 100644 src/config.py create mode 100644 tests/integration/test_config.py diff --git a/lib/charms/data_platform_libs/v0/data_models.py b/lib/charms/data_platform_libs/v0/data_models.py new file mode 100644 index 000000000..a1dbb8299 --- /dev/null +++ b/lib/charms/data_platform_libs/v0/data_models.py @@ -0,0 +1,354 @@ +# Copyright 2023 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r"""Library to provide simple API for promoting typed, validated and structured dataclass in charms. + +Dict-like data structure are often used in charms. They are used for config, action parameters +and databag. This library aims at providing simple API for using pydantic BaseModel-derived class +in charms, in order to enhance: +* Validation, by embedding custom business logic to validate single parameters or even have + validators that acts across different fields +* Parsing, by loading data into pydantic object we can both allow for other types (e.g. float) to + be used in configuration/parameters as well as specify even nested complex objects for databags +* Static typing checks, by moving from dict-like object to classes with typed-annotated properties, + that can be statically checked using mypy to ensure that the code is correct. 
+ +Pydantic models can be used on: + +* Charm Configuration (as defined in config.yaml) +* Actions parameters (as defined in actions.yaml) +* Application/Unit Databag Information (thus making it more structured and encoded) + + +## Creating models + +Any data-structure can be modeled using dataclasses instead of dict-like objects (e.g. storing +config, action parameters and databags). Within pydantic, we can define dataclasses that provides +also parsing and validation on standard dataclass implementation: + +```python + +from charms.data_platform_libs.v0.data_models import BaseConfigModel + +class MyConfig(BaseConfigModel): + + my_key: int + + @validator("my_key") + def is_lower_than_100(cls, v: int): + if v > 100: + raise ValueError("Too high") + +``` + +This should allow to collapse both parsing and validation as the dataclass object is parsed and +created: + +```python +dataclass = MyConfig(my_key="1") + +dataclass.my_key # this returns 1 (int) +dataclass["my_key"] # this returns 1 (int) + +dataclass = MyConfig(my_key="102") # this returns a ValueError("Too High") +``` + +## Charm Configuration Model + +Using the class above, we can implement parsing and validation of configuration by simply +extending our charms using the `TypedCharmBase` class, as shown below. + +```python +class MyCharm(TypedCharmBase[MyConfig]): + config_type = MyConfig + + # everywhere in the code you will have config property already parsed and validate + def my_method(self): + self.config: MyConfig +``` + +## Action parameters + +In order to parse action parameters, we can use a decorator to be applied to action event +callbacks, as shown below. + +```python +@validate_params(PullActionModel) +def _pull_site_action( + self, event: ActionEvent, + params: Optional[Union[PullActionModel, ValidationError]] = None +): + if isinstance(params, ValidationError): + # handle errors + else: + # do stuff +``` + +Note that this changes the signature of the callbacks by adding an extra parameter with the parsed +counterpart of the `event.params` dict-like field. If validation fails, we return (not throw!) the +exception, to be handled (or raised) in the callback. + +## Databag + +In order to parse databag fields, we define a decorator to be applied to base relation event +callbacks. + +```python +@parse_relation_data(app_model=AppDataModel, unit_model=UnitDataModel) +def _on_cluster_relation_joined( + self, event: RelationEvent, + app_data: Optional[Union[AppDataModel, ValidationError]] = None, + unit_data: Optional[Union[UnitDataModel, ValidationError]] = None +) -> None: + ... +``` + +The parameters `app_data` and `unit_data` refers to the databag of the entity which fired the +RelationEvent. + +When we want to access to a relation databag outsides of an action, it can be useful also to +compact multiple databags into a single object (if there are no conflicting fields), e.g. + +```python + +class ProviderDataBag(BaseClass): + provider_key: str + +class RequirerDataBag(BaseClass): + requirer_key: str + +class MergedDataBag(ProviderDataBag, RequirerDataBag): + pass + +merged_data = get_relation_data_as( + MergedDataBag, relation.data[self.app], relation.data[relation.app] +) + +merged_data.requirer_key +merged_data.provider_key + +``` + +The above code can be generalized to other kinds of merged objects, e.g. application and unit, and +it can be extended to multiple sources beyond 2: + +```python +merged_data = get_relation_data_as( + MergedDataBag, relation.data[self.app], relation.data[relation.app], ... 
+) +``` + +""" + +import json +from functools import reduce, wraps +from typing import Callable, Generic, MutableMapping, Optional, Type, TypeVar, Union + +import pydantic +from ops.charm import ActionEvent, CharmBase, RelationEvent +from ops.model import RelationDataContent +from pydantic import BaseModel, ValidationError + +# The unique Charmhub library identifier, never change it +LIBID = "cb2094c5b07d47e1bf346aaee0fcfcfe" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 4 + +PYDEPS = ["ops>=2.0.0", "pydantic>=1.10,<2"] + +G = TypeVar("G") +T = TypeVar("T", bound=BaseModel) +AppModel = TypeVar("AppModel", bound=BaseModel) +UnitModel = TypeVar("UnitModel", bound=BaseModel) + +DataBagNativeTypes = (int, str, float) + + +class BaseConfigModel(BaseModel): + """Class to be used for defining the structured configuration options.""" + + def __getitem__(self, x): + """Return the item using the notation instance[key].""" + return getattr(self, x.replace("-", "_")) + + +class TypedCharmBase(CharmBase, Generic[T]): + """Class to be used for extending config-typed charms.""" + + config_type: Type[T] + + @property + def config(self) -> T: + """Return a config instance validated and parsed using the provided pydantic class.""" + translated_keys = {k.replace("-", "_"): v for k, v in self.model.config.items()} + return self.config_type(**translated_keys) + + +def validate_params(cls: Type[T]): + """Return a decorator to allow pydantic parsing of action parameters. + + Args: + cls: Pydantic class representing the model to be used for parsing the content of the + action parameter + """ + + def decorator( + f: Callable[[CharmBase, ActionEvent, Union[T, ValidationError]], G] + ) -> Callable[[CharmBase, ActionEvent], G]: + @wraps(f) + def event_wrapper(self: CharmBase, event: ActionEvent): + try: + params = cls( + **{key.replace("-", "_"): value for key, value in event.params.items()} + ) + except ValidationError as e: + params = e + return f(self, event, params) + + return event_wrapper + + return decorator + + +def write(relation_data: RelationDataContent, model: BaseModel): + """Write the data contained in a domain object to the relation databag. + + Args: + relation_data: pointer to the relation databag + model: instance of pydantic model to be written + """ + for key, value in model.dict(exclude_none=False).items(): + if value: + relation_data[key.replace("_", "-")] = ( + str(value) + if any(isinstance(value, _type) for _type in DataBagNativeTypes) + else json.dumps(value) + ) + else: + relation_data[key.replace("_", "-")] = "" + + +def read(relation_data: MutableMapping[str, str], obj: Type[T]) -> T: + """Read data from a relation databag and parse it into a domain object. 
+ + Args: + relation_data: pointer to the relation databag + obj: pydantic class representing the model to be used for parsing + """ + return obj( + **{ + field_name: ( + relation_data[parsed_key] + if field.outer_type_ in DataBagNativeTypes + else json.loads(relation_data[parsed_key]) + ) + for field_name, field in obj.__fields__.items() + # pyright: ignore[reportGeneralTypeIssues] + if (parsed_key := field_name.replace("_", "-")) in relation_data + if relation_data[parsed_key] + } + ) + + +def parse_relation_data( + app_model: Optional[Type[AppModel]] = None, unit_model: Optional[Type[UnitModel]] = None +): + """Return a decorator to allow pydantic parsing of the app and unit databags. + + Args: + app_model: Pydantic class representing the model to be used for parsing the content of the + app databag. None if no parsing ought to be done. + unit_model: Pydantic class representing the model to be used for parsing the content of the + unit databag. None if no parsing ought to be done. + """ + + def decorator( + f: Callable[ + [ + CharmBase, + RelationEvent, + Optional[Union[AppModel, ValidationError]], + Optional[Union[UnitModel, ValidationError]], + ], + G, + ] + ) -> Callable[[CharmBase, RelationEvent], G]: + @wraps(f) + def event_wrapper(self: CharmBase, event: RelationEvent): + try: + app_data = ( + read(event.relation.data[event.app], app_model) + if app_model is not None and event.app + else None + ) + except pydantic.ValidationError as e: + app_data = e + + try: + unit_data = ( + read(event.relation.data[event.unit], unit_model) + if unit_model is not None and event.unit + else None + ) + except pydantic.ValidationError as e: + unit_data = e + + return f(self, event, app_data, unit_data) + + return event_wrapper + + return decorator + + +class RelationDataModel(BaseModel): + """Base class to be used for creating data models to be used for relation databags.""" + + def write(self, relation_data: RelationDataContent): + """Write data to a relation databag. + + Args: + relation_data: pointer to the relation databag + """ + return write(relation_data, self) + + @classmethod + def read(cls, relation_data: RelationDataContent) -> "RelationDataModel": + """Read data from a relation databag and parse it as an instance of the pydantic class. + + Args: + relation_data: pointer to the relation databag + """ + return read(relation_data, cls) + + +def get_relation_data_as( + model_type: Type[AppModel], + *relation_data: RelationDataContent, +) -> Union[AppModel, ValidationError]: + """Return a merged representation of the provider and requirer databag into a single object. 
+ + Args: + model_type: pydantic class representing the merged databag + relation_data: list of RelationDataContent of provider/requirer/unit sides + """ + try: + app_data = read(reduce(lambda x, y: dict(x) | dict(y), relation_data, {}), model_type) + except pydantic.ValidationError as e: + app_data = e + return app_data diff --git a/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py index 2604c39e6..1e7ff8405 100644 --- a/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py +++ b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py @@ -269,7 +269,7 @@ def _remove_stale_otel_sdk_packages(): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 1 +LIBPATCH = 2 PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] @@ -371,10 +371,6 @@ class UntraceableObjectError(TracingError): """Raised when an object you're attempting to instrument cannot be autoinstrumented.""" -class TLSError(TracingError): - """Raised when the tracing endpoint is https but we don't have a cert yet.""" - - def _get_tracing_endpoint( tracing_endpoint_attr: str, charm_instance: object, @@ -484,10 +480,15 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): ) if tracing_endpoint.startswith("https://") and not server_cert: - raise TLSError( + logger.error( "Tracing endpoint is https, but no server_cert has been passed." - "Please point @trace_charm to a `server_cert` attr." + "Please point @trace_charm to a `server_cert` attr. " + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " ) + return exporter = OTLPSpanExporter( endpoint=tracing_endpoint, diff --git a/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/lib/charms/tempo_coordinator_k8s/v0/tracing.py index 4af379a5d..1f92867f8 100644 --- a/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ b/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -107,7 +107,7 @@ def __init__(self, *args): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 1 +LIBPATCH = 2 PYDEPS = ["pydantic"] @@ -985,11 +985,16 @@ def charm_tracing_config( is_https = endpoint.startswith("https://") if is_https: - if cert_path is None: - raise TracingError("Cannot send traces to an https endpoint without a certificate.") - elif not Path(cert_path).exists(): - # if endpoint is https BUT we don't have a server_cert yet: - # disable charm tracing until we do to prevent tls errors + if cert_path is None or not Path(cert_path).exists(): + # disable charm tracing until we obtain a cert to prevent tls errors + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. " + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. 
" + ) return None, None return endpoint, str(cert_path) else: diff --git a/src/charm.py b/src/charm.py index 5f5d492fa..b58c4ddeb 100755 --- a/src/charm.py +++ b/src/charm.py @@ -16,6 +16,7 @@ import lightkube import psycopg2 from charms.data_platform_libs.v0.data_interfaces import DataPeerData, DataPeerUnitData +from charms.data_platform_libs.v0.data_models import TypedCharmBase from charms.grafana_k8s.v0.grafana_dashboard import GrafanaDashboardProvider from charms.loki_k8s.v0.loki_push_api import LogProxyConsumer from charms.postgresql_k8s.v0.postgresql_tls import PostgreSQLTLS @@ -23,12 +24,19 @@ from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer from jinja2 import Template -from ops import JujuVersion, main -from ops.charm import CharmBase, ConfigChangedEvent, PebbleReadyEvent -from ops.framework import StoredState -from ops.model import ActiveStatus, BlockedStatus, Relation, WaitingStatus +from ops import ( + ActiveStatus, + BlockedStatus, + ConfigChangedEvent, + JujuVersion, + PebbleReadyEvent, + Relation, + WaitingStatus, + main, +) from ops.pebble import ConnectionError, Layer, ServiceStatus +from config import CharmConfig from constants import ( APP_SCOPE, AUTH_FILE_DATABAG_KEY, @@ -79,10 +87,10 @@ PgbouncerUpgrade, ), ) -class PgBouncerK8sCharm(CharmBase): +class PgBouncerK8sCharm(TypedCharmBase): """A class implementing charmed PgBouncer.""" - _stored = StoredState() + config_type = CharmConfig def __init__(self, *args): super().__init__(*args) @@ -210,9 +218,9 @@ def _node_port(self, port_type: str) -> int: # [ServicePort(port=6432, appProtocol=None, name=None, nodePort=31438, protocol='TCP', targetPort=6432)] # Keeping this distinction, so we have similarity with mysql-router-k8s if port_type == "rw": - port = self.config["listen_port"] + port = self.config.listen_port elif port_type == "ro": - port = self.config["listen_port"] + port = self.config.listen_port else: raise ValueError(f"Invalid {port_type=}") logger.debug(f"Looking for NodePort for {port_type} in {service.spec.ports}") @@ -286,8 +294,8 @@ def patch_port(self, use_node_port: bool = False) -> None: ports=[ lightkube.models.core_v1.ServicePort( name="pgbouncer", - port=self.config["listen_port"], - targetPort=self.config["listen_port"], + port=self.config.listen_port, + targetPort=self.config.listen_port, ) ], type="NodePort", @@ -419,12 +427,15 @@ def _on_config_changed(self, event: ConfigChangedEvent) -> None: event.defer() return + if not self.configuration_check(): + return + old_port = self.peers.app_databag.get("current_port") - port_changed = old_port != str(self.config["listen_port"]) + port_changed = old_port != str(self.config.listen_port) if self.unit.is_leader() and port_changed: # This emits relation-changed events to every client relation, so only do it when # necessary - self.update_client_connection_info(self.config["listen_port"]) + self.update_client_connection_info() self.patch_port(self.client_relation.external_connectivity()) self.render_pgb_config() @@ -436,7 +447,8 @@ def _on_config_changed(self, event: ConfigChangedEvent) -> None: if self.unit.is_leader() and port_changed: # Only update the config once the services have been restarted - self.peers.app_databag["current_port"] = str(self.config["listen_port"]) + self.peers.app_databag["current_port"] = str(self.config.listen_port) + self.update_status() def _pgbouncer_layer(self) -> Layer: """Returns a default pebble config layer for the pgbouncer 
container. @@ -538,11 +550,24 @@ def _on_update_status(self, _) -> None: # information in all the relation databags. self.update_client_connection_info() + def configuration_check(self) -> bool: + """Check that configuration is valid.""" + try: + self.config + return True + except ValueError: + self.unit.status = BlockedStatus("Configuration Error. Please check the logs") + logger.exception("Invalid configuration") + return False + def update_status(self): """Health check to update pgbouncer status based on charm state.""" if self.unit.status.message == EXTENSIONS_BLOCKING_MESSAGE: return + if not self.configuration_check(): + return + if self.backend.postgres is None: self.unit.status = BlockedStatus("waiting for backend database relation to initialise") return @@ -592,7 +617,7 @@ def _generate_monitoring_service(self, enabled: bool = True) -> Dict[str, str]: if enabled and (stats_password := self.get_secret(APP_SCOPE, MONITORING_PASSWORD_KEY)): command = ( f'pgbouncer_exporter --web.listen-address=:{METRICS_PORT} --pgBouncer.connectionString="' - f'postgres://{self.backend.stats_user}:{stats_password}@localhost:{self.config["listen_port"]}/pgbouncer?sslmode=disable"' + f'postgres://{self.backend.stats_user}:{stats_password}@localhost:{self.config.listen_port}/pgbouncer?sslmode=disable"' ) startup = "enabled" else: @@ -928,7 +953,11 @@ def render_pgb_config(self, reload_pgbouncer=False, restart=False) -> None: restart: Whether to restart the service when reloading. """ perm = 0o400 - max_db_connections = self.config["max_db_connections"] + + if not self.configuration_check(): + return + + max_db_connections = self.config.max_db_connections if max_db_connections == 0: default_pool_size = 20 min_pool_size = 10 @@ -955,8 +984,8 @@ def render_pgb_config(self, reload_pgbouncer=False, restart=False) -> None: peers=service_ids, log_file=f"{service['log_dir']}/pgbouncer.log", pid_file=f"{service['dir']}/pgbouncer.pid", - listen_port=self.config["listen_port"], - pool_mode=self.config["pool_mode"], + listen_port=self.config.listen_port, + pool_mode=self.config.pool_mode, max_db_connections=max_db_connections, default_pool_size=default_pool_size, min_pool_size=min_pool_size, @@ -988,7 +1017,7 @@ def render_auth_file(self, auth_file: str, reload_pgbouncer=False): # Relation Utilities # ===================== - def update_client_connection_info(self, port: Optional[str] = None): + def update_client_connection_info(self): """Update connection info in client relations. TODO rename @@ -997,8 +1026,7 @@ def update_client_connection_info(self, port: Optional[str] = None): if not self.backend.postgres or not self.unit.is_leader(): return - if not port: - port = self.config["listen_port"] + port = self.config.listen_port for relation in self.model.relations.get("db", []): self.legacy_db_relation.update_connection_info(relation, port) diff --git a/src/config.py b/src/config.py new file mode 100644 index 000000000..0edb04ebe --- /dev/null +++ b/src/config.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Structured configuration for the PostgreSQL charm.""" + +import logging +from typing import Literal + +from charms.data_platform_libs.v0.data_models import BaseConfigModel +from pydantic import PositiveInt, conint + +logger = logging.getLogger(__name__) + + +class CharmConfig(BaseConfigModel): + """Manager for the structured configuration.""" + + listen_port: PositiveInt + pool_mode: Literal["session", "transaction", "statement"] + max_db_connections: conint(ge=0) diff --git a/src/relations/backend_database.py b/src/relations/backend_database.py index 9d2c3f417..0ea6fd567 100644 --- a/src/relations/backend_database.py +++ b/src/relations/backend_database.py @@ -53,6 +53,7 @@ Application, BlockedStatus, MaintenanceStatus, + ModelError, Relation, WaitingStatus, ) @@ -263,9 +264,14 @@ def _on_database_created(self, event: DatabaseCreatedEvent) -> None: if not self.charm.is_container_ready: return - if not (auth_file := self.charm.get_secret(APP_SCOPE, AUTH_FILE_DATABAG_KEY)): - logger.debug("_on_database_created deferred: waiting for leader to initialise") + try: + if not (auth_file := self.charm.get_secret(APP_SCOPE, AUTH_FILE_DATABAG_KEY)): + logger.debug("_on_database_created deferred: waiting for leader to initialise") + event.defer() + return + except ModelError: event.defer() + logger.error("deferring database-created hook - cannot access secrets") return self.charm.render_auth_file(auth_file) self.charm.render_pgb_config(reload_pgbouncer=True) diff --git a/src/relations/db.py b/src/relations/db.py index 491e3e37b..8ae71fd2e 100644 --- a/src/relations/db.py +++ b/src/relations/db.py @@ -56,6 +56,7 @@ """ import logging +from hashlib import shake_128 from typing import Dict, Iterable, List from charms.pgbouncer_k8s.v0 import pgb @@ -229,7 +230,8 @@ def _on_relation_joined(self, join_event: RelationJoinedEvent): database = remote_app_databag.get("database") user = self._generate_username(join_event.relation) - password = pgb.generate_password() + databag = self.get_databags(join_event.relation)[0] + password = databag.get("password", pgb.generate_password()) if None in [database, password]: # If database isn't available, defer @@ -239,9 +241,23 @@ def _on_relation_joined(self, join_event: RelationJoinedEvent): dbs = self.charm.generate_relation_databases() dbs[str(join_event.relation.id)] = {"name": database, "legacy": True} if self.admin: - dbs["*"] = {"name": "*", "auth_dbname": database} + dbs["*"] = {"name": "*", "auth_dbname": database, "legacy": False} self.charm.set_relation_databases(dbs) + pgb_dbs_hash = shake_128( + self.charm.peers.app_databag["pgb_dbs_config"].encode() + ).hexdigest(16) + for key, data in self.charm.peers.relation.data.items(): + # We skip the leader so we don't have to wait on the defer + if ( + key != self.charm.app + and key != self.charm.unit + and data.get("pgb_dbs", "") != pgb_dbs_hash + ): + logger.debug("Not all units have synced configuration") + join_event.defer() + return + self.update_databags( join_event.relation, { @@ -312,7 +328,7 @@ def _on_relation_changed(self, change_event: RelationChangedEvent): self.charm.render_pgb_config(reload_pgbouncer=True) if self.charm.unit.is_leader(): - self.update_connection_info(change_event.relation, self.charm.config["listen_port"]) + self.update_connection_info(change_event.relation, self.charm.config.listen_port) self.update_databags( change_event.relation, { @@ -331,8 +347,11 @@ def _on_relation_changed(self, change_event: RelationChangedEvent): def update_connection_info(self, relation: Relation, 
port: str): """Updates databag connection information.""" + if not self.charm.configuration_check(): + return + if not port: - port = self.charm.config["listen_port"] + port = self.charm.config.listen_port databag = self.get_databags(relation)[0] database = databag.get("database") diff --git a/src/relations/peers.py b/src/relations/peers.py index f939ebf07..be9bc9fc9 100644 --- a/src/relations/peers.py +++ b/src/relations/peers.py @@ -30,6 +30,7 @@ """ # noqa: W505 import logging +from hashlib import shake_128 from typing import Optional, Set from ops.charm import CharmBase, HookEvent, RelationCreatedEvent @@ -134,7 +135,7 @@ def _on_created(self, event: RelationCreatedEvent): def _on_joined(self, event: HookEvent): self._on_changed(event) - if self.charm.unit.is_leader(): + if self.charm.unit.is_leader() and self.charm.configuration_check(): self.charm.client_relation.update_endpoints() def _on_changed(self, event: HookEvent): @@ -150,16 +151,19 @@ def _on_changed(self, event: HookEvent): """ self.unit_databag.update({ADDRESS_KEY: self.charm.unit_pod_hostname}) - if self.charm.unit.is_leader(): - self.update_leader() + self.update_leader() if not self.charm.is_container_ready: logger.debug("_on_peer_changed defer: container unavailable") event.defer() return + pgb_dbs_hash = shake_128(self.app_databag.get("pgb_dbs_config", "{}").encode()).hexdigest( + 16 + ) self.charm.render_pgb_config(reload_pgbouncer=True) self.charm.toggle_monitoring_layer(self.charm.backend.ready) + self.unit_databag["pgb_dbs"] = pgb_dbs_hash def _on_departed(self, event): self.update_leader() @@ -179,4 +183,3 @@ def update_leader(self): """Updates leader hostname in peer databag to match this unit if it's the leader.""" if self.charm.unit.is_leader(): self.app_databag[LEADER_ADDRESS_KEY] = self.charm.unit_pod_hostname - self.charm.generate_relation_databases() diff --git a/src/relations/pgbouncer_provider.py b/src/relations/pgbouncer_provider.py index bf8f99034..321541f90 100644 --- a/src/relations/pgbouncer_provider.py +++ b/src/relations/pgbouncer_provider.py @@ -31,6 +31,7 @@ """ # noqa: W505 import logging +from hashlib import shake_128 from charms.data_platform_libs.v0.data_interfaces import ( DatabaseProvides, @@ -131,6 +132,27 @@ def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: extra_user_roles = event.extra_user_roles or "" rel_id = event.relation.id + dbs = self.charm.generate_relation_databases() + dbs[str(event.relation.id)] = {"name": database, "legacy": False} + roles = extra_user_roles.lower().split(",") + if "admin" in roles or "superuser" in roles or "createdb" in roles: + dbs["*"] = {"name": "*", "auth_dbname": database, "legacy": False} + self.charm.set_relation_databases(dbs) + + pgb_dbs_hash = shake_128( + self.charm.peers.app_databag["pgb_dbs_config"].encode() + ).hexdigest(16) + for key, data in self.charm.peers.relation.data.items(): + # We skip the leader so we don't have to wait on the defer + if ( + key != self.charm.app + and key != self.charm.unit + and data.get("pgb_dbs", "") != pgb_dbs_hash + ): + logger.debug("Not all units have synced configuration") + event.defer() + return + # Creates the user and the database for this specific relation. 
user = f"relation_id_{rel_id}" logger.debug("generating relation user") @@ -157,13 +179,6 @@ def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: ) return - dbs = self.charm.generate_relation_databases() - dbs[str(event.relation.id)] = {"name": database, "legacy": False} - roles = extra_user_roles.lower().split(",") - if "admin" in roles or "superuser" in roles or "createdb" in roles: - dbs["*"] = {"name": "*", "auth_dbname": database} - self.charm.set_relation_databases(dbs) - self.charm.render_pgb_config(reload_pgbouncer=True) # Share the credentials and updated connection info with the client application. @@ -222,7 +237,7 @@ def _on_relation_broken(self, event: RelationBrokenEvent) -> None: def update_connection_info(self, relation): """Updates client-facing relation information.""" - if not self.charm.unit.is_leader(): + if not self.charm.unit.is_leader() or not self.charm.configuration_check(): return self.update_endpoints(relation) @@ -239,11 +254,11 @@ def update_connection_info(self, relation): def update_endpoints(self, relation=None) -> None: """Set the endpoints for the relation.""" nodeports = self.charm.get_node_ports - internal_port = self.charm.config["listen_port"] + internal_port = self.charm.config.listen_port internal_hostnames = sorted(set(self.charm.peers.units_hostnames)) if self.charm.peers.leader_hostname in internal_hostnames: internal_hostnames.remove(self.charm.peers.leader_hostname) - internal_rw = f"{self.charm.leader_hostname}:{self.charm.config['listen_port']}" + internal_rw = f"{self.charm.leader_hostname}:{self.charm.config.listen_port}" relations = [relation] if relation else self.model.relations[self.relation_name] diff --git a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index 05ade69e2..c65a858d6 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -308,6 +308,7 @@ async def deploy_and_relate_application_with_pgbouncer( apps=[application_name], status="active", raise_on_blocked=False, # Application that needs a relation is blocked initially. + raise_on_error=False, timeout=1000, ) diff --git a/tests/integration/relations/test_db_admin.py b/tests/integration/relations/test_db_admin.py index f18ebd8d9..3adb10f20 100644 --- a/tests/integration/relations/test_db_admin.py +++ b/tests/integration/relations/test_db_admin.py @@ -3,15 +3,16 @@ import asyncio import logging -from pathlib import Path import pytest -import yaml from pytest_operator.plugin import OpsTest from .. import markers from ..helpers.helpers import ( CHARM_SERIES, + PG, + PGB, + PGB_METADATA, get_backend_user_pass, get_legacy_relation_username, wait_for_relation_joined_between, @@ -26,9 +27,6 @@ DISCOURSE_APP_NAME = "discourse-charmers-discourse-k8s" REDIS_APP_NAME = "redis-k8s" -METADATA = yaml.safe_load(Path("./metadata.yaml").read_text()) -PGB = METADATA["name"] -PG = "postgresql-k8s" @pytest.mark.group(1) @@ -37,7 +35,7 @@ async def test_create_db_admin_legacy_relation(ops_test: OpsTest, pgb_charm): # Build, deploy, and relate charms. resources = { - "pgbouncer-image": METADATA["resources"]["pgbouncer-image"]["upstream-source"], + "pgbouncer-image": PGB_METADATA["resources"]["pgbouncer-image"]["upstream-source"], } async with ops_test.fast_forward(): diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index c191ce403..5b0e3932f 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -3,26 +3,21 @@ # See LICENSE file for licensing details. 
import logging -from pathlib import Path import pytest -import yaml from pytest_operator.plugin import OpsTest -from .helpers.helpers import CHARM_SERIES, get_cfg, run_command_on_unit +from .helpers.helpers import CHARM_SERIES, PGB, PGB_METADATA, get_cfg, run_command_on_unit logger = logging.getLogger(__name__) -METADATA = yaml.safe_load(Path("./metadata.yaml").read_text()) -PGB = METADATA["name"] - @pytest.mark.group(1) @pytest.mark.abort_on_fail async def test_build_and_deploy(ops_test: OpsTest, pgb_charm): """Build and deploy pgbouncer charm.""" resources = { - "pgbouncer-image": METADATA["resources"]["pgbouncer-image"]["upstream-source"], + "pgbouncer-image": PGB_METADATA["resources"]["pgbouncer-image"]["upstream-source"], } async with ops_test.fast_forward(): await ops_test.model.deploy( diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py new file mode 100644 index 000000000..9134d71c7 --- /dev/null +++ b/tests/integration/test_config.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +import logging +from asyncio import gather + +import pytest as pytest +from pytest_operator.plugin import OpsTest + +from constants import BACKEND_RELATION_NAME + +from .helpers.helpers import CHARM_SERIES, PG, PGB, PGB_METADATA + +logger = logging.getLogger(__name__) + + +@pytest.mark.group(1) +@pytest.mark.abort_on_fail +async def test_config_parameters(ops_test: OpsTest, pgb_charm) -> None: + """Build and deploy one unit of PostgreSQL and then test config with wrong parameters.""" + # Build and deploy the PostgreSQL charm. + async with ops_test.fast_forward(): + await gather( + ops_test.model.deploy( + pgb_charm, + resources={ + "pgbouncer-image": PGB_METADATA["resources"]["pgbouncer-image"][ + "upstream-source" + ] + }, + application_name=PGB, + num_units=1, + series=CHARM_SERIES, + trust=False, + ), + ops_test.model.deploy( + PG, + application_name=PG, + num_units=1, + channel="14/edge", + config={"profile": "testing"}, + ), + ) + await ops_test.model.add_relation(f"{PGB}:{BACKEND_RELATION_NAME}", f"{PG}:database") + + await ops_test.model.wait_for_idle(status="active", timeout=600) + + unit = ops_test.model.applications[PGB].units[0] + test_string = "abcXYZ123" + + configs = { + "listen_port": "0", + "pool_mode": test_string, + "max_db_connections": "-1", + } + + for key, val in configs.items(): + logger.info(key) + await ops_test.model.applications[PGB].set_config({key: val}) + await ops_test.model.block_until( + lambda: ops_test.model.units[f"{PGB}/0"].workload_status == "blocked", + timeout=100, + ) + assert "Configuration Error" in unit.workload_status_message + + await ops_test.model.applications[PGB].reset_config([key]) + await ops_test.model.block_until( + lambda: ops_test.model.units[f"{PGB}/0"].workload_status == "active", + timeout=100, + ) diff --git a/tests/integration/test_trust.py b/tests/integration/test_trust.py index 30b91b4c2..1d7f896db 100644 --- a/tests/integration/test_trust.py +++ b/tests/integration/test_trust.py @@ -5,22 +5,20 @@ import asyncio import logging import time -from pathlib import Path import pytest -import yaml from pytest_operator.plugin import OpsTest from .helpers.helpers import ( CHARM_SERIES, CLIENT_APP_NAME, + PG, + PGB, + PGB_METADATA, get_leader_unit, ) logger = logging.getLogger(__name__) -METADATA = yaml.safe_load(Path("./metadata.yaml").read_text()) -PGB = METADATA["name"] -PG = "postgresql-k8s" RELATION = "backend-database" MAX_RETRIES = 20 
UNTRUST_ERROR_MESSAGE = f"Insufficient permissions, try: `juju trust {PGB} --scope=cluster`" @@ -95,7 +93,7 @@ async def test_build_and_deploy(ops_test: OpsTest, pgb_charm): await ops_test.model.deploy( pgb_charm, resources={ - "pgbouncer-image": METADATA["resources"]["pgbouncer-image"]["upstream-source"] + "pgbouncer-image": PGB_METADATA["resources"]["pgbouncer-image"]["upstream-source"] }, application_name=PGB, num_units=1, diff --git a/tests/unit/relations/test_backend_database.py b/tests/unit/relations/test_backend_database.py index f1fcca7ae..a6ea49e7e 100644 --- a/tests/unit/relations/test_backend_database.py +++ b/tests/unit/relations/test_backend_database.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, PropertyMock, call, patch from charms.pgbouncer_k8s.v0.pgb import get_hashed_password -from ops.model import WaitingStatus +from ops import ModelError, WaitingStatus from ops.pebble import ConnectionError from ops.testing import Harness @@ -84,6 +84,7 @@ def test_on_database_created( mock_event = MagicMock() mock_event.username = "mock_user" + self.backend._on_database_created(mock_event) hash_pw = get_hashed_password(self.backend.auth_user, pw) @@ -115,8 +116,18 @@ def test_on_database_created_not_leader( mock_event = MagicMock() mock_event.username = "mock_user" + + # Defer on model error + _get_secret.side_effect = ModelError + self.backend._on_database_created(mock_event) + mock_event.defer.assert_called_once_with() + mock_event.defer.reset_mock() + _get_secret.reset_mock() + _get_secret.side_effect = None + + self.backend._on_database_created(mock_event) assert not _render_auth.called assert not _render_pgb.called assert not _toggle_monitoring.called diff --git a/tests/unit/relations/test_db.py b/tests/unit/relations/test_db.py index 9a3500058..97468f0f4 100644 --- a/tests/unit/relations/test_db.py +++ b/tests/unit/relations/test_db.py @@ -103,6 +103,11 @@ def test_on_relation_joined( user = "pgbouncer_k8s_user_1_None" password = _gen_pw.return_value + with self.harness.hooks_disabled(): + self.harness.update_relation_data( + self.peers_rel_id, self.app, {"pgb_dbs_config": "{}"} + ) + _set_rel_dbs.reset_mock() relation_data = mock_event.relation.data = {} relation_data[self.charm.unit] = {} @@ -116,7 +121,7 @@ def test_on_relation_joined( self.db_admin_relation._on_relation_joined(mock_event) _set_rel_dbs.assert_called_once_with({ "1": {"name": "test_db", "legacy": True}, - "*": {"name": "*", "auth_dbname": "test_db"}, + "*": {"name": "*", "auth_dbname": "test_db", "legacy": False}, }) _create_user.assert_called_with(user, password, admin=True) @@ -134,7 +139,7 @@ def test_on_relation_joined( _create_user.assert_called_with(user, password, admin=False) _set_rel_dbs.assert_called_once_with({ "1": {"name": "test_db", "legacy": True}, - "*": {"name": "*", "auth_dbname": "test_db"}, + "*": {"name": "*", "auth_dbname": "test_db", "legacy": False}, }) @patch("relations.backend_database.BackendDatabaseRequires.check_backend", return_value=True) diff --git a/tests/unit/relations/test_peers.py b/tests/unit/relations/test_peers.py index e0e2a4325..ff66fe193 100644 --- a/tests/unit/relations/test_peers.py +++ b/tests/unit/relations/test_peers.py @@ -57,11 +57,9 @@ def test_on_peers_changed( new_callable=PropertyMock, return_value="test_pod_name", ) - @patch("charm.PgBouncerK8sCharm.generate_relation_databases") - def test_update_leader(self, _generate_relation_databases, unit_pod_hostname): + def test_update_leader(self, unit_pod_hostname): 
self.harness.add_relation(BACKEND_RELATION_NAME, "postgres") # Will run the hook self.harness.set_leader(True) - _generate_relation_databases.assert_called_once_with() assert self.charm.peers.app_databag[LEADER_ADDRESS_KEY] == "test_pod_name" diff --git a/tests/unit/relations/test_pgbouncer_provider.py b/tests/unit/relations/test_pgbouncer_provider.py index 404a7809d..986d04a09 100644 --- a/tests/unit/relations/test_pgbouncer_provider.py +++ b/tests/unit/relations/test_pgbouncer_provider.py @@ -104,6 +104,9 @@ def test_on_database_requested( user = f"relation_id_{rel_id}" with self.harness.hooks_disabled(): self.harness.update_relation_data(rel_id, "application", {"database": "test-db"}) + self.harness.update_relation_data( + self.peers_rel_id, self.app, {"pgb_dbs_config": "{}"} + ) # check we exit immediately if backend doesn't exist. _check_backend.return_value = False @@ -128,7 +131,7 @@ def test_on_database_requested( _set_read_only_endpoints.assert_called() _set_rel_dbs.assert_called_once_with({ str(rel_id): {"name": "test-db", "legacy": False}, - "*": {"name": "*", "auth_dbname": "test-db"}, + "*": {"name": "*", "auth_dbname": "test-db", "legacy": False}, }) _render_pgb_config.assert_called_once_with(reload_pgbouncer=True) diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 516ac8036..6b50f4748 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -86,7 +86,7 @@ def test_on_config_changed( "listen_port": 6464, }) _reload.assert_called_once_with(restart=True) - _update_connection_info.assert_called_with(6464) + _update_connection_info.assert_called_with() _check_pgb_running.assert_called_once_with() @patch( @@ -572,6 +572,15 @@ def test_patch_port_delete_errors(self, _client, _on_deployed_without_trust, _lo with self.assertRaises(_FakeApiError): self.charm.patch_port(False) + @patch("charm.PgBouncerK8sCharm.config", new_callable=PropertyMock, return_value={}) + def test_configuration_check(self, _config): + assert self.charm.configuration_check() + + _config.side_effect = ValueError + assert not self.charm.configuration_check() + assert isinstance(self.charm.unit.status, BlockedStatus) + assert self.charm.unit.status.message == "Configuration Error. Please check the logs" + # # Secrets #
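
As a quick illustration of the behaviour that the new `configuration_check()` helper and `tests/integration/test_config.py` both rely on, here is a minimal, self-contained sketch. It is not part of the patch: `CharmConfigSketch` and the plain-dict `check()` wrapper are illustrative stand-ins for `src/config.py`'s `CharmConfig` and the charm method, and pydantic 1.x is assumed (the data_models library pins `pydantic>=1.10,<2`).

```python
from typing import Literal

from pydantic import BaseModel, PositiveInt, conint


class CharmConfigSketch(BaseModel):
    """Stand-in for CharmConfig: the three options the charm validates."""

    listen_port: PositiveInt
    pool_mode: Literal["session", "transaction", "statement"]
    max_db_connections: conint(ge=0)


def check(raw: dict) -> bool:
    """Mirror of PgBouncerK8sCharm.configuration_check(): True if the config parses."""
    try:
        CharmConfigSketch(**raw)
        return True
    except ValueError:  # pydantic 1.x ValidationError subclasses ValueError
        return False


# pydantic coerces string input, e.g. values passed through `juju config`.
assert check({"listen_port": "6432", "pool_mode": "session", "max_db_connections": "0"})
# The invalid values exercised by tests/integration/test_config.py are rejected,
# which is what drives the "Configuration Error. Please check the logs" BlockedStatus.
assert not check({"listen_port": "0", "pool_mode": "abcXYZ123", "max_db_connections": "-1"})
```

The other recurring pattern in this patch, the `shake_128(...).hexdigest(16)` digest of `pgb_dbs_config` that each peer unit publishes as `pgb_dbs`, serves a similar defensive purpose: the leader defers client relation events until every non-leader unit reports the digest of the configuration the leader just wrote, so clients are never handed connection details that only part of the cluster can serve yet.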