feat: added charm

This commit is contained in:
Samuel Olwe
2025-07-07 19:44:05 +03:00
parent 33810b3cd7
commit cad7fdf170
25 changed files with 16471 additions and 8 deletions

30
.env
View File

@ -8,12 +8,40 @@ GREENHOUSE_DEBUG=true
SECRET_KEY=local_development_fake_key
# https://canonical.greenhouse.io/configure/dev_center/credentials
GREENHOUSE_API_KEY=
HARVEST_API_KEY=
APPLICATION_CRYPTO_SECRET_KEY=super_secret
SERVICE_ACCOUNT_EMAIL=test_email@email.com
SERVICE_ACCOUNT_PRIVATE_KEY=test_private_key
# Use this flag to wait for cached pages to expire (does not affect frontend):
RECAPTCHA_ENABLED="false"
RECAPTCHA_SITE_KEY=recaptcha_site_key
RECAPTCHA_PROJECT_ID=recaptcha_project_id
RECAPTCHA_API_KEY=recaptcha_api_key
RECAPTCHA_SCORE_THRESHOLD=0.5
# Secret for sitemap endpoint
SITEMAP_SECRET=somesecret
# Charmhub API
CHARMHUB_DISCOURSE_API_KEY=charmhub_discourse_api_key
CHARMHUB_DISCOURSE_API_USER=charmhub_discourse_api_user
# DiscourseAPI
DISCOURSE_API_KEY=discourse_api_key
DISCOURSE_API_USERNAME=discourse_api_username
# Directory API
DIRECTORY_API_TOKEN=directory_api_token
# SMTP
SMTP_SERVER=smtp.example.com
SMTP_USER=smtp_user
SMTP_PASS=smtp_pass
SMTP_SENDER_ADDRESS=user@example.com

165
.github/workflows/deploy.yaml vendored Normal file
View File

@ -0,0 +1,165 @@
name: Deploy

on:
  push:
    branches:
      - create-canonical-com-charm
      - main

env:
  CHARMCRAFT_ENABLE_EXPERIMENTAL_EXTENSIONS: true
  ROCKCRAFT_ENABLE_EXPERIMENTAL_EXTENSIONS: true

jobs:
  # Build the charm artifact used by both deploy jobs.
  pack-charm:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
      - name: Setup LXD
        uses: canonical/setup-lxd@main
      - name: Setup Charmcraft
        run: sudo snap install charmcraft --classic --channel=latest/edge
      - name: Pack charm
        run: charmcraft pack -v --project-dir ./charm
      - name: Upload charm
        uses: actions/upload-artifact@v4
        with:
          name: canonical-com-charm
          path: ./*.charm

  # Build the OCI rock image containing the Flask app and static assets.
  pack-rock:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
      - name: Use Node.js
        uses: actions/setup-node@v3
      - name: Build Assets
        run: |
          yarn install
          yarn run build
      - name: Setup LXD
        uses: canonical/setup-lxd@main
      - name: Create repositories directory
        run: |
          mkdir -m 777 repositories
          mkdir -m 777 tree-cache
      - name: Setup Rockcraft
        run: sudo snap install rockcraft --classic --channel=latest/edge
      - name: Pack Rock
        run: rockcraft pack
      - name: Upload Rock
        uses: actions/upload-artifact@v4
        with:
          name: canonical-com-rock
          path: ./*.rock

  # Push the rock to GHCR; the image tag (timestamp + short SHA) is exposed
  # to the deploy jobs via the image_url output.
  publish-image:
    runs-on: ubuntu-latest
    needs: pack-rock
    outputs:
      image_url: ${{ steps.set_image_url.outputs.image_url }}
    steps:
      - name: Get Rock
        uses: actions/download-artifact@v4
        with:
          name: canonical-com-rock
      - name: Set image URL
        id: set_image_url
        run: echo "image_url=ghcr.io/canonical/canonical.com:$(date +%s)-${GITHUB_SHA:0:7}" >> $GITHUB_OUTPUT
      - name: Push to GHCR
        run: skopeo --insecure-policy copy oci-archive:$(ls *.rock) docker://${{ steps.set_image_url.outputs.image_url }} --dest-creds "canonical:${{ secrets.GITHUB_TOKEN }}"

  deploy-staging:
    runs-on: [self-hosted, self-hosted-linux-amd64-jammy-private-endpoint-medium]
    needs: [pack-charm, publish-image]
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
      - name: Install Dependencies
        run: |
          sudo snap install juju --channel=3.4/stable --classic
          sudo snap install vault --classic
      - name: Download Charm Artifact
        uses: actions/download-artifact@v4
        with:
          name: canonical-com-charm
      - name: Configure Vault and Juju
        run: |
          export VAULT_ADDR=https://vault.admin.canonical.com:8200
          export TF_VAR_login_approle_role_id=${{ secrets.STG_VAULT_APPROLE_ROLE_ID }}
          export TF_VAR_login_approle_secret_id=${{ secrets.STG_VAULT_APPROLE_SECRET_ID }}
          export VAULT_SECRET_PATH_ROLE=secret/prodstack6/roles/stg-staging-canonical-com
          export VAULT_SECRET_PATH_COMMON=secret/prodstack6/juju/common
          VAULT_TOKEN=$(vault write -f -field=token auth/approle/login role_id=${TF_VAR_login_approle_role_id} secret_id=${TF_VAR_login_approle_secret_id})
          export VAULT_TOKEN
          mkdir -p ~/.local/share/juju
          vault read -field=controller_config "${VAULT_SECRET_PATH_COMMON}/controllers/juju-controller-35-staging-ps6" | base64 -d > ~/.local/share/juju/controllers.yaml
          USERNAME=$(vault read -field=username "${VAULT_SECRET_PATH_ROLE}/juju")
          PASSWORD=$(vault read -field=password "${VAULT_SECRET_PATH_ROLE}/juju")
          printf "controllers:\n  juju-controller-35-staging-ps6:\n    user: %s\n    password: %s\n" "$USERNAME" "$PASSWORD" > ~/.local/share/juju/accounts.yaml
      - name: Deploy Application to staging
        run: |
          export JUJU_MODEL=admin/stg-staging-canonical-com
          juju refresh canonical-com --path ./canonical-com_ubuntu-22.04-amd64.charm --resource flask-app-image=${{ needs.publish-image.outputs.image_url }}
          juju refresh canonical-com-blog --path ./canonical-com_ubuntu-22.04-amd64.charm --resource flask-app-image=${{ needs.publish-image.outputs.image_url }}
          juju refresh canonical-com-careers --path ./canonical-com_ubuntu-22.04-amd64.charm --resource flask-app-image=${{ needs.publish-image.outputs.image_url }}
          juju wait-for application canonical-com --query='name=="canonical-com" && (status=="active" || status=="idle")'

  deploy-production:
    # Guard: only pushes to main may reach production. Without this the
    # create-canonical-com-charm branch would deploy straight to prod.
    if: github.ref == 'refs/heads/main'
    runs-on: [self-hosted, self-hosted-linux-amd64-jammy-private-endpoint-medium]
    # Production waits for a successful staging rollout instead of racing it.
    needs: [pack-charm, publish-image, deploy-staging]
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
      - name: Install Dependencies
        run: |
          sudo snap install juju --channel=3.6/stable --classic
          sudo snap install vault --classic
      - name: Download Charm Artifact
        uses: actions/download-artifact@v4
        with:
          name: canonical-com-charm
      - name: Configure Vault and Juju
        run: |
          export VAULT_ADDR=https://vault.admin.canonical.com:8200
          export TF_VAR_login_approle_role_id=${{ secrets.PROD_VAULT_APPROLE_ROLE_ID }}
          export TF_VAR_login_approle_secret_id=${{ secrets.PROD_VAULT_APPROLE_SECRET_ID }}
          export VAULT_SECRET_PATH_ROLE=secret/prodstack6/roles/prod-canonical-com
          export VAULT_SECRET_PATH_COMMON=secret/prodstack6/juju/common
          VAULT_TOKEN=$(vault write -f -field=token auth/approle/login role_id=${TF_VAR_login_approle_role_id} secret_id=${TF_VAR_login_approle_secret_id})
          export VAULT_TOKEN
          mkdir -p ~/.local/share/juju
          vault read -field=controller_config "${VAULT_SECRET_PATH_COMMON}/controllers/juju-controller-36-production-ps6" | base64 -d > ~/.local/share/juju/controllers.yaml
          USERNAME=$(vault read -field=username "${VAULT_SECRET_PATH_ROLE}/juju")
          PASSWORD=$(vault read -field=password "${VAULT_SECRET_PATH_ROLE}/juju")
          printf "controllers:\n  juju-controller-36-production-ps6:\n    user: %s\n    password: %s\n" "$USERNAME" "$PASSWORD" > ~/.local/share/juju/accounts.yaml
      - name: Deploy Application to production
        run: |
          export JUJU_MODEL=admin/prod-canonical-com
          juju refresh canonical-com --path ./canonical-com_ubuntu-22.04-amd64.charm --resource flask-app-image=${{ needs.publish-image.outputs.image_url }}
          juju refresh canonical-com-blog --path ./canonical-com_ubuntu-22.04-amd64.charm --resource flask-app-image=${{ needs.publish-image.outputs.image_url }}
          juju refresh canonical-com-careers --path ./canonical-com_ubuntu-22.04-amd64.charm --resource flask-app-image=${{ needs.publish-image.outputs.image_url }}
          juju wait-for application canonical-com --query='name=="canonical-com" && (status=="active" || status=="idle")'

View File

@ -23,6 +23,10 @@ Afterwards the website will be available at <http://localhost:8002>.
When you start changing files, the server should reload and make the changes available immediately.
## Environment variables
Environment variables are read from the available shell. For the charm, these are prepended with the prefix `FLASK_`, which we strip before re-inserting them into the environment.
## Greenhouse API
To work locally on the `/careers` section of the site, you will need to add a `HARVEST_API_KEY` environment variable to `.env` file. You can find this via the [Greenhouse admin panel](https://canonical.greenhouse.io/configure/dev_center/credentials).

1
app.py Normal file
View File

@ -0,0 +1 @@
from webapp.app import app # noqa: F401

9
charm/.gitignore vendored Normal file
View File

@ -0,0 +1,9 @@
venv/
build/
*.charm
.tox/
.coverage
__pycache__/
*.py[cod]
.idea
.vscode/

118
charm/charmcraft.yaml Normal file
View File

@ -0,0 +1,118 @@
# This file configures Charmcraft.
# See https://juju.is/docs/sdk/charmcraft-config for guidance.
name: canonical-com
type: charm

bases:
  - build-on:
      - name: ubuntu
        channel: "22.04"
    run-on:
      - name: ubuntu
        channel: "22.04"

summary: https://canonical.com
description: |
  This is the flask charm for the canonical.com website.

extensions:
  - flask-framework

# Each option follows the juju config-option schema: type, description,
# default. (The previous `source: default` entries were removed — `source`
# is not a valid key in this schema; it only appears in `juju config` output.)
config:
  options:
    application-crypto-secret-key:
      type: string
      description: "Harvest api cipher key"
      default: "super_secret"
    charmhub-discourse-api-key:
      type: string
      description: "Charmhub Discourse API key"
      default: "charmhub_discourse_api_key"
    charmhub-discourse-api-user:
      type: string
      description: "Charmhub Discourse API user"
      default: "charmhub_discourse_api_user"
    directory-api-token:
      type: string
      description: "Directory API token"
      default: "directory_api_token"
    discourse-api-key:
      type: string
      description: "Discourse API key"
      default: "discourse_api_key"
    discourse-api-username:
      type: string
      description: "Discourse API username"
      default: "discourse_api_username"
    greenhouse-api-key:
      type: string
      description: "Greenhouse API key"
      default: ""
    greenhouse-debug:
      type: string
      description: "Greenhouse debug key used in harvest api"
      default: "true"
    harvest-api-key:
      type: string
      description: "Harvest api key used in harvest api"
      default: "harvest_api_key"
    recaptcha-api-key:
      type: string
      description: "Recaptcha API key"
      default: "recaptcha_api_key"
    recaptcha-enabled:
      type: string
      description: "Flag to enable or disable recaptcha"
      default: "false"
    recaptcha-project-id:
      type: string
      description: "Recaptcha project ID"
      default: "recaptcha_project_id"
    recaptcha-score-threshold:
      type: string
      description: "Recaptcha score threshold"
      default: "0.5"
    recaptcha-site-key:
      type: string
      description: "Recaptcha site key"
      default: "recaptcha_site_key"
    search-api-key:
      type: string
      description: "Search API key"
      default: "search_api_key"
    sentry-dsn:
      type: string
      description: "Sentry DSN for error tracking"
      default: "https://aedc7a57f0bc4d22bf7c0b6d63c3e1bb@sentry.is.canonical.com//14"
    service-account-email:
      type: string
      description: "Google service account"
      default: "test_email@email.com"
    service-account-private-key:
      type: string
      description: "Google service account private key"
      default: "test_private_key"
    sitemap-secret:
      type: string
      description: "Sitemap access secret"
      default: "sitemap_secret"
    smtp-pass:
      type: string
      description: "SMTP password"
      default: "smtp_pass"
    smtp-sender-address:
      type: string
      description: "SMTP sender email address"
      default: "careers@canonical.com"
    smtp-server:
      type: string
      description: "SMTP server hostname"
      default: "smtp.example.com"
    smtp-user:
      type: string
      description: "SMTP username"
      default: "smtp_user"

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,791 @@
# Copyright 2023 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""A library for communicating with the S3 credentials providers and consumers.
This library provides the relevant interface code implementing the communication
specification for fetching, retrieving, triggering, and responding to events related to
the S3 provider charm and its consumers.
### Provider charm
The provider is implemented in the `s3-provider` charm which is meant to be deployed
alongside one or more consumer charms. The provider charm is serving the s3 credentials and
metadata needed to communicate and work with an S3 compatible backend.
Example:
```python
from charms.data_platform_libs.v0.s3 import CredentialRequestedEvent, S3Provider
class ExampleProviderCharm(CharmBase):
def __init__(self, *args) -> None:
super().__init__(*args)
self.s3_provider = S3Provider(self, "s3-credentials")
self.framework.observe(self.s3_provider.on.credentials_requested,
self._on_credential_requested)
def _on_credential_requested(self, event: CredentialRequestedEvent):
if not self.unit.is_leader():
return
# get relation id
relation_id = event.relation.id
# get bucket name
bucket = event.bucket
# S3 configuration parameters
desired_configuration = {"access-key": "your-access-key", "secret-key":
"your-secret-key", "bucket": "your-bucket"}
# update the configuration
self.s3_provider.update_connection_info(relation_id, desired_configuration)
# or it is possible to set each field independently
self.s3_provider.set_secret_key(relation_id, "your-secret-key")
if __name__ == "__main__":
main(ExampleProviderCharm)
```
### Requirer charm
The requirer charm is the charm requiring the S3 credentials.
An example of requirer charm is the following:
Example:
```python
from charms.data_platform_libs.v0.s3 import (
CredentialsChangedEvent,
CredentialsGoneEvent,
S3Requirer
)
class ExampleRequirerCharm(CharmBase):
def __init__(self, *args):
super().__init__(*args)
bucket_name = "test-bucket"
# if bucket name is not provided the bucket name will be generated
# e.g., ('relation-{relation.id}')
self.s3_client = S3Requirer(self, "s3-credentials", bucket_name)
self.framework.observe(self.s3_client.on.credentials_changed, self._on_credential_changed)
self.framework.observe(self.s3_client.on.credentials_gone, self._on_credential_gone)
def _on_credential_changed(self, event: CredentialsChangedEvent):
# access single parameter credential
secret_key = event.secret_key
access_key = event.access_key
# or as alternative all credentials can be collected as a dictionary
credentials = self.s3_client.get_s3_credentials()
def _on_credential_gone(self, event: CredentialsGoneEvent):
# credentials are removed
pass
if __name__ == "__main__":
main(ExampleRequirerCharm)
```
"""
import json
import logging
from collections import namedtuple
from typing import Dict, List, Optional, Union
import ops.charm
import ops.framework
import ops.model
from ops.charm import (
CharmBase,
CharmEvents,
RelationBrokenEvent,
RelationChangedEvent,
RelationEvent,
RelationJoinedEvent,
)
from ops.framework import EventSource, Object, ObjectEvents
from ops.model import Application, Relation, RelationDataContent, Unit
# The unique Charmhub library identifier, never change it
LIBID = "fca396f6254246c9bfa565b1f85ab528"
# Increment this major API version when introducing breaking changes
LIBAPI = 0
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 5
logger = logging.getLogger(__name__)
# Structured result of comparing two snapshots of a relation databag;
# produced by diff() below and consumed by the _on_relation_changed hooks.
Diff = namedtuple("Diff", "added changed deleted")
Diff.__doc__ = """
A tuple for storing the diff between two data mappings.
added - keys that were added
changed - keys that still exist but have new values
deleted - key that were deleted"""
def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff:
    """Retrieves the diff of the data in the relation changed databag.

    Args:
        event: relation changed event.
        bucket: bucket of the databag (app or unit)

    Returns:
        a Diff instance containing the added, deleted and changed
        keys from the event relation databag.
    """
    databag = event.relation.data
    # Previous snapshot of the remote data, kept under the reserved
    # "data" key on our side of the relation.
    previous = json.loads(databag[bucket].get("data", "{}"))
    # Current remote application databag, excluding the snapshot key.
    if event.app:
        current = {key: value for key, value in databag[event.app].items() if key != "data"}
    else:
        current = {}
    # Keys new in this event.
    added_keys = current.keys() - previous.keys()
    # Keys that disappeared since the last snapshot.
    removed_keys = previous.keys() - current.keys()
    # Keys present in both snapshots whose values differ.
    modified_keys = {
        key for key in previous.keys() & current.keys() if previous[key] != current[key]
    }
    # TODO: evaluate the possibility of losing the diff if some error
    # happens in the charm before the diff is completely checked (DPE-412).
    # Persist the current state so the next event diffs against it.
    databag[bucket].update({"data": json.dumps(current)})
    return Diff(added_keys, modified_keys, removed_keys)
class BucketEvent(RelationEvent):
    """Base class for bucket events."""

    @property
    def bucket(self) -> Optional[str]:
        """Returns the bucket that was requested."""
        if not self.relation.app:
            # Remote application is no longer available (e.g. relation
            # departing); there is nothing to read.
            return None
        return self.relation.data[self.relation.app].get("bucket", "")


class CredentialRequestedEvent(BucketEvent):
    """Event emitted when a set of credential is requested for use on this relation."""


class S3CredentialEvents(CharmEvents):
    """Event descriptor for events raised by S3Provider."""

    credentials_requested = EventSource(CredentialRequestedEvent)
class S3Provider(Object):
    """A provider handler for communicating S3 credentials to consumers.

    Observes relation-changed on the given relation and emits
    ``credentials_requested`` once the requirer publishes a bucket name.
    All writes go to the application databag, so only the leader acts.
    """

    on = S3CredentialEvents()  # pyright: ignore [reportAssignmentType]

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
    ):
        super().__init__(charm, relation_name)
        self.charm = charm
        self.local_app = self.charm.model.app
        self.local_unit = self.charm.unit
        self.relation_name = relation_name
        # monitor relation changed event for changes in the credentials
        self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed)

    def _on_relation_changed(self, event: RelationChangedEvent) -> None:
        """React to the relation changed event by consuming data."""
        if not self.charm.unit.is_leader():
            return
        diff = self._diff(event)
        # emit on credential requested if bucket is provided by the requirer application
        if "bucket" in diff.added:
            getattr(self.on, "credentials_requested").emit(
                event.relation, app=event.app, unit=event.unit
            )

    def _load_relation_data(self, raw_relation_data: dict) -> dict:
        """Loads relation data from the relation data bag.

        Values that parse as JSON are decoded; everything else is kept as-is.

        Args:
            raw_relation_data: Relation data from the databag

        Returns:
            dict: Relation data in dict format.
        """
        connection_data = {}
        for key in raw_relation_data:
            try:
                connection_data[key] = json.loads(raw_relation_data[key])
            except (json.decoder.JSONDecodeError, TypeError):
                connection_data[key] = raw_relation_data[key]
        return connection_data

    # NOTE: a 29-line commented-out duplicate of _diff used to live here;
    # it was dead code (superseded by the module-level diff()) and removed.
    def _diff(self, event: RelationChangedEvent) -> Diff:
        """Retrieves the diff of the data in the relation changed databag.

        Args:
            event: relation changed event.

        Returns:
            a Diff instance containing the added, deleted and changed
            keys from the event relation databag.
        """
        # The provider snapshots against its application databag.
        return diff(event, self.local_app)

    def fetch_relation_data(self) -> dict:
        """Retrieves data from relation.

        This function can be used to retrieve data from a relation
        in the charm code when outside an event callback.

        Returns:
            a dict of the values stored in the relation data bag
            for all relation instances (indexed by the relation id).
        """
        data = {}
        for relation in self.relations:
            data[relation.id] = (
                {key: value for key, value in relation.data[relation.app].items() if key != "data"}
                if relation.app
                else {}
            )
        return data

    def update_connection_info(self, relation_id: int, connection_data: dict) -> None:
        """Updates the credential data as set of key-value pairs in the relation.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            connection_data: dict containing the key-value pairs
                that should be updated.
        """
        # check and write changes only if you are the leader
        if not self.local_unit.is_leader():
            return
        relation = self.charm.model.get_relation(self.relation_name, relation_id)
        if not relation:
            return
        # configuration options that are list
        s3_list_options = ["attributes", "tls-ca-chain"]
        # update the databag, if connection data did not change with respect to before
        # the relation changed event is not triggered
        updated_connection_data = {}
        for configuration_option, configuration_value in connection_data.items():
            if configuration_option in s3_list_options:
                # Lists must be JSON-encoded: databag values are strings.
                updated_connection_data[configuration_option] = json.dumps(configuration_value)
            else:
                updated_connection_data[configuration_option] = configuration_value
        relation.data[self.local_app].update(updated_connection_data)
        logger.debug(f"Updated S3 connection info: {updated_connection_data}")

    @property
    def relations(self) -> List[Relation]:
        """The list of Relation instances associated with this relation_name."""
        return list(self.charm.model.relations[self.relation_name])

    def set_bucket(self, relation_id: int, bucket: str) -> None:
        """Sets bucket name in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            bucket: the bucket name.
        """
        self.update_connection_info(relation_id, {"bucket": bucket})

    def set_access_key(self, relation_id: int, access_key: str) -> None:
        """Sets access-key value in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            access_key: the access-key value.
        """
        self.update_connection_info(relation_id, {"access-key": access_key})

    def set_secret_key(self, relation_id: int, secret_key: str) -> None:
        """Sets the secret key value in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            secret_key: the value of the secret key.
        """
        self.update_connection_info(relation_id, {"secret-key": secret_key})

    def set_path(self, relation_id: int, path: str) -> None:
        """Sets the path value in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            path: the path value.
        """
        self.update_connection_info(relation_id, {"path": path})

    def set_endpoint(self, relation_id: int, endpoint: str) -> None:
        """Sets the endpoint address in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            endpoint: the endpoint address.
        """
        self.update_connection_info(relation_id, {"endpoint": endpoint})

    def set_region(self, relation_id: int, region: str) -> None:
        """Sets the region location in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            region: the region address.
        """
        self.update_connection_info(relation_id, {"region": region})

    def set_s3_uri_style(self, relation_id: int, s3_uri_style: str) -> None:
        """Sets the S3 URI style in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            s3_uri_style: the s3 URI style.
        """
        self.update_connection_info(relation_id, {"s3-uri-style": s3_uri_style})

    def set_storage_class(self, relation_id: int, storage_class: str) -> None:
        """Sets the storage class in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            storage_class: the storage class.
        """
        self.update_connection_info(relation_id, {"storage-class": storage_class})

    def set_tls_ca_chain(self, relation_id: int, tls_ca_chain: List[str]) -> None:
        """Sets the tls_ca_chain value in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            tls_ca_chain: the TLS Chain value.
        """
        self.update_connection_info(relation_id, {"tls-ca-chain": tls_ca_chain})

    def set_s3_api_version(self, relation_id: int, s3_api_version: str) -> None:
        """Sets the S3 API version in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            s3_api_version: the S3 version value.
        """
        self.update_connection_info(relation_id, {"s3-api-version": s3_api_version})

    def set_delete_older_than_days(self, relation_id: int, days: int) -> None:
        """Sets the retention days for full backups in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            days: the value.
        """
        self.update_connection_info(relation_id, {"delete-older-than-days": str(days)})

    def set_attributes(self, relation_id: int, attributes: List[str]) -> None:
        """Sets the connection attributes in application databag.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            attributes: the attributes value.
        """
        self.update_connection_info(relation_id, {"attributes": attributes})
class S3Event(RelationEvent):
    """Base class for S3 storage events.

    Exposes the connection parameters published by the remote application
    in the relation databag as read-only properties. Every accessor
    returns ``None`` when the remote application is unavailable or the
    key has not been set.
    """

    def _remote_value(self, key: str) -> Optional[str]:
        """Return the raw databag value for ``key``, or None if unavailable.

        Factored out of the 13 properties below, which previously repeated
        the same ``if not self.relation.app`` guard verbatim.
        """
        if not self.relation.app:
            return None
        return self.relation.data[self.relation.app].get(key)

    @property
    def bucket(self) -> Optional[str]:
        """Returns the bucket name."""
        return self._remote_value("bucket")

    @property
    def access_key(self) -> Optional[str]:
        """Returns the access key."""
        return self._remote_value("access-key")

    @property
    def secret_key(self) -> Optional[str]:
        """Returns the secret key."""
        return self._remote_value("secret-key")

    @property
    def path(self) -> Optional[str]:
        """Returns the path where data can be stored."""
        return self._remote_value("path")

    @property
    def endpoint(self) -> Optional[str]:
        """Returns the endpoint address."""
        return self._remote_value("endpoint")

    @property
    def region(self) -> Optional[str]:
        """Returns the region."""
        return self._remote_value("region")

    @property
    def s3_uri_style(self) -> Optional[str]:
        """Returns the s3 uri style."""
        return self._remote_value("s3-uri-style")

    @property
    def storage_class(self) -> Optional[str]:
        """Returns the storage class name."""
        return self._remote_value("storage-class")

    @property
    def tls_ca_chain(self) -> Optional[List[str]]:
        """Returns the TLS CA chain."""
        tls_ca_chain = self._remote_value("tls-ca-chain")
        # Lists are stored JSON-encoded (see update_connection_info).
        if tls_ca_chain is not None:
            return json.loads(tls_ca_chain)
        return None

    @property
    def s3_api_version(self) -> Optional[str]:
        """Returns the S3 API version."""
        return self._remote_value("s3-api-version")

    @property
    def delete_older_than_days(self) -> Optional[int]:
        """Returns the retention days for full backups."""
        days = self._remote_value("delete-older-than-days")
        if days is None:
            return None
        return int(days)

    @property
    def attributes(self) -> Optional[List[str]]:
        """Returns the attributes."""
        attributes = self._remote_value("attributes")
        # Lists are stored JSON-encoded (see update_connection_info).
        if attributes is not None:
            return json.loads(attributes)
        return None
class CredentialsChangedEvent(S3Event):
    """Event emitted when S3 credential are changed on this relation."""


class CredentialsGoneEvent(RelationEvent):
    """Event emitted when S3 credential are removed from this relation."""


class S3CredentialRequiresEvents(ObjectEvents):
    """Event descriptor for events raised by the S3Provider."""

    credentials_changed = EventSource(CredentialsChangedEvent)
    credentials_gone = EventSource(CredentialsGoneEvent)


# Keys that must be present in the provider databag before the requirer
# side treats the credentials as complete and usable.
S3_REQUIRED_OPTIONS = ["access-key", "secret-key"]
class S3Requirer(Object):
"""Requires-side of the s3 relation."""
on = S3CredentialRequiresEvents() # pyright: ignore[reportAssignmentType]
    def __init__(
        self, charm: ops.charm.CharmBase, relation_name: str, bucket_name: Optional[str] = None
    ):
        """Manager of the s3 client relations.

        Args:
            charm: the requirer charm instance.
            relation_name: name of the s3 relation endpoint to manage.
            bucket_name: desired bucket; if omitted, a name is generated
                from the relation id on relation-joined.
        """
        super().__init__(charm, relation_name)
        self.relation_name = relation_name
        self.charm = charm
        self.local_app = self.charm.model.app
        self.local_unit = self.charm.unit
        # May stay None until _on_relation_joined fills it in.
        self.bucket = bucket_name
        self.framework.observe(
            self.charm.on[self.relation_name].relation_changed, self._on_relation_changed
        )
        self.framework.observe(
            self.charm.on[self.relation_name].relation_joined, self._on_relation_joined
        )
        self.framework.observe(
            self.charm.on[self.relation_name].relation_broken,
            self._on_relation_broken,
        )
def _generate_bucket_name(self, event: RelationJoinedEvent):
"""Returns the bucket name generated from relation id."""
return f"relation-{event.relation.id}"
    def _on_relation_joined(self, event: RelationJoinedEvent) -> None:
        """Event emitted when the application joins the s3 relation.

        Publishes the bucket name so the provider can emit
        credentials_requested; falls back to a generated name when the
        charm did not supply one at construction time.
        """
        if self.bucket is None:
            self.bucket = self._generate_bucket_name(event)
        self.update_connection_info(event.relation.id, {"bucket": self.bucket})
    def fetch_relation_data(self) -> dict:
        """Retrieves data from relation.

        This function can be used to retrieve data from a relation
        in the charm code when outside an event callback.

        Returns:
            a dict of the values stored in the relation data bag
            for all relation instances (indexed by the relation id).
        """
        data = {}
        for relation in self.relations:
            # Reads this charm's own application databag (what we
            # published), decoding any JSON-encoded values.
            data[relation.id] = self._load_relation_data(relation.data[self.charm.app])
        return data
def update_connection_info(self, relation_id: int, connection_data: dict) -> None:
"""Updates the credential data as set of key-value pairs in the relation.
This function writes in the application data bag, therefore,
only the leader unit can call it.
Args:
relation_id: the identifier for a particular relation.
connection_data: dict containing the key-value pairs
that should be updated.
"""
# check and write changes only if you are the leader
if not self.local_unit.is_leader():
return
relation = self.charm.model.get_relation(self.relation_name, relation_id)
if not relation:
return
# update the databag, if connection data did not change with respect to before
# the relation changed event is not triggered
# configuration options that are list
s3_list_options = ["attributes", "tls-ca-chain"]
updated_connection_data = {}
for configuration_option, configuration_value in connection_data.items():
if configuration_option in s3_list_options:
updated_connection_data[configuration_option] = json.dumps(configuration_value)
else:
updated_connection_data[configuration_option] = configuration_value
relation.data[self.local_app].update(updated_connection_data)
logger.debug(f"Updated S3 credentials: {updated_connection_data}")
def _load_relation_data(self, raw_relation_data: RelationDataContent) -> Dict[str, str]:
"""Loads relation data from the relation data bag.
Args:
raw_relation_data: Relation data from the databag
Returns:
dict: Relation data in dict format.
"""
connection_data = {}
for key in raw_relation_data:
try:
connection_data[key] = json.loads(raw_relation_data[key])
except (json.decoder.JSONDecodeError, TypeError):
connection_data[key] = raw_relation_data[key]
return connection_data
def _diff(self, event: RelationChangedEvent) -> Diff:
"""Retrieves the diff of the data in the relation changed databag.
Args:
event: relation changed event.
Returns:
a Diff instance containing the added, deleted and changed
keys from the event relation databag.
"""
return diff(event, self.local_unit)
def _on_relation_changed(self, event: RelationChangedEvent) -> None:
"""Notify the charm about the presence of S3 credentials."""
# check if the mandatory options are in the relation data
contains_required_options = True
# get current credentials data
credentials = self.get_s3_connection_info()
# records missing options
missing_options = []
for configuration_option in S3_REQUIRED_OPTIONS:
if configuration_option not in credentials:
contains_required_options = False
missing_options.append(configuration_option)
# emit credential change event only if all mandatory fields are present
if contains_required_options:
getattr(self.on, "credentials_changed").emit(
event.relation, app=event.app, unit=event.unit
)
else:
logger.warning(
f"Some mandatory fields: {missing_options} are not present, do not emit credential change event!"
)
def get_s3_connection_info(self) -> Dict[str, str]:
"""Return the s3 credentials as a dictionary."""
for relation in self.relations:
if relation and relation.app:
return self._load_relation_data(relation.data[relation.app])
return {}
def _on_relation_broken(self, event: RelationBrokenEvent) -> None:
"""Notify the charm about a broken S3 credential store relation."""
getattr(self.on, "credentials_gone").emit(event.relation, app=event.app, unit=event.unit)
@property
def relations(self) -> List[Relation]:
"""The list of Relation instances associated with this relation_name."""
return list(self.charm.model.relations[self.relation_name])

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,301 @@
# Copyright 2022 Canonical Ltd.
# See LICENSE file for licensing details.
"""## Overview.
This document explains how to use the `JujuTopology` class to
create and consume topology information from Juju in a consistent manner.
The goal of the Juju topology is to uniquely identify a piece
of software running across any of your Juju-managed deployments.
This is achieved by combining the following four elements:
- Model name
- Model UUID
- Application name
- Unit identifier
For a more in-depth description of the concept, as well as a
walk-through of its use-case in observability, see
[this blog post](https://juju.is/blog/model-driven-observability-part-2-juju-topology-metrics)
on the Juju blog.
## Library Usage
This library may be used to create and consume `JujuTopology` objects.
The `JujuTopology` class provides three ways to create instances:
### Using the `from_charm` method
Enables instantiation by supplying the charm as an argument. When
creating topology objects for the current charm, this is the recommended
approach.
```python
topology = JujuTopology.from_charm(self)
```
### Using the `from_dict` method
Allows for instantiation using a dictionary of relation data, like the
`scrape_metadata` from Prometheus or the labels of an alert rule. When
creating topology objects for remote charms, this is the recommended
approach.
```python
scrape_metadata = json.loads(relation.data[relation.app].get("scrape_metadata", "{}"))
topology = JujuTopology.from_dict(scrape_metadata)
```
### Using the class constructor
Enables instantiation using whatever values you want. While this
is useful in some very specific cases, this is almost certainly not
what you are looking for as setting these values manually may
result in observability metrics which do not uniquely identify a
charm in order to provide accurate usage reporting, alerting,
horizontal scaling, or other use cases.
```python
topology = JujuTopology(
model="some-juju-model",
model_uuid="00000000-0000-0000-0000-000000000001",
application="fancy-juju-application",
unit="fancy-juju-application/0",
charm_name="fancy-juju-application-k8s",
)
```
"""
from collections import OrderedDict
from typing import Dict, List, Optional
from uuid import UUID
# The unique Charmhub library identifier, never change it
LIBID = "bced1658f20f49d28b88f61f83c2d232"
# Charmhub library major API version and patch version.
LIBAPI = 0
LIBPATCH = 6
class InvalidUUIDError(Exception):
    """Raised when a supplied string is not a valid UUID."""
    def __init__(self, uuid: str):
        # Keep the formatted message on the instance so callers can read it
        # without going through str(exc).
        self.message = f"'{uuid}' is not a valid UUID."
        super().__init__(self.message)
class JujuTopology:
    """JujuTopology is used for storing, generating and formatting juju topology information.
    DEPRECATED: This class is deprecated. Use `pip install cosl` and
    `from cosl.juju_topology import JujuTopology` instead.
    """
    def __init__(
        self,
        model: str,
        model_uuid: str,
        application: str,
        unit: Optional[str] = None,
        charm_name: Optional[str] = None,
    ):
        """Build a JujuTopology object.
        A `JujuTopology` object is used for storing and transforming
        Juju topology information. This information is used to
        annotate Prometheus scrape jobs and alert rules. Such
        annotation when applied to scrape jobs helps in identifying
        the source of the scraped metrics. On the other hand when
        applied to alert rules topology information ensures that
        evaluation of alert expressions is restricted to the source
        (charm) from which the alert rules were obtained.
        Args:
            model: a string name of the Juju model
            model_uuid: a globally unique string identifier for the Juju model
            application: an application name as a string
            unit: a unit name as a string
            charm_name: name of charm as a string
        Raises:
            InvalidUUIDError: if `model_uuid` is not a valid v4 UUID.
        """
        if not self.is_valid_uuid(model_uuid):
            raise InvalidUUIDError(model_uuid)
        self._model = model
        self._model_uuid = model_uuid
        self._application = application
        self._charm_name = charm_name
        self._unit = unit
    def is_valid_uuid(self, uuid):
        """Validate the supplied UUID against the Juju Model UUID pattern.
        Args:
            uuid: string that needs to be checked if it is valid v4 UUID.
        Returns:
            True if parameter is a valid v4 UUID, False otherwise.
        """
        # Round-tripping through uuid.UUID rejects malformed strings and any
        # input whose canonical v4 rendering differs from the original.
        try:
            return str(UUID(uuid, version=4)) == uuid
        except (ValueError, TypeError):
            return False
    @classmethod
    def from_charm(cls, charm):
        """Creates a JujuTopology instance by using the model data available on a charm object.
        Args:
            charm: a `CharmBase` object for which the `JujuTopology` will be constructed
        Returns:
            a `JujuTopology` object.
        """
        return cls(
            model=charm.model.name,
            model_uuid=charm.model.uuid,
            application=charm.model.app.name,
            unit=charm.model.unit.name,
            charm_name=charm.meta.name,
        )
    @classmethod
    def from_dict(cls, data: dict):
        """Factory method for creating `JujuTopology` children from a dictionary.
        Args:
            data: a dictionary with five keys providing topology information. The keys are
                - "model"
                - "model_uuid"
                - "application"
                - "unit"
                - "charm_name"
                `unit` and `charm_name` may be empty, but will result in more limited
                labels. However, this allows us to support charms without workloads.
        Returns:
            a `JujuTopology` object.
        """
        return cls(
            model=data["model"],
            model_uuid=data["model_uuid"],
            application=data["application"],
            unit=data.get("unit", ""),
            charm_name=data.get("charm_name", ""),
        )
    def as_dict(
        self,
        *,
        remapped_keys: Optional[Dict[str, str]] = None,
        excluded_keys: Optional[List[str]] = None,
    ) -> OrderedDict:
        """Format the topology information into an ordered dict.
        Keeping the dictionary ordered is important to be able to
        compare dicts without having to resort to deep comparisons.
        Args:
            remapped_keys: A dictionary mapping old key names to new key names,
                which will be substituted when invoked.
            excluded_keys: A list of key names to exclude from the returned dict.
        """
        ret = OrderedDict(
            [
                ("model", self.model),
                ("model_uuid", self.model_uuid),
                ("application", self.application),
                ("unit", self.unit),
                ("charm_name", self.charm_name),
            ]
        )
        if excluded_keys:
            ret = OrderedDict((k, v) for k, v in ret.items() if k not in excluded_keys)
        if remapped_keys:
            # Fall back to the original key when no remapping is defined.
            ret = OrderedDict((remapped_keys.get(k) or k, v) for k, v in ret.items())
        return ret
    @property
    def identifier(self) -> str:
        """Format the topology information into a terse string.
        This crops the model UUID, making it unsuitable for comparisons against
        anything but other identifiers. Mainly to be used as a display name or file
        name where long strings might become an issue.
        >>> JujuTopology( \
                model = "a-model", \
                model_uuid = "00000000-0000-4000-8000-000000000000", \
                application = "some-app", \
                unit = "some-app/1" \
            ).identifier
        'a-model_00000000_some-app'
        """
        parts = self.as_dict(
            excluded_keys=["unit", "charm_name"],
        )
        parts["model_uuid"] = self.model_uuid_short
        # "/" may appear in unit-like values; keep the identifier path-safe.
        return "_".join(str(val) for val in parts.values()).replace("/", "_")
    @property
    def label_matcher_dict(self) -> Dict[str, str]:
        """Format the topology information into a dict with keys having 'juju_' as prefix.
        Relabelled topology never includes the unit as it would then only match
        the leader unit (ie. the unit that produced the dict).
        """
        items = self.as_dict(
            remapped_keys={"charm_name": "charm"},
            excluded_keys=["unit"],
        ).items()
        return {f"juju_{key}": value for key, value in items if value}
    @property
    def label_matchers(self) -> str:
        """Format the topology information into a promql/logql label matcher string.
        Topology label matchers should never include the unit as it
        would then only match the leader unit (ie. the unit that
        produced the matchers).
        """
        items = self.label_matcher_dict.items()
        return ", ".join(f'{key}="{value}"' for key, value in items if value)
    @property
    def model(self) -> str:
        """Getter for the juju model value."""
        return self._model
    @property
    def model_uuid(self) -> str:
        """Getter for the juju model uuid value."""
        return self._model_uuid
    @property
    def model_uuid_short(self) -> str:
        """Getter for the juju model value, truncated to the first eight letters."""
        return self._model_uuid[:8]
    @property
    def application(self) -> str:
        """Getter for the juju application value."""
        return self._application
    @property
    def charm_name(self) -> Optional[str]:
        """Getter for the juju charm name value."""
        return self._charm_name
    @property
    def unit(self) -> Optional[str]:
        """Getter for the juju unit value."""
        return self._unit

View File

@ -0,0 +1,411 @@
# Copyright 2023 Canonical Ltd.
# See LICENSE file for licensing details.
"""# Interface Library for OpenFGA.
This library wraps relation endpoints using the `openfga` interface
and provides a Python API for requesting OpenFGA authorization model
stores to be created.
## Getting Started
To get started using the library, you just need to fetch the library using `charmcraft`.
```shell
cd some-charm
charmcraft fetch-lib charms.openfga_k8s.v1.openfga
```
In the `metadata.yaml` of the charm, add the following:
```yaml
requires:
openfga:
interface: openfga
```
Then, to initialise the library:
```python
from charms.openfga_k8s.v1.openfga import (
OpenFGARequires,
OpenFGAStoreCreateEvent,
)
class SomeCharm(CharmBase):
def __init__(self, *args):
# ...
self.openfga = OpenFGARequires(self, "test-openfga-store")
self.framework.observe(
self.openfga.on.openfga_store_created,
self._on_openfga_store_created,
)
def _on_openfga_store_created(self, event: OpenFGAStoreCreateEvent):
if not event.store_id:
return
info = self.openfga.get_store_info()
if not info:
return
logger.info("store id {}".format(info.store_id))
logger.info("token {}".format(info.token))
logger.info("grpc_api_url {}".format(info.grpc_api_url))
logger.info("http_api_url {}".format(info.http_api_url))
```
The OpenFGA charm will attempt to use Juju secrets to pass the token
to the requiring charm. However, if the Juju version does not support secrets it will
fall back to passing plaintext token via relation data.
"""
import json
import logging
from typing import Dict, MutableMapping, Optional, Union
import pydantic
from ops import (
Application,
CharmBase,
Handle,
HookEvent,
Relation,
RelationCreatedEvent,
RelationDepartedEvent,
TooManyRelatedAppsError,
)
from ops.charm import CharmEvents, RelationChangedEvent, RelationEvent
from ops.framework import EventSource, Object
from pydantic import BaseModel, Field
from typing_extensions import Self
# The unique Charmhub library identifier, never change it
LIBID = "216f28cfeea4447b8a576f01bfbecdf5"
# Increment this major API version when introducing breaking changes
LIBAPI = 1
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 3
# Python dependencies of this library, installed alongside the charm.
PYDEPS = ["pydantic ~= 2.0"]
logger = logging.getLogger(__name__)
# Keys Juju injects into every databag; excluded when loading databag models.
BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"}
# Default relation endpoint name for the openfga interface.
DEFAULT_INTEGRATION_NAME = "openfga"
def _update_relation_app_databag(app: Application, relation: Relation, data: Dict) -> None:
    """Write *data* into *app*'s databag on *relation*, stringifying values.

    Falsy values are serialized as empty strings; no-op when relation is None.
    """
    if relation is None:
        return
    serialized = {key: str(value) if value else "" for key, value in data.items()}
    relation.data[app].update(serialized)
class OpenfgaError(RuntimeError):
    """Base class for custom errors raised by this library."""
class DataValidationError(OpenfgaError):
    """Raised when data validation fails on relation data."""
class DatabagModel(BaseModel):
    """Base databag model.

    Provides (de)serialization between Juju relation databags (flat
    str -> str mappings) and pydantic models.
    """
    @classmethod
    def _load_value(cls, v: str) -> Union[Dict, str]:
        # Databag values may be JSON-encoded; fall back to the raw string
        # for values that are not valid JSON.
        try:
            return json.loads(v)
        except json.JSONDecodeError:
            return v
    @classmethod
    def load(cls, databag: MutableMapping) -> Self:
        """Load this model from a Juju databag."""
        try:
            data = {
                k: cls._load_value(v) for k, v in databag.items() if k not in BUILTIN_JUJU_KEYS
            }
        except json.JSONDecodeError:
            # NOTE(review): _load_value already swallows JSONDecodeError, so
            # this branch looks unreachable — kept for parity with upstream.
            logger.error(f"invalid databag contents: expecting json. {databag}")
            raise
        return cls.model_validate_json(json.dumps(data))
class OpenfgaRequirerAppData(DatabagModel):
    """Openfga requirer application databag model."""
    # Name of the authorization store the requirer asks the provider to create.
    store_name: str = Field(description="The store name the application requires")
class OpenfgaProviderAppData(DatabagModel):
    """Openfga provider application databag model."""
    # Id of the created store; None until the provider has created it.
    store_id: Optional[str] = Field(description="The store_id", default=None)
    # Plaintext token fallback, used when Juju secrets are unavailable.
    token: Optional[str] = Field(description="The token", default=None)
    token_secret_id: Optional[str] = Field(
        description="The juju secret_id which can be used to retrieve the token",
        default=None,
    )
    grpc_api_url: str = Field(description="The openfga server GRPC address")
    http_api_url: str = Field(description="The openfga server HTTP address")
class OpenFGAStoreCreateEvent(HookEvent):
    """Event emitted when a new OpenFGA store is created."""
    def __init__(self, handle: Handle, store_id: str):
        super().__init__(handle)
        # Store id assigned by the OpenFGA provider.
        self.store_id = store_id
    def snapshot(self) -> Dict:
        """Save event."""
        return {
            "store_id": self.store_id,
        }
    def restore(self, snapshot: Dict) -> None:
        """Restore event."""
        self.store_id = snapshot["store_id"]
class OpenFGAStoreRemovedEvent(HookEvent):
    """Event emitted when an OpenFGA store is removed (relation departed)."""
class OpenFGARequirerEvents(CharmEvents):
    """Custom charm events emitted by the requirer side."""
    openfga_store_created = EventSource(OpenFGAStoreCreateEvent)
    openfga_store_removed = EventSource(OpenFGAStoreRemovedEvent)
class OpenFGARequires(Object):
    """This class defines the functionality for the 'requires' side of the 'openfga' relation.
    Hook events observed:
        - relation-created
        - relation-changed
        - relation-departed
    """
    on = OpenFGARequirerEvents()
    def __init__(
        self,
        charm: CharmBase,
        store_name: str,
        relation_name: str = DEFAULT_INTEGRATION_NAME,
    ) -> None:
        """Construct the requirer and start observing relation hooks.

        Args:
            charm: the charm instantiating this library.
            store_name: name of the OpenFGA authorization store to request.
            relation_name: name of the openfga relation endpoint.
        """
        super().__init__(charm, relation_name)
        self.charm = charm
        self.app = charm.app
        self.relation_name = relation_name
        self.store_name = store_name
        self.framework.observe(charm.on[relation_name].relation_created, self._on_relation_created)
        self.framework.observe(
            charm.on[relation_name].relation_changed,
            self._on_relation_changed,
        )
        self.framework.observe(
            charm.on[relation_name].relation_departed,
            self._on_relation_departed,
        )
    def _on_relation_created(self, event: RelationCreatedEvent) -> None:
        """Handle the relation-created event."""
        # Only the leader may write to the application databag.
        if not self.model.unit.is_leader():
            return
        requirer_data = OpenfgaRequirerAppData(store_name=self.store_name)
        _update_relation_app_databag(self.app, event.relation, requirer_data.model_dump())
    def _on_relation_changed(self, event: RelationChangedEvent) -> None:
        """Handle the relation-changed event."""
        if not (app := event.relation.app):
            return
        databag = event.relation.data[app]
        try:
            data = OpenfgaProviderAppData.load(databag)
        except pydantic.ValidationError:
            # Provider has not (fully) populated its databag yet.
            return
        # NOTE(review): store_id may still be None here; consumers are
        # expected to check event.store_id (see module docstring example).
        self.on.openfga_store_created.emit(store_id=data.store_id)
    def _on_relation_departed(self, event: RelationDepartedEvent) -> None:
        """Handle the relation-departed event."""
        self.on.openfga_store_removed.emit()
    def _get_relation(self, relation_id: Optional[int] = None) -> Optional[Relation]:
        # Resolve a single relation instance; a relation_id is required when
        # several applications are related over this endpoint.
        try:
            relation = self.model.get_relation(self.relation_name, relation_id=relation_id)
        except TooManyRelatedAppsError:
            raise RuntimeError("More than one relations are defined. Please provide a relation_id")
        if not relation or not relation.app:
            return None
        return relation
    def get_store_info(self) -> Optional[OpenfgaProviderAppData]:
        """Get the OpenFGA store and server info."""
        if not (relation := self._get_relation()):
            return None
        if not relation.app:
            return None
        databag = relation.data[relation.app]
        try:
            data = OpenfgaProviderAppData.load(databag)
        except pydantic.ValidationError:
            return None
        if data.token_secret_id:
            # Prefer the Juju secret over any plaintext token in the databag.
            token_secret = self.model.get_secret(id=data.token_secret_id)
            token = token_secret.get_content()["token"]
            data.token = token
        return data
class OpenFGAStoreRequestEvent(RelationEvent):
    """Event emitted when a new OpenFGA store is requested."""
    def __init__(self, handle: Handle, relation: Relation, store_name: str) -> None:
        super().__init__(handle, relation)
        # Store name requested by the requirer application.
        self.store_name = store_name
    def snapshot(self) -> Dict:
        """Save event."""
        # Persist the custom field alongside the base RelationEvent snapshot.
        dct = super().snapshot()
        dct["store_name"] = self.store_name
        return dct
    def restore(self, snapshot: Dict) -> None:
        """Restore event."""
        super().restore(snapshot)
        self.store_name = snapshot["store_name"]
class OpenFGAProviderEvents(CharmEvents):
    """Custom charm events emitted by the provider side."""
    openfga_store_requested = EventSource(OpenFGAStoreRequestEvent)
class OpenFGAProvider(Object):
    """Provider side of the openfga relation."""
    on = OpenFGAProviderEvents()
    def __init__(
        self,
        charm: CharmBase,
        relation_name: str = DEFAULT_INTEGRATION_NAME,
        http_port: Optional[str] = "8080",
        grpc_port: Optional[str] = "8081",
        scheme: Optional[str] = "http",
    ):
        """Construct the provider and start observing relation hooks.

        Args:
            charm: the charm instantiating this library.
            relation_name: name of the openfga relation endpoint.
            http_port: port of the OpenFGA HTTP API.
            grpc_port: port of the OpenFGA gRPC API.
            scheme: URL scheme used to build the advertised API URLs.
        """
        super().__init__(charm, relation_name)
        self.charm = charm
        self.app = charm.app
        self.relation_name = relation_name
        self.http_port = http_port
        self.grpc_port = grpc_port
        self.scheme = scheme
        self.framework.observe(
            charm.on[relation_name].relation_changed,
            self._on_relation_changed,
        )
    def _on_relation_changed(self, event: RelationChangedEvent) -> None:
        # Re-emit as a custom event once the requirer publishes a valid
        # store request in its application databag.
        if not (app := event.app):
            return
        data = event.relation.data[app]
        if not data:
            logger.info("No relation data available.")
            return
        try:
            data = OpenfgaRequirerAppData.load(data)
        except pydantic.ValidationError:
            return
        self.on.openfga_store_requested.emit(event.relation, store_name=data.store_name)
    def _get_http_url(self, relation: Relation) -> str:
        # Advertise the ingress address of the binding for this relation.
        address = self.model.get_binding(relation).network.ingress_address.exploded
        return f"{self.scheme}://{address}:{self.http_port}"
    def _get_grpc_url(self, relation: Relation) -> str:
        address = self.model.get_binding(relation).network.ingress_address.exploded
        return f"{self.scheme}://{address}:{self.grpc_port}"
    def update_relation_info(
        self,
        store_id: str,
        grpc_api_url: Optional[str] = None,
        http_api_url: Optional[str] = None,
        token: Optional[str] = None,
        token_secret_id: Optional[str] = None,
        relation_id: Optional[int] = None,
    ) -> None:
        """Update a relation databag.

        Args:
            store_id: id of the created store.
            grpc_api_url: gRPC API URL; derived from the binding when omitted.
            http_api_url: HTTP API URL; derived from the binding when omitted.
            token: plaintext token (fallback when Juju secrets are unavailable).
            token_secret_id: Juju secret id holding the token.
            relation_id: target relation; needed with multiple relations.
        """
        # Only the leader may write to the application databag.
        if not self.model.unit.is_leader():
            return
        relation = self.model.get_relation(self.relation_name, relation_id)
        if not relation or not relation.app:
            return
        if not grpc_api_url:
            grpc_api_url = self._get_grpc_url(relation=relation)
        if not http_api_url:
            http_api_url = self._get_http_url(relation=relation)
        provider_data = OpenfgaProviderAppData(
            store_id=store_id,
            grpc_api_url=grpc_api_url,
            http_api_url=http_api_url,
            token_secret_id=token_secret_id,
            token=token,
        )
        _update_relation_app_databag(
            self.app,
            relation,
            provider_data.model_dump(),
        )
    def update_server_info(
        self, grpc_api_url: Optional[str] = None, http_api_url: Optional[str] = None
    ) -> None:
        """Update all the relations databag with the server info."""
        if not self.model.unit.is_leader():
            return
        for relation in self.model.relations[self.relation_name]:
            # Preserve the store/token fields already published on each
            # relation; only the server URLs are refreshed.
            relation_data = relation.data[self.app]
            provider_data = OpenfgaProviderAppData(
                store_id=relation_data.get("store_id"),
                token=relation_data.get("token"),
                token_secret_id=relation_data.get("token_secret_id"),
                grpc_api_url=grpc_api_url or self._get_grpc_url(relation),
                http_api_url=http_api_url or self._get_http_url(relation),
            )
            _update_relation_app_databag(
                self.app,
                relation,
                provider_data.model_dump(),
            )

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,153 @@
"""Library for the redis relation.
This library contains the Requires and Provides classes for handling the
redis interface.
Import `RedisRequires` in your charm by adding the following to `src/charm.py`:
```
from charms.redis_k8s.v0.redis import RedisRequires
```
Define the following attributes in your charm class for the library to be able to work with it
```
on = RedisRelationCharmEvents()
```
And then wherever you need to reference the relation data it will be available
in the property `relation_data`:
```
redis_host = self.redis.relation_data.get("hostname")
redis_port = self.redis.relation_data.get("port")
```
You will also need to add the following to `metadata.yaml`:
```
requires:
redis:
interface: redis
```
"""
import logging
import socket
from typing import Dict, Optional
from ops.charm import CharmEvents
from ops.framework import EventBase, EventSource, Object
# The unique Charmhub library identifier, never change it.
LIBID = "fe18a608cec5465fa5153e419abcad7b"
# Increment this major API version when introducing breaking changes.
LIBAPI = 0
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version.
LIBPATCH = 7
logger = logging.getLogger(__name__)
# Default relation endpoint name. NOTE(review): "REALTION" is a typo for
# "RELATION"; preserved because renaming would break importers of this
# published library constant.
DEFAULT_REALTION_NAME = "redis"
class RedisRelationUpdatedEvent(EventBase):
    """An event for the redis relation having been updated.

    Emitted by RedisRequires on relation joined/changed/broken.
    """
class RedisRelationCharmEvents(CharmEvents):
    """A class to carry custom charm events so requires can react to relation changes."""
    # Surfaced on the charm as `charm.on.redis_relation_updated`.
    redis_relation_updated = EventSource(RedisRelationUpdatedEvent)
class RedisRequires(Object):
    """Requires side of the redis interface.

    Observes relation joined/changed/broken and re-emits the charm's custom
    `redis_relation_updated` event so the charm can react to changes.
    """

    def __init__(self, charm, relation_name: str = DEFAULT_REALTION_NAME):
        """A class implementing the redis requires relation.

        Args:
            charm: the charm instantiating this library; must expose a
                `RedisRelationCharmEvents` descriptor as its `on` attribute.
            relation_name: name of the redis relation endpoint.
        """
        super().__init__(charm, relation_name)
        # Joined and changed are handled identically: both just surface the
        # custom event so the charm can re-render its configuration.
        self.framework.observe(charm.on[relation_name].relation_joined, self._on_relation_changed)
        self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed)
        self.framework.observe(charm.on[relation_name].relation_broken, self._on_relation_broken)
        self.charm = charm
        self.relation_name = relation_name

    def _on_relation_changed(self, event):
        """Handle the relation joined/changed events."""
        if not event.unit:
            return
        # Trigger an event that our charm can react to.
        self.charm.on.redis_relation_updated.emit()

    def _on_relation_broken(self, event):
        """Handle the relation broken event."""
        # Trigger an event that our charm can react to.
        self.charm.on.redis_relation_updated.emit()

    @property
    def app_data(self) -> Optional[Dict[str, str]]:
        """Retrieve the app data.

        Returns:
            Dict: dict containing the app data, or None when the relation
            is not established.
        """
        relation = self.model.get_relation(self.relation_name)
        if not relation:
            return None
        return relation.data[relation.app]

    @property
    def relation_data(self) -> Optional[Dict[str, str]]:
        """Retrieve the relation data of an arbitrary remote unit.

        Returns:
            Dict: dict containing the relation data, or None when the
            relation is not established or has no remote units.
        """
        relation = self.model.get_relation(self.relation_name)
        if not relation or not relation.units:
            return None
        unit = next(iter(relation.units))
        return relation.data[unit]

    @property
    def url(self) -> Optional[str]:
        """Retrieve the Redis URL.

        Returns:
            str: the Redis URL, or None when relation data is unavailable.
        """
        if not (relation_data := self.relation_data):
            return None
        redis_host = relation_data.get("hostname")
        # Prefer the leader-reported host from the app databag when present.
        # (The previous try/except KeyError was dead code: dict.get never
        # raises KeyError, and it re-read the property instead of using the
        # walrus-bound value.)
        if app_data := self.app_data:
            redis_host = app_data.get("leader-host", redis_host)
        redis_port = relation_data.get("port")
        return f"redis://{redis_host}:{redis_port}"
class RedisProvides(Object):
    """Provides side of the redis interface."""

    def __init__(self, charm, port):
        """A class implementing the redis provides relation.

        Args:
            charm: the charm instantiating this library; must expose a
                `current_master` attribute with the master's hostname.
            port: the port redis is served on.
        """
        super().__init__(charm, DEFAULT_REALTION_NAME)
        self.framework.observe(charm.on.redis_relation_changed, self._on_relation_changed)
        self._port = port
        self._charm = charm

    def _on_relation_changed(self, event):
        """Publish this unit's connection details in the unit databag."""
        event.relation.data[self.model.unit]["hostname"] = self._get_master_ip()
        event.relation.data[self.model.unit]["port"] = str(self._port)
        # The reactive Redis charm also exposes 'password'. When tackling
        # https://github.com/canonical/redis-k8s/issues/7 add 'password'
        # field so that it matches the exposed interface information from it.
        # event.relation.data[self.unit]['password'] = ''

    def _bind_address(self, event):
        """Convenience function for getting the unit address."""
        relation = self.model.get_relation(event.relation.name, event.relation.id)
        if address := self.model.get_binding(relation).network.bind_address:
            return address
        # Fall back to the application name. Fixed: `ops.Object` has no
        # `app` attribute, so the previous `self.app.name` raised
        # AttributeError whenever this fallback path was taken.
        return self._charm.app.name

    def _get_master_ip(self) -> str:
        """Gets the ip of the current redis master."""
        # `current_master` is a project-specific attribute of the redis charm.
        return socket.gethostbyname(self._charm.current_master)

View File

@ -0,0 +1,347 @@
#!/usr/bin/env python3
# Copyright 2024 Canonical Ltd.
# Licensed under the Apache2.0. See LICENSE file in charm source for details.
"""Library to manage the relation data for the SAML Integrator charm.
This library contains the Requires and Provides classes for handling the relation
between an application and a charm providing the `saml` relation.
It also contains a `SamlRelationData` class to wrap the SAML data that will
be shared via the relation.
### Requirer Charm
```python
from charms.saml_integrator.v0 import SamlDataAvailableEvent, SamlRequires
class SamlRequirerCharm(ops.CharmBase):
def __init__(self, *args):
super().__init__(*args)
self.saml = saml.SamlRequires(self)
self.framework.observe(self.saml.on.saml_data_available, self._handler)
...
def _handler(self, events: SamlDataAvailableEvent) -> None:
...
```
As shown above, the library provides a custom event to handle the scenario in
which new SAML data has been added or updated.
### Provider Charm
Following the previous example, this is an example of the provider charm.
```python
from charms.saml_integrator.v0 import SamlDataAvailableEvent, SamlRequires
class SamlRequirerCharm(ops.CharmBase):
def __init__(self, *args):
super().__init__(*args)
self.saml = SamlRequires(self)
self.framework.observe(self.saml.on.saml_data_available, self._on_saml_data_available)
...
def _on_saml_data_available(self, events: SamlDataAvailableEvent) -> None:
...
def __init__(self, *args):
super().__init__(*args)
self.saml = SamlProvides(self)
```
The SamlProvides object wraps the list of relations into a `relations` property
and provides an `update_relation_data` method to update the relation data by passing
a `SamlRelationData` data object.
Additionally, SamlRelationData can be used to directly parse the relation data with the
class method `from_relation_data`.
"""
# The unique Charmhub library identifier, never change it
LIBID = "511cdfa7de3d43568bf9b512f9c9f89d"
# Increment this major API version when introducing breaking changes
LIBAPI = 0
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 10
# Imports are placed after the Charmhub metadata on purpose.
# pylint: disable=wrong-import-position
import re
import typing
import ops
from pydantic import AnyHttpUrl, BaseModel, Field
from pydantic.tools import parse_obj_as
# Default relation endpoint name for the saml interface.
DEFAULT_RELATION_NAME = "saml"
class SamlEndpoint(BaseModel):
    """Represent a SAML endpoint.
    Attrs:
        name: Endpoint name.
        url: Endpoint URL.
        binding: Endpoint binding.
        response_url: URL to address the response to.
    """
    name: str = Field(..., min_length=1)
    url: typing.Optional[AnyHttpUrl]
    binding: str = Field(..., min_length=1)
    response_url: typing.Optional[AnyHttpUrl]
    def to_relation_data(self) -> typing.Dict[str, str]:
        """Convert an instance of SamlEndpoint to the relation representation.
        Returns:
            Dict containing the representation.
        """
        result: typing.Dict[str, str] = {}
        # Get the HTTP method from the SAML binding
        # (e.g. "...:bindings:HTTP-Redirect" -> "redirect").
        http_method = self.binding.split(":")[-1].split("-")[-1].lower()
        # Transform name into snakecase
        lowercase_name = re.sub(r"(?<!^)(?=[A-Z])", "_", self.name).lower()
        prefix = f"{lowercase_name}_{http_method}_"
        if self.url:
            result[f"{prefix}url"] = str(self.url)
        result[f"{prefix}binding"] = self.binding
        if self.response_url:
            result[f"{prefix}response_url"] = str(self.response_url)
        return result
    @classmethod
    def from_relation_data(cls, relation_data: typing.Dict[str, str]) -> "SamlEndpoint":
        """Initialize a new instance of the SamlEndpoint class from the relation data.
        Args:
            relation_data: the relation data.
        Returns:
            A SamlEndpoint instance.
        """
        url_key = ""
        for key in relation_data:
            # A key per method and endpoint type that is always present
            if key.endswith("_redirect_url") or key.endswith("_post_url"):
                url_key = key
        # NOTE(review): if no *_redirect_url/*_post_url key exists, url_key
        # stays "" and the lookups below raise KeyError — callers are
        # expected to pass data for exactly one endpoint.
        # Get endpoint name from the relation data key
        lowercase_name = "_".join(url_key.split("_")[:-2])
        name = "".join(x.capitalize() for x in lowercase_name.split("_"))
        # Get HTTP method from the relation data key
        http_method = url_key.split("_")[-2]
        prefix = f"{lowercase_name}_{http_method}_"
        return cls(
            name=name,
            url=(
                parse_obj_as(AnyHttpUrl, relation_data[f"{prefix}url"])
                if relation_data[f"{prefix}url"]
                else None
            ),
            binding=relation_data[f"{prefix}binding"],
            response_url=(
                parse_obj_as(AnyHttpUrl, relation_data[f"{prefix}response_url"])
                if f"{prefix}response_url" in relation_data
                else None
            ),
        )
class SamlRelationData(BaseModel):
    """Represent the relation data.
    Attrs:
        entity_id: SAML entity ID.
        metadata_url: URL to the metadata.
        certificates: Tuple of SAML certificates.
        endpoints: Tuple of SAML endpoints.
    """
    entity_id: str = Field(..., min_length=1)
    metadata_url: typing.Optional[AnyHttpUrl]
    certificates: typing.Tuple[str, ...]
    endpoints: typing.Tuple[SamlEndpoint, ...]
    def to_relation_data(self) -> typing.Dict[str, str]:
        """Convert an instance of SamlDataAvailableEvent to the relation representation.
        Returns:
            Dict containing the representation.
        """
        # Certificates are stored as a single comma-separated databag value.
        result = {
            "entity_id": self.entity_id,
            "x509certs": ",".join(self.certificates),
        }
        if self.metadata_url:
            result["metadata_url"] = str(self.metadata_url)
        for endpoint in self.endpoints:
            result.update(endpoint.to_relation_data())
        return result
    @classmethod
    def from_relation_data(cls, relation_data: ops.RelationDataContent) -> "SamlRelationData":
        """Get a SamlRelationData wrapping the relation data.
        Arguments:
            relation_data: the relation data.
        Returns: a SamlRelationData instance with the relation data.
        """
        # mypy is not aware of the relation data being present
        # Group each endpoint's keys by their shared prefix and parse them.
        endpoints = [
            SamlEndpoint.from_relation_data(
                {
                    key2: relation_data.get(key2)  # type: ignore
                    for key2 in relation_data
                    if key2.startswith("_".join(key.split("_")[:-1]))
                }
            )
            for key in relation_data
            if key.endswith("_redirect_url") or key.endswith("_post_url")
        ]
        endpoints.sort(key=lambda ep: ep.name)
        return cls(
            entity_id=relation_data.get("entity_id"),  # type: ignore
            metadata_url=(
                parse_obj_as(AnyHttpUrl, relation_data.get("metadata_url"))
                if relation_data.get("metadata_url")
                else None
            ),  # type: ignore
            certificates=tuple(relation_data.get("x509certs").split(",")),  # type: ignore
            endpoints=tuple(endpoints),
        )
class SamlDataAvailableEvent(ops.RelationEvent):
    """Saml event emitted when relation data has changed.

    Attrs:
        saml_relation_data: the SAML relation data
        entity_id: SAML entity ID.
        metadata_url: URL to the metadata.
        certificates: Tuple containing the SAML certificates.
        endpoints: Tuple containing the SAML endpoints.
    """

    @property
    def saml_relation_data(self) -> SamlRelationData:
        """Get a SamlRelationData for the relation data."""
        assert self.relation.app
        return SamlRelationData.from_relation_data(self.relation.data[self.relation.app])

    @property
    def entity_id(self) -> str:
        """Fetch the SAML entity ID from the relation."""
        return self.saml_relation_data.entity_id

    @property
    def metadata_url(self) -> typing.Optional[str]:
        """Fetch the SAML metadata URL from the relation.

        Returns:
            The metadata URL, or None if the provider did not publish one.
        """
        metadata_url = self.saml_relation_data.metadata_url
        # Bug fix: str(None) yields the literal string "None", contradicting
        # the declared Optional[str] return type; map None through explicitly.
        return str(metadata_url) if metadata_url is not None else None

    @property
    def certificates(self) -> typing.Tuple[str, ...]:
        """Fetch the SAML certificates from the relation."""
        return self.saml_relation_data.certificates

    @property
    def endpoints(self) -> typing.Tuple[SamlEndpoint, ...]:
        """Fetch the SAML endpoints from the relation."""
        return self.saml_relation_data.endpoints
class SamlRequiresEvents(ops.CharmEvents):
    """SAML events.

    This class defines the events that a SAML requirer can emit.

    Attrs:
        saml_data_available: the SamlDataAvailableEvent.
    """

    # Emitted by SamlRequires when the provider publishes relation data.
    saml_data_available = ops.EventSource(SamlDataAvailableEvent)
class SamlRequires(ops.Object):
    """Requirer side of the SAML relation.

    Attrs:
        on: events the provider can emit.
    """

    on = SamlRequiresEvents()

    def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None:
        """Construct.

        Args:
            charm: the provider charm.
            relation_name: the relation name.
        """
        super().__init__(charm, relation_name)
        self.charm = charm
        self.relation_name = relation_name
        self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed)

    def _on_relation_changed(self, event: ops.RelationChangedEvent) -> None:
        """Event emitted when the relation has changed.

        Args:
            event: event triggering this handler.
        """
        assert event.relation.app
        # Only re-emit once the provider has actually published data.
        if event.relation.data[event.relation.app]:
            self.on.saml_data_available.emit(event.relation, app=event.app, unit=event.unit)

    def get_relation_data(self) -> typing.Optional[SamlRelationData]:
        """Retrieve the relation data.

        Returns:
            SamlRelationData: the relation data, or None when the relation is
            not established or the provider has not published data yet.
        """
        relation = self.model.get_relation(self.relation_name)
        if not relation or not relation.app or not relation.data[relation.app]:
            return None
        return SamlRelationData.from_relation_data(relation.data[relation.app])
class SamlProvides(ops.Object):
    """Provider side of the SAML relation.

    Attrs:
        relations: list of charm relations.
    """

    def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None:
        """Construct.

        Args:
            charm: the provider charm.
            relation_name: the relation name.
        """
        super().__init__(charm, relation_name)
        self.charm = charm
        self.relation_name = relation_name

    @property
    def relations(self) -> typing.List[ops.Relation]:
        """The list of Relation instances associated with this relation_name.

        Returns:
            List of relations to this charm.
        """
        return [relation for relation in self.model.relations[self.relation_name]]

    def update_relation_data(self, relation: ops.Relation, saml_data: SamlRelationData) -> None:
        """Update the relation data.

        Args:
            relation: the relation for which to update the data.
            saml_data: a SamlRelationData instance wrapping the data to be updated.
        """
        databag = relation.data[self.charm.model.app]
        databag.update(saml_data.to_relation_data())

View File

@ -0,0 +1,395 @@
# Copyright 2025 Canonical Ltd.
# Licensed under the Apache2.0. See LICENSE file in charm source for details.
"""Library to manage the integration with the SMTP Integrator charm.
This library contains the Requires and Provides classes for handling the integration
between an application and a charm providing the `smtp` and `smtp-legacy` integrations.
If the requirer charm supports secrets, the preferred approach is to use the `smtp`
relation to leverage them.
This library also contains a `SmtpRelationData` class to wrap the SMTP data that will
be shared via the integration.
### Requirer Charm
```python
from charms.smtp_integrator.v0.smtp import SmtpDataAvailableEvent, SmtpRequires
class SmtpRequirerCharm(ops.CharmBase):
def __init__(self, *args):
super().__init__(*args)
self.smtp = smtp.SmtpRequires(self)
self.framework.observe(self.smtp.on.smtp_data_available, self._handler)
...
def _handler(self, events: SmtpDataAvailableEvent) -> None:
...
```
As shown above, the library provides a custom event to handle the scenario in
which new SMTP data has been added or updated.
### Provider Charm
Following the previous example, this is an example of the provider charm.
```python
from charms.smtp_integrator.v0.smtp import SmtpProvides
class SmtpProviderCharm(ops.CharmBase):
def __init__(self, *args):
super().__init__(*args)
self.smtp = SmtpProvides(self)
...
```
The SmtpProvides object wraps the list of relations into a `relations` property
and provides an `update_relation_data` method to update the relation data by passing
a `SmtpRelationData` data object.
```python
class SmtpProviderCharm(ops.CharmBase):
...
def _on_config_changed(self, _) -> None:
for relation in self.model.relations[self.smtp.relation_name]:
self.smtp.update_relation_data(relation, self._get_smtp_data())
```
"""
# The unique Charmhub library identifier, never change it
LIBID = "09583c2f9c1d4c0f9a40244cfc20b0c2"
# Increment this major API version when introducing breaking changes
LIBAPI = 0
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 15
PYDEPS = ["pydantic>=2"]
# pylint: disable=wrong-import-position
import itertools
import logging
import typing
from ast import literal_eval
from enum import Enum
from typing import Dict, Optional
import ops
from pydantic import BaseModel, Field, ValidationError
logger = logging.getLogger(__name__)
DEFAULT_RELATION_NAME = "smtp"
LEGACY_RELATION_NAME = "smtp-legacy"
class SmtpError(Exception):
    """Base class (common ancestor) for all SMTP-related exceptions in this library."""
class SecretError(SmtpError):
    """Raised when a Juju secret referenced by the relation cannot be consumed."""
class TransportSecurity(str, Enum):
    """Represent the transport security values.

    Attributes:
        NONE: none
        STARTTLS: starttls
        TLS: tls
    """

    # The values are the exact strings exchanged over the relation databag.
    NONE = "none"
    STARTTLS = "starttls"
    TLS = "tls"
class AuthType(str, Enum):
    """Represent the auth type values.

    Attributes:
        NONE: none
        NOT_PROVIDED: not_provided
        PLAIN: plain
    """

    # The values are the exact strings exchanged over the relation databag.
    NONE = "none"
    NOT_PROVIDED = "not_provided"
    PLAIN = "plain"
class SmtpRelationData(BaseModel):
    """Represent the relation data.

    Attributes:
        host: The hostname or IP address of the outgoing SMTP relay.
        port: The port of the outgoing SMTP relay.
        user: The SMTP AUTH user to use for the outgoing SMTP relay.
        password: The SMTP AUTH password to use for the outgoing SMTP relay.
        password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored.
        auth_type: The type used to authenticate with the SMTP relay.
        transport_security: The security protocol to use for the outgoing SMTP relay.
        domain: The domain used by the emails sent from SMTP relay.
        skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay.
    """

    host: str = Field(..., min_length=1)
    # Bug fix: valid TCP ports are 1-65535; the previous bound (le=65536)
    # accepted the out-of-range port 65536.
    port: int = Field(..., ge=1, le=65535)
    user: Optional[str] = None
    password: Optional[str] = None
    password_id: Optional[str] = None
    auth_type: AuthType
    transport_security: TransportSecurity
    domain: Optional[str] = None
    skip_ssl_verify: Optional[bool] = False

    def to_relation_data(self) -> Dict[str, str]:
        """Convert an instance of SmtpRelationData to the relation representation.

        Returns:
            Dict containing the representation.
        """
        # Databag values must be strings; optional fields are only included
        # when set so the requirer can distinguish absent values.
        result = {
            "host": str(self.host),
            "port": str(self.port),
            "auth_type": self.auth_type.value,
            "transport_security": self.transport_security.value,
            "skip_ssl_verify": str(self.skip_ssl_verify),
        }
        if self.domain:
            result["domain"] = self.domain
        if self.user:
            result["user"] = self.user
        if self.password:
            result["password"] = self.password
        if self.password_id:
            result["password_id"] = self.password_id
        return result
class SmtpDataAvailableEvent(ops.RelationEvent):
    """Smtp event emitted when relation data has changed.

    Attributes:
        host: The hostname or IP address of the outgoing SMTP relay.
        port: The port of the outgoing SMTP relay.
        user: The SMTP AUTH user to use for the outgoing SMTP relay.
        password: The SMTP AUTH password to use for the outgoing SMTP relay.
        password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored.
        auth_type: The type used to authenticate with the SMTP relay.
        transport_security: The security protocol to use for the outgoing SMTP relay.
        domain: The domain used by the emails sent from SMTP relay.
        skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay.
    """

    @property
    def host(self) -> str:
        """Fetch the SMTP host from the relation."""
        assert self.relation.app
        return typing.cast(str, self.relation.data[self.relation.app].get("host"))

    @property
    def port(self) -> int:
        """Fetch the SMTP port from the relation."""
        assert self.relation.app
        return int(typing.cast(str, self.relation.data[self.relation.app].get("port")))

    @property
    def user(self) -> str:
        """Fetch the SMTP user from the relation."""
        assert self.relation.app
        return typing.cast(str, self.relation.data[self.relation.app].get("user"))

    @property
    def password(self) -> str:
        """Fetch the SMTP password from the relation."""
        assert self.relation.app
        return typing.cast(str, self.relation.data[self.relation.app].get("password"))

    @property
    def password_id(self) -> str:
        """Fetch the ID of the secret storing the SMTP password from the relation."""
        assert self.relation.app
        return typing.cast(str, self.relation.data[self.relation.app].get("password_id"))

    @property
    def auth_type(self) -> AuthType:
        """Fetch the SMTP auth type from the relation."""
        assert self.relation.app
        return AuthType(self.relation.data[self.relation.app].get("auth_type"))

    @property
    def transport_security(self) -> TransportSecurity:
        """Fetch the SMTP transport security protocol from the relation."""
        assert self.relation.app
        return TransportSecurity(self.relation.data[self.relation.app].get("transport_security"))

    @property
    def domain(self) -> str:
        """Fetch the SMTP domain from the relation."""
        assert self.relation.app
        return typing.cast(str, self.relation.data[self.relation.app].get("domain"))

    @property
    def skip_ssl_verify(self) -> bool:
        """Fetch the skip_ssl_verify flag from the relation.

        The databag stores the flag as the string produced by str(bool), so
        literal_eval turns "True"/"False" back into a bool.
        """
        assert self.relation.app
        return literal_eval(
            typing.cast(str, self.relation.data[self.relation.app].get("skip_ssl_verify"))
        )
class SmtpRequiresEvents(ops.CharmEvents):
    """SMTP events.

    This class defines the events that a SMTP requirer can emit.

    Attributes:
        smtp_data_available: the SmtpDataAvailableEvent.
    """

    # Emitted by SmtpRequires once the provider data has been validated.
    smtp_data_available = ops.EventSource(SmtpDataAvailableEvent)
class SmtpRequires(ops.Object):
    """Requirer side of the SMTP relation.

    Attributes:
        on: events the provider can emit.
    """

    on = SmtpRequiresEvents()

    def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None:
        """Construct.

        Args:
            charm: the provider charm.
            relation_name: the relation name.
        """
        super().__init__(charm, relation_name)
        self.charm = charm
        self.relation_name = relation_name
        self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed)

    def get_relation_data(self) -> Optional[SmtpRelationData]:
        """Retrieve the relation data.

        Returns:
            SmtpRelationData: the relation data, or None if the relation is
            not established.
        """
        relation = self.model.get_relation(self.relation_name)
        return self._get_relation_data_from_relation(relation) if relation else None

    def _get_relation_data_from_relation(
        self, relation: ops.Relation
    ) -> Optional[SmtpRelationData]:
        """Retrieve the relation data.

        Args:
            relation: the relation to retrieve the data from.

        Returns:
            SmtpRelationData: the relation data, or None if the databag is empty.

        Raises:
            SecretError: if the password secret cannot be consumed.
        """
        assert relation.app
        relation_data = relation.data[relation.app]
        if not relation_data:
            return None
        # Prefer a plain-text password; otherwise resolve the Juju secret
        # referenced by password_id (the secrets-based `smtp` flavour).
        password = relation_data.get("password")
        if password is None and relation_data.get("password_id"):
            try:
                password = (
                    self.model.get_secret(id=relation_data.get("password_id"))
                    .get_content()
                    .get("password")
                )
            except ops.model.ModelError as exc:
                raise SecretError(
                    f"Could not consume secret {relation_data.get('password_id')}"
                ) from exc
        return SmtpRelationData(
            host=typing.cast(str, relation_data.get("host")),
            port=typing.cast(int, relation_data.get("port")),
            user=relation_data.get("user"),
            password=password,
            password_id=relation_data.get("password_id"),
            auth_type=AuthType(relation_data.get("auth_type")),
            transport_security=TransportSecurity(relation_data.get("transport_security")),
            domain=relation_data.get("domain"),
            skip_ssl_verify=typing.cast(bool, relation_data.get("skip_ssl_verify")),
        )

    def _is_relation_data_valid(self, relation: ops.Relation) -> bool:
        """Validate the relation data.

        Args:
            relation: the relation to validate.

        Returns:
            true: if the relation data is valid.
        """
        try:
            _ = self._get_relation_data_from_relation(relation)
            return True
        except ValidationError as ex:
            error_fields = set(
                itertools.chain.from_iterable(error["loc"] for error in ex.errors())
            )
            error_field_str = " ".join(f"{f}" for f in error_fields)
            # Typo fix: message previously read "Error validation the relation data".
            logger.warning("Error validating the relation data %s", error_field_str)
            return False

    def _on_relation_changed(self, event: ops.RelationChangedEvent) -> None:
        """Event emitted when the relation has changed.

        Args:
            event: event triggering this handler.
        """
        assert event.relation.app
        relation_data = event.relation.data[event.relation.app]
        if relation_data:
            # Bug fix: use .get() instead of indexing — the provider may
            # publish data incrementally and a missing key must not raise
            # KeyError here; validation below reports the problem instead.
            if relation_data.get("auth_type") == AuthType.NONE.value:
                logger.warning('Insecure setting: auth_type has a value "none"')
            if relation_data.get("transport_security") == TransportSecurity.NONE.value:
                logger.warning('Insecure setting: transport_security has value "none"')
            if self._is_relation_data_valid(event.relation):
                self.on.smtp_data_available.emit(event.relation, app=event.app, unit=event.unit)
class SmtpProvides(ops.Object):
    """Provider side of the SMTP relation."""

    def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None:
        """Construct.

        Args:
            charm: the provider charm.
            relation_name: the relation name.
        """
        super().__init__(charm, relation_name)
        self.charm = charm
        self.relation_name = relation_name

    def update_relation_data(self, relation: ops.Relation, smtp_data: SmtpRelationData) -> None:
        """Update the relation data.

        Args:
            relation: the relation for which to update the data.
            smtp_data: a SmtpRelationData instance wrapping the data to be updated.
        """
        payload = smtp_data.to_relation_data()
        # Warn about (but still publish) insecure configurations.
        insecure_auth = payload["auth_type"] == AuthType.NONE.value
        if insecure_auth:
            logger.warning('Insecure setting: auth_type has a value "none"')
        insecure_transport = payload["transport_security"] == TransportSecurity.NONE.value
        if insecure_transport:
            logger.warning('Insecure setting: transport_security has value "none"')
        relation.data[self.charm.model.app].update(payload)

View File

@ -0,0 +1,994 @@
# Copyright 2024 Canonical Ltd.
# See LICENSE file for licensing details.
"""## Overview.
This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a
tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm
may maintain the same interface and be backward compatible with all currently integrated charms.
## Requirer Library Usage
Charms seeking to push traces to Tempo must do so using the `TracingEndpointRequirer`
object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer`
object only requires instantiating it, typically in the constructor of your charm. The
`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint
is exposed by the Tempo charm, and a list of protocols it intends to send traces with.
This relation must use the `tracing` interface.
The `TracingEndpointRequirer` object may be instantiated as follows
from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer
def __init__(self, *args):
super().__init__(*args)
# ...
self.tracing = TracingEndpointRequirer(self,
protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift']
)
# ...
Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the
parent charm.
Alternatively to providing the list of requested protocols at init time, the charm can do it at
any point in time by calling the
`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method.
Using this method also allows you to use per-relation protocols.
Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling
`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example:
- `otlp_grpc`
- `otlp_http`
- `zipkin`
- `tempo`
If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time,
the library will raise an error.
We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests
go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down.
## Provider Library Usage
The `TracingEndpointProvider` object may be used by charms to manage relations with their
trace sources. For this purpose, a Tempo-like charm needs to do two things
1. Instantiate the `TracingEndpointProvider` object by providing it a
reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm
uses to interact with its trace sources. This relation must conform to the `tracing` interface
and it is strongly recommended that this relation be named `tracing` which is its
default value.
For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as
follows
from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider
def __init__(self, *args):
super().__init__(*args)
# ...
self.tracing = TracingEndpointProvider(self)
# ...
""" # noqa: W505
import enum
import json
import logging
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Dict,
List,
Literal,
MutableMapping,
Optional,
Sequence,
Tuple,
Union,
cast,
)
import pydantic
from ops.charm import (
CharmBase,
CharmEvents,
RelationBrokenEvent,
RelationEvent,
RelationRole,
)
from ops.framework import EventSource, Object
from ops.model import ModelError, Relation
from pydantic import BaseModel, Field
# The unique Charmhub library identifier, never change it
LIBID = "d2f02b1f8d1244b5989fd55bc3a28943"
# Increment this major API version when introducing breaking changes
LIBAPI = 0
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 9
PYDEPS = ["pydantic"]
logger = logging.getLogger(__name__)
DEFAULT_RELATION_NAME = "tracing"
RELATION_INTERFACE_NAME = "tracing"
# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8
ReceiverProtocol = Literal[
"zipkin",
"otlp_grpc",
"otlp_http",
"jaeger_grpc",
"jaeger_thrift_http",
]
RawReceiver = Tuple[ReceiverProtocol, str]
# Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available),
# (secured, if available) resolvable server url.
BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"}
class TransportProtocolType(str, enum.Enum):
    """Receiver transport type: the transport protocol a receiver listens on."""

    http = "http"
    grpc = "grpc"
# A mapping between telemetry protocols and their corresponding transport protocol.
receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = {
    "zipkin": TransportProtocolType.http,
    "otlp_grpc": TransportProtocolType.grpc,
    "otlp_http": TransportProtocolType.http,
    "jaeger_thrift_http": TransportProtocolType.http,
    "jaeger_grpc": TransportProtocolType.grpc,
}
class TracingError(Exception):
    """Base class for all custom errors raised by this tracing library."""
class NotReadyError(TracingError):
    """Raised by the provider wrapper if a requirer hasn't published the required data (yet)."""
class ProtocolNotRequestedError(TracingError):
    """Raised if the user attempts to obtain an endpoint for a protocol that was not requested."""
class DataValidationError(TracingError):
    """Raised when validation of the tracing relation databag contents fails."""
class DataAccessPermissionError(TracingError):
    """Raised when follower units attempt leader-only operations."""
class AmbiguousRelationUsageError(TracingError):
    """Raised when one wrongly assumes that there can only be one relation on an endpoint."""
# Two DatabagModel implementations are provided so the library works with both
# pydantic v1 and v2 APIs; the branch is chosen once at import time.
if int(pydantic.version.VERSION.split(".")[0]) < 2:

    class DatabagModel(BaseModel):  # type: ignore
        """Base databag model."""

        class Config:
            """Pydantic config."""

            # ignore any extra fields in the databag
            extra = "ignore"
            """Ignore any extra fields in the databag."""
            allow_population_by_field_name = True
            """Allow instantiating this class by field name (instead of forcing alias)."""

        # If set, the whole model is stored as one JSON blob under this key.
        _NEST_UNDER = None

        @classmethod
        def load(cls, databag: MutableMapping):
            """Load this model from a Juju databag."""
            if cls._NEST_UNDER:
                return cls.parse_obj(json.loads(databag[cls._NEST_UNDER]))
            try:
                data = {
                    k: json.loads(v)
                    for k, v in databag.items()
                    # Don't attempt to parse model-external values
                    if k in {f.alias for f in cls.__fields__.values()}
                }
            except json.JSONDecodeError as e:
                msg = f"invalid databag contents: expecting json. {databag}"
                logger.error(msg)
                raise DataValidationError(msg) from e
            try:
                return cls.parse_raw(json.dumps(data))  # type: ignore
            except pydantic.ValidationError as e:
                msg = f"failed to validate databag: {databag}"
                logger.debug(msg, exc_info=True)
                raise DataValidationError(msg) from e

        def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
            """Write the contents of this model to Juju databag.

            :param databag: the databag to write the data to.
            :param clear: ensure the databag is cleared before writing it.
            """
            if clear and databag:
                databag.clear()
            if databag is None:
                databag = {}
            if self._NEST_UNDER:
                databag[self._NEST_UNDER] = self.json(by_alias=True)
                return databag
            dct = self.dict()
            # Each field is JSON-encoded individually under its alias (or name).
            for key, field in self.__fields__.items():  # type: ignore
                value = dct[key]
                databag[field.alias or key] = json.dumps(value)
            return databag

else:
    from pydantic import ConfigDict

    class DatabagModel(BaseModel):
        """Base databag model."""

        model_config = ConfigDict(
            # ignore any extra fields in the databag
            extra="ignore",
            # Allow instantiating this class by field name (instead of forcing alias).
            populate_by_name=True,
            # Custom config key: whether to nest the whole datastructure (as json)
            # under a field or spread it out at the toplevel.
            _NEST_UNDER=None,  # type: ignore
        )
        """Pydantic config."""

        @classmethod
        def load(cls, databag: MutableMapping):
            """Load this model from a Juju databag."""
            nest_under = cls.model_config.get("_NEST_UNDER")  # type: ignore
            if nest_under:
                return cls.model_validate(json.loads(databag[nest_under]))  # type: ignore
            try:
                data = {
                    k: json.loads(v)
                    for k, v in databag.items()
                    # Don't attempt to parse model-external values
                    if k in {(f.alias or n) for n, f in cls.__fields__.items()}
                }
            except json.JSONDecodeError as e:
                msg = f"invalid databag contents: expecting json. {databag}"
                logger.error(msg)
                raise DataValidationError(msg) from e
            try:
                return cls.model_validate_json(json.dumps(data))  # type: ignore
            except pydantic.ValidationError as e:
                msg = f"failed to validate databag: {databag}"
                logger.debug(msg, exc_info=True)
                raise DataValidationError(msg) from e

        def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
            """Write the contents of this model to Juju databag.

            :param databag: the databag to write the data to.
            :param clear: ensure the databag is cleared before writing it.
            """
            if clear and databag:
                databag.clear()
            if databag is None:
                databag = {}
            nest_under = self.model_config.get("_NEST_UNDER")
            if nest_under:
                databag[nest_under] = self.model_dump_json(  # type: ignore
                    by_alias=True,
                    # skip keys whose values are default
                    exclude_defaults=True,
                )
                return databag
            dct = self.model_dump()  # type: ignore
            # Default-valued fields are skipped to keep the databag minimal.
            for key, field in self.model_fields.items():  # type: ignore
                value = dct[key]
                if value == field.default:
                    continue
                databag[field.alias or key] = json.dumps(value)
            return databag
# todo use models from charm-relation-interfaces
# As with DatabagModel above, ProtocolType is defined per pydantic major version.
if int(pydantic.version.VERSION.split(".")[0]) < 2:

    class ProtocolType(BaseModel):  # type: ignore
        """Protocol Type."""

        class Config:
            """Pydantic config."""

            use_enum_values = True
            """Allow serializing enum values."""

        name: str = Field(
            ...,
            description="Receiver protocol name. What protocols are supported (and what they are called) "
            "may differ per provider.",
            examples=["otlp_grpc", "otlp_http", "tempo_http"],
        )
        type: TransportProtocolType = Field(
            ...,
            description="The transport protocol used by this receiver.",
            examples=["http", "grpc"],
        )

else:

    class ProtocolType(BaseModel):
        """Protocol Type."""

        model_config = ConfigDict(  # type: ignore
            # Allow serializing enum values.
            use_enum_values=True
        )
        """Pydantic config."""

        name: str = Field(
            ...,
            description="Receiver protocol name. What protocols are supported (and what they are called) "
            "may differ per provider.",
            examples=["otlp_grpc", "otlp_http", "tempo_http"],
        )
        type: TransportProtocolType = Field(
            ...,
            description="The transport protocol used by this receiver.",
            examples=["http", "grpc"],
        )
class Receiver(BaseModel):
    """Specification of an active receiver: its protocol and reachable URL."""

    protocol: ProtocolType = Field(..., description="Receiver protocol name and type.")
    url: str = Field(
        ...,
        description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL.
        Otherwise, it would be the service's fqdn or internal IP.
        If the protocol type is grpc, the url will not contain a scheme.""",
        examples=[
            "http://traefik_address:2331",
            "https://traefik_address:2331",
            "http://tempo_public_ip:2331",
            "https://tempo_public_ip:2331",
            "tempo_public_ip:2331",
        ],
    )
class TracingProviderAppData(DatabagModel):  # noqa: D101 # type: ignore
    """Application databag model for the tracing provider."""

    # Serialized field-by-field as JSON by DatabagModel.dump.
    receivers: List[Receiver] = Field(
        ...,
        description="List of all receivers enabled on the tracing provider.",
    )
class TracingRequirerAppData(DatabagModel):  # noqa: D101 # type: ignore
    """Application databag model for the tracing requirer."""

    receivers: List[ReceiverProtocol]
    """Requested receivers."""
class _AutoSnapshotEvent(RelationEvent):
    """Event base that auto-snapshots/restores the attributes it declares.

    Subclasses list required positional attribute names in ``__args__`` and
    optional ones (with defaults) in ``__optional_kwargs__``.
    """

    __args__: Tuple[str, ...] = ()
    __optional_kwargs__: Dict[str, Any] = {}

    @classmethod
    def __attrs__(cls):
        # All snapshot-managed attribute names, required ones first.
        return cls.__args__ + tuple(cls.__optional_kwargs__.keys())

    def __init__(self, handle, relation, *args, **kwargs):
        super().__init__(handle, relation)
        # Required attributes map positionally onto __args__.
        if not len(self.__args__) == len(args):
            raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args)))
        for attr, obj in zip(self.__args__, args):
            setattr(self, attr, obj)
        for attr, default in self.__optional_kwargs__.items():
            obj = kwargs.get(attr, default)
            setattr(self, attr, obj)

    def snapshot(self) -> dict:
        # Persist managed attributes so ops can re-create the event after dispatch.
        dct = super().snapshot()
        for attr in self.__attrs__():
            obj = getattr(self, attr)
            try:
                dct[attr] = obj
            except ValueError as e:
                raise ValueError(
                    "cannot automagically serialize {}: "
                    "override this method and do it "
                    "manually.".format(obj)
                ) from e
        return dct

    def restore(self, snapshot: dict) -> None:
        super().restore(snapshot)
        for attr, obj in snapshot.items():
            setattr(self, attr, obj)
class RelationNotFoundError(Exception):
    """Raised if no relation with the given name is found."""

    def __init__(self, relation_name: str):
        message = "No relation named '{}' found".format(relation_name)
        self.relation_name = relation_name
        self.message = message
        super().__init__(message)
class RelationInterfaceMismatchError(Exception):
    """Raised if the relation with the given name has an unexpected interface."""

    def __init__(
        self,
        relation_name: str,
        expected_relation_interface: str,
        actual_relation_interface: str,
    ):
        self.relation_name = relation_name
        self.expected_relation_interface = expected_relation_interface
        self.actual_relation_interface = actual_relation_interface
        message = "The '{}' relation has '{}' as interface rather than the expected '{}'".format(
            relation_name, actual_relation_interface, expected_relation_interface
        )
        self.message = message
        super().__init__(message)
class RelationRoleMismatchError(Exception):
    """Raised if the relation with the given name has a different role than expected."""

    def __init__(
        self,
        relation_name: str,
        expected_relation_role: RelationRole,
        actual_relation_role: RelationRole,
    ):
        self.relation_name = relation_name
        # Bug fix: this attribute was misnamed `expected_relation_interface`
        # (copy-paste from RelationInterfaceMismatchError). Provide the
        # correctly named attribute while keeping the old name so existing
        # consumers do not break.
        self.expected_relation_role = expected_relation_role
        self.expected_relation_interface = expected_relation_role
        self.actual_relation_role = actual_relation_role
        self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format(
            relation_name, repr(actual_relation_role), repr(expected_relation_role)
        )
        super().__init__(self.message)
def _validate_relation_by_interface_and_direction(
    charm: CharmBase,
    relation_name: str,
    expected_relation_interface: str,
    expected_relation_role: RelationRole,
):
    """Validate a relation.

    Verifies that the `relation_name` provided: (1) exists in metadata.yaml,
    (2) declares as interface the interface name passed as `relation_interface`
    and (3) has the right "direction", i.e., it is a relation that `charm`
    provides or requires.

    Args:
        charm: a `CharmBase` object to scan for the matching relation.
        relation_name: the name of the relation to be verified.
        expected_relation_interface: the interface name to be matched by the
            relation named `relation_name`.
        expected_relation_role: whether the `relation_name` must be either
            provided or required by `charm`.

    Raises:
        RelationNotFoundError: If there is no relation in the charm's metadata.yaml
            with the same name as provided via `relation_name` argument.
        RelationInterfaceMismatchError: The relation with the same name as provided
            via `relation_name` argument does not have the same relation interface
            as specified via the `expected_relation_interface` argument.
        RelationRoleMismatchError: If the relation with the same name as provided
            via `relation_name` argument does not have the same role as specified
            via the `expected_relation_role` argument.
    """
    # (1) the relation must be declared in metadata.yaml at all.
    if relation_name not in charm.meta.relations:
        raise RelationNotFoundError(relation_name)
    relation = charm.meta.relations[relation_name]

    # (2) the declared interface must match the expected one.
    # fixme: why do we need to cast here?
    declared_interface = cast(str, relation.interface_name)
    if declared_interface != expected_relation_interface:
        raise RelationInterfaceMismatchError(
            relation_name, expected_relation_interface, declared_interface
        )

    # (3) the relation must appear on the expected side (provides/requires).
    if expected_relation_role is RelationRole.provides:
        if relation_name not in charm.meta.provides:
            raise RelationRoleMismatchError(
                relation_name, RelationRole.provides, RelationRole.requires
            )
        return
    if expected_relation_role is RelationRole.requires:
        if relation_name not in charm.meta.requires:
            raise RelationRoleMismatchError(
                relation_name, RelationRole.requires, RelationRole.provides
            )
        return
    raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role))
class RequestEvent(RelationEvent):
"""Event emitted when a remote requests a tracing endpoint."""
@property
def requested_receivers(self) -> List[ReceiverProtocol]:
"""List of receiver protocols that have been requested."""
relation = self.relation
app = relation.app
if not app:
raise NotReadyError("relation.app is None")
return TracingRequirerAppData.load(relation.data[app]).receivers
class BrokenEvent(RelationBrokenEvent):
"""Event emitted when a relation on tracing is broken."""
class TracingEndpointProviderEvents(CharmEvents):
"""TracingEndpointProvider events."""
request = EventSource(RequestEvent)
broken = EventSource(BrokenEvent)
class TracingEndpointProvider(Object):
"""Class representing a trace receiver service."""
on = TracingEndpointProviderEvents() # type: ignore
def __init__(
self,
charm: CharmBase,
external_url: Optional[str] = None,
relation_name: str = DEFAULT_RELATION_NAME,
):
"""Initialize.
Args:
charm: a `CharmBase` instance that manages this instance of the Tempo service.
external_url: external address of the node hosting the tempo server,
if an ingress is present.
relation_name: an optional string name of the relation between `charm`
and the Tempo charmed service. The default is "tracing".
Raises:
RelationNotFoundError: If there is no relation in the charm's metadata.yaml
with the same name as provided via `relation_name` argument.
RelationInterfaceMismatchError: The relation with the same name as provided
via `relation_name` argument does not have the `tracing` relation
interface.
RelationRoleMismatchError: If the relation with the same name as provided
via `relation_name` argument does not have the `RelationRole.requires`
role.
"""
_validate_relation_by_interface_and_direction(
charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides
)
super().__init__(charm, relation_name + "tracing-provider")
self._charm = charm
self._external_url = external_url
self._relation_name = relation_name
self.framework.observe(
self._charm.on[relation_name].relation_joined, self._on_relation_event
)
self.framework.observe(
self._charm.on[relation_name].relation_created, self._on_relation_event
)
self.framework.observe(
self._charm.on[relation_name].relation_changed, self._on_relation_event
)
self.framework.observe(
self._charm.on[relation_name].relation_broken, self._on_relation_broken_event
)
def _on_relation_broken_event(self, e: RelationBrokenEvent):
"""Handle relation broken events."""
self.on.broken.emit(e.relation)
def _on_relation_event(self, e: RelationEvent):
"""Handle relation created/joined/changed events."""
if self.is_requirer_ready(e.relation):
self.on.request.emit(e.relation)
def is_requirer_ready(self, relation: Relation):
"""Attempt to determine if requirer has already populated app data."""
try:
self._get_requested_protocols(relation)
except NotReadyError:
return False
return True
@staticmethod
def _get_requested_protocols(relation: Relation):
app = relation.app
if not app:
raise NotReadyError("relation.app is None")
try:
databag = TracingRequirerAppData.load(relation.data[app])
except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError):
logger.info(f"relation {relation} is not ready to talk tracing")
raise NotReadyError()
return databag.receivers
def requested_protocols(self):
"""All receiver protocols that have been requested by our related apps."""
requested_protocols = set()
for relation in self.relations:
try:
protocols = self._get_requested_protocols(relation)
except NotReadyError:
continue
requested_protocols.update(protocols)
return requested_protocols
@property
def relations(self) -> List[Relation]:
"""All relations active on this endpoint."""
return self._charm.model.relations[self._relation_name]
def publish_receivers(self, receivers: Sequence[RawReceiver]):
"""Let all requirers know that these receivers are active and listening."""
if not self._charm.unit.is_leader():
raise RuntimeError("only leader can do this")
for relation in self.relations:
try:
TracingProviderAppData(
receivers=[
Receiver(
url=url,
protocol=ProtocolType(
name=protocol,
type=receiver_protocol_to_transport_protocol[protocol],
),
)
for protocol, url in receivers
],
).dump(relation.data[self._charm.app])
except ModelError as e:
# args are bytes
msg = e.args[0]
if isinstance(msg, bytes):
if msg.startswith(
b"ERROR cannot read relation application settings: permission denied"
):
logger.error(
f"encountered error {e} while attempting to update_relation_data."
f"The relation must be gone."
)
continue
raise
class EndpointRemovedEvent(RelationBrokenEvent):
"""Event representing a change in one of the receiver endpoints."""
class EndpointChangedEvent(_AutoSnapshotEvent):
"""Event representing a change in one of the receiver endpoints."""
__args__ = ("_receivers",)
if TYPE_CHECKING:
_receivers = [] # type: List[dict]
@property
def receivers(self) -> List[Receiver]:
"""Cast receivers back from dict."""
return [Receiver(**i) for i in self._receivers]
class TracingEndpointRequirerEvents(CharmEvents):
"""TracingEndpointRequirer events."""
endpoint_changed = EventSource(EndpointChangedEvent)
endpoint_removed = EventSource(EndpointRemovedEvent)
class TracingEndpointRequirer(Object):
"""A tracing endpoint for Tempo."""
on = TracingEndpointRequirerEvents() # type: ignore
def __init__(
self,
charm: CharmBase,
relation_name: str = DEFAULT_RELATION_NAME,
protocols: Optional[List[ReceiverProtocol]] = None,
):
"""Construct a tracing requirer for a Tempo charm.
If your application supports pushing traces to a distributed tracing backend, the
`TracingEndpointRequirer` object enables your charm to easily access endpoint information
exchanged over a `tracing` relation interface.
Args:
charm: a `CharmBase` object that manages this
`TracingEndpointRequirer` object. Typically, this is `self` in the instantiating
class.
relation_name: an optional string name of the relation between `charm`
and the Tempo charmed service. The default is "tracing". It is strongly
advised not to change the default, so that people deploying your charm will have a
consistent experience with all other charms that provide tracing endpoints.
protocols: optional list of protocols that the charm intends to send traces with.
The provider will enable receivers for these and only these protocols,
so be sure to enable all protocols the charm or its workload are going to need.
Raises:
RelationNotFoundError: If there is no relation in the charm's metadata.yaml
with the same name as provided via `relation_name` argument.
RelationInterfaceMismatchError: The relation with the same name as provided
via `relation_name` argument does not have the `tracing` relation
interface.
RelationRoleMismatchError: If the relation with the same name as provided
via `relation_name` argument does not have the `RelationRole.provides`
role.
"""
_validate_relation_by_interface_and_direction(
charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires
)
super().__init__(charm, relation_name)
self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1
self._charm = charm
self._relation_name = relation_name
events = self._charm.on[self._relation_name]
self.framework.observe(events.relation_changed, self._on_tracing_relation_changed)
self.framework.observe(events.relation_broken, self._on_tracing_relation_broken)
if protocols and self._charm.unit.is_leader():
# we can't be sure that the current event context supports read/writing relation data for this relation,
# so we catch ModelErrors. This is because we're doing this in init.
try:
self.request_protocols(protocols)
except ModelError as e:
logger.error(
f"encountered error {e} while attempting to request_protocols."
f"The relation must be gone."
)
pass
def request_protocols(
self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None
):
"""Publish the list of protocols which the provider should activate."""
# todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here?
relations = [relation] if relation else self.relations
if not protocols:
# empty sequence
raise ValueError(
"You need to pass a nonempty sequence of protocols to `request_protocols`."
)
if self._charm.unit.is_leader():
for relation in relations:
TracingRequirerAppData(
receivers=list(protocols),
).dump(relation.data[self._charm.app])
else:
raise DataAccessPermissionError("only leaders can request_protocols")
@property
def relations(self) -> List[Relation]:
"""The tracing relations associated with this endpoint."""
return self._charm.model.relations[self._relation_name]
@property
def _relation(self) -> Optional[Relation]:
"""If this wraps a single endpoint, the relation bound to it, if any."""
if not self._is_single_endpoint:
objname = type(self).__name__
raise AmbiguousRelationUsageError(
f"This {objname} wraps a {self._relation_name} endpoint that has "
"limit != 1. We can't determine what relation, of the possibly many, you are "
f"talking about. Please pass a relation instance while calling {objname}, "
"or set limit=1 in the charm metadata."
)
relations = self.relations
return relations[0] if relations else None
def is_ready(self, relation: Optional[Relation] = None):
"""Is this endpoint ready?"""
relation = relation or self._relation
if not relation:
logger.debug(f"no relation on {self._relation_name !r}: tracing not ready")
return False
if relation.data is None:
logger.error(f"relation data is None for {relation}")
return False
if not relation.app:
logger.error(f"{relation} event received but there is no relation.app")
return False
try:
databag = dict(relation.data[relation.app])
TracingProviderAppData.load(databag)
except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError):
logger.info(f"failed validating relation data for {relation}")
return False
return True
def _on_tracing_relation_changed(self, event):
"""Notify the providers that there is new endpoint information available."""
relation = event.relation
if not self.is_ready(relation):
self.on.endpoint_removed.emit(relation) # type: ignore
return
data = TracingProviderAppData.load(relation.data[relation.app])
self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore
def _on_tracing_relation_broken(self, event: RelationBrokenEvent):
"""Notify the providers that the endpoint is broken."""
relation = event.relation
self.on.endpoint_removed.emit(relation) # type: ignore
def get_all_endpoints(
self, relation: Optional[Relation] = None
) -> Optional[TracingProviderAppData]:
"""Unmarshalled relation data."""
relation = relation or self._relation
if not self.is_ready(relation):
return
return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore
def _get_endpoint(
self, relation: Optional[Relation], protocol: ReceiverProtocol
) -> Optional[str]:
app_data = self.get_all_endpoints(relation)
if not app_data:
return None
receivers: List[Receiver] = list(
filter(lambda i: i.protocol.name == protocol, app_data.receivers)
)
if not receivers:
# it can happen if the charm requests tracing protocols, but the relay (such as grafana-agent) isn't yet
# connected to the tracing backend. In this case, it's not an error the charm author can do anything about
logger.warning(f"no receiver found with protocol={protocol!r}.")
return
if len(receivers) > 1:
# if we have more than 1 receiver that matches, it shouldn't matter which receiver we'll be using.
logger.warning(
f"too many receivers with protocol={protocol!r}; using first one. Found: {receivers}"
)
receiver = receivers[0]
return receiver.url
def get_endpoint(
self, protocol: ReceiverProtocol, relation: Optional[Relation] = None
) -> Optional[str]:
"""Receiver endpoint for the given protocol.
It could happen that this function gets called before the provider publishes the endpoints.
In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to
restricted access. To prevent this, this function needs to be guarded by the `is_ready` check.
Raises:
ProtocolNotRequestedError:
If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request.
"""
endpoint = self._get_endpoint(relation or self._relation, protocol=protocol)
if not endpoint:
requested_protocols = set()
relations = [relation] if relation else self.relations
for relation in relations:
try:
databag = TracingRequirerAppData.load(relation.data[self._charm.app])
except DataValidationError:
continue
requested_protocols.update(databag.receivers)
if protocol not in requested_protocols:
raise ProtocolNotRequestedError(protocol, relation)
return None
return endpoint
def charm_tracing_config(
endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]]
) -> Tuple[Optional[str], Optional[str]]:
"""Return the charm_tracing config you likely want.
If no endpoint is provided:
disable charm tracing.
If https endpoint is provided but cert_path is not found on disk:
disable charm tracing.
If https endpoint is provided and cert_path is None:
ERROR
Else:
proceed with charm tracing (with or without tls, as appropriate)
Usage:
>>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm
>>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config
>>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path")
>>> class MyCharm(...):
>>> _cert_path = "/path/to/cert/on/charm/container.crt"
>>> def __init__(self, ...):
>>> self.tracing = TracingEndpointRequirer(...)
>>> self.my_endpoint, self.cert_path = charm_tracing_config(
... self.tracing, self._cert_path)
"""
if not endpoint_requirer.is_ready():
return None, None
try:
endpoint = endpoint_requirer.get_endpoint("otlp_http")
except ModelError as e:
if e.args[0] == "ERROR permission denied\n":
# this can happen the app databag doesn't have data,
# or we're breaking the relation.
return None, None
raise
if not endpoint:
return None, None
is_https = endpoint.startswith("https://")
if is_https:
if cert_path is None or not Path(cert_path).exists():
# disable charm tracing until we obtain a cert to prevent tls errors
logger.error(
"Tracing endpoint is https, but no server_cert has been passed."
"Please point @trace_charm to a `server_cert` attr. "
"This might also mean that the tracing provider is related to a "
"certificates provider, but this application is not (yet). "
"In that case, you might just have to wait a bit for the certificates "
"integration to settle. "
)
return None, None
return endpoint, str(cert_path)
else:
return endpoint, None

View File

@ -0,0 +1,914 @@
# Copyright 2024 Canonical Ltd.
# See LICENSE file for licensing details.
r"""# Interface Library for ingress.
This library wraps relation endpoints using the `ingress` interface
and provides a Python API for both requesting and providing per-application
ingress, with load-balancing occurring across all units.
## Getting Started
To get started using the library, you just need to fetch the library using `charmcraft`.
```shell
cd some-charm
charmcraft fetch-lib charms.traefik_k8s.v2.ingress
```
In the `metadata.yaml` of the charm, add the following:
```yaml
requires:
ingress:
interface: ingress
limit: 1
```
Then, to initialise the library:
```python
from charms.traefik_k8s.v2.ingress import (IngressPerAppRequirer,
IngressPerAppReadyEvent, IngressPerAppRevokedEvent)
class SomeCharm(CharmBase):
def __init__(self, *args):
# ...
self.ingress = IngressPerAppRequirer(self, port=80)
# The following event is triggered when the ingress URL to be used
# by this deployment of the `SomeCharm` is ready (or changes).
self.framework.observe(
self.ingress.on.ready, self._on_ingress_ready
)
self.framework.observe(
self.ingress.on.revoked, self._on_ingress_revoked
)
def _on_ingress_ready(self, event: IngressPerAppReadyEvent):
logger.info("This app's ingress URL: %s", event.url)
def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent):
logger.info("This app no longer has ingress")
"""
import ipaddress
import json
import logging
import socket
import typing
from dataclasses import dataclass
from functools import partial
from typing import (
Any,
Callable,
Dict,
List,
MutableMapping,
Optional,
Sequence,
Tuple,
Union,
cast,
)
import pydantic
from ops.charm import CharmBase, RelationBrokenEvent, RelationEvent
from ops.framework import EventSource, Object, ObjectEvents, StoredState
from ops.model import ModelError, Relation, Unit
from pydantic import AnyHttpUrl, BaseModel, Field
# The unique Charmhub library identifier, never change it
LIBID = "e6de2a5cd5b34422a204668f3b8f90d2"
# Increment this major API version when introducing breaking changes
LIBAPI = 2
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 15
PYDEPS = ["pydantic"]
DEFAULT_RELATION_NAME = "ingress"
RELATION_INTERFACE = "ingress"
log = logging.getLogger(__name__)
BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"}
PYDANTIC_IS_V1 = int(pydantic.version.VERSION.split(".")[0]) < 2
if PYDANTIC_IS_V1:
from pydantic import validator
input_validator = partial(validator, pre=True)
class DatabagModel(BaseModel): # type: ignore
"""Base databag model."""
class Config:
"""Pydantic config."""
allow_population_by_field_name = True
"""Allow instantiating this class by field name (instead of forcing alias)."""
_NEST_UNDER = None
@classmethod
def load(cls, databag: MutableMapping):
"""Load this model from a Juju databag."""
if cls._NEST_UNDER:
return cls.parse_obj(json.loads(databag[cls._NEST_UNDER]))
try:
data = {
k: json.loads(v)
for k, v in databag.items()
# Don't attempt to parse model-external values
if k in {f.alias for f in cls.__fields__.values()} # type: ignore
}
except json.JSONDecodeError as e:
msg = f"invalid databag contents: expecting json. {databag}"
log.error(msg)
raise DataValidationError(msg) from e
try:
return cls.parse_raw(json.dumps(data)) # type: ignore
except pydantic.ValidationError as e:
msg = f"failed to validate databag: {databag}"
log.debug(msg, exc_info=True)
raise DataValidationError(msg) from e
def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
"""Write the contents of this model to Juju databag.
:param databag: the databag to write the data to.
:param clear: ensure the databag is cleared before writing it.
"""
if clear and databag:
databag.clear()
if databag is None:
databag = {}
if self._NEST_UNDER:
databag[self._NEST_UNDER] = self.json(by_alias=True, exclude_defaults=True)
return databag
for key, value in self.dict(by_alias=True, exclude_defaults=True).items(): # type: ignore
databag[key] = json.dumps(value)
return databag
else:
from pydantic import ConfigDict, field_validator
input_validator = partial(field_validator, mode="before")
class DatabagModel(BaseModel):
"""Base databag model."""
model_config = ConfigDict(
# tolerate additional keys in databag
extra="ignore",
# Allow instantiating this class by field name (instead of forcing alias).
populate_by_name=True,
# Custom config key: whether to nest the whole datastructure (as json)
# under a field or spread it out at the toplevel.
_NEST_UNDER=None,
) # type: ignore
"""Pydantic config."""
@classmethod
def load(cls, databag: MutableMapping):
"""Load this model from a Juju databag."""
nest_under = cls.model_config.get("_NEST_UNDER")
if nest_under:
return cls.model_validate(json.loads(databag[nest_under])) # type: ignore
try:
data = {
k: json.loads(v)
for k, v in databag.items()
# Don't attempt to parse model-external values
if k in {(f.alias or n) for n, f in cls.model_fields.items()} # type: ignore
}
except json.JSONDecodeError as e:
msg = f"invalid databag contents: expecting json. {databag}"
log.error(msg)
raise DataValidationError(msg) from e
try:
return cls.model_validate_json(json.dumps(data)) # type: ignore
except pydantic.ValidationError as e:
msg = f"failed to validate databag: {databag}"
log.debug(msg, exc_info=True)
raise DataValidationError(msg) from e
def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
"""Write the contents of this model to Juju databag.
:param databag: the databag to write the data to.
:param clear: ensure the databag is cleared before writing it.
"""
if clear and databag:
databag.clear()
if databag is None:
databag = {}
nest_under = self.model_config.get("_NEST_UNDER")
if nest_under:
databag[nest_under] = self.model_dump_json( # type: ignore
by_alias=True,
# skip keys whose values are default
exclude_defaults=True,
)
return databag
dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True) # type: ignore
databag.update({k: json.dumps(v) for k, v in dct.items()})
return databag
# todo: import these models from charm-relation-interfaces/ingress/v2 instead of redeclaring them
class IngressUrl(BaseModel):
"""Ingress url schema."""
url: AnyHttpUrl
class IngressProviderAppData(DatabagModel):
"""Ingress application databag schema."""
ingress: Optional[IngressUrl] = None
class ProviderSchema(BaseModel):
"""Provider schema for Ingress."""
app: IngressProviderAppData
class IngressHealthCheck(BaseModel):
"""HealthCheck schema for Ingress."""
path: str = Field(description="The health check endpoint path (required).")
scheme: Optional[str] = Field(
default=None, description="Replaces the server URL scheme for the health check endpoint."
)
hostname: Optional[str] = Field(
default=None, description="Hostname to be set in the health check request."
)
port: Optional[int] = Field(
default=None, description="Replaces the server URL port for the health check endpoint."
)
interval: str = Field(default="30s", description="Frequency of the health check calls.")
timeout: str = Field(default="5s", description="Maximum duration for a health check request.")
class IngressRequirerAppData(DatabagModel):
"""Ingress requirer application databag model."""
model: str = Field(description="The model the application is in.")
name: str = Field(description="the name of the app requesting ingress.")
port: int = Field(description="The port the app wishes to be exposed.")
healthcheck_params: Optional[IngressHealthCheck] = Field(
default=None, description="Optional health check configuration for ingress."
)
# fields on top of vanilla 'ingress' interface:
strip_prefix: Optional[bool] = Field(
default=False,
description="Whether to strip the prefix from the ingress url.",
alias="strip-prefix",
)
redirect_https: Optional[bool] = Field(
default=False,
description="Whether to redirect http traffic to https.",
alias="redirect-https",
)
scheme: Optional[str] = Field(
default="http", description="What scheme to use in the generated ingress url"
)
@input_validator("scheme")
def validate_scheme(cls, scheme): # noqa: N805 # pydantic wants 'cls' as first arg
"""Validate scheme arg."""
if scheme not in {"http", "https", "h2c"}:
raise ValueError("invalid scheme: should be one of `http|https|h2c`")
return scheme
@input_validator("port")
def validate_port(cls, port): # noqa: N805 # pydantic wants 'cls' as first arg
"""Validate port."""
assert isinstance(port, int), type(port)
assert 0 < port < 65535, "port out of TCP range"
return port
class IngressRequirerUnitData(DatabagModel):
"""Ingress requirer unit databag model."""
host: str = Field(description="Hostname at which the unit is reachable.")
ip: Optional[str] = Field(
None,
description="IP at which the unit is reachable, "
"IP can only be None if the IP information can't be retrieved from juju.",
)
@input_validator("host")
def validate_host(cls, host): # noqa: N805 # pydantic wants 'cls' as first arg
"""Validate host."""
assert isinstance(host, str), type(host)
return host
@input_validator("ip")
def validate_ip(cls, ip): # noqa: N805 # pydantic wants 'cls' as first arg
"""Validate ip."""
if ip is None:
return None
if not isinstance(ip, str):
raise TypeError(f"got ip of type {type(ip)} instead of expected str")
try:
ipaddress.IPv4Address(ip)
return ip
except ipaddress.AddressValueError:
pass
try:
ipaddress.IPv6Address(ip)
return ip
except ipaddress.AddressValueError:
raise ValueError(f"{ip!r} is not a valid ip address")
class RequirerSchema(BaseModel):
"""Requirer schema for Ingress."""
app: IngressRequirerAppData
unit: IngressRequirerUnitData
class IngressError(RuntimeError):
"""Base class for custom errors raised by this library."""
class NotReadyError(IngressError):
"""Raised when a relation is not ready."""
class DataValidationError(IngressError):
"""Raised when data validation fails on IPU relation data."""
class _IngressPerAppBase(Object):
"""Base class for IngressPerUnit interface classes."""
def __init__(self, charm: CharmBase, relation_name: str = DEFAULT_RELATION_NAME):
super().__init__(charm, relation_name)
self.charm: CharmBase = charm
self.relation_name = relation_name
self.app = self.charm.app
self.unit = self.charm.unit
observe = self.framework.observe
rel_events = charm.on[relation_name]
observe(rel_events.relation_created, self._handle_relation)
observe(rel_events.relation_joined, self._handle_relation)
observe(rel_events.relation_changed, self._handle_relation)
observe(rel_events.relation_departed, self._handle_relation)
observe(rel_events.relation_broken, self._handle_relation_broken)
observe(charm.on.leader_elected, self._handle_upgrade_or_leader) # type: ignore
observe(charm.on.upgrade_charm, self._handle_upgrade_or_leader) # type: ignore
@property
def relations(self):
"""The list of Relation instances associated with this endpoint."""
return list(self.charm.model.relations[self.relation_name])
def _handle_relation(self, event):
"""Subclasses should implement this method to handle a relation update."""
pass
def _handle_relation_broken(self, event):
"""Subclasses should implement this method to handle a relation breaking."""
pass
def _handle_upgrade_or_leader(self, event):
"""Subclasses should implement this method to handle upgrades or leadership change."""
pass
class _IPAEvent(RelationEvent):
__args__: Tuple[str, ...] = ()
__optional_kwargs__: Dict[str, Any] = {}
@classmethod
def __attrs__(cls):
return cls.__args__ + tuple(cls.__optional_kwargs__.keys())
def __init__(self, handle, relation, *args, **kwargs):
super().__init__(handle, relation)
if not len(self.__args__) == len(args):
raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args)))
for attr, obj in zip(self.__args__, args):
setattr(self, attr, obj)
for attr, default in self.__optional_kwargs__.items():
obj = kwargs.get(attr, default)
setattr(self, attr, obj)
def snapshot(self):
dct = super().snapshot()
for attr in self.__attrs__():
obj = getattr(self, attr)
try:
dct[attr] = obj
except ValueError as e:
raise ValueError(
"cannot automagically serialize {}: "
"override this method and do it "
"manually.".format(obj)
) from e
return dct
def restore(self, snapshot) -> None:
super().restore(snapshot)
for attr, obj in snapshot.items():
setattr(self, attr, obj)
class IngressPerAppDataProvidedEvent(_IPAEvent):
"""Event representing that ingress data has been provided for an app."""
__args__ = ("name", "model", "hosts", "strip_prefix", "redirect_https")
if typing.TYPE_CHECKING:
name: Optional[str] = None
model: Optional[str] = None
# sequence of hostname, port dicts
hosts: Sequence["IngressRequirerUnitData"] = ()
strip_prefix: bool = False
redirect_https: bool = False
class IngressPerAppDataRemovedEvent(RelationEvent):
"""Event representing that ingress data has been removed for an app."""
class IngressPerAppProviderEvents(ObjectEvents):
"""Container for IPA Provider events."""
data_provided = EventSource(IngressPerAppDataProvidedEvent)
data_removed = EventSource(IngressPerAppDataRemovedEvent)
@dataclass
class IngressRequirerData:
"""Data exposed by the ingress requirer to the provider."""
app: "IngressRequirerAppData"
units: List["IngressRequirerUnitData"]
class IngressPerAppProvider(_IngressPerAppBase):
"""Implementation of the provider of ingress."""
on = IngressPerAppProviderEvents() # type: ignore
def __init__(
self,
charm: CharmBase,
relation_name: str = DEFAULT_RELATION_NAME,
):
"""Constructor for IngressPerAppProvider.
Args:
charm: The charm that is instantiating the instance.
relation_name: The name of the relation endpoint to bind to
(defaults to "ingress").
"""
super().__init__(charm, relation_name)
def _handle_relation(self, event):
# created, joined or changed: if remote side has sent the required data:
# notify listeners.
if self.is_ready(event.relation):
data = self.get_data(event.relation)
self.on.data_provided.emit( # type: ignore
event.relation,
data.app.name,
data.app.model,
[
unit.dict() if PYDANTIC_IS_V1 else unit.model_dump(mode="json")
for unit in data.units
],
data.app.strip_prefix or False,
data.app.redirect_https or False,
)
def _handle_relation_broken(self, event):
self.on.data_removed.emit(event.relation) # type: ignore
def wipe_ingress_data(self, relation: Relation):
"""Clear ingress data from relation."""
assert self.unit.is_leader(), "only leaders can do this"
try:
relation.data
except ModelError as e:
log.warning(
"error {} accessing relation data for {!r}. "
"Probably a ghost of a dead relation is still "
"lingering around.".format(e, relation.name)
)
return
del relation.data[self.app]["ingress"]
def _get_requirer_units_data(self, relation: Relation) -> List["IngressRequirerUnitData"]:
"""Fetch and validate the requirer's app databag."""
out: List["IngressRequirerUnitData"] = []
unit: Unit
for unit in relation.units:
databag = relation.data[unit]
try:
data = IngressRequirerUnitData.load(databag)
out.append(data)
except pydantic.ValidationError:
log.info(f"failed to validate remote unit data for {unit}")
raise
return out
@staticmethod
def _get_requirer_app_data(relation: Relation) -> "IngressRequirerAppData":
"""Fetch and validate the requirer's app databag."""
app = relation.app
if app is None:
raise NotReadyError(relation)
databag = relation.data[app]
return IngressRequirerAppData.load(databag)
def get_data(self, relation: Relation) -> IngressRequirerData:
"""Fetch the remote (requirer) app and units' databags."""
try:
return IngressRequirerData(
self._get_requirer_app_data(relation), self._get_requirer_units_data(relation)
)
except (pydantic.ValidationError, DataValidationError) as e:
raise DataValidationError("failed to validate ingress requirer data") from e
def is_ready(self, relation: Optional[Relation] = None):
"""The Provider is ready if the requirer has sent valid data."""
if not relation:
return any(map(self.is_ready, self.relations))
try:
self.get_data(relation)
except (DataValidationError, NotReadyError) as e:
log.debug("Provider not ready; validation error encountered: %s" % str(e))
return False
return True
def _published_url(self, relation: Relation) -> Optional["IngressProviderAppData"]:
"""Fetch and validate this app databag; return the ingress url."""
if not self.is_ready(relation) or not self.unit.is_leader():
# Handle edge case where remote app name can be missing, e.g.,
# relation_broken events.
# Also, only leader units can read own app databags.
# FIXME https://github.com/canonical/traefik-k8s-operator/issues/34
return None
# fetch the provider's app databag
databag = relation.data[self.app]
if not databag.get("ingress"):
raise NotReadyError("This application did not `publish_url` yet.")
return IngressProviderAppData.load(databag)
def publish_url(self, relation: Relation, url: str):
"""Publish to the app databag the ingress url."""
ingress_url = {"url": url}
try:
IngressProviderAppData(ingress=ingress_url).dump(relation.data[self.app]) # type: ignore
except pydantic.ValidationError as e:
# If we cannot validate the url as valid, publish an empty databag and log the error.
log.error(f"Failed to validate ingress url '{url}' - got ValidationError {e}")
log.error(
"url was not published to ingress relation for {relation.app}. This error is likely due to an"
" error or misconfiguration of the charm calling this library."
)
IngressProviderAppData(ingress=None).dump(relation.data[self.app]) # type: ignore
    @property
    def proxied_endpoints(self) -> Dict[str, Dict[str, str]]:
        """Returns the ingress settings provided to applications by this IngressPerAppProvider.

        For example, when this IngressPerAppProvider has provided the
        `http://foo.bar/my-model.my-app` URL to the my-app application, the returned dictionary
        will be:

        ```
        {
            "my-app": {
                "url": "http://foo.bar/my-model.my-app"
            }
        }
        ```

        Relations that are missing a remote app, have not had a url published
        yet, or are otherwise not ready are skipped with a warning.
        """
        results: Dict[str, Dict[str, str]] = {}
        for ingress_relation in self.relations:
            # A relation can transiently have no remote app (e.g. while breaking).
            if not ingress_relation.app:
                log.warning(
                    f"no app in relation {ingress_relation} when fetching proxied endpoints: skipping"
                )
                continue
            try:
                ingress_data = self._published_url(ingress_relation)
            except NotReadyError:
                log.warning(
                    f"no published url found in {ingress_relation}: "
                    f"traefik didn't publish_url yet to this relation."
                )
                continue
            # _published_url returns None when not ready or not leader.
            if not ingress_data:
                log.warning(f"relation {ingress_relation} not ready yet: try again in some time.")
                continue
            # Validation above means ingress cannot be None, but type checker doesn't know that.
            ingress = ingress_data.ingress
            # NOTE(review): the cast target looks like it should be the nested
            # ingress url model rather than IngressProviderAppData — it only
            # affects type checking, not runtime behavior; confirm upstream.
            ingress = cast(IngressProviderAppData, ingress)
            # Serialize with the API matching the installed pydantic major version.
            if PYDANTIC_IS_V1:
                results[ingress_relation.app.name] = ingress.dict()
            else:
                results[ingress_relation.app.name] = ingress.model_dump(mode="json")
        return results
class IngressPerAppReadyEvent(_IPAEvent):
    """Event representing that ingress for an app is ready.

    Carries the ingress `url` as an event snapshot attribute.
    """
    # Attributes snapshotted/restored across the event by _IPAEvent.
    __args__ = ("url",)
    if typing.TYPE_CHECKING:
        # Declared only for static type checkers; populated at runtime by _IPAEvent.
        url: Optional[str] = None
class IngressPerAppRevokedEvent(RelationEvent):
    """Event representing that ingress for an app has been revoked.

    A plain RelationEvent: the relation itself is the only payload.
    """
class IngressPerAppRequirerEvents(ObjectEvents):
    """Container for IPA Requirer events."""
    # Emitted when the provider has published a usable ingress url.
    ready = EventSource(IngressPerAppReadyEvent)
    # Emitted when the ingress url is withdrawn (e.g. relation broken).
    revoked = EventSource(IngressPerAppRevokedEvent)
class IngressPerAppRequirer(_IngressPerAppBase):
    """Implementation of the requirer of the ingress relation.

    Publishes this app's ingress requirements (host/ip/port/scheme) to the
    provider and surfaces the resulting url via `ready`/`revoked` events and
    the `url` property.
    """
    on = IngressPerAppRequirerEvents() # type: ignore
    # used to prevent spurious urls to be sent out if the event we're currently
    # handling is a relation-broken one.
    _stored = StoredState()
    def __init__(
        self,
        charm: CharmBase,
        relation_name: str = DEFAULT_RELATION_NAME,
        *,
        host: Optional[str] = None,
        ip: Optional[str] = None,
        port: Optional[int] = None,
        strip_prefix: bool = False,
        redirect_https: bool = False,
        # fixme: this is horrible UX.
        # shall we switch to manually calling provide_ingress_requirements with all args when ready?
        scheme: Union[Callable[[], str], str] = lambda: "http",
        healthcheck_params: Optional[Dict[str, Any]] = None,
    ):
        """Constructor for IngressRequirer.

        The request args can be used to specify the ingress properties when the
        instance is created. If any are set, at least `port` is required, and
        they will be sent to the ingress provider as soon as it is available.
        All request args must be given as keyword args.

        Args:
            charm: The charm that is instantiating the library.
            relation_name: The name of the relation endpoint to bind to (defaults to "ingress");
                the relation must be of interface type "ingress" and have a limit of 1.
            host: Hostname to be used by the ingress provider to address the requiring
                application; if unspecified, the default Kubernetes service name will be used.
            ip: Alternative addressing method other than host to be used by the ingress provider;
                if unspecified, the binding address from the Juju network API will be used.
            healthcheck_params: Optional dictionary containing health check
                configuration parameters conforming to the IngressHealthCheck schema. The dictionary must include:
                - "path" (str): The health check endpoint path (required).
                It may also include:
                - "scheme" (Optional[str]): Replaces the server URL scheme for the health check endpoint.
                - "hostname" (Optional[str]): Hostname to be set in the health check request.
                - "port" (Optional[int]): Replaces the server URL port for the health check endpoint.
                - "interval" (str): Frequency of the health check calls (defaults to "30s" if omitted).
                - "timeout" (str): Maximum duration for a health check request (defaults to "5s" if omitted).
                If provided, "path" is required while "interval" and "timeout" will use Traefik's defaults when not specified.
            strip_prefix: Configure Traefik to strip the path prefix.
            redirect_https: Redirect incoming requests to HTTPS.
            scheme: Either a callable that returns the scheme to use when constructing the ingress URL,
                or a string if the scheme is known and stable at charm initialization.

        Request Args:
            port: the port of the service
        """
        super().__init__(charm, relation_name)
        self.charm: CharmBase = charm
        self.healthcheck_params = healthcheck_params
        self.relation_name = relation_name
        self._strip_prefix = strip_prefix
        self._redirect_https = redirect_https
        # Normalize `scheme` so it is always a zero-arg callable internally.
        self._get_scheme = scheme if callable(scheme) else lambda: scheme
        self._stored.set_default(current_url=None) # type: ignore
        # if instantiated with a port, and we are related, then
        # we immediately publish our ingress data to speed up the process.
        if port:
            self._auto_data = host, ip, port
        else:
            self._auto_data = None
    def _handle_relation(self, event):
        """Handle relation-created/joined/changed: publish data and emit `ready` on a NEW url."""
        # created, joined or changed: if we have auto data: publish it
        self._publish_auto_data()
        if self.is_ready():
            # Avoid spurious events, emit only when there is a NEW URL available
            new_url = (
                None
                if isinstance(event, RelationBrokenEvent)
                else self._get_url_from_relation_data()
            )
            if self._stored.current_url != new_url: # type: ignore
                self._stored.current_url = new_url # type: ignore
                self.on.ready.emit(event.relation, new_url) # type: ignore
    def _handle_relation_broken(self, event):
        """Handle relation-broken: drop the cached url and emit `revoked`."""
        self._stored.current_url = None # type: ignore
        self.on.revoked.emit(event.relation) # type: ignore
    def _handle_upgrade_or_leader(self, event):
        """On upgrade/leadership change: ensure we publish the data we have."""
        self._publish_auto_data()
    def is_ready(self):
        """The Requirer is ready if the Provider has sent valid data."""
        try:
            # Ready iff a non-empty url can be read from relation data.
            return bool(self._get_url_from_relation_data())
        except DataValidationError as e:
            log.debug("Requirer not ready; validation error encountered: %s" % str(e))
            return False
    def _publish_auto_data(self):
        """Publish the (host, ip, port) captured at construction time, if any."""
        if self._auto_data:
            host, ip, port = self._auto_data
            self.provide_ingress_requirements(host=host, ip=ip, port=port)
    def provide_ingress_requirements(
        self,
        *,
        scheme: Optional[str] = None,
        host: Optional[str] = None,
        ip: Optional[str] = None,
        port: int,
    ):
        """Publishes the data that Traefik needs to provide ingress.

        Args:
            scheme: Scheme to be used; if unspecified, use the one used by __init__.
            host: Hostname to be used by the ingress provider to address the
                requirer unit; if unspecified, FQDN will be used instead
            ip: Alternative addressing method other than host to be used by the ingress provider.
                if unspecified, binding address from juju network API will be used.
            port: the port of the service (required)
        """
        # Publish to every established ingress relation.
        for relation in self.relations:
            self._provide_ingress_requirements(scheme, host, ip, port, relation)
    def _provide_ingress_requirements(
        self,
        scheme: Optional[str],
        host: Optional[str],
        ip: Optional[str],
        port: int,
        relation: Relation,
    ):
        """Write app data (leader only) and this unit's data for one relation."""
        # Only the leader may write the shared app databag; every unit writes
        # its own unit databag.
        if self.unit.is_leader():
            self._publish_app_data(scheme, port, relation)
        self._publish_unit_data(host, ip, relation)
    def _publish_unit_data(
        self,
        host: Optional[str],
        ip: Optional[str],
        relation: Relation,
    ):
        """Publish this unit's addressing info (host/ip) to its unit databag.

        Raises:
            DataValidationError: if the resulting unit data fails validation.
        """
        if not host:
            # Default to this unit's fully-qualified domain name.
            host = socket.getfqdn()
        if ip is None:
            # Fall back to the bind address reported by the Juju network API.
            network_binding = self.charm.model.get_binding(relation)
            if (
                network_binding is not None
                and (bind_address := network_binding.network.bind_address) is not None
            ):
                ip = str(bind_address)
            else:
                # Best-effort: leave ip as None and let validation decide.
                log.error("failed to retrieve ip information from juju")
        unit_databag = relation.data[self.unit]
        try:
            IngressRequirerUnitData(host=host, ip=ip).dump(unit_databag)
        except pydantic.ValidationError as e:
            msg = "failed to validate unit data"
            log.info(msg, exc_info=True) # log to INFO because this might be expected
            raise DataValidationError(msg) from e
    def _publish_app_data(
        self,
        scheme: Optional[str],
        port: int,
        relation: Relation,
    ):
        """Publish the app-level ingress request to the app databag.

        Raises:
            DataValidationError: if the resulting app data fails validation.
        """
        # assumes leadership!
        app_databag = relation.data[self.app]
        if not scheme:
            # If scheme was not provided, use the one given to the constructor.
            scheme = self._get_scheme()
        try:
            IngressRequirerAppData( # type: ignore # pyright does not like aliases
                model=self.model.name,
                name=self.app.name,
                scheme=scheme,
                port=port,
                strip_prefix=self._strip_prefix, # type: ignore # pyright does not like aliases
                redirect_https=self._redirect_https, # type: ignore # pyright does not like aliases
                healthcheck_params=(
                    IngressHealthCheck(**self.healthcheck_params)
                    if self.healthcheck_params
                    else None
                ),
            ).dump(app_databag)
        except pydantic.ValidationError as e:
            msg = "failed to validate app data"
            log.info(msg, exc_info=True) # log to INFO because this might be expected
            raise DataValidationError(msg) from e
    @property
    def relation(self):
        """The established Relation instance, or None."""
        # The relation has limit 1, so at most one entry exists.
        return self.relations[0] if self.relations else None
    def _get_url_from_relation_data(self) -> Optional[str]:
        """The full ingress URL to reach the current unit.

        Returns None if the URL isn't available yet.
        """
        relation = self.relation
        if not relation or not relation.app:
            return None
        # fetch the provider's app databag
        try:
            databag = relation.data[relation.app]
        except ModelError as e:
            # Remote data may be unreadable mid-teardown; treat as "no url yet".
            log.debug(
                f"Error {e} attempting to read remote app data; "
                f"probably we are in a relation_departed hook"
            )
            return None
        if not databag: # not ready yet
            return None
        ingress = IngressProviderAppData.load(databag).ingress
        if ingress is None:
            return None
        return str(ingress.url)
    @property
    def url(self) -> Optional[str]:
        """The full ingress URL to reach the current unit.

        Returns None if the URL isn't available yet.
        """
        # Prefer the cached url (kept in StoredState by the relation handlers);
        # fall back to reading relation data directly.
        data = (
            typing.cast(Optional[str], self._stored.current_url) # type: ignore
            or self._get_url_from_relation_data()
        )
        return data

41
charm/pyproject.toml Normal file
View File

@ -0,0 +1,41 @@
# Testing tools configuration
[tool.coverage.run]
branch = true
[tool.coverage.report]
show_missing = true
[tool.pytest.ini_options]
minversion = "6.0"
log_cli_level = "INFO"
# Linting tools configuration
[tool.ruff]
line-length = 99
lint.select = ["E", "W", "F", "C", "N", "D", "I001"]
lint.ignore = [
"D105",
"D107",
"D203",
"D204",
"D213",
"D215",
"D400",
"D404",
"D406",
"D407",
"D408",
"D409",
"D413",
]
extend-exclude = ["__pycache__", "*.egg_info"]
lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]}
[tool.ruff.lint.mccabe]
max-complexity = 10
[tool.codespell]
skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage"
[tool.pyright]
include = ["src/**.py"]

2
charm/requirements.txt Normal file
View File

@ -0,0 +1,2 @@
ops ~= 2.17
paas-charm>=1.0,<2

26
charm/rockcraft.yml Normal file
View File

@ -0,0 +1,26 @@
name: canonical-com
# see https://documentation.ubuntu.com/rockcraft/en/1.12.0/explanation/bases/
# for more information about bases and using 'bare' bases for chiselled rocks
base: ubuntu@22.04
version: "0.1"
summary: https://canonical.com
description: |
This is the charm for the canonical.com website.
platforms:
amd64:
extensions:
- flask-framework
parts:
flask-framework/install-app:
prime:
    # note: prefix each entry with "flask/app/" followed by the local path.
- flask/app/.env
- flask/app/app.py
- flask/app/webapp
- flask/app/templates
- flask/app/static
- flask/app/scripts
- flask/app/redirects.yaml
- flask/app/navigation.yaml
- flask/app/secondary-navigation.yaml

30
charm/src/charm.py Executable file
View File

@ -0,0 +1,30 @@
#!/usr/bin/env python3
# Copyright 2025 Samuel Olwe
# See LICENSE file for licensing details.
"""Flask Charm entrypoint."""
import logging
import typing
import ops
import paas_charm.flask
logger = logging.getLogger(__name__)
class CharmCharm(paas_charm.flask.Charm):
    """Flask Charm service.

    Thin wrapper over paas_charm.flask.Charm; all behavior is inherited.
    """
    def __init__(self, *args: typing.Any) -> None:
        """Initialize the instance.

        Args:
            args: passthrough to CharmBase.
        """
        super().__init__(*args)
# Charm entrypoint: hand control to the ops framework.
if __name__ == "__main__":
    ops.main(CharmCharm)

84
charm/tox.ini Normal file
View File

@ -0,0 +1,84 @@
# Copyright 2025 Samuel Olwe
# See LICENSE file for licensing details.
[tox]
no_package = True
skip_missing_interpreters = True
env_list = format, lint, static
min_version = 4.0.0
[vars]
src_path = {tox_root}/src
tests_path = {tox_root}/tests
;lib_path = {tox_root}/lib/charms/operator_name_with_underscores
all_path = {[vars]src_path}
[testenv]
set_env =
PYTHONPATH = {tox_root}/lib:{[vars]src_path}
PYTHONBREAKPOINT=pdb.set_trace
PY_COLORS=1
pass_env =
PYTHONPATH
CHARM_BUILD_DIR
MODEL_SETTINGS
[testenv:format]
description = Apply coding style standards to code
deps =
ruff
commands =
ruff format {[vars]all_path}
ruff check --fix {[vars]all_path}
[testenv:lint]
description = Check code against coding style standards
deps =
ruff
codespell
commands =
# if this charm owns a lib, uncomment "lib_path" variable
# and uncomment the following line
# codespell {[vars]lib_path}
codespell {tox_root}
ruff check {[vars]all_path}
ruff format --check --diff {[vars]all_path}
[testenv:unit]
description = Run unit tests
deps =
pytest
coverage[toml]
-r {tox_root}/requirements.txt
commands =
coverage run --source={[vars]src_path} \
-m pytest \
--tb native \
-v \
-s \
{posargs} \
{[vars]tests_path}/unit
coverage report
[testenv:static]
description = Run static type checks
deps =
pyright
-r {tox_root}/requirements.txt
commands =
pyright {posargs}
[testenv:integration]
description = Run integration tests
deps =
pytest
juju
pytest-operator
-r {tox_root}/requirements.txt
commands =
pytest -v \
-s \
--tb native \
--log-cli-level=INFO \
{posargs} \
{[vars]tests_path}/integration

View File

@ -27,3 +27,6 @@ cp node_modules/leaflet/dist/leaflet.js static/js/modules/leaflet
mkdir -p static/js/modules/venobox
cp node_modules/venobox/dist/venobox.min.js static/js/modules/venobox/venobox.min.js
mkdir -p static/js/modules/vanilla-framework
cp -r node_modules/vanilla-framework/templates/_macros/ static/js/modules/vanilla-framework

View File

@ -50,13 +50,6 @@ from webapp.recaptcha import verify_recaptcha, RECAPTCHA_CONFIG
logger = logging.getLogger(__name__)
CHARMHUB_DISCOURSE_API_KEY = os.getenv("CHARMHUB_DISCOURSE_API_KEY")
CHARMHUB_DISCOURSE_API_USERNAME = os.getenv("CHARMHUB_DISCOURSE_API_USERNAME")
RECAPTCHA_SITE_KEY = RECAPTCHA_CONFIG.get("site_key")
if not RECAPTCHA_SITE_KEY:
logger.error("RECAPTCHA_SITE_KEY is missing!")
# Sitemaps that are already generated and don't need to be updated.
# Can be seen on sitemap_index.xml
DYNAMIC_SITEMAPS = [
@ -77,6 +70,15 @@ app = FlaskBase(
template_500="500.html",
)
# Load env variables after the app is initialized
CHARMHUB_DISCOURSE_API_KEY = os.getenv("CHARMHUB_DISCOURSE_API_KEY")
CHARMHUB_DISCOURSE_API_USERNAME = os.getenv("CHARMHUB_DISCOURSE_API_USERNAME")
RECAPTCHA_SITE_KEY = RECAPTCHA_CONFIG.get("site_key")
if not RECAPTCHA_SITE_KEY:
logger.error("RECAPTCHA_SITE_KEY is missing!")
# ChoiceLoader attempts loading templates from each path in successive order
directory_parser_templates = (
Path(directory_parser.__file__).parent / "templates"
@ -85,6 +87,7 @@ loader = ChoiceLoader(
[
FileSystemLoader("templates"),
FileSystemLoader("node_modules/vanilla-framework/templates/"),
FileSystemLoader("static/js/modules/vanilla-framework/"),
FileSystemLoader(str(directory_parser_templates)),
]
)