
Commit

chore: update charm libraries (#156)
Co-authored-by: Github Actions <[email protected]>
observability-noctua-bot and Github Actions authored Mar 25, 2023
1 parent e66199f commit 1867c04
Showing 1 changed file with 61 additions and 8 deletions: lib/charms/prometheus_k8s/v0/prometheus_scrape.py
@@ -121,7 +121,7 @@ def __init__(self, *args):
     {
         "targets": ["10.1.32.215:7000", "*:8000"],
         "labels": {
-            "some-key": "some-value"
+            "some_key": "some-value"
         }
     }
 ]
@@ -151,7 +151,7 @@ def __init__(self, *args):
     {
         "targets": ["*:7000"],
         "labels": {
-            "some-key": "some-value"
+            "some_key": "some-value"
         }
     }
 ]
@@ -163,7 +163,7 @@ def __init__(self, *args):
     {
         "targets": ["*:8000"],
         "labels": {
-            "some-other-key": "some-other-value"
+            "some_other_key": "some-other-value"
         }
     }
 ]
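The three docstring examples above switch from hyphenated label keys to underscored ones because Prometheus label names must match the pattern `[a-zA-Z_][a-zA-Z0-9_]*`: a hyphen is not a legal character in a label name.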
@@ -368,7 +368,7 @@ def _on_scrape_targets_changed(self, event):
 
 # Increment this PATCH version before using `charmcraft publish-lib` or reset
 # to 0 if you are raising the major API version
-LIBPATCH = 32
+LIBPATCH = 33
 
 logger = logging.getLogger(__name__)
 
@@ -686,10 +686,27 @@ def restore(self, snapshot):
         self.errors = snapshot["errors"]
 
 
+class InvalidScrapeJobEvent(EventBase):
+    """Event emitted when scrape jobs are not valid."""
+
+    def __init__(self, handle, errors: str = ""):
+        super().__init__(handle)
+        self.errors = errors
+
+    def snapshot(self) -> Dict:
+        """Save error information."""
+        return {"errors": self.errors}
+
+    def restore(self, snapshot):
+        """Restore error information."""
+        self.errors = snapshot["errors"]
+
+
 class MetricsEndpointProviderEvents(ObjectEvents):
     """Events raised by :class:`InvalidAlertRuleEvent`s."""
 
     alert_rule_status_changed = EventSource(InvalidAlertRuleEvent)
+    invalid_scrape_job = EventSource(InvalidScrapeJobEvent)
 
 
 def _type_convert_stored(obj):
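The new event class deliberately mirrors `InvalidAlertRuleEvent` directly above it: `snapshot` and `restore` round-trip the error string through the ops framework's storage, so the message survives if the event is deferred and later re-emitted from persisted state.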
@@ -1119,7 +1136,18 @@ def jobs(self) -> list:
         for relation in self._charm.model.relations[self._relation_name]:
             static_scrape_jobs = self._static_scrape_config(relation)
             if static_scrape_jobs:
-                scrape_jobs.extend(static_scrape_jobs)
+                # Duplicate job names will cause validate_scrape_jobs to fail.
+                # Therefore we need to dedupe here and after all jobs are collected.
+                static_scrape_jobs = _dedupe_job_names(static_scrape_jobs)
+                try:
+                    self._tool.validate_scrape_jobs(static_scrape_jobs)
+                except subprocess.CalledProcessError as e:
+                    if self._charm.unit.is_leader():
+                        data = json.loads(relation.data[self._charm.app].get("event", "{}"))
+                        data["scrape_job_errors"] = str(e)
+                        relation.data[self._charm.app]["event"] = json.dumps(data)
+                else:
+                    scrape_jobs.extend(static_scrape_jobs)
 
         scrape_jobs = _dedupe_job_names(scrape_jobs)
 
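Rather than silently dropping an invalid job, the consuming charm's leader unit (only the leader may write application relation data in Juju) records the failure under the relation's `event` key, e.g. `{"scrape_job_errors": "..."}`. The `MetricsEndpointProvider._on_relation_changed` handler further down reads that key back and re-emits it as an `invalid_scrape_job` event on the charm that supplied the jobs.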
@@ -1198,13 +1226,18 @@ def alerts(self) -> dict:
                 )
                 continue
 
-            alerts[identifier] = alert_rules
-
             _, errmsg = self._tool.validate_alert_rules(alert_rules)
             if errmsg:
-                relation.data[self._charm.app]["event"] = json.dumps({"errors": errmsg})
                 if alerts[identifier]:
                     del alerts[identifier]
+                if self._charm.unit.is_leader():
+                    data = json.loads(relation.data[self._charm.app].get("event", "{}"))
+                    data["errors"] = errmsg
+                    relation.data[self._charm.app]["event"] = json.dumps(data)
                 continue
 
+            alerts[identifier] = alert_rules
+
         return alerts
 
     def _get_identifier_by_alert_rules(
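Alert-rule errors now get the same treatment as scrape-job errors: instead of overwriting the relation's `event` payload with a fresh `{"errors": ...}` dict, the leader merges the message into whatever payload is already present, so one relation can report alert-rule and scrape-job problems side by side, and rules are only added to the returned dict once they pass validation.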
@@ -1664,6 +1697,10 @@ def _on_relation_changed(self, event):
         else:
             self.on.alert_rule_status_changed.emit(valid=valid, errors=errors)
 
+        scrape_errors = ev.get("scrape_job_errors", None)
+        if scrape_errors:
+            self.on.invalid_scrape_job.emit(errors=scrape_errors)
+
     def update_scrape_job_spec(self, jobs):
         """Update scrape job specification."""
         self._jobs = PrometheusConfig.sanitize_scrape_configs(jobs)
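A charm that instantiates `MetricsEndpointProvider` can now react to rejected scrape jobs. A minimal sketch, assuming a hypothetical `MyAppCharm` with the library's default relation name; only the `invalid_scrape_job` event and its `errors` attribute come from this change:

from ops.charm import CharmBase
from ops.main import main
from ops.model import BlockedStatus

from charms.prometheus_k8s.v0.prometheus_scrape import MetricsEndpointProvider


class MyAppCharm(CharmBase):
    """Hypothetical charm exposing a metrics endpoint."""

    def __init__(self, *args):
        super().__init__(*args)
        self.metrics_endpoint = MetricsEndpointProvider(self)
        # Surface scrape-job validation failures reported back over the relation.
        self.framework.observe(
            self.metrics_endpoint.on.invalid_scrape_job, self._on_invalid_scrape_job
        )

    def _on_invalid_scrape_job(self, event):
        # event.errors carries the stringified CalledProcessError from cos-tool.
        self.unit.status = BlockedStatus("invalid scrape job: {}".format(event.errors))


if __name__ == "__main__":
    main(MyAppCharm)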
@@ -2473,6 +2510,22 @@ def validate_alert_rules(self, rules: dict) -> Tuple[bool, str]:
             ]
         )
 
+    def validate_scrape_jobs(self, jobs: list) -> bool:
+        """Validate scrape jobs using cos-tool."""
+        if not self.path:
+            logger.debug("`cos-tool` unavailable. Not validating scrape jobs.")
+            return True
+        conf = {"scrape_configs": jobs}
+        with tempfile.NamedTemporaryFile() as tmpfile:
+            with open(tmpfile.name, "w") as f:
+                f.write(yaml.safe_dump(conf))
+            try:
+                self._exec([str(self.path), "validate-config", tmpfile.name])
+            except subprocess.CalledProcessError as e:
+                logger.error("Validating scrape jobs failed: {}".format(e.output))
+                raise
+        return True
+
     def inject_label_matchers(self, expression, topology) -> str:
         """Add label matchers to an expression."""
         if not topology:
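For reference, a minimal standalone sketch of what `validate_scrape_jobs` does under the hood, assuming a `cos-tool` binary at `./cos-tool`; the binary path, the example job, and the deliberately bad `scrape_interval` are illustrative only:

import subprocess
import tempfile

import yaml

# "5m" would be a valid Prometheus duration; "5 minutes" is not, so
# `cos-tool validate-config` exits non-zero and raises CalledProcessError.
conf = {
    "scrape_configs": [
        {
            "job_name": "my_job",
            "scrape_interval": "5 minutes",
            "static_configs": [{"targets": ["localhost:9090"]}],
        }
    ]
}

with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml") as f:
    yaml.safe_dump(conf, f)
    f.flush()
    try:
        subprocess.run(
            ["./cos-tool", "validate-config", f.name],
            check=True, capture_output=True, text=True,
        )
        print("scrape config is valid")
    except subprocess.CalledProcessError as e:
        print("validation failed:", e.output)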
