diff --git a/.env-dist b/.env-dist index 5e441c4ed95..daca58ec3ff 100644 --- a/.env-dist +++ b/.env-dist @@ -65,6 +65,8 @@ BYTES_DB_URI=postgresql://${BYTES_DB_USER}:${BYTES_DB_PASSWORD}@postgres:5432/${ # --- Octopoes --- # # See `octopoes/octopoes/config/settings.py` +# Number of Celery workers (for the Octopoes API worker) that need to be started +CELERY_WORKER_CONCURRENCY=${CELERY_WORKER_CONCURRENCY:-4} # --- Mula --- # # See `mula/scheduler/config/settings.py` diff --git a/.gitignore b/.gitignore index cbf5265286c..bce85366c2c 100644 --- a/.gitignore +++ b/.gitignore @@ -444,3 +444,4 @@ nl-kat-* /boefjes/boefjes/plugins/kat_rpki/rpki-meta.json *.pstat +**/.cache* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 77f9b918082..99f38a8cfc6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -76,7 +76,7 @@ repos: rev: 1.16.0 hooks: - id: django-upgrade - args: [--target-version, "4.2"] + args: [--target-version, "5.0"] - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.9.0 @@ -91,11 +91,13 @@ repos: - httpx - types-python-dateutil - types-requests + - types-croniter exclude: | (?x)( ^boefjes/tools | ^keiko/templates | ^mula/whitelist\.py$ | + ^mula/scripts | ^octopoes/tools | ^rocky/whitelist\.py$ | /tests/ | @@ -108,7 +110,7 @@ repos: hooks: - id: codespell additional_dependencies: ["tomli"] - args: [-L, lama] + args: ["-L", "lama", "--ignore-regex", ".{1024}|.*codespell-ignore.*"] exclude: | (?x)( \.po$ | diff --git a/boefjes/.ci/.env.test b/boefjes/.ci/.env.test index 88a4030008b..896206d75ad 100644 --- a/boefjes/.ci/.env.test +++ b/boefjes/.ci/.env.test @@ -1,11 +1,42 @@ POSTGRES_USER=postgres POSTGRES_PASSWORD=postgres -POSTGRES_DB=ci_katalogus +POSTGRES_DB=test -KATALOGUS_DB_URI=postgresql://postgres:postgres@ci_katalogus-db:5432/ci_katalogus +KATALOGUS_DB_URI=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@ci_katalogus-db:5432/${POSTGRES_DB} CI=1 -ENCRYPTION_MIDDLEWARE=NACL_SEALBOX 
+BOEFJES_ENCRYPTION_MIDDLEWARE=NACL_SEALBOX KATALOGUS_PRIVATE_KEY_B64=Vpb0g34rGFbnoUuiSjkFr8TKh278AViSJEdjII5DvQY= KATALOGUS_PUBLIC_KEY_B64=iR/vPrBVrx0LXOiwK6DMB3QCggjzQXDtj/hyVK7mpy8= +BOEFJES_API=http://placeholder:1234 + + +# Benchmark setup +RABBITMQ_DEFAULT_VHOST=kat +RABBITMQ_DEFAULT_USER=ci_user +RABBITMQ_DEFAULT_PASS=ci_pass + +QUEUE_URI=amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@ci_rabbitmq:5672/${RABBITMQ_DEFAULT_VHOST} + +KATALOGUS_API=http://ci_katalogus:8080 +OCTOPOES_API=http://ci_octopoes:80 +XTDB_URI=http://ci_xtdb:3000 +BYTES_API=http://ci_bytes:8000 +SCHEDULER_API=http://placeholder:8000 + +CI=1 + +# CI Bytes configuration + +BYTES_SECRET=3d54f6e4e65723aa678d17d8fd22aba5234136d3e44e5a77305dedaa8ce13f45 +BYTES_ACCESS_TOKEN_EXPIRE_MINUTES=1000 +BYTES_USERNAME=test +BYTES_PASSWORD=secret +BYTES_ENCRYPTION_MIDDLEWARE=IDENTITY + +BYTES_DB_URI=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@ci_bytes-db:5432/${POSTGRES_DB} +BYTES_LOG_FILE=/var/log/bytes-test.log +BYTES_FILE_PERMISSION=555 + +BYTES_METRICS_TTL_SECONDS=0 diff --git a/boefjes/.ci/docker-compose.yml b/boefjes/.ci/docker-compose.yml index 0ed9b4c14dc..f3dc4ecc244 100644 --- a/boefjes/.ci/docker-compose.yml +++ b/boefjes/.ci/docker-compose.yml @@ -5,9 +5,10 @@ services: dockerfile: boefjes/Dockerfile args: - ENVIRONMENT=dev - command: sh -c 'python -m pytest -v boefjes/katalogus/tests/integration' + command: sh -c 'python -m pytest -v tests/integration' depends_on: - ci_katalogus-db + - ci_katalogus env_file: - .ci/.env.test volumes: @@ -17,3 +18,101 @@ services: image: docker.io/library/postgres:15 env_file: - .ci/.env.test + + migration_bench: + build: + context: .. 
+ dockerfile: boefjes/Dockerfile + args: + - ENVIRONMENT=dev + command: bash -c "python -m cProfile -o .ci/bench_$(date +%Y_%m_%d-%H:%M:%S).pstat -m pytest -v -m slow tests/integration" + depends_on: + - ci_bytes + - ci_octopoes + - ci_katalogus-db + env_file: + - .ci/.env.test + volumes: + - .:/app/boefjes + environment: + - DATABASE_MIGRATION=1 + + ci_bytes: + build: + context: ../bytes + args: + ENVIRONMENT: dev + command: uvicorn bytes.api:app --host 0.0.0.0 + depends_on: + ci_rabbitmq: + condition: service_healthy + ci_bytes-db: + condition: service_started + env_file: + - .ci/.env.test + environment: + - DATABASE_MIGRATION=1 + + ci_bytes-db: + image: docker.io/library/postgres:15 + env_file: + - .ci/.env.test + + ci_octopoes: + build: + context: ../octopoes + command: uvicorn octopoes.api.api:app --host 0.0.0.0 --port 80 + depends_on: + ci_rabbitmq: + condition: service_healthy + ci_xtdb: + condition: service_started + ci_katalogus: + condition: service_started + ci_octopoes_api_worker: + condition: service_started + env_file: + - .ci/.env.test + + ci_rabbitmq: + restart: on-failure + image: "docker.io/library/rabbitmq:3.12-management" + healthcheck: + test: ["CMD", "rabbitmqctl", "status"] + interval: 5s + retries: 4 + env_file: + - .ci/.env.test + + ci_xtdb: + image: "ghcr.io/dekkers/xtdb-http-multinode:v1.0.8" + + ci_octopoes_api_worker: + build: + context: ../octopoes + command: celery -A octopoes.tasks.tasks worker -E --loglevel=INFO + depends_on: + ci_rabbitmq: + condition: service_healthy + ci_xtdb: + condition: service_started + env_file: + - .ci/.env.test + ulimits: + nofile: + soft: 262144 + hard: 262144 + + ci_katalogus: + build: + context: .. 
+ dockerfile: boefjes/Dockerfile + args: + - ENVIRONMENT=dev + command: uvicorn boefjes.katalogus.root:app --host 0.0.0.0 --port 8080 + depends_on: + - ci_katalogus-db + env_file: + - .ci/.env.test + volumes: + - .:/app/boefjes diff --git a/boefjes/Dockerfile b/boefjes/Dockerfile index c0a75319776..b38ca57e307 100644 --- a/boefjes/Dockerfile +++ b/boefjes/Dockerfile @@ -6,8 +6,8 @@ ARG USER_GID=1000 ENTRYPOINT ["/app/boefjes/entrypoint.sh"] -RUN groupadd --gid $USER_GID nonroot -RUN adduser --disabled-password --gecos '' --uid $USER_UID --gid $USER_GID nonroot +RUN groupadd --gid "$USER_GID" nonroot +RUN adduser --disabled-password --gecos '' --uid "$USER_UID" --gid "$USER_GID" nonroot WORKDIR /app/boefjes ENV PATH=/home/nonroot/.local/bin:${PATH} @@ -20,10 +20,10 @@ RUN --mount=type=cache,target=/root/.cache \ pip install --upgrade pip \ && if [ "$ENVIRONMENT" = "dev" ]; \ then \ - grep -v git+https:// requirements-dev.txt | pip install -r /dev/stdin ; \ + grep -v git+https:// requirements-dev.txt | pip install -r /dev/stdin && \ grep git+https:// requirements-dev.txt | pip install -r /dev/stdin ; \ else \ - grep -v git+https:// requirements.txt | pip install -r /dev/stdin ;\ + grep -v git+https:// requirements.txt | pip install -r /dev/stdin && \ grep git+https:// requirements.txt | pip install -r /dev/stdin ; \ fi diff --git a/boefjes/Makefile b/boefjes/Makefile index 579543e3926..09e5fc21b47 100644 --- a/boefjes/Makefile +++ b/boefjes/Makefile @@ -77,6 +77,12 @@ itest: ## Run the integration tests. $(ci-docker-compose) run --rm katalogus_integration $(ci-docker-compose) down +bench: ## Run the report benchmark. 
+ $(ci-docker-compose) build + $(ci-docker-compose) down --remove-orphans + $(ci-docker-compose) run --rm migration_bench + $(ci-docker-compose) stop + debian12: docker run --rm \ --env PKG_NAME=kat-boefjes \ diff --git a/boefjes/boefjes/__main__.py b/boefjes/boefjes/__main__.py index e9621de3e0e..a5572253d49 100644 --- a/boefjes/boefjes/__main__.py +++ b/boefjes/boefjes/__main__.py @@ -2,6 +2,7 @@ import logging.config import click +import structlog from boefjes.app import get_runtime_manager from boefjes.config import settings @@ -10,7 +11,27 @@ with settings.log_cfg.open() as f: logging.config.dictConfig(json.load(f)) -logger = logging.getLogger(__name__) +structlog.configure( + processors=[ + structlog.contextvars.merge_contextvars, + structlog.processors.add_log_level, + structlog.processors.StackInfoRenderer(), + structlog.dev.set_exc_info, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.processors.TimeStamper("iso", utc=False), + ( + structlog.dev.ConsoleRenderer(colors=True, pad_level=False) + if settings.logging_format == "text" + else structlog.processors.JSONRenderer() + ), + ], + context_class=dict, + logger_factory=structlog.stdlib.LoggerFactory(), + wrapper_class=structlog.stdlib.BoundLogger, + cache_logger_on_first_use=True, +) + +logger = structlog.get_logger(__name__) @click.command() diff --git a/boefjes/boefjes/api.py b/boefjes/boefjes/api.py index c26f64e7979..22542898dfd 100644 --- a/boefjes/boefjes/api.py +++ b/boefjes/boefjes/api.py @@ -1,10 +1,10 @@ import base64 -import logging import multiprocessing from datetime import datetime, timezone from enum import Enum from uuid import UUID +import structlog from fastapi import Depends, FastAPI, HTTPException, Response from httpx import HTTPError, HTTPStatusError from pydantic import BaseModel, ConfigDict, Field @@ -13,15 +13,15 @@ from boefjes.clients.bytes_client import BytesAPIClient from boefjes.clients.scheduler_client import SchedulerAPIClient, TaskStatus from boefjes.config 
import settings -from boefjes.job_handler import get_environment_settings, get_octopoes_api_connector, serialize_ooi +from boefjes.job_handler import get_environment_settings, get_octopoes_api_connector from boefjes.job_models import BoefjeMeta -from boefjes.katalogus.local_repository import LocalPluginRepository, get_local_repository +from boefjes.local_repository import LocalPluginRepository, get_local_repository from boefjes.plugins.models import _default_mime_types from octopoes.models import Reference from octopoes.models.exception import ObjectNotFoundException app = FastAPI(title="Boefje API") -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) class UvicornServer(multiprocessing.Process): @@ -85,7 +85,7 @@ async def root(): @app.get("/api/v0/tasks/{task_id}", response_model=BoefjeInput) -async def boefje_input( +def boefje_input( task_id: UUID, scheduler_client: SchedulerAPIClient = Depends(get_scheduler_client), local_repository: LocalPluginRepository = Depends(get_local_repository), @@ -102,7 +102,7 @@ async def boefje_input( @app.post("/api/v0/tasks/{task_id}") -async def boefje_output( +def boefje_output( task_id: UUID, boefje_output: BoefjeOutput, scheduler_client: SchedulerAPIClient = Depends(get_scheduler_client), @@ -122,7 +122,7 @@ async def boefje_output( bytes_client.save_boefje_meta(boefje_meta) if boefje_output.files: - mime_types = _default_mime_types(task.p_item.data.boefje) + mime_types = _default_mime_types(task.data.boefje) for file in boefje_output.files: raw = base64.b64decode(file.content) # when supported, also save file.name to Bytes @@ -149,13 +149,12 @@ def get_task(task_id, scheduler_client): def create_boefje_meta(task, local_repository): - boefje = task.p_item.data.boefje + boefje = task.data.boefje boefje_resource = local_repository.by_id(boefje.id) - env_keys = boefje_resource.environment_keys - environment = get_environment_settings(task.p_item.data, env_keys) if env_keys else {} + environment = 
get_environment_settings(task.data, boefje_resource.schema) - organization = task.p_item.data.organization - input_ooi = task.p_item.data.input_ooi + organization = task.data.organization + input_ooi = task.data.input_ooi arguments = {"oci_arguments": boefje_resource.oci_arguments} if input_ooi: @@ -165,7 +164,7 @@ def create_boefje_meta(task, local_repository): except ObjectNotFoundException as e: raise ObjectNotFoundException(f"Object {reference} not found in Octopoes") from e - arguments["input"] = serialize_ooi(ooi) + arguments["input"] = ooi.serialize() boefje_meta = BoefjeMeta( id=task.id, diff --git a/boefjes/boefjes/app.py b/boefjes/boefjes/app.py index 6734395a6cd..731cbd7e19c 100644 --- a/boefjes/boefjes/app.py +++ b/boefjes/boefjes/app.py @@ -1,4 +1,3 @@ -import logging import multiprocessing as mp import os import signal @@ -6,22 +5,18 @@ import time from queue import Queue +import structlog from httpx import HTTPError from pydantic import ValidationError -from boefjes.clients.scheduler_client import ( - QueuePrioritizedItem, - SchedulerAPIClient, - SchedulerClientInterface, - TaskStatus, -) +from boefjes.clients.scheduler_client import SchedulerAPIClient, SchedulerClientInterface, Task, TaskStatus from boefjes.config import Settings from boefjes.job_handler import BoefjeHandler, NormalizerHandler, bytes_api_client -from boefjes.katalogus.local_repository import get_local_repository from boefjes.local import LocalBoefjeJobRunner, LocalNormalizerJobRunner +from boefjes.local_repository import get_local_repository from boefjes.runtime_interfaces import Handler, WorkerManager -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) class SchedulerWorkerManager(WorkerManager): @@ -192,7 +187,7 @@ def exit(self, queue_type: WorkerManager.Queue, signum: int | None = None): logger.info("Received %s, exiting", signal.Signals(signum).name) if not self.task_queue.empty(): - items: list[QueuePrioritizedItem] = [self.task_queue.get() for _ in 
range(self.task_queue.qsize())] + items: list[Task] = [self.task_queue.get() for _ in range(self.task_queue.qsize())] for p_item in items: try: diff --git a/boefjes/boefjes/clients/bytes_client.py b/boefjes/boefjes/clients/bytes_client.py index bfa91ecf9db..b7b66bbc272 100644 --- a/boefjes/boefjes/clients/bytes_client.py +++ b/boefjes/boefjes/clients/bytes_client.py @@ -1,16 +1,18 @@ -import logging import typing +import uuid +from base64 import b64encode from collections.abc import Callable, Set from functools import wraps from typing import Any from uuid import UUID +import structlog from httpx import Client, HTTPStatusError, HTTPTransport, Response from boefjes.job_models import BoefjeMeta, NormalizerMeta, RawDataMeta BYTES_API_CLIENT_VERSION = "0.3" -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) ClientSessionMethod = Callable[..., Any] @@ -89,19 +91,34 @@ def save_normalizer_meta(self, normalizer_meta: NormalizerMeta) -> None: self._verify_response(response) + @retry_with_login + def get_normalizer_meta(self, normalizer_meta_id: uuid.UUID) -> NormalizerMeta: + response = self._session.get(f"/bytes/normalizer_meta/{normalizer_meta_id}", headers=self.headers) + self._verify_response(response) + + return NormalizerMeta.model_validate_json(response.content) + @retry_with_login def save_raw(self, boefje_meta_id: str, raw: str | bytes, mime_types: Set[str] = frozenset()) -> UUID: - headers = {"content-type": "application/octet-stream"} - headers.update(self.headers) + file_name = "raw" # The name provides a key for all ids returned, so this is arbitrary as we only upload 1 file + response = self._session.post( "/bytes/raw", - content=raw, - headers=headers, - params={"mime_types": list(mime_types), "boefje_meta_id": boefje_meta_id}, + json={ + "files": [ + { + "name": file_name, + "content": b64encode(raw if isinstance(raw, bytes) else raw.encode()).decode(), + "tags": list(mime_types), + } + ] + }, + headers=self.headers, + 
params={"boefje_meta_id": str(boefje_meta_id)}, ) - self._verify_response(response) - return UUID(response.json()["id"]) + + return UUID(response.json()[file_name]) @retry_with_login def get_raw(self, raw_data_id: str) -> bytes: diff --git a/boefjes/boefjes/clients/scheduler_client.py b/boefjes/boefjes/clients/scheduler_client.py index be7c04f8d26..5e07d83d0be 100644 --- a/boefjes/boefjes/clients/scheduler_client.py +++ b/boefjes/boefjes/clients/scheduler_client.py @@ -1,5 +1,4 @@ import datetime -import logging import uuid from enum import Enum @@ -8,26 +7,12 @@ from boefjes.job_models import BoefjeMeta, NormalizerMeta -logger = logging.getLogger(__name__) - class Queue(BaseModel): id: str size: int -class QueuePrioritizedItem(BaseModel): - """Representation of a queue.PrioritizedItem on the priority queue. Used - for unmarshalling of priority queue prioritized items to a JSON - representation. - """ - - id: uuid.UUID - priority: int - hash: str | None = None - data: BoefjeMeta | NormalizerMeta - - class TaskStatus(Enum): """Status of a task.""" @@ -37,14 +22,18 @@ class TaskStatus(Enum): RUNNING = "running" COMPLETED = "completed" FAILED = "failed" + CANCELLED = "cancelled" class Task(BaseModel): id: uuid.UUID scheduler_id: str - type: str - p_item: QueuePrioritizedItem + schedule_id: str | None + priority: int status: TaskStatus + type: str + hash: str | None = None + data: BoefjeMeta | NormalizerMeta created_at: datetime.datetime modified_at: datetime.datetime @@ -53,7 +42,7 @@ class SchedulerClientInterface: def get_queues(self) -> list[Queue]: raise NotImplementedError() - def pop_item(self, queue: str) -> QueuePrioritizedItem | None: + def pop_item(self, queue: str) -> Task | None: raise NotImplementedError() def patch_task(self, task_id: uuid.UUID, status: TaskStatus) -> None: @@ -62,7 +51,7 @@ def patch_task(self, task_id: uuid.UUID, status: TaskStatus) -> None: def get_task(self, task_id: uuid.UUID) -> Task: raise NotImplementedError() - def 
push_item(self, queue_id: str, p_item: QueuePrioritizedItem) -> None: + def push_item(self, queue_id: str, p_item: Task) -> None: raise NotImplementedError() @@ -80,13 +69,13 @@ def get_queues(self) -> list[Queue]: return TypeAdapter(list[Queue]).validate_json(response.content) - def pop_item(self, queue: str) -> QueuePrioritizedItem | None: + def pop_item(self, queue: str) -> Task | None: response = self._session.post(f"/queues/{queue}/pop") self._verify_response(response) - return TypeAdapter(QueuePrioritizedItem | None).validate_json(response.content) + return TypeAdapter(Task | None).validate_json(response.content) - def push_item(self, queue_id: str, p_item: QueuePrioritizedItem) -> None: + def push_item(self, queue_id: str, p_item: Task) -> None: response = self._session.post(f"/queues/{queue_id}/push", content=p_item.json()) self._verify_response(response) diff --git a/boefjes/boefjes/config.py b/boefjes/boefjes/config.py index 353cfed9368..79f5655e093 100644 --- a/boefjes/boefjes/config.py +++ b/boefjes/boefjes/config.py @@ -1,13 +1,13 @@ import logging import os from pathlib import Path -from typing import Any +from typing import Any, Literal from pydantic import AmqpDsn, AnyHttpUrl, Field, FilePath, IPvAnyAddress, PostgresDsn, conint from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict from pydantic_settings.sources import EnvSettingsSource -from boefjes.katalogus.models import EncryptionMiddleware +from boefjes.models import EncryptionMiddleware BASE_DIR: Path = Path(__file__).parent.resolve() @@ -131,6 +131,8 @@ class Settings(BaseSettings): None, description="OpenTelemetry endpoint", validation_alias="SPAN_EXPORT_GRPC_ENDPOINT" ) + logging_format: Literal["text", "json"] = Field("text", description="Logging format") + model_config = SettingsConfigDict(env_prefix="BOEFJES_") @classmethod diff --git a/boefjes/boefjes/katalogus/api/__init__.py b/boefjes/boefjes/dependencies/__init__.py similarity index 100% rename 
from boefjes/boefjes/katalogus/api/__init__.py rename to boefjes/boefjes/dependencies/__init__.py diff --git a/boefjes/boefjes/katalogus/dependencies/encryption.py b/boefjes/boefjes/dependencies/encryption.py similarity index 100% rename from boefjes/boefjes/katalogus/dependencies/encryption.py rename to boefjes/boefjes/dependencies/encryption.py diff --git a/boefjes/boefjes/katalogus/dependencies/plugins.py b/boefjes/boefjes/dependencies/plugins.py similarity index 63% rename from boefjes/boefjes/katalogus/dependencies/plugins.py rename to boefjes/boefjes/dependencies/plugins.py index d254afa8ca7..080ff6d7da7 100644 --- a/boefjes/boefjes/katalogus/dependencies/plugins.py +++ b/boefjes/boefjes/dependencies/plugins.py @@ -1,63 +1,58 @@ import contextlib -import logging from collections.abc import Iterator from pathlib import Path from typing import Literal +import structlog from fastapi import Query from jsonschema.exceptions import ValidationError from jsonschema.validators import validate from sqlalchemy.orm import Session -from boefjes.katalogus.local_repository import LocalPluginRepository, get_local_repository -from boefjes.katalogus.models import Boefje, FilterParameters, Normalizer, PaginationParameters, PluginType -from boefjes.katalogus.storage.interfaces import ( +from boefjes.local_repository import LocalPluginRepository, get_local_repository +from boefjes.models import Boefje, FilterParameters, Normalizer, PaginationParameters, PluginType +from boefjes.sql.config_storage import create_config_storage +from boefjes.sql.db import session_managed_iterator +from boefjes.sql.plugin_storage import create_plugin_storage +from boefjes.storage.interfaces import ( + ConfigStorage, ExistingPluginId, NotFound, - PluginEnabledStorage, + PluginNotFound, PluginStorage, SettingsNotConformingToSchema, - SettingsStorage, ) -from boefjes.sql.db import session_managed_iterator -from boefjes.sql.plugin_enabled_storage import create_plugin_enabled_storage -from 
boefjes.sql.plugin_storage import create_plugin_storage -from boefjes.sql.setting_storage import create_setting_storage -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) class PluginService: def __init__( self, plugin_storage: PluginStorage, - plugin_enabled_store: PluginEnabledStorage, - settings_storage: SettingsStorage, + config_storage: ConfigStorage, local_repo: LocalPluginRepository, ): self.plugin_storage = plugin_storage - self.plugin_enabled_store = plugin_enabled_store - self.settings_storage = settings_storage + self.config_storage = config_storage self.local_repo = local_repo def __enter__(self): - self.plugin_enabled_store.__enter__() self.plugin_storage.__enter__() - self.settings_storage.__enter__() + self.config_storage.__enter__() return self def __exit__(self, exc_type, exc_val, exc_tb): - self.plugin_enabled_store.__exit__(exc_type, exc_val, exc_tb) self.plugin_storage.__exit__(exc_type, exc_val, exc_tb) - self.settings_storage.__exit__(exc_type, exc_val, exc_tb) + self.config_storage.__exit__(exc_type, exc_val, exc_tb) def get_all(self, organisation_id: str) -> list[PluginType]: - all_plugins = self.get_all_without_enabled() + all_plugins = self._get_all_without_enabled() return [self._set_plugin_enabled(plugin, organisation_id) for plugin in all_plugins.values()] - def get_all_without_enabled(self): + def _get_all_without_enabled(self) -> dict[str, PluginType]: all_plugins = {plugin.id: plugin for plugin in self.local_repo.get_all()} for plugin in self.plugin_storage.get_all(): @@ -88,24 +83,25 @@ def by_plugin_ids(self, plugin_ids: list[str], organisation_id: str) -> list[Plu return found_plugins def get_all_settings(self, organisation_id: str, plugin_id: str): - return self.settings_storage.get_all(organisation_id, plugin_id) + return self.config_storage.get_all_settings(organisation_id, plugin_id) def clone_settings_to_organisation(self, from_organisation: str, to_organisation: str): # One requirement is that 
only boefjes enabled in the from_organisation end up being enabled for the target. - for plugin_id in self.plugin_enabled_store.get_all_enabled(to_organisation): + for plugin_id in self.config_storage.get_enabled_boefjes(to_organisation): self.set_enabled_by_id(plugin_id, to_organisation, enabled=False) for plugin in self.get_all(from_organisation): if all_settings := self.get_all_settings(from_organisation, plugin.id): self.upsert_settings(all_settings, to_organisation, plugin.id) - for plugin_id in self.plugin_enabled_store.get_all_enabled(from_organisation): + for plugin_id in self.config_storage.get_enabled_boefjes(from_organisation): self.set_enabled_by_id(plugin_id, to_organisation, enabled=True) - def upsert_settings(self, values: dict, organisation_id: str, plugin_id: str): - self._assert_settings_match_schema(values, organisation_id, plugin_id) + def upsert_settings(self, settings: dict, organisation_id: str, plugin_id: str): + self._assert_settings_match_schema(settings, plugin_id) + self._put_boefje(plugin_id) - return self.settings_storage.upsert(values, organisation_id, plugin_id) + return self.config_storage.upsert(organisation_id, plugin_id, settings=settings) def create_boefje(self, boefje: Boefje) -> None: try: @@ -121,18 +117,48 @@ def create_normalizer(self, normalizer: Normalizer) -> None: except KeyError: self.plugin_storage.create_normalizer(normalizer) - def delete_settings(self, organisation_id: str, plugin_id: str): - self.settings_storage.delete(organisation_id, plugin_id) + def _put_boefje(self, boefje_id: str) -> None: + """Check existence of a boefje, and insert a database entry if it concerns a local boefje""" + + try: + self.plugin_storage.boefje_by_id(boefje_id) + except PluginNotFound as e: + try: + plugin = self.local_repo.by_id(boefje_id) + except KeyError: + raise e + + if plugin.type != "boefje": + raise e + self.plugin_storage.create_boefje(plugin) + + def _put_normalizer(self, normalizer_id: str) -> None: + """Check existence 
of a normalizer, and insert a database entry if it concerns a local normalizer""" try: - self._assert_settings_match_schema({}, organisation_id, plugin_id) - except SettingsNotConformingToSchema: - logger.warning("Making sure %s is disabled for %s because settings are deleted", plugin_id, organisation_id) + self.plugin_storage.normalizer_by_id(normalizer_id) + except PluginNotFound: + try: + plugin = self.local_repo.by_id(normalizer_id) + except KeyError: + raise + + if plugin.type != "normalizer": + raise + self.plugin_storage.create_normalizer(plugin) - self.set_enabled_by_id(plugin_id, organisation_id, False) + def delete_settings(self, organisation_id: str, plugin_id: str): + self.config_storage.delete(organisation_id, plugin_id) + + # We don't check the schema anymore because we can provide entries through the global environment as well def schema(self, plugin_id: str) -> dict | None: - return self.local_repo.schema(plugin_id) + try: + boefje = self.plugin_storage.boefje_by_id(plugin_id) + + return boefje.schema + except PluginNotFound: + return self.local_repo.schema(plugin_id) def cover(self, plugin_id: str) -> Path: try: @@ -153,28 +179,27 @@ def description(self, plugin_id: str, organisation_id: str) -> str: return "" def set_enabled_by_id(self, plugin_id: str, organisation_id: str, enabled: bool): - if enabled: - all_settings = self.settings_storage.get_all(organisation_id, plugin_id) - self._assert_settings_match_schema(all_settings, organisation_id, plugin_id) - - self.plugin_enabled_store.update_or_create_by_id( - plugin_id, - enabled, - organisation_id, - ) + # We don't check the schema anymore because we can provide entries through the global environment as well + + try: + self._put_boefje(plugin_id) + except PluginNotFound: + self._put_normalizer(plugin_id) + + self.config_storage.upsert(organisation_id, plugin_id, enabled=enabled) - def _assert_settings_match_schema(self, all_settings: dict, organisation_id: str, plugin_id: str): + def 
_assert_settings_match_schema(self, all_settings: dict, plugin_id: str): schema = self.schema(plugin_id) if schema: # No schema means that there is nothing to assert try: validate(instance=all_settings, schema=schema) except ValidationError as e: - raise SettingsNotConformingToSchema(organisation_id, plugin_id, e.message) from e + raise SettingsNotConformingToSchema(plugin_id, e.message) from e def _set_plugin_enabled(self, plugin: PluginType, organisation_id: str) -> PluginType: with contextlib.suppress(KeyError, NotFound): - plugin.enabled = self.plugin_enabled_store.get_by_id(plugin.id, organisation_id) + plugin.enabled = self.config_storage.is_enabled_by_id(plugin.id, organisation_id) return plugin @@ -183,8 +208,7 @@ def get_plugin_service(organisation_id: str) -> Iterator[PluginService]: def closure(session: Session): return PluginService( create_plugin_storage(session), - create_plugin_enabled_storage(session), - create_setting_storage(session), + create_config_storage(session), get_local_repository(), ) diff --git a/boefjes/boefjes/docker_boefjes_runner.py b/boefjes/boefjes/docker_boefjes_runner.py index 8375c57a1be..f28c64055ae 100644 --- a/boefjes/boefjes/docker_boefjes_runner.py +++ b/boefjes/boefjes/docker_boefjes_runner.py @@ -1,7 +1,7 @@ -import logging from datetime import datetime, timezone import docker +import structlog from docker.errors import APIError, ContainerError, ImageNotFound from httpx import HTTPError @@ -9,9 +9,9 @@ from boefjes.clients.scheduler_client import SchedulerAPIClient, TaskStatus from boefjes.config import settings from boefjes.job_models import BoefjeMeta -from boefjes.katalogus.models import Boefje +from boefjes.models import Boefje -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) class DockerBoefjesRunner: diff --git a/boefjes/boefjes/job_handler.py b/boefjes/boefjes/job_handler.py index 54f45052458..7ad0f7247ad 100644 --- a/boefjes/boefjes/job_handler.py +++ b/boefjes/boefjes/job_handler.py 
@@ -1,29 +1,31 @@ -import logging import os import traceback from collections.abc import Callable from datetime import datetime, timezone -from enum import Enum -from typing import Any, cast +from typing import cast import httpx +import structlog from httpx import HTTPError +from jsonschema.exceptions import ValidationError +from jsonschema.validators import validate from boefjes.clients.bytes_client import BytesAPIClient from boefjes.config import settings from boefjes.docker_boefjes_runner import DockerBoefjesRunner -from boefjes.job_models import BoefjeMeta, NormalizerMeta, SerializedOOI, SerializedOOIValue -from boefjes.katalogus.local_repository import LocalPluginRepository +from boefjes.job_models import BoefjeMeta, NormalizerMeta +from boefjes.local_repository import LocalPluginRepository from boefjes.plugins.models import _default_mime_types from boefjes.runtime_interfaces import BoefjeJobRunner, Handler, NormalizerJobRunner +from boefjes.storage.interfaces import SettingsNotConformingToSchema from octopoes.api.models import Affirmation, Declaration, Observation from octopoes.connector.octopoes import OctopoesAPIConnector -from octopoes.models import OOI, Reference, ScanLevel +from octopoes.models import Reference, ScanLevel from octopoes.models.exception import ObjectNotFoundException MIMETYPE_MIN_LENGTH = 5 # two chars before, and 2 chars after the slash ought to be reasonable -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) bytes_api_client = BytesAPIClient( str(settings.bytes_api), @@ -32,39 +34,11 @@ ) -def _serialize_value(value: Any, required: bool) -> SerializedOOIValue: - if isinstance(value, list): - return [_serialize_value(item, required) for item in value] - if isinstance(value, Reference): - try: - return value.tokenized.root - except AttributeError: - if required: - raise - - return None - if isinstance(value, Enum): - return value.value - if isinstance(value, int | float): - return value - else: - return 
str(value) - - -def serialize_ooi(ooi: OOI) -> SerializedOOI: - serialized_oois = {} - for key, value in ooi: - if key not in ooi.model_fields: - continue - serialized_oois[key] = _serialize_value(value, ooi.model_fields[key].is_required()) - return serialized_oois - - def get_octopoes_api_connector(org_code: str) -> OctopoesAPIConnector: return OctopoesAPIConnector(str(settings.octopoes_api), org_code) -def get_environment_settings(boefje_meta: BoefjeMeta, environment_keys: list[str]) -> dict[str, str]: +def get_environment_settings(boefje_meta: BoefjeMeta, schema: dict | None = None) -> dict[str, str]: try: katalogus_api = str(settings.katalogus_api).rstrip("/") response = httpx.get( @@ -72,22 +46,34 @@ def get_environment_settings(boefje_meta: BoefjeMeta, environment_keys: list[str timeout=30, ) response.raise_for_status() - environment = response.json() - - # Add prefixed BOEFJE_* global environment variables - for key, value in os.environ.items(): - if key.startswith("BOEFJE_"): - katalogus_key = key.split("BOEFJE_", 1)[1] - # Only pass the environment variable if it is not explicitly set through the katalogus, - # if and only if they are defined in boefje.json - if katalogus_key in environment_keys and katalogus_key not in environment: - environment[katalogus_key] = value - - return {k: str(v) for k, v in environment.items() if k in environment_keys} except HTTPError: logger.exception("Error getting environment settings") raise + allowed_keys = schema.get("properties", []) if schema else [] + new_env = { + key.split("BOEFJE_", 1)[1]: value + for key, value in os.environ.items() + if key.startswith("BOEFJE_") and key.split("BOEFJE_", 1)[1] in allowed_keys + } + + settings_from_katalogus = response.json() + + for key, value in settings_from_katalogus.items(): + if key in allowed_keys: + new_env[key] = value + + # The schema, besides dictating that a boefje cannot run if it is not matched, also provides an extra safeguard: + # it is possible to inject code if arguments are passed that 
"escape" the call to a tool. Hence, we should enforce + # the schema somewhere and make the schema as strict as possible. + if schema is not None: + try: + validate(instance=new_env, schema=schema) + except ValidationError as e: + raise SettingsNotConformingToSchema(boefje_meta.boefje.id, e.message) from e + + return new_env + class BoefjeHandler(Handler): def __init__( @@ -124,12 +110,10 @@ def handle(self, boefje_meta: BoefjeMeta) -> None: except ObjectNotFoundException as e: raise ObjectNotFoundException(f"Object {reference} not found in Octopoes") from e - boefje_meta.arguments["input"] = serialize_ooi(ooi) - - env_keys = boefje_resource.environment_keys + boefje_meta.arguments["input"] = ooi.serialize() boefje_meta.runnable_hash = boefje_resource.runnable_hash - boefje_meta.environment = get_environment_settings(boefje_meta, env_keys) if env_keys else {} + boefje_meta.environment = get_environment_settings(boefje_meta, boefje_resource.schema) mime_types = _default_mime_types(boefje_meta.boefje) @@ -209,6 +193,7 @@ def handle(self, normalizer_meta: NormalizerMeta) -> None: Observation( method=normalizer_meta.normalizer.id, source=reference, + source_method=normalizer_meta.raw_data.boefje_meta.boefje.id, task_id=normalizer_meta.id, valid_time=normalizer_meta.raw_data.boefje_meta.ended_at, result=[ooi for ooi in observation.results if ooi.primary_key != observation.input_ooi], @@ -219,6 +204,7 @@ def handle(self, normalizer_meta: NormalizerMeta) -> None: connector.save_declaration( Declaration( method=normalizer_meta.normalizer.id, + source_method=normalizer_meta.raw_data.boefje_meta.boefje.id, ooi=declaration.ooi, task_id=normalizer_meta.id, valid_time=normalizer_meta.raw_data.boefje_meta.ended_at, @@ -229,12 +215,30 @@ def handle(self, normalizer_meta: NormalizerMeta) -> None: connector.save_affirmation( Affirmation( method=normalizer_meta.normalizer.id, + source_method=normalizer_meta.raw_data.boefje_meta.boefje.id, ooi=affirmation.ooi, 
task_id=normalizer_meta.id, valid_time=normalizer_meta.raw_data.boefje_meta.ended_at, ) ) + if ( + normalizer_meta.raw_data.boefje_meta.input_ooi # No input OOI means no deletion propagation + and not (results.observations or results.declarations or results.affirmations) + ): + # There were no results found, which we still need to signal to Octopoes for deletion propagation + + connector.save_observation( + Observation( + method=normalizer_meta.normalizer.id, + source=Reference.from_str(normalizer_meta.raw_data.boefje_meta.input_ooi), + source_method=normalizer_meta.raw_data.boefje_meta.boefje.id, + task_id=normalizer_meta.id, + valid_time=normalizer_meta.raw_data.boefje_meta.ended_at, + result=[], + ) + ) + corrected_scan_profiles = [] for profile in results.scan_profiles: profile.level = ScanLevel( diff --git a/boefjes/boefjes/job_models.py b/boefjes/boefjes/job_models.py index 9d1a0e49aca..8e419b79541 100644 --- a/boefjes/boefjes/job_models.py +++ b/boefjes/boefjes/job_models.py @@ -1,10 +1,10 @@ -from datetime import timedelta +from datetime import datetime, timedelta from typing import Annotated, Literal, TypeAlias from uuid import UUID from pydantic import AwareDatetime, BaseModel, Field, StringConstraints -from octopoes.models import DeclaredScanProfile, PrimaryKeyToken +from octopoes.models import DeclaredScanProfile from octopoes.models.types import OOIType @@ -85,6 +85,7 @@ class NormalizerObservation(BaseModel): class NormalizerDeclaration(BaseModel): type: Literal["declaration"] = "declaration" ooi: OOIType + end_valid_time: datetime | None = None class NormalizerAffirmation(BaseModel): @@ -100,5 +101,3 @@ class NormalizerResults(BaseModel): NormalizerOutput: TypeAlias = OOIType | NormalizerDeclaration | NormalizerAffirmation | DeclaredScanProfile -SerializedOOIValue: TypeAlias = None | str | int | float | dict[str, str | PrimaryKeyToken] | list["SerializedOOIValue"] -SerializedOOI: TypeAlias = dict[str, SerializedOOIValue] diff --git 
a/boefjes/boefjes/katalogus/dependencies/organisations.py b/boefjes/boefjes/katalogus/dependencies/organisations.py deleted file mode 100644 index 46c03a936c3..00000000000 --- a/boefjes/boefjes/katalogus/dependencies/organisations.py +++ /dev/null @@ -1,12 +0,0 @@ -import logging -from collections.abc import Iterator - -from boefjes.katalogus.storage.interfaces import OrganisationStorage -from boefjes.sql.db import session_managed_iterator -from boefjes.sql.organisation_storage import create_organisation_storage - -logger = logging.getLogger(__name__) - - -def get_organisations_store() -> Iterator[OrganisationStorage]: - yield from session_managed_iterator(create_organisation_storage) diff --git a/boefjes/boefjes/katalogus/api/organisations.py b/boefjes/boefjes/katalogus/organisations.py similarity index 88% rename from boefjes/boefjes/katalogus/api/organisations.py rename to boefjes/boefjes/katalogus/organisations.py index fcb68e0f99f..3f8d8e6cc84 100644 --- a/boefjes/boefjes/katalogus/api/organisations.py +++ b/boefjes/boefjes/katalogus/organisations.py @@ -1,9 +1,9 @@ from fastapi import APIRouter, Depends, HTTPException, status -from boefjes.katalogus.dependencies.organisations import get_organisations_store -from boefjes.katalogus.models import Organisation -from boefjes.katalogus.storage.interfaces import OrganisationNotFound, OrganisationStorage +from boefjes.models import Organisation from boefjes.sql.db import ObjectNotFoundException +from boefjes.sql.organisation_storage import get_organisations_store +from boefjes.storage.interfaces import OrganisationNotFound, OrganisationStorage router = APIRouter(prefix="/organisations", tags=["organisations"]) diff --git a/boefjes/boefjes/katalogus/api/plugins.py b/boefjes/boefjes/katalogus/plugins.py similarity index 90% rename from boefjes/boefjes/katalogus/api/plugins.py rename to boefjes/boefjes/katalogus/plugins.py index 7b639a88ba0..6ecc4b031db 100644 --- a/boefjes/boefjes/katalogus/api/plugins.py +++ 
b/boefjes/boefjes/katalogus/plugins.py @@ -3,18 +3,19 @@ from fastapi import APIRouter, Body, Depends, HTTPException, status from fastapi.responses import FileResponse, JSONResponse, Response -from pydantic import BaseModel, Field +from jsonschema.validators import Draft202012Validator +from pydantic import BaseModel, Field, field_validator -from boefjes.katalogus.api.organisations import check_organisation_exists -from boefjes.katalogus.dependencies.plugins import ( +from boefjes.dependencies.plugins import ( PluginService, get_pagination_parameters, get_plugin_service, get_plugins_filter_parameters, ) -from boefjes.katalogus.models import FilterParameters, PaginationParameters, PluginType -from boefjes.katalogus.storage.interfaces import PluginStorage +from boefjes.katalogus.organisations import check_organisation_exists +from boefjes.models import FilterParameters, PaginationParameters, PluginType from boefjes.sql.plugin_storage import get_plugin_storage +from boefjes.storage.interfaces import PluginStorage router = APIRouter( prefix="/organisations/{organisation_id}", @@ -90,6 +91,8 @@ def get_plugin( @router.post("/plugins", status_code=status.HTTP_201_CREATED) def add_plugin(plugin: PluginType, plugin_service: PluginService = Depends(get_plugin_service)): with plugin_service as service: + plugin.static = False # Creation through the API implies that these cannot be static + if plugin.type == "boefje": return service.create_boefje(plugin) @@ -120,13 +123,22 @@ class BoefjeIn(BaseModel): version: str | None = None created: datetime.datetime | None = None description: str | None = None - environment_keys: list[str] = Field(default_factory=list) scan_level: int = 1 consumes: set[str] = Field(default_factory=set) produces: set[str] = Field(default_factory=set) + schema: dict | None = None oci_image: str | None = None oci_arguments: list[str] = Field(default_factory=list) + @field_validator("schema") + @classmethod + def json_schema_valid(cls, schema: dict | None) 
-> dict | None: + if schema is not None: + Draft202012Validator.check_schema(schema) + return schema + + return None + @router.patch("/boefjes/{boefje_id}", status_code=status.HTTP_204_NO_CONTENT) def update_boefje( @@ -154,7 +166,6 @@ class NormalizerIn(BaseModel): version: str | None = None created: datetime.datetime | None = None description: str | None = None - environment_keys: list[str] = Field(default_factory=list) consumes: list[str] = Field(default_factory=list) # mime types (and/ or boefjes) produces: list[str] = Field(default_factory=list) # oois diff --git a/boefjes/boefjes/katalogus/api/root.py b/boefjes/boefjes/katalogus/root.py similarity index 66% rename from boefjes/boefjes/katalogus/api/root.py rename to boefjes/boefjes/katalogus/root.py index 0b3a7ac2bae..8aa0c1683c5 100644 --- a/boefjes/boefjes/katalogus/api/root.py +++ b/boefjes/boefjes/katalogus/root.py @@ -2,8 +2,10 @@ import logging.config from typing import Any +import structlog from fastapi import APIRouter, FastAPI, Request, status from fastapi.responses import JSONResponse, RedirectResponse +from jsonschema.exceptions import SchemaError from opentelemetry import trace from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor @@ -15,15 +17,35 @@ from pydantic import BaseModel, Field from boefjes.config import settings -from boefjes.katalogus.api import organisations, plugins -from boefjes.katalogus.api import settings as settings_router -from boefjes.katalogus.storage.interfaces import NotFound, StorageError +from boefjes.katalogus import organisations, plugins +from boefjes.katalogus import settings as settings_router from boefjes.katalogus.version import __version__ +from boefjes.storage.interfaces import NotAllowed, NotFound, StorageError with settings.log_cfg.open() as f: logging.config.dictConfig(json.load(f)) -logger = logging.getLogger(__name__) +structlog.configure( + processors=[ + 
structlog.contextvars.merge_contextvars, + structlog.processors.add_log_level, + structlog.processors.StackInfoRenderer(), + structlog.dev.set_exc_info, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.processors.TimeStamper("iso", utc=False), + ( + structlog.dev.ConsoleRenderer(pad_level=False) + if settings.logging_format == "text" + else structlog.processors.JSONRenderer() + ), + ], + context_class=dict, + logger_factory=structlog.stdlib.LoggerFactory(), + wrapper_class=structlog.stdlib.BoundLogger, + cache_logger_on_first_use=True, +) + +logger = structlog.get_logger(__name__) app = FastAPI(title="KAT-alogus API", version=__version__) @@ -59,6 +81,14 @@ def entity_not_found_handler(request: Request, exc: NotFound): ) +@app.exception_handler(NotAllowed) +def not_allowed_handler(request: Request, exc: NotAllowed): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content={"message": exc.message}, + ) + + @app.exception_handler(StorageError) def storage_error_handler(request: Request, exc: StorageError): return JSONResponse( @@ -67,6 +97,14 @@ def storage_error_handler(request: Request, exc: StorageError): ) +@app.exception_handler(SchemaError) +def schema_error_handler(request: Request, exc: SchemaError): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content={"message": "Invalid jsonschema provided"}, + ) + + class ServiceHealth(BaseModel): service: str healthy: bool = False diff --git a/boefjes/boefjes/katalogus/api/settings.py b/boefjes/boefjes/katalogus/settings.py similarity index 85% rename from boefjes/boefjes/katalogus/api/settings.py rename to boefjes/boefjes/katalogus/settings.py index 7a710a92711..b40223bbd92 100644 --- a/boefjes/boefjes/katalogus/api/settings.py +++ b/boefjes/boefjes/katalogus/settings.py @@ -1,7 +1,7 @@ from fastapi import APIRouter, Depends -from boefjes.katalogus.api.organisations import check_organisation_exists -from boefjes.katalogus.dependencies.plugins import PluginService, 
get_plugin_service +from boefjes.dependencies.plugins import PluginService, get_plugin_service +from boefjes.katalogus.organisations import check_organisation_exists router = APIRouter( prefix="/organisations/{organisation_id}/{plugin_id}/settings", diff --git a/boefjes/boefjes/katalogus/storage/memory.py b/boefjes/boefjes/katalogus/storage/memory.py deleted file mode 100644 index d89e972ca32..00000000000 --- a/boefjes/boefjes/katalogus/storage/memory.py +++ /dev/null @@ -1,106 +0,0 @@ -from boefjes.katalogus.models import Boefje, Normalizer, Organisation, PluginType -from boefjes.katalogus.storage.interfaces import ( - OrganisationStorage, - PluginEnabledStorage, - PluginStorage, - SettingsStorage, -) - -# key = organisation id; value = organisation -organisations: dict[str, Organisation] = {} - -# key = organisation, repository/plugin id; value = enabled/ disabled -plugins_state: dict[str, dict[str, bool]] = {} - - -class OrganisationStorageMemory(OrganisationStorage): - def __init__(self, defaults: dict[str, Organisation] | None = None): - self._data = organisations if defaults is None else defaults - - def get_by_id(self, organisation_id: str) -> Organisation: - return self._data[organisation_id] - - def get_all(self) -> dict[str, Organisation]: - return self._data - - def create(self, organisation: Organisation) -> None: - self._data[organisation.id] = organisation - - def delete_by_id(self, organisation_id: str) -> None: - del self._data[organisation_id] - - -class PluginStorageMemory(PluginStorage): - def __init__(self): - self._boefjes = {} - self._normalizers = {} - - def get_all(self) -> list[PluginType]: - return list(self._boefjes.values()) + list(self._normalizers.values()) - - def boefje_by_id(self, boefje_id: str) -> Boefje: - return self._boefjes[boefje_id] - - def normalizer_by_id(self, normalizer_id: str) -> Normalizer: - return self._normalizers[normalizer_id] - - def create_boefje(self, boefje: Boefje) -> None: - self._boefjes[boefje.id] = 
boefje - - def create_normalizer(self, normalizer: Normalizer) -> None: - self._normalizers[normalizer.id] = normalizer - - def delete_boefje_by_id(self, boefje_id: str) -> None: - del self._boefjes[boefje_id] - - def delete_normalizer_by_id(self, normalizer_id: str) -> None: - del self._normalizers[normalizer_id] - - -class SettingsStorageMemory(SettingsStorage): - def __init__(self): - self._data = {} - - def get_all(self, organisation_id: str, plugin_id: str) -> dict[str, str]: - if organisation_id not in self._data: - return {} - - return self._data[organisation_id].get(plugin_id, {}) - - def upsert(self, values: dict, organisation_id: str, plugin_id: str) -> None: - if organisation_id not in self._data: - self._data[organisation_id] = {} - - if plugin_id not in self._data[organisation_id]: - self._data[organisation_id][plugin_id] = {} - - self._data[organisation_id][plugin_id] = values - - def delete(self, organisation_id: str, plugin_id: str) -> None: - del self._data[organisation_id][plugin_id] - - -class PluginStatesStorageMemory(PluginEnabledStorage): - def __init__( - self, - organisation: str, - defaults: dict[str, bool] | None = None, - ): - self._data = plugins_state.setdefault(organisation, {}) if defaults is None else defaults - self._organisation = organisation - - def get_by_id(self, plugin_id: str, organisation_id: str) -> bool: - return self._data[f"{organisation_id}.{plugin_id}"] - - def get_all_enabled(self, organisation_id: str) -> list[str]: - return [ - key.split(".", maxsplit=1)[1] - for key, value in self._data.items() - if value and key.split(".", maxsplit=1)[0] == organisation_id - ] - - def create(self, plugin_id: str, enabled: bool, organisation_id: str) -> None: - self._data[f"{organisation_id}.{plugin_id}"] = enabled - - def update_or_create_by_id(self, plugin_id: str, enabled: bool, organisation_id: str) -> None: - self._data[f"{organisation_id}.{plugin_id}"] = enabled diff --git a/boefjes/boefjes/local.py b/boefjes/boefjes/local.py 
index 7581423dbee..e743c9b9de9 100644 --- a/boefjes/boefjes/local.py +++ b/boefjes/boefjes/local.py @@ -1,7 +1,8 @@ -import logging import os from collections.abc import Iterable +import structlog + from boefjes.job_models import ( BoefjeMeta, InvalidReturnValueNormalizer, @@ -13,11 +14,11 @@ NormalizerResults, ObservationsWithoutInputOOI, ) -from boefjes.katalogus.local_repository import LocalPluginRepository +from boefjes.local_repository import LocalPluginRepository from boefjes.runtime_interfaces import BoefjeJobRunner, JobRuntimeError, NormalizerJobRunner from octopoes.models import OOI, DeclaredScanProfile -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) class TemporaryEnvironment: diff --git a/boefjes/boefjes/katalogus/local_repository.py b/boefjes/boefjes/local_repository.py similarity index 92% rename from boefjes/boefjes/katalogus/local_repository.py rename to boefjes/boefjes/local_repository.py index 3e9071123f8..a29fb92ba35 100644 --- a/boefjes/boefjes/katalogus/local_repository.py +++ b/boefjes/boefjes/local_repository.py @@ -1,21 +1,23 @@ import json -import logging import pkgutil from pathlib import Path from typing import Any -from boefjes.katalogus.models import PluginType +import structlog + +from boefjes.models import PluginType from boefjes.plugins.models import ( BOEFJE_DEFINITION_FILE, BOEFJES_DIR, ENTRYPOINT_NORMALIZERS, NORMALIZER_DEFINITION_FILE, + SCHEMA_FILE, BoefjeResource, ModuleException, NormalizerResource, ) -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) class LocalPluginRepository: @@ -51,10 +53,10 @@ def schema(self, id_: str) -> dict | None: if id_ not in boefjes: return None - path = boefjes[id_].path / "schema.json" + path = boefjes[id_].path / SCHEMA_FILE if not path.exists(): - logger.debug("Did not find schema for boefje %s", boefjes[id_]) + logger.debug("Did not find schema for boefje %s", id_) return None return json.loads(path.read_text()) @@ -133,14 +135,14 @@ 
def _find_packages_in_path_containing_files(self, required_files: list[str]) -> for package in pkgutil.walk_packages([str(self.path)], prefix): if not package.ispkg: - logging.debug("%s is not a package", package.name) + logger.debug("%s is not a package", package.name) continue path = self.path / package.name.replace(prefix, "").replace(".", "/") missing_files = [file for file in required_files if not (path / file).exists()] if missing_files: - logging.debug("Files %s not found for %s", missing_files, package.name) + logger.debug("Files %s not found for %s", missing_files, package.name) continue paths.append((path, package.name)) diff --git a/boefjes/boefjes/migrations/versions/5be152459a7b_introduce_schema_field_to_boefje_model.py b/boefjes/boefjes/migrations/versions/5be152459a7b_introduce_schema_field_to_boefje_model.py new file mode 100644 index 00000000000..2cd63145aa5 --- /dev/null +++ b/boefjes/boefjes/migrations/versions/5be152459a7b_introduce_schema_field_to_boefje_model.py @@ -0,0 +1,62 @@ +"""Introduce schema field to Boefje model + +Revision ID: 5be152459a7b +Revises: f9de6eb7824b +Create Date: 2024-08-08 14:47:12.582017 + +""" + +import logging + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.orm import sessionmaker + +from boefjes.local_repository import get_local_repository +from boefjes.sql.plugin_storage import create_plugin_storage +from boefjes.storage.interfaces import PluginNotFound + +# revision identifiers, used by Alembic. +revision = "5be152459a7b" +down_revision = "f9de6eb7824b" +branch_labels = None +depends_on = None + +logger = logging.getLogger(__name__) + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column("boefje", sa.Column("schema", sa.JSON(), nullable=True)) + + local_repo = get_local_repository() + session = sessionmaker(bind=op.get_bind())() + + with create_plugin_storage(session) as storage: + plugins = local_repo.get_all() + logger.info("Found %s plugins", len(plugins)) + + for plugin in local_repo.get_all(): + schema = local_repo.schema(plugin.id) + + if schema: + try: + # This way we avoid the safeguard that updating static boefjes is not allowed + instance = storage._db_boefje_instance_by_id(plugin.id) + instance.schema = schema + storage.session.add(instance) + logger.info("Updated database entry for plugin %s", plugin.id) + except PluginNotFound: + logger.info("No database entry for plugin %s", plugin.id) + continue + else: + logger.info("No schema present for plugin %s", plugin.id) + + session.close() + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("boefje", "schema") + # ### end Alembic commands ### diff --git a/boefjes/boefjes/migrations/versions/870fc302b852_remove_environment_keys_field.py b/boefjes/boefjes/migrations/versions/870fc302b852_remove_environment_keys_field.py new file mode 100644 index 00000000000..7bdfbd9e024 --- /dev/null +++ b/boefjes/boefjes/migrations/versions/870fc302b852_remove_environment_keys_field.py @@ -0,0 +1,37 @@ +"""Remove environment keys field + +Revision ID: 870fc302b852 +Revises: 5be152459a7b +Create Date: 2024-08-20 06:08:20.943924 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "870fc302b852" +down_revision = "5be152459a7b" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("boefje", "environment_keys") + op.drop_column("normalizer", "environment_keys") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "normalizer", + sa.Column("environment_keys", postgresql.ARRAY(sa.VARCHAR(length=128)), autoincrement=False, nullable=False), + ) + op.add_column( + "boefje", + sa.Column("environment_keys", postgresql.ARRAY(sa.VARCHAR(length=128)), autoincrement=False, nullable=False), + ) + # ### end Alembic commands ### diff --git a/boefjes/boefjes/migrations/versions/cd34fdfafdaf_json_settings_for_settings_table.py b/boefjes/boefjes/migrations/versions/cd34fdfafdaf_json_settings_for_settings_table.py index 7351d140501..f76286dee3c 100644 --- a/boefjes/boefjes/migrations/versions/cd34fdfafdaf_json_settings_for_settings_table.py +++ b/boefjes/boefjes/migrations/versions/cd34fdfafdaf_json_settings_for_settings_table.py @@ -13,8 +13,8 @@ from sqlalchemy.engine import Connection from sqlalchemy.orm import sessionmaker +from boefjes.sql.config_storage import create_encrypter from boefjes.sql.db import get_engine -from boefjes.sql.setting_storage import create_encrypter # revision identifiers, used by Alembic. 
revision = "cd34fdfafdaf" diff --git a/boefjes/boefjes/migrations/versions/f9de6eb7824b_introduce_boefjeconfig_model.py b/boefjes/boefjes/migrations/versions/f9de6eb7824b_introduce_boefjeconfig_model.py new file mode 100644 index 00000000000..d46f360b703 --- /dev/null +++ b/boefjes/boefjes/migrations/versions/f9de6eb7824b_introduce_boefjeconfig_model.py @@ -0,0 +1,277 @@ +"""Introduce BoefjeConfig model + +Revision ID: f9de6eb7824b +Revises: 6f99834a4a5a +Create Date: 2024-05-31 10:45:16.474714 + +""" + +import logging + +import sqlalchemy as sa +from alembic import op +from psycopg2._json import Json +from psycopg2.extensions import register_adapter +from psycopg2.extras import execute_values + +from boefjes.local_repository import get_local_repository +from boefjes.models import Boefje, Normalizer + +# revision identifiers, used by Alembic. +revision = "f9de6eb7824b" +down_revision = "6f99834a4a5a" +branch_labels = None +depends_on = None + + +logger = logging.getLogger(__name__) + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "boefje_config", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("settings", sa.String(length=512), nullable=False, server_default="{}"), + sa.Column("enabled", sa.Boolean(), nullable=False, server_default="false"), + sa.Column("boefje_id", sa.Integer(), nullable=False), + sa.Column("organisation_pk", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["boefje_id"], ["boefje.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["organisation_pk"], ["organisation.pk"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("organisation_pk", "boefje_id", name="unique_boefje_config_per_organisation_per_boefje"), + ) + op.create_table( + "normalizer_config", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("enabled", sa.Boolean(), server_default="false", nullable=False), + sa.Column("normalizer_id", sa.Integer(), nullable=False), + sa.Column("organisation_pk", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["normalizer_id"], ["normalizer.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["organisation_pk"], ["organisation.pk"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "organisation_pk", "normalizer_id", name="unique_normalizer_config_per_organisation_per_normalizer" + ), + ) + + op.add_column("boefje", sa.Column("static", sa.Boolean(), server_default="false", nullable=False)) + op.add_column("normalizer", sa.Column("static", sa.Boolean(), server_default="false", nullable=False)) + + register_adapter(dict, Json) + + local_plugins = {plugin.id: plugin for plugin in get_local_repository().get_all()} + connection = op.get_bind() + + # Get unique plugin_ids from the settings table for boefjes that do not exist yet in the database + query = """ + SELECT DISTINCT s.plugin_id FROM settings s left join boefje b on b.plugin_id = s.plugin_id + where b.plugin_id IS NULL + """ # noqa: S608 + + to_insert: 
list[Boefje] = [] + + for plugin_id_output in connection.execute(query).fetchall(): + plugin_id = plugin_id_output[0] + if plugin_id not in local_plugins: + raise ValueError(f"Invalid plugin id found: {plugin_id}") + + # Since settings are boefje-only at this moment + if local_plugins[plugin_id].type != "boefje": + raise ValueError(f"Settings for normalizer or bit found: {plugin_id}. Remove these entries first.") + + res = connection.execute(f"SELECT id FROM boefje where plugin_id = '{plugin_id}'") # noqa: S608 + if res.fetchone() is not None: + continue # The Boefje already exists + + if local_plugins[plugin_id].type == "boefje": + to_insert.append(local_plugins[plugin_id]) + + entries = [ + ( + boefje.id, + boefje.name, + boefje.description, + str(boefje.scan_level), + list(boefje.consumes), + list(boefje.produces), + ["TEST_KEY"], + boefje.oci_image, + boefje.oci_arguments, + boefje.version, + ) + for boefje in to_insert + ] + query = """INSERT INTO boefje (plugin_id, name, description, scan_level, consumes, produces, environment_keys, + oci_image, oci_arguments, version) values %s""" + + with connection.begin(): + cursor = connection.connection.cursor() + execute_values(cursor, query, entries) + + to_insert = [] + + query = """ + SELECT DISTINCT p.plugin_id FROM plugin_state p left join boefje b on b.plugin_id = p.plugin_id + where b.plugin_id IS NULL + """ + + for plugin_id_output in connection.execute(query).fetchall(): + plugin_id = plugin_id_output[0] + if plugin_id not in local_plugins: + logger.warning("Unknown plugin id found: %s. 
You might have to re-enable the plugin!", plugin_id) + continue + + res = connection.execute(f"SELECT id FROM boefje where plugin_id = '{plugin_id}'") # noqa: S608 + if res.fetchone() is not None: + continue # The Boefje already exists + + if local_plugins[plugin_id].type == "boefje": + to_insert.append(local_plugins[plugin_id]) + + entries = [ + ( + boefje.id, + boefje.name, + boefje.description, + str(boefje.scan_level), + list(boefje.consumes), + list(boefje.produces), + ["TEST_KEY"], + boefje.oci_image, + boefje.oci_arguments, + boefje.version, + ) + for boefje in to_insert + ] + query = """INSERT INTO boefje (plugin_id, name, description, scan_level, consumes, produces, environment_keys, + oci_image, oci_arguments, version) values %s""" # noqa: S608 + + with connection.begin(): + cursor = connection.connection.cursor() + execute_values(cursor, query, entries) + + normalizers_to_insert: list[Normalizer] = [] + query = """ + SELECT DISTINCT p.plugin_id FROM plugin_state p left join normalizer n on n.plugin_id = p.plugin_id + where n.plugin_id IS NULL + """ # noqa: S608 + + for plugin_id_output in connection.execute(query).fetchall(): + plugin_id = plugin_id_output[0] + if plugin_id not in local_plugins: + logger.warning("Unknown plugin id found: %s. 
You might have to re-enable the plugin!", plugin_id) + continue + + res = connection.execute(f"SELECT id FROM normalizer where plugin_id = '{plugin_id}'") # noqa: S608 + if res.fetchone() is not None: + continue # The Normalizer already exists + + if local_plugins[plugin_id].type == "normalizer": + normalizers_to_insert.append(local_plugins[plugin_id]) + + normalizer_entries = [ + ( + normalizer.id, + normalizer.name, + normalizer.description, + normalizer.consumes, + normalizer.produces, + ["TEST_KEY"], + normalizer.version, + ) + for normalizer in normalizers_to_insert + ] + query = """INSERT INTO normalizer (plugin_id, name, description, consumes, produces, environment_keys, version) + values %s""" # noqa: S608 + + with connection.begin(): + cursor = connection.connection.cursor() + execute_values(cursor, query, normalizer_entries) + + with connection.begin(): + connection.execute(""" + INSERT INTO boefje_config (settings, boefje_id, organisation_pk) + SELECT s.values, b.id, s.organisation_pk from settings s + join boefje b on s.plugin_id = b.plugin_id + """) # Add boefjes and set the settings for boefjes + + with connection.begin(): + connection.execute(""" + INSERT INTO boefje_config (enabled, boefje_id, organisation_pk) + SELECT p.enabled, b.id, p.organisation_pk FROM plugin_state p + JOIN boefje b ON p.plugin_id = b.plugin_id + LEFT JOIN boefje_config bc ON bc.boefje_id = b.id WHERE bc.boefje_id IS NULL + """) # Add boefjes and set the enabled field for boefjes that do not exist yet + connection.execute(""" + UPDATE boefje_config bc SET enabled = p.enabled from plugin_state p + JOIN boefje b ON p.plugin_id = b.plugin_id + where b.id = bc.boefje_id and p.organisation_pk = bc.organisation_pk + """) # Set the enabled field for boefjes + connection.execute(""" + UPDATE normalizer_config nc SET enabled = p.enabled from plugin_state p + JOIN normalizer n ON p.plugin_id = n.plugin_id + where n.id = nc.normalizer_id and p.organisation_pk = nc.organisation_pk + """)
# Set the enabled field for normalizers + + op.drop_table("settings") + op.drop_table("plugin_state") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("normalizer", "static") + op.drop_column("boefje", "static") + + op.create_table( + "settings", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("values", sa.VARCHAR(length=512), autoincrement=False, nullable=False), + sa.Column("plugin_id", sa.VARCHAR(length=64), autoincrement=False, nullable=False), + sa.Column("organisation_pk", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["organisation_pk"], ["organisation.pk"], name="settings_organisation_pk_fkey", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id", name="settings_pkey"), + sa.UniqueConstraint("organisation_pk", "plugin_id", name="unique_settings_per_organisation_per_plugin"), + ) + op.create_table( + "plugin_state", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("plugin_id", sa.VARCHAR(length=64), autoincrement=False, nullable=False), + sa.Column("enabled", sa.BOOLEAN(), autoincrement=False, nullable=False), + sa.Column("organisation_pk", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["organisation_pk"], ["organisation.pk"], name="plugin_state_organisation_pk_fkey", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id", name="plugin_state_pkey"), + sa.UniqueConstraint("plugin_id", "organisation_pk", name="unique_plugin_id_per_org"), + ) + + connection = op.get_bind() + with connection.begin(): + connection.execute(""" + INSERT INTO settings (values, plugin_id, organisation_pk) + SELECT bc.settings, b.plugin_id, bc.organisation_pk from boefje_config bc + join boefje b on bc.boefje_id = b.id + """) + + with connection.begin(): + connection.execute(""" + INSERT INTO plugin_state (enabled, plugin_id, organisation_pk) + SELECT 
bc.enabled, b.plugin_id, bc.organisation_pk from boefje_config bc + join boefje b on bc.boefje_id = b.id + """) + + with connection.begin(): + connection.execute(""" + INSERT INTO plugin_state (enabled, plugin_id, organisation_pk) + SELECT nc.enabled, n.plugin_id, nc.organisation_pk from normalizer_config nc + join normalizer n on nc.normalizer_id = n.id + """) + + op.drop_table("boefje_config") + op.drop_table("normalizer_config") + + # ### end Alembic commands ### diff --git a/boefjes/boefjes/katalogus/models.py b/boefjes/boefjes/models.py similarity index 79% rename from boefjes/boefjes/katalogus/models.py rename to boefjes/boefjes/models.py index 69d8cc2c637..58665b8588a 100644 --- a/boefjes/boefjes/katalogus/models.py +++ b/boefjes/boefjes/models.py @@ -2,7 +2,8 @@ from enum import Enum from typing import Literal -from pydantic import BaseModel, Field +from jsonschema.validators import Draft202012Validator +from pydantic import BaseModel, Field, field_validator class Organisation(BaseModel): @@ -12,11 +13,10 @@ class Organisation(BaseModel): class Plugin(BaseModel): id: str - name: str | None = None + name: str version: str | None = None created: datetime.datetime | None = None description: str | None = None - environment_keys: list[str] = Field(default_factory=list) enabled: bool = False static: bool = True # We need to differentiate between local and remote plugins to know which ones can be deleted @@ -29,10 +29,21 @@ class Boefje(Plugin): scan_level: int = 1 consumes: set[str] = Field(default_factory=set) produces: set[str] = Field(default_factory=set) + schema: dict | None = None runnable_hash: str | None = None oci_image: str | None = None oci_arguments: list[str] = Field(default_factory=list) + @field_validator("schema") + @classmethod + def json_schema_valid(cls, schema: dict) -> dict: + if schema is not None: + Draft202012Validator.check_schema(schema) + return schema + + class Config: + validate_assignment = True + class Normalizer(Plugin): type: 
Literal["normalizer"] = "normalizer" diff --git a/boefjes/boefjes/plugins/kat_adr_finding_types/boefje.json b/boefjes/boefjes/plugins/kat_adr_finding_types/boefje.json index e53d47f3982..93c2ae8ef7d 100644 --- a/boefjes/boefjes/plugins/kat_adr_finding_types/boefje.json +++ b/boefjes/boefjes/plugins/kat_adr_finding_types/boefje.json @@ -1,7 +1,7 @@ { "id": "adr-finding-types", "name": "ADR Finding Types", - "description": "Hydrate information of ADR finding types", + "description": "Hydrate information on API Design Rules (ADR) finding types for common design mistakes.", "consumes": [ "ADRFindingType" ], diff --git a/boefjes/boefjes/plugins/kat_adr_finding_types/normalizer.json b/boefjes/boefjes/plugins/kat_adr_finding_types/normalizer.json index 583b7714bed..fabda504805 100644 --- a/boefjes/boefjes/plugins/kat_adr_finding_types/normalizer.json +++ b/boefjes/boefjes/plugins/kat_adr_finding_types/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_adr_finding_types_normalize", + "name": "API Design Rules (ADR) Finding Types", + "description": "Parse API Design Rules (ADR) finding types.", "consumes": [ "boefje/adr-finding-types" ], diff --git a/boefjes/boefjes/plugins/kat_adr_validator/boefje.json b/boefjes/boefjes/plugins/kat_adr_validator/boefje.json index b782dbb5b5b..9d43ee60519 100644 --- a/boefjes/boefjes/plugins/kat_adr_validator/boefje.json +++ b/boefjes/boefjes/plugins/kat_adr_validator/boefje.json @@ -1,7 +1,7 @@ { "id": "adr-validator", "name": "API Design Rules validator", - "description": "Validate if an API conforms to the API Design Rules", + "description": "Validate if an API conforms to the API Design Rules (ADR).", "consumes": [ "RESTAPI" ], diff --git a/boefjes/boefjes/plugins/kat_adr_validator/normalizer.json b/boefjes/boefjes/plugins/kat_adr_validator/normalizer.json index 52c21e9a03e..f840cded2ad 100644 --- a/boefjes/boefjes/plugins/kat_adr_validator/normalizer.json +++ b/boefjes/boefjes/plugins/kat_adr_validator/normalizer.json @@ -1,5 +1,7 @@ { "id": 
"adr-validator-normalize", + "name": "API Design Rules validator", + "description": "TODO", "consumes": [ "boefje/adr-validator" ], diff --git a/boefjes/boefjes/plugins/kat_answer_parser/normalizer.json b/boefjes/boefjes/plugins/kat_answer_parser/normalizer.json index 41a89a217b0..922b333697f 100644 --- a/boefjes/boefjes/plugins/kat_answer_parser/normalizer.json +++ b/boefjes/boefjes/plugins/kat_answer_parser/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_answer_parser", + "name": "Answer Parser", + "description": "Parses the answers from Config objects.", "consumes": [ "answer" ], diff --git a/boefjes/boefjes/plugins/kat_binaryedge/boefje.json b/boefjes/boefjes/plugins/kat_binaryedge/boefje.json index 004015e0570..e7d90e4ee98 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/boefje.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/boefje.json @@ -1,13 +1,10 @@ { "id": "binaryedge", "name": "BinaryEdge", - "description": "Use BinaryEdge to find open ports with vulnerabilities that are found on that port", + "description": "Use BinaryEdge to find open ports with vulnerabilities. 
Requires a BinaryEdge API key.", "consumes": [ "IPAddressV4", "IPAddressV6" ], - "environment_keys": [ - "BINARYEDGE_API" - ], "scan_level": 2 } diff --git a/boefjes/boefjes/plugins/kat_binaryedge/containers/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/containers/normalizer.json index 46a034d0d1c..086ce350160 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/containers/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/containers/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_containers", + "name": "BinaryEdge containers", "consumes": [ "boefje/binaryedge" ], diff --git a/boefjes/boefjes/plugins/kat_binaryedge/databases/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/databases/normalizer.json index 22fd81eb927..2af3f47f891 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/databases/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/databases/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_databases", + "name": "BinaryEdge databases", "consumes": [ "boefje/binaryedge" ], diff --git a/boefjes/boefjes/plugins/kat_binaryedge/http_web/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/http_web/normalizer.json index f0e5825f36d..f5cafc7560a 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/http_web/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/http_web/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_http_web", + "name": "BinaryEdge Websites", "consumes": [ "boefje/binaryedge" ], diff --git a/boefjes/boefjes/plugins/kat_binaryedge/message_queues/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/message_queues/normalizer.json index 15ea3e250b0..caa59b56f4b 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/message_queues/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/message_queues/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_message_queues", + "name": "BinaryEdge message queues", "consumes": [ "boefje/binaryedge" ], diff --git 
a/boefjes/boefjes/plugins/kat_binaryedge/protocols/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/protocols/normalizer.json index 34f17a681c1..30d0f02963e 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/protocols/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/protocols/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_protocols", + "name": "BinaryEdge protocols", "consumes": [ "boefje/binaryedge" ], diff --git a/boefjes/boefjes/plugins/kat_binaryedge/remote_desktop/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/remote_desktop/normalizer.json index c28180a88c7..80e1837a499 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/remote_desktop/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/remote_desktop/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_remote_desktop", + "name": "Binary Edge remote desktop", "consumes": [ "boefje/binaryedge" ], diff --git a/boefjes/boefjes/plugins/kat_binaryedge/service_identification/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/service_identification/normalizer.json index eaea2744052..d451a79b150 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/service_identification/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/service_identification/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_service_identification", + "name": "BinaryEdge service identification", "consumes": [ "boefje/binaryedge" ], diff --git a/boefjes/boefjes/plugins/kat_binaryedge/services/normalizer.json b/boefjes/boefjes/plugins/kat_binaryedge/services/normalizer.json index b2671be67a1..57a0f8dac16 100644 --- a/boefjes/boefjes/plugins/kat_binaryedge/services/normalizer.json +++ b/boefjes/boefjes/plugins/kat_binaryedge/services/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_binaryedge_services", + "name": "BinaryEdge services", "consumes": [ "boefje/binaryedge" ], diff --git a/boefjes/boefjes/plugins/kat_burpsuite/normalizer.json 
b/boefjes/boefjes/plugins/kat_burpsuite/normalizer.json index 44c8b40ab3e..c0b88e6a857 100644 --- a/boefjes/boefjes/plugins/kat_burpsuite/normalizer.json +++ b/boefjes/boefjes/plugins/kat_burpsuite/normalizer.json @@ -1,7 +1,7 @@ { "id": "kat_burpsuite_normalize", "name": "Burpsuite normalizer", - "description": "Parses Burpsuite XML output (reports). Check https://docs.openkat.nl on how to create the XML file.", + "description": "Parses Burpsuite XML output into findings. Check https://docs.openkat.nl/manual/normalizers.html#burp-suite on how to create the XML file.", "consumes": [ "xml/burp-export" ], diff --git a/boefjes/boefjes/plugins/kat_calvin/normalizer.json b/boefjes/boefjes/plugins/kat_calvin/normalizer.json index 601433e8681..c596dbdcd4a 100644 --- a/boefjes/boefjes/plugins/kat_calvin/normalizer.json +++ b/boefjes/boefjes/plugins/kat_calvin/normalizer.json @@ -1,5 +1,7 @@ { "id": "calvin-normalize", + "name": "Calvin", + "description": "Produces applications and incidents for Calvin.", "consumes": [ "boefje/calvin" ], diff --git a/boefjes/boefjes/plugins/kat_censys/boefje.json b/boefjes/boefjes/plugins/kat_censys/boefje.json index e8c15547c76..ef6c3ab9a67 100644 --- a/boefjes/boefjes/plugins/kat_censys/boefje.json +++ b/boefjes/boefjes/plugins/kat_censys/boefje.json @@ -1,14 +1,10 @@ { "id": "censys", "name": "Censys", - "description": "Use Censys to discover open ports, services and certificates", + "description": "Use Censys to discover open ports, services and certificates. 
Requires an API key.", "consumes": [ "IPAddressV4", "IPAddressV6" ], - "environment_keys": [ - "CENSYS_API_ID", - "CENSYS_API_SECRET" - ], "scan_level": 1 } diff --git a/boefjes/boefjes/plugins/kat_censys/normalizer.json b/boefjes/boefjes/plugins/kat_censys/normalizer.json index 446c55cd485..809fc7d7174 100644 --- a/boefjes/boefjes/plugins/kat_censys/normalizer.json +++ b/boefjes/boefjes/plugins/kat_censys/normalizer.json @@ -1,5 +1,6 @@ { "id": "kat_censys_normalize", + "name": "Censys", "consumes": [ "boefje/censys" ], diff --git a/boefjes/boefjes/plugins/kat_crt_sh/boefje.json b/boefjes/boefjes/plugins/kat_crt_sh/boefje.json index 72051dbb411..f9aa67e604e 100644 --- a/boefjes/boefjes/plugins/kat_crt_sh/boefje.json +++ b/boefjes/boefjes/plugins/kat_crt_sh/boefje.json @@ -1,7 +1,7 @@ { "id": "certificate-search", "name": "CRT", - "description": "Certificate search", + "description": "Searches for certificates and new hostnames in the transparency logs of crt.sh.", "consumes": [ "DNSZone" ], diff --git a/boefjes/boefjes/plugins/kat_crt_sh/normalize.py b/boefjes/boefjes/plugins/kat_crt_sh/normalize.py index aba1315df12..3c430005fb9 100644 --- a/boefjes/boefjes/plugins/kat_crt_sh/normalize.py +++ b/boefjes/boefjes/plugins/kat_crt_sh/normalize.py @@ -16,7 +16,6 @@ def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: current = fqdn.lstrip(".") network = Network(name="internet") - yield network network_reference = network.reference unique_domains = set() diff --git a/boefjes/boefjes/plugins/kat_crt_sh/normalizer.json b/boefjes/boefjes/plugins/kat_crt_sh/normalizer.json index 5fd671f9719..130bd2b8301 100644 --- a/boefjes/boefjes/plugins/kat_crt_sh/normalizer.json +++ b/boefjes/boefjes/plugins/kat_crt_sh/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_crt_sh_normalize", + "name": "Certificate Transparency logs (crt.sh)", + "description": "Parses data from certificate transparency logs (crt.sh) into hostnames and X509 certificates.", "consumes": [ 
"boefje/certificate-search" ], diff --git a/boefjes/boefjes/plugins/kat_cve_2023_34039/boefje.json b/boefjes/boefjes/plugins/kat_cve_2023_34039/boefje.json index 9c82a08ec66..0b0f6b6dc6e 100644 --- a/boefjes/boefjes/plugins/kat_cve_2023_34039/boefje.json +++ b/boefjes/boefjes/plugins/kat_cve_2023_34039/boefje.json @@ -1,7 +1,7 @@ { "id": "CVE-2023-34039", - "name": "CVE_2023_34039", - "description": "Check to see if known keys are usable on VMware CVE-2023-34039", + "name": "CVE-2023-34039 - VMware Aria Operations", + "description": "Checks if there are static SSH keys present that can be used for remote code execution on VWware Aria Operations (CVE-2023-34039). This vulnerability can be used to bypass SSH authentication and gain access to the Aria Operations for Networks CLI.", "consumes": [ "IPService" ], diff --git a/boefjes/boefjes/plugins/kat_cve_2023_34039/main.py b/boefjes/boefjes/plugins/kat_cve_2023_34039/main.py index f6e580e1468..d5bd7f4795a 100644 --- a/boefjes/boefjes/plugins/kat_cve_2023_34039/main.py +++ b/boefjes/boefjes/plugins/kat_cve_2023_34039/main.py @@ -57,7 +57,8 @@ def run(boefje_meta: BoefjeMeta) -> list[tuple[set, str | bytes]]: "\n".join( (str(coutput), f"{key_file} is allowed access to vRealize Network Insight on {ip}:{port}") ), - ) + ), + ({"openkat/finding"}, "CVE-2023-34039"), ] except Exception: # noqa: S112 diff --git a/boefjes/boefjes/plugins/kat_cve_2023_34039/normalize.py b/boefjes/boefjes/plugins/kat_cve_2023_34039/normalize.py deleted file mode 100644 index b379e8158f1..00000000000 --- a/boefjes/boefjes/plugins/kat_cve_2023_34039/normalize.py +++ /dev/null @@ -1,19 +0,0 @@ -from collections.abc import Iterable - -from boefjes.job_models import NormalizerOutput -from octopoes.models import Reference -from octopoes.models.ooi.findings import CVEFindingType, Finding - - -def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: - ooi = Reference.from_str(input_ooi["primary_key"]) - - if "is allowed access to vRealize 
Network Insight " in raw.decode(): - finding_type = CVEFindingType(id="CVE-2023-34039") - finding = Finding( - finding_type=finding_type.reference, - ooi=ooi, - description="Service is most likely vulnerable to CVE-2023-34039", - ) - yield finding_type - yield finding diff --git a/boefjes/boefjes/plugins/kat_cve_2023_34039/normalizer.json b/boefjes/boefjes/plugins/kat_cve_2023_34039/normalizer.json deleted file mode 100644 index 4cbb1bddda9..00000000000 --- a/boefjes/boefjes/plugins/kat_cve_2023_34039/normalizer.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "id": "kat_cve_2023_normalize", - "consumes": [ - "boefje/CVE-2023-34039" - ], - "produces": [ - "Finding", - "CVEFindingType" - ] -} diff --git a/boefjes/boefjes/plugins/kat_cve_2023_35078/boefje.json b/boefjes/boefjes/plugins/kat_cve_2023_35078/boefje.json index 07525502bf3..52c93d41450 100644 --- a/boefjes/boefjes/plugins/kat_cve_2023_35078/boefje.json +++ b/boefjes/boefjes/plugins/kat_cve_2023_35078/boefje.json @@ -1,7 +1,7 @@ { "id": "CVE_2023_35078", - "name": "CVE_2023_35078", - "description": "Use NFIR script to find CVE-2023-35078", + "name": "CVE-2023-35078 - Ivanti EPMM", + "description": "Checks websites for the presents of the Ivanti EPMM interface and whether the interface is vulnerable to the remote unauthenticated API access vulnerability (CVE-2023-35078). Script contribution by NFIR.", "consumes": [ "Website" ], diff --git a/boefjes/boefjes/plugins/kat_cve_2023_35078/normalizer.json b/boefjes/boefjes/plugins/kat_cve_2023_35078/normalizer.json index 0b7413eede9..c735560eae4 100644 --- a/boefjes/boefjes/plugins/kat_cve_2023_35078/normalizer.json +++ b/boefjes/boefjes/plugins/kat_cve_2023_35078/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_CVE_2023_35078_normalize", + "name": "CVE-2023-35078 Ivanti EPMM", + "description": "Checks if the Ivanti EPMM website is vulnerable to CVE-2023-35078. 
Produces a finding if it is vulnerable.", "consumes": [ "boefje/CVE_2023_35078" ], diff --git a/boefjes/boefjes/katalogus/dependencies/__init__.py b/boefjes/boefjes/plugins/kat_cve_2024_6387/__init__.py similarity index 100% rename from boefjes/boefjes/katalogus/dependencies/__init__.py rename to boefjes/boefjes/plugins/kat_cve_2024_6387/__init__.py diff --git a/boefjes/boefjes/plugins/kat_cve_2024_6387/normalize.py b/boefjes/boefjes/plugins/kat_cve_2024_6387/normalize.py new file mode 100644 index 00000000000..0948823cecc --- /dev/null +++ b/boefjes/boefjes/plugins/kat_cve_2024_6387/normalize.py @@ -0,0 +1,68 @@ +""" +CVE-2024-6387 checker +Author: Mischa van Geelen <@rickgeex> + +""" + +from collections.abc import Iterable + +from boefjes.job_models import NormalizerOutput +from octopoes.models import Reference +from octopoes.models.ooi.findings import CVEFindingType, Finding +from packaging.version import Version + +VULNERABLE_VERSIONS = [ + "SSH-2.0-OpenSSH_8.5", + "SSH-2.0-OpenSSH_8.6", + "SSH-2.0-OpenSSH_8.7", + "SSH-2.0-OpenSSH_8.8", + "SSH-2.0-OpenSSH_8.9", + "SSH-2.0-OpenSSH_9.0", + "SSH-2.0-OpenSSH_9.1", + "SSH-2.0-OpenSSH_9.2", + "SSH-2.0-OpenSSH_9.3", + "SSH-2.0-OpenSSH_9.4", + "SSH-2.0-OpenSSH_9.5", + "SSH-2.0-OpenSSH_9.6", + "SSH-2.0-OpenSSH_9.7", +] + + +def is_vulnerable(banner: str) -> bool: + if not any(version in banner for version in VULNERABLE_VERSIONS): + return False + + if banner.startswith("SSH-2.0-OpenSSH_9.2p1 Debian-2+deb12u"): + _, security_update = banner.split("deb12u") + if Version(security_update) >= Version("3"): + return False + elif banner.startswith("SSH-2.0-OpenSSH_9.6p1 Ubuntu-3ubuntu"): + _, security_update = banner.split("3ubuntu") + if Version(security_update) >= Version("13.3"): + return False + elif banner.startswith("SSH-2.0-OpenSSH_9.3p1 Ubuntu-1ubuntu"): + _, security_update = banner.split("1ubuntu") + if Version(security_update) >= Version("3.6"): + return False + elif banner.startswith("SSH-2.0-OpenSSH_8.9p1 
Ubuntu-3ubuntu"): + _, security_update = banner.split("3ubuntu") + if Version(security_update) >= Version("0.10"): + return False + + return True + + +def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: + ooi = Reference.from_str(input_ooi["primary_key"]) + + banner = raw.decode() + + if banner.startswith("SSH-2.0-OpenSSH") and is_vulnerable(banner): + finding_type = CVEFindingType(id="CVE-2024-6387") + finding = Finding( + finding_type=finding_type.reference, + ooi=ooi, + description="Service is most likely vulnerable to CVE-2024-6387", + ) + yield finding_type + yield finding diff --git a/boefjes/boefjes/plugins/kat_cve_2024_6387/normalizer.json b/boefjes/boefjes/plugins/kat_cve_2024_6387/normalizer.json new file mode 100644 index 00000000000..0e06b9d2362 --- /dev/null +++ b/boefjes/boefjes/plugins/kat_cve_2024_6387/normalizer.json @@ -0,0 +1,12 @@ +{ + "id": "kat_cve_2024_6387_normalize", + "name": "CVE-2024-6387 OpenSSH", + "description": "Checks the service banner for a race condition in OpenSSH server which can result in an unauthenticated remote attacker to trigger that some signals are handled in an unsafe manner (CVE-2024-6387). 
Requires the Service-Banner-boefje to be enabled.", + "consumes": [ + "openkat/service-banner" + ], + "produces": [ + "Finding", + "CVEFindingType" + ] +} diff --git a/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json b/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json index 2b390197290..f1315d93c33 100644 --- a/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json +++ b/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json @@ -1,13 +1,10 @@ { "id": "cve-finding-types", "name": "CVE Finding Types", - "description": "Hydrate information of CVE finding types from the CVE API", + "description": "Hydrate information of Common Vulnerabilities and Exposures (CVE) finding types from the CVE API", "consumes": [ "CVEFindingType" ], - "environment_keys": [ - "CVEAPI_URL" - ], "scan_level": 0, "enabled": true } diff --git a/boefjes/boefjes/plugins/kat_cve_finding_types/normalizer.json b/boefjes/boefjes/plugins/kat_cve_finding_types/normalizer.json index 6e2d52291aa..6ae5590562d 100644 --- a/boefjes/boefjes/plugins/kat_cve_finding_types/normalizer.json +++ b/boefjes/boefjes/plugins/kat_cve_finding_types/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_cve_finding_types_normalize", + "name": "CVE finding types", + "description": "Parses CVE findings.", "consumes": [ "boefje/cve-finding-types" ], diff --git a/boefjes/boefjes/plugins/kat_cwe_finding_types/boefje.json b/boefjes/boefjes/plugins/kat_cwe_finding_types/boefje.json index a3656aa48c6..abeeaa7d9d0 100644 --- a/boefjes/boefjes/plugins/kat_cwe_finding_types/boefje.json +++ b/boefjes/boefjes/plugins/kat_cwe_finding_types/boefje.json @@ -1,7 +1,7 @@ { "id": "cwe-finding-types", "name": "CWE Finding Types", - "description": "Hydrate information of CWE finding types", + "description": "Hydrate information of Common Weakness Enumeration (CWE) finding types", "consumes": [ "CWEFindingType" ], diff --git a/boefjes/boefjes/plugins/kat_cwe_finding_types/normalizer.json 
b/boefjes/boefjes/plugins/kat_cwe_finding_types/normalizer.json index 7b19ddd4c99..9b939d07df5 100644 --- a/boefjes/boefjes/plugins/kat_cwe_finding_types/normalizer.json +++ b/boefjes/boefjes/plugins/kat_cwe_finding_types/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_cwe_finding_types_normalize", + "name": "CWE finding", + "description": "Parses CWE findings.", "consumes": [ "boefje/cwe-finding-types" ], diff --git a/boefjes/boefjes/plugins/kat_dicom/boefje.json b/boefjes/boefjes/plugins/kat_dicom/boefje.json index 437829787a9..6cfd4e76498 100644 --- a/boefjes/boefjes/plugins/kat_dicom/boefje.json +++ b/boefjes/boefjes/plugins/kat_dicom/boefje.json @@ -1,7 +1,7 @@ { "id": "dicom", "name": "DICOM", - "description": "Find exposed DICOM servers.", + "description": "Find exposed DICOM servers. DICOM servers are used to process medical imaging information.", "consumes": [ "IPAddressV4", "IPAddressV6" diff --git a/boefjes/boefjes/plugins/kat_dicom/normalizer.json b/boefjes/boefjes/plugins/kat_dicom/normalizer.json index b8e5f1dd49c..74519e6e96c 100644 --- a/boefjes/boefjes/plugins/kat_dicom/normalizer.json +++ b/boefjes/boefjes/plugins/kat_dicom/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_dicom_normalize", + "name": "DICOM servers", + "description": "Parses DICOM output into findings and identified software.", "consumes": [ "boefje/dicom" ], diff --git a/boefjes/boefjes/plugins/kat_dns/boefje.json b/boefjes/boefjes/plugins/kat_dns/boefje.json index 76c36ae1775..5773364b9b6 100644 --- a/boefjes/boefjes/plugins/kat_dns/boefje.json +++ b/boefjes/boefjes/plugins/kat_dns/boefje.json @@ -1,13 +1,9 @@ { "id": "dns-records", - "name": "DnsRecords", - "description": "Fetch the DNS record(s) of a hostname", + "name": "DNS records", + "description": "Fetch the DNS record(s) of a hostname.", "consumes": [ "Hostname" ], - "environment_keys": [ - "RECORD_TYPES", - "REMOTE_NS" - ], "scan_level": 1 } diff --git a/boefjes/boefjes/plugins/kat_dns/main.py 
b/boefjes/boefjes/plugins/kat_dns/main.py index 2db21a40685..ce01870911f 100644 --- a/boefjes/boefjes/plugins/kat_dns/main.py +++ b/boefjes/boefjes/plugins/kat_dns/main.py @@ -6,6 +6,7 @@ from os import getenv import dns.resolver +from dns.edns import EDEOption from dns.name import Name from dns.resolver import Answer @@ -28,6 +29,10 @@ } +class TimeoutException(Exception): + pass + + class ZoneNotFoundException(Exception): pass @@ -48,6 +53,9 @@ def run(boefje_meta: BoefjeMeta) -> list[tuple[set, bytes | str]]: requested_dns_name = dns.name.from_text(hostname) resolver = dns.resolver.Resolver() + # https://dnspython.readthedocs.io/en/stable/_modules/dns/edns.html + # enable EDE to get the DNSSEC Bogus return values if the server supports it # codespell-ignore + resolver.use_edns(options=[EDEOption(15)]) nameserver = getenv("REMOTE_NS", "1.1.1.1") resolver.nameservers = [nameserver] @@ -76,6 +84,8 @@ def run(boefje_meta: BoefjeMeta) -> list[tuple[set, bytes | str]]: "dmarc_response": get_email_security_records(resolver, hostname, "_dmarc"), "dkim_response": get_email_security_records(resolver, hostname, "_domainkey"), } + if not answers_formatted and results["dmarc_response"] == "Timeout" and results["dmarc_response"] == "Timeout": + raise TimeoutException("No answers from DNS-Server due to timeouts.") return [(set(), json.dumps(results))] @@ -96,6 +106,16 @@ def get_email_security_records(resolver: dns.resolver.Resolver, hostname: str, r try: answer = resolver.resolve(f"{record_subdomain}.{hostname}", "TXT", raise_on_no_answer=False) return answer.response.to_text() + except dns.resolver.NoNameservers as error: + # no servers responded happily, we'll check the response from the first + # https://dnspython.readthedocs.io/en/latest/_modules/dns/rcode.html + # https://www.rfc-editor.org/rfc/rfc8914#name-extended-dns-error-code-6-d + firsterror = error.kwargs["errors"][0] + if firsterror[3] == "SERVFAIL": + edeerror = int(firsterror[4].options[0].code) + if edeerror 
in (1, 2, 5, 6, 7, 8, 9, 10, 11, 12): # DNSSEC error codes defined in RFC 8914 + return "DNSSECFAIL" # returned when the resolver indicates a DNSSEC failure. + raise # Not dnssec related, unhandled, raise. except dns.resolver.NXDOMAIN: return "NXDOMAIN" except dns.resolver.Timeout: diff --git a/boefjes/boefjes/plugins/kat_dns/normalize.py b/boefjes/boefjes/plugins/kat_dns/normalize.py index ab8dda9a799..5dd3912b268 100644 --- a/boefjes/boefjes/plugins/kat_dns/normalize.py +++ b/boefjes/boefjes/plugins/kat_dns/normalize.py @@ -170,7 +170,7 @@ def register_record(record: DNSRecord) -> DNSRecord: # DKIM dkim_results = results["dkim_response"] - if dkim_results not in ["NXDOMAIN", "Timeout"] and dkim_results.split("\n")[2] == "rcode NOERROR": + if dkim_results not in ["NXDOMAIN", "Timeout", "DNSSECFAIL"] and dkim_results.split("\n")[2] == "rcode NOERROR": yield DKIMExists( hostname=input_hostname.reference, ) diff --git a/boefjes/boefjes/plugins/kat_dns/normalizer.json b/boefjes/boefjes/plugins/kat_dns/normalizer.json index e4a2316eda0..fa9c8a73fa6 100644 --- a/boefjes/boefjes/plugins/kat_dns/normalizer.json +++ b/boefjes/boefjes/plugins/kat_dns/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_dns_normalize", + "name": "DNS records", + "description": "Parses the DNS records.", "consumes": [ "boefje/dns-records" ], diff --git a/boefjes/boefjes/katalogus/storage/__init__.py b/boefjes/boefjes/plugins/kat_dns_version/__init__.py similarity index 100% rename from boefjes/boefjes/katalogus/storage/__init__.py rename to boefjes/boefjes/plugins/kat_dns_version/__init__.py diff --git a/boefjes/boefjes/plugins/kat_dns_version/boefje.json b/boefjes/boefjes/plugins/kat_dns_version/boefje.json new file mode 100644 index 00000000000..3aa66ca3cfd --- /dev/null +++ b/boefjes/boefjes/plugins/kat_dns_version/boefje.json @@ -0,0 +1,9 @@ +{ + "id": "dns-bind-version", + "name": "DNS software version", + "description": "Uses the DNS VERSION.BIND command to attempt to learn the servers 
software.", + "consumes": [ + "IPService" + ], + "scan_level": 2 +} diff --git a/boefjes/boefjes/plugins/kat_dns_version/description.md b/boefjes/boefjes/plugins/kat_dns_version/description.md new file mode 100644 index 00000000000..5ac8b8ea5f2 --- /dev/null +++ b/boefjes/boefjes/plugins/kat_dns_version/description.md @@ -0,0 +1,3 @@ +# Fetch DNS Server software version + +This boefje tries to detect the DNS Server version by doing a VERSION.BIND call. diff --git a/boefjes/boefjes/plugins/kat_dns_version/main.py b/boefjes/boefjes/plugins/kat_dns_version/main.py new file mode 100644 index 00000000000..40631e61f69 --- /dev/null +++ b/boefjes/boefjes/plugins/kat_dns_version/main.py @@ -0,0 +1,42 @@ +"""Boefje script for getting namserver version""" + +import json +from os import getenv + +import dns +import dns.message +import dns.query + +from boefjes.job_models import BoefjeMeta + + +def run(boefje_meta: BoefjeMeta) -> list[tuple[set, str | bytes]]: + input_ = boefje_meta.arguments["input"] # input is IPService + ip_port = input_["ip_port"] + if input_["service"]["name"] != "domain": + return [({"boefje/error"}, "Not a DNS service")] + + ip = ip_port["address"]["address"] + port = int(ip_port["port"]) + protocol = ip_port["protocol"] + + timeout = float(getenv("TIMEOUT", 30)) + + method = dns.query.udp if protocol == "udp" else dns.query.tcp + + queries = [ + dns.message.make_query("VERSION.BIND", dns.rdatatype.TXT, dns.rdataclass.CHAOS), + dns.message.make_query("VERSION.SERVER", dns.rdatatype.TXT, dns.rdataclass.CHAOS), + ] + + results = [] + for query in queries: + response = method(query, where=ip, timeout=timeout, port=port) + + try: + answer = response.answer[0] + results.append(answer.to_rdataset().pop().strings[0].decode()) + except IndexError: + pass + + return [(set(), json.dumps(results))] diff --git a/boefjes/boefjes/plugins/kat_dns_version/normalize.py b/boefjes/boefjes/plugins/kat_dns_version/normalize.py new file mode 100644 index 
00000000000..b3e805cc1c5 --- /dev/null +++ b/boefjes/boefjes/plugins/kat_dns_version/normalize.py @@ -0,0 +1,36 @@ +import json +from collections.abc import Iterable + +from boefjes.job_models import NormalizerOutput +from octopoes.models import Reference +from octopoes.models.ooi.software import Software, SoftwareInstance + + +def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: + input_ooi_reference = Reference.from_str(input_ooi["primary_key"]) + + results = json.loads(raw) + for version in results: + if version.startswith("bind"): + name = "bind" + version_number = version.split("-")[1] + elif version.startswith("9."): + name = "bind" + version_number = version + elif version.startswith("Microsoft DNS"): + name = "Microsoft DNS" + version_number = version.replace("Microsoft DNS ", "").split(" ")[0] + elif version.startswith("dnsmasq"): + name = "dnsmasq" + version_number = version.split("-")[1] + elif version.startswith("PowerDNS"): + name = "PowerDNS" + version_number = version.replace("PowerDNS Authoritative Server ", "").split(" ")[0] + else: + name = None + version_number = None + + if name and version_number: + software = Software(name=name, version=version_number) + software_instance = SoftwareInstance(ooi=input_ooi_reference, software=software.reference) + yield from [software, software_instance] diff --git a/boefjes/boefjes/plugins/kat_dns_version/normalizer.json b/boefjes/boefjes/plugins/kat_dns_version/normalizer.json new file mode 100644 index 00000000000..4bd2cad202f --- /dev/null +++ b/boefjes/boefjes/plugins/kat_dns_version/normalizer.json @@ -0,0 +1,11 @@ +{ + "id": "dns-bind-version-normalize", + "name": "DNS bind version normalizer", + "consumes": [ + "boefje/dns-bind-version" + ], + "produces": [ + "Software", + "SoftwareInstance" + ] +} diff --git a/boefjes/boefjes/plugins/kat_dns_version/schema.json b/boefjes/boefjes/plugins/kat_dns_version/schema.json new file mode 100644 index 00000000000..6a9fbe29348 --- /dev/null +++ 
b/boefjes/boefjes/plugins/kat_dns_version/schema.json @@ -0,0 +1,13 @@ +{ + "title": "Arguments", + "type": "object", + "properties": { + "TIMEOUT": { + "title": "TIMEOUT", + "type": "integer", + "description": "Timeout for requests to the targeted dns servers", + "default": 30, + "minimum": 0 + } + } +} diff --git a/boefjes/boefjes/plugins/kat_dns_zone/boefje.json b/boefjes/boefjes/plugins/kat_dns_zone/boefje.json index 25df0af7bcd..cc03e079bd1 100644 --- a/boefjes/boefjes/plugins/kat_dns_zone/boefje.json +++ b/boefjes/boefjes/plugins/kat_dns_zone/boefje.json @@ -1,6 +1,6 @@ { "id": "dns-zone", - "name": "DnsZone", + "name": "DNS zone", "description": "Fetch the parent DNS zone of a DNS zone", "consumes": [ "DNSZone" diff --git a/boefjes/boefjes/plugins/kat_dns_zone/normalizer.json b/boefjes/boefjes/plugins/kat_dns_zone/normalizer.json index c4060c833ec..e9e156f6d2c 100644 --- a/boefjes/boefjes/plugins/kat_dns_zone/normalizer.json +++ b/boefjes/boefjes/plugins/kat_dns_zone/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_dns_zone_normalize", + "name": "DNS zone", + "description": "Parses the parent DNS zone into new hostnames and DNS zones.", "consumes": [ "boefje/dns-zone" ], diff --git a/boefjes/boefjes/plugins/kat_dnssec/boefje.json b/boefjes/boefjes/plugins/kat_dnssec/boefje.json index 7b59b0fae25..8b4d156396e 100644 --- a/boefjes/boefjes/plugins/kat_dnssec/boefje.json +++ b/boefjes/boefjes/plugins/kat_dnssec/boefje.json @@ -1,6 +1,6 @@ { "id": "dns-sec", - "name": "Dnssec", + "name": "DNSSEC", "description": "Validates DNSSec of a hostname", "consumes": [ "Hostname" diff --git a/boefjes/boefjes/plugins/kat_dnssec/normalizer.json b/boefjes/boefjes/plugins/kat_dnssec/normalizer.json index 24877c2e897..670f16592f4 100644 --- a/boefjes/boefjes/plugins/kat_dnssec/normalizer.json +++ b/boefjes/boefjes/plugins/kat_dnssec/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_dnssec_normalize", + "name": "DNS records", + "description": "Parses DNSSEC data into findings.", 
"consumes": [ "boefje/dns-sec" ], diff --git a/boefjes/boefjes/plugins/kat_external_db/boefje.json b/boefjes/boefjes/plugins/kat_external_db/boefje.json index 1f27e7f9d2e..cbf7ee0c927 100644 --- a/boefjes/boefjes/plugins/kat_external_db/boefje.json +++ b/boefjes/boefjes/plugins/kat_external_db/boefje.json @@ -1,16 +1,9 @@ { "id": "external_db", - "name": "External Database", - "description": "Fetch hostnames and IP addresses/netblocks from an external database with API. See `description.md` for more information.", + "name": "External database host fetcher", + "description": "Fetch hostnames and IP addresses/netblocks from an external database with API. See `description.md` for more information. Useful if you have a large network.", "consumes": [ "Network" ], - "environment_keys": [ - "DB_URL", - "DB_ACCESS_TOKEN", - "DB_ORGANIZATION_IDENTIFIER", - "DB_ENDPOINT_FORMAT", - "REQUESTS_CA_BUNDLE" - ], "scan_level": 0 } diff --git a/boefjes/boefjes/plugins/kat_external_db/description.md b/boefjes/boefjes/plugins/kat_external_db/description.md index f40bffd9733..cd69fa4fb6a 100644 --- a/boefjes/boefjes/plugins/kat_external_db/description.md +++ b/boefjes/boefjes/plugins/kat_external_db/description.md @@ -33,4 +33,4 @@ For example: } ``` -The expected ip and domain (item) key lists can be configured in `normalize.py`. Ranges are expected as strings in CIDR notation. Clearance level for fetched items is set to `L0`. Reference implementation of the API server is in the works. +The expected ip and domain (item) key lists can be configured in `normalize.py`. Ranges are expected as strings in CIDR notation. Clearance level for fetched items is set to `L3` when `BOEFJES_SCAN_PROFILE_WHITELIST='{"kat_external_db_normalize": 3}'` is added to the `.env` file otherwise it is set to `L0`. Reference implementation of the API server is in the works. 
diff --git a/boefjes/boefjes/plugins/kat_external_db/normalize.py b/boefjes/boefjes/plugins/kat_external_db/normalize.py index e1b7fdb99c9..71595001f92 100644 --- a/boefjes/boefjes/plugins/kat_external_db/normalize.py +++ b/boefjes/boefjes/plugins/kat_external_db/normalize.py @@ -17,6 +17,8 @@ IP_ADDRESS_ITEM_PATH = ["address"] DOMAIN_LIST_PATH = ["domains"] DOMAIN_ITEM_PATH = ["name"] +INDEMNIFICATION_ITEM_PATH = ["indemnification_level"] +DEFAULT_INDEMNIFICATION_LEVEL = 3 def follow_path_in_dict(path, path_dict): @@ -29,6 +31,18 @@ def follow_path_in_dict(path, path_dict): return path_dict +def get_indemnification_level(path_dict): + """Return indemnification level from metadata or default.""" + try: + indemnification_level = int(follow_path_in_dict(path=INDEMNIFICATION_ITEM_PATH, path_dict=path_dict)) + if 0 <= indemnification_level < 5: + return indemnification_level + raise ValueError(f"Invalid indemnificationlevel {indemnification_level}, aborting.") + except KeyError: + logging.info("No integer indemnification level found, using default.") + return DEFAULT_INDEMNIFICATION_LEVEL + + def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: """Yields hostnames, IPv4/6 addresses or netblocks.""" results = json.loads(raw) @@ -37,6 +51,7 @@ def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: for address_item in follow_path_in_dict(path=IP_ADDRESS_LIST_PATH, path_dict=results): interface = ip_interface(follow_path_in_dict(path=IP_ADDRESS_ITEM_PATH, path_dict=address_item)) + indemnification_level = get_indemnification_level(path_dict=address_item) address, mask_str = interface.with_prefixlen.split("/") mask = int(mask_str) @@ -50,7 +65,7 @@ def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: ip_address = address_type(address=address, network=network.reference) yield ip_address - yield DeclaredScanProfile(reference=ip_address.reference, level=3) + yield DeclaredScanProfile(reference=ip_address.reference, 
level=indemnification_level) addresses_count += 1 if mask < interface.ip.max_prefixlen: @@ -60,15 +75,17 @@ def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: network=network.reference, ) yield block - yield DeclaredScanProfile(reference=block.reference, level=3) + yield DeclaredScanProfile(reference=block.reference, level=indemnification_level) blocks_count += 1 - for hostname in follow_path_in_dict(path=DOMAIN_LIST_PATH, path_dict=results): + for hostname_data in follow_path_in_dict(path=DOMAIN_LIST_PATH, path_dict=results): hostname = Hostname( - name=follow_path_in_dict(path=DOMAIN_ITEM_PATH, path_dict=hostname), network=network.reference + name=follow_path_in_dict(path=DOMAIN_ITEM_PATH, path_dict=hostname_data), network=network.reference ) yield hostname - yield DeclaredScanProfile(reference=hostname.reference, level=3) + yield DeclaredScanProfile( + reference=hostname.reference, level=get_indemnification_level(path_dict=hostname_data) + ) hostnames_count += 1 logging.info( diff --git a/boefjes/boefjes/plugins/kat_external_db/normalizer.json b/boefjes/boefjes/plugins/kat_external_db/normalizer.json index 36d425db438..2d9e72d56e9 100644 --- a/boefjes/boefjes/plugins/kat_external_db/normalizer.json +++ b/boefjes/boefjes/plugins/kat_external_db/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_external_db_normalize", + "name": "External database hosts fetcher", + "description": "Parse data the fetched host data from the external database into hostnames and IP-addresses.", "consumes": [ "boefje/external_db" ], diff --git a/boefjes/boefjes/plugins/kat_fierce/boefje.json b/boefjes/boefjes/plugins/kat_fierce/boefje.json index c198875c8a1..1f7d5c677db 100644 --- a/boefjes/boefjes/plugins/kat_fierce/boefje.json +++ b/boefjes/boefjes/plugins/kat_fierce/boefje.json @@ -1,9 +1,9 @@ { "id": "fierce", "name": "Fierce", - "description": "Use a Fierce scan to find subdomains (with their ip)", + "description": "Perform DNS reconnaissance using Fierce, to help 
locate non-contiguous IP space and hostnames against specified hostnames. No exploitation is performed.", "consumes": [ "Hostname" ], - "scan_level": 3 + "scan_level": 1 } diff --git a/boefjes/boefjes/plugins/kat_fierce/normalizer.json b/boefjes/boefjes/plugins/kat_fierce/normalizer.json index 536944b4995..82589b6565d 100644 --- a/boefjes/boefjes/plugins/kat_fierce/normalizer.json +++ b/boefjes/boefjes/plugins/kat_fierce/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_fierce_normalize", + "name": "Fierce", + "description": "Parse the DNS reconnaissance data from Fierce into hostnames and/or IP addresses.", "consumes": [ "boefje/fierce" ], diff --git a/boefjes/boefjes/katalogus/tests/__init__.py b/boefjes/boefjes/plugins/kat_finding_normalizer/__init__.py similarity index 100% rename from boefjes/boefjes/katalogus/tests/__init__.py rename to boefjes/boefjes/plugins/kat_finding_normalizer/__init__.py diff --git a/boefjes/boefjes/plugins/kat_finding_normalizer/normalize.py b/boefjes/boefjes/plugins/kat_finding_normalizer/normalize.py new file mode 100644 index 00000000000..e00cc7d08fb --- /dev/null +++ b/boefjes/boefjes/plugins/kat_finding_normalizer/normalize.py @@ -0,0 +1,38 @@ +import re +from collections.abc import Iterable + +from boefjes.job_models import NormalizerOutput +from octopoes.models import Reference +from octopoes.models.ooi.findings import CVEFindingType, Finding, KATFindingType, RetireJSFindingType, SnykFindingType + +CVE_PATTERN = re.compile(r"CVE-\d{4}-\d{4,}") + + +def run(input_ooi: dict, raw: bytes) -> Iterable[NormalizerOutput]: + ooi = Reference.from_str(input_ooi["primary_key"]) + finding_ids_str = raw.decode() + finding_ids_list = [fid.strip().upper() for fid in finding_ids_str.split(",")] + + finding_type_mapping = { + "CVE": CVEFindingType, + "KAT": KATFindingType, + "SNYK": SnykFindingType, + "RETIREJS": RetireJSFindingType, + } + + for finding_id in finding_ids_list: + parts = finding_id.split("-") + prefix = parts[0] + + if prefix in 
finding_type_mapping: + if prefix == "CVE" and not CVE_PATTERN.match(finding_id): + raise ValueError(f"{finding_id} is not a valid CVE ID") + + finding_type = finding_type_mapping[prefix](id=finding_id) + finding = Finding( + finding_type=finding_type.reference, + ooi=ooi, + description=f"{finding_id} is found on this OOI", + ) + yield finding_type + yield finding diff --git a/boefjes/boefjes/plugins/kat_finding_normalizer/normalizer.json b/boefjes/boefjes/plugins/kat_finding_normalizer/normalizer.json new file mode 100644 index 00000000000..70adfd46c47 --- /dev/null +++ b/boefjes/boefjes/plugins/kat_finding_normalizer/normalizer.json @@ -0,0 +1,11 @@ +{ + "id": "kat_generic_finding_normalize", + "name": "Finding types", + "consumes": [ + "openkat/finding" + ], + "produces": [ + "Finding", + "CVEFindingType" + ] +} diff --git a/boefjes/boefjes/plugins/kat_green_hosting/boefje.json b/boefjes/boefjes/plugins/kat_green_hosting/boefje.json index 9fe34d17ae8..846b05efa33 100644 --- a/boefjes/boefjes/plugins/kat_green_hosting/boefje.json +++ b/boefjes/boefjes/plugins/kat_green_hosting/boefje.json @@ -1,7 +1,7 @@ { "id": "green-hosting", "name": "GreenHosting", - "description": "Use the Green Web Foundation Partner API to check whether the website is hosted on a green server. Meaning it runs on renewable energy and/or offsets its carbon footprint", + "description": "Use the Green Web Foundation Partner API to check whether the website is hosted on a green server. Meaning it runs on renewable energy and/or offsets its carbon footprint. 
Does not require an API key.", "consumes": [ "Website" ], diff --git a/boefjes/boefjes/plugins/kat_green_hosting/normalizer.json b/boefjes/boefjes/plugins/kat_green_hosting/normalizer.json index 993413d85e4..714628e5587 100644 --- a/boefjes/boefjes/plugins/kat_green_hosting/normalizer.json +++ b/boefjes/boefjes/plugins/kat_green_hosting/normalizer.json @@ -1,5 +1,7 @@ { "id": "kat_green_hosting_normalize", + "description": "Parses the Green Hosting output into findings.", + "name": "Green Hosting", "consumes": [ "boefje/green-hosting" ], diff --git a/boefjes/boefjes/plugins/kat_kat_finding_types/kat_finding_types.json b/boefjes/boefjes/plugins/kat_kat_finding_types/kat_finding_types.json index 102a0868ebe..00fbbba47a4 100644 --- a/boefjes/boefjes/plugins/kat_kat_finding_types/kat_finding_types.json +++ b/boefjes/boefjes/plugins/kat_kat_finding_types/kat_finding_types.json @@ -21,14 +21,14 @@ "recommendation": "This header is not supported by default by Mozilla. If this header is required for your environment: Set the HTTP header X-Permitted-Cross- Domain-Policies: none in all HTTP responses. Use value master-only if a Flash or Acrobat cross- domain configuration file is used that is placed in the root of the web server" }, "KAT-NO-EXPLICIT-XSS-PROTECTION": { - "description": "This is a deprecated header previously used to prevent against Cross-Site-Scripting attacks. Support in modern browsers could introduce XSS attacks again.", + "description": "The 'X-XSS-Protection' header is a deprecated header previously used to prevent against Cross-Site-Scripting attacks. Support in modern browsers could introduce XSS attacks again.", "source": "https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-XSS-Protection", "risk": "recommendation", "impact": "Reflected cross-site scripting attacks may not be blocked.", - "recommendation": "This header is deprecated and should not be used." + "recommendation": "Remove the deprecated header to reduce the chance of XSS attacks." 
}, "KAT-NO-X-FRAME-OPTIONS": { - "description": "HTTP header 'X-Frame-Options' is missing. It is possible that the website can be loaded via an
- {% include "tasks/partials/boefje_task_history.html" %} + {% include "tasks/plugin_detail_task_list.html" %}
diff --git a/rocky/katalogus/templates/normalizer_detail.html b/rocky/katalogus/templates/normalizer_detail.html index 162778077d2..55d115d8db2 100644 --- a/rocky/katalogus/templates/normalizer_detail.html +++ b/rocky/katalogus/templates/normalizer_detail.html @@ -24,14 +24,15 @@- {% include "tasks/partials/normalizer_task_history.html" %} + {% include "tasks/plugin_detail_task_list.html" %}
diff --git a/rocky/katalogus/templates/partials/boefje_tile.html b/rocky/katalogus/templates/partials/boefje_tile.html index 422bd56bcf6..08ad467167a 100644 --- a/rocky/katalogus/templates/partials/boefje_tile.html +++ b/rocky/katalogus/templates/partials/boefje_tile.html @@ -4,13 +4,16 @@{{ plugin.name }}{{ plugin.type|title }} @@ -44,14 +47,24 @@
{% endif %} {% endif %}{{ plugin.description }}
+{% translate "This boefje is required by the following report types." %}
++ {% for plugin_id, report_types in plugin_report_types.items %} + {% if plugin_id == plugin.id %} + {% for report_type in report_types %} +
+ + {% endfor %} + {% endif %} + {% endfor %} ++ {% blocktranslate trimmed with plugin_name=plugin.name %} + {{ plugin_name }} can produce the following output: + {% endblocktranslate %} +
++
{% blocktranslate trimmed %} @@ -33,7 +33,7 @@
{% blocktranslate trimmed %}
After creating a new object OpenKAT will ask you to set a clearance level.
diff --git a/rocky/onboarding/views.py b/rocky/onboarding/views.py
index 3ef68dc3932..db68a262ae2 100644
--- a/rocky/onboarding/views.py
+++ b/rocky/onboarding/views.py
@@ -139,7 +139,7 @@ def setup(self, request, *args, **kwargs):
def get_or_create_url_object(self, url: str) -> OOI:
network = Network(name="internet")
- url = URL(network=network.reference, raw=url)
+ url = URL(network=network.reference, raw=url, user_id=self.request.user.id)
observed_at = datetime.now(timezone.utc)
url_ooi, _ = get_or_create_ooi(self.octopoes_api_connector, self.bytes_client, url, observed_at)
return url_ooi
@@ -183,7 +183,7 @@ def get_boefjes_tiles(self) -> list[dict[str, Any]]:
{
"id": "dns_zone",
"type": "boefje",
- "scan_level": "l1",
+ "scan_level": "1",
"name": "DNS-Zone",
"description": _("Fetch the parent DNS zone of a hostname"),
"enabled": False,
@@ -191,7 +191,7 @@ def get_boefjes_tiles(self) -> list[dict[str, Any]]:
{
"id": "fierce",
"type": "boefje",
- "scan_level": "l3",
+ "scan_level": "3",
"name": "Fierce",
"description": _("Finds subdomains by brute force"),
"enabled": False,
@@ -362,19 +362,19 @@ class OnboardingReportView(
current_step = 4
permission_required = "tools.can_scan_organization"
- def get_oois_pk(self) -> list[str]:
- """
- Gets the Hostname primary key out of the URL object specified by the ooi query parameter.
- """
- ooi_pk = self.request.GET.get("ooi", "")
- ooi = self.get_ooi(ooi_pk)
+ def setup(self, request, *args, **kwargs):
+ super().setup(request, *args, **kwargs)
+ ooi = self.get_ooi(self.request.GET.get("ooi", ""))
+ self.oois = [Hostname(name=ooi.web_url.tokenized["netloc"]["name"], network=ooi.network)]
+ self.selected_oois = [self.oois[0].primary_key]
- return [Hostname(name=ooi.web_url.tokenized["netloc"]["name"], network=ooi.network).primary_key]
+ def get_report_type_selection(self) -> list[str]:
+ return [self.request.GET.get("report_type", "")]
def post(self, request, *args, **kwargs):
self.set_member_onboarded()
- report_ooi = self.save_report()
+ report_ooi = self.save_report([("Onboarding Report", "Onboarding Report")])
return redirect(
reverse("view_report", kwargs={"organization_code": self.organization.code})
diff --git a/rocky/package.json b/rocky/package.json
index a9c7a0543fa..e1597de7b38 100644
--- a/rocky/package.json
+++ b/rocky/package.json
@@ -16,5 +16,6 @@
},
"browserslist": [
"last 1 Chrome version"
- ]
+ ],
+ "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e"
}
diff --git a/rocky/poetry.lock b/rocky/poetry.lock
index 9f16ad18d20..5fc54c05d7b 100644
--- a/rocky/poetry.lock
+++ b/rocky/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "annotated-types"
@@ -234,13 +234,13 @@ cffi = ">=1.0.0"
[[package]]
name = "certifi"
-version = "2024.2.2"
+version = "2024.7.4"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
- {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
]
[[package]]
@@ -452,17 +452,17 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
[[package]]
name = "django"
-version = "4.2.11"
+version = "5.0.8"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.10"
files = [
- {file = "Django-4.2.11-py3-none-any.whl", hash = "sha256:ddc24a0a8280a0430baa37aff11f28574720af05888c62b7cfe71d219f4599d3"},
- {file = "Django-4.2.11.tar.gz", hash = "sha256:6e6ff3db2d8dd0c986b4eec8554c8e4f919b5c1ff62a5b4390c17aff2ed6e5c4"},
+ {file = "Django-5.0.8-py3-none-any.whl", hash = "sha256:333a7988f7ca4bc14d360d3d8f6b793704517761ae3813b95432043daec22a45"},
+ {file = "Django-5.0.8.tar.gz", hash = "sha256:ebe859c9da6fead9c9ee6dbfa4943b04f41342f4cea2c4d8c978ef0d10694f2b"},
]
[package.dependencies]
-asgiref = ">=3.6.0,<4"
+asgiref = ">=3.7.0,<4"
sqlparse = ">=0.3.1"
tzdata = {version = "*", markers = "sys_platform == \"win32\""}
@@ -499,6 +499,20 @@ files = [
[package.dependencies]
django = "*"
+[[package]]
+name = "django-components"
+version = "0.88"
+description = "A way to create simple reusable template components in Django."
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+ {file = "django_components-0.88-py3-none-any.whl", hash = "sha256:19641759bcbdafeaf48d4363c11639201fc893946745799b12b77cd7996da8fc"},
+ {file = "django_components-0.88.tar.gz", hash = "sha256:a796077706423b491234625d95bb8084761211ae022df159ead8472a1a256c7a"},
+]
+
+[package.dependencies]
+Django = ">=4.2"
+
[[package]]
name = "django_compressor"
version = "4.4"
@@ -521,21 +535,21 @@ resolved_reference = "620bc0ab86590f8981dd24456a70951c9bdbf91f"
[[package]]
name = "django-csp"
-version = "3.7"
+version = "3.8"
description = "Django Content Security Policy support."
optional = false
python-versions = "*"
files = [
- {file = "django_csp-3.7-py2.py3-none-any.whl", hash = "sha256:01443a07723f9a479d498bd7bb63571aaa771e690f64bde515db6cdb76e8041a"},
- {file = "django_csp-3.7.tar.gz", hash = "sha256:01eda02ad3f10261c74131cdc0b5a6a62b7c7ad4fd017fbefb7a14776e0a9727"},
+ {file = "django_csp-3.8-py3-none-any.whl", hash = "sha256:19b2978b03fcd73517d7d67acbc04fbbcaec0facc3e83baa502965892d1e0719"},
+ {file = "django_csp-3.8.tar.gz", hash = "sha256:ef0f1a9f7d8da68ae6e169c02e9ac661c0ecf04db70e0d1d85640512a68471c0"},
]
[package.dependencies]
-Django = ">=1.8"
+Django = ">=3.2"
[package.extras]
jinja2 = ["jinja2 (>=2.9.6)"]
-tests = ["jinja2 (>=2.9.6)", "mock (==1.0.1)", "pep8 (==1.4.6)", "pytest (<4.0)", "pytest-django", "pytest-flakes (==1.0.1)", "pytest-pep8 (==1.0.6)", "six (==1.12.0)"]
+tests = ["jinja2 (>=2.9.6)", "pytest", "pytest-cov", "pytest-django", "pytest-ruff"]
[[package]]
name = "django-environ"
@@ -567,15 +581,29 @@ files = [
[package.dependencies]
Django = ">=3.2"
+[[package]]
+name = "django-ipware"
+version = "7.0.1"
+description = "A Django application to retrieve user's IP address"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "django-ipware-7.0.1.tar.gz", hash = "sha256:d9ec43d2bf7cdf216fed8d494a084deb5761a54860a53b2e74346a4f384cff47"},
+ {file = "django_ipware-7.0.1-py2.py3-none-any.whl", hash = "sha256:db16bbee920f661ae7f678e4270460c85850f03c6761a4eaeb489bdc91f64709"},
+]
+
+[package.dependencies]
+python-ipware = ">=2.0.3"
+
[[package]]
name = "django-otp"
-version = "1.3.0"
+version = "1.5.1"
description = "A pluggable framework for adding two-factor authentication to Django using one-time passwords."
optional = false
python-versions = ">=3.7"
files = [
- {file = "django_otp-1.3.0-py3-none-any.whl", hash = "sha256:5277731bc05b6cdbf96aa84ac46018e30ed5fb248086053b0146f925de059060"},
- {file = "django_otp-1.3.0.tar.gz", hash = "sha256:8f4156a3c14ce2aaa31379385eadf388925cd50fc4b5d20a3b944f454c98ff7c"},
+ {file = "django_otp-1.5.1-py3-none-any.whl", hash = "sha256:48d0a1943cbeb610f0bca51a0da42cc7eefcff387b101c69d3ed432cd75a0fd4"},
+ {file = "django_otp-1.5.1.tar.gz", hash = "sha256:d0c60a3c20dd16e9f2c7c3d8669306c34a83c20fd021565adbf782f4ba911b13"},
]
[package.dependencies]
@@ -583,6 +611,7 @@ django = ">=3.2"
[package.extras]
qrcode = ["qrcode"]
+segno = ["segno"]
[[package]]
name = "django-password-validators"
@@ -638,6 +667,27 @@ url = "https://github.com/jazzband/django-rest-knox"
reference = "dd7b062147bc4b9718e22d5acd6cf1301a1036b9"
resolved_reference = "dd7b062147bc4b9718e22d5acd6cf1301a1036b9"
+[[package]]
+name = "django-structlog"
+version = "8.1.0"
+description = "Structured Logging for Django"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "django_structlog-8.1.0-py3-none-any.whl", hash = "sha256:1072564bd6f36e8d3ba9893e7b31c1c46e94301189fedaecc0fb8a46525a3214"},
+ {file = "django_structlog-8.1.0.tar.gz", hash = "sha256:0229b9a2efbd24a4e3500169788e53915c2429521e34e41dd58ccc56039bef3f"},
+]
+
+[package.dependencies]
+asgiref = ">=3.6.0"
+django = ">=4.2"
+django-ipware = ">=6.0.2"
+structlog = ">=21.4.0"
+
+[package.extras]
+celery = ["celery (>=5.1)"]
+commands = ["django-extensions (>=1.4.9)"]
+
[[package]]
name = "django-tagulous"
version = "1.3.3"
@@ -682,13 +732,13 @@ yubikey = ["django-otp-yubikey"]
[[package]]
name = "django-weasyprint"
-version = "2.2.2"
+version = "2.3.0"
description = "Django WeasyPrint CBV"
optional = false
python-versions = ">=3.8"
files = [
- {file = "django-weasyprint-2.2.2.tar.gz", hash = "sha256:7f554bcc428293aeadc175ab5607b4f3bf30c0e5da3d4aa34453b3d96e0ffd3a"},
- {file = "django_weasyprint-2.2.2-py3-none-any.whl", hash = "sha256:605eba0dd3246c0410a60fdaa581139330ad6c637fc273e1bfe90a7a09f53728"},
+ {file = "django-weasyprint-2.3.0.tar.gz", hash = "sha256:2f849e15bfd6c1b2a58512097b9042eddf3533651d37d2e096cd6f7d8be6442b"},
+ {file = "django_weasyprint-2.3.0-py3-none-any.whl", hash = "sha256:807cb3b16332123d97c8bbe2ac9c70286103fe353235351803ffd33b67284735"},
]
[package.dependencies]
@@ -1489,42 +1539,42 @@ django = ">=1.11.0"
[[package]]
name = "opentelemetry-api"
-version = "1.24.0"
+version = "1.26.0"
description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_api-1.24.0-py3-none-any.whl", hash = "sha256:0f2c363d98d10d1ce93330015ca7fd3a65f60be64e05e30f557c61de52c80ca2"},
- {file = "opentelemetry_api-1.24.0.tar.gz", hash = "sha256:42719f10ce7b5a9a73b10a4baf620574fb8ad495a9cbe5c18d76b75d8689c67e"},
+ {file = "opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064"},
+ {file = "opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce"},
]
[package.dependencies]
deprecated = ">=1.2.6"
-importlib-metadata = ">=6.0,<=7.0"
+importlib-metadata = ">=6.0,<=8.0.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-common"
-version = "1.24.0"
+version = "1.26.0"
description = "OpenTelemetry Protobuf encoding"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_exporter_otlp_proto_common-1.24.0-py3-none-any.whl", hash = "sha256:e51f2c9735054d598ad2df5d3eca830fecfb5b0bda0a2fa742c9c7718e12f641"},
- {file = "opentelemetry_exporter_otlp_proto_common-1.24.0.tar.gz", hash = "sha256:5d31fa1ff976cacc38be1ec4e3279a3f88435c75b38b1f7a099a1faffc302461"},
+ {file = "opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl", hash = "sha256:ee4d8f8891a1b9c372abf8d109409e5b81947cf66423fd998e56880057afbc71"},
+ {file = "opentelemetry_exporter_otlp_proto_common-1.26.0.tar.gz", hash = "sha256:bdbe50e2e22a1c71acaa0c8ba6efaadd58882e5a5978737a44a4c4b10d304c92"},
]
[package.dependencies]
-opentelemetry-proto = "1.24.0"
+opentelemetry-proto = "1.26.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
-version = "1.24.0"
+version = "1.26.0"
description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0-py3-none-any.whl", hash = "sha256:f40d62aa30a0a43cc1657428e59fcf82ad5f7ea8fff75de0f9d9cb6f739e0a3b"},
- {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0.tar.gz", hash = "sha256:217c6e30634f2c9797999ea9da29f7300479a94a610139b9df17433f915e7baa"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl", hash = "sha256:e2be5eff72ebcb010675b818e8d7c2e7d61ec451755b8de67a140bc49b9b0280"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0.tar.gz", hash = "sha256:a65b67a9a6b06ba1ec406114568e21afe88c1cdb29c464f2507d529eb906d8ae"},
]
[package.dependencies]
@@ -1532,22 +1582,19 @@ deprecated = ">=1.2.6"
googleapis-common-protos = ">=1.52,<2.0"
grpcio = ">=1.0.0,<2.0.0"
opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-exporter-otlp-proto-common = "1.24.0"
-opentelemetry-proto = "1.24.0"
-opentelemetry-sdk = ">=1.24.0,<1.25.0"
-
-[package.extras]
-test = ["pytest-grpc"]
+opentelemetry-exporter-otlp-proto-common = "1.26.0"
+opentelemetry-proto = "1.26.0"
+opentelemetry-sdk = ">=1.26.0,<1.27.0"
[[package]]
name = "opentelemetry-instrumentation"
-version = "0.45b0"
+version = "0.47b0"
description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation-0.45b0-py3-none-any.whl", hash = "sha256:06c02e2c952c1b076e8eaedf1b82f715e2937ba7eeacab55913dd434fbcec258"},
- {file = "opentelemetry_instrumentation-0.45b0.tar.gz", hash = "sha256:6c47120a7970bbeb458e6a73686ee9ba84b106329a79e4a4a66761f933709c7e"},
+ {file = "opentelemetry_instrumentation-0.47b0-py3-none-any.whl", hash = "sha256:88974ee52b1db08fc298334b51c19d47e53099c33740e48c4f084bd1afd052d5"},
+ {file = "opentelemetry_instrumentation-0.47b0.tar.gz", hash = "sha256:96f9885e450c35e3f16a4f33145f2ebf620aea910c9fd74a392bbc0f807a350f"},
]
[package.dependencies]
@@ -1557,150 +1604,150 @@ wrapt = ">=1.0.0,<2.0.0"
[[package]]
name = "opentelemetry-instrumentation-asgi"
-version = "0.45b0"
+version = "0.47b0"
description = "ASGI instrumentation for OpenTelemetry"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_asgi-0.45b0-py3-none-any.whl", hash = "sha256:8be1157ed62f0db24e45fdf7933c530c4338bd025c5d4af7830e903c0756021b"},
- {file = "opentelemetry_instrumentation_asgi-0.45b0.tar.gz", hash = "sha256:97f55620f163fd3d20323e9fd8dc3aacc826c03397213ff36b877e0f4b6b08a6"},
+ {file = "opentelemetry_instrumentation_asgi-0.47b0-py3-none-any.whl", hash = "sha256:b798dc4957b3edc9dfecb47a4c05809036a4b762234c5071212fda39ead80ade"},
+ {file = "opentelemetry_instrumentation_asgi-0.47b0.tar.gz", hash = "sha256:e78b7822c1bca0511e5e9610ec484b8994a81670375e570c76f06f69af7c506a"},
]
[package.dependencies]
asgiref = ">=3.0,<4.0"
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.45b0"
-opentelemetry-semantic-conventions = "0.45b0"
-opentelemetry-util-http = "0.45b0"
+opentelemetry-instrumentation = "0.47b0"
+opentelemetry-semantic-conventions = "0.47b0"
+opentelemetry-util-http = "0.47b0"
[package.extras]
instruments = ["asgiref (>=3.0,<4.0)"]
[[package]]
name = "opentelemetry-instrumentation-dbapi"
-version = "0.45b0"
+version = "0.47b0"
description = "OpenTelemetry Database API instrumentation"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_dbapi-0.45b0-py3-none-any.whl", hash = "sha256:0678578d6a98300841b8ed743724ad17a9fb3a555a7cfc0f6bb61e8441c94618"},
- {file = "opentelemetry_instrumentation_dbapi-0.45b0.tar.gz", hash = "sha256:f6753e13548e45a9cf86f92eaa6e9cd9a8803a56376819c7f7e6ea1aa7ff984c"},
+ {file = "opentelemetry_instrumentation_dbapi-0.47b0-py3-none-any.whl", hash = "sha256:24a160029dfffdb9716ce3908f140afe7c91a4704fbe42fc623341fc5645440f"},
+ {file = "opentelemetry_instrumentation_dbapi-0.47b0.tar.gz", hash = "sha256:31fe72b7f45467592880ded77bb19aa4c04d126228684de9f0b46318325b9d50"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.45b0"
-opentelemetry-semantic-conventions = "0.45b0"
+opentelemetry-instrumentation = "0.47b0"
+opentelemetry-semantic-conventions = "0.47b0"
wrapt = ">=1.0.0,<2.0.0"
[[package]]
name = "opentelemetry-instrumentation-django"
-version = "0.45b0"
+version = "0.47b0"
description = "OpenTelemetry Instrumentation for Django"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_django-0.45b0-py3-none-any.whl", hash = "sha256:1e612c90eb4c69e1f0aa2e38dea89c47616596d3600392640fa7c0a201e299fa"},
- {file = "opentelemetry_instrumentation_django-0.45b0.tar.gz", hash = "sha256:d8b55747d6784167ab3a50dc128cc13b6966a2215ce55f4043392ac1c83b5bb2"},
+ {file = "opentelemetry_instrumentation_django-0.47b0-py3-none-any.whl", hash = "sha256:85d5d5dd4047945917b823879933a28efddcf06d5f7fabef5ac806226602b18d"},
+ {file = "opentelemetry_instrumentation_django-0.47b0.tar.gz", hash = "sha256:f23c97ffa9b9b0d06a76e4a5296f189cc6e02f66c29a0ca30a97b0ea121a30b9"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.45b0"
-opentelemetry-instrumentation-wsgi = "0.45b0"
-opentelemetry-semantic-conventions = "0.45b0"
-opentelemetry-util-http = "0.45b0"
+opentelemetry-instrumentation = "0.47b0"
+opentelemetry-instrumentation-wsgi = "0.47b0"
+opentelemetry-semantic-conventions = "0.47b0"
+opentelemetry-util-http = "0.47b0"
[package.extras]
-asgi = ["opentelemetry-instrumentation-asgi (==0.45b0)"]
+asgi = ["opentelemetry-instrumentation-asgi (==0.47b0)"]
instruments = ["django (>=1.10)"]
[[package]]
name = "opentelemetry-instrumentation-fastapi"
-version = "0.45b0"
+version = "0.47b0"
description = "OpenTelemetry FastAPI Instrumentation"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_fastapi-0.45b0-py3-none-any.whl", hash = "sha256:77d9c123a363129148f5f66d44094f3d67aaaa2b201396d94782b4a7f9ce4314"},
- {file = "opentelemetry_instrumentation_fastapi-0.45b0.tar.gz", hash = "sha256:5a6b91e1c08a01601845fcfcfdefd0a2aecdb3c356d4a436a3210cb58c21487e"},
+ {file = "opentelemetry_instrumentation_fastapi-0.47b0-py3-none-any.whl", hash = "sha256:5ac28dd401160b02e4f544a85a9e4f61a8cbe5b077ea0379d411615376a2bd21"},
+ {file = "opentelemetry_instrumentation_fastapi-0.47b0.tar.gz", hash = "sha256:0c7c10b5d971e99a420678ffd16c5b1ea4f0db3b31b62faf305fbb03b4ebee36"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.45b0"
-opentelemetry-instrumentation-asgi = "0.45b0"
-opentelemetry-semantic-conventions = "0.45b0"
-opentelemetry-util-http = "0.45b0"
+opentelemetry-instrumentation = "0.47b0"
+opentelemetry-instrumentation-asgi = "0.47b0"
+opentelemetry-semantic-conventions = "0.47b0"
+opentelemetry-util-http = "0.47b0"
[package.extras]
-instruments = ["fastapi (>=0.58,<1.0)"]
+instruments = ["fastapi (>=0.58,<1.0)", "fastapi-slim (>=0.111.0,<0.112.0)"]
[[package]]
name = "opentelemetry-instrumentation-httpx"
-version = "0.45b0"
+version = "0.47b0"
description = "OpenTelemetry HTTPX Instrumentation"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_httpx-0.45b0-py3-none-any.whl", hash = "sha256:9cfe4061cd090652d4854ba95668b7fd1c258ab8e95b2c4129df66470a68c225"},
- {file = "opentelemetry_instrumentation_httpx-0.45b0.tar.gz", hash = "sha256:2e9913ca4c568767cf7bb5facab4d22e1dc65ea01ad0b6b6f77b5fcee136fb1d"},
+ {file = "opentelemetry_instrumentation_httpx-0.47b0-py3-none-any.whl", hash = "sha256:24a2db480919b326e50c6a5ad01bb53b717fbd4116bb4d736d104608bf34a25a"},
+ {file = "opentelemetry_instrumentation_httpx-0.47b0.tar.gz", hash = "sha256:4a4f7ff4726445e81aaedc025ee0f9ac66c2e9074987879082edea882c4aa22d"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.45b0"
-opentelemetry-semantic-conventions = "0.45b0"
-opentelemetry-util-http = "0.45b0"
+opentelemetry-instrumentation = "0.47b0"
+opentelemetry-semantic-conventions = "0.47b0"
+opentelemetry-util-http = "0.47b0"
[package.extras]
instruments = ["httpx (>=0.18.0)"]
[[package]]
name = "opentelemetry-instrumentation-psycopg2"
-version = "0.45b0"
+version = "0.47b0"
description = "OpenTelemetry psycopg2 instrumentation"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_psycopg2-0.45b0-py3-none-any.whl", hash = "sha256:53abba97fdf103af281e704300ba722b4ec4afb0127149967e25a1adb117d4d7"},
- {file = "opentelemetry_instrumentation_psycopg2-0.45b0.tar.gz", hash = "sha256:60152afb9986f33ab15d49875847f845a54de06603be4c0bc24ce65413c39ca0"},
+ {file = "opentelemetry_instrumentation_psycopg2-0.47b0-py3-none-any.whl", hash = "sha256:838fd49caf1b4fef0b5e436970e34eb1a9f79f76439ecf6383169053d0c72a73"},
+ {file = "opentelemetry_instrumentation_psycopg2-0.47b0.tar.gz", hash = "sha256:35085c295d1ef9b299ba7fb0ed19e6ff31c2be010b3e7371df196c17a43885f8"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.45b0"
-opentelemetry-instrumentation-dbapi = "0.45b0"
+opentelemetry-instrumentation = "0.47b0"
+opentelemetry-instrumentation-dbapi = "0.47b0"
[package.extras]
instruments = ["psycopg2 (>=2.7.3.1)"]
[[package]]
name = "opentelemetry-instrumentation-wsgi"
-version = "0.45b0"
+version = "0.47b0"
description = "WSGI Middleware for OpenTelemetry"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_wsgi-0.45b0-py3-none-any.whl", hash = "sha256:7a6f9c71b25f5c5e112827540008882f6a9088447cb65745e7f2083749516663"},
- {file = "opentelemetry_instrumentation_wsgi-0.45b0.tar.gz", hash = "sha256:f53a2a38e6582406e207d404e4c1b859b83bec11a68ad6c7366642d01c873ad0"},
+ {file = "opentelemetry_instrumentation_wsgi-0.47b0-py3-none-any.whl", hash = "sha256:9a1a78aa2f5682fe1073c4cc77f24ef4f083b18b66bbb674a995b0b77eef1815"},
+ {file = "opentelemetry_instrumentation_wsgi-0.47b0.tar.gz", hash = "sha256:4903c3d686d53ca7ab6545bb4cc42c3de8af5b2f370996e84db2cfec688860af"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.45b0"
-opentelemetry-semantic-conventions = "0.45b0"
-opentelemetry-util-http = "0.45b0"
+opentelemetry-instrumentation = "0.47b0"
+opentelemetry-semantic-conventions = "0.47b0"
+opentelemetry-util-http = "0.47b0"
[[package]]
name = "opentelemetry-proto"
-version = "1.24.0"
+version = "1.26.0"
description = "OpenTelemetry Python Proto"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_proto-1.24.0-py3-none-any.whl", hash = "sha256:bcb80e1e78a003040db71ccf83f2ad2019273d1e0828089d183b18a1476527ce"},
- {file = "opentelemetry_proto-1.24.0.tar.gz", hash = "sha256:ff551b8ad63c6cabb1845ce217a6709358dfaba0f75ea1fa21a61ceddc78cab8"},
+ {file = "opentelemetry_proto-1.26.0-py3-none-any.whl", hash = "sha256:6c4d7b4d4d9c88543bcf8c28ae3f8f0448a753dc291c18c5390444c90b76a725"},
+ {file = "opentelemetry_proto-1.26.0.tar.gz", hash = "sha256:c5c18796c0cab3751fc3b98dee53855835e90c0422924b484432ac852d93dc1e"},
]
[package.dependencies]
@@ -1708,40 +1755,44 @@ protobuf = ">=3.19,<5.0"
[[package]]
name = "opentelemetry-sdk"
-version = "1.24.0"
+version = "1.26.0"
description = "OpenTelemetry Python SDK"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_sdk-1.24.0-py3-none-any.whl", hash = "sha256:fa731e24efe832e98bcd90902085b359dcfef7d9c9c00eb5b9a18587dae3eb59"},
- {file = "opentelemetry_sdk-1.24.0.tar.gz", hash = "sha256:75bc0563affffa827700e0f4f4a68e1e257db0df13372344aebc6f8a64cde2e5"},
+ {file = "opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897"},
+ {file = "opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85"},
]
[package.dependencies]
-opentelemetry-api = "1.24.0"
-opentelemetry-semantic-conventions = "0.45b0"
+opentelemetry-api = "1.26.0"
+opentelemetry-semantic-conventions = "0.47b0"
typing-extensions = ">=3.7.4"
[[package]]
name = "opentelemetry-semantic-conventions"
-version = "0.45b0"
+version = "0.47b0"
description = "OpenTelemetry Semantic Conventions"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_semantic_conventions-0.45b0-py3-none-any.whl", hash = "sha256:a4a6fb9a7bacd9167c082aa4681009e9acdbfa28ffb2387af50c2fef3d30c864"},
- {file = "opentelemetry_semantic_conventions-0.45b0.tar.gz", hash = "sha256:7c84215a44ac846bc4b8e32d5e78935c5c43482e491812a0bb8aaf87e4d92118"},
+ {file = "opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063"},
+ {file = "opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e"},
]
+[package.dependencies]
+deprecated = ">=1.2.6"
+opentelemetry-api = "1.26.0"
+
[[package]]
name = "opentelemetry-util-http"
-version = "0.45b0"
+version = "0.47b0"
description = "Web util for OpenTelemetry"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_util_http-0.45b0-py3-none-any.whl", hash = "sha256:6628868b501b3004e1860f976f410eeb3d3499e009719d818000f24ce17b6e33"},
- {file = "opentelemetry_util_http-0.45b0.tar.gz", hash = "sha256:4ce08b6a7d52dd7c96b7705b5b4f06fdb6aa3eac1233b3b0bfef8a0cab9a92cd"},
+ {file = "opentelemetry_util_http-0.47b0-py3-none-any.whl", hash = "sha256:3d3215e09c4a723b12da6d0233a31395aeb2bb33a64d7b15a1500690ba250f19"},
+ {file = "opentelemetry_util_http-0.47b0.tar.gz", hash = "sha256:352a07664c18eef827eb8ddcbd64c64a7284a39dd1655e2f16f577eb046ccb32"},
]
[[package]]
@@ -1779,13 +1830,13 @@ files = [
[[package]]
name = "phonenumbers"
-version = "8.13.33"
+version = "8.13.42"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
files = [
- {file = "phonenumbers-8.13.33-py2.py3-none-any.whl", hash = "sha256:f2d653268ece55a4f3752d9cda4be6f7465f298e6d028d522aedda13cf057201"},
- {file = "phonenumbers-8.13.33.tar.gz", hash = "sha256:991f2619f0593b36b674c345af47944ec4bae526b353cf53d707e662087be63b"},
+ {file = "phonenumbers-8.13.42-py2.py3-none-any.whl", hash = "sha256:18acc22ee03116d27b26e990f53806a1770a3e05f05e1620bc09ad187f889456"},
+ {file = "phonenumbers-8.13.42.tar.gz", hash = "sha256:7137904f2db3b991701e853174ce8e1cb8f540b8bfdf27617540de04c0b7bed5"},
]
[[package]]
@@ -1960,109 +2011,122 @@ files = [
[[package]]
name = "pydantic"
-version = "2.7.1"
+version = "2.8.2"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"},
- {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"},
+ {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"},
+ {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"},
]
[package.dependencies]
annotated-types = ">=0.4.0"
-pydantic-core = "2.18.2"
-typing-extensions = ">=4.6.1"
+pydantic-core = "2.20.1"
+typing-extensions = [
+ {version = ">=4.12.2", markers = "python_version >= \"3.13\""},
+ {version = ">=4.6.1", markers = "python_version < \"3.13\""},
+]
[package.extras]
email = ["email-validator (>=2.0.0)"]
[[package]]
name = "pydantic-core"
-version = "2.18.2"
+version = "2.20.1"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"},
- {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"},
- {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"},
- {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"},
- {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"},
- {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"},
- {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"},
- {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"},
- {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"},
- {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"},
- {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"},
- {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"},
- {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"},
- {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"},
- {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"},
- {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"},
- {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"},
- {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"},
- {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"},
- {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"},
- {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"},
- {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"},
- {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"},
- {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"},
- {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"},
- {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"},
- {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"},
- {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"},
- {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"},
- {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"},
- {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"},
- {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"},
- {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"},
- {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"},
- {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"},
- {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"},
- {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"},
- {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"},
- {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"},
- {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"},
+ {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"},
+ {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"},
+ {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"},
+ {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"},
+ {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"},
+ {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"},
+ {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"},
+ {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"},
+ {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"},
+ {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"},
+ {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"},
+ {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"},
+ {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"},
+ {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"},
+ {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
+ {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
]
[package.dependencies]
@@ -2070,17 +2134,17 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pydantic-settings"
-version = "2.2.1"
+version = "2.3.4"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"},
- {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"},
+ {file = "pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"},
+ {file = "pydantic_settings-2.3.4.tar.gz", hash = "sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"},
]
[package.dependencies]
-pydantic = ">=2.3.0"
+pydantic = ">=2.7.0"
python-dotenv = ">=0.21.0"
[package.extras]
@@ -2380,6 +2444,20 @@ files = [
[package.extras]
cli = ["click (>=5.0)"]
+[[package]]
+name = "python-ipware"
+version = "3.0.0"
+description = "A Python package to retrieve user's IP address"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "python_ipware-3.0.0-py3-none-any.whl", hash = "sha256:fc936e6e7ec9fcc107f9315df40658f468ac72f739482a707181742882e36b60"},
+ {file = "python_ipware-3.0.0.tar.gz", hash = "sha256:9117b1c4dddcb5d5ca49e6a9617de2fc66aec2ef35394563ac4eecabdf58c062"},
+]
+
+[package.extras]
+dev = ["coverage[toml]", "coveralls (>=3.3,<4.0)", "ruff", "twine"]
+
[[package]]
name = "pyyaml"
version = "6.0.1"
@@ -2405,7 +2483,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -2919,19 +2996,19 @@ files = [
[[package]]
name = "setuptools"
-version = "69.1.0"
+version = "71.0.3"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"},
- {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"},
+ {file = "setuptools-71.0.3-py3-none-any.whl", hash = "sha256:f501b6e6db709818dc76882582d9c516bf3b67b948864c5fa1d1624c09a49207"},
+ {file = "setuptools-71.0.3.tar.gz", hash = "sha256:3d8531791a27056f4a38cd3e54084d8b1c4228ff9cf3f2d7dd075ec99f9fd70d"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (<7.4)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
@@ -2997,6 +3074,23 @@ docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
release = ["twine"]
test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
+[[package]]
+name = "structlog"
+version = "24.4.0"
+description = "Structured Logging for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"},
+ {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"},
+]
+
+[package.extras]
+dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"]
+docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"]
+tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"]
+typing = ["mypy (>=1.4)", "rich", "twisted"]
+
[[package]]
name = "tinycss2"
version = "1.2.1"
@@ -3048,13 +3142,13 @@ telegram = ["requests"]
[[package]]
name = "typing-extensions"
-version = "4.11.0"
+version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
- {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
@@ -3262,18 +3356,18 @@ files = [
[[package]]
name = "zipp"
-version = "3.17.0"
+version = "3.19.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
- {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
+ {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
+ {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[[package]]
name = "zopfli"
@@ -3351,4 +3445,4 @@ test = ["pytest"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "45b14155e352fd530181532052623c77507cb11ae4596e4c59ba7658856ff3a1"
+content-hash = "02f0992a2c8b4010f34f92d601c5ab82862ee9d70c477eccc04eb4cb8e188b26"
diff --git a/rocky/pyproject.toml b/rocky/pyproject.toml
index 261a4ad81ac..197efaeea9d 100644
--- a/rocky/pyproject.toml
+++ b/rocky/pyproject.toml
@@ -8,7 +8,7 @@ license = "EUPL"
[tool.poetry.dependencies]
python = "^3.10"
beautifulsoup4 = "^4.11.2"
-Django = "^4.2.11"
+Django = "^5.0.6"
django-two-factor-auth = "^1.14.0"
django-environ = "^0.11.2"
jsonschema = "^4.17.0"
@@ -27,28 +27,31 @@ strenum = "^0.4.15"
django-rest-knox = { git = "https://github.com/jazzband/django-rest-knox", rev = "dd7b062147bc4b9718e22d5acd6cf1301a1036b9" }
# OpenTelemetry
-opentelemetry-sdk = "^1.24.0"
-opentelemetry-exporter-otlp-proto-grpc = "^1.24.0"
-opentelemetry-instrumentation-django = "^0.45b0"
-opentelemetry-instrumentation-psycopg2 = "^0.45b0"
+opentelemetry-sdk = "^1.26.0"
+opentelemetry-exporter-otlp-proto-grpc = "^1.26.0"
+opentelemetry-instrumentation-django = "^0.47b0"
+opentelemetry-instrumentation-psycopg2 = "^0.47b0"
whitenoise = { extras = ["brotli"], version = "^6.5.0" }
-opentelemetry-instrumentation = "^0.45b0"
-opentelemetry-instrumentation-fastapi = "^0.45b0"
+opentelemetry-instrumentation = "^0.47b0"
+opentelemetry-instrumentation-fastapi = "^0.47b0"
granian = "^1.3.2"
+django-components = "^0.88"
# These used in octopoes parts that are used by rocky
pyparsing = "^3.1.1"
pydantic-settings = "^2.0.3"
-opentelemetry-instrumentation-httpx = "^0.45b0"
+opentelemetry-instrumentation-httpx = "^0.47b0"
httpx = "^0.27.0"
-opentelemetry-api = "^1.24.0"
-opentelemetry-exporter-otlp-proto-common = "^1.24.0"
-opentelemetry-instrumentation-asgi = "^0.45b0"
-opentelemetry-instrumentation-dbapi = "^0.45b0"
-opentelemetry-instrumentation-wsgi = "^0.45b0"
-opentelemetry-proto = "^1.24.0"
-opentelemetry-semantic-conventions = "^0.45b0"
-opentelemetry-util-http = "^0.45b0"
+opentelemetry-api = "^1.26.0"
+opentelemetry-exporter-otlp-proto-common = "^1.26.0"
+opentelemetry-instrumentation-asgi = "^0.47b0"
+opentelemetry-instrumentation-dbapi = "^0.47b0"
+opentelemetry-instrumentation-wsgi = "^0.47b0"
+opentelemetry-proto = "^1.26.0"
+opentelemetry-semantic-conventions = "^0.47b0"
+opentelemetry-util-http = "^0.47b0"
+structlog = "^24.2.0"
+django-structlog = "^8.1.0"
[tool.poetry.group.dev.dependencies]
diff --git a/rocky/reports/report_types/aggregate_organisation_report/introduction.html b/rocky/reports/report_types/aggregate_organisation_report/introduction.html
index adc56c365d9..d71dcbed43f 100644
--- a/rocky/reports/report_types/aggregate_organisation_report/introduction.html
+++ b/rocky/reports/report_types/aggregate_organisation_report/introduction.html
@@ -3,23 +3,21 @@
+ {% blocktranslate trimmed %}
+ Give your report a custom name and optionally add the reports' reference date
+ to the name. To do so you can select a standard option or use a Python
+ strftime code in the report name.
+ {% endblocktranslate %}
+ {% translate "Select which objects you want to include in your report." %}
- {% if active_filters %}
- {% translate "Currently filtered on:" %}
- {% for filter, value in active_filters.items %}{{ filter }}{{ value|title }} {% endfor %}
- {% endif %}
- {% translate "No objects found." %}{{ type }} {% translate "server" %}
- {% translate "RPKI Not expired" %}
+ {% translate "RPKI valid" %}
{% if data.number_of_valid != data.number_of_ips %}
diff --git a/rocky/reports/report_types/rpki_report/report.py b/rocky/reports/report_types/rpki_report/report.py
index a4fadb72745..ed2d05576e0 100644
--- a/rocky/reports/report_types/rpki_report/report.py
+++ b/rocky/reports/report_types/rpki_report/report.py
@@ -1,6 +1,5 @@
from collections.abc import Iterable
from datetime import datetime
-from logging import getLogger
from typing import Any, TypedDict
from django.utils.translation import gettext_lazy as _
@@ -10,8 +9,6 @@
from octopoes.models.ooi.network import IPAddressV4, IPAddressV6
from reports.report_types.definitions import Report
-logger = getLogger(__name__)
-
class RPKIData(TypedDict):
exists: bool
@@ -53,11 +50,11 @@ def collect_data(self, input_oois: Iterable[str], valid_time: datetime) -> dict[
for ip in ips:
finding_types = finding_types_by_source.get(ip, [])
exists = not any(finding_type for finding_type in finding_types if finding_type.id in ["KAT-NO-RPKI"])
- expired = any(finding_type for finding_type in finding_types if finding_type.id in ["KAT-EXPIRED-RPKI"])
- rpki_ips[ip] = {"exists": exists, "valid": not expired}
+ invalid = any(finding_type for finding_type in finding_types if finding_type.id in ["KAT-INVALID-RPKI"])
+ rpki_ips[ip] = {"exists": exists, "valid": not invalid}
number_of_available -= 1 if not exists else 0
- number_of_valid -= 1 if expired else 0
- number_of_compliant -= 1 if not (exists and not expired) else 0
+ number_of_valid -= 1 if invalid else 0
+ number_of_compliant -= 1 if not (exists and not invalid) else 0
result[input_ooi] = {
"input_ooi": input_ooi,
diff --git a/rocky/reports/report_types/safe_connections_report/report.py b/rocky/reports/report_types/safe_connections_report/report.py
index 46d6f01da9b..13554d39562 100644
--- a/rocky/reports/report_types/safe_connections_report/report.py
+++ b/rocky/reports/report_types/safe_connections_report/report.py
@@ -1,6 +1,5 @@
from collections.abc import Iterable
from datetime import datetime
-from logging import getLogger
from typing import Any
from django.utils.translation import gettext_lazy as _
@@ -15,8 +14,6 @@
"KAT-CRITICAL-BAD-CIPHER",
]
-logger = getLogger(__name__)
-
class SafeConnectionsReport(Report):
id = "safe-connections-report"
diff --git a/rocky/reports/report_types/systems_report/report.py b/rocky/reports/report_types/systems_report/report.py
index 1bd0dadfb53..db71c686271 100644
--- a/rocky/reports/report_types/systems_report/report.py
+++ b/rocky/reports/report_types/systems_report/report.py
@@ -1,7 +1,6 @@
from collections.abc import Iterable
from dataclasses import dataclass
from datetime import datetime
-from logging import getLogger
from typing import Any
from django.utils.translation import gettext_lazy as _
@@ -11,8 +10,6 @@
from octopoes.models.ooi.network import IPAddressV4, IPAddressV6
from reports.report_types.definitions import Report
-logger = getLogger(__name__)
-
class SystemType(StrEnum):
WEB = "Web"
diff --git a/rocky/reports/report_types/tls_report/report.py b/rocky/reports/report_types/tls_report/report.py
index 8d067cc5e71..e56cd5cf8ba 100644
--- a/rocky/reports/report_types/tls_report/report.py
+++ b/rocky/reports/report_types/tls_report/report.py
@@ -1,5 +1,4 @@
from datetime import datetime
-from logging import getLogger
from typing import Any
from django.utils.translation import gettext_lazy as _
@@ -9,8 +8,6 @@
from octopoes.models.ooi.service import IPService, TLSCipher
from reports.report_types.definitions import Report
-logger = getLogger(__name__)
-
CIPHER_FINDINGS = [
"KAT-RECOMMENDATION-BAD-CIPHER",
"KAT-MEDIUM-BAD-CIPHER",
diff --git a/rocky/reports/report_types/vulnerability_report/report.py b/rocky/reports/report_types/vulnerability_report/report.py
index 1ad08c45580..021e88978d5 100644
--- a/rocky/reports/report_types/vulnerability_report/report.py
+++ b/rocky/reports/report_types/vulnerability_report/report.py
@@ -1,7 +1,6 @@
from collections import Counter
from collections.abc import Iterable
from datetime import datetime
-from logging import getLogger
from typing import Any, TypedDict
from django.utils.translation import gettext_lazy as _
@@ -11,8 +10,6 @@
from octopoes.models.ooi.network import IPAddressV4, IPAddressV6
from reports.report_types.definitions import Report
-logger = getLogger(__name__)
-
class FindingsData(TypedDict):
finding_types: list[FindingType]
diff --git a/rocky/reports/report_types/web_system_report/report.py b/rocky/reports/report_types/web_system_report/report.py
index 804a4806c6e..75aece24111 100644
--- a/rocky/reports/report_types/web_system_report/report.py
+++ b/rocky/reports/report_types/web_system_report/report.py
@@ -1,7 +1,6 @@
from collections.abc import Iterable
from dataclasses import dataclass, field
from datetime import datetime
-from logging import getLogger
from typing import Any, cast
from django.utils.translation import gettext_lazy as _
@@ -11,8 +10,6 @@
from octopoes.models.ooi.network import IPAddressV4, IPAddressV6
from reports.report_types.definitions import Report
-logger = getLogger(__name__)
-
@dataclass
class WebCheck:
diff --git a/rocky/reports/templates/aggregate_report/export_setup.html b/rocky/reports/templates/aggregate_report/export_setup.html
new file mode 100644
index 00000000000..88ffee1a119
--- /dev/null
+++ b/rocky/reports/templates/aggregate_report/export_setup.html
@@ -0,0 +1,23 @@
+{% extends "layouts/base.html" %}
+
+{% load i18n %}
+{% load static %}
+
+{% block content %}
+ {% include "header.html" %}
+
+ {{ data.report_name }} {% translate "for" %} {{ ooi|human_readable }}
+ {{ data.report_name }}
{% include data.template with data=data.data show_introduction="yes" %}
{{ report_type }} {% translate "for" %} {{ ooi }}
+ {{ data.report_name }}
{% include data.template with data=data.data %}
{% translate "Report name" %}
+ {% include "partials/return_button.html" with btn_text="Change selection" %}
+
+ {% translate "Table of contents" %}
{% for ooi,data in report.items %}
{% if data.data %}
{% endblocktranslate %}