From 4ea8c7a737e73a743d3efef9f871da4a497a0eab Mon Sep 17 00:00:00 2001 From: Kamforka Date: Thu, 2 May 2024 12:56:16 +0200 Subject: [PATCH] debug --- .github/workflows/_build-package.yml | 2 +- .github/workflows/_integration-tests.yml | 42 +++++-- .github/workflows/_static-checks.yml | 2 +- .github/workflows/_upload-package.yml | 2 +- .github/workflows/integrator.yml | 62 +++++++++ .github/workflows/main-cicd.yml | 2 - README.md | 35 +++++- docker/thehive4py-integrator/Dockerfile | 55 ++++++++ .../configs/elasticsearch.yml | 7 ++ .../configs/thehive.conf | 22 ++++ docker/thehive4py-integrator/entrypoint.sh | 119 ++++++++++++++++++ pyproject.toml | 3 +- tests/conftest.py | 10 +- tests/test_case_endpoint.py | 2 + tests/test_custom_field_endpoint.py | 3 +- tests/test_user_endpoint.py | 1 + tests/utils.py | 39 ++++-- 17 files changed, 371 insertions(+), 37 deletions(-) create mode 100644 .github/workflows/integrator.yml create mode 100644 docker/thehive4py-integrator/Dockerfile create mode 100644 docker/thehive4py-integrator/configs/elasticsearch.yml create mode 100644 docker/thehive4py-integrator/configs/thehive.conf create mode 100755 docker/thehive4py-integrator/entrypoint.sh diff --git a/.github/workflows/_build-package.yml b/.github/workflows/_build-package.yml index 54f5973e..a690bc67 100644 --- a/.github/workflows/_build-package.yml +++ b/.github/workflows/_build-package.yml @@ -10,7 +10,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.11 + python-version: 3.12 - name: Install build dependencies run: pip install --no-cache-dir -U pip .['build'] - name: Build package diff --git a/.github/workflows/_integration-tests.yml b/.github/workflows/_integration-tests.yml index aace0a24..490e89df 100644 --- a/.github/workflows/_integration-tests.yml +++ b/.github/workflows/_integration-tests.yml @@ -1,22 +1,46 @@ name: integration-tests on: workflow_call: - secrets: - DOCKER_TOKEN: - required: true jobs: - integration-tests: - name: 
Run integration tests + test-alert-endpoints: + name: Test alert endpoints runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.11 + python-version: 3.12 - name: Install dependencies run: pip install --no-cache-dir -U pip .['test'] - - name: Docker login - run: docker login -u kamforka -p ${{ secrets.DOCKER_TOKEN }} - name: Run integration tests - run: scripts/ci.py --test + run: pytest -v tests/test_alert_endpoint.py + + test-case-endpoints: + name: Test case endpoints + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.12 + - name: Install dependencies + run: pip install --no-cache-dir -U pip .['test'] + - name: Run integration tests + run: pytest -v tests/test_case_endpoint.py + + test-other-endpoints: + name: Test other endpoints + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.12 + - name: Install dependencies + run: pip install --no-cache-dir -U pip .['test'] + - name: Run integration tests + run: | + pytest -v --ignore=tests/test_alert_endpoint.py --ignore=tests/test_case_endpoint.py \ No newline at end of file diff --git a/.github/workflows/_static-checks.yml b/.github/workflows/_static-checks.yml index 6b672a47..1dd80386 100644 --- a/.github/workflows/_static-checks.yml +++ b/.github/workflows/_static-checks.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} diff --git a/.github/workflows/_upload-package.yml b/.github/workflows/_upload-package.yml index eb9024de..a0a14a94 100644 --- a/.github/workflows/_upload-package.yml +++ b/.github/workflows/_upload-package.yml @@ -26,7 +26,7 
@@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.11 + python-version: 3.12 - name: Install build dependencies run: pip install --no-cache-dir -U pip .['build'] - name: Upload to PyPI diff --git a/.github/workflows/integrator.yml b/.github/workflows/integrator.yml new file mode 100644 index 00000000..6cc06423 --- /dev/null +++ b/.github/workflows/integrator.yml @@ -0,0 +1,62 @@ +name: integrator-image +on: + push: + branches: + - main + pull_request: +jobs: + changes: + name: Change detection + runs-on: ubuntu-latest + outputs: + integrator: ${{ steps.filter.outputs.integrator }} + steps: + - uses: actions/checkout@v4 + - uses: dorny/paths-filter@v3 + id: filter + with: + filters: | + integrator: + - 'docker/thehive4py-integrator/**' + build: + name: Build and push + needs: changes + if: ${{ needs.changes.outputs.integrator == 'true' }} + runs-on: ubuntu-latest + env: + INTEGRATOR_BUILD_CTX: docker/thehive4py-integrator + INTEGRATOR_IMAGE_NAME: kamforka/thehive4py-integrator + THEHIVE_VERSION: 5.3.0 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set variables + id: variables + run: | + echo "integrator_image_fullname=$INTEGRATOR_IMAGE_NAME:thehive-$THEHIVE_VERSION" >> "$GITHUB_OUTPUT" + echo "integrator_image_fullname_with_hash=$INTEGRATOR_IMAGE_NAME:thehive-$THEHIVE_VERSION-$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT" + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: kamforka + password: ${{ secrets.DOCKER_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: ${{ env.INTEGRATOR_BUILD_CTX }} + platforms: linux/amd64,linux/arm64 + # push: ${{ github.ref == 'refs/heads/main' }} + push: true + tags: ${{ steps.variables.outputs.integrator_image_fullname }},${{ 
steps.variables.outputs.integrator_image_fullname_with_hash}} + build-args: | + THEHIVE_VERSION=${{ env.THEHIVE_VERSION }} diff --git a/.github/workflows/main-cicd.yml b/.github/workflows/main-cicd.yml index 706c313d..78516feb 100644 --- a/.github/workflows/main-cicd.yml +++ b/.github/workflows/main-cicd.yml @@ -10,8 +10,6 @@ jobs: uses: ./.github/workflows/_static-checks.yml integration-tests: uses: ./.github/workflows/_integration-tests.yml - secrets: - DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }} build-package: uses: ./.github/workflows/_build-package.yml upload-package: diff --git a/README.md b/README.md index 8adf9d44..2fd1d59b 100644 --- a/README.md +++ b/README.md @@ -258,7 +258,7 @@ If you are a first time contributor to github projects please make yourself comf Navigate to the cloned repository's directory and install the package with development extras using pip: ``` -pip install -e '.[dev]' +pip install -e .[dev] ``` This command installs the package in editable mode (`-e`) and includes additional development dependencies. @@ -304,14 +304,37 @@ With pre-commit hooks in place, your changes will be automatically validated for ## Testing +> IMPORTANT NOTE: Since TheHive 5.3 the licensing constraints have been partially lifted, therefore a public integrator image is available for running tests both locally and in GitHub. + `thehive4py` primarily relies on integration tests, which are designed to execute against a live TheHive 5.x instance. These tests ensure that the library functions correctly in an environment closely resembling real-world usage. -However, due to licensing constraints with TheHive 5.x, the integration tests are currently not available for public or local use. +### Test requirements + +Since the test suite relies on the existence of a live TheHive docker container a local docker engine installation is a must. +If you are unfamiliar with docker please check out the [official documentation][get-docker]. 
+ +### Test setup + +The test suite relies on a self-contained TheHive image which we call: [thehive4py-integrator]. +This image contains everything that is needed to run a barebone TheHive instance. + +The test suite uses this image to create a container locally with the predefined name `thehive4py-integration-tester` which will act as a unique id. +The container will expose TheHive on a random port to make sure it causes no conflicts for any other containers which expose ports. +The suite can identify this random port by querying the container info based on the predefined name. +During the very first test run the container will take a longer time to initialize due to the ElasticSearch and TheHive startups. +Once TheHive is responsive the suite will initialize the instance with a setup required by the tests (e.g.: test users, organisations, etc.). +After a successful initialization the suite will start executing the tests against the container. + +### Testing locally +To execute the whole test suite locally one can use the `scripts/ci.py` utility script like: -To ensure code quality and prevent broken code from being merged, a private image is available for the integration-test workflow. This means that any issues should be detected and addressed during the PR phase. + ./scripts/ci.py --test -The project is actively working on a solution to enable developers to run integration tests locally, providing a more accessible and comprehensive testing experience. +Note however that the above will execute the entire test suite which can take several minutes to complete. +In case one wants to execute only a portion of the test suite then the easiest workaround is to use `pytest` and pass the path to the specific test module. For example to only execute tests for the alert endpoints one can do: -While local testing is in development, relying on the automated PR checks ensures the reliability and quality of the `thehive4py` library. 
+ pytest -v tests/test_alert_endpoint.py -[query-api-docs]: https://docs.strangebee.com/thehive/api-docs/#operation/Query%20API \ No newline at end of file +[get-docker]: https://docs.docker.com/get-docker/ +[query-api-docs]: https://docs.strangebee.com/thehive/api-docs/#operation/Query%20API +[thehive4py-integrator]: https://hub.docker.com/repository/docker/kamforka/thehive4py-integrator/general \ No newline at end of file diff --git a/docker/thehive4py-integrator/Dockerfile b/docker/thehive4py-integrator/Dockerfile new file mode 100644 index 00000000..fad8427e --- /dev/null +++ b/docker/thehive4py-integrator/Dockerfile @@ -0,0 +1,55 @@ +FROM alpine:3.17 as base + +# BUILDER STAGE +FROM base as builder + +ARG ES_VERSION=7.17.19 +ARG THEHIVE_VERSION=5.3.0 + +RUN apk update && apk upgrade && apk add curl + +## ES DOWNLOAD +ARG ES_DOWNLOAD_URL=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}-linux-x86_64.tar.gz + +RUN curl -Lo /tmp/elasticsearch.tgz ${ES_DOWNLOAD_URL} \ + && tar -xzf /tmp/elasticsearch.tgz -C /tmp \ + && mv /tmp/elasticsearch-${ES_VERSION} /tmp/elasticsearch + +## THEHIVE DOWNLOAD +ARG THEHIVE_DOWNLOAD_URL=https://archives.strangebee.com/zip/thehive-${THEHIVE_VERSION}-1.zip + +RUN curl -Lo /tmp/thehive.zip ${THEHIVE_DOWNLOAD_URL} +RUN unzip -qo /tmp/thehive.zip -d /tmp \ + && mv /tmp/thehive-${THEHIVE_VERSION}-1 /tmp/thehive + +# FINAL STAGE +FROM base +RUN apk update && apk upgrade && apk add --no-cache openjdk11-jre-headless bash su-exec curl jq + +## ES SETUP +COPY --from=builder /tmp/elasticsearch /usr/share/elasticsearch +COPY configs/elasticsearch.yml /usr/share/elasticsearch/config/elasticsearch.yml + +RUN adduser -u 1000 -g 1000 -Dh /usr/share/elasticsearch elasticsearch \ + && mkdir -p /usr/share/elasticsearch/data \ + && chown -R elasticsearch:elasticsearch /usr/share/elasticsearch \ + && rm -rf /usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64 + +## THEHIVE SETUP +COPY --from=builder 
/tmp/thehive /opt/thehive/ +COPY configs/thehive.conf /opt/thehive/conf/application.conf + +RUN adduser -u 1001 -g 1001 -Dh /opt/thehive thehive \ + && mkdir /var/log/thehive \ + && chown -R thehive:thehive /opt/thehive /var/log/thehive + + +## ENTRYPOINT +COPY entrypoint.sh / +RUN chmod +x entrypoint.sh + +RUN /entrypoint.sh warmup + +EXPOSE 9000 +ENTRYPOINT /entrypoint.sh startup +HEALTHCHECK --start-period=60s CMD curl -f http://localhost:9000/api/status \ No newline at end of file diff --git a/docker/thehive4py-integrator/configs/elasticsearch.yml b/docker/thehive4py-integrator/configs/elasticsearch.yml new file mode 100644 index 00000000..c7f8ebea --- /dev/null +++ b/docker/thehive4py-integrator/configs/elasticsearch.yml @@ -0,0 +1,7 @@ +http.host: 0.0.0.0 +transport.host: 0.0.0.0 +discovery.type: single-node +cluster.name: thehive4py +xpack.security.enabled: false +xpack.ml.enabled: false +script.allowed_types: "inline,stored" \ No newline at end of file diff --git a/docker/thehive4py-integrator/configs/thehive.conf b/docker/thehive4py-integrator/configs/thehive.conf new file mode 100644 index 00000000..910f3534 --- /dev/null +++ b/docker/thehive4py-integrator/configs/thehive.conf @@ -0,0 +1,22 @@ +play.http.secret.key="supersecret" +play.http.parser.maxDiskBuffer: 20MB + +db { + provider: janusgraph + janusgraph { + storage { + backend: berkeleyje + directory: /opt/thehive/db + } + + index.search { + backend: elasticsearch + hostname: ["127.0.0.1"] + } + } +} + +storage { + provider: localfs + localfs.location: /opt/thehive/data +} diff --git a/docker/thehive4py-integrator/entrypoint.sh b/docker/thehive4py-integrator/entrypoint.sh new file mode 100755 index 00000000..9a8b02d5 --- /dev/null +++ b/docker/thehive4py-integrator/entrypoint.sh @@ -0,0 +1,119 @@ +#!/bin/bash + +THEHIVE_URL=http://localhost:9000 +ES_URL=http://localhost:9200 +MITRE_CATALOG_URL=https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json + 
+wait_until_up() { + local health_url="$1" + local timeout="${2:-30}" + local start_time=$(date +%s) + while true; do + local current_time=$(date +%s) + local elapsed_time=$((current_time - start_time)) + + if [ "$elapsed_time" -ge "$timeout" ]; then + echo "error: service was not responding for $timeout seconds" + exit 1 + fi + + local status_code=$(curl -so /dev/null -w %{http_code} ${health_url}) + if [ "$status_code" -eq 200 ]; then + return + fi + + sleep 0.25 + done +} + + +startup() { + + echo "starting elasticsearch in the background" + export ES_JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java)))) + export ES_JAVA_OPTS="-Xms1g -Xmx1g" + su-exec elasticsearch /usr/share/elasticsearch/bin/elasticsearch > /dev/null 2>&1 & + # su-exec elasticsearch /usr/share/elasticsearch/bin/elasticsearch & + + echo "waiting for elasticsearch to be up..." + wait_until_up "http://localhost:9200/_cat/health" + echo "elasticsearch is up!" + + echo "starting thehive in the foreground" + su-exec thehive /opt/thehive/bin/thehive -Dconfig.file=/opt/thehive/conf/application.conf + +} + + +warmup() { + echo "warming up thehive instance for integration testing" + echo "starting elasticsearch in the background" + export ES_JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java)))) + export ES_JAVA_OPTS="-Xms1g -Xmx1g" + su-exec elasticsearch /usr/share/elasticsearch/bin/elasticsearch > /dev/null 2>&1 & + # su-exec elasticsearch /usr/share/elasticsearch/bin/elasticsearch & + local es_pid=$! + + echo "waiting for elasticsearch to be up" + wait_until_up "${ES_URL}/_cat/health" + echo "elasticsearch is up!" + + echo "starting thehive in the background" + # su-exec thehive /opt/thehive/bin/thehive -Dconfig.file=/opt/thehive/conf/application.conf > /dev/null 2>&1 & + su-exec thehive /opt/thehive/bin/thehive -Dconfig.file=/opt/thehive/conf/application.conf & + local thehive_pid=$! 
+ + echo "waiting for thehive to be up" + wait_until_up "${THEHIVE_URL}/api/status" 90 + echo "thehive is up!" + + local auth="admin@thehive.local:secret" + local content_type="Content-Type: application/json" + + echo "create main test organisation" + curl -su "$auth" -H "$content_type" -XPOST "${THEHIVE_URL}/api/v1/organisation" -d '{"name": "main-org", "description": "main organisation for tests"}' + echo + + echo "setup admin user organisations" + local user_id=$(curl -XGET -H "$content_type" -su "$auth" "${THEHIVE_URL}/api/v1/user/current" | jq -r '._id') + curl -su "$auth" -H "$content_type" -XPUT "${THEHIVE_URL}/api/v1/user/${user_id}/organisations" -d '{"organisations": [{"organisation": "admin", "profile": "admin"}, {"organisation": "main-org", "profile": "org-admin", "default": true}]}' + echo + + local import_timeout=150 + echo "waiting $import_timeout seconds to finish importing Taxonomies and MITRE patterns..." + sleep $import_timeout + + echo "shutting down thehive" + kill -15 $thehive_pid + echo "shutting down elasticsearch" + kill -15 $es_pid +} + +display_help() { + echo "Usage: $program [startup|warmup|help]" + echo " startup Startup and serve TheHive instance for the integration tests" + echo " warmup Warmup TheHive instance with the necessary preparations for the integration tests" + echo " help Display this message" +} + +main() { + program=$0 + case "$1" in + "startup") + startup + ;; + "warmup") + warmup + ;; + "help") + display_help + ;; + *) + echo "Error: Invalid command '$1'." 
+ display_help + exit 1 + ;; + esac +} + +main $@ diff --git a/pyproject.toml b/pyproject.toml index c6f3de0f..4abb2e88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "License :: OSI Approved :: GNU Affero General Public License v3", ] authors = [{ name = "Szabolcs Antal", email = "antalszabolcs01@gmail.com" }] @@ -27,7 +28,7 @@ authors = [{ name = "Szabolcs Antal", email = "antalszabolcs01@gmail.com" }] [project.optional-dependencies] audit = ["bandit", "pip-audit"] build = ["build", "twine"] -lint = ["black", "flake8", "flake8-pyproject", "mypy", "pre-commit"] +lint = ["black", "flake8-pyproject", "mypy", "pre-commit"] test = ["pytest", "pytest-cov"] dev = ["thehive4py[audit, lint, test, build]"] diff --git a/tests/conftest.py b/tests/conftest.py index 648005fb..c7a82748 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,7 +2,7 @@ import pytest -from tests.utils import TestConfig, reinit_hive_container, spawn_hive_container +from tests.utils import TestConfig, reset_hive_instance, spawn_hive_container from thehive4py.client import TheHiveApi from thehive4py.helpers import now_to_ts from thehive4py.types.alert import InputAlert, OutputAlert @@ -23,8 +23,8 @@ @pytest.fixture(scope="session") def test_config(): return TestConfig( - image_name="kamforka/thehive4py-integrator:thehive-5.2.11", - container_name="thehive4py-integration-tests", + image_name="kamforka/thehive4py-integrator:thehive-5.3.0", + container_name="thehive4py-integration-tester", user="admin@thehive.local", password="secret", admin_org="admin", @@ -34,8 +34,8 @@ def test_config(): @pytest.fixture(scope="function", autouse=True) -def init_hive_container(test_config: TestConfig): - reinit_hive_container(test_config=test_config) +def auto_reset_hive_instance(thehive: TheHiveApi, 
test_config: TestConfig): + reset_hive_instance(hive_url=thehive.session.hive_url, test_config=test_config) @pytest.fixture(scope="session") diff --git a/tests/test_case_endpoint.py b/tests/test_case_endpoint.py index 712097e7..279831ec 100644 --- a/tests/test_case_endpoint.py +++ b/tests/test_case_endpoint.py @@ -197,6 +197,7 @@ def test_share_and_unshare(self, thehive: TheHiveApi, test_case: OutputCase): thehive.case.unshare(case_id=test_case["_id"], organisation_ids=[organisation]) assert len(thehive.case.list_shares(case_id=test_case["_id"])) == 1 + @pytest.mark.skip(reason="integrator container only supports a single org ") def test_share_and_remove_share(self, thehive: TheHiveApi, test_case: OutputCase): organisation = "share-org" share: InputShare = {"organisation": organisation} @@ -220,6 +221,7 @@ def test_update_share(self, thehive: TheHiveApi, test_case: OutputCase): updated_share = thehive.case.share(case_id=test_case["_id"], shares=[share])[0] assert updated_share["profileName"] == update_profile + @pytest.mark.skip(reason="integrator container only supports a single org ") def test_share_and_set_share(self, thehive: TheHiveApi, test_case: OutputCase): organisation = "share-org" share: InputShare = {"organisation": organisation} diff --git a/tests/test_custom_field_endpoint.py b/tests/test_custom_field_endpoint.py index aad23770..b355fe70 100644 --- a/tests/test_custom_field_endpoint.py +++ b/tests/test_custom_field_endpoint.py @@ -1,10 +1,11 @@ import pytest + from thehive4py.client import TheHiveApi from thehive4py.errors import TheHiveError from thehive4py.types.custom_field import InputUpdateCustomField, OutputCustomField -class TestCustomeFieldEndpoint: +class TestCustomFieldEndpoint: def test_create_and_list(self, thehive_admin: TheHiveApi): created_custom_field = thehive_admin.custom_field.create( custom_field={ diff --git a/tests/test_user_endpoint.py b/tests/test_user_endpoint.py index 289a3e7c..68bbfd25 100644 --- 
a/tests/test_user_endpoint.py +++ b/tests/test_user_endpoint.py @@ -61,6 +61,7 @@ def test_delete(self, thehive: TheHiveApi, test_user: OutputUser): with pytest.raises(TheHiveError): thehive.user.get(user_id=user_id) + @pytest.mark.skip(reason="integrator container only supports a single org ") def test_set_organisations( self, test_config: TestConfig, thehive: TheHiveApi, test_user: OutputUser ): diff --git a/tests/utils.py b/tests/utils.py index 750e94a5..ef7473e0 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -7,6 +7,7 @@ import requests from thehive4py.client import TheHiveApi +from thehive4py.helpers import now_to_ts from thehive4py.query.filters import Eq @@ -27,7 +28,7 @@ class TestConfig: def _is_container_responsive(container_url: str) -> bool: COOLDOWN = 1.0 - TIMEOUT = 60.0 + TIMEOUT = 120.0 now = time.time() end = now + TIMEOUT @@ -70,7 +71,8 @@ def _build_container_url(container_name: str) -> str: def _run_container(container_name: str, container_image: str): subprocess.run( shlex.split( - f"docker run -d --rm -p 9000 --name {container_name} {container_image}" + f"docker run --pull=always -d --rm -p 9000 " + f"--name {container_name} {container_image}" ), capture_output=True, text=True, @@ -85,7 +87,7 @@ def _destroy_container(container_name: str): ) -def _reinit_hive_org(hive_url: str, test_config: TestConfig, organisation: str) -> None: +def _reset_hive_org(hive_url: str, test_config: TestConfig, organisation: str) -> None: client = TheHiveApi( url=hive_url, username=test_config.user, @@ -101,7 +103,7 @@ def _reinit_hive_org(hive_url: str, test_config: TestConfig, organisation: str) executor.map(client.case.delete, [case["_id"] for case in cases]) -def _reinit_hive_admin_org(hive_url: str, test_config: TestConfig) -> None: +def _reset_hive_admin_org(hive_url: str, test_config: TestConfig) -> None: client = TheHiveApi( url=hive_url, username=test_config.user, @@ -129,6 +131,19 @@ def _reinit_hive_admin_org(hive_url: str, test_config: TestConfig) 
-> None: ) +def _is_valid_container_license(url: str, test_config: TestConfig): + hive = TheHiveApi( + url=url, + username=test_config.user, + password=test_config.password, + organisation="admin", + ) + + current_license = hive.session.make_request("GET", "/api/v1/license/current") + + return current_license["fallback"]["expiresAt"] > now_to_ts() + + def spawn_hive_container(test_config: TestConfig) -> str: if not _is_container_exist(container_name=test_config.container_name): _run_container( @@ -141,15 +156,19 @@ def spawn_hive_container(test_config: TestConfig) -> str: _destroy_container(container_name=test_config.container_name) raise RuntimeError("Unable to startup test container for TheHive") + if not _is_valid_container_license(url=url, test_config=test_config): + _destroy_container(container_name=test_config.container_name) + spawn_hive_container(test_config=test_config) + return url -def reinit_hive_container(test_config: TestConfig) -> None: - hive_url = spawn_hive_container(test_config=test_config) +def reset_hive_instance(hive_url: str, test_config: TestConfig) -> None: + # TODO: add back share config reinitialization once the license allows it with ThreadPoolExecutor() as executor: - for organisation in [ + for org in [ test_config.main_org, - test_config.share_org, + # test_config.share_org, ]: - executor.submit(_reinit_hive_org, hive_url, test_config, organisation) - executor.submit(_reinit_hive_admin_org, hive_url, test_config) + executor.submit(_reset_hive_org, hive_url, test_config, org) + executor.submit(_reset_hive_admin_org, hive_url, test_config)