From 63e9f87d56d0f65da9669d4c33a3802483505b1f Mon Sep 17 00:00:00 2001 From: Ruslan Date: Tue, 3 Oct 2023 10:38:35 +0200 Subject: [PATCH] Support diracx extension: Gubbins --- .coveragerc | 4 +- .github/workflows/extensions.yml | 284 ++ .github/workflows/main.yml | 6 +- .gitignore | 1 + .pre-commit-config.yaml | 2 +- codecov.yml | 6 +- diracx-cli/pyproject.toml | 10 + diracx-cli/src/diracx/cli/__init__.py | 25 +- diracx-client/pyproject.toml | 25 +- diracx-client/src/diracx/client/__init__.py | 22 +- diracx-client/src/diracx/client/aio.py | 3 + diracx-client/src/diracx/client/extensions.py | 95 + .../client/{aio => generated}/__init__.py | 4 +- .../diracx/client/{ => generated}/_client.py | 13 +- .../client/{ => generated}/_configuration.py | 2 +- .../src/diracx/client/generated/_patch.py | 47 + .../client/{ => generated}/_serialization.py | 452 +- .../diracx/client/{ => generated}/_vendor.py | 2 +- .../diracx/client/generated/aio/__init__.py | 21 + .../client/{ => generated}/aio/_client.py | 13 +- .../{ => generated}/aio/_configuration.py | 2 +- .../src/diracx/client/generated/aio/_patch.py | 35 + .../client/{ => generated}/aio/_vendor.py | 2 +- .../aio/operations/__init__.py | 2 +- .../generated/aio/operations/_operations.py | 2906 +++++++++++++ .../{ => generated}/aio/operations/_patch.py | 0 .../client/{ => generated}/models/__init__.py | 4 +- .../client/{ => generated}/models/_enums.py | 4 +- .../client/{ => generated}/models/_models.py | 127 +- .../client/{ => generated}/models/_patch.py | 0 .../{ => generated}/operations/__init__.py | 2 +- .../generated/operations/_operations.py | 3728 +++++++++++++++++ .../{ => generated}/operations/_patch.py | 0 .../src/diracx/client/generated/py.typed | 1 + diracx-client/src/diracx/client/models.py | 5 + .../src/diracx/client/patches/__init__.py | 19 + .../src/diracx/client/patches/aio/__init__.py | 18 + .../{aio/_patch.py => patches/aio/utils.py} | 32 +- .../client/{_patch.py => patches/utils.py} | 222 +- 
diracx-client/tests/test_regenerate.py | 15 +- diracx-core/pyproject.toml | 3 +- .../src/diracx/core/config/__init__.py | 10 +- diracx-core/src/diracx/core/config/schema.py | 62 +- diracx-db/pyproject.toml | 1 + diracx-db/src/diracx/db/__main__.py | 1 + diracx-routers/pyproject.toml | 3 + diracx-routers/src/diracx/routers/__init__.py | 4 + .../src/diracx/routers/fastapi_classes.py | 29 +- diracx-testing/src/diracx/testing/__init__.py | 7 +- docs/CLIENT.md | 20 +- docs/CODING_CONVENTION.md | 54 + docs/VERSIONING.md | 2 +- extensions/README.md | 212 + extensions/containers/client/Dockerfile | 14 + extensions/containers/services/Dockerfile | 16 + extensions/gubbins/environment.yml | 10 + extensions/gubbins/gubbins-cli/pyproject.toml | 52 + .../gubbins-cli/src/gubbins/cli/__init__.py | 0 .../gubbins-cli/src/gubbins/cli/config.py | 20 + .../gubbins-cli/src/gubbins/cli/lollygag.py | 38 + .../gubbins-cli/src/gubbins/cli/py.typed | 0 .../gubbins-cli/tests/test_gubbins_cli.py | 62 + .../gubbins/gubbins-client/pyproject.toml | 51 + .../src/gubbins/client/__init__.py | 9 + .../gubbins-client/src/gubbins/client/aio.py | 2 + .../src/gubbins/client/generated/__init__.py | 21 + .../src/gubbins/client/generated/_client.py | 127 + .../client/generated/_configuration.py | 48 + .../src/gubbins/client/generated/_patch.py | 26 + .../client/generated/_serialization.py | 2274 ++++++++++ .../src/gubbins/client/generated/_vendor.py | 55 + .../gubbins/client/generated/aio/__init__.py | 21 + .../gubbins/client/generated/aio/_client.py | 127 + .../client/generated/aio/_configuration.py | 50 + .../gubbins/client/generated/aio/_patch.py | 22 + .../gubbins/client/generated/aio/_vendor.py | 55 + .../generated/aio/operations/__init__.py | 25 + .../generated}/aio/operations/_operations.py | 539 ++- .../client/generated/aio/operations/_patch.py | 22 + .../client/generated/models/__init__.py | 99 + .../gubbins/client/generated/models/_enums.py | 103 + .../client/generated/models/_models.py | 1226 
++++++ .../gubbins/client/generated/models/_patch.py | 22 + .../client/generated/operations/__init__.py | 25 + .../generated}/operations/_operations.py | 587 ++- .../client/generated/operations/_patch.py | 22 + .../src/gubbins/client/generated/py.typed | 1 + .../src/gubbins/client/models.py | 1 + .../src/gubbins/client/patches/__init__.py | 10 + .../gubbins/client/patches/aio/__init__.py | 12 + .../src/gubbins/client/py.typed | 1 + .../tests/test_gubbins_client.py | 178 + .../gubbins-client/tests/test_regenerate.py | 90 + .../gubbins/gubbins-core/pyproject.toml | 53 + .../gubbins-core/src/gubbins/core/__init__.py | 1 + .../src/gubbins/core/config/__init__.py | 1 + .../src/gubbins/core/config/schema.py | 30 + .../src/gubbins/core/properties.py | 5 + .../gubbins-core/src/gubbins/core/py.typed | 0 .../gubbins/gubbins-core/tests/test_config.py | 39 + .../gubbins-core/tests/test_properties.py | 9 + extensions/gubbins/gubbins-db/pyproject.toml | 58 + .../gubbins-db/src/gubbins/db/__init__.py | 6 + .../gubbins-db/src/gubbins/db/py.typed | 0 .../gubbins-db/src/gubbins/db/sql/__init__.py | 6 + .../src/gubbins/db/sql/jobs/__init__.py | 0 .../gubbins-db/src/gubbins/db/sql/jobs/db.py | 53 + .../src/gubbins/db/sql/jobs/schema.py | 19 + .../src/gubbins/db/sql/lollygag/__init__.py | 0 .../src/gubbins/db/sql/lollygag/db.py | 64 + .../src/gubbins/db/sql/lollygag/schema.py | 21 + .../gubbins-db/tests/test_gubbinsJobDB.py | 48 + .../gubbins-db/tests/test_lollygagDB.py | 85 + .../gubbins/gubbins-routers/pyproject.toml | 68 + .../src/gubbins/routers/__init__.py | 1 + .../src/gubbins/routers/dependencies.py | 14 + .../src/gubbins/routers/lollygag/__init__.py | 2 + .../gubbins/routers/lollygag/access_policy.py | 45 + .../src/gubbins/routers/lollygag/lollygag.py | 50 + .../src/gubbins/routers/py.typed | 0 .../src/gubbins/routers/well_known.py | 52 + .../.well-known/openid-configuration | 1 + .../tests/test_gubbins_job_manager.py | 86 + .../gubbins-routers/tests/test_lollybag.py | 34 + 
.../gubbins-routers/tests/test_wellknown.py | 37 + .../gubbins/gubbins-testing/pyproject.toml | 29 + .../src/gubbins/testing/__init__.py | 17 + extensions/gubbins/pyproject.toml | 130 + extensions/gubbins/release.notes | 2 + extensions/gubbins/requirements-dev.txt | 6 + extensions/gubbins_values.yaml | 12 + pyproject.toml | 4 +- 132 files changed, 14850 insertions(+), 870 deletions(-) create mode 100644 .github/workflows/extensions.yml create mode 100644 diracx-client/src/diracx/client/aio.py create mode 100644 diracx-client/src/diracx/client/extensions.py rename diracx-client/src/diracx/client/{aio => generated}/__init__.py (90%) rename diracx-client/src/diracx/client/{ => generated}/_client.py (92%) rename diracx-client/src/diracx/client/{ => generated}/_configuration.py (97%) create mode 100644 diracx-client/src/diracx/client/generated/_patch.py rename diracx-client/src/diracx/client/{ => generated}/_serialization.py (85%) rename diracx-client/src/diracx/client/{ => generated}/_vendor.py (97%) create mode 100644 diracx-client/src/diracx/client/generated/aio/__init__.py rename diracx-client/src/diracx/client/{ => generated}/aio/_client.py (92%) rename diracx-client/src/diracx/client/{ => generated}/aio/_configuration.py (97%) create mode 100644 diracx-client/src/diracx/client/generated/aio/_patch.py rename diracx-client/src/diracx/client/{ => generated}/aio/_vendor.py (97%) rename diracx-client/src/diracx/client/{ => generated}/aio/operations/__init__.py (94%) create mode 100644 diracx-client/src/diracx/client/generated/aio/operations/_operations.py rename diracx-client/src/diracx/client/{ => generated}/aio/operations/_patch.py (100%) rename diracx-client/src/diracx/client/{ => generated}/models/__init__.py (96%) rename diracx-client/src/diracx/client/{ => generated}/models/_enums.py (97%) rename diracx-client/src/diracx/client/{ => generated}/models/_models.py (89%) rename diracx-client/src/diracx/client/{ => generated}/models/_patch.py (100%) rename 
diracx-client/src/diracx/client/{ => generated}/operations/__init__.py (94%) create mode 100644 diracx-client/src/diracx/client/generated/operations/_operations.py rename diracx-client/src/diracx/client/{ => generated}/operations/_patch.py (100%) create mode 100644 diracx-client/src/diracx/client/generated/py.typed create mode 100644 diracx-client/src/diracx/client/models.py create mode 100644 diracx-client/src/diracx/client/patches/__init__.py create mode 100644 diracx-client/src/diracx/client/patches/aio/__init__.py rename diracx-client/src/diracx/client/{aio/_patch.py => patches/aio/utils.py} (90%) rename diracx-client/src/diracx/client/{_patch.py => patches/utils.py} (88%) create mode 100644 extensions/README.md create mode 100644 extensions/containers/client/Dockerfile create mode 100644 extensions/containers/services/Dockerfile create mode 100644 extensions/gubbins/environment.yml create mode 100644 extensions/gubbins/gubbins-cli/pyproject.toml create mode 100644 extensions/gubbins/gubbins-cli/src/gubbins/cli/__init__.py create mode 100644 extensions/gubbins/gubbins-cli/src/gubbins/cli/config.py create mode 100644 extensions/gubbins/gubbins-cli/src/gubbins/cli/lollygag.py create mode 100644 extensions/gubbins/gubbins-cli/src/gubbins/cli/py.typed create mode 100644 extensions/gubbins/gubbins-cli/tests/test_gubbins_cli.py create mode 100644 extensions/gubbins/gubbins-client/pyproject.toml create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/__init__.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/aio.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/__init__.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/_client.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/_configuration.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/_patch.py create mode 100644 
extensions/gubbins/gubbins-client/src/gubbins/client/generated/_serialization.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/_vendor.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/__init__.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_client.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_configuration.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_patch.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_vendor.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/__init__.py rename {diracx-client/src/diracx/client => extensions/gubbins/gubbins-client/src/gubbins/client/generated}/aio/operations/_operations.py (88%) create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/_patch.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/__init__.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_enums.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_models.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_patch.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/operations/__init__.py rename {diracx-client/src/diracx/client => extensions/gubbins/gubbins-client/src/gubbins/client/generated}/operations/_operations.py (89%) create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/operations/_patch.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/generated/py.typed create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/models.py create mode 100644 
extensions/gubbins/gubbins-client/src/gubbins/client/patches/__init__.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/patches/aio/__init__.py create mode 100644 extensions/gubbins/gubbins-client/src/gubbins/client/py.typed create mode 100644 extensions/gubbins/gubbins-client/tests/test_gubbins_client.py create mode 100644 extensions/gubbins/gubbins-client/tests/test_regenerate.py create mode 100644 extensions/gubbins/gubbins-core/pyproject.toml create mode 100644 extensions/gubbins/gubbins-core/src/gubbins/core/__init__.py create mode 100644 extensions/gubbins/gubbins-core/src/gubbins/core/config/__init__.py create mode 100644 extensions/gubbins/gubbins-core/src/gubbins/core/config/schema.py create mode 100644 extensions/gubbins/gubbins-core/src/gubbins/core/properties.py create mode 100644 extensions/gubbins/gubbins-core/src/gubbins/core/py.typed create mode 100644 extensions/gubbins/gubbins-core/tests/test_config.py create mode 100644 extensions/gubbins/gubbins-core/tests/test_properties.py create mode 100644 extensions/gubbins/gubbins-db/pyproject.toml create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/__init__.py create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/py.typed create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/sql/__init__.py create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/__init__.py create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/db.py create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/schema.py create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/__init__.py create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/db.py create mode 100644 extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/schema.py create mode 100644 extensions/gubbins/gubbins-db/tests/test_gubbinsJobDB.py create mode 100644 extensions/gubbins/gubbins-db/tests/test_lollygagDB.py create mode 
100644 extensions/gubbins/gubbins-routers/pyproject.toml create mode 100644 extensions/gubbins/gubbins-routers/src/gubbins/routers/__init__.py create mode 100644 extensions/gubbins/gubbins-routers/src/gubbins/routers/dependencies.py create mode 100644 extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/__init__.py create mode 100644 extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/access_policy.py create mode 100644 extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/lollygag.py create mode 100644 extensions/gubbins/gubbins-routers/src/gubbins/routers/py.typed create mode 100644 extensions/gubbins/gubbins-routers/src/gubbins/routers/well_known.py create mode 100644 extensions/gubbins/gubbins-routers/tests/data/lhcb-auth.web.cern.ch/.well-known/openid-configuration create mode 100644 extensions/gubbins/gubbins-routers/tests/test_gubbins_job_manager.py create mode 100644 extensions/gubbins/gubbins-routers/tests/test_lollybag.py create mode 100644 extensions/gubbins/gubbins-routers/tests/test_wellknown.py create mode 100644 extensions/gubbins/gubbins-testing/pyproject.toml create mode 100644 extensions/gubbins/gubbins-testing/src/gubbins/testing/__init__.py create mode 100644 extensions/gubbins/pyproject.toml create mode 100644 extensions/gubbins/release.notes create mode 100644 extensions/gubbins/requirements-dev.txt create mode 100644 extensions/gubbins_values.yaml diff --git a/.coveragerc b/.coveragerc index f2775131..37d1e52b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,8 +2,10 @@ omit = tests/* */tests/* - **/diracx/client/* + **/diracx/client/generated/* **/diracx/testing/* + **/gubbins/testing/* + **/gubbins/client/generated/* [paths] source = diff --git a/.github/workflows/extensions.yml b/.github/workflows/extensions.yml new file mode 100644 index 00000000..e51307c8 --- /dev/null +++ b/.github/workflows/extensions.yml @@ -0,0 +1,284 @@ +name: Extensions full test + +on: + push: + branches: + - main + - gubbins + 
pull_request: + branches: + - main + + +defaults: + run: + shell: bash -el {0} + +jobs: + unittest: + name: Unit test - ${{ matrix.package }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + # In principle, the dependencies could be limited to the extension packages. + # However, we want to use the local packages, and not those published on pypi + include: + - package: "./extensions/gubbins/gubbins-core" + dependencies: "./extensions/gubbins/gubbins-testing ./diracx-testing ./diracx-core" + - package: "./extensions/gubbins/gubbins-db" + dependencies: "./extensions/gubbins/gubbins-testing ./extensions/gubbins/gubbins-core ./diracx-testing ./diracx-db ./diracx-core " + - package: "./extensions/gubbins/gubbins-routers" + dependencies: "./extensions/gubbins/gubbins-testing ./extensions/gubbins/gubbins-db ./extensions/gubbins/gubbins-core ./diracx-testing ./diracx-db ./diracx-core ./diracx-routers" + - package: "./extensions/gubbins/gubbins-client" + dependencies: "./extensions/gubbins/gubbins-testing ./diracx-testing ./extensions/gubbins/gubbins-client ./extensions/gubbins/gubbins-core ./diracx-client ./diracx-core " + - package: "./extensions/gubbins/gubbins-cli" + dependencies: "./extensions/gubbins/gubbins-testing ./extensions/gubbins/gubbins-client ./extensions/gubbins/gubbins-core ./diracx-testing ./diracx-cli ./diracx-client ./diracx-core ./diracx-api" + steps: + - name: Checkout code + uses: actions/checkout@v4 + - uses: mamba-org/setup-micromamba@v2 + with: + # TODO: Use a conda environment file used for the diracx/base container image + environment-name: test-env + create-args: >- + python=3.11 + m2crypto + python-gfal2 + mypy + pip + init-shell: bash + post-cleanup: 'all' + - name: Set up environment + run: | + pip install pytest-github-actions-annotate-failures + pip install git+https://github.com/DIRACGrid/DIRAC.git@integration + pip install ${{ matrix.dependencies }} ${{ matrix.package }}[types] + - name: Run mypy + run: | + mypy ${{ 
matrix.package }}/src + - name: Run pytest + run: | + cd ${{ matrix.package }} + pip install .[testing] + export DIRACX_EXTENSIONS=gubbins,diracx + pytest --cov-report=xml:coverage.xml --junitxml=report.xml + - name: Upload coverage report + uses: codecov/codecov-action@v4.6.0 + + + build-wheels: + name: Build wheels + runs-on: "ubuntu-latest" + if: github.event_name != 'push' || github.repository == 'DIRACGrid/diracx' + defaults: + run: + # We need extglob for REFERENCE_BRANCH substitution + shell: bash -l -O extglob {0} + steps: + - name: Checkout + uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Installing dependencies + run: | + python -m pip install \ + build \ + python-dateutil \ + pytz \ + readme_renderer[md] \ + requests \ + setuptools_scm + - name: Build distributions + run: | + for pkg_dir in $PWD/diracx-*; do + echo "Building $pkg_dir" + python -m build --outdir $PWD/dist $pkg_dir + done + # Also build the diracx metapackage + python -m build --outdir $PWD/dist . 
+ # And build the gubbins package + for pkg_dir in $PWD/extensions/gubbins/gubbins-*; do + # Skip the testing package + if [[ "${pkg_dir}" =~ .*testing.* ]]; + then + echo "Do not build ${pkg_dir}"; + continue; + fi + echo "Building $pkg_dir" + python -m build --outdir $PWD/dist $pkg_dir + done + - name: 'Upload Artifact' + uses: actions/upload-artifact@v4 + with: + name: gubbins-whl + path: dist/*.whl + retention-days: 5 + + # Build to docker image with the code in it + build-image: + needs: build-wheels + timeout-minutes: 30 + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Download gubbins wheels + uses: actions/download-artifact@v4 + with: + name: gubbins-whl + - name: "Find wheels" + id: find_wheel + run: | + # We need to copy them there to be able to access them in the RUN --mount + cp diracx*.whl gubbins*.whl extensions/containers/services/ + for wheel_fn in *.whl; do + pkg_name=$(basename "${wheel_fn}" | cut -d '-' -f 1) + echo "${pkg_name}-wheel-name=$(ls "${pkg_name}"-*.whl)" >> $GITHUB_OUTPUT + done + - name: Build and export service + uses: docker/build-push-action@v6 + with: + context: extensions/containers/services + tags: gubbins/services:dev + outputs: type=docker,dest=/tmp/gubbins_services_image.tar + build-args: | + EXTRA_PACKAGES_TO_INSTALL=git+https://github.com/DIRACGrid/DIRAC.git@integration + EXTENSION_CUSTOM_SOURCES_TO_INSTALL=/bindmount/gubbins_db*.whl,/bindmount/gubbins_routers*.whl,/bindmount/gubbins_client*.whl + - name: Build and export client + uses: docker/build-push-action@v6 + with: + context: extensions/containers/client + tags: gubbins/client:dev + outputs: type=docker,dest=/tmp/gubbins_client_image.tar + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: gubbins-services-img + path: /tmp/gubbins_services_image.tar + + + pytest-integration: + needs: build-image + runs-on: ubuntu-latest + steps: + - 
name: Download gubbins-image + uses: actions/download-artifact@v4 + with: + name: gubbins-services-img + path: /tmp + - name: Load image + run: | + docker load --input /tmp/gubbins_services_image.tar + docker image ls -a + - name: Checkout code + uses: actions/checkout@v4 + - uses: mamba-org/setup-micromamba@v2 + with: + environment-file: environment.yml + init-shell: bash + post-cleanup: 'all' + - name: Set up environment + run: | + pip install pytest-github-actions-annotate-failures + pip install git+https://github.com/DIRACGrid/DIRAC.git@integration + pip install ./diracx-core/[testing] ./diracx-api/[testing] ./diracx-cli/[testing] ./diracx-client/[testing] ./diracx-routers/[testing] ./diracx-db/[testing] ./diracx-testing/[testing] ./extensions/gubbins/gubbins-testing[testing] ./extensions/gubbins/gubbins-db[testing] ./extensions/gubbins/gubbins-routers/[testing] ./extensions/gubbins/gubbins-client/[testing] ./extensions/gubbins/gubbins-cli/[testing] ./extensions/gubbins/gubbins-core/[testing] + - name: Start demo + run: | + git clone https://github.com/DIRACGrid/diracx-charts.git ../diracx-charts + # We have to copy the code to another directory + # and make it a git repository by itself because otherwise the + # root in the pyproject to do not make sense once mounted + # in the containers. 
+ cp -r ./extensions/gubbins /tmp/ + sed -i 's@../..@.@g' /tmp/gubbins/pyproject.toml + sed -i 's@../../@@g' /tmp/gubbins/gubbins-*/pyproject.toml + git init /tmp/gubbins/ + ../diracx-charts/run_demo.sh --enable-open-telemetry --enable-coverage --exit-when-done --set-value developer.autoReload=false --ci-values ../diracx-charts/demo/ci_values.yaml --ci-values ./extensions/gubbins_values.yaml --load-docker-image "gubbins/services:dev" $PWD /tmp/gubbins/ + - name: Debugging information + run: | + DIRACX_DEMO_DIR=$PWD/../diracx-charts/.demo + export KUBECONFIG=${DIRACX_DEMO_DIR}/kube.conf + export PATH=${DIRACX_DEMO_DIR}:$PATH + kubectl get pods + for pod_name in $(kubectl get pods -o json | jq -r '.items[] | .metadata.name' | grep -vE '(dex|minio|mysql|rabbitmq|opensearch)'); do + echo "${pod_name}" + kubectl describe pod/"${pod_name}" || true + for container_name in $(kubectl get pods $pod_name -o jsonpath='{.spec.initContainers[*].name} {.spec.containers[*].name}'); do + echo $pod_name $container_name + kubectl logs "${pod_name}" -c "${container_name}" || true + done + done + if [ ! 
-f "${DIRACX_DEMO_DIR}/.success" ]; then + cat "${DIRACX_DEMO_DIR}/.failed" + exit 1 + fi + - name: Run pytest + run: | + cd extensions/gubbins + export DIRACX_EXTENSIONS=gubbins,diracx + pytest --demo-dir=../../../diracx-charts/ --cov-report=xml:coverage-pytest.xml --junitxml=report.xml + - name: Collect demo coverage + run: | + DIRACX_DEMO_DIR=$PWD/../diracx-charts/.demo + export KUBECONFIG=${DIRACX_DEMO_DIR}/kube.conf + export PATH=${DIRACX_DEMO_DIR}:$PATH + # Shutdown the pods so we collect coverage data + for pod_name in $(kubectl get pods -o json | jq -r '.items[] | .metadata.name' | grep -vE '(dex|minio|mysql|rabbitmq|opensearch)'); do + kubectl delete pod/"${pod_name}" + done + set -x + # Combine the coverage data from the demo and make an XML report + coverage_data=$(mktemp) + sudo chown -R $(id -u) "${DIRACX_DEMO_DIR}"/coverage-reports/ + coverage combine --keep --data-file "${coverage_data}" "${DIRACX_DEMO_DIR}"/coverage-reports/* + + # coverage can't handle having multiple src directories, so we need to make a fake one with symlinks + fake_module=$(mktemp -d) + + mkdir -p "${fake_module}/src/diracx" + for fn in "${PWD}"/*/src/diracx/*; do + ln -sf "${fn}" "${fake_module}/src/diracx/$(basename "${fn}")" + done + + + mkdir -p "${fake_module}/src/gubbins" + for fn in "${PWD}"/extensions/gubbins/*/src/gubbins/*; do + ln -sf "${fn}" "${fake_module}/src/gubbins/$(basename "${fn}")" + done + + sed -i "s@source =@source =\n ${fake_module}/src@g" .coveragerc + + cat .coveragerc + + coverage xml -o coverage-demo.xml --data-file "${coverage_data}" + - name: Upload coverage report + uses: codecov/codecov-action@v4.6.0 + with: + files: ./coverage-pytest.xml,./coverage-demo.xml + + client-generation: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - uses: mamba-org/setup-micromamba@v2 + with: + environment-file: environment.yml + init-shell: bash + post-cleanup: 'all' + - name: Set up environment + run: | + micromamba install 
-c conda-forge nodejs pre-commit + pip install git+https://github.com/DIRACGrid/DIRAC.git@integration + pip install ./diracx-core/[testing] ./diracx-api/[testing] ./diracx-cli/[testing] ./diracx-client/[testing] ./diracx-routers/[testing] ./diracx-db/[testing] ./diracx-testing/[testing] ./extensions/gubbins/gubbins-testing[testing] ./extensions/gubbins/gubbins-db[testing] ./extensions/gubbins/gubbins-routers/[testing] ./extensions/gubbins/gubbins-testing/[testing] -e ./extensions/gubbins/gubbins-client/[testing] ./extensions/gubbins/gubbins-core/[testing] + npm install -g autorest + - name: Run autorest + run: | + autorest --python --help + $HOME/.autorest/\@autorest_python\@*/node_modules/\@autorest/python/venv/bin/python -m pip install --upgrade setuptools + export DIRACX_EXTENSIONS=gubbins,diracx + pytest --no-cov --regenerate-client extensions/gubbins/gubbins-client/tests/test_regenerate.py diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5b26b8a3..2b1a7a1b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,8 @@ jobs: run: | find -name '*.sh' -print0 | xargs -0 -n1 shellcheck --exclude=SC1090,SC1091 --external-source - pytest: + + unittest: name: Unit test - ${{ matrix.package }} runs-on: ubuntu-latest strategy: @@ -61,6 +62,9 @@ jobs: - name: Set up environment run: | pip install pytest-github-actions-annotate-failures + # Note: DIRAC will install pretty much everything + # from diracx so installing just the dependency may + # be a bit useless pip install git+https://github.com/DIRACGrid/DIRAC.git@integration pip install ${{ matrix.dependencies }} - name: Run pytest diff --git a/.gitignore b/.gitignore index fafc5d10..32cded81 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ parts bin develop-eggs .installed.cfg +*.whl # Translations *.mo diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 20d16099..b4fb2818 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml 
@@ -35,4 +35,4 @@ repos: - types-requests - types-aiobotocore[essential] - boto3-stubs[essential] - exclude: ^(diracx-client/src/diracx/client/|diracx-[a-z]+/tests/|diracx-testing/|build) + exclude: ^(diracx-client/src/diracx/client/generated|diracx-[a-z]+/tests/|diracx-testing/|build|extensions/gubbins/gubbins-client/src/gubbins/client/generated) diff --git a/codecov.yml b/codecov.yml index 93dbba97..c7904cfc 100644 --- a/codecov.yml +++ b/codecov.yml @@ -12,6 +12,10 @@ coverage: codecov: notify: - after_n_builds: 7 + # 6 diracx unit tests + # 1 diracx integration test + # 5 gubbins unit test + # 1 gubbins integration test + after_n_builds: 13 comment: false diff --git a/diracx-cli/pyproject.toml b/diracx-cli/pyproject.toml index 31ac7fab..d33abfb6 100644 --- a/diracx-cli/pyproject.toml +++ b/diracx-cli/pyproject.toml @@ -36,6 +36,15 @@ types = [ [project.scripts] dirac = "diracx.cli:app" +[project.entry-points."diracx.cli"] +jobs = "diracx.cli.jobs:app" +config = "diracx.cli.config:app" + +[project.entry-points."diracx.cli.hidden"] +internal = "diracx.cli.internal:app" + + + [tool.setuptools.packages.find] where = ["src"] @@ -58,3 +67,4 @@ asyncio_mode = "auto" markers = [ "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", ] +asyncio_default_fixture_loop_scope = "session" diff --git a/diracx-cli/src/diracx/cli/__init__.py b/diracx-cli/src/diracx/cli/__init__.py index ed76821c..d9292268 100644 --- a/diracx-cli/src/diracx/cli/__init__.py +++ b/diracx-cli/src/diracx/cli/__init__.py @@ -8,10 +8,10 @@ from diracx.client.aio import DiracClient from diracx.client.models import DeviceFlowErrorResponse +from diracx.core.extensions import select_from_extension from diracx.core.preferences import get_diracx_preferences from diracx.core.utils import write_credentials -from . 
import config, internal, jobs from .utils import AsyncTyper app = AsyncTyper() @@ -115,9 +115,26 @@ def callback(output_format: Optional[str] = None): os.environ["DIRACX_OUTPUT_FORMAT"] = output_format -app.add_typer(jobs.app, name="jobs") -app.add_typer(config.app, name="config") -app.add_typer(internal.app, name="internal", hidden=True) +# Load all the sub commands + +cli_names = set( + [entry_point.name for entry_point in select_from_extension(group="diracx.cli")] +) +for cli_name in cli_names: + entry_point = select_from_extension(group="diracx.cli", name=cli_name)[0] + print(f"CHRIS EXTENSION {entry_point=}") + app.add_typer(entry_point.load(), name=entry_point.name) + + +cli_hidden_names = set( + [ + entry_point.name + for entry_point in select_from_extension(group="diracx.cli.hidden") + ] +) +for cli_name in cli_hidden_names: + entry_point = select_from_extension(group="diracx.cli.hidden", name=cli_name)[0] + app.add_typer(entry_point.load(), name=entry_point.name, hidden=True) if __name__ == "__main__": diff --git a/diracx-client/pyproject.toml b/diracx-client/pyproject.toml index 1763228d..849a26fb 100644 --- a/diracx-client/pyproject.toml +++ b/diracx-client/pyproject.toml @@ -4,7 +4,7 @@ description = "TODO" readme = "README.md" requires-python = ">=3.11" keywords = [] -license = {text = "GPL-3.0-only"} +license = { text = "GPL-3.0-only" } classifiers = [ "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", @@ -12,21 +12,12 @@ classifiers = [ "Topic :: Scientific/Engineering", "Topic :: System :: Distributed Computing", ] -dependencies = [ - "azure-core", - "diracx-core", - "isodate", - "requests", -] +dependencies = ["azure-core", "diracx-core", "isodate", "requests"] dynamic = ["version"] [project.optional-dependencies] -testing = [ - "diracx-testing", -] -types = [ - "types-requests", -] +testing = ["diracx-testing"] +types = ["types-requests"] [tool.setuptools.packages.find] where = ["src"] @@ 
-35,6 +26,11 @@ where = ["src"] requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] build-backend = "setuptools.build_meta" +[project.entry-points."diracx"] +client_class = "diracx.client.generated._client:Dirac" +aio_client_class = "diracx.client.generated.aio._client:Dirac" + + [tool.setuptools_scm] root = ".." @@ -42,7 +38,8 @@ root = ".." testpaths = ["tests"] addopts = [ "-v", - "--cov=diracx.client", "--cov-report=term-missing", + "--cov=diracx.client", + "--cov-report=term-missing", "-pdiracx.testing", "--import-mode=importlib", ] diff --git a/diracx-client/src/diracx/client/__init__.py b/diracx-client/src/diracx/client/__init__.py index cc37da18..38262ec7 100644 --- a/diracx-client/src/diracx/client/__init__.py +++ b/diracx-client/src/diracx/client/__init__.py @@ -1,21 +1,7 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- +from .extensions import initialize_client -from ._client import Dirac +initialize_client() -try: - from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "Dirac", -] -__all__.extend([p for p in _patch_all if p not in __all__]) - -_patch_sdk() +from .generated import * # pylint: disable=unused-wildcard-import +from .patches import DiracClient diff --git a/diracx-client/src/diracx/client/aio.py b/diracx-client/src/diracx/client/aio.py new file mode 100644 index 00000000..619b5cb8 --- /dev/null +++ b/diracx-client/src/diracx/client/aio.py @@ -0,0 +1,3 @@ +# from .generated.aio import * # pylint: disable=unused-wildcard-import + +from .patches.aio import DiracClient diff --git a/diracx-client/src/diracx/client/extensions.py b/diracx-client/src/diracx/client/extensions.py new file mode 100644 index 00000000..3379573e --- /dev/null +++ b/diracx-client/src/diracx/client/extensions.py @@ -0,0 +1,95 @@ +import os +import sys +import importlib +from importlib.abc import MetaPathFinder, Loader + + +from importlib.abc import MetaPathFinder, Loader +from importlib.util import spec_from_loader +from importlib.machinery import SourceFileLoader, ModuleSpec + + +class DiracxLoader(SourceFileLoader): + + def create_module(self, spec): + if spec.name in sys.modules: + return sys.modules[spec.name] + + def exec_module(self, module): ... + + +class DiracxPathFinder(MetaPathFinder): + """ + This MetaPathFinder modifies the import such that the patches + from vanilla diracx are looked at first.
+ """ + + diracx_extensions = os.environ.get("DIRACX_EXTENSIONS", "diracx").split(",") + + @classmethod + def find_spec(cls, fullname, path, target=None): + for i, extension in enumerate(cls.diracx_extensions, start=1): + # If we are trying to load the patch from an extension + # make sure it does not exist in the lower levels first + if any( + [ + fullname.startswith(prefix) + for prefix in [ + f"{extension}.client.generated.operations._patch", + f"{extension}.client.generated.models._patch", + f"{extension}.client.generated.aio.operations._patch", + ] + ] + ): + for lower_extension in cls.diracx_extensions[i:][::-1]: + try: + patched_name = fullname.replace(extension, lower_extension) + # breakpoint() + overwritten = importlib.util.find_spec(patched_name) + + spec = ModuleSpec( + patched_name, DiracxLoader(patched_name, path) + ) + return spec + if patched_name in sys.modules: + # print(sys.modules[patched_name].__spec__) + return sys.modules[patched_name].__spec__ + + overwritten = importlib.util.find_spec(patched_name) + + # overwritten = spec_from_loader(patched_name, DiracxLoader(filepath)) + return overwritten + except Exception as e: + pass + + return None + + +def initialize_client(): + + # insert a DiracxPathFinder instance at the start of the meta_path list + if not isinstance(sys.meta_path[0], DiracxPathFinder): + + sys.meta_path.insert(0, DiracxPathFinder()) + + # Reload all the client module that could potentially have been + # already loaded + # This was needed when the generated code was at the top + # level of the module. 
+ # In principle, this is not needed anymore so I comment it out, + # but in case it ends up being needed, I keep it there, as it is rather + # tricky + # importlib.invalidate_caches() + # diracx_extensions = os.environ.get("DIRACX_EXTENSIONS", "diracx").split(",") + # for top_module in diracx_extensions: + # for module_name, module in sys.modules.copy().items(): + # if ( + # (f"{top_module}.client" in module_name) + # and module_name + # not in ( + # f"{top_module}.client.generated", + # f"{top_module}.client.generated._patch", + # ) + # and "_patch" in module_name + # ): + # importlib.reload(module) diff --git a/diracx-client/src/diracx/client/aio/__init__.py b/diracx-client/src/diracx/client/generated/__init__.py similarity index 90% rename from diracx-client/src/diracx/client/aio/__init__.py rename to diracx-client/src/diracx/client/generated/__init__.py index cc37da18..fa740e66 100644 --- a/diracx-client/src/diracx/client/aio/__init__.py +++ b/diracx-client/src/diracx/client/generated/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -9,7 +9,7 @@ try: from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import -except ImportError: +except ValueError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk diff --git a/diracx-client/src/diracx/client/_client.py b/diracx-client/src/diracx/client/generated/_client.py similarity index 92% rename from diracx-client/src/diracx/client/_client.py rename to diracx-client/src/diracx/client/generated/_client.py index 962319fa..434a8038 100644 --- a/diracx-client/src/diracx/client/_client.py +++ b/diracx-client/src/diracx/client/generated/_client.py @@ -1,11 +1,12 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy from typing import Any +from typing_extensions import Self from azure.core import PipelineClient from azure.core.pipeline import policies @@ -26,13 +27,13 @@ class Dirac: # pylint: disable=client-accepts-api-version-keyword """Dirac. 
:ivar well_known: WellKnownOperations operations - :vartype well_known: client.operations.WellKnownOperations + :vartype well_known: generated.operations.WellKnownOperations :ivar auth: AuthOperations operations - :vartype auth: client.operations.AuthOperations + :vartype auth: generated.operations.AuthOperations :ivar config: ConfigOperations operations - :vartype config: client.operations.ConfigOperations + :vartype config: generated.operations.ConfigOperations :ivar jobs: JobsOperations operations - :vartype jobs: client.operations.JobsOperations + :vartype jobs: generated.operations.JobsOperations :keyword endpoint: Service URL. Required. Default value is "". :paramtype endpoint: str """ @@ -112,7 +113,7 @@ def send_request( def close(self) -> None: self._client.close() - def __enter__(self) -> "Dirac": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/diracx-client/src/diracx/client/_configuration.py b/diracx-client/src/diracx/client/generated/_configuration.py similarity index 97% rename from diracx-client/src/diracx/client/_configuration.py rename to diracx-client/src/diracx/client/generated/_configuration.py index e1883c55..e79ab80a 100644 --- a/diracx-client/src/diracx/client/_configuration.py +++ b/diracx-client/src/diracx/client/generated/_configuration.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/generated/_patch.py b/diracx-client/src/diracx/client/generated/_patch.py new file mode 100644 index 00000000..5c98b9b8 --- /dev/null +++ b/diracx-client/src/diracx/client/generated/_patch.py @@ -0,0 +1,47 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from __future__ import annotations + +import os +from datetime import datetime, timezone +import importlib.util +import json +import jwt +import requests + +from pathlib import Path +from typing import Any, Dict, List, Optional, cast +from urllib import parse +from azure.core.credentials import AccessToken +from azure.core.credentials import TokenCredential +from azure.core.pipeline import PipelineRequest +from azure.core.pipeline.policies import BearerTokenCredentialPolicy + +from diracx.core.preferences import DiracxPreferences, get_diracx_preferences + + +import sys +import importlib +from importlib.abc import MetaPathFinder, Loader + +__all__: List[str] = [ + "DiracClient", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ + + +from ..patches import DiracClient diff --git a/diracx-client/src/diracx/client/_serialization.py b/diracx-client/src/diracx/client/generated/_serialization.py similarity index 85% rename from diracx-client/src/diracx/client/_serialization.py rename to diracx-client/src/diracx/client/generated/_serialization.py index 5e7b24dc..0ba76b66 100644 --- a/diracx-client/src/diracx/client/_serialization.py +++ b/diracx-client/src/diracx/client/generated/_serialization.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. @@ -24,7 +25,6 @@ # # -------------------------------------------------------------------------- -# pylint: skip-file # pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode @@ -52,7 +52,6 @@ MutableMapping, Type, List, - Mapping, ) try: @@ -93,6 +92,8 @@ def deserialize_from_text( :param data: Input, could be bytes or stream (will be decoded with UTF8) or text :type data: str or bytes or IO :param str content_type: The content type. + :return: The deserialized data. + :rtype: object """ if hasattr(data, "read"): # Assume a stream @@ -114,7 +115,9 @@ def deserialize_from_text( try: return json.loads(data_as_str) except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) + raise DeserializationError( + "JSON is invalid: {}".format(err), err + ) from err elif "xml" in (content_type or []): try: @@ -146,6 +149,8 @@ def _json_attemp(data): # context otherwise. 
_LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError( "Cannot deserialize content-type: {}".format(content_type) ) @@ -159,6 +164,11 @@ def deserialize_from_http_generics( Use bytes and headers to NOT use any requests/aiohttp or whatever specific implementation. Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object """ # Try to use content-type from headers if available content_type = None @@ -188,15 +198,30 @@ class UTC(datetime.tzinfo): """Time Zone info for handling UTC""" def utcoffset(self, dt): - """UTF offset for UTC is 0.""" + """UTF offset for UTC is 0. + + :param datetime.datetime dt: The datetime + :returns: The offset + :rtype: datetime.timedelta + """ return datetime.timedelta(0) def tzname(self, dt): - """Timestamp representation.""" + """Timestamp representation. + + :param datetime.datetime dt: The datetime + :returns: The timestamp representation + :rtype: str + """ return "Z" def dst(self, dt): - """No daylight saving for UTC.""" + """No daylight saving for UTC. + + :param datetime.datetime dt: The datetime + :returns: The daylight saving time + :rtype: datetime.timedelta + """ return datetime.timedelta(hours=1) @@ -239,24 +264,28 @@ def __getinitargs__(self): _FLATTEN = re.compile(r"(? None: self.additional_properties: Optional[Dict[str, Any]] = {} - for k in kwargs: + for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning( "%s is not a known attribute of class %s and will be ignored", @@ -312,13 +348,23 @@ def __init__(self, **kwargs: Any) -> None: setattr(self, k, kwargs[k]) def __eq__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. 
+ + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False def __ne__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ return not self.__eq__(other) def __str__(self) -> str: @@ -338,7 +384,11 @@ def is_xml_model(cls) -> bool: @classmethod def _create_xml_node(cls): - """Create XML node.""" + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ try: xml_map = cls._xml_map # type: ignore except AttributeError: @@ -362,7 +412,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) def as_dict( self, @@ -398,12 +450,15 @@ def my_key_transformer(key, attr_desc, value): If you want XML serialization, you can pass the kwargs is_xml=True. + :param bool keep_readonly: If you want to serialize the readonly attributes :param function key_transformer: A key transformer function. 
:returns: A dict JSON compatible object :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) @classmethod def _infer_class_models(cls): @@ -415,7 +470,7 @@ def _infer_class_models(cls): } if cls.__name__ not in client_models: raise ValueError("Not Autorest generated code") - except Exception: + except Exception: # pylint: disable=broad-exception-caught # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. client_models = {cls.__name__: cls} return client_models @@ -430,6 +485,7 @@ def deserialize( :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model :raises: DeserializationError if something went wrong + :rtype: ModelType """ deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @@ -448,9 +504,11 @@ def from_dict( and last_rest_key_case_insensitive_extractor) :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. 
:returns: An instance of this model :raises: DeserializationError if something went wrong + :rtype: ModelType """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -470,7 +528,9 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) + result.update( + objects[valuetype]._flatten_subtype(key, objects) + ) # pylint: disable=protected-access return result @classmethod @@ -478,6 +538,11 @@ def _classify(cls, response, objects): """Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. Remove the polymorphic key from the initial data. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class """ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): subtype_value = None @@ -531,11 +596,13 @@ def _decode_attribute_map_key(key): inside the received data. :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str """ return key.replace("\\.", ".") -class Serializer(object): +class Serializer(object): # pylint: disable=too-many-public-methods """Request object model serializer.""" basic_types = {str: "str", int: "int", bool: "bool", float: "float"} @@ -590,13 +657,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None): self.key_transformer = full_restapi_key_transformer self.client_side_validation = True - def _serialize(self, target_obj, data_type=None, **kwargs): + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): """Serialize data into a string according to type. - :param target_obj: The data to be serialized. + :param object target_obj: The data to be serialized. 
:param str data_type: The type to be serialized from. :rtype: str, dict :raises: SerializationError if serialization fails. + :returns: The serialized data. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) keep_readonly = kwargs.get("keep_readonly", False) @@ -624,13 +694,18 @@ def _serialize(self, target_obj, data_type=None, **kwargs): serialized = {} if is_xml_model_serialization: - serialized = target_obj._create_xml_node() + serialized = ( + target_obj._create_xml_node() + ) # pylint: disable=protected-access try: - attributes = target_obj._attribute_map + attributes = target_obj._attribute_map # pylint: disable=protected-access for attr, attr_desc in attributes.items(): attr_name = attr - if not keep_readonly and target_obj._validation.get(attr_name, {}).get( - "readonly", False + if ( + not keep_readonly + and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False) ): continue @@ -671,7 +746,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if isinstance(new_attr, list): serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. if "name" not in getattr(orig_attr, "_xml_map", {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace @@ -704,17 +780,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs): attr_name, class_name, str(target_obj) ) raise SerializationError(msg) from err - else: - return serialized + return serialized def body(self, data, data_type, **kwargs): """Serialize data intended for a request body. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. 
:rtype: dict :raises: SerializationError if serialization fails. :raises: ValueError if data is None + :returns: The serialized request body """ # Just in case this is a dict @@ -745,7 +821,9 @@ def body(self, data, data_type, **kwargs): attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, ] - data = deserializer._deserialize(data_type, data) + data = deserializer._deserialize( + data_type, data + ) # pylint: disable=protected-access except DeserializationError as err: raise SerializationError( "Unable to build a model: " + str(err) @@ -756,9 +834,11 @@ def body(self, data, data_type, **kwargs): def url(self, name, data, data_type, **kwargs): """Serialize data intended for a URL path. - :param data: The data to be serialized. + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str + :returns: The serialized URL path :raises: TypeError if serialization fails. :raises: ValueError if data is None """ @@ -772,21 +852,20 @@ def url(self, name, data, data_type, **kwargs): output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return output + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output def query(self, name, data, data_type, **kwargs): """Serialize data intended for a URL query. - :param data: The data to be serialized. + :param str name: The name of the query parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :keyword bool skip_quote: Whether to skip quote the serialized result. - Defaults to False. :rtype: str, list :raises: TypeError if serialization fails. 
:raises: ValueError if data is None + :returns: The serialized query parameter """ try: # Treat the list aside, since we don't want to encode the div separator @@ -805,19 +884,20 @@ def query(self, name, data, data_type, **kwargs): output = str(output) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def header(self, name, data, data_type, **kwargs): """Serialize data intended for a request header. - :param data: The data to be serialized. + :param str name: The name of the header. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str :raises: TypeError if serialization fails. :raises: ValueError if data is None + :returns: The serialized header """ try: if data_type in ["[str]"]: @@ -826,21 +906,20 @@ def header(self, name, data, data_type, **kwargs): output = self.serialize_data(data, data_type, **kwargs) if data_type == "bool": output = json.dumps(output) - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def serialize_data(self, data, data_type, **kwargs): """Serialize generic data according to supplied data type. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :param bool required: Whether it's essential that the data not be - empty or None :raises: AttributeError if required data is None. :raises: ValueError if data is None :raises: SerializationError if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list """ if data is None: raise ValueError("No value for given attribute") @@ -851,7 +930,7 @@ def serialize_data(self, data, data_type, **kwargs): if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) - elif data_type in self.serialize_type: + if data_type in self.serialize_type: return self.serialize_type[data_type](data, **kwargs) # If dependencies is empty, try with current data class @@ -867,11 +946,12 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." raise SerializationError(msg.format(data, data_type)) from err - else: - return self._serialize(data, **kwargs) + return self._serialize(data, **kwargs) @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): + def _get_custom_serializers( + cls, data_type, **kwargs + ): # pylint: disable=inconsistent-return-statements custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer @@ -887,23 +967,26 @@ def serialize_basic(cls, data, data_type, **kwargs): - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - is_xml bool : If set, use xml_basic_types_serializers - :param data: Object to be serialized. + :param obj data: Object to be serialized. :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec + return eval(data_type)(data) # nosec # pylint: disable=eval-used @classmethod def serialize_unicode(cls, data): """Special handling for serializing unicode strings in Py2. Encode to UTF-8 if unicode, otherwise handle as a str. 
- :param data: Object to be serialized. + :param str data: Object to be serialized. :rtype: str + :return: serialized object """ try: # If I received an enum, return its value return data.value @@ -917,8 +1000,7 @@ def serialize_unicode(cls, data): return data except NameError: return str(data) - else: - return str(data) + return str(data) def serialize_iter(self, data, iter_type, div=None, **kwargs): """Serialize iterable. @@ -928,15 +1010,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): serialization_ctxt['type'] should be same as data_type. - is_xml bool : If set, serialize as XML - :param list attr: Object to be serialized. + :param list data: Object to be serialized. :param str iter_type: Type of object in the iterable. - :param bool required: Whether the objects in the iterable must - not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. - :keyword bool do_quote: Whether to quote the serialized result of each iterable element. Defaults to False. :rtype: list, str + :return: serialized iterable """ if isinstance(data, str): raise SerializationError("Refuse str type as a valid iter type.") @@ -999,9 +1079,8 @@ def serialize_dict(self, attr, dict_type, **kwargs): :param dict attr: Object to be serialized. :param str dict_type: Type of object in the dictionary. - :param bool required: Whether the objects in the dictionary must - not be None or empty. :rtype: dict + :return: serialized dictionary """ serialization_ctxt = kwargs.get("serialization_ctxt", {}) serialized = {} @@ -1029,7 +1108,9 @@ def serialize_dict(self, attr, dict_type, **kwargs): return serialized - def serialize_object(self, attr, **kwargs): + def serialize_object( + self, attr, **kwargs + ): # pylint: disable=too-many-return-statements """Serialize a generic object. This will be handled as a dictionary. 
If object passed in is not a basic type (str, int, float, dict, list) it will simply be @@ -1037,6 +1118,7 @@ def serialize_object(self, attr, **kwargs): :param dict attr: Object to be serialized. :rtype: dict or str + :return: serialized object """ if attr is None: return None @@ -1061,7 +1143,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_decimal(attr) # If it's a model or I know this dependency, serialize as a Model - elif obj_type in self.dependencies.values() or isinstance(attr, Model): + if obj_type in self.dependencies.values() or isinstance(attr, Model): return self._serialize(attr) if obj_type == dict: @@ -1094,56 +1176,61 @@ def serialize_enum(attr, enum_obj=None): try: enum_obj(result) # type: ignore return result - except ValueError: + except ValueError as exc: for enum_value in enum_obj: # type: ignore if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) + raise SerializationError(error.format(attr, enum_obj)) from exc @staticmethod - def serialize_bytearray(attr, **kwargs): + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument """Serialize bytearray into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ return b64encode(attr).decode() @staticmethod - def serialize_base64(attr, **kwargs): + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument """Serialize str into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. 
:rtype: str + :return: serialized base64 """ encoded = b64encode(attr).decode("ascii") return encoded.strip("=").replace("+", "-").replace("/", "_") @staticmethod - def serialize_decimal(attr, **kwargs): + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument """Serialize Decimal object to float. - :param attr: Object to be serialized. + :param decimal attr: Object to be serialized. :rtype: float + :return: serialized decimal """ return float(attr) @staticmethod - def serialize_long(attr, **kwargs): + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument """Serialize long (Py2) or int (Py3). - :param attr: Object to be serialized. + :param int attr: Object to be serialized. :rtype: int/long + :return: serialized long """ return _long_type(attr) @staticmethod - def serialize_date(attr, **kwargs): + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument """Serialize Date object into ISO-8601 formatted string. :param Date attr: Object to be serialized. :rtype: str + :return: serialized date """ if isinstance(attr, str): attr = isodate.parse_date(attr) @@ -1151,11 +1238,12 @@ def serialize_date(attr, **kwargs): return t @staticmethod - def serialize_time(attr, **kwargs): + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument """Serialize Time object into ISO-8601 formatted string. :param datetime.time attr: Object to be serialized. :rtype: str + :return: serialized time """ if isinstance(attr, str): attr = isodate.parse_time(attr) @@ -1165,30 +1253,32 @@ def serialize_time(attr, **kwargs): return t @staticmethod - def serialize_duration(attr, **kwargs): + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument """Serialize TimeDelta object into ISO-8601 formatted string. :param TimeDelta attr: Object to be serialized. 
:rtype: str + :return: serialized duration """ if isinstance(attr, str): attr = isodate.parse_duration(attr) return isodate.duration_isoformat(attr) @staticmethod - def serialize_rfc(attr, **kwargs): + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into RFC-1123 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: TypeError if format invalid. + :return: serialized rfc """ try: if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() - except AttributeError: - raise TypeError("RFC1123 object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( Serializer.days[utc.tm_wday], @@ -1201,12 +1291,13 @@ def serialize_rfc(attr, **kwargs): ) @staticmethod - def serialize_iso(attr, **kwargs): + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: SerializationError if format invalid. + :return: serialized iso """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) @@ -1237,13 +1328,14 @@ def serialize_iso(attr, **kwargs): raise TypeError(msg) from err @staticmethod - def serialize_unix(attr, **kwargs): + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into IntTime format. This is represented as seconds. :param Datetime attr: Object to be serialized. 
:rtype: int :raises: SerializationError if format invalid + :return: serialied unix """ if isinstance(attr, int): return attr @@ -1251,11 +1343,11 @@ def serialize_unix(attr, **kwargs): if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") return int(calendar.timegm(attr.utctimetuple())) - except AttributeError: - raise TypeError("Unix time object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc -def rest_key_extractor(attr, attr_desc, data): +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument key = attr_desc["key"] working_data = data @@ -1276,7 +1368,9 @@ def rest_key_extractor(attr, attr_desc, data): return working_data.get(key) -def rest_key_case_insensitive_extractor(attr, attr_desc, data): +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): key = attr_desc["key"] working_data = data @@ -1299,17 +1393,31 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): return attribute_key_case_insensitive_extractor(key, None, working_data) -def last_rest_key_extractor(attr, attr_desc, data): - """Extract the attribute in "data" based on the last part of the JSON path key.""" +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) return attribute_key_extractor(dict_keys[-1], None, data) -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): +def last_rest_key_case_insensitive_extractor( + attr, attr_desc, data +): # pylint: disable=unused-argument """Extract the attribute in "data" based on the last part of the JSON path key. This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) @@ -1346,7 +1454,9 @@ def _extract_name_from_internal_type(internal_type): return xml_name -def xml_key_extractor(attr, attr_desc, data): +def xml_key_extractor( + attr, attr_desc, data +): # pylint: disable=unused-argument,too-many-return-statements if isinstance(data, dict): return None @@ -1403,22 +1513,21 @@ def xml_key_extractor(attr, attr_desc, data): if is_iter_type: if is_wrapped: return None # is_wrapped no node, we want None - else: - return [] # not wrapped, assume empty list + return [] # not wrapped, assume empty list return None # Assume it's not there, maybe an optional node. # If is_iter_type and not wrapped, return all found children if is_iter_type: if not is_wrapped: return children - else: # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. 
Maybe you should declare this array as wrapped?".format( - xml_name - ) + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + xml_name ) - return list(children[0]) # Might be empty list and that's ok. + ) + return list(children[0]) # Might be empty list and that's ok. # Here it's not a itertype, we should have found one element only or empty if len(children) > 1: @@ -1438,7 +1547,7 @@ class Deserializer(object): basic_types = {str: "str", int: "int", bool: "bool", float: "float"} valid_date = re.compile( - r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?" + r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?" ) def __init__(self, classes: Optional[Mapping[str, type]] = None): @@ -1479,11 +1588,14 @@ def __call__(self, target_obj, response_data, content_type=None): :param str content_type: Swagger "produces" if available. :raises: DeserializationError if deserialization fails. :return: Deserialized object. + :rtype: object """ data = self._unpack_content(response_data, content_type) return self._deserialize(target_obj, data) - def _deserialize(self, target_obj, data): + def _deserialize( + self, target_obj, data + ): # pylint: disable=inconsistent-return-statements """Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree @@ -1492,6 +1604,7 @@ def _deserialize(self, target_obj, data): :param object data: Object to deserialize. :raises: DeserializationError if deserialization fails. :return: Deserialized object. 
+ :rtype: object """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): @@ -1501,7 +1614,10 @@ def _deserialize(self, target_obj, data): if config.get("constant") ] try: - for attr, mapconfig in data._attribute_map.items(): + for ( + attr, + mapconfig, + ) in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1522,13 +1638,13 @@ def _deserialize(self, target_obj, data): if isinstance(response, str): return self.deserialize_data(data, response) - elif isinstance(response, type) and issubclass(response, Enum): + if isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) if data is None or data is CoreNull: return data try: - attributes = response._attribute_map # type: ignore + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... @@ -1558,9 +1674,8 @@ def _deserialize(self, target_obj, data): except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore raise DeserializationError(msg) from err - else: - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: @@ -1590,6 +1705,8 @@ def _classify_target(self, target, data): :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple """ if target is None: return None, None @@ -1601,7 +1718,7 @@ def _classify_target(self, target, data): return target, target try: - target = target._classify(data, self.dependencies) # type: ignore + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1616,10 +1733,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): :param str target_obj: The target object type to deserialize to. :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object """ try: return self(target_obj, data, content_type=content_type) - except: + except: # pylint: disable=bare-except _LOGGER.debug( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True, @@ -1638,10 +1757,12 @@ def _unpack_content(raw_data, content_type=None): If raw_data is something else, bypass all logic and return it directly. - :param raw_data: Data to be processed. - :param content_type: How to parse if raw_data is a string/bytes. + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. """ # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) @@ -1671,17 +1792,24 @@ def _unpack_content(raw_data, content_type=None): def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. - :param response: The response model class. - :param d_attrs: The deserialized response attributes. 
+ :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. """ if callable(response): subtype = getattr(response, "_subtype_map", {}) try: readonly = [ - k for k, v in response._validation.items() if v.get("readonly") + k + for k, v in response._validation.items() + if v.get("readonly") # pylint: disable=protected-access ] const = [ - k for k, v in response._validation.items() if v.get("constant") + k + for k, v in response._validation.items() + if v.get("constant") # pylint: disable=protected-access ] kwargs = { k: v @@ -1696,7 +1824,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None): return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) + raise DeserializationError(msg + str(err)) from err else: try: for attr, value in attrs.items(): @@ -1705,15 +1833,18 @@ def _instantiate_model(self, response, attrs, additional_properties=None): except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) + raise DeserializationError(msg) from exp - def deserialize_data(self, data, data_type): + def deserialize_data( + self, data, data_type + ): # pylint: disable=too-many-return-statements """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. :raises: DeserializationError if deserialization fails. :return: Deserialized object. 
+ :rtype: object """ if data is None: return data @@ -1729,7 +1860,14 @@ def deserialize_data(self, data, data_type): ): return data - is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + is_a_text_parsing_type = ( + lambda x: x + not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + ) if ( isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) @@ -1753,14 +1891,14 @@ def deserialize_data(self, data, data_type): msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise DeserializationError(msg) from err - else: - return self._deserialize(obj_type, data) + return self._deserialize(obj_type, data) def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. :rtype: list """ if attr is None: @@ -1783,6 +1921,7 @@ def deserialize_dict(self, attr, dict_type): :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. :rtype: dict """ if isinstance(attr, list): @@ -1795,11 +1934,14 @@ def deserialize_dict(self, attr, dict_type): attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - def deserialize_object(self, attr, **kwargs): + def deserialize_object( + self, attr, **kwargs + ): # pylint: disable=too-many-return-statements """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. :rtype: dict :raises: TypeError if non-builtin datatype encountered. 
""" @@ -1834,11 +1976,12 @@ def deserialize_object(self, attr, **kwargs): pass return deserialized - else: - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) - def deserialize_basic(self, attr, data_type): + def deserialize_basic( + self, attr, data_type + ): # pylint: disable=too-many-return-statements """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as @@ -1846,6 +1989,7 @@ def deserialize_basic(self, attr, data_type): :param str attr: response string to be deserialized. :param str data_type: deserialization data type. + :return: Deserialized basic type. :rtype: str, int, float or bool :raises: TypeError if string format is not valid. """ @@ -1857,24 +2001,23 @@ def deserialize_basic(self, attr, data_type): if data_type == "str": # None or '', node is empty string. return "" - else: - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, str): + if isinstance(attr, str): if attr.lower() in ["true", "1"]: return True - elif attr.lower() in ["false", "0"]: + if attr.lower() in ["false", "0"]: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec + return eval(data_type)(attr) # nosec # pylint: disable=eval-used @staticmethod def deserialize_unicode(data): @@ -1882,6 +2025,7 @@ def deserialize_unicode(data): as a string. :param str data: response string to be deserialized. + :return: Deserialized string. 
:rtype: str or unicode """ # We might be here because we have an enum modeled as string, @@ -1895,8 +2039,7 @@ def deserialize_unicode(data): return data except NameError: return str(data) - else: - return str(data) + return str(data) @staticmethod def deserialize_enum(data, enum_obj): @@ -1908,6 +2051,7 @@ def deserialize_enum(data, enum_obj): :param str data: Response string to be deserialized. If this value is None or invalid it will be returned as-is. :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. :rtype: Enum """ if isinstance(data, enum_obj) or data is None: @@ -1918,9 +2062,9 @@ def deserialize_enum(data, enum_obj): # Workaround. We might consider remove it in the future. try: return list(enum_obj.__members__.values())[data] - except IndexError: + except IndexError as exc: error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) + raise DeserializationError(error.format(data, enum_obj)) from exc try: return enum_obj(str(data)) except ValueError: @@ -1940,6 +2084,7 @@ def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. + :return: Deserialized bytearray :rtype: bytearray :raises: TypeError if string format invalid. """ @@ -1952,6 +2097,7 @@ def deserialize_base64(attr): """Deserialize base64 encoded string into string. :param str attr: response string to be deserialized. + :return: Deserialized base64 string :rtype: bytearray :raises: TypeError if string format invalid. """ @@ -1967,8 +2113,9 @@ def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. - :rtype: Decimal + :return: Deserialized decimal :raises: DeserializationError if string format invalid. + :rtype: decimal """ if isinstance(attr, ET.Element): attr = attr.text @@ -1983,6 +2130,7 @@ def deserialize_long(attr): """Deserialize string into long (Py2) or int (Py3). 
:param str attr: response string to be deserialized. + :return: Deserialized int :rtype: long or int :raises: ValueError if string format invalid. """ @@ -1995,6 +2143,7 @@ def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. + :return: Deserialized duration :rtype: TimeDelta :raises: DeserializationError if string format invalid. """ @@ -2005,14 +2154,14 @@ def deserialize_duration(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise DeserializationError(msg) from err - else: - return duration + return duration @staticmethod def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. + :return: Deserialized date :rtype: Date :raises: DeserializationError if string format invalid. """ @@ -2030,6 +2179,7 @@ def deserialize_time(attr): """Deserialize ISO-8601 formatted string into time object. :param str attr: response string to be deserialized. + :return: Deserialized time :rtype: datetime.time :raises: DeserializationError if string format invalid. """ @@ -2046,6 +2196,7 @@ def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime :rtype: Datetime :raises: DeserializationError if string format invalid. """ @@ -2064,14 +2215,14 @@ def deserialize_rfc(attr): except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime :rtype: Datetime :raises: DeserializationError if string format invalid. 
""" @@ -2101,8 +2252,7 @@ def deserialize_iso(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_unix(attr): @@ -2110,6 +2260,7 @@ def deserialize_unix(attr): This is represented as seconds. :param int attr: Object to be serialized. + :return: Deserialized datetime :rtype: Datetime :raises: DeserializationError if format invalid """ @@ -2121,5 +2272,4 @@ def deserialize_unix(attr): except ValueError as err: msg = "Cannot deserialize to unix datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj diff --git a/diracx-client/src/diracx/client/_vendor.py b/diracx-client/src/diracx/client/generated/_vendor.py similarity index 97% rename from diracx-client/src/diracx/client/_vendor.py rename to diracx-client/src/diracx/client/generated/_vendor.py index 2b77be83..b8f659c3 100644 --- a/diracx-client/src/diracx/client/_vendor.py +++ b/diracx-client/src/diracx/client/generated/_vendor.py @@ -1,5 +1,5 @@ # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/generated/aio/__init__.py b/diracx-client/src/diracx/client/generated/aio/__init__.py new file mode 100644 index 00000000..fa740e66 --- /dev/null +++ b/diracx-client/src/diracx/client/generated/aio/__init__.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._client import Dirac + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import +except ValueError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "Dirac", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/diracx-client/src/diracx/client/aio/_client.py b/diracx-client/src/diracx/client/generated/aio/_client.py similarity index 92% rename from diracx-client/src/diracx/client/aio/_client.py rename to diracx-client/src/diracx/client/generated/aio/_client.py index e0128831..a6259c31 100644 --- a/diracx-client/src/diracx/client/aio/_client.py +++ b/diracx-client/src/diracx/client/generated/aio/_client.py @@ -1,11 +1,12 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from copy import deepcopy from typing import Any, Awaitable +from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies @@ -26,13 +27,13 @@ class Dirac: # pylint: disable=client-accepts-api-version-keyword """Dirac. :ivar well_known: WellKnownOperations operations - :vartype well_known: client.aio.operations.WellKnownOperations + :vartype well_known: generated.aio.operations.WellKnownOperations :ivar auth: AuthOperations operations - :vartype auth: client.aio.operations.AuthOperations + :vartype auth: generated.aio.operations.AuthOperations :ivar config: ConfigOperations operations - :vartype config: client.aio.operations.ConfigOperations + :vartype config: generated.aio.operations.ConfigOperations :ivar jobs: JobsOperations operations - :vartype jobs: client.aio.operations.JobsOperations + :vartype jobs: generated.aio.operations.JobsOperations :keyword endpoint: Service URL. Required. Default value is "". 
:paramtype endpoint: str """ @@ -112,7 +113,7 @@ def send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "Dirac": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/diracx-client/src/diracx/client/aio/_configuration.py b/diracx-client/src/diracx/client/generated/aio/_configuration.py similarity index 97% rename from diracx-client/src/diracx/client/aio/_configuration.py rename to diracx-client/src/diracx/client/generated/aio/_configuration.py index 3a48c2b6..02de397a 100644 --- a/diracx-client/src/diracx/client/aio/_configuration.py +++ b/diracx-client/src/diracx/client/generated/aio/_configuration.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/generated/aio/_patch.py b/diracx-client/src/diracx/client/generated/aio/_patch.py new file mode 100644 index 00000000..b2dddec5 --- /dev/null +++ b/diracx-client/src/diracx/client/generated/aio/_patch.py @@ -0,0 +1,35 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +import json +from types import TracebackType +from pathlib import Path +from typing import Any, List, Optional +from azure.core.credentials import AccessToken +from azure.core.credentials_async import AsyncTokenCredential +from azure.core.pipeline import PipelineRequest +from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy + +from diracx.core.preferences import get_diracx_preferences, DiracxPreferences + + +__all__: List[str] = [ + # "DiracClient", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ + + +# from ...patches.aio import DiracClient diff --git a/diracx-client/src/diracx/client/aio/_vendor.py b/diracx-client/src/diracx/client/generated/aio/_vendor.py similarity index 97% rename from diracx-client/src/diracx/client/aio/_vendor.py rename to diracx-client/src/diracx/client/generated/aio/_vendor.py index 2b77be83..b8f659c3 100644 --- a/diracx-client/src/diracx/client/aio/_vendor.py +++ b/diracx-client/src/diracx/client/generated/aio/_vendor.py @@ -1,5 +1,5 @@ # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/operations/__init__.py b/diracx-client/src/diracx/client/generated/aio/operations/__init__.py similarity index 94% rename from diracx-client/src/diracx/client/aio/operations/__init__.py rename to diracx-client/src/diracx/client/generated/aio/operations/__init__.py index eb877968..66a084b1 100644 --- a/diracx-client/src/diracx/client/aio/operations/__init__.py +++ b/diracx-client/src/diracx/client/generated/aio/operations/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/generated/aio/operations/_operations.py b/diracx-client/src/diracx/client/generated/aio/operations/_operations.py new file mode 100644 index 00000000..61f44cb9 --- /dev/null +++ b/diracx-client/src/diracx/client/generated/aio/operations/_operations.py @@ -0,0 +1,2906 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import ( + Any, + Callable, + Dict, + IO, + List, + Optional, + Type, + TypeVar, + Union, + overload, +) + +from azure.core import MatchConditions +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ... import models as _models +from ...operations._operations import ( + build_auth_authorization_flow_complete_request, + build_auth_authorization_flow_request, + build_auth_do_device_flow_request, + build_auth_finish_device_flow_request, + build_auth_finished_request, + build_auth_get_refresh_tokens_request, + build_auth_initiate_device_flow_request, + build_auth_revoke_refresh_token_request, + build_auth_userinfo_request, + build_config_serve_config_request, + build_jobs_assign_sandbox_to_job_request, + build_jobs_delete_bulk_jobs_request, + build_jobs_delete_single_job_request, + build_jobs_get_job_sandbox_request, + build_jobs_get_job_sandboxes_request, + build_jobs_get_job_status_bulk_request, + build_jobs_get_job_status_history_bulk_request, + build_jobs_get_sandbox_file_request, + build_jobs_get_single_job_request, + build_jobs_get_single_job_status_history_request, + build_jobs_get_single_job_status_request, + build_jobs_initiate_sandbox_upload_request, + build_jobs_kill_bulk_jobs_request, + build_jobs_kill_single_job_request, + build_jobs_remove_bulk_jobs_request, + build_jobs_remove_single_job_request, + build_jobs_reschedule_bulk_jobs_request, + build_jobs_reschedule_single_job_request, + build_jobs_search_request, + 
build_jobs_set_job_status_bulk_request, + build_jobs_set_single_job_properties_request, + build_jobs_set_single_job_status_request, + build_jobs_submit_bulk_jobs_request, + build_jobs_summary_request, + build_jobs_unassign_bulk_jobs_sandboxes_request, + build_jobs_unassign_job_sandboxes_request, + build_well_known_installation_metadata_request, + build_well_known_openid_configuration_request, +) +from .._vendor import raise_if_not_implemented + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[ + Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any] +] +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object + + +class WellKnownOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.aio.Dirac`'s + :attr:`well_known` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @distributed_trace_async + async def openid_configuration(self, **kwargs: Any) -> Any: + """Openid Configuration. + + OpenID Connect discovery endpoint. 
+ + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_well_known_openid_configuration_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def installation_metadata(self, **kwargs: Any) -> _models.Metadata: + """Installation Metadata. + + Get metadata about the dirac installation. 
+ + :return: Metadata + :rtype: ~generated.models.Metadata + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Metadata] = kwargs.pop("cls", None) + + _request = build_well_known_installation_metadata_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("Metadata", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class AuthOperations: # pylint: disable=abstract-class-instantiated + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.aio.Dirac`'s + :attr:`auth` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + raise_if_not_implemented( + self.__class__, + [ + "token", + ], + ) + + @distributed_trace_async + async def initiate_device_flow( + self, *, client_id: str, scope: str, **kwargs: Any + ) -> _models.InitiateDeviceFlowResponse: + """Initiate Device Flow. + + Initiate the device flow against DIRAC authorization Server. + Scope must have exactly up to one ``group`` (otherwise default) and + one or more ``property`` scope. + If no property, then get default one. + + Offers the user to go with the browser to + ``auth//device?user_code=XYZ``. + + :keyword client_id: Required. + :paramtype client_id: str + :keyword scope: Required. 
+ :paramtype scope: str + :return: InitiateDeviceFlowResponse + :rtype: ~generated.models.InitiateDeviceFlowResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.InitiateDeviceFlowResponse] = kwargs.pop("cls", None) + + _request = build_auth_initiate_device_flow_request( + client_id=client_id, + scope=scope, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "InitiateDeviceFlowResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: + """Do Device Flow. + + This is called as the verification URI for the device flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to + perform a authorization code flow. + + We set the user_code obtained from the device flow in a cookie + to be able to map the authorization flow with the corresponding + device flow. + (note: it can't be put as parameter or in the URL). + + :keyword user_code: Required. 
+ :paramtype user_code: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_do_device_flow_request( + user_code=user_code, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: + """Finish Device Flow. + + This the url callbacked by IAM/Checkin after the authorization + flow was granted. + It gets us the code we need for the authorization flow, and we + can map it to the corresponding device flow using the user_code + in the cookie/session. + + :keyword code: Required. + :paramtype code: str + :keyword state: Required. 
+ :paramtype state: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_finish_device_flow_request( + code=code, + state=state, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def finished(self, **kwargs: Any) -> Any: + """Finished. + + This is the final step of the device flow. 
+ + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_finished_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: + """Get Refresh Tokens. + + Get all refresh tokens for the user. If the user has the ``proxy_management`` property, then + the subject is not used to filter the refresh tokens. 
+ + :return: list of any + :rtype: list[any] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[Any]] = kwargs.pop("cls", None) + + _request = build_auth_get_refresh_tokens_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("[object]", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: + """Revoke Refresh Token. + + Revoke a refresh token. If the user has the ``proxy_management`` property, then + the subject is not used to filter the refresh tokens. + + :param jti: Required. 
+ :type jti: str + :return: str + :rtype: str + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[str] = kwargs.pop("cls", None) + + _request = build_auth_revoke_refresh_token_request( + jti=jti, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("str", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: + """Userinfo. + + Get information about the user's identity. 
+ + :return: UserInfoResponse + :rtype: ~generated.models.UserInfoResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.UserInfoResponse] = kwargs.pop("cls", None) + + _request = build_auth_userinfo_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "UserInfoResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def authorization_flow( + self, + *, + response_type: Union[str, _models.Enum0], + code_challenge: str, + code_challenge_method: Union[str, _models.Enum1], + client_id: str, + redirect_uri: str, + scope: str, + state: str, + **kwargs: Any, + ) -> Any: + """Authorization Flow. + + Initiate the authorization flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to + perform a authorization code flow. + + We set the user details obtained from the user authorize flow in a cookie + to be able to map the authorization flow with the corresponding + user authorize flow. 
+ + :keyword response_type: "code" Required. + :paramtype response_type: str or ~generated.models.Enum0 + :keyword code_challenge: Required. + :paramtype code_challenge: str + :keyword code_challenge_method: "S256" Required. + :paramtype code_challenge_method: str or ~generated.models.Enum1 + :keyword client_id: Required. + :paramtype client_id: str + :keyword redirect_uri: Required. + :paramtype redirect_uri: str + :keyword scope: Required. + :paramtype scope: str + :keyword state: Required. + :paramtype state: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_authorization_flow_request( + response_type=response_type, + code_challenge=code_challenge, + code_challenge_method=code_challenge_method, + client_id=client_id, + redirect_uri=redirect_uri, + scope=scope, + state=state, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + 
@distributed_trace_async + async def authorization_flow_complete( + self, *, code: str, state: str, **kwargs: Any + ) -> Any: + """Authorization Flow Complete. + + Complete the authorization flow. + + The user is redirected back to the DIRAC auth service after completing the IAM's authorization + flow. + We retrieve the original flow details from the decrypted state and store the ID token requested + from the IAM. + The user is then redirected to the client's redirect URI. + + :keyword code: Required. + :paramtype code: str + :keyword state: Required. + :paramtype state: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_authorization_flow_complete_request( + code=code, + state=state, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class ConfigOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~generated.aio.Dirac`'s + :attr:`config` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @distributed_trace_async + async def serve_config( + self, + *, + if_modified_since: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any, + ) -> Any: + """Serve Config. + + Get the latest view of the config. + + If If-None-Match header is given and matches the latest ETag, return 304 + + If If-Modified-Since is given and is newer than latest, + return 304: this is to avoid flip/flopping. + + :keyword if_modified_since: Default value is None. + :paramtype if_modified_since: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_config_serve_config_request( + if_modified_since=if_modified_since, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class JobsOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.aio.Dirac`'s + :attr:`jobs` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @overload + async def initiate_sandbox_upload( + self, + body: _models.SandboxInfo, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> _models.SandboxUploadResponse: + """Initiate Sandbox Upload. + + Get the PFN for the given sandbox, initiate an upload as required. + + If the sandbox already exists in the database then the PFN is returned + and there is no "url" field in the response. + + If the sandbox does not exist in the database then the "url" and "fields" + should be used to upload the sandbox to the storage backend. + + :param body: Required. + :type body: ~generated.models.SandboxInfo + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def initiate_sandbox_upload( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.SandboxUploadResponse: + """Initiate Sandbox Upload. + + Get the PFN for the given sandbox, initiate an upload as required. + + If the sandbox already exists in the database then the PFN is returned + and there is no "url" field in the response. + + If the sandbox does not exist in the database then the "url" and "fields" + should be used to upload the sandbox to the storage backend. + + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def initiate_sandbox_upload( + self, body: Union[_models.SandboxInfo, IO[bytes]], **kwargs: Any + ) -> _models.SandboxUploadResponse: + """Initiate Sandbox Upload. + + Get the PFN for the given sandbox, initiate an upload as required. + + If the sandbox already exists in the database then the PFN is returned + and there is no "url" field in the response. + + If the sandbox does not exist in the database then the "url" and "fields" + should be used to upload the sandbox to the storage backend. + + :param body: Is either a SandboxInfo type or a IO[bytes] type. Required. + :type body: ~generated.models.SandboxInfo or IO[bytes] + :return: SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[_models.SandboxUploadResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "SandboxInfo") + + _request = build_jobs_initiate_sandbox_upload_request( + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + 
params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "SandboxUploadResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_sandbox_file( + self, *, pfn: str, **kwargs: Any + ) -> _models.SandboxDownloadResponse: + """Get Sandbox File. + + Get a presigned URL to download a sandbox file. + + This route cannot use a redirect response most clients will also send the + authorization header when following a redirect. This is not desirable as + it would leak the authorization token to the storage backend. Additionally, + most storage backends return an error when they receive an authorization + header for a presigned URL. + + :keyword pfn: Required. 
+ :paramtype pfn: str + :return: SandboxDownloadResponse + :rtype: ~generated.models.SandboxDownloadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.SandboxDownloadResponse] = kwargs.pop("cls", None) + + _request = build_jobs_get_sandbox_file_request( + pfn=pfn, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "SandboxDownloadResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def unassign_bulk_jobs_sandboxes( + self, *, jobs_ids: List[int], **kwargs: Any + ) -> Any: + """Unassign Bulk Jobs Sandboxes. + + Delete bulk jobs sandbox mapping. + + :keyword jobs_ids: Required. 
+ :paramtype jobs_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_bulk_jobs_sandboxes_request( + jobs_ids=jobs_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_job_sandboxes( + self, job_id: int, **kwargs: Any + ) -> Dict[str, List[Any]]: + """Get Job Sandboxes. + + Get input and output sandboxes of given job. + + :param job_id: Required. 
+ :type job_id: int + :return: dict mapping str to list of any + :rtype: dict[str, list[any]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[Any]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("{[object]}", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: + """Unassign Job Sandboxes. + + Delete single job sandbox mapping. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_job_sandbox( + self, job_id: int, sandbox_type: Union[str, _models.SandboxType], **kwargs: Any + ) -> List[Any]: + """Get Job Sandbox. + + Get input or output sandbox of given job. + + :param job_id: Required. + :type job_id: int + :param sandbox_type: Known values are: "input" and "output". Required. 
+ :type sandbox_type: str or ~generated.models.SandboxType + :return: list of any + :rtype: list[any] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[Any]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandbox_request( + job_id=job_id, + sandbox_type=sandbox_type, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("[object]", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> Any: + """Assign Sandbox To Job. + + Map the pfn as output sandbox to job. + + :param job_id: Required. + :type job_id: int + :param body: Required. 
+ :type body: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("Content-Type", "application/json") + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + _content = self._serialize.body(body, "str") + + _request = build_jobs_assign_sandbox_to_job_request( + job_id=job_id, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def submit_bulk_jobs( + self, body: List[str], *, content_type: str = "application/json", **kwargs: Any + ) -> List[_models.InsertedJob]: + """Submit Bulk Jobs. + + Submit Bulk Jobs. + + :param body: Required. + :type body: list[str] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: list of InsertedJob + :rtype: list[~generated.models.InsertedJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def submit_bulk_jobs( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> List[_models.InsertedJob]: + """Submit Bulk Jobs. + + Submit Bulk Jobs. + + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: list of InsertedJob + :rtype: list[~generated.models.InsertedJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def submit_bulk_jobs( + self, body: Union[List[str], IO[bytes]], **kwargs: Any + ) -> List[_models.InsertedJob]: + """Submit Bulk Jobs. + + Submit Bulk Jobs. + + :param body: Is either a [str] type or a IO[bytes] type. Required. + :type body: list[str] or IO[bytes] + :return: list of InsertedJob + :rtype: list[~generated.models.InsertedJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[List[_models.InsertedJob]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "[str]") + + _request = build_jobs_submit_bulk_jobs_request( + 
content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "[InsertedJob]", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Delete Bulk Jobs. + + Delete Bulk Jobs. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_delete_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, 
response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Kill Bulk Jobs. + + Kill Bulk Jobs. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_kill_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Remove Bulk Jobs. + + Fully remove a list of jobs from the WMS databases. 
+ + WARNING: This endpoint has been implemented for the compatibility with the legacy DIRAC WMS + and the JobCleaningAgent. However, once this agent is ported to diracx, this endpoint should + be removed, and the delete endpoint should be used instead for any other purpose. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_remove_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_job_status_bulk( + self, *, job_ids: List[int], **kwargs: Any + ) -> Dict[str, _models.LimitedJobStatusReturn]: + """Get Job Status Bulk. + + Get Job Status Bulk. + + :keyword job_ids: Required. 
+ :paramtype job_ids: list[int] + :return: dict mapping str to LimitedJobStatusReturn + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, _models.LimitedJobStatusReturn]] = kwargs.pop( + "cls", None + ) + + _request = build_jobs_get_job_status_bulk_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_job_status_bulk( + self, + body: Dict[str, Dict[str, _models.JobStatusUpdate]], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Job Status Bulk. + + Set Job Status Bulk. + + :param body: Required. + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] + :keyword force: Default value is False. + :paramtype force: bool + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_job_status_bulk( + self, + body: IO[bytes], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Job Status Bulk. + + Set Job Status Bulk. + + :param body: Required. + :type body: IO[bytes] + :keyword force: Default value is False. + :paramtype force: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_job_status_bulk( + self, + body: Union[Dict[str, Dict[str, _models.JobStatusUpdate]], IO[bytes]], + *, + force: bool = False, + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Job Status Bulk. + + Set Job Status Bulk. + + :param body: Is either a {str: {str: JobStatusUpdate}} type or a IO[bytes] type. Required. + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] or IO[bytes] + :keyword force: Default value is False. 
+ :paramtype force: bool + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[Dict[str, _models.SetJobStatusReturn]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "{{JobStatusUpdate}}") + + _request = build_jobs_set_job_status_bulk_request( + force=force, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_job_status_history_bulk( + self, *, job_ids: List[int], **kwargs: Any + ) -> Dict[str, 
List[_models.JobStatusReturn]]: + """Get Job Status History Bulk. + + Get Job Status History Bulk. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: dict mapping str to list of JobStatusReturn + :rtype: dict[str, list[~generated.models.JobStatusReturn]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[_models.JobStatusReturn]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_status_history_bulk_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Reschedule Bulk Jobs. + + Reschedule Bulk Jobs. + + :keyword job_ids: Required. 
+ :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_reschedule_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Reschedule Single Job. + + Reschedule Single Job. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_reschedule_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def search( + self, + body: Optional[_models.JobSearchParams] = None, + *, + page: int = 1, + per_page: int = 100, + content_type: str = "application/json", + **kwargs: Any, + ) -> List[JSON]: + """Search. + + Retrieve information about jobs. + + **TODO: Add more docs**. + + :param body: Default value is None. + :type body: ~generated.models.JobSearchParams + :keyword page: Default value is 1. + :paramtype page: int + :keyword per_page: Default value is 100. + :paramtype per_page: int + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: list of JSON + :rtype: list[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def search( + self, + body: Optional[IO[bytes]] = None, + *, + page: int = 1, + per_page: int = 100, + content_type: str = "application/json", + **kwargs: Any, + ) -> List[JSON]: + """Search. + + Retrieve information about jobs. + + **TODO: Add more docs**. + + :param body: Default value is None. + :type body: IO[bytes] + :keyword page: Default value is 1. + :paramtype page: int + :keyword per_page: Default value is 100. + :paramtype per_page: int + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: list of JSON + :rtype: list[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def search( + self, + body: Optional[Union[_models.JobSearchParams, IO[bytes]]] = None, + *, + page: int = 1, + per_page: int = 100, + **kwargs: Any, + ) -> List[JSON]: + """Search. + + Retrieve information about jobs. + + **TODO: Add more docs**. + + :param body: Is either a JobSearchParams type or a IO[bytes] type. Default value is None. + :type body: ~generated.models.JobSearchParams or IO[bytes] + :keyword page: Default value is 1. + :paramtype page: int + :keyword per_page: Default value is 100. 
+ :paramtype per_page: int + :return: list of JSON + :rtype: list[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[List[JSON]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "JobSearchParams") + else: + _json = None + + _request = build_jobs_search_request( + page=page, + per_page=per_page, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + response_headers = {} + if response.status_code == 206: + response_headers["Content-Range"] = self._deserialize( + "str", response.headers.get("Content-Range") + ) + + deserialized = self._deserialize("[object]", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def 
summary( + self, + body: _models.JobSummaryParams, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> Any: + """Summary. + + Show information suitable for plotting. + + :param body: Required. + :type body: ~generated.models.JobSummaryParams + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def summary( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> Any: + """Summary. + + Show information suitable for plotting. + + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def summary( + self, body: Union[_models.JobSummaryParams, IO[bytes]], **kwargs: Any + ) -> Any: + """Summary. + + Show information suitable for plotting. + + :param body: Is either a JobSummaryParams type or a IO[bytes] type. Required. 
+ :type body: ~generated.models.JobSummaryParams or IO[bytes] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "JobSummaryParams") + + _request = build_jobs_summary_request( + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Get Single Job. + + Get Single Job. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_get_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Delete Single Job. + + Delete a job by killing and setting the job status to DELETED. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_delete_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def set_single_job_properties( + self, job_id: int, body: JSON, *, update_timestamp: bool = False, **kwargs: Any + ) -> Any: + """Set Single Job Properties. + + Update the given job properties (MinorStatus, ApplicationStatus, etc). + + :param job_id: Required. + :type job_id: int + :param body: Required. + :type body: JSON + :keyword update_timestamp: Default value is False. 
+ :paramtype update_timestamp: bool + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("Content-Type", "application/json") + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + _json = self._serialize.body(body, "object") + + _request = build_jobs_set_single_job_properties_request( + job_id=job_id, + update_timestamp=update_timestamp, + content_type=content_type, + json=_json, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def kill_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Kill Single Job. + + Kill a job. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_kill_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Remove Single Job. + + Fully remove a job from the WMS databases. + + WARNING: This endpoint has been implemented for the compatibility with the legacy DIRAC WMS + and the JobCleaningAgent. However, once this agent is ported to diracx, this endpoint should + be removed, and the delete endpoint should be used instead. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_remove_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_single_job_status( + self, job_id: int, **kwargs: Any + ) -> Dict[str, _models.LimitedJobStatusReturn]: + """Get Single Job Status. + + Get Single Job Status. + + :param job_id: Required. 
+ :type job_id: int + :return: dict mapping str to LimitedJobStatusReturn + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, _models.LimitedJobStatusReturn]] = kwargs.pop( + "cls", None + ) + + _request = build_jobs_get_single_job_status_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def set_single_job_status( + self, + job_id: int, + body: Dict[str, _models.JobStatusUpdate], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Single Job Status. + + Set Single Job Status. + + :param job_id: Required. + :type job_id: int + :param body: Required. + :type body: dict[str, ~generated.models.JobStatusUpdate] + :keyword force: Default value is False. 
+ :paramtype force: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def set_single_job_status( + self, + job_id: int, + body: IO[bytes], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Single Job Status. + + Set Single Job Status. + + :param job_id: Required. + :type job_id: int + :param body: Required. + :type body: IO[bytes] + :keyword force: Default value is False. + :paramtype force: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def set_single_job_status( + self, + job_id: int, + body: Union[Dict[str, _models.JobStatusUpdate], IO[bytes]], + *, + force: bool = False, + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Single Job Status. + + Set Single Job Status. + + :param job_id: Required. + :type job_id: int + :param body: Is either a {str: JobStatusUpdate} type or a IO[bytes] type. Required. + :type body: dict[str, ~generated.models.JobStatusUpdate] or IO[bytes] + :keyword force: Default value is False. 
+ :paramtype force: bool + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[Dict[str, _models.SetJobStatusReturn]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "{JobStatusUpdate}") + + _request = build_jobs_set_single_job_status_request( + job_id=job_id, + force=force, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_single_job_status_history( + self, job_id: int, **kwargs: Any + ) -> Dict[str, 
List[_models.JobStatusReturn]]: + """Get Single Job Status History. + + Get Single Job Status History. + + :param job_id: Required. + :type job_id: int + :return: dict mapping str to list of JobStatusReturn + :rtype: dict[str, list[~generated.models.JobStatusReturn]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[_models.JobStatusReturn]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_single_job_status_history_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/diracx-client/src/diracx/client/aio/operations/_patch.py b/diracx-client/src/diracx/client/generated/aio/operations/_patch.py similarity index 100% rename from diracx-client/src/diracx/client/aio/operations/_patch.py rename to diracx-client/src/diracx/client/generated/aio/operations/_patch.py diff --git a/diracx-client/src/diracx/client/models/__init__.py 
b/diracx-client/src/diracx/client/generated/models/__init__.py similarity index 96% rename from diracx-client/src/diracx/client/models/__init__.py rename to diracx-client/src/diracx/client/generated/models/__init__.py index 7cd1643a..ab8c7e13 100644 --- a/diracx-client/src/diracx/client/models/__init__.py +++ b/diracx-client/src/diracx/client/generated/models/__init__.py @@ -1,11 +1,12 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from ._models import BodyAuthToken from ._models import BodyAuthTokenGrantType +from ._models import DevelopmentSettings from ._models import GroupInfo from ._models import HTTPValidationError from ._models import InitiateDeviceFlowResponse @@ -53,6 +54,7 @@ __all__ = [ "BodyAuthToken", "BodyAuthTokenGrantType", + "DevelopmentSettings", "GroupInfo", "HTTPValidationError", "InitiateDeviceFlowResponse", diff --git a/diracx-client/src/diracx/client/models/_enums.py b/diracx-client/src/diracx/client/generated/models/_enums.py similarity index 97% rename from diracx-client/src/diracx/client/models/_enums.py rename to diracx-client/src/diracx/client/generated/models/_enums.py index 935cbb17..32076969 100644 --- a/diracx-client/src/diracx/client/models/_enums.py +++ b/diracx-client/src/diracx/client/generated/models/_enums.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 
3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -99,5 +99,5 @@ class SortDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): class VectorSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): """VectorSearchOperator.""" - IN_ENUM = "in" + IN = "in" NOT_IN = "not in" diff --git a/diracx-client/src/diracx/client/models/_models.py b/diracx-client/src/diracx/client/generated/models/_models.py similarity index 89% rename from diracx-client/src/diracx/client/models/_models.py rename to diracx-client/src/diracx/client/generated/models/_models.py index 23833265..e19faec2 100644 --- a/diracx-client/src/diracx/client/models/_models.py +++ b/diracx-client/src/diracx/client/generated/models/_models.py @@ -1,7 +1,7 @@ # coding=utf-8 # pylint: disable=too-many-lines # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -28,7 +28,7 @@ class BodyAuthToken(_serialization.Model): All required parameters must be populated in order to send to server. :ivar grant_type: OAuth2 Grant type. Required. - :vartype grant_type: ~client.models.BodyAuthTokenGrantType + :vartype grant_type: ~generated.models.BodyAuthTokenGrantType :ivar client_id: OAuth2 client id. Required. :vartype client_id: str :ivar device_code: device code for OAuth2 device flow. @@ -73,7 +73,7 @@ def __init__( ) -> None: """ :keyword grant_type: OAuth2 Grant type. Required. 
- :paramtype grant_type: ~client.models.BodyAuthTokenGrantType + :paramtype grant_type: ~generated.models.BodyAuthTokenGrantType :keyword client_id: OAuth2 client id. Required. :paramtype client_id: str :keyword device_code: device code for OAuth2 device flow. @@ -102,6 +102,31 @@ class BodyAuthTokenGrantType(_serialization.Model): """OAuth2 Grant type.""" +class DevelopmentSettings(_serialization.Model): + """Settings for the Development Configuration that can influence run time. + + :ivar crash_on_missed_access_policy: Crash On Missed Access Policy. + :vartype crash_on_missed_access_policy: bool + """ + + _attribute_map = { + "crash_on_missed_access_policy": { + "key": "crash_on_missed_access_policy", + "type": "bool", + }, + } + + def __init__( + self, *, crash_on_missed_access_policy: bool = False, **kwargs: Any + ) -> None: + """ + :keyword crash_on_missed_access_policy: Crash On Missed Access Policy. + :paramtype crash_on_missed_access_policy: bool + """ + super().__init__(**kwargs) + self.crash_on_missed_access_policy = crash_on_missed_access_policy + + class GroupInfo(_serialization.Model): """GroupInfo. @@ -132,7 +157,7 @@ class HTTPValidationError(_serialization.Model): """HTTPValidationError. :ivar detail: Detail. - :vartype detail: list[~client.models.ValidationError] + :vartype detail: list[~generated.models.ValidationError] """ _attribute_map = { @@ -144,7 +169,7 @@ def __init__( ) -> None: """ :keyword detail: Detail. - :paramtype detail: list[~client.models.ValidationError] + :paramtype detail: list[~generated.models.ValidationError] """ super().__init__(**kwargs) self.detail = detail @@ -277,9 +302,9 @@ class JobSearchParams(_serialization.Model): :ivar parameters: Parameters. :vartype parameters: list[str] :ivar search: Search. - :vartype search: list[~client.models.JobSearchParamsSearchItem] + :vartype search: list[~generated.models.JobSearchParamsSearchItem] :ivar sort: Sort. 
- :vartype sort: list[~client.models.SortSpec] + :vartype sort: list[~generated.models.SortSpec] :ivar distinct: Distinct. :vartype distinct: bool """ @@ -304,9 +329,9 @@ def __init__( :keyword parameters: Parameters. :paramtype parameters: list[str] :keyword search: Search. - :paramtype search: list[~client.models.JobSearchParamsSearchItem] + :paramtype search: list[~generated.models.JobSearchParamsSearchItem] :keyword sort: Sort. - :paramtype sort: list[~client.models.SortSpec] + :paramtype sort: list[~generated.models.SortSpec] :keyword distinct: Distinct. :paramtype distinct: bool """ @@ -329,7 +354,7 @@ class JobStatusReturn(_serialization.Model): :ivar status: JobStatus. Required. Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". - :vartype status: str or ~client.models.JobStatus + :vartype status: str or ~generated.models.JobStatus :ivar minor_status: Minorstatus. Required. :vartype minor_status: str :ivar application_status: Applicationstatus. Required. @@ -370,7 +395,7 @@ def __init__( :keyword status: JobStatus. Required. Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". - :paramtype status: str or ~client.models.JobStatus + :paramtype status: str or ~generated.models.JobStatus :keyword minor_status: Minorstatus. Required. :paramtype minor_status: str :keyword application_status: Applicationstatus. Required. @@ -394,7 +419,7 @@ class JobStatusUpdate(_serialization.Model): :ivar status: JobStatus. Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". 
- :vartype status: str or ~client.models.JobStatus + :vartype status: str or ~generated.models.JobStatus :ivar minor_status: Minorstatus. :vartype minor_status: str :ivar application_status: Applicationstatus. @@ -423,7 +448,7 @@ def __init__( :keyword status: JobStatus. Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". - :paramtype status: str or ~client.models.JobStatus + :paramtype status: str or ~generated.models.JobStatus :keyword minor_status: Minorstatus. :paramtype minor_status: str :keyword application_status: Applicationstatus. @@ -446,7 +471,7 @@ class JobSummaryParams(_serialization.Model): :ivar grouping: Grouping. Required. :vartype grouping: list[str] :ivar search: Search. - :vartype search: list[~client.models.JobSummaryParamsSearchItem] + :vartype search: list[~generated.models.JobSummaryParamsSearchItem] """ _validation = { @@ -469,7 +494,7 @@ def __init__( :keyword grouping: Grouping. Required. :paramtype grouping: list[str] :keyword search: Search. - :paramtype search: list[~client.models.JobSummaryParamsSearchItem] + :paramtype search: list[~generated.models.JobSummaryParamsSearchItem] """ super().__init__(**kwargs) self.grouping = grouping @@ -488,7 +513,7 @@ class LimitedJobStatusReturn(_serialization.Model): :ivar status: JobStatus. Required. Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". - :vartype status: str or ~client.models.JobStatus + :vartype status: str or ~generated.models.JobStatus :ivar minor_status: Minorstatus. Required. :vartype minor_status: str :ivar application_status: Applicationstatus. Required. @@ -519,7 +544,7 @@ def __init__( :keyword status: JobStatus. Required. 
Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". - :paramtype status: str or ~client.models.JobStatus + :paramtype status: str or ~generated.models.JobStatus :keyword minor_status: Minorstatus. Required. :paramtype minor_status: str :keyword application_status: Applicationstatus. Required. @@ -537,26 +562,42 @@ class Metadata(_serialization.Model): All required parameters must be populated in order to send to server. :ivar virtual_organizations: Virtual Organizations. Required. - :vartype virtual_organizations: dict[str, ~client.models.VOInfo] + :vartype virtual_organizations: dict[str, ~generated.models.VOInfo] + :ivar development_settings: Settings for the Development Configuration that can influence run + time. Required. + :vartype development_settings: ~generated.models.DevelopmentSettings """ _validation = { "virtual_organizations": {"required": True}, + "development_settings": {"required": True}, } _attribute_map = { "virtual_organizations": {"key": "virtual_organizations", "type": "{VOInfo}"}, + "development_settings": { + "key": "development_settings", + "type": "DevelopmentSettings", + }, } def __init__( - self, *, virtual_organizations: Dict[str, "_models.VOInfo"], **kwargs: Any + self, + *, + virtual_organizations: Dict[str, "_models.VOInfo"], + development_settings: "_models.DevelopmentSettings", + **kwargs: Any, ) -> None: """ :keyword virtual_organizations: Virtual Organizations. Required. - :paramtype virtual_organizations: dict[str, ~client.models.VOInfo] + :paramtype virtual_organizations: dict[str, ~generated.models.VOInfo] + :keyword development_settings: Settings for the Development Configuration that can influence + run time. Required. 
+ :paramtype development_settings: ~generated.models.DevelopmentSettings """ super().__init__(**kwargs) self.virtual_organizations = virtual_organizations + self.development_settings = development_settings class SandboxDownloadResponse(_serialization.Model): @@ -598,18 +639,18 @@ class SandboxInfo(_serialization.Model): All required parameters must be populated in order to send to server. :ivar checksum_algorithm: ChecksumAlgorithm. Required. "sha256" - :vartype checksum_algorithm: str or ~client.models.ChecksumAlgorithm + :vartype checksum_algorithm: str or ~generated.models.ChecksumAlgorithm :ivar checksum: Checksum. Required. :vartype checksum: str :ivar size: Size. Required. :vartype size: int :ivar format: SandboxFormat. Required. "tar.bz2" - :vartype format: str or ~client.models.SandboxFormat + :vartype format: str or ~generated.models.SandboxFormat """ _validation = { "checksum_algorithm": {"required": True}, - "checksum": {"required": True, "pattern": r"^[0-f]{64}$"}, + "checksum": {"required": True, "pattern": r"^[0-9a-fA-F]{64}$"}, "size": {"required": True, "minimum": 1}, "format": {"required": True}, } @@ -632,13 +673,13 @@ def __init__( ) -> None: """ :keyword checksum_algorithm: ChecksumAlgorithm. Required. "sha256" - :paramtype checksum_algorithm: str or ~client.models.ChecksumAlgorithm + :paramtype checksum_algorithm: str or ~generated.models.ChecksumAlgorithm :keyword checksum: Checksum. Required. :paramtype checksum: str :keyword size: Size. Required. :paramtype size: int :keyword format: SandboxFormat. Required. "tar.bz2" - :paramtype format: str or ~client.models.SandboxFormat + :paramtype format: str or ~generated.models.SandboxFormat """ super().__init__(**kwargs) self.checksum_algorithm = checksum_algorithm @@ -701,9 +742,9 @@ class ScalarSearchSpec(_serialization.Model): :vartype parameter: str :ivar operator: ScalarSearchOperator. Required. Known values are: "eq", "neq", "gt", "lt", and "like". 
- :vartype operator: str or ~client.models.ScalarSearchOperator + :vartype operator: str or ~generated.models.ScalarSearchOperator :ivar value: Value. Required. - :vartype value: ~client.models.ScalarSearchSpecValue + :vartype value: ~generated.models.ScalarSearchSpecValue """ _validation = { @@ -731,9 +772,9 @@ def __init__( :paramtype parameter: str :keyword operator: ScalarSearchOperator. Required. Known values are: "eq", "neq", "gt", "lt", and "like". - :paramtype operator: str or ~client.models.ScalarSearchOperator + :paramtype operator: str or ~generated.models.ScalarSearchOperator :keyword value: Value. Required. - :paramtype value: ~client.models.ScalarSearchSpecValue + :paramtype value: ~generated.models.ScalarSearchSpecValue """ super().__init__(**kwargs) self.parameter = parameter @@ -751,7 +792,7 @@ class SetJobStatusReturn(_serialization.Model): :ivar status: JobStatus. Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". - :vartype status: str or ~client.models.JobStatus + :vartype status: str or ~generated.models.JobStatus :ivar minor_status: Minorstatus. :vartype minor_status: str :ivar application_status: Applicationstatus. @@ -792,7 +833,7 @@ def __init__( :keyword status: JobStatus. Known values are: "Submitting", "Received", "Checking", "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed", "Deleted", "Killed", and "Rescheduled". - :paramtype status: str or ~client.models.JobStatus + :paramtype status: str or ~generated.models.JobStatus :keyword minor_status: Minorstatus. :paramtype minor_status: str :keyword application_status: Applicationstatus. @@ -824,7 +865,7 @@ class SortSpec(_serialization.Model): :ivar parameter: Parameter. Required. :vartype parameter: str :ivar direction: SortDirection. Required. Known values are: "asc" and "desc". 
- :vartype direction: str or ~client.models.SortDirection + :vartype direction: str or ~generated.models.SortDirection """ _validation = { @@ -848,7 +889,7 @@ def __init__( :keyword parameter: Parameter. Required. :paramtype parameter: str :keyword direction: SortDirection. Required. Known values are: "asc" and "desc". - :paramtype direction: str or ~client.models.SortDirection + :paramtype direction: str or ~generated.models.SortDirection """ super().__init__(**kwargs) self.parameter = parameter @@ -1027,7 +1068,7 @@ class ValidationError(_serialization.Model): All required parameters must be populated in order to send to server. :ivar loc: Location. Required. - :vartype loc: list[~client.models.ValidationErrorLocItem] + :vartype loc: list[~generated.models.ValidationErrorLocItem] :ivar msg: Message. Required. :vartype msg: str :ivar type: Error Type. Required. @@ -1056,7 +1097,7 @@ def __init__( ) -> None: """ :keyword loc: Location. Required. - :paramtype loc: list[~client.models.ValidationErrorLocItem] + :paramtype loc: list[~generated.models.ValidationErrorLocItem] :keyword msg: Message. Required. :paramtype msg: str :keyword type: Error Type. Required. @@ -1080,9 +1121,9 @@ class VectorSearchSpec(_serialization.Model): :ivar parameter: Parameter. Required. :vartype parameter: str :ivar operator: VectorSearchOperator. Required. Known values are: "in" and "not in". - :vartype operator: str or ~client.models.VectorSearchOperator + :vartype operator: str or ~generated.models.VectorSearchOperator :ivar values: Values. Required. - :vartype values: ~client.models.VectorSearchSpecValues + :vartype values: ~generated.models.VectorSearchSpecValues """ _validation = { @@ -1109,9 +1150,9 @@ def __init__( :keyword parameter: Parameter. Required. :paramtype parameter: str :keyword operator: VectorSearchOperator. Required. Known values are: "in" and "not in". 
- :paramtype operator: str or ~client.models.VectorSearchOperator + :paramtype operator: str or ~generated.models.VectorSearchOperator :keyword values: Values. Required. - :paramtype values: ~client.models.VectorSearchSpecValues + :paramtype values: ~generated.models.VectorSearchSpecValues """ super().__init__(**kwargs) self.parameter = parameter @@ -1129,9 +1170,9 @@ class VOInfo(_serialization.Model): All required parameters must be populated in order to send to server. :ivar groups: Groups. Required. - :vartype groups: dict[str, ~client.models.GroupInfo] + :vartype groups: dict[str, ~generated.models.GroupInfo] :ivar support: SupportInfo. Required. - :vartype support: ~client.models.SupportInfo + :vartype support: ~generated.models.SupportInfo :ivar default_group: Default Group. Required. :vartype default_group: str """ @@ -1158,9 +1199,9 @@ def __init__( ) -> None: """ :keyword groups: Groups. Required. - :paramtype groups: dict[str, ~client.models.GroupInfo] + :paramtype groups: dict[str, ~generated.models.GroupInfo] :keyword support: SupportInfo. Required. - :paramtype support: ~client.models.SupportInfo + :paramtype support: ~generated.models.SupportInfo :keyword default_group: Default Group. Required. 
:paramtype default_group: str """ diff --git a/diracx-client/src/diracx/client/models/_patch.py b/diracx-client/src/diracx/client/generated/models/_patch.py similarity index 100% rename from diracx-client/src/diracx/client/models/_patch.py rename to diracx-client/src/diracx/client/generated/models/_patch.py diff --git a/diracx-client/src/diracx/client/operations/__init__.py b/diracx-client/src/diracx/client/generated/operations/__init__.py similarity index 94% rename from diracx-client/src/diracx/client/operations/__init__.py rename to diracx-client/src/diracx/client/generated/operations/__init__.py index eb877968..66a084b1 100644 --- a/diracx-client/src/diracx/client/operations/__init__.py +++ b/diracx-client/src/diracx/client/generated/operations/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/generated/operations/_operations.py b/diracx-client/src/diracx/client/generated/operations/_operations.py new file mode 100644 index 00000000..61747f2a --- /dev/null +++ b/diracx-client/src/diracx/client/generated/operations/_operations.py @@ -0,0 +1,3728 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.23.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import ( + Any, + Callable, + Dict, + IO, + List, + Optional, + Type, + TypeVar, + Union, + overload, +) + +from azure.core import MatchConditions +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import prep_if_match, prep_if_none_match, raise_if_not_implemented + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[ + Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any] +] +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_well_known_openid_configuration_request( + **kwargs: Any, +) -> HttpRequest: # pylint: disable=name-too-long + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/.well-known/openid-configuration" + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_well_known_installation_metadata_request( + **kwargs: Any, +) -> HttpRequest: # pylint: disable=name-too-long + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = 
_headers.pop("Accept", "application/json") + + # Construct URL + _url = "/.well-known/dirac-metadata" + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_auth_initiate_device_flow_request( + *, client_id: str, scope: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/device" + + # Construct parameters + _params["client_id"] = _SERIALIZER.query("client_id", client_id, "str") + _params["scope"] = _SERIALIZER.query("scope", scope, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="POST", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_auth_do_device_flow_request(*, user_code: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/device" + + # Construct parameters + _params["user_code"] = _SERIALIZER.query("user_code", user_code, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_auth_finish_device_flow_request( + *, code: str, state: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/device/complete" + + # Construct parameters + _params["code"] = 
_SERIALIZER.query("code", code, "str") + _params["state"] = _SERIALIZER.query("state", state, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_auth_finished_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/device/complete/finished" + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_auth_get_refresh_tokens_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/refresh-tokens" + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_auth_revoke_refresh_token_request(jti: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/refresh-tokens/{jti}" + path_format_arguments = { + "jti": _SERIALIZER.url("jti", jti, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, headers=_headers, **kwargs) + + +def build_auth_userinfo_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/userinfo" + + # Construct headers + 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_auth_authorization_flow_request( + *, + response_type: Union[str, _models.Enum0], + code_challenge: str, + code_challenge_method: Union[str, _models.Enum1], + client_id: str, + redirect_uri: str, + scope: str, + state: str, + **kwargs: Any, +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/authorize" + + # Construct parameters + _params["response_type"] = _SERIALIZER.query("response_type", response_type, "str") + _params["code_challenge"] = _SERIALIZER.query( + "code_challenge", code_challenge, "str" + ) + _params["code_challenge_method"] = _SERIALIZER.query( + "code_challenge_method", code_challenge_method, "str" + ) + _params["client_id"] = _SERIALIZER.query("client_id", client_id, "str") + _params["redirect_uri"] = _SERIALIZER.query("redirect_uri", redirect_uri, "str") + _params["scope"] = _SERIALIZER.query("scope", scope, "str") + _params["state"] = _SERIALIZER.query("state", state, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_auth_authorization_flow_complete_request( # pylint: disable=name-too-long + *, code: str, state: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/auth/authorize/complete" + + # Construct parameters + _params["code"] = _SERIALIZER.query("code", code, "str") + _params["state"] = _SERIALIZER.query("state", state, "str") + 
+ # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_config_serve_config_request( + *, + if_modified_since: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any, +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/config/" + + # Construct headers + if if_modified_since is not None: + _headers["if-modified-since"] = _SERIALIZER.header( + "if_modified_since", if_modified_since, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header( + "if_none_match", if_none_match, "str" + ) + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_jobs_initiate_sandbox_upload_request( + **kwargs: Any, +) -> HttpRequest: # pylint: disable=name-too-long + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/sandbox" + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_jobs_get_sandbox_file_request(*, pfn: str, **kwargs: Any) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/sandbox" + + # Construct parameters + _params["pfn"] = _SERIALIZER.query( + "pfn", + pfn, + "str", + max_length=256, + pattern=r"^(:?SB:[A-Za-z]+\|)?/S3/[a-z0-9\.\-]{3,63}(?:/[^/]+){3}/[a-z0-9]{3,10}:[0-9a-f]{64}\.[a-z0-9\.]+$", + ) + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_unassign_bulk_jobs_sandboxes_request( # pylint: disable=name-too-long + *, jobs_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/sandbox" + + # Construct parameters + _params["jobs_ids"] = _SERIALIZER.query("jobs_ids", jobs_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="DELETE", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_get_job_sandboxes_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/sandbox" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_jobs_unassign_job_sandboxes_request( # pylint: disable=name-too-long + 
job_id: int, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/sandbox" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, headers=_headers, **kwargs) + + +def build_jobs_get_job_sandbox_request( + job_id: int, sandbox_type: Union[str, _models.SandboxType], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/sandbox/{sandbox_type}" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + "sandbox_type": _SERIALIZER.url("sandbox_type", sandbox_type, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_jobs_assign_sandbox_to_job_request( + job_id: int, *, content: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/sandbox/output" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="PATCH", url=_url, headers=_headers, content=content, **kwargs + ) + + +def build_jobs_submit_bulk_jobs_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/" + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_jobs_delete_bulk_jobs_request( + *, job_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/" + + # Construct parameters + _params["job_ids"] = _SERIALIZER.query("job_ids", job_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="DELETE", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_kill_bulk_jobs_request( + *, job_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/kill" + + # Construct parameters + _params["job_ids"] = _SERIALIZER.query("job_ids", job_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="POST", url=_url, 
params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_remove_bulk_jobs_request( + *, job_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/remove" + + # Construct parameters + _params["job_ids"] = _SERIALIZER.query("job_ids", job_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="POST", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_get_job_status_bulk_request( + *, job_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/status" + + # Construct parameters + _params["job_ids"] = _SERIALIZER.query("job_ids", job_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_set_job_status_bulk_request( + *, force: bool = False, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/status" + + # Construct parameters + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", 
content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="PATCH", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_get_job_status_history_bulk_request( # pylint: disable=name-too-long + *, job_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/status/history" + + # Construct parameters + _params["job_ids"] = _SERIALIZER.query("job_ids", job_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_reschedule_bulk_jobs_request( + *, job_ids: List[int], **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/reschedule" + + # Construct parameters + _params["job_ids"] = _SERIALIZER.query("job_ids", job_ids, "[int]") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="POST", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_reschedule_single_job_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/reschedule" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_jobs_search_request( + *, page: int = 1, per_page: int = 100, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/search" + + # Construct parameters + if page is not None: + _params["page"] = _SERIALIZER.query("page", page, "int") + if per_page is not None: + _params["per_page"] = _SERIALIZER.query("per_page", per_page, "int") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="POST", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_summary_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/summary" + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_jobs_get_single_job_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}" + 
path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_jobs_delete_single_job_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, headers=_headers, **kwargs) + + +def build_jobs_set_single_job_properties_request( # pylint: disable=name-too-long + job_id: int, *, json: JSON, update_timestamp: bool = False, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if update_timestamp is not None: + _params["update_timestamp"] = _SERIALIZER.query( + "update_timestamp", update_timestamp, "bool" + ) + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + 
method="PATCH", url=_url, params=_params, headers=_headers, json=json, **kwargs + ) + + +def build_jobs_kill_single_job_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/kill" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_jobs_remove_single_job_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/remove" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_jobs_get_single_job_status_request(job_id: int, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/status" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +def build_jobs_set_single_job_status_request( + job_id: int, *, force: bool = False, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/status" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="PATCH", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_get_single_job_status_history_request( # pylint: disable=name-too-long + job_id: int, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/jobs/{job_id}/status/history" + path_format_arguments = { + "job_id": _SERIALIZER.url("job_id", job_id, "int"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + + +class WellKnownOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.Dirac`'s + :attr:`well_known` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @distributed_trace + def openid_configuration(self, **kwargs: Any) -> Any: + """Openid Configuration. + + OpenID Connect discovery endpoint. + + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_well_known_openid_configuration_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def installation_metadata(self, **kwargs: Any) -> _models.Metadata: + """Installation Metadata. 
+ + Get metadata about the dirac installation. + + :return: Metadata + :rtype: ~generated.models.Metadata + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Metadata] = kwargs.pop("cls", None) + + _request = build_well_known_installation_metadata_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("Metadata", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class AuthOperations: # pylint: disable=abstract-class-instantiated + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.Dirac`'s + :attr:`auth` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + raise_if_not_implemented( + self.__class__, + [ + "token", + ], + ) + + @distributed_trace + def initiate_device_flow( + self, *, client_id: str, scope: str, **kwargs: Any + ) -> _models.InitiateDeviceFlowResponse: + """Initiate Device Flow. + + Initiate the device flow against DIRAC authorization Server. + Scope must have exactly up to one ``group`` (otherwise default) and + one or more ``property`` scope. + If no property, then get default one. + + Offers the user to go with the browser to + ``auth//device?user_code=XYZ``. + + :keyword client_id: Required. + :paramtype client_id: str + :keyword scope: Required. 
+ :paramtype scope: str + :return: InitiateDeviceFlowResponse + :rtype: ~generated.models.InitiateDeviceFlowResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.InitiateDeviceFlowResponse] = kwargs.pop("cls", None) + + _request = build_auth_initiate_device_flow_request( + client_id=client_id, + scope=scope, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "InitiateDeviceFlowResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: + """Do Device Flow. + + This is called as the verification URI for the device flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to + perform a authorization code flow. + + We set the user_code obtained from the device flow in a cookie + to be able to map the authorization flow with the corresponding + device flow. + (note: it can't be put as parameter or in the URL). + + :keyword user_code: Required. 
+ :paramtype user_code: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_do_device_flow_request( + user_code=user_code, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: + """Finish Device Flow. + + This the url callbacked by IAM/Checkin after the authorization + flow was granted. + It gets us the code we need for the authorization flow, and we + can map it to the corresponding device flow using the user_code + in the cookie/session. + + :keyword code: Required. + :paramtype code: str + :keyword state: Required. 
+ :paramtype state: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_finish_device_flow_request( + code=code, + state=state, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def finished(self, **kwargs: Any) -> Any: + """Finished. + + This is the final step of the device flow. 
+ + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_finished_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: + """Get Refresh Tokens. + + Get all refresh tokens for the user. If the user has the ``proxy_management`` property, then + the subject is not used to filter the refresh tokens. 
+ + :return: list of any + :rtype: list[any] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[Any]] = kwargs.pop("cls", None) + + _request = build_auth_get_refresh_tokens_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("[object]", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: + """Revoke Refresh Token. + + Revoke a refresh token. If the user has the ``proxy_management`` property, then + the subject is not used to filter the refresh tokens. + + :param jti: Required. 
+ :type jti: str + :return: str + :rtype: str + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[str] = kwargs.pop("cls", None) + + _request = build_auth_revoke_refresh_token_request( + jti=jti, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("str", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: + """Userinfo. + + Get information about the user's identity. 
+ + :return: UserInfoResponse + :rtype: ~generated.models.UserInfoResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.UserInfoResponse] = kwargs.pop("cls", None) + + _request = build_auth_userinfo_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "UserInfoResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def authorization_flow( + self, + *, + response_type: Union[str, _models.Enum0], + code_challenge: str, + code_challenge_method: Union[str, _models.Enum1], + client_id: str, + redirect_uri: str, + scope: str, + state: str, + **kwargs: Any, + ) -> Any: + """Authorization Flow. + + Initiate the authorization flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to + perform a authorization code flow. + + We set the user details obtained from the user authorize flow in a cookie + to be able to map the authorization flow with the corresponding + user authorize flow. + + :keyword response_type: "code" Required. 
+ :paramtype response_type: str or ~generated.models.Enum0 + :keyword code_challenge: Required. + :paramtype code_challenge: str + :keyword code_challenge_method: "S256" Required. + :paramtype code_challenge_method: str or ~generated.models.Enum1 + :keyword client_id: Required. + :paramtype client_id: str + :keyword redirect_uri: Required. + :paramtype redirect_uri: str + :keyword scope: Required. + :paramtype scope: str + :keyword state: Required. + :paramtype state: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_authorization_flow_request( + response_type=response_type, + code_challenge=code_challenge, + code_challenge_method=code_challenge_method, + client_id=client_id, + redirect_uri=redirect_uri, + scope=scope, + state=state, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def authorization_flow_complete( + self, 
*, code: str, state: str, **kwargs: Any + ) -> Any: + """Authorization Flow Complete. + + Complete the authorization flow. + + The user is redirected back to the DIRAC auth service after completing the IAM's authorization + flow. + We retrieve the original flow details from the decrypted state and store the ID token requested + from the IAM. + The user is then redirected to the client's redirect URI. + + :keyword code: Required. + :paramtype code: str + :keyword state: Required. + :paramtype state: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_auth_authorization_flow_complete_request( + code=code, + state=state, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class ConfigOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~generated.Dirac`'s + :attr:`config` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @distributed_trace + def serve_config( + self, + *, + if_modified_since: Optional[str] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any, + ) -> Any: + """Serve Config. + + Get the latest view of the config. + + If If-None-Match header is given and matches the latest ETag, return 304 + + If If-Modified-Since is given and is newer than latest, + return 304: this is to avoid flip/flopping. + + :keyword if_modified_since: Default value is None. + :paramtype if_modified_since: str + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_config_serve_config_request( + if_modified_since=if_modified_since, + etag=etag, + match_condition=match_condition, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class JobsOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.Dirac`'s + :attr:`jobs` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @overload + def initiate_sandbox_upload( + self, + body: _models.SandboxInfo, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> _models.SandboxUploadResponse: + """Initiate Sandbox Upload. + + Get the PFN for the given sandbox, initiate an upload as required. + + If the sandbox already exists in the database then the PFN is returned + and there is no "url" field in the response. + + If the sandbox does not exist in the database then the "url" and "fields" + should be used to upload the sandbox to the storage backend. + + :param body: Required. + :type body: ~generated.models.SandboxInfo + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def initiate_sandbox_upload( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.SandboxUploadResponse: + """Initiate Sandbox Upload. + + Get the PFN for the given sandbox, initiate an upload as required. + + If the sandbox already exists in the database then the PFN is returned + and there is no "url" field in the response. + + If the sandbox does not exist in the database then the "url" and "fields" + should be used to upload the sandbox to the storage backend. + + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def initiate_sandbox_upload( + self, body: Union[_models.SandboxInfo, IO[bytes]], **kwargs: Any + ) -> _models.SandboxUploadResponse: + """Initiate Sandbox Upload. + + Get the PFN for the given sandbox, initiate an upload as required. + + If the sandbox already exists in the database then the PFN is returned + and there is no "url" field in the response. + + If the sandbox does not exist in the database then the "url" and "fields" + should be used to upload the sandbox to the storage backend. + + :param body: Is either a SandboxInfo type or a IO[bytes] type. Required. + :type body: ~generated.models.SandboxInfo or IO[bytes] + :return: SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[_models.SandboxUploadResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "SandboxInfo") + + _request = build_jobs_initiate_sandbox_upload_request( + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) 
+ _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "SandboxUploadResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_sandbox_file( + self, *, pfn: str, **kwargs: Any + ) -> _models.SandboxDownloadResponse: + """Get Sandbox File. + + Get a presigned URL to download a sandbox file. + + This route cannot use a redirect response most clients will also send the + authorization header when following a redirect. This is not desirable as + it would leak the authorization token to the storage backend. Additionally, + most storage backends return an error when they receive an authorization + header for a presigned URL. + + :keyword pfn: Required. 
+ :paramtype pfn: str + :return: SandboxDownloadResponse + :rtype: ~generated.models.SandboxDownloadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.SandboxDownloadResponse] = kwargs.pop("cls", None) + + _request = build_jobs_get_sandbox_file_request( + pfn=pfn, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "SandboxDownloadResponse", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def unassign_bulk_jobs_sandboxes( + self, *, jobs_ids: List[int], **kwargs: Any + ) -> Any: + """Unassign Bulk Jobs Sandboxes. + + Delete bulk jobs sandbox mapping. + + :keyword jobs_ids: Required. 
+ :paramtype jobs_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_bulk_jobs_sandboxes_request( + jobs_ids=jobs_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_job_sandboxes(self, job_id: int, **kwargs: Any) -> Dict[str, List[Any]]: + """Get Job Sandboxes. + + Get input and output sandboxes of given job. + + :param job_id: Required. 
+ :type job_id: int + :return: dict mapping str to list of any + :rtype: dict[str, list[any]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[Any]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("{[object]}", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: + """Unassign Job Sandboxes. + + Delete single job sandbox mapping. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_unassign_job_sandboxes_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_job_sandbox( + self, job_id: int, sandbox_type: Union[str, _models.SandboxType], **kwargs: Any + ) -> List[Any]: + """Get Job Sandbox. + + Get input or output sandbox of given job. + + :param job_id: Required. + :type job_id: int + :param sandbox_type: Known values are: "input" and "output". Required. 
+ :type sandbox_type: str or ~generated.models.SandboxType + :return: list of any + :rtype: list[any] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[Any]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_sandbox_request( + job_id=job_id, + sandbox_type=sandbox_type, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("[object]", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> Any: + """Assign Sandbox To Job. + + Map the pfn as output sandbox to job. + + :param job_id: Required. + :type job_id: int + :param body: Required. 
+ :type body: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("Content-Type", "application/json") + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + _content = self._serialize.body(body, "str") + + _request = build_jobs_assign_sandbox_to_job_request( + job_id=job_id, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def submit_bulk_jobs( + self, body: List[str], *, content_type: str = "application/json", **kwargs: Any + ) -> List[_models.InsertedJob]: + """Submit Bulk Jobs. + + Submit Bulk Jobs. + + :param body: Required. + :type body: list[str] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: list of InsertedJob + :rtype: list[~generated.models.InsertedJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def submit_bulk_jobs( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> List[_models.InsertedJob]: + """Submit Bulk Jobs. + + Submit Bulk Jobs. + + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: list of InsertedJob + :rtype: list[~generated.models.InsertedJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def submit_bulk_jobs( + self, body: Union[List[str], IO[bytes]], **kwargs: Any + ) -> List[_models.InsertedJob]: + """Submit Bulk Jobs. + + Submit Bulk Jobs. + + :param body: Is either a [str] type or a IO[bytes] type. Required. + :type body: list[str] or IO[bytes] + :return: list of InsertedJob + :rtype: list[~generated.models.InsertedJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[List[_models.InsertedJob]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "[str]") + + _request = build_jobs_submit_bulk_jobs_request( + content_type=content_type, 
+ json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "[InsertedJob]", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Delete Bulk Jobs. + + Delete Bulk Jobs. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_delete_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise 
HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Kill Bulk Jobs. + + Kill Bulk Jobs. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_kill_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Remove Bulk Jobs. + + Fully remove a list of jobs from the WMS databases. 
+ + WARNING: This endpoint has been implemented for the compatibility with the legacy DIRAC WMS + and the JobCleaningAgent. However, once this agent is ported to diracx, this endpoint should + be removed, and the delete endpoint should be used instead for any other purpose. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_remove_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_job_status_bulk( + self, *, job_ids: List[int], **kwargs: Any + ) -> Dict[str, _models.LimitedJobStatusReturn]: + """Get Job Status Bulk. + + Get Job Status Bulk. + + :keyword job_ids: Required. 
+ :paramtype job_ids: list[int] + :return: dict mapping str to LimitedJobStatusReturn + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, _models.LimitedJobStatusReturn]] = kwargs.pop( + "cls", None + ) + + _request = build_jobs_get_job_status_bulk_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def set_job_status_bulk( + self, + body: Dict[str, Dict[str, _models.JobStatusUpdate]], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Job Status Bulk. + + Set Job Status Bulk. + + :param body: Required. + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] + :keyword force: Default value is False. + :paramtype force: bool + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_job_status_bulk( + self, + body: IO[bytes], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Job Status Bulk. + + Set Job Status Bulk. + + :param body: Required. + :type body: IO[bytes] + :keyword force: Default value is False. + :paramtype force: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_job_status_bulk( + self, + body: Union[Dict[str, Dict[str, _models.JobStatusUpdate]], IO[bytes]], + *, + force: bool = False, + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Job Status Bulk. + + Set Job Status Bulk. + + :param body: Is either a {str: {str: JobStatusUpdate}} type or a IO[bytes] type. Required. + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] or IO[bytes] + :keyword force: Default value is False. 
+ :paramtype force: bool + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[Dict[str, _models.SetJobStatusReturn]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "{{JobStatusUpdate}}") + + _request = build_jobs_set_job_status_bulk_request( + force=force, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_job_status_history_bulk( + self, *, job_ids: List[int], **kwargs: Any + ) -> Dict[str, List[_models.JobStatusReturn]]: + """Get 
Job Status History Bulk. + + Get Job Status History Bulk. + + :keyword job_ids: Required. + :paramtype job_ids: list[int] + :return: dict mapping str to list of JobStatusReturn + :rtype: dict[str, list[~generated.models.JobStatusReturn]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[_models.JobStatusReturn]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_job_status_history_bulk_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: + """Reschedule Bulk Jobs. + + Reschedule Bulk Jobs. + + :keyword job_ids: Required. 
+ :paramtype job_ids: list[int] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_reschedule_bulk_jobs_request( + job_ids=job_ids, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Reschedule Single Job. + + Reschedule Single Job. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_reschedule_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def search( + self, + body: Optional[_models.JobSearchParams] = None, + *, + page: int = 1, + per_page: int = 100, + content_type: str = "application/json", + **kwargs: Any, + ) -> List[JSON]: + """Search. + + Retrieve information about jobs. + + **TODO: Add more docs**. + + :param body: Default value is None. + :type body: ~generated.models.JobSearchParams + :keyword page: Default value is 1. + :paramtype page: int + :keyword per_page: Default value is 100. + :paramtype per_page: int + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: list of JSON + :rtype: list[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def search( + self, + body: Optional[IO[bytes]] = None, + *, + page: int = 1, + per_page: int = 100, + content_type: str = "application/json", + **kwargs: Any, + ) -> List[JSON]: + """Search. + + Retrieve information about jobs. + + **TODO: Add more docs**. + + :param body: Default value is None. + :type body: IO[bytes] + :keyword page: Default value is 1. + :paramtype page: int + :keyword per_page: Default value is 100. + :paramtype per_page: int + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: list of JSON + :rtype: list[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def search( + self, + body: Optional[Union[_models.JobSearchParams, IO[bytes]]] = None, + *, + page: int = 1, + per_page: int = 100, + **kwargs: Any, + ) -> List[JSON]: + """Search. + + Retrieve information about jobs. + + **TODO: Add more docs**. + + :param body: Is either a JobSearchParams type or a IO[bytes] type. Default value is None. + :type body: ~generated.models.JobSearchParams or IO[bytes] + :keyword page: Default value is 1. + :paramtype page: int + :keyword per_page: Default value is 100. 
+ :paramtype per_page: int + :return: list of JSON + :rtype: list[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[List[JSON]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "JobSearchParams") + else: + _json = None + + _request = build_jobs_search_request( + page=page, + per_page=per_page, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + response_headers = {} + if response.status_code == 206: + response_headers["Content-Range"] = self._deserialize( + "str", response.headers.get("Content-Range") + ) + + deserialized = self._deserialize("[object]", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def summary( + 
self, + body: _models.JobSummaryParams, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> Any: + """Summary. + + Show information suitable for plotting. + + :param body: Required. + :type body: ~generated.models.JobSummaryParams + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def summary( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> Any: + """Summary. + + Show information suitable for plotting. + + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def summary( + self, body: Union[_models.JobSummaryParams, IO[bytes]], **kwargs: Any + ) -> Any: + """Summary. + + Show information suitable for plotting. + + :param body: Is either a JobSummaryParams type or a IO[bytes] type. Required. 
+ :type body: ~generated.models.JobSummaryParams or IO[bytes] + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "JobSummaryParams") + + _request = build_jobs_summary_request( + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Get Single Job. + + Get Single Job. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_get_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Delete Single Job. + + Delete a job by killing and setting the job status to DELETED. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_delete_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def set_single_job_properties( + self, job_id: int, body: JSON, *, update_timestamp: bool = False, **kwargs: Any + ) -> Any: + """Set Single Job Properties. + + Update the given job properties (MinorStatus, ApplicationStatus, etc). + + :param job_id: Required. + :type job_id: int + :param body: Required. + :type body: JSON + :keyword update_timestamp: Default value is False. 
+ :paramtype update_timestamp: bool + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("Content-Type", "application/json") + ) + cls: ClsType[Any] = kwargs.pop("cls", None) + + _json = self._serialize.body(body, "object") + + _request = build_jobs_set_single_job_properties_request( + job_id=job_id, + update_timestamp=update_timestamp, + content_type=content_type, + json=_json, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def kill_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Kill Single Job. + + Kill a job. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_kill_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: + """Remove Single Job. + + Fully remove a job from the WMS databases. + + WARNING: This endpoint has been implemented for the compatibility with the legacy DIRAC WMS + and the JobCleaningAgent. However, once this agent is ported to diracx, this endpoint should + be removed, and the delete endpoint should be used instead. + + :param job_id: Required. 
+ :type job_id: int + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_jobs_remove_single_job_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_single_job_status( + self, job_id: int, **kwargs: Any + ) -> Dict[str, _models.LimitedJobStatusReturn]: + """Get Single Job Status. + + Get Single Job Status. + + :param job_id: Required. 
+ :type job_id: int + :return: dict mapping str to LimitedJobStatusReturn + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, _models.LimitedJobStatusReturn]] = kwargs.pop( + "cls", None + ) + + _request = build_jobs_get_single_job_status_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def set_single_job_status( + self, + job_id: int, + body: Dict[str, _models.JobStatusUpdate], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Single Job Status. + + Set Single Job Status. + + :param job_id: Required. + :type job_id: int + :param body: Required. + :type body: dict[str, ~generated.models.JobStatusUpdate] + :keyword force: Default value is False. 
+ :paramtype force: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def set_single_job_status( + self, + job_id: int, + body: IO[bytes], + *, + force: bool = False, + content_type: str = "application/json", + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Single Job Status. + + Set Single Job Status. + + :param job_id: Required. + :type job_id: int + :param body: Required. + :type body: IO[bytes] + :keyword force: Default value is False. + :paramtype force: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def set_single_job_status( + self, + job_id: int, + body: Union[Dict[str, _models.JobStatusUpdate], IO[bytes]], + *, + force: bool = False, + **kwargs: Any, + ) -> Dict[str, _models.SetJobStatusReturn]: + """Set Single Job Status. + + Set Single Job Status. + + :param job_id: Required. + :type job_id: int + :param body: Is either a {str: JobStatusUpdate} type or a IO[bytes] type. Required. + :type body: dict[str, ~generated.models.JobStatusUpdate] or IO[bytes] + :keyword force: Default value is False. 
+ :paramtype force: bool + :return: dict mapping str to SetJobStatusReturn + :rtype: dict[str, ~generated.models.SetJobStatusReturn] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + cls: ClsType[Dict[str, _models.SetJobStatusReturn]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "{JobStatusUpdate}") + + _request = build_jobs_set_single_job_status_request( + job_id=job_id, + force=force, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_single_job_status_history( + self, job_id: int, **kwargs: Any + ) -> Dict[str, List[_models.JobStatusReturn]]: 
+ """Get Single Job Status History. + + Get Single Job Status History. + + :param job_id: Required. + :type job_id: int + :return: dict mapping str to list of JobStatusReturn + :rtype: dict[str, list[~generated.models.JobStatusReturn]] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = ( + { # pylint: disable=unsubscriptable-object + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + ) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Dict[str, List[_models.JobStatusReturn]]] = kwargs.pop("cls", None) + + _request = build_jobs_get_single_job_status_history_request( + job_id=job_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/diracx-client/src/diracx/client/operations/_patch.py b/diracx-client/src/diracx/client/generated/operations/_patch.py similarity index 100% rename from diracx-client/src/diracx/client/operations/_patch.py rename to diracx-client/src/diracx/client/generated/operations/_patch.py diff --git a/diracx-client/src/diracx/client/generated/py.typed b/diracx-client/src/diracx/client/generated/py.typed new file mode 100644 index 
00000000..1242d432 --- /dev/null +++ b/diracx-client/src/diracx/client/generated/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. diff --git a/diracx-client/src/diracx/client/models.py b/diracx-client/src/diracx/client/models.py new file mode 100644 index 00000000..15dd42a0 --- /dev/null +++ b/diracx-client/src/diracx/client/models.py @@ -0,0 +1,5 @@ +from .generated.models import * # pylint: disable=unused-wildcard-import + + +# TODO: replace with postprocess +from .generated.models import DeviceFlowErrorResponse diff --git a/diracx-client/src/diracx/client/patches/__init__.py b/diracx-client/src/diracx/client/patches/__init__.py new file mode 100644 index 00000000..0c7e71ee --- /dev/null +++ b/diracx-client/src/diracx/client/patches/__init__.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from typing import List + +from .utils import DiracClientMixin + + +from diracx.core.extensions import select_from_extension + +real_client = select_from_extension(group="diracx", name="client_class")[0].load() +DiracGenerated = real_client + + +__all__: List[str] = [ + "DiracClient", +] # Add all objects you want publicly available to users at this package level + + +class DiracClient(DiracClientMixin, DiracGenerated): ... # type: ignore diff --git a/diracx-client/src/diracx/client/patches/aio/__init__.py b/diracx-client/src/diracx/client/patches/aio/__init__.py new file mode 100644 index 00000000..952e30c9 --- /dev/null +++ b/diracx-client/src/diracx/client/patches/aio/__init__.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from typing import List + +from .utils import DiracClientMixin + + +from diracx.core.extensions import select_from_extension + +real_client = select_from_extension(group="diracx", name="aio_client_class")[0].load() +DiracGenerated = real_client + +__all__: List[str] = [ + "DiracClient", +] # Add all objects you want publicly available to users at this package level + + +class DiracClient(DiracClientMixin, DiracGenerated): ... 
# type: ignore diff --git a/diracx-client/src/diracx/client/aio/_patch.py b/diracx-client/src/diracx/client/patches/aio/utils.py similarity index 90% rename from diracx-client/src/diracx/client/aio/_patch.py rename to diracx-client/src/diracx/client/patches/aio/utils.py index 1a70f1c4..deb99409 100644 --- a/diracx-client/src/diracx/client/aio/_patch.py +++ b/diracx-client/src/diracx/client/patches/aio/utils.py @@ -6,10 +6,13 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ +from __future__ import annotations + +import abc import json from types import TracebackType from pathlib import Path -from typing import Any, List, Optional +from typing import Any, List, Optional, Self from azure.core.credentials import AccessToken from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline import PipelineRequest @@ -17,8 +20,7 @@ from diracx.core.preferences import get_diracx_preferences, DiracxPreferences -from ._client import Dirac as DiracGenerated -from .._patch import ( +from ..utils import ( get_openid_configuration, get_token, refresh_token, @@ -30,15 +32,6 @@ ] # Add all objects you want publicly available to users at this package level -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ - - class DiracTokenCredential(AsyncTokenCredential): """Tailor get_token() for our context""" @@ -140,7 +133,7 @@ async def on_request( request.http_request.headers["Authorization"] = f"Bearer {self._token.token}" -class DiracClient(DiracGenerated): +class DiracClientMixin(metaclass=abc.ABCMeta): """This class inherits from the generated Dirac client and adds support for tokens, so that the caller does not need to configure it by itself. 
""" @@ -164,7 +157,10 @@ def __init__( openid_configuration = get_openid_configuration(self._endpoint, verify=verify) # Initialize Dirac with a Dirac-specific token credential policy - super().__init__( + # We need to ignore types here because mypy complains that we give + # too many arguments to "object" constructor as this is a mixin + + super().__init__( # type: ignore endpoint=self._endpoint, authentication_policy=DiracBearerTokenCredentialPolicy( DiracTokenCredential( @@ -181,7 +177,7 @@ def __init__( def client_id(self): return self._client_id - async def __aenter__(self) -> "DiracClient": - """Redefined to provide the patched Dirac client in the managed context""" - await self._client.__aenter__() - return self + # async def __aenter__(self) -> Self: # type: ignore + # """Redefined to provide the patched Dirac client in the managed context""" + # await self._client.__aenter__() # type: ignore + # return self diff --git a/diracx-client/src/diracx/client/_patch.py b/diracx-client/src/diracx/client/patches/utils.py similarity index 88% rename from diracx-client/src/diracx/client/_patch.py rename to diracx-client/src/diracx/client/patches/utils.py index d8772d2e..692e2118 100644 --- a/diracx-client/src/diracx/client/_patch.py +++ b/diracx-client/src/diracx/client/patches/utils.py @@ -1,11 +1,3 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. 
- -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" from __future__ import annotations from datetime import datetime, timezone @@ -13,33 +5,114 @@ import jwt import requests + from pathlib import Path -from typing import Any, Dict, List, Optional, cast +from typing import Any, Dict, List, Optional, cast, Self from urllib import parse from azure.core.credentials import AccessToken from azure.core.credentials import TokenCredential from azure.core.pipeline import PipelineRequest from azure.core.pipeline.policies import BearerTokenCredentialPolicy -from diracx.client.models import TokenResponse +from ..generated.models import TokenResponse from diracx.core.models import TokenResponse as CoreTokenResponse from diracx.core.preferences import DiracxPreferences, get_diracx_preferences -from ._client import Dirac as DiracGenerated +import sys -__all__: List[str] = [ - "DiracClient", -] # Add all objects you want publicly available to users at this package level +def refresh_token( + location: Path, + token_endpoint: str, + client_id: str, + refresh_token: str, + *, + verify: bool | str = True, +) -> AccessToken: + """Refresh the access token using the refresh_token flow.""" + from diracx.core.utils import write_credentials + response = requests.post( + url=token_endpoint, + data={ + "client_id": client_id, + "grant_type": "refresh_token", + "refresh_token": refresh_token, + }, + verify=verify, + ) -def patch_sdk(): - """Do not remove from this file. 
+ if response.status_code != 200: + raise RuntimeError( + f"An issue occurred while refreshing your access token: {response.json()['detail']}" + ) - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ + res = response.json() + token_response = TokenResponse( + access_token=res["access_token"], + expires_in=res["expires_in"], + token_type=res.get("token_type"), + refresh_token=res.get("refresh_token"), + ) + + write_credentials(cast(CoreTokenResponse, token_response), location=location) + credentials = json.loads(location.read_text()) + return AccessToken(credentials.get("access_token"), credentials.get("expires_on")) + + +def get_openid_configuration( + endpoint: str, *, verify: bool | str = True +) -> Dict[str, str]: + """Get the openid configuration from the .well-known endpoint""" + response = requests.get( + url=parse.urljoin(endpoint, ".well-known/openid-configuration"), + verify=verify, + ) + if not response.ok: + raise RuntimeError("Cannot fetch any information from the .well-known endpoint") + return response.json() + + +def get_token(location: Path, token: AccessToken | None) -> AccessToken | None: + """Get token if available and still valid.""" + # If the credentials path does not exist, raise an error + if not location.exists(): + raise RuntimeError("credentials are not set") + + # Load the existing credentials + if not token: + credentials = json.loads(location.read_text()) + token = AccessToken( + cast(str, credentials.get("access_token")), + cast(int, credentials.get("expires_on")), + ) + + # We check the validity of the token + # If not valid, then return None to inform the caller that a new token + # is needed + if not is_token_valid(token): + return None + + return token + + +def is_refresh_token_valid(refresh_token: str) -> bool: + """Check if the refresh token is still valid.""" + # Decode the refresh 
token + refresh_payload = jwt.decode(refresh_token, options={"verify_signature": False}) + if not refresh_payload or "exp" not in refresh_payload: + return False + + # Check the expiration time + return refresh_payload["exp"] > datetime.now(tz=timezone.utc).timestamp() + + +def is_token_valid(token: AccessToken) -> bool: + """Condition to get a new token""" + return ( + datetime.fromtimestamp(token.expires_on, tz=timezone.utc) + - datetime.now(tz=timezone.utc) + ).total_seconds() > 300 class DiracTokenCredential(TokenCredential): @@ -125,7 +198,7 @@ def on_request( request.http_request.headers["Authorization"] = f"Bearer {self._token.token}" -class DiracClient(DiracGenerated): +class DiracClientMixin: """This class inherits from the generated Dirac client and adds support for tokens, so that the caller does not need to configure it by itself. """ @@ -149,7 +222,10 @@ def __init__( openid_configuration = get_openid_configuration(self._endpoint, verify=verify) # Initialize Dirac with a Dirac-specific token credential policy - super().__init__( + # We need to ignore types here because mypy complains that we give + # too many arguments to "object" constructor as this is a mixin + + super().__init__( # type: ignore endpoint=self._endpoint, authentication_policy=DiracBearerTokenCredentialPolicy( DiracTokenCredential( @@ -166,101 +242,7 @@ def __init__( def client_id(self): return self._client_id - def __aenter__(self) -> "DiracClient": - """Redefined to provide the patched Dirac client in the managed context""" - self._client.__enter__() - return self - - -def refresh_token( - location: Path, - token_endpoint: str, - client_id: str, - refresh_token: str, - *, - verify: bool | str = True, -) -> AccessToken: - """Refresh the access token using the refresh_token flow.""" - from diracx.core.utils import write_credentials - - response = requests.post( - url=token_endpoint, - data={ - "client_id": client_id, - "grant_type": "refresh_token", - "refresh_token": refresh_token, - 
}, - verify=verify, - ) - - if response.status_code != 200: - raise RuntimeError( - f"An issue occured while refreshing your access token: {response.json()['detail']}" - ) - - res = response.json() - token_response = TokenResponse( - access_token=res["access_token"], - expires_in=res["expires_in"], - token_type=res.get("token_type"), - refresh_token=res.get("refresh_token"), - ) - - write_credentials(cast(CoreTokenResponse, token_response), location=location) - credentials = json.loads(location.read_text()) - return AccessToken(credentials.get("access_token"), credentials.get("expires_on")) - - -def get_openid_configuration( - endpoint: str, *, verify: bool | str = True -) -> Dict[str, str]: - """Get the openid configuration from the .well-known endpoint""" - response = requests.get( - url=parse.urljoin(endpoint, ".well-known/openid-configuration"), - verify=verify, - ) - if not response.ok: - raise RuntimeError("Cannot fetch any information from the .well-known endpoint") - return response.json() - - -def get_token(location: Path, token: AccessToken | None) -> AccessToken | None: - """Get token if available and still valid.""" - # If the credentials path does not exist, raise an error - if not location.exists(): - raise RuntimeError("credentials are not set") - - # Load the existing credentials - if not token: - credentials = json.loads(location.read_text()) - token = AccessToken( - cast(str, credentials.get("access_token")), - cast(int, credentials.get("expires_on")), - ) - - # We check the validity of the token - # If not valid, then return None to inform the caller that a new token - # is needed - if not is_token_valid(token): - return None - - return token - - -def is_refresh_token_valid(refresh_token: str) -> bool: - """Check if the refresh token is still valid.""" - # Decode the refresh token - refresh_payload = jwt.decode(refresh_token, options={"verify_signature": False}) - if not refresh_payload or "exp" not in refresh_payload: - return False - - # Check 
the expiration time - return refresh_payload["exp"] > datetime.now(tz=timezone.utc).timestamp() - - -def is_token_valid(token: AccessToken) -> bool: - """Condition to get a new token""" - return ( - datetime.fromtimestamp(token.expires_on, tz=timezone.utc) - - datetime.now(tz=timezone.utc) - ).total_seconds() > 300 + # def __aenter__(self) -> Self: # type: ignore + # """Redefined to provide the patched Dirac client in the managed context""" + # self._client.__enter__() # type: ignore + # return self diff --git a/diracx-client/tests/test_regenerate.py b/diracx-client/tests/test_regenerate.py index a7d9a357..ab3686bc 100644 --- a/diracx-client/tests/test_regenerate.py +++ b/diracx-client/tests/test_regenerate.py @@ -4,7 +4,7 @@ import git import pytest -import diracx.client +import diracx.client.generated pytestmark = pytest.mark.enabled_dependencies([]) # pytestmark = pytest.mark.enabled_dependencies(["ConfigSource", "AuthSettings"]) @@ -36,14 +36,13 @@ def test_regenerate_client(test_client, tmp_path): openapi_spec = tmp_path / "openapi.json" openapi_spec.write_text(r.text) - - output_folder = Path(diracx.client.__file__).parent.parent - assert (output_folder / "client").is_dir() - repo_root = output_folder.parent.parent.parent + output_folder = Path(diracx.client.generated.__file__).parent + assert (output_folder).is_dir() + repo_root = output_folder.parents[4] assert (repo_root / "diracx-client" / "src").is_dir() assert (repo_root / ".git").is_dir() repo = git.Repo(repo_root) - if repo.is_dirty(path=repo_root / "src" / "diracx" / "client"): + if repo.is_dirty(path=repo_root / "diracx-client" / "src" / "diracx" / "client"): raise AssertionError( "Client is currently in a modified state, skipping regeneration" ) @@ -53,8 +52,8 @@ def test_regenerate_client(test_client, tmp_path): "--python", f"--input-file={openapi_spec}", "--models-mode=msrest", - "--namespace=client", - f"--output-folder={output_folder}", + "--namespace=generated", + 
f"--output-folder={output_folder.parent}", ] # This is required to be able to work offline diff --git a/diracx-core/pyproject.toml b/diracx-core/pyproject.toml index 26e1e11d..afcd1419 100644 --- a/diracx-core/pyproject.toml +++ b/diracx-core/pyproject.toml @@ -38,7 +38,8 @@ types = [ ] [project.entry-points."diracx"] -properties_module = "diracx.core:properties" +properties_module = "diracx.core.properties" +config = "diracx.core.config.schema:Config" [tool.setuptools.packages.find] where = ["src"] diff --git a/diracx-core/src/diracx/core/config/__init__.py b/diracx-core/src/diracx/core/config/__init__.py index 889efb2f..e1adb27c 100644 --- a/diracx-core/src/diracx/core/config/__init__.py +++ b/diracx-core/src/diracx/core/config/__init__.py @@ -21,8 +21,12 @@ from pydantic import AnyUrl, BeforeValidator, TypeAdapter, UrlConstraints from ..exceptions import BadConfigurationVersion +from ..extensions import select_from_extension from .schema import Config +# Config: type[_Config] = select_from_extension(group="diracx", name="config")[0].load() + + DEFAULT_CONFIG_FILE = "default.yml" DEFAULT_GIT_BRANCH = "master" DEFAULT_CS_CACHE_TTL = 5 @@ -146,7 +150,11 @@ def read_raw(self, hexsha: str, modified: datetime) -> Config: rev = self.repo.rev_parse(hexsha) blob = rev.tree / DEFAULT_CONFIG_FILE raw_obj = yaml.safe_load(blob.data_stream.read().decode()) - config = Config.model_validate(raw_obj) + + config_class: Config = select_from_extension(group="diracx", name="config")[ + 0 + ].load() + config = config_class.model_validate(raw_obj) config._hexsha = hexsha config._modified = modified return config diff --git a/diracx-core/src/diracx/core/config/schema.py b/diracx-core/src/diracx/core/config/schema.py index 84c24720..bef75d38 100644 --- a/diracx-core/src/diracx/core/config/schema.py +++ b/diracx-core/src/diracx/core/config/schema.py @@ -2,7 +2,7 @@ import os from datetime import datetime -from typing import Annotated, Any, TypeVar +from typing import Annotated, Any, 
MutableMapping, TypeVar from pydantic import BaseModel as _BaseModel from pydantic import ConfigDict, EmailStr, Field, PrivateAttr, model_validator @@ -95,8 +95,8 @@ class RegistryConfig(BaseModel): DefaultProxyLifeTime: int = 12 * 60 * 60 VOMSName: str | None = None - Users: dict[str, UserConfig] - Groups: dict[str, GroupConfig] + Users: MutableMapping[str, UserConfig] + Groups: MutableMapping[str, GroupConfig] def sub_from_preferred_username(self, preferred_username: str) -> str: """Get the user sub from the preferred username. @@ -123,7 +123,7 @@ class JobSchedulingConfig(BaseModel): class ServicesConfig(BaseModel): - Catalogs: dict[str, Any] | None = None + Catalogs: MutableMapping[str, Any] | None = None JobMonitoring: JobMonitoringConfig = JobMonitoringConfig() JobScheduling: JobSchedulingConfig = JobSchedulingConfig() @@ -132,38 +132,38 @@ class OperationsConfig(BaseModel): EnableSecurityLogging: bool = False Services: ServicesConfig = ServicesConfig() - Cloud: dict[str, Any] | None = None - DataConsistency: dict[str, Any] | None = None - DataManagement: dict[str, Any] | None = None - EMail: dict[str, Any] | None = None - ExternalsPolicy: dict[str, Any] | None = None - GaudiExecution: dict[str, Any] | None = None - Hospital: dict[str, Any] | None = None - InputDataPolicy: dict[str, Any] | None = None - JobDescription: dict[str, Any] | None = None - JobScheduling: dict[str, Any] | None = None - JobTypeMapping: dict[str, Any] | None = None - LogFiles: dict[str, Any] | None = None - LogStorage: dict[str, Any] | None = None - Logging: dict[str, Any] | None = None - Matching: dict[str, Any] | None = None - MonitoringBackends: dict[str, Any] | None = None - NagiosConnector: dict[str, Any] | None = None - Pilot: dict[str, Any] | None = None - Productions: dict[str, Any] | None = None - Shares: dict[str, Any] | None = None - Shifter: dict[str, Any] | None = None - SiteSEMappingByProtocol: dict[str, Any] | None = None - TransformationPlugins: dict[str, Any] | None = 
None - Transformations: dict[str, Any] | None = None - ResourceStatus: dict[str, Any] | None = None + Cloud: MutableMapping[str, Any] | None = None + DataConsistency: MutableMapping[str, Any] | None = None + DataManagement: MutableMapping[str, Any] | None = None + EMail: MutableMapping[str, Any] | None = None + ExternalsPolicy: MutableMapping[str, Any] | None = None + GaudiExecution: MutableMapping[str, Any] | None = None + Hospital: MutableMapping[str, Any] | None = None + InputDataPolicy: MutableMapping[str, Any] | None = None + JobDescription: MutableMapping[str, Any] | None = None + JobScheduling: MutableMapping[str, Any] | None = None + JobTypeMapping: MutableMapping[str, Any] | None = None + LogFiles: MutableMapping[str, Any] | None = None + LogStorage: MutableMapping[str, Any] | None = None + Logging: MutableMapping[str, Any] | None = None + Matching: MutableMapping[str, Any] | None = None + MonitoringBackends: MutableMapping[str, Any] | None = None + NagiosConnector: MutableMapping[str, Any] | None = None + Pilot: MutableMapping[str, Any] | None = None + Productions: MutableMapping[str, Any] | None = None + Shares: MutableMapping[str, Any] | None = None + Shifter: MutableMapping[str, Any] | None = None + SiteSEMappingByProtocol: MutableMapping[str, Any] | None = None + TransformationPlugins: MutableMapping[str, Any] | None = None + Transformations: MutableMapping[str, Any] | None = None + ResourceStatus: MutableMapping[str, Any] | None = None class Config(BaseModel): - Registry: dict[str, RegistryConfig] + Registry: MutableMapping[str, RegistryConfig] DIRAC: DIRACConfig # TODO: Should this be split by vo rather than setup? 
- Operations: dict[str, OperationsConfig] + Operations: MutableMapping[str, OperationsConfig] LocalSite: Any = None LogLevel: Any = None diff --git a/diracx-db/pyproject.toml b/diracx-db/pyproject.toml index 37d728be..7b859c09 100644 --- a/diracx-db/pyproject.toml +++ b/diracx-db/pyproject.toml @@ -47,6 +47,7 @@ build-backend = "setuptools.build_meta" [tool.setuptools_scm] root = ".." + [tool.pytest.ini_options] testpaths = ["tests"] addopts = [ diff --git a/diracx-db/src/diracx/db/__main__.py b/diracx-db/src/diracx/db/__main__.py index da36eace..179da9b7 100644 --- a/diracx-db/src/diracx/db/__main__.py +++ b/diracx-db/src/diracx/db/__main__.py @@ -31,6 +31,7 @@ async def init_sql(): from diracx.db.sql.utils import BaseSQLDB for db_name, db_url in BaseSQLDB.available_urls().items(): + logger.info("Initialising %s", db_name) db = BaseSQLDB.available_implementations(db_name)[0](db_url) async with db.engine_context(): diff --git a/diracx-routers/pyproject.toml b/diracx-routers/pyproject.toml index 97c10f50..b740836b 100644 --- a/diracx-routers/pyproject.toml +++ b/diracx-routers/pyproject.toml @@ -82,3 +82,6 @@ asyncio_mode = "auto" markers = [ "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", ] + + +asyncio_default_fixture_loop_scope = "function" diff --git a/diracx-routers/src/diracx/routers/__init__.py b/diracx-routers/src/diracx/routers/__init__.py index d0d17086..8b64a164 100644 --- a/diracx-routers/src/diracx/routers/__init__.py +++ b/diracx-routers/src/diracx/routers/__init__.py @@ -183,7 +183,9 @@ def create_app_inner( fail_startup = True # Add the SQL DBs to the application available_sql_db_classes: set[type[BaseSQLDB]] = set() + for db_name, db_url in database_urls.items(): + try: sql_db_classes = BaseSQLDB.available_implementations(db_name) @@ -196,6 +198,7 @@ def create_app_inner( for sql_db_class in sql_db_classes: assert sql_db_class.transaction not in app.dependency_overrides 
available_sql_db_classes.add(sql_db_class) + app.dependency_overrides[sql_db_class.transaction] = partial( db_transaction, sql_db ) @@ -251,6 +254,7 @@ def create_app_inner( missing_sql_dbs = ( set(find_dependents(router, BaseSQLDB)) - available_sql_db_classes ) + if missing_sql_dbs: raise NotImplementedError( f"Cannot enable {system_name=} as it requires {missing_sql_dbs=}" diff --git a/diracx-routers/src/diracx/routers/fastapi_classes.py b/diracx-routers/src/diracx/routers/fastapi_classes.py index d5fa3e8c..d7166667 100644 --- a/diracx-routers/src/diracx/routers/fastapi_classes.py +++ b/diracx-routers/src/diracx/routers/fastapi_classes.py @@ -2,9 +2,10 @@ import asyncio import contextlib -from typing import TypeVar +from typing import Any, Callable, TypeVar, cast from fastapi import APIRouter, FastAPI +from starlette.routing import Route T = TypeVar("T") @@ -60,6 +61,7 @@ def openapi(self, *args, **kwargs): if not self.openapi_schema: super().openapi(*args, **kwargs) _downgrade_openapi_schema(self.openapi_schema) + # Remove 422 responses as we don't want autorest to use it for _, method_item in self.openapi_schema.get("paths").items(): for _, param in method_item.items(): @@ -81,3 +83,28 @@ def __init__( super().__init__(dependencies=dependencies) self.diracx_require_auth = require_auth self.diracx_path_root = path_root + + #### + # These 2 methods are needed to overwrite routes + # https://github.com/tiangolo/fastapi/discussions/8489 + + def add_api_route(self, path: str, endpoint: Callable[..., Any], **kwargs): + + route_index = self._get_route_index_by_path_and_methods( + path, set(kwargs.get("methods", [])) + ) + if route_index >= 0: + # route_to_del = self.routes[route_index] + + self.routes.pop(route_index) + + return super().add_api_route(path, endpoint, **kwargs) + + def _get_route_index_by_path_and_methods(self, path: str, methods: set[str]) -> int: + routes = cast(list[Route], self.routes) + for index, route in enumerate(routes): + if route.path == path 
and methods == route.methods: + return index + return -1 + + ###### diff --git a/diracx-testing/src/diracx/testing/__init__.py b/diracx-testing/src/diracx/testing/__init__.py index 373e3310..13a59be3 100644 --- a/diracx-testing/src/diracx/testing/__init__.py +++ b/diracx-testing/src/diracx/testing/__init__.py @@ -241,6 +241,7 @@ def enrich_tokens(access_payload: dict, refresh_payload: dict): @contextlib.contextmanager def configure(self, enabled_dependencies): + assert ( self.app.dependency_overrides == {} and self.app.lifetime_functions == [] ), "configure cannot be nested" @@ -532,8 +533,9 @@ def cli_env(monkeypatch, tmp_path, demo_urls, demo_dir): async def with_cli_login(monkeypatch, capfd, cli_env, tmp_path): try: credentials = await test_login(monkeypatch, capfd, cli_env) - except Exception: - pytest.skip("Login failed, fix test_login to re-enable this test") + except Exception as e: + + pytest.skip(f"Login failed, fix test_login to re-enable this test: {e!r}") credentials_path = tmp_path / "credentials.json" credentials_path.write_text(credentials) @@ -575,7 +577,6 @@ def fake_sleep(*args, **kwargs): expected_credentials_path = Path( cli_env["HOME"], ".cache", "diracx", "credentials.json" ) - # Ensure the credentials file does not exist before logging in assert not expected_credentials_path.exists() diff --git a/docs/CLIENT.md b/docs/CLIENT.md index 2c671a8a..3c141c08 100644 --- a/docs/CLIENT.md +++ b/docs/CLIENT.md @@ -20,7 +20,12 @@ The DiracX client is a comprehensive toolset designed to interact with various s ## diracx-client -The `diracx-client` is an auto-generated client library that facilitates communication with services defined by OpenAPI specifications. +The `diracx-client` consists of three parts: +* an auto-generated client library that facilitates communication with services defined by OpenAPI specifications. (the `generated` folder) +* customization, in the `patches` folder, which mirror the structure of the generated client. 
+* the base modules (`aio`, `extensions`, `models`) just exporting what we want to be exporting + +`diracx-client` also defines a `DiracClient` class which exposes all these low level calls, and handles the authentication/authorisation aspects, as well as the interactions with extensions. ### Generating a Client @@ -46,7 +51,7 @@ Further details can be found in the [Python Autorest documentation](https://gith ### Customising the Generated Client -Modifications to the generated client should be made in `_patch.py` files to ensure maintainability. Detailed guidance can be found in [Python Autorest documentation](https://github.com/Azure/autorest.python/blob/main/docs/customizations.md). +Modifications to the generated client should be made in the `patches` files to ensure maintainability, and possibly imported in the `_patch.py` files if needed. Detailed guidance can be found in [Python Autorest documentation](https://github.com/Azure/autorest.python/blob/main/docs/customizations.md). Note: any modification in the synchronous client should also be performed in the asynchronous client (**aio**), and vice-versa. @@ -140,6 +145,17 @@ CLI commands are located in `diracx-cli/src/diracx/cli/`. To create a CLI comman 2. Import `utils.AsyncTyper`. 3. Use the `@app.async_command` decorator to define commands. +For adding a new command, it needs to be added to one of the following entrypoint: + +```toml +[project.entry-points."diracx.cli"] +jobs = "diracx.cli.jobs:app" +config = "diracx.cli.config:app" + +[project.entry-points."diracx.cli.hidden"] +internal = "diracx.cli.internal:app" +``` + #### Example ```python diff --git a/docs/CODING_CONVENTION.md b/docs/CODING_CONVENTION.md index 81f183bd..9ea27510 100644 --- a/docs/CODING_CONVENTION.md +++ b/docs/CODING_CONVENTION.md @@ -1,3 +1,57 @@ Opinionated decisions TODO: + +# pytest + +DO + +```python +import pytest + +@pytest.fixture +def my_ficture(): + ... 
+``` + +DONT + +```python +from pytest import fixture + +@fixture +def my_ficture(): + ... +``` + +# datetime + +DO + +```python +from datetime import datetime, timedelta +delay = datetime.now() + timedelta(hours=1) +``` + +DONT + +```python +import datetime +delay = datetime.datetime.now() + datetime.timedelta(hours=1) +``` + + +ALWAYS DO + +```python +from __future__ import annotations +``` + + +# Structure + +(https://github.com/DIRACGrid/diracx/issues/268) + +* `__init__.py` should not contain code, but `__all__` +* at a package level (router for example) we have one file per system (configuration.py for example) +* If we need more files (think of jobs, which have the sandbox, the joblogging, etc), we put them in a submodule (e.g. routers.job). The code goes in a specific file (job.py, joblogging.py) but we use the __init__.py to expose the specific file diff --git a/docs/VERSIONING.md index 3ecaf351..8ec60b50 100644 --- a/docs/VERSIONING.md +++ b/docs/VERSIONING.md @@ -96,5 +96,5 @@ TODO: document the entry point ## Extensions -- Extensions will extend one or more of `diracx`, `diracx-routers`, `diracx-tasks` images (e.g. `mydiracx`, `mydiracx-routers`, `mydiracx-tasks`). +- Extensions will extend one or more of `diracx`, `diracx-routers`, `diracx-tasks` images (e.g. `lhcbdiracx`, `lhcbdiracx-routers`, `lhcbdiracx-tasks`). - Extensions provide a corresponding container image based on a specific release of the corresponding DiracX image. diff --git a/extensions/README.md b/extensions/README.md new file mode 100644 index 00000000..1784b6df --- /dev/null +++ b/extensions/README.md @@ -0,0 +1,212 @@ + +# Gubbins + +`gubbins` is a `diracx` extension. It is a showcase for everything which is possible to extend. + +It should also serve as a reference doc on how to write your own extension. Everything in the `diracx` dev documentation applies here too.
+ +If you write your own extension, just replace `gubbins` with whatever name you chose (e.g. `lhcbdiracx`, `belle2diracx`, `donotforgetgriddpp`). + +The structure of the repo, the content of the ``pyproject.toml`` files, the ``__init__`` of the modules... are all skeletons that you must reproduce. + +Extensions are controlled via: +* the ``DIRACX_EXTENSIONS`` environment variable. It should be set to `gubbins,diracx` +* the entrypoints found in the various `pyproject.toml` + +This here is the exhaustive list of what is supported and tested for extensions. Any use case not listed here is not supported. + + +## QUESTIONS + +What to do with the `environment.yaml`? Should we replicate what's in diracx? + + +## General statements + +When working with the extension, you MUST always (server, client, testing) have exported the environment variable ``DIRACX_EXTENSIONS=gubbins,diracx``. See the [``pytest fixture``](extensions/gubbins/gubbins-testing/src/gubbins/testing/__init__.py). + + +The fact of having `gubbins` as a subfolder has a few downsides which you will not suffer from if your extension lives in a separate repository: +* the `root` of ``setuptools_scm`` in the various ``pyproject.toml`` will only be ``..`` for your extension + +## CI + +The extension is tested in the CI. + +What is in the [action file](``.github/workflows/extensions.yml``) should in fact be split into multiple jobs under ``.github/workflows/`` of your repo. + +Here we use the local versions of `diracx` packages to make sure we are always up to date, and that a change in `diracx` does not break the extension mechanisms. But in your real extension, you will want to depend on published packages (e.g. ``pip install diracx-routers`` instead of ``pip install -e ./diracx-routers`` for example), and on published docker images. + +Moreover, the `gubbins` docker images are never uploaded, and are only passed from one job to the next. You should definitely upload yours.
+ +## Installing the extension + +To develop, follow the same instructions as `diracx` + +```bash +# Create the mamba environment +mamba env create --file extensions/gubbins/environment.yml +conda activate gubbins-dev + + +# Make an editable installation of diracx +pip install -r requirements-dev.txt + +# Make an editable installation of gubbins +cd extensions/gubbins/ +pip install -r requirements-dev.txt + +# Install the patched DIRAC version +pip install git+https://github.com/DIRACGrid/DIRAC.git@integration + +# Enable pre-commit +mamba install pre-commit +pre-commit install +``` + +## DB + +The ``gubbins-db`` package contains the extension for the DB. + +### New DB + +`lollygag` is a DB which is specific to `gubbins`, i.e. it does not modify or extend an existing `diracx` db + +### Extended DB + +`GubbinsJobDB` illustrates how to extend an existing `diracx` DB, add new methods, modify methods, add a table. + +A [router test](extensions/gubbins/gubbins-routers/tests/test_gubbins_job_manager.py) exists, even though no router is redefined. It is just to show that the correct DB is being loaded. + +> [!WARNING] +> in the dependency override, you need to specify both the original DiracX JobDB as well as the extended one GubbinsJobDB. To avoid these issues, reuse the same name (i.e. `JobDB` instead of `GubbinsJobDB`) + + +## Routers + +The ``gubbins-router`` package contains the extension for the routers. + + +### New router + +`lollygag` is a router which is specific to `gubbins`, i.e. it does not modify or extend an existing `diracx` router. It uses the `lollygagDB`. It also makes use of gubbins' specific `properties` and `AccessPolicy` + +### Existing router + +``well-known`` overwrites the ``dirac-metadata`` endpoint. It also changes the return type and makes use of gubbins' specific configs.
+ + +## Client + +The requirements are the following: + +* Working with the `DiracClient` should allow you to call the API from the extension +* It should be possible to use directly the extension client (i.e. `GubbinsClient`) +* Methods/Operations/models that are patched in `diracx` cannot be re-patched in the extension + + +### New client + +To create a client extension: +* mirror the structure of the `diracx-client` +* Generate a client in `generated` using `Autorest` +* Create the `patches` directory, simply exporting the generated `clients`(both [sync](gubbins/gubbins-client/src/gubbins/client/patches/__init__.py) and [async](gubbins/gubbins-client/src/gubbins/client/patches/aio/__init__.py)) +* Define the base modules to export what is needed +* The [top init](gubbins/gubbins-client/src/gubbins/client/__init__.py) MUST have +```python +import diracx.client +``` +* Redefine the entrypoint + +```toml +[project.entry-points."diracx"] +client_class = "gubbins.client.generated._client:Dirac" +aio_client_class = "gubbins.client.generated.aio._client:Dirac" +``` + +* Generate the autorest client (see CI ``regenerate_client``) + +## CLI + +The following CLI extensions are supported: +* add a new subcommand +* extend an existing subcommand +* modify an existing subcommand +* no `gubbins` CI, everything through `dirac` cli + +The CLI is managed by the ``diracx.cli`` entry point + +```toml +[project.entry-points."diracx.cli"] +jobs = "diracx.cli.jobs:app" +config = "diracx.cli.config:app" + +[project.entry-points."diracx.cli.hidden"] +internal = "diracx.cli.internal:app" +``` + +See the ``gubbins-cli`` package for instructions + +### New subcommand + + +`lollygag` is a new CLI command, calling the `lollygag` router. 
+ +### Changing a subcommand + +For completely replacing a subcommand, it is enough to change the corresponding entrypoint in the `pyproject.toml` + +### Extending a subcommand + +You can modify the behavior of a specific CLI call, or add extra calls to an existing subcommand. The `config` CLI shows just that. + + +## Configuration + +Only extending the configuration is allowed. For example, you can add extra fields for the users + +You need to: + +* Redefine a new configuration [schema](gubbins/gubbins-core/src/gubbins/core/config/schema.py) +* Declare this new class in the `diracx` entrypoint +```toml +[project.entry-points."diracx"] +config = "gubbins.core.config.schema:Config" +``` +* Redefine a dependency for your routers to use (see [example](gubbins/gubbins-routers/src/gubbins/routers/dependencies.py)) + + +## Properties + +Properties can only be added. This is done in the `gubbins-core` `pyproject.toml` + + +```toml +[project.entry-points."diracx"] +properties_module = "gubbins.core.properties" +``` + +[properties](gubbins/gubbins-core/src/gubbins/core/properties.py) illustrates how to do it + + +## Writing tests + +`diracx-testing` package contains a lot of useful tools for testing `diracx` and its extensions. It is recommended to have a `testing` package for your extension which contains at least a check that your `DIRACX_EXTENSIONS` environment variable is set (see [example](gubbins/gubbins-testing/src/gubbins/testing/__init__.py)) + +Note that even if you have your own `testing` package depending on `diracx-testing`, you should specify it when calling `pytest` (see various `pyprojects.toml`) + + +## Work on gubbins + +Working on this test extension is tricky as it is a subdirectory. 
if you want to debug it you need to: + +* build the `gubbins` docker images (or download an artifact) +* copy the files somewhere else +* edit the pyproject +* run_demo + +This is what is done in the [CI](../.github/workflows/extensions.yml) + +## Deploying on kubernetes + +The CI comes with a specific [`values.yaml`](gubbins_values.yaml). diff --git a/extensions/containers/client/Dockerfile b/extensions/containers/client/Dockerfile new file mode 100644 index 00000000..c48f930a --- /dev/null +++ b/extensions/containers/client/Dockerfile @@ -0,0 +1,14 @@ +FROM ghcr.io/diracgrid/diracx/client:dev + +#Extension +ENV DIRACX_EXTENSIONS="gubbins,diracx" +ENV GUBBINS_IMAGE_PACKAGES=core,client,api,cli,. + +RUN --mount=type=bind,source=.,target=/bindmount GUBBINS_CUSTOM_SOURCE_PREFIXES=/bindmount /entrypoint.sh bash -exc "ls /bindmount && echo 'Running pip check' && pip check" + +# # In many clusters the container is ran as a random uid for security reasons. +# # If we mark the conda directory as group 0 and give it group write permissions +# # then we're still able to manage the environment from inside the container. +USER 0 +RUN chown -R $MAMBA_USER:0 /opt/conda && chmod -R g=u /opt/conda +USER $MAMBA_USER diff --git a/extensions/containers/services/Dockerfile b/extensions/containers/services/Dockerfile new file mode 100644 index 00000000..84481656 --- /dev/null +++ b/extensions/containers/services/Dockerfile @@ -0,0 +1,16 @@ +FROM ghcr.io/diracgrid/diracx/services:dev + +#Extension +ENV DIRACX_EXTENSIONS="gubbins,diracx" +ENV GUBBINS_IMAGE_PACKAGES=core,db,routers,client + +ARG EXTRA_PACKAGES_TO_INSTALL + +RUN --mount=type=bind,source=.,target=/bindmount GUBBINS_CUSTOM_SOURCE_PREFIXES=/bindmount /entrypoint.sh bash -exc "ls /bindmount && echo 'Running pip check' && pip check" + +# # In many clusters the container is ran as a random uid
+# # If we mark the conda directory as group 0 and give it group write permissions +# # then we're still able to manage the environment from inside the container. +USER 0 +RUN chown -R $MAMBA_USER:0 /opt/conda && chmod -R g=u /opt/conda +USER $MAMBA_USER diff --git a/extensions/gubbins/environment.yml b/extensions/gubbins/environment.yml new file mode 100644 index 00000000..f33f449b --- /dev/null +++ b/extensions/gubbins/environment.yml @@ -0,0 +1,10 @@ +name: gubbins-dev +channels: + - conda-forge + - nodefaults +dependencies: + - python + - pip + - m2crypto + - python-gfal2 + - mypy diff --git a/extensions/gubbins/gubbins-cli/pyproject.toml b/extensions/gubbins/gubbins-cli/pyproject.toml new file mode 100644 index 00000000..1c214a9b --- /dev/null +++ b/extensions/gubbins/gubbins-cli/pyproject.toml @@ -0,0 +1,52 @@ +[project] +name = "gubbins-cli" +description = "CLI for the gubbins diracx extension" +readme = "README.md" +requires-python = ">=3.11" +keywords = [] +license = {text = "GPL-3.0-only"} +classifiers = [ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering", + "Topic :: System :: Distributed Computing", +] +dependencies = ["diracx-cli"] +dynamic = ["version"] + +[project.optional-dependencies] +testing = ["diracx-cli[testing]", "diracx-testing"] +types = [ + "types-PyYAML", +] + + +[project.entry-points."diracx.cli"] +lollygag = "gubbins.cli.lollygag:app" +config = "gubbins.cli.config:app" + + +[tool.setuptools.packages.find] +where = ["src"] + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +root = "../../.." 
+ +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = [ + "-v", + "--cov=diracx.cli", "--cov-report=term-missing", + "-pdiracx.testing", + "-pgubbins.testing", + "--import-mode=importlib", +] +asyncio_mode = "auto" +markers = [ + "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", +] diff --git a/extensions/gubbins/gubbins-cli/src/gubbins/cli/__init__.py b/extensions/gubbins/gubbins-cli/src/gubbins/cli/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/extensions/gubbins/gubbins-cli/src/gubbins/cli/config.py b/extensions/gubbins/gubbins-cli/src/gubbins/cli/config.py new file mode 100644 index 00000000..5f6b8138 --- /dev/null +++ b/extensions/gubbins/gubbins-cli/src/gubbins/cli/config.py @@ -0,0 +1,20 @@ +""" +This just shows how to extend and modify an existing CLI +""" + +# In order to extend it, just import the app from DiracX +from diracx.cli.config import app + + +@app.async_command() +async def gubbins_extra(): + """ + Add an extra command + """ + print("Adding something extra") + + +@app.async_command() +async def dump(): + """Replace a command""" + print("I replaced the original dump") diff --git a/extensions/gubbins/gubbins-cli/src/gubbins/cli/lollygag.py b/extensions/gubbins/gubbins-cli/src/gubbins/cli/lollygag.py new file mode 100644 index 00000000..6632959c --- /dev/null +++ b/extensions/gubbins/gubbins-cli/src/gubbins/cli/lollygag.py @@ -0,0 +1,38 @@ +""" +This shows how to create a new subcommand +""" + +from diracx.cli.utils import AsyncTyper + +from gubbins.client.aio import GubbinsClient + +app = AsyncTyper() + + +@app.command() +def hello(): + """ + This is just to make sure that the CLI extension mechanism works + """ + print("Shagadelic, Baby!") + + +@app.async_command() +async def get_owners(): + """ + This makes a proper use of the GubbinsClient to call + a method specific to Gubbins + """ + async with GubbinsClient() as api: + owners = await 
api.lollygag.get_owner_object() + print(owners) + + +@app.async_command() +async def sensei(): + """ + This function is only here to test the GUBBINS_SENSEI property + """ + async with GubbinsClient() as api: + secrets = await api.lollygag.get_gubbins_secrets() + print(secrets) diff --git a/extensions/gubbins/gubbins-cli/src/gubbins/cli/py.typed b/extensions/gubbins/gubbins-cli/src/gubbins/cli/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/extensions/gubbins/gubbins-cli/tests/test_gubbins_cli.py b/extensions/gubbins/gubbins-cli/tests/test_gubbins_cli.py new file mode 100644 index 00000000..dbf10bc4 --- /dev/null +++ b/extensions/gubbins/gubbins-cli/tests/test_gubbins_cli.py @@ -0,0 +1,62 @@ +import os + +from diracx.cli import app +from typer.testing import CliRunner + +runner = CliRunner() + + +def test_lollygag_cli(): + """ + Makes sure that lollygag is available in the dirac cli + """ + result = runner.invoke(app, ["--help"], env=os.environ) + assert result.exit_code == 0, result.output + assert "lollygag" in result.output, result.output + + +def test_lollygag_hello(): + """ + We can call a trivial lollygag function + """ + result = runner.invoke(app, ["lollygag", "hello"], env=os.environ) + assert result.exit_code == 0, result.output + assert "Baby" in result.output, result.output + + +def test_lollygag_owner(with_cli_login): + """ + We can call a more complex function, involving the client and + calling the router + """ + result = runner.invoke(app, ["lollygag", "get-owners"], env=os.environ) + assert result.exit_code == 0, result.output + assert "[]" in result.output, result.output + + +def test_lollygag_sensei(with_cli_login): + """ + We expect to not be able to call that method because our token + does not have the correct property + """ + result = runner.invoke(app, ["lollygag", "sensei"], env=os.environ) + assert result.exit_code != 0, result.output + assert result.exception.status_code == 403 + + +def test_config_extra(): + """ + 
Test adding an extra function + """ + result = runner.invoke(app, ["config", "gubbins-extra"], env=os.environ) + assert result.exit_code == 0, result.output + assert "Adding something extra" in result.output, result.output + + +def test_config_overwrite(): + """ + Test overwriting a function + """ + result = runner.invoke(app, ["config", "dump"], env=os.environ) + assert result.exit_code == 0, result.output + assert "I replaced the original dump" in result.output, result.output diff --git a/extensions/gubbins/gubbins-client/pyproject.toml b/extensions/gubbins/gubbins-client/pyproject.toml new file mode 100644 index 00000000..fdb290d8 --- /dev/null +++ b/extensions/gubbins/gubbins-client/pyproject.toml @@ -0,0 +1,51 @@ +[project] +name = "gubbins-client" +description = "TODO" +readme = "README.md" +requires-python = ">=3.10" +keywords = [] +license = { text = "GPL-3.0-only" } +classifiers = [ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering", + "Topic :: System :: Distributed Computing", +] +dependencies = ["diracx-client"] +dynamic = ["version"] + +[project.optional-dependencies] +testing = ["diracx-client[testing]", "diracx-testing"] +types = ["types-requests"] + +[tool.setuptools.packages.find] +where = ["src"] + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + + +[project.entry-points."diracx"] +client_class = "gubbins.client.generated._client:Dirac" +aio_client_class = "gubbins.client.generated.aio._client:Dirac" + + +[tool.setuptools_scm] +root = "../../.." 
+ +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = [ + "-v", + "--cov=gubbins.client", + "--cov-report=term-missing", + "-pgubbins.testing", + "-pdiracx.testing", + "--import-mode=importlib", +] +asyncio_mode = "auto" +markers = [ + "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", +] diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/__init__.py b/extensions/gubbins/gubbins-client/src/gubbins/client/__init__.py new file mode 100644 index 00000000..addc4d8b --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/__init__.py @@ -0,0 +1,9 @@ +""" +This init file exposes what we want exported, as well as initializate the client extension magic +""" + +# This must be here in order to initialize the MetaPathFinder +import diracx.client # noqa + +from .generated import * # pylint: disable=unused-wildcard-import # noqa +from .patches import GubbinsClient # noqa diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/aio.py b/extensions/gubbins/gubbins-client/src/gubbins/client/aio.py new file mode 100644 index 00000000..3ddd7881 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/aio.py @@ -0,0 +1,2 @@ +from .generated.aio import * # pylint: disable=unused-wildcard-import # noqa +from .patches.aio import GubbinsClient # noqa diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/__init__.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/__init__.py new file mode 100644 index 00000000..5b70b981 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/__init__.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._client import Dirac + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "Dirac", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_client.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_client.py new file mode 100644 index 00000000..e138c1c7 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_client.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from . import models as _models +from ._configuration import DiracConfiguration +from ._serialization import Deserializer, Serializer +from .operations import ( + AuthOperations, + ConfigOperations, + JobsOperations, + LollygagOperations, + WellKnownOperations, +) + + +class Dirac: # pylint: disable=client-accepts-api-version-keyword + """Dirac. 
+ + :ivar well_known: WellKnownOperations operations + :vartype well_known: generated.operations.WellKnownOperations + :ivar auth: AuthOperations operations + :vartype auth: generated.operations.AuthOperations + :ivar config: ConfigOperations operations + :vartype config: generated.operations.ConfigOperations + :ivar jobs: JobsOperations operations + :vartype jobs: generated.operations.JobsOperations + :ivar lollygag: LollygagOperations operations + :vartype lollygag: generated.operations.LollygagOperations + :keyword endpoint: Service URL. Required. Default value is "". + :paramtype endpoint: str + """ + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, *, endpoint: str = "", **kwargs: Any + ) -> None: + self._config = DiracConfiguration(**kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + ( + policies.SensitiveHeaderCleanupPolicy(**kwargs) + if self._config.redirect_policy + else None + ), + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient( + base_url=endpoint, policies=_policies, **kwargs + ) + + client_models = { + k: v for k, v in _models.__dict__.items() if isinstance(v, type) + } + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.well_known = WellKnownOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.auth = AuthOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.config = ConfigOperations( + 
self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.lollygag = LollygagOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_configuration.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_configuration.py new file mode 100644 index 00000000..0a17fd2e --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_configuration.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any + +from azure.core.pipeline import policies + +VERSION = "unknown" + + +class DiracConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for Dirac. + + Note that all parameters used to create this instance are saved as instance + attributes. 
+ """ + + def __init__(self, **kwargs: Any) -> None: + + kwargs.setdefault("sdk_moniker", "dirac/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get( + "user_agent_policy" + ) or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy( + **kwargs + ) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get( + "logging_policy" + ) or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get( + "http_logging_policy" + ) or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get( + "custom_hook_policy" + ) or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy( + **kwargs + ) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_patch.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_patch.py new file mode 100644 index 00000000..1ed6e8c1 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_patch.py @@ -0,0 +1,26 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + + +__all__: List[str] = [ + # "GubbinsClient" +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ + + +# from ..patches import GubbinsClient diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_serialization.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_serialization.py new file mode 100644 index 00000000..a058c396 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_serialization.py @@ -0,0 +1,2274 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + TypeVar, + MutableMapping, + Type, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text( + cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None + ) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. 
+ data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError( + "JSON is invalid: {}".format(err), err + ) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError( + "Cannot deserialize content-type: {}".format(content_type) + ) + + @classmethod + def deserialize_from_http_generics( + cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping + ) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. 
+ :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0. + + :param datetime.datetime dt: The datetime + :returns: The offset + :rtype: datetime.timedelta + """ + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation. + + :param datetime.datetime dt: The datetime + :returns: The timestamp representation + :rtype: str + """ + return "Z" + + def dst(self, dt): + """No daylight saving for UTC. + + :param datetime.datetime dt: The datetime + :returns: The daylight saving time + :rtype: datetime.timedelta + """ + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset # type: ignore +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. 
+ Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset) -> None: + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning( + "%s is not a known attribute of class %s and will be ignored", + k, + self.__class__, + ) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning( + "Readonly attribute %s will be ignored in class %s", + k, + self.__class__, + ) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. 
+ + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node( + xml_map.get("name", cls.__name__), + xml_map.get("prefix", None), + xml_map.get("ns", None), + ) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[ + [str, Dict[str, Any], Any], Any + ] = attribute_transformer, + **kwargs: Any, + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. 
Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = { + k: v for k, v in models.__dict__.items() if isinstance(v, type) + } + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize( + cls: Type[ModelType], data: Any, content_type: Optional[str] = None + ) -> ModelType: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises: DeserializationError if something went wrong + :rtype: ModelType + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + :rtype: ModelType + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update( + objects[valuetype]._flatten_subtype(key, objects) + ) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + Remove the polymorphic key from the initial data. 
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.pop( + rest_api_response_key, None + ) or response.pop(subtype_key, None) + else: + subtype_value = xml_key_extractor( + subtype_key, cls._attribute_map[subtype_key], response + ) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning( + "Discriminator %s is absent or null, use base class %s.", + subtype_key, + cls.__name__, + ) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer(object): # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + 
self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault( + "is_xml", target_obj.is_xml_model() + ) + + serialized = {} + if is_xml_model_serialization: + serialized = ( + target_obj._create_xml_node() + ) # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if ( + not keep_readonly + and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False) + ): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer( + attr, attr_desc.copy(), orig_attr + ) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data( + orig_attr, attr_desc["type"], **kwargs + ) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format( + attr_name, class_name, str(target_obj) + ) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. 
+ :raises: ValueError if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault( + "is_xml", internal_data_type.is_xml_model() + ) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize( + data_type, data + ) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError( + "Unable to build a model: " + str(err) + ) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter( + data, internal_data_type, do_quote=do_quote, **kwargs + ) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers( + cls, data_type, **kwargs + ): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. 
+ + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = [ + "" if s is None else quote(str(s), safe="") for s in serialized + ] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node( + xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None) + ) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node( + node_name, + xml_desc.get("prefix", None), + xml_desc.get("ns", None), + ) + if el is not None: # Otherwise it writes 
    def serialize_dict(self, attr, dict_type, **kwargs):
        """Serialize a dictionary of objects.

        :param dict attr: Object to be serialized.
        :param str dict_type: Type of object in the dictionary.
        :rtype: dict
        :return: serialized dictionary
        """
        serialization_ctxt = kwargs.get("serialization_ctxt", {})
        serialized = {}
        for key, value in attr.items():
            try:
                serialized[self.serialize_unicode(key)] = self.serialize_data(
                    value, dict_type, **kwargs
                )
            except ValueError as err:
                # SerializationError subclasses ValueError: real serialization
                # failures propagate, any other ValueError degrades to None.
                if isinstance(err, SerializationError):
                    raise
                serialized[self.serialize_unicode(key)] = None

        if "xml" in serialization_ctxt:
            # XML serialization is more complicated
            xml_desc = serialization_ctxt["xml"]
            xml_name = xml_desc["name"]

            final_result = _create_xml_node(
                xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)
            )
            for key, value in serialized.items():
                ET.SubElement(final_result, key).text = value
            return final_result

        return serialized

    def serialize_object(
        self, attr, **kwargs
    ):  # pylint: disable=too-many-return-statements
        """Serialize a generic object.
        This will be handled as a dictionary. If object passed in is not
        a basic type (str, int, float, dict, list) it will simply be
        cast to str.

        :param dict attr: Object to be serialized.
        :rtype: dict or str
        :return: serialized object
        """
        if attr is None:
            return None
        if isinstance(attr, ET.Element):
            # Already an XML node, nothing to do.
            return attr
        # type() (not isinstance) on purpose: the order of these checks and
        # exact-type matching decide which serializer runs (e.g. bool vs int).
        obj_type = type(attr)
        if obj_type in self.basic_types:
            return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
        if obj_type is _long_type:
            return self.serialize_long(attr)
        if obj_type is str:
            return self.serialize_unicode(attr)
        if obj_type is datetime.datetime:
            return self.serialize_iso(attr)
        if obj_type is datetime.date:
            return self.serialize_date(attr)
        if obj_type is datetime.time:
            return self.serialize_time(attr)
        if obj_type is datetime.timedelta:
            return self.serialize_duration(attr)
        if obj_type is decimal.Decimal:
            return self.serialize_decimal(attr)

        # If it's a model or I know this dependency, serialize as a Model
        if obj_type in self.dependencies.values() or isinstance(attr, Model):
            return self._serialize(attr)

        if obj_type == dict:
            serialized = {}
            for key, value in attr.items():
                try:
                    serialized[self.serialize_unicode(key)] = self.serialize_object(
                        value, **kwargs
                    )
                except ValueError:
                    serialized[self.serialize_unicode(key)] = None
            return serialized

        if obj_type == list:
            serialized = []
            for obj in attr:
                try:
                    serialized.append(self.serialize_object(obj, **kwargs))
                except ValueError:
                    # Unserializable list items are silently dropped (vs dict,
                    # where they become None) — keep this asymmetry as-is.
                    pass
            return serialized
        # Last resort for unknown types: stringify.
        return str(attr)

    @staticmethod
    def serialize_enum(attr, enum_obj=None):
        """Serialize an Enum member (or raw value) to its enum value.

        Falls back to a case-insensitive match against the enum's values.

        :param attr: Enum member or raw value to serialize.
        :param enum_obj: Target Enum class used to validate the value.
        :return: the validated enum value
        :raises SerializationError: if the value matches no enum member.
        """
        try:
            result = attr.value
        except AttributeError:
            # Not an Enum member: treat as a raw value.
            result = attr
        try:
            enum_obj(result)  # type: ignore
            return result
        except ValueError as exc:
            for enum_value in enum_obj:  # type: ignore
                if enum_value.value.lower() == str(attr).lower():
                    return enum_value.value
            error = "{!r} is not valid value for enum {!r}"
            raise SerializationError(error.format(attr, enum_obj)) from exc

    @staticmethod
    def serialize_bytearray(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize bytearray into base-64 string.

        :param attr: bytes-like object to be serialized.
        :rtype: str
        :return: serialized base64
        """
        return b64encode(attr).decode()
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
    @staticmethod
    def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize Datetime object into RFC-1123 formatted string.

        :param Datetime attr: Object to be serialized.
        :rtype: str
        :raises: TypeError if format invalid.
        :return: serialized rfc
        """
        try:
            if not attr.tzinfo:
                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
            utc = attr.utctimetuple()
        except AttributeError as exc:
            raise TypeError("RFC1123 object must be valid Datetime object.") from exc

        # Day/month names come from the class-level English tables, not the
        # locale — RFC 1123 requires English names.
        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
            Serializer.days[utc.tm_wday],
            utc.tm_mday,
            Serializer.months[utc.tm_mon],
            utc.tm_year,
            utc.tm_hour,
            utc.tm_min,
            utc.tm_sec,
        )

    @staticmethod
    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize Datetime object into ISO-8601 formatted string.

        :param Datetime attr: Object to be serialized (str input is parsed first).
        :rtype: str
        :raises: SerializationError if format invalid.
        :return: serialized iso
        """
        if isinstance(attr, str):
            attr = isodate.parse_datetime(attr)
        try:
            if not attr.tzinfo:
                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
            utc = attr.utctimetuple()
            if utc.tm_year > 9999 or utc.tm_year < 1:
                raise OverflowError("Hit max or min date")

            # Pad to 6 digits, drop trailing zeros, then re-pad to at least 3,
            # so the fraction is emitted with 3-6 significant digits.
            # NOTE(review): for microsecond == 0 this yields "000" (truthy), so
            # ".000" is always appended — matches upstream behavior.
            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
            if microseconds:
                microseconds = "." + microseconds
            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
                utc.tm_year,
                utc.tm_mon,
                utc.tm_mday,
                utc.tm_hour,
                utc.tm_min,
                utc.tm_sec,
            )
            return date + microseconds + "Z"
        except (ValueError, OverflowError) as err:
            msg = "Unable to serialize datetime object."
            raise SerializationError(msg) from err
        except AttributeError as err:
            msg = "ISO-8601 object must be valid Datetime object."
            raise TypeError(msg) from err
    @staticmethod
    def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize Datetime object into IntTime format.
        This is represented as seconds.

        :param Datetime attr: Object to be serialized.
        :rtype: int
        :raises: SerializationError if format invalid
        :return: serialized unix
        """
        if isinstance(attr, int):
            # Already a unix timestamp, pass through.
            return attr
        try:
            if not attr.tzinfo:
                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
            return int(calendar.timegm(attr.utctimetuple()))
        except AttributeError as exc:
            raise TypeError("Unix time object must be valid Datetime object.") from exc


def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Extract a value from ``data`` following the (possibly dotted/flattened)
    JSON path in ``attr_desc["key"]``.

    :param str attr: The attribute name (unused; the key drives extraction).
    :param dict attr_desc: Attribute description, must contain "key".
    :param dict data: The data to extract from.
    :return: The extracted value, or None if any intermediate node is None.
    """
    key = attr_desc["key"]
    working_data = data

    while "." in key:
        # Need the cast, as for some reasons "split" is typed as list[str | Any]
        dict_keys = cast(List[str], _FLATTEN.split(key))
        if len(dict_keys) == 1:
            key = _decode_attribute_map_key(dict_keys[0])
            break
        working_key = _decode_attribute_map_key(dict_keys[0])
        # NOTE(review): on a missing intermediate key this falls back to the
        # ROOT data, not None — upstream behavior, kept as-is.
        working_data = working_data.get(working_key, data)
        if working_data is None:
            # If at any point while following flatten JSON path see None, it means
            # that all properties under are None as well
            return None
        key = ".".join(dict_keys[1:])

    return working_data.get(key)


def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
    attr, attr_desc, data
):
    """Case-insensitive variant of ``rest_key_extractor``.

    :param str attr: The attribute name (unused; the key drives extraction).
    :param dict attr_desc: Attribute description, must contain "key".
    :param dict data: The data to extract from.
    :return: The extracted value, or None when the path dead-ends.
    """
    key = attr_desc["key"]
    working_data = data

    while "." in key:
        dict_keys = _FLATTEN.split(key)
        if len(dict_keys) == 1:
            key = _decode_attribute_map_key(dict_keys[0])
            break
        working_key = _decode_attribute_map_key(dict_keys[0])
        working_data = attribute_key_case_insensitive_extractor(
            working_key, None, working_data
        )
        if working_data is None:
            # If at any point while following flatten JSON path see None, it means
            # that all properties under are None as well
            return None
        key = ".".join(dict_keys[1:])

    # Implicitly returns None when working_data is falsy (empty dict).
    if working_data:
        return attribute_key_case_insensitive_extractor(key, None, working_data)
def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Extract the attribute in "data" based on the last part of the JSON path key.

    :param str attr: The attribute to extract
    :param dict attr_desc: The attribute description
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted attribute
    """
    path_parts = _FLATTEN.split(attr_desc["key"])
    return attribute_key_extractor(path_parts[-1], None, data)


def last_rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument
    attr, attr_desc, data
):
    """Extract the attribute in "data" based on the last part of the JSON path key.

    This is the case insensitive version of "last_rest_key_extractor"
    :param str attr: The attribute to extract
    :param dict attr_desc: The attribute description
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted attribute
    """
    path_parts = _FLATTEN.split(attr_desc["key"])
    return attribute_key_case_insensitive_extractor(path_parts[-1], None, data)
def attribute_key_extractor(attr, _, data):
    """Plain dict lookup of ``attr`` in ``data`` (None when absent)."""
    return data.get(attr)


def attribute_key_case_insensitive_extractor(attr, _, data):
    """Case-insensitive dict lookup: first key equal to ``attr`` ignoring case."""
    wanted = attr.lower()
    matched = next((key for key in data if key.lower() == wanted), None)
    return data.get(matched)


def _extract_name_from_internal_type(internal_type):
    """Given an internal type XML description, extract correct XML name with namespace.

    :param dict internal_type: A model type
    :rtype: tuple
    :returns: A tuple XML name + namespace dict
    """
    type_xml_map = getattr(internal_type, "_xml_map", {})
    name = type_xml_map.get("name", internal_type.__name__)
    namespace = type_xml_map.get("ns", None)
    if namespace:
        name = "{{{}}}{}".format(namespace, name)
    return name


def xml_key_extractor(
    attr, attr_desc, data
):  # pylint: disable=unused-argument,too-many-return-statements
    """Extract an attribute value from an XML ElementTree node.

    Returns None for non-XML data so the next extractor in the chain can run.
    """
    # JSON dicts are another extractor's job; anything that is not an
    # ElementTree element cannot be handled here either.
    if isinstance(data, dict):
        return None
    if not isinstance(data, ET.Element):
        return None

    xml_desc = attr_desc.get("xml", {})
    xml_name = xml_desc.get("name", attr_desc["key"])

    # Look for children
    is_iter_type = attr_desc["type"].startswith("[")
    is_wrapped = xml_desc.get("wrapped", False)
    internal_type = attr_desc.get("internalType", None)
    internal_type_xml_map = getattr(internal_type, "_xml_map", {})

    # Integrate namespace if necessary
    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
    if xml_ns:
        xml_name = "{{{}}}{}".format(xml_ns, xml_name)

    # An XML attribute: plain lookup on the node.
    if xml_desc.get("attr", False):
        return data.get(xml_name)

    # x-ms-text: the node's own text content.
    if xml_desc.get("text", False):
        return data.text

    # Scenario where I take the local name:
    # - Wrapped node
    # - Internal type is an enum (considered basic types)
    # - Internal type has no XML/Name node
    if is_wrapped or (
        internal_type
        and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)
    ):
        matches = data.findall(xml_name)
    # If internal type has a local name and it's not a list, I use that name
    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
        xml_name = _extract_name_from_internal_type(internal_type)
        matches = data.findall(xml_name)
    else:
        # That's an array
        if internal_type:
            # Complex type, ignore itemsName and use the complex type name
            items_name = _extract_name_from_internal_type(internal_type)
        else:
            items_name = xml_desc.get("itemsName", xml_name)
        matches = data.findall(items_name)

    if not matches:
        if is_iter_type:
            if is_wrapped:
                return None  # is_wrapped no node, we want None
            return []  # not wrapped, assume empty list
        return None  # Assume it's not there, maybe an optional node.

    if is_iter_type:
        # Not wrapped: every found child is an item.
        if not is_wrapped:
            return matches
        # Iter and wrapped, should have found one node only (the wrap one)
        if len(matches) != 1:
            raise DeserializationError(
                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(  # pylint: disable=line-too-long
                    xml_name
                )
            )
        return list(matches[0])  # Might be empty list and that's ok.

    # Here it's not a itertype, we should have found one element only or empty
    if len(matches) > 1:
        raise DeserializationError(
            "Find several XML '{}' where it was not expected".format(xml_name)
        )
    return matches[0]
class Deserializer(object):
    """Response object model deserializer.

    :param dict classes: Class type dictionary for deserializing complex types.
    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
    """

    # Maps exact builtin types to the basic type-name used by deserialize_basic.
    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}

    # Loose ISO-8601 shape check used by deserialize_iso before real parsing.
    valid_date = re.compile(
        r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?"
    )

    def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
        # Dispatch table from swagger format name to deserializer callable.
        self.deserialize_type = {
            "iso-8601": Deserializer.deserialize_iso,
            "rfc-1123": Deserializer.deserialize_rfc,
            "unix-time": Deserializer.deserialize_unix,
            "duration": Deserializer.deserialize_duration,
            "date": Deserializer.deserialize_date,
            "time": Deserializer.deserialize_time,
            "decimal": Deserializer.deserialize_decimal,
            "long": Deserializer.deserialize_long,
            "bytearray": Deserializer.deserialize_bytearray,
            "base64": Deserializer.deserialize_base64,
            "object": self.deserialize_object,
            "[]": self.deserialize_iter,
            "{}": self.deserialize_dict,
        }
        # Types that may arrive already deserialized and should pass through.
        # NOTE(review): "iso-8601" maps to (datetime.datetime) which is NOT a
        # tuple — isinstance still accepts a bare type, so this works.
        self.deserialize_expected_types = {
            "duration": (isodate.Duration, datetime.timedelta),
            "iso-8601": (datetime.datetime),
        }
        self.dependencies: Dict[str, type] = dict(classes) if classes else {}
        self.key_extractors = [rest_key_extractor, xml_key_extractor]
        # Additional properties only works if the "rest_key_extractor" is used to
        # extract the keys. Making it to work whatever the key extractor is too much
        # complicated, with no real scenario for now.
        # So adding a flag to disable additional properties detection. This flag should be
        # used if your expect the deserialization to NOT come from a JSON REST syntax.
        # Otherwise, result are unexpected
        self.additional_properties_detection = True

    def __call__(self, target_obj, response_data, content_type=None):
        """Call the deserializer to process a REST response.

        :param str target_obj: Target data type to deserialize to.
        :param requests.Response response_data: REST response object.
        :param str content_type: Swagger "produces" if available.
        :raises: DeserializationError if deserialization fails.
        :return: Deserialized object.
        :rtype: object
        """
        data = self._unpack_content(response_data, content_type)
        return self._deserialize(target_obj, data)

    def _deserialize(
        self, target_obj, data
    ):  # pylint: disable=inconsistent-return-statements
        """Call the deserializer on a model.

        Data needs to be already deserialized as JSON or XML ElementTree

        :param str target_obj: Target data type to deserialize to.
        :param object data: Object to deserialize.
        :raises: DeserializationError if deserialization fails.
        :return: Deserialized object.
        :rtype: object
        """
        # This is already a model, go recursive just in case
        if hasattr(data, "_attribute_map"):
            constants = [
                name
                for name, config in getattr(data, "_validation", {}).items()
                if config.get("constant")
            ]
            try:
                for (
                    attr,
                    mapconfig,
                ) in data._attribute_map.items():  # pylint: disable=protected-access
                    if attr in constants:
                        continue
                    value = getattr(data, attr)
                    if value is None:
                        continue
                    local_type = mapconfig["type"]
                    internal_data_type = local_type.strip("[]{}")
                    # NOTE(review): isinstance(internal_data_type, Enum) tests a
                    # *string* against Enum and is always False — upstream quirk,
                    # kept byte-identical.
                    if internal_data_type not in self.dependencies or isinstance(
                        internal_data_type, Enum
                    ):
                        continue
                    setattr(data, attr, self._deserialize(local_type, value))
                return data
            except AttributeError:
                # Implicitly returns None on a malformed model.
                return

        response, class_name = self._classify_target(target_obj, data)

        if isinstance(response, str):
            return self.deserialize_data(data, response)
        if isinstance(response, type) and issubclass(response, Enum):
            return self.deserialize_enum(data, response)

        if data is None or data is CoreNull:
            return data
        try:
            attributes = response._attribute_map  # type: ignore # pylint: disable=protected-access
            d_attrs = {}
            for attr, attr_desc in attributes.items():
                # Check empty string. If it's not empty, someone has a real "additionalProperties"...
                if attr == "additional_properties" and attr_desc["key"] == "":
                    continue
                raw_value = None
                # Enhance attr_desc with some dynamic data
                attr_desc = attr_desc.copy()  # Do a copy, do not change the real one
                internal_data_type = attr_desc["type"].strip("[]{}")
                if internal_data_type in self.dependencies:
                    attr_desc["internalType"] = self.dependencies[internal_data_type]

                # First extractor that yields a value wins; later conflicting
                # extractions are logged and ignored.
                for key_extractor in self.key_extractors:
                    found_value = key_extractor(attr, attr_desc, data)
                    if found_value is not None:
                        if raw_value is not None and raw_value != found_value:
                            msg = (
                                "Ignoring extracted value '%s' from %s for key '%s'"
                                " (duplicate extraction, follow extractors order)"
                            )
                            _LOGGER.warning(msg, found_value, key_extractor, attr)
                            continue
                        raw_value = found_value

                value = self.deserialize_data(raw_value, attr_desc["type"])
                d_attrs[attr] = value
        except (AttributeError, TypeError, KeyError) as err:
            msg = "Unable to deserialize to object: " + class_name  # type: ignore
            raise DeserializationError(msg) from err
        additional_properties = self._build_additional_properties(attributes, data)
        return self._instantiate_model(response, d_attrs, additional_properties)

    def _build_additional_properties(self, attribute_map, data):
        # Collect keys present in the payload but unknown to the attribute map.
        if not self.additional_properties_detection:
            return None
        if (
            "additional_properties" in attribute_map
            and attribute_map.get("additional_properties", {}).get("key") != ""
        ):
            # Check empty string. If it's not empty, someone has a real "additionalProperties"
            return None
        if isinstance(data, ET.Element):
            data = {el.tag: el.text for el in data}

        known_keys = {
            _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
            for desc in attribute_map.values()
            if desc["key"] != ""
        }
        present_keys = set(data.keys())
        missing_keys = present_keys - known_keys
        return {key: data[key] for key in missing_keys}
    def _classify_target(self, target, data):
        """Check to see whether the deserialization target object can
        be classified into a subclass.
        Once classification has been determined, initialize object.

        :param str target: The target object type to deserialize to.
        :param str/dict data: The response data to deserialize.
        :return: The classified target object and its class name.
        :rtype: tuple
        """
        if target is None:
            return None, None

        if isinstance(target, str):
            try:
                target = self.dependencies[target]
            except KeyError:
                # Unknown type name: return the string itself; the caller then
                # routes it through deserialize_data.
                return target, target

        try:
            target = target._classify(data, self.dependencies)  # type: ignore # pylint: disable=protected-access
        except AttributeError:
            pass  # Target is not a Model, no classify
        return target, target.__class__.__name__  # type: ignore

    def failsafe_deserialize(self, target_obj, data, content_type=None):
        """Ignores any errors encountered in deserialization,
        and falls back to not deserializing the object. Recommended
        for use in error deserialization, as we want to return the
        HttpResponseError to users, and not have them deal with
        a deserialization error.

        :param str target_obj: The target object type to deserialize to.
        :param str/dict data: The response data to deserialize.
        :param str content_type: Swagger "produces" if available.
        :return: Deserialized object.
        :rtype: object
        """
        try:
            return self(target_obj, data, content_type=content_type)
        except:  # pylint: disable=bare-except
            # Deliberate best-effort swallow: error-path deserialization must
            # never mask the original HTTP error.
            _LOGGER.debug(
                "Ran into a deserialization error. Ignoring since this is failsafe deserialization",
                exc_info=True,
            )
            return None

    @staticmethod
    def _unpack_content(raw_data, content_type=None):
        """Extract the correct structure for deserialization.

        If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
        if we can't, raise. Your Pipeline should have a RawDeserializer.

        If not a pipeline response and raw_data is bytes or string, use content-type
        to decode it. If no content-type, try JSON.

        If raw_data is something else, bypass all logic and return it directly.

        :param obj raw_data: Data to be processed.
        :param str content_type: How to parse if raw_data is a string/bytes.
        :raises JSONDecodeError: If JSON is requested and parsing is impossible.
        :raises UnicodeDecodeError: If bytes is not UTF8
        :rtype: object
        :return: Unpacked content.
        """
        # Assume this is enough to detect a Pipeline Response without importing it
        context = getattr(raw_data, "context", {})
        if context:
            if RawDeserializer.CONTEXT_NAME in context:
                return context[RawDeserializer.CONTEXT_NAME]
            raise ValueError(
                "This pipeline didn't have the RawDeserializer policy; can't deserialize"
            )

        # Assume this is enough to recognize universal_http.ClientResponse without importing it
        if hasattr(raw_data, "body"):
            return RawDeserializer.deserialize_from_http_generics(
                raw_data.text(), raw_data.headers
            )

        # Assume this enough to recognize requests.Response without importing it.
        if hasattr(raw_data, "_content_consumed"):
            return RawDeserializer.deserialize_from_http_generics(
                raw_data.text, raw_data.headers
            )

        if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
            return RawDeserializer.deserialize_from_text(raw_data, content_type)  # type: ignore
        return raw_data

    def _instantiate_model(self, response, attrs, additional_properties=None):
        """Instantiate a response model passing in deserialized args.

        :param Response response: The response model class.
        :param dict attrs: The deserialized response attributes.
        :param dict additional_properties: Additional properties to be set.
        :rtype: Response
        :return: The instantiated response model.
        """
        if callable(response):
            subtype = getattr(response, "_subtype_map", {})
            try:
                # Readonly and constant attributes cannot be passed to the
                # constructor; readonly ones are set after instantiation.
                readonly = [
                    k
                    for k, v in response._validation.items()
                    if v.get("readonly")  # pylint: disable=protected-access
                ]
                const = [
                    k
                    for k, v in response._validation.items()
                    if v.get("constant")  # pylint: disable=protected-access
                ]
                kwargs = {
                    k: v
                    for k, v in attrs.items()
                    if k not in subtype and k not in readonly + const
                }
                response_obj = response(**kwargs)
                for attr in readonly:
                    setattr(response_obj, attr, attrs.get(attr))
                if additional_properties:
                    response_obj.additional_properties = additional_properties
                return response_obj
            except TypeError as err:
                msg = "Unable to deserialize {} into model {}. ".format(kwargs, response)  # type: ignore
                raise DeserializationError(msg + str(err)) from err
        else:
            # "response" is an existing instance: populate it in place.
            try:
                for attr, value in attrs.items():
                    setattr(response, attr, value)
                return response
            except Exception as exp:
                msg = "Unable to populate response model. "
                msg += "Type: {}, Error: {}".format(type(response), exp)
                raise DeserializationError(msg) from exp
    def deserialize_data(
        self, data, data_type
    ):  # pylint: disable=too-many-return-statements
        """Process data for deserialization according to data type.

        :param str data: The response string to be deserialized.
        :param str data_type: The type to deserialize to.
        :raises: DeserializationError if deserialization fails.
        :return: Deserialized object.
        :rtype: object
        """
        if data is None:
            return data

        try:
            if not data_type:
                return data
            if data_type in self.basic_types.values():
                return self.deserialize_basic(data, data_type)
            if data_type in self.deserialize_type:
                # Already the expected Python type (e.g. datetime): pass through.
                if isinstance(
                    data, self.deserialize_expected_types.get(data_type, tuple())
                ):
                    return data

                is_a_text_parsing_type = (
                    lambda x: x
                    not in [  # pylint: disable=unnecessary-lambda-assignment
                        "object",
                        "[]",
                        r"{}",
                    ]
                )
                if (
                    isinstance(data, ET.Element)
                    and is_a_text_parsing_type(data_type)
                    and not data.text
                ):
                    return None
                data_val = self.deserialize_type[data_type](data)
                return data_val

            # "[str]" -> "[]", "{str}" -> "{}": container dispatch.
            iter_type = data_type[0] + data_type[-1]
            if iter_type in self.deserialize_type:
                return self.deserialize_type[iter_type](data, data_type[1:-1])

            obj_type = self.dependencies[data_type]
            if issubclass(obj_type, Enum):
                if isinstance(data, ET.Element):
                    data = data.text
                return self.deserialize_enum(data, obj_type)

        except (ValueError, TypeError, AttributeError) as err:
            msg = "Unable to deserialize response data."
            msg += " Data: {}, {}".format(data, data_type)
            raise DeserializationError(msg) from err
        # Reached only when data_type names a non-Enum model class: recurse.
        return self._deserialize(obj_type, data)

    def deserialize_iter(self, attr, iter_type):
        """Deserialize an iterable.

        :param list attr: Iterable to be deserialized.
        :param str iter_type: The type of object in the iterable.
        :return: Deserialized iterable.
        :rtype: list
        """
        if attr is None:
            return None
        if isinstance(
            attr, ET.Element
        ):  # If I receive an element here, get the children
            attr = list(attr)
        if not isinstance(attr, (list, set)):
            raise DeserializationError(
                "Cannot deserialize as [{}] an object of type {}".format(
                    iter_type, type(attr)
                )
            )
        return [self.deserialize_data(a, iter_type) for a in attr]

    def deserialize_dict(self, attr, dict_type):
        """Deserialize a dictionary.

        :param dict/list attr: Dictionary to be deserialized. Also accepts
         a list of key, value pairs.
        :param str dict_type: The object type of the items in the dictionary.
        :return: Deserialized dictionary.
        :rtype: dict
        """
        if isinstance(attr, list):
            # List of {"key": ..., "value": ...} pairs.
            return {
                x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr
            }

        if isinstance(attr, ET.Element):
            # Transform value into {"Key": "value"}
            attr = {el.tag: el.text for el in attr}
        return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return { + x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr + } + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object( + self, attr, **kwargs + ): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic( + self, attr, data_type + ): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. 
+ This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. 
+ :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning( + "Deserializer is not able to find %s as valid enum in %s", + data, + enum_obj, + ) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized decimal + :raises: DeserializationError if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError( + "Date must have only digits and -. Received: %s" % attr + ) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. 
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError( + "Date must have only digits and -. Received: %s" % attr + ) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], + tzinfo=_FixedOffset( + datetime.timedelta(minutes=(parsed_date[9] or 0) / 60) + ), + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises: DeserializationError if string format invalid. 
+ """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." 
+ raise DeserializationError(msg) from err + return date_obj diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_vendor.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_vendor.py new file mode 100644 index 00000000..4cf9951b --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/_vendor.py @@ -0,0 +1,55 @@ +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Optional + +from azure.core import MatchConditions + + +def raise_if_not_implemented(cls, abstract_methods): + not_implemented = [ + f for f in abstract_methods if not callable(getattr(cls, f, None)) + ] + if not_implemented: + raise NotImplementedError( + "The following methods on operation group '{}' are not implemented: '{}'." 
+ " Please refer to https://aka.ms/azsdk/python/dpcodegen/python/customize to learn how to customize.".format( + cls.__name__, "', '".join(not_implemented) + ) + ) + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match( + etag: Optional[str], match_condition: Optional[MatchConditions] +) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match( + etag: Optional[str], match_condition: Optional[MatchConditions] +) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/__init__.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/__init__.py new file mode 100644 index 00000000..5b70b981 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/__init__.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._client import Dirac + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "Dirac", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_client.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_client.py new file mode 100644 index 00000000..d7e8da23 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_client.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .. import models as _models +from .._serialization import Deserializer, Serializer +from ._configuration import DiracConfiguration +from .operations import ( + AuthOperations, + ConfigOperations, + JobsOperations, + LollygagOperations, + WellKnownOperations, +) + + +class Dirac: # pylint: disable=client-accepts-api-version-keyword + """Dirac. 
+ + :ivar well_known: WellKnownOperations operations + :vartype well_known: generated.aio.operations.WellKnownOperations + :ivar auth: AuthOperations operations + :vartype auth: generated.aio.operations.AuthOperations + :ivar config: ConfigOperations operations + :vartype config: generated.aio.operations.ConfigOperations + :ivar jobs: JobsOperations operations + :vartype jobs: generated.aio.operations.JobsOperations + :ivar lollygag: LollygagOperations operations + :vartype lollygag: generated.aio.operations.LollygagOperations + :keyword endpoint: Service URL. Required. Default value is "". + :paramtype endpoint: str + """ + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, *, endpoint: str = "", **kwargs: Any + ) -> None: + self._config = DiracConfiguration(**kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + ( + policies.SensitiveHeaderCleanupPolicy(**kwargs) + if self._config.redirect_policy + else None + ), + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient( + base_url=endpoint, policies=_policies, **kwargs + ) + + client_models = { + k: v for k, v in _models.__dict__.items() if isinstance(v, type) + } + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.well_known = WellKnownOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.auth = AuthOperations( + self._client, self._config, self._serialize, self._deserialize + ) + 
self.config = ConfigOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.lollygag = LollygagOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_configuration.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_configuration.py new file mode 100644 index 00000000..15fc2ca7 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_configuration.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any + +from azure.core.pipeline import policies + +VERSION = "unknown" + + +class DiracConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for Dirac. + + Note that all parameters used to create this instance are saved as instance + attributes. 
+ """ + + def __init__(self, **kwargs: Any) -> None: + + kwargs.setdefault("sdk_moniker", "dirac/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get( + "user_agent_policy" + ) or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy( + **kwargs + ) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get( + "logging_policy" + ) or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get( + "http_logging_policy" + ) or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get( + "custom_hook_policy" + ) or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get( + "redirect_policy" + ) or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy( + **kwargs + ) + self.authentication_policy = kwargs.get("authentication_policy") diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_patch.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_patch.py new file mode 100644 index 00000000..abf56120 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_patch.py @@ -0,0 +1,22 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = ( + [] +) # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_vendor.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_vendor.py new file mode 100644 index 00000000..4cf9951b --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/_vendor.py @@ -0,0 +1,55 @@ +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Optional + +from azure.core import MatchConditions + + +def raise_if_not_implemented(cls, abstract_methods): + not_implemented = [ + f for f in abstract_methods if not callable(getattr(cls, f, None)) + ] + if not_implemented: + raise NotImplementedError( + "The following methods on operation group '{}' are not implemented: '{}'." 
+ " Please refer to https://aka.ms/azsdk/python/dpcodegen/python/customize to learn how to customize.".format( + cls.__name__, "', '".join(not_implemented) + ) + ) + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match( + etag: Optional[str], match_condition: Optional[MatchConditions] +) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match( + etag: Optional[str], match_condition: Optional[MatchConditions] +) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/__init__.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/__init__.py new file mode 100644 index 00000000..c2fe1d0e --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/__init__.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._operations import WellKnownOperations +from ._operations import AuthOperations +from ._operations import ConfigOperations +from ._operations import JobsOperations +from ._operations import LollygagOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "WellKnownOperations", + "AuthOperations", + "ConfigOperations", + "JobsOperations", + "LollygagOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/diracx-client/src/diracx/client/aio/operations/_operations.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/_operations.py similarity index 88% rename from diracx-client/src/diracx/client/aio/operations/_operations.py rename to extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/_operations.py index 8a2f0d53..a0404023 100644 --- a/diracx-client/src/diracx/client/aio/operations/_operations.py +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/_operations.py @@ -1,23 +1,12 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.19) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase import sys -from typing import ( - Any, - Callable, - Dict, - IO, - List, - Optional, - Type, - TypeVar, - Union, - overload, -) +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload from azure.core import MatchConditions from azure.core.exceptions import ( @@ -72,6 +61,9 @@ build_jobs_summary_request, build_jobs_unassign_bulk_jobs_sandboxes_request, build_jobs_unassign_job_sandboxes_request, + build_lollygag_get_gubbins_secrets_request, + build_lollygag_get_owner_object_request, + build_lollygag_insert_owner_object_request, build_well_known_installation_metadata_request, build_well_known_openid_configuration_request, ) @@ -80,7 +72,7 @@ if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[ Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any] @@ -94,7 +86,7 @@ class WellKnownOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~client.aio.Dirac`'s + :class:`~generated.aio.Dirac`'s :attr:`well_known` attribute. 
""" @@ -119,7 +111,7 @@ async def openid_configuration(self, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -148,14 +140,12 @@ async def openid_configuration(self, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -163,16 +153,16 @@ async def openid_configuration(self, **kwargs: Any) -> Any: return deserialized # type: ignore @distributed_trace_async - async def installation_metadata(self, **kwargs: Any) -> _models.Metadata: + async def installation_metadata(self, **kwargs: Any) -> _models.ExtendedMetadata: """Installation Metadata. - Get metadata about the dirac installation. + Installation Metadata. 
- :return: Metadata - :rtype: ~client.models.Metadata + :return: ExtendedMetadata + :rtype: ~generated.models.ExtendedMetadata :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -183,7 +173,7 @@ async def installation_metadata(self, **kwargs: Any) -> _models.Metadata: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Metadata] = kwargs.pop("cls", None) + cls: ClsType[_models.ExtendedMetadata] = kwargs.pop("cls", None) _request = build_well_known_installation_metadata_request( headers=_headers, @@ -201,14 +191,14 @@ async def installation_metadata(self, **kwargs: Any) -> _models.Metadata: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("Metadata", pipeline_response) + deserialized = self._deserialize( + "ExtendedMetadata", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -222,7 +212,7 @@ class AuthOperations: # pylint: disable=abstract-class-instantiated **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~client.aio.Dirac`'s + :class:`~generated.aio.Dirac`'s :attr:`auth` attribute. """ @@ -253,7 +243,7 @@ async def initiate_device_flow( Initiate the device flow against DIRAC authorization Server. Scope must have exactly up to one ``group`` (otherwise default) and one or more ``property`` scope. - If no property, then get default one + If no property, then get default one. 
Offers the user to go with the browser to ``auth//device?user_code=XYZ``. @@ -263,10 +253,10 @@ async def initiate_device_flow( :keyword scope: Required. :paramtype scope: str :return: InitiateDeviceFlowResponse - :rtype: ~client.models.InitiateDeviceFlowResponse + :rtype: ~generated.models.InitiateDeviceFlowResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -297,15 +287,13 @@ async def initiate_device_flow( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) deserialized = self._deserialize( - "InitiateDeviceFlowResponse", pipeline_response + "InitiateDeviceFlowResponse", pipeline_response.http_response ) if cls: @@ -332,7 +320,7 @@ async def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -362,14 +350,12 @@ async def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: 
ignore @@ -394,7 +380,7 @@ async def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> A :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -425,14 +411,12 @@ async def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> A response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -449,7 +433,7 @@ async def finished(self, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -478,14 +462,12 @@ async def finished(self, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -503,7 +485,7 @@ async def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: :rtype: list[any] :raises 
~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -532,14 +514,12 @@ async def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[object]", pipeline_response) + deserialized = self._deserialize("[object]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -559,7 +539,7 @@ async def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: :rtype: str :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -589,14 +569,12 @@ async def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("str", pipeline_response) + deserialized = self._deserialize("str", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -610,10 +588,10 @@ async def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: Get information about the user's identity. 
:return: UserInfoResponse - :rtype: ~client.models.UserInfoResponse + :rtype: ~generated.models.UserInfoResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -642,14 +620,14 @@ async def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("UserInfoResponse", pipeline_response) + deserialized = self._deserialize( + "UserInfoResponse", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -680,11 +658,11 @@ async def authorization_flow( user authorize flow. :keyword response_type: "code" Required. - :paramtype response_type: str or ~client.models.Enum0 + :paramtype response_type: str or ~generated.models.Enum0 :keyword code_challenge: Required. :paramtype code_challenge: str :keyword code_challenge_method: "S256" Required. - :paramtype code_challenge_method: str or ~client.models.Enum1 + :paramtype code_challenge_method: str or ~generated.models.Enum1 :keyword client_id: Required. :paramtype client_id: str :keyword redirect_uri: Required. 
@@ -697,7 +675,7 @@ async def authorization_flow( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -733,14 +711,12 @@ async def authorization_flow( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -769,7 +745,7 @@ async def authorization_flow_complete( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -800,14 +776,12 @@ async def authorization_flow_complete( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -821,7 +795,7 @@ class ConfigOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~client.aio.Dirac`'s + :class:`~generated.aio.Dirac`'s :attr:`config` attribute. 
""" @@ -865,7 +839,7 @@ async def serve_config( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -903,14 +877,12 @@ async def serve_config( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -924,7 +896,7 @@ class JobsOperations: # pylint: disable=too-many-public-methods **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~client.aio.Dirac`'s + :class:`~generated.aio.Dirac`'s :attr:`jobs` attribute. """ @@ -958,12 +930,12 @@ async def initiate_sandbox_upload( should be used to upload the sandbox to the storage backend. :param body: Required. - :type body: ~client.models.SandboxInfo + :type body: ~generated.models.SandboxInfo :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str :return: SandboxUploadResponse - :rtype: ~client.models.SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ @@ -987,7 +959,7 @@ async def initiate_sandbox_upload( Default value is "application/json". 
:paramtype content_type: str :return: SandboxUploadResponse - :rtype: ~client.models.SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1006,12 +978,12 @@ async def initiate_sandbox_upload( should be used to upload the sandbox to the storage backend. :param body: Is either a SandboxInfo type or a IO[bytes] type. Required. - :type body: ~client.models.SandboxInfo or IO[bytes] + :type body: ~generated.models.SandboxInfo or IO[bytes] :return: SandboxUploadResponse - :rtype: ~client.models.SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1054,14 +1026,14 @@ async def initiate_sandbox_upload( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("SandboxUploadResponse", pipeline_response) + deserialized = self._deserialize( + "SandboxUploadResponse", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1074,7 +1046,7 @@ async def get_sandbox_file( ) -> _models.SandboxDownloadResponse: """Get Sandbox File. - Get a presigned URL to download a sandbox file + Get a presigned URL to download a sandbox file. This route cannot use a redirect response most clients will also send the authorization header when following a redirect. This is not desirable as @@ -1085,10 +1057,10 @@ async def get_sandbox_file( :keyword pfn: Required. 
:paramtype pfn: str :return: SandboxDownloadResponse - :rtype: ~client.models.SandboxDownloadResponse + :rtype: ~generated.models.SandboxDownloadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1118,14 +1090,14 @@ async def get_sandbox_file( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("SandboxDownloadResponse", pipeline_response) + deserialized = self._deserialize( + "SandboxDownloadResponse", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1146,7 +1118,7 @@ async def unassign_bulk_jobs_sandboxes( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1176,14 +1148,12 @@ async def unassign_bulk_jobs_sandboxes( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1204,7 +1174,7 @@ async def get_job_sandboxes( :rtype: dict[str, list[any]] :raises ~azure.core.exceptions.HttpResponseError: 
""" - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1234,14 +1204,12 @@ async def get_job_sandboxes( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{[object]}", pipeline_response) + deserialized = self._deserialize("{[object]}", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1260,7 +1228,7 @@ async def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1290,14 +1258,12 @@ async def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1315,12 +1281,12 @@ async def get_job_sandbox( :param job_id: Required. :type job_id: int :param sandbox_type: Known values are: "input" and "output". Required. 
- :type sandbox_type: str or ~client.models.SandboxType + :type sandbox_type: str or ~generated.models.SandboxType :return: list of any :rtype: list[any] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1351,14 +1317,12 @@ async def get_job_sandbox( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[object]", pipeline_response) + deserialized = self._deserialize("[object]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1379,7 +1343,7 @@ async def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1416,14 +1380,12 @@ async def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1444,7 +1406,7 @@ async def submit_bulk_jobs( Default value is "application/json". 
:paramtype content_type: str :return: list of InsertedJob - :rtype: list[~client.models.InsertedJob] + :rtype: list[~generated.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1462,7 +1424,7 @@ async def submit_bulk_jobs( Default value is "application/json". :paramtype content_type: str :return: list of InsertedJob - :rtype: list[~client.models.InsertedJob] + :rtype: list[~generated.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1477,10 +1439,10 @@ async def submit_bulk_jobs( :param body: Is either a [str] type or a IO[bytes] type. Required. :type body: list[str] or IO[bytes] :return: list of InsertedJob - :rtype: list[~client.models.InsertedJob] + :rtype: list[~generated.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1523,14 +1485,14 @@ async def submit_bulk_jobs( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[InsertedJob]", pipeline_response) + deserialized = self._deserialize( + "[InsertedJob]", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1549,7 +1511,7 @@ async def delete_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1579,14 +1541,12 @@ async def delete_bulk_jobs(self, *, job_ids: 
List[int], **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1605,7 +1565,7 @@ async def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1635,14 +1595,12 @@ async def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1665,7 +1623,7 @@ async def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1695,14 +1653,12 @@ async def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: response = pipeline_response.http_response if 
response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1720,10 +1676,10 @@ async def get_job_status_bulk( :keyword job_ids: Required. :paramtype job_ids: list[int] :return: dict mapping str to LimitedJobStatusReturn - :rtype: dict[str, ~client.models.LimitedJobStatusReturn] + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1755,14 +1711,14 @@ async def get_job_status_bulk( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{LimitedJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1783,14 +1739,14 @@ async def set_job_status_bulk( Set Job Status Bulk. :param body: Required. - :type body: dict[str, dict[str, ~client.models.JobStatusUpdate]] + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] :keyword force: Default value is False. :paramtype force: bool :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
Default value is "application/json". :paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1815,7 +1771,7 @@ async def set_job_status_bulk( Default value is "application/json". :paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1832,14 +1788,14 @@ async def set_job_status_bulk( Set Job Status Bulk. :param body: Is either a {str: {str: JobStatusUpdate}} type or a IO[bytes] type. Required. - :type body: dict[str, dict[str, ~client.models.JobStatusUpdate]] or IO[bytes] + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] or IO[bytes] :keyword force: Default value is False. :paramtype force: bool :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1883,14 +1839,14 @@ async def set_job_status_bulk( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{SetJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore 
@@ -1908,10 +1864,10 @@ async def get_job_status_history_bulk( :keyword job_ids: Required. :paramtype job_ids: list[int] :return: dict mapping str to list of JobStatusReturn - :rtype: dict[str, list[~client.models.JobStatusReturn]] + :rtype: dict[str, list[~generated.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1941,14 +1897,14 @@ async def get_job_status_history_bulk( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{[JobStatusReturn]}", pipeline_response) + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1967,7 +1923,7 @@ async def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> An :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1997,14 +1953,12 @@ async def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> An response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", 
pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2023,7 +1977,7 @@ async def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2053,14 +2007,12 @@ async def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2084,7 +2036,7 @@ async def search( **TODO: Add more docs**. :param body: Default value is None. - :type body: ~client.models.JobSearchParams + :type body: ~generated.models.JobSearchParams :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. @@ -2143,7 +2095,7 @@ async def search( **TODO: Add more docs**. :param body: Is either a JobSearchParams type or a IO[bytes] type. Default value is None. - :type body: ~client.models.JobSearchParams or IO[bytes] + :type body: ~generated.models.JobSearchParams or IO[bytes] :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. 
@@ -2152,7 +2104,7 @@ async def search( :rtype: list[JSON] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2200,23 +2152,18 @@ async def search( response = pipeline_response.http_response if response.status_code not in [200, 206]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("[object]", pipeline_response) - if response.status_code == 206: response_headers["Content-Range"] = self._deserialize( "str", response.headers.get("Content-Range") ) - deserialized = self._deserialize("[object]", pipeline_response) + deserialized = self._deserialize("[object]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2236,7 +2183,7 @@ async def summary( Show information suitable for plotting. :param body: Required. - :type body: ~client.models.JobSummaryParams + :type body: ~generated.models.JobSummaryParams :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -2272,12 +2219,12 @@ async def summary( Show information suitable for plotting. :param body: Is either a JobSummaryParams type or a IO[bytes] type. Required. 
- :type body: ~client.models.JobSummaryParams or IO[bytes] + :type body: ~generated.models.JobSummaryParams or IO[bytes] :return: any :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2320,14 +2267,12 @@ async def summary( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2346,7 +2291,7 @@ async def get_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2376,14 +2321,12 @@ async def get_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2402,7 +2345,7 @@ async def delete_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises 
~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2432,14 +2375,12 @@ async def delete_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2464,7 +2405,7 @@ async def set_single_job_properties( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2502,14 +2443,12 @@ async def set_single_job_properties( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2528,7 +2467,7 @@ async def kill_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError, @@ -2558,14 +2497,12 @@ async def kill_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2588,7 +2525,7 @@ async def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2618,14 +2555,12 @@ async def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2643,10 +2578,10 @@ async def get_single_job_status( :param job_id: Required. 
:type job_id: int :return: dict mapping str to LimitedJobStatusReturn - :rtype: dict[str, ~client.models.LimitedJobStatusReturn] + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2678,14 +2613,14 @@ async def get_single_job_status( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{LimitedJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2709,14 +2644,14 @@ async def set_single_job_status( :param job_id: Required. :type job_id: int :param body: Required. - :type body: dict[str, ~client.models.JobStatusUpdate] + :type body: dict[str, ~generated.models.JobStatusUpdate] :keyword force: Default value is False. :paramtype force: bool :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2744,7 +2679,7 @@ async def set_single_job_status( Default value is "application/json". 
:paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2764,14 +2699,14 @@ async def set_single_job_status( :param job_id: Required. :type job_id: int :param body: Is either a {str: JobStatusUpdate} type or a IO[bytes] type. Required. - :type body: dict[str, ~client.models.JobStatusUpdate] or IO[bytes] + :type body: dict[str, ~generated.models.JobStatusUpdate] or IO[bytes] :keyword force: Default value is False. :paramtype force: bool :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2816,14 +2751,14 @@ async def set_single_job_status( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{SetJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2841,10 +2776,10 @@ async def get_single_job_status_history( :param job_id: Required. 
:type job_id: int :return: dict mapping str to list of JobStatusReturn - :rtype: dict[str, list[~client.models.JobStatusReturn]] + :rtype: dict[str, list[~generated.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2874,14 +2809,192 @@ async def get_single_job_status_history( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{[JobStatusReturn]}", pipeline_response) + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class LollygagOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.aio.Dirac`'s + :attr:`lollygag` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @distributed_trace_async + async def insert_owner_object(self, owner_name: str, **kwargs: Any) -> Any: + """Insert Owner Object. + + Insert Owner Object. + + :param owner_name: Required. 
+ :type owner_name: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_lollygag_insert_owner_object_request( + owner_name=owner_name, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_owner_object(self, **kwargs: Any) -> Any: + """Get Owner Object. + + Get Owner Object. 
+ + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_lollygag_get_owner_object_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_gubbins_secrets(self, **kwargs: Any) -> Any: + """Get Gubbins Secrets. + + Does nothing but expects a GUBBINS_SENSEI permission. 
+ + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_lollygag_get_gubbins_secrets_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/_patch.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/_patch.py new file mode 100644 index 00000000..abf56120 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/aio/operations/_patch.py @@ -0,0 +1,22 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = ( + [] +) # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/__init__.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/__init__.py new file mode 100644 index 00000000..8e940f29 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/__init__.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._models import BodyAuthToken +from ._models import BodyAuthTokenGrantType +from ._models import DevelopmentSettings +from ._models import ExtendedMetadata +from ._models import GroupInfo +from ._models import HTTPValidationError +from ._models import InitiateDeviceFlowResponse +from ._models import InsertedJob +from ._models import JobSearchParams +from ._models import JobSearchParamsSearchItem +from ._models import JobStatusReturn +from ._models import JobStatusUpdate +from ._models import JobSummaryParams +from ._models import JobSummaryParamsSearchItem +from ._models import LimitedJobStatusReturn +from ._models import SandboxDownloadResponse +from ._models import SandboxInfo +from ._models import SandboxUploadResponse +from ._models import ScalarSearchSpec +from ._models import ScalarSearchSpecValue +from ._models import SetJobStatusReturn +from ._models import SortSpec +from ._models import SupportInfo +from ._models import TokenResponse +from ._models import UserInfoResponse +from ._models import VOInfo +from ._models import ValidationError +from 
._models import ValidationErrorLocItem +from ._models import VectorSearchSpec +from ._models import VectorSearchSpecValues + +from ._enums import ChecksumAlgorithm +from ._enums import Enum0 +from ._enums import Enum1 +from ._enums import Enum2 +from ._enums import Enum3 +from ._enums import Enum4 +from ._enums import JobStatus +from ._enums import SandboxFormat +from ._enums import SandboxType +from ._enums import ScalarSearchOperator +from ._enums import SortDirection +from ._enums import VectorSearchOperator +from ._patch import __all__ as _patch_all +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BodyAuthToken", + "BodyAuthTokenGrantType", + "DevelopmentSettings", + "ExtendedMetadata", + "GroupInfo", + "HTTPValidationError", + "InitiateDeviceFlowResponse", + "InsertedJob", + "JobSearchParams", + "JobSearchParamsSearchItem", + "JobStatusReturn", + "JobStatusUpdate", + "JobSummaryParams", + "JobSummaryParamsSearchItem", + "LimitedJobStatusReturn", + "SandboxDownloadResponse", + "SandboxInfo", + "SandboxUploadResponse", + "ScalarSearchSpec", + "ScalarSearchSpecValue", + "SetJobStatusReturn", + "SortSpec", + "SupportInfo", + "TokenResponse", + "UserInfoResponse", + "VOInfo", + "ValidationError", + "ValidationErrorLocItem", + "VectorSearchSpec", + "VectorSearchSpecValues", + "ChecksumAlgorithm", + "Enum0", + "Enum1", + "Enum2", + "Enum3", + "Enum4", + "JobStatus", + "SandboxFormat", + "SandboxType", + "ScalarSearchOperator", + "SortDirection", + "VectorSearchOperator", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_enums.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_enums.py new file mode 100644 index 00000000..0e4657b1 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_enums.py @@ -0,0 
+1,103 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class ChecksumAlgorithm(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ChecksumAlgorithm.""" + + SHA256 = "sha256" + + +class Enum0(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Response Type.""" + + CODE = "code" + + +class Enum1(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Code Challenge Method.""" + + S256 = "S256" + + +class Enum2(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum2.""" + + AUTHORIZATION_CODE = "authorization_code" + + +class Enum3(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum3.""" + + URN_IETF_PARAMS_OAUTH_GRANT_TYPE_DEVICE_CODE = ( + "urn:ietf:params:oauth:grant-type:device_code" + ) + + +class Enum4(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum4.""" + + REFRESH_TOKEN = "refresh_token" + + +class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """JobStatus.""" + + SUBMITTING = "Submitting" + RECEIVED = "Received" + CHECKING = "Checking" + STAGING = "Staging" + WAITING = "Waiting" + MATCHED = "Matched" + RUNNING = "Running" + STALLED = "Stalled" + COMPLETING = "Completing" + DONE = "Done" + COMPLETED = "Completed" + FAILED = "Failed" + DELETED = "Deleted" + KILLED = "Killed" + RESCHEDULED = "Rescheduled" + + +class SandboxFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SandboxFormat.""" + + TAR_BZ2 = "tar.bz2" + + +class SandboxType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Sandbox Type.""" + + INPUT = "input" + OUTPUT = "output" + + +class ScalarSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): + 
"""ScalarSearchOperator.""" + + EQ = "eq" + NEQ = "neq" + GT = "gt" + LT = "lt" + LIKE = "like" + + +class SortDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SortDirection.""" + + ASC = "asc" + DESC = "desc" + + +class VectorSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """VectorSearchOperator.""" + + IN = "in" + NOT_IN = "not in" diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_models.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_models.py new file mode 100644 index 00000000..ada3e6a5 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_models.py @@ -0,0 +1,1226 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0) +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +import sys +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union + +from .. import _serialization + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore + +if TYPE_CHECKING: + from .. import models as _models +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object + + +class BodyAuthToken(_serialization.Model): + """Body_auth_token. + + All required parameters must be populated in order to send to server. + + :ivar grant_type: OAuth2 Grant type. Required. + :vartype grant_type: ~generated.models.BodyAuthTokenGrantType + :ivar client_id: OAuth2 client id. Required. + :vartype client_id: str + :ivar device_code: device code for OAuth2 device flow. + :vartype device_code: str + :ivar code: Code for OAuth2 authorization code flow. 
+ :vartype code: str + :ivar redirect_uri: redirect_uri used with OAuth2 authorization code flow. + :vartype redirect_uri: str + :ivar code_verifier: Verifier for the code challenge for the OAuth2 authorization flow with + PKCE. + :vartype code_verifier: str + :ivar refresh_token: Refresh token used with OAuth2 refresh token flow. + :vartype refresh_token: str + """ + + _validation = { + "grant_type": {"required": True}, + "client_id": {"required": True}, + } + + _attribute_map = { + "grant_type": {"key": "grant_type", "type": "BodyAuthTokenGrantType"}, + "client_id": {"key": "client_id", "type": "str"}, + "device_code": {"key": "device_code", "type": "str"}, + "code": {"key": "code", "type": "str"}, + "redirect_uri": {"key": "redirect_uri", "type": "str"}, + "code_verifier": {"key": "code_verifier", "type": "str"}, + "refresh_token": {"key": "refresh_token", "type": "str"}, + } + + def __init__( + self, + *, + grant_type: "_models.BodyAuthTokenGrantType", + client_id: str, + device_code: Optional[str] = None, + code: Optional[str] = None, + redirect_uri: Optional[str] = None, + code_verifier: Optional[str] = None, + refresh_token: Optional[str] = None, + **kwargs: Any, + ) -> None: + """ + :keyword grant_type: OAuth2 Grant type. Required. + :paramtype grant_type: ~generated.models.BodyAuthTokenGrantType + :keyword client_id: OAuth2 client id. Required. + :paramtype client_id: str + :keyword device_code: device code for OAuth2 device flow. + :paramtype device_code: str + :keyword code: Code for OAuth2 authorization code flow. + :paramtype code: str + :keyword redirect_uri: redirect_uri used with OAuth2 authorization code flow. + :paramtype redirect_uri: str + :keyword code_verifier: Verifier for the code challenge for the OAuth2 authorization flow with + PKCE. + :paramtype code_verifier: str + :keyword refresh_token: Refresh token used with OAuth2 refresh token flow. 
+ :paramtype refresh_token: str + """ + super().__init__(**kwargs) + self.grant_type = grant_type + self.client_id = client_id + self.device_code = device_code + self.code = code + self.redirect_uri = redirect_uri + self.code_verifier = code_verifier + self.refresh_token = refresh_token + + +class BodyAuthTokenGrantType(_serialization.Model): + """OAuth2 Grant type.""" + + +class DevelopmentSettings(_serialization.Model): + """Settings for the Development Configuration that can influence run time. + + :ivar crash_on_missed_access_policy: Crash On Missed Access Policy. + :vartype crash_on_missed_access_policy: bool + """ + + _attribute_map = { + "crash_on_missed_access_policy": { + "key": "crash_on_missed_access_policy", + "type": "bool", + }, + } + + def __init__( + self, *, crash_on_missed_access_policy: bool = False, **kwargs: Any + ) -> None: + """ + :keyword crash_on_missed_access_policy: Crash On Missed Access Policy. + :paramtype crash_on_missed_access_policy: bool + """ + super().__init__(**kwargs) + self.crash_on_missed_access_policy = crash_on_missed_access_policy + + +class ExtendedMetadata(_serialization.Model): + """ExtendedMetadata. + + All required parameters must be populated in order to send to server. + + :ivar virtual_organizations: Virtual Organizations. Required. + :vartype virtual_organizations: dict[str, ~generated.models.VOInfo] + :ivar development_settings: Settings for the Development Configuration that can influence run + time. Required. + :vartype development_settings: ~generated.models.DevelopmentSettings + :ivar gubbins_secrets: Gubbins Secrets. Required. + :vartype gubbins_secrets: str + :ivar gubbins_user_info: Gubbins User Info. Required. 
+ :vartype gubbins_user_info: dict[str, list[str]] + """ + + _validation = { + "virtual_organizations": {"required": True}, + "development_settings": {"required": True}, + "gubbins_secrets": {"required": True}, + "gubbins_user_info": {"required": True}, + } + + _attribute_map = { + "virtual_organizations": {"key": "virtual_organizations", "type": "{VOInfo}"}, + "development_settings": { + "key": "development_settings", + "type": "DevelopmentSettings", + }, + "gubbins_secrets": {"key": "gubbins_secrets", "type": "str"}, + "gubbins_user_info": {"key": "gubbins_user_info", "type": "{[str]}"}, + } + + def __init__( + self, + *, + virtual_organizations: Dict[str, "_models.VOInfo"], + development_settings: "_models.DevelopmentSettings", + gubbins_secrets: str, + gubbins_user_info: Dict[str, List[str]], + **kwargs: Any, + ) -> None: + """ + :keyword virtual_organizations: Virtual Organizations. Required. + :paramtype virtual_organizations: dict[str, ~generated.models.VOInfo] + :keyword development_settings: Settings for the Development Configuration that can influence + run time. Required. + :paramtype development_settings: ~generated.models.DevelopmentSettings + :keyword gubbins_secrets: Gubbins Secrets. Required. + :paramtype gubbins_secrets: str + :keyword gubbins_user_info: Gubbins User Info. Required. + :paramtype gubbins_user_info: dict[str, list[str]] + """ + super().__init__(**kwargs) + self.virtual_organizations = virtual_organizations + self.development_settings = development_settings + self.gubbins_secrets = gubbins_secrets + self.gubbins_user_info = gubbins_user_info + + +class GroupInfo(_serialization.Model): + """GroupInfo. + + All required parameters must be populated in order to send to server. + + :ivar properties: Properties. Required. 
+ :vartype properties: list[str] + """ + + _validation = { + "properties": {"required": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "[str]"}, + } + + def __init__(self, *, properties: List[str], **kwargs: Any) -> None: + """ + :keyword properties: Properties. Required. + :paramtype properties: list[str] + """ + super().__init__(**kwargs) + self.properties = properties + + +class HTTPValidationError(_serialization.Model): + """HTTPValidationError. + + :ivar detail: Detail. + :vartype detail: list[~generated.models.ValidationError] + """ + + _attribute_map = { + "detail": {"key": "detail", "type": "[ValidationError]"}, + } + + def __init__( + self, *, detail: Optional[List["_models.ValidationError"]] = None, **kwargs: Any + ) -> None: + """ + :keyword detail: Detail. + :paramtype detail: list[~generated.models.ValidationError] + """ + super().__init__(**kwargs) + self.detail = detail + + +class InitiateDeviceFlowResponse(_serialization.Model): + """Response for the device flow initiation. + + All required parameters must be populated in order to send to server. + + :ivar user_code: User Code. Required. + :vartype user_code: str + :ivar device_code: Device Code. Required. + :vartype device_code: str + :ivar verification_uri_complete: Verification Uri Complete. Required. + :vartype verification_uri_complete: str + :ivar verification_uri: Verification Uri. Required. + :vartype verification_uri: str + :ivar expires_in: Expires In. Required. 
+ :vartype expires_in: int + """ + + _validation = { + "user_code": {"required": True}, + "device_code": {"required": True}, + "verification_uri_complete": {"required": True}, + "verification_uri": {"required": True}, + "expires_in": {"required": True}, + } + + _attribute_map = { + "user_code": {"key": "user_code", "type": "str"}, + "device_code": {"key": "device_code", "type": "str"}, + "verification_uri_complete": { + "key": "verification_uri_complete", + "type": "str", + }, + "verification_uri": {"key": "verification_uri", "type": "str"}, + "expires_in": {"key": "expires_in", "type": "int"}, + } + + def __init__( + self, + *, + user_code: str, + device_code: str, + verification_uri_complete: str, + verification_uri: str, + expires_in: int, + **kwargs: Any, + ) -> None: + """ + :keyword user_code: User Code. Required. + :paramtype user_code: str + :keyword device_code: Device Code. Required. + :paramtype device_code: str + :keyword verification_uri_complete: Verification Uri Complete. Required. + :paramtype verification_uri_complete: str + :keyword verification_uri: Verification Uri. Required. + :paramtype verification_uri: str + :keyword expires_in: Expires In. Required. + :paramtype expires_in: int + """ + super().__init__(**kwargs) + self.user_code = user_code + self.device_code = device_code + self.verification_uri_complete = verification_uri_complete + self.verification_uri = verification_uri + self.expires_in = expires_in + + +class InsertedJob(_serialization.Model): + """InsertedJob. + + All required parameters must be populated in order to send to server. + + :ivar job_id: Jobid. Required. + :vartype job_id: int + :ivar status: Status. Required. + :vartype status: str + :ivar minor_status: Minorstatus. Required. + :vartype minor_status: str + :ivar time_stamp: Timestamp. Required. 
+ :vartype time_stamp: ~datetime.datetime + """ + + _validation = { + "job_id": {"required": True}, + "status": {"required": True}, + "minor_status": {"required": True}, + "time_stamp": {"required": True}, + } + + _attribute_map = { + "job_id": {"key": "JobID", "type": "int"}, + "status": {"key": "Status", "type": "str"}, + "minor_status": {"key": "MinorStatus", "type": "str"}, + "time_stamp": {"key": "TimeStamp", "type": "iso-8601"}, + } + + def __init__( + self, + *, + job_id: int, + status: str, + minor_status: str, + time_stamp: datetime.datetime, + **kwargs: Any, + ) -> None: + """ + :keyword job_id: Jobid. Required. + :paramtype job_id: int + :keyword status: Status. Required. + :paramtype status: str + :keyword minor_status: Minorstatus. Required. + :paramtype minor_status: str + :keyword time_stamp: Timestamp. Required. + :paramtype time_stamp: ~datetime.datetime + """ + super().__init__(**kwargs) + self.job_id = job_id + self.status = status + self.minor_status = minor_status + self.time_stamp = time_stamp + + +class JobSearchParams(_serialization.Model): + """JobSearchParams. + + :ivar parameters: Parameters. + :vartype parameters: list[str] + :ivar search: Search. + :vartype search: list[~generated.models.JobSearchParamsSearchItem] + :ivar sort: Sort. + :vartype sort: list[~generated.models.SortSpec] + :ivar distinct: Distinct. + :vartype distinct: bool + """ + + _attribute_map = { + "parameters": {"key": "parameters", "type": "[str]"}, + "search": {"key": "search", "type": "[JobSearchParamsSearchItem]"}, + "sort": {"key": "sort", "type": "[SortSpec]"}, + "distinct": {"key": "distinct", "type": "bool"}, + } + + def __init__( + self, + *, + parameters: Optional[List[str]] = None, + search: List["_models.JobSearchParamsSearchItem"] = [], + sort: List["_models.SortSpec"] = [], + distinct: bool = False, + **kwargs: Any, + ) -> None: + """ + :keyword parameters: Parameters. + :paramtype parameters: list[str] + :keyword search: Search. 
+ :paramtype search: list[~generated.models.JobSearchParamsSearchItem] + :keyword sort: Sort. + :paramtype sort: list[~generated.models.SortSpec] + :keyword distinct: Distinct. + :paramtype distinct: bool + """ + super().__init__(**kwargs) + self.parameters = parameters + self.search = search + self.sort = sort + self.distinct = distinct + + +class JobSearchParamsSearchItem(_serialization.Model): + """JobSearchParamsSearchItem.""" + + +class JobStatusReturn(_serialization.Model): + """JobStatusReturn. + + All required parameters must be populated in order to send to server. + + :ivar status: JobStatus. Required. Known values are: "Submitting", "Received", "Checking", + "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", + "Failed", "Deleted", "Killed", and "Rescheduled". + :vartype status: str or ~generated.models.JobStatus + :ivar minor_status: Minorstatus. Required. + :vartype minor_status: str + :ivar application_status: Applicationstatus. Required. + :vartype application_status: str + :ivar status_time: Statustime. Required. + :vartype status_time: ~datetime.datetime + :ivar source: Source. Required. + :vartype source: str + """ + + _validation = { + "status": {"required": True}, + "minor_status": {"required": True}, + "application_status": {"required": True}, + "status_time": {"required": True}, + "source": {"required": True}, + } + + _attribute_map = { + "status": {"key": "Status", "type": "str"}, + "minor_status": {"key": "MinorStatus", "type": "str"}, + "application_status": {"key": "ApplicationStatus", "type": "str"}, + "status_time": {"key": "StatusTime", "type": "iso-8601"}, + "source": {"key": "Source", "type": "str"}, + } + + def __init__( + self, + *, + status: Union[str, "_models.JobStatus"], + minor_status: str, + application_status: str, + status_time: datetime.datetime, + source: str, + **kwargs: Any, + ) -> None: + """ + :keyword status: JobStatus. Required. 
class JobStatusUpdate(_serialization.Model):
    """JobStatusUpdate.

    A partial status update for a job; any field left as ``None`` is not changed.

    :ivar status: JobStatus. Known values are: "Submitting", "Received", "Checking", "Staging",
     "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed",
     "Deleted", "Killed", and "Rescheduled".
    :vartype status: str or ~generated.models.JobStatus
    :ivar minor_status: Minorstatus.
    :vartype minor_status: str
    :ivar application_status: Applicationstatus.
    :vartype application_status: str
    :ivar source: Source.
    :vartype source: str
    """

    # Serializer metadata (AutoRest convention): maps each Python attribute to
    # its wire/JSON key and serialization type; read by the
    # `_serialization.Model` base class.
    _attribute_map = {
        "status": {"key": "Status", "type": "str"},
        "minor_status": {"key": "MinorStatus", "type": "str"},
        "application_status": {"key": "ApplicationStatus", "type": "str"},
        "source": {"key": "Source", "type": "str"},
    }

    def __init__(
        self,
        *,
        status: Optional[Union[str, "_models.JobStatus"]] = None,
        minor_status: Optional[str] = None,
        application_status: Optional[str] = None,
        source: str = "Unknown",
        **kwargs: Any,
    ) -> None:
        """
        :keyword status: JobStatus. Known values are: "Submitting", "Received", "Checking",
         "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed",
         "Failed", "Deleted", "Killed", and "Rescheduled".
        :paramtype status: str or ~generated.models.JobStatus
        :keyword minor_status: Minorstatus.
        :paramtype minor_status: str
        :keyword application_status: Applicationstatus.
        :paramtype application_status: str
        :keyword source: Source.
        :paramtype source: str
        """
        super().__init__(**kwargs)
        self.status = status
        self.minor_status = minor_status
        self.application_status = application_status
        self.source = source


class JobSummaryParams(_serialization.Model):
    """JobSummaryParams.

    All required parameters must be populated in order to send to server.

    :ivar grouping: Grouping. Required.
    :vartype grouping: list[str]
    :ivar search: Search.
    :vartype search: list[~generated.models.JobSummaryParamsSearchItem]
    """

    # Fields the serializer requires before the payload may be sent.
    _validation = {
        "grouping": {"required": True},
    }

    _attribute_map = {
        "grouping": {"key": "grouping", "type": "[str]"},
        "search": {"key": "search", "type": "[JobSummaryParamsSearchItem]"},
    }

    def __init__(
        self,
        *,
        grouping: List[str],
        search: Optional[List["_models.JobSummaryParamsSearchItem"]] = None,
        **kwargs: Any,
    ) -> None:
        """
        :keyword grouping: Grouping. Required.
        :paramtype grouping: list[str]
        :keyword search: Search.
        :paramtype search: list[~generated.models.JobSummaryParamsSearchItem]
        """
        super().__init__(**kwargs)
        self.grouping = grouping
        # BUGFIX: the generated signature used a mutable default (`search=[]`),
        # so every instance constructed without `search` shared one list object
        # and in-place mutations leaked between instances.  Use a None sentinel
        # and allocate a fresh list per instance; callers observe the same
        # default value as before.
        self.search = [] if search is None else search


class JobSummaryParamsSearchItem(_serialization.Model):
    """JobSummaryParamsSearchItem."""


class LimitedJobStatusReturn(_serialization.Model):
    """LimitedJobStatusReturn.

    All required parameters must be populated in order to send to server.

    :ivar status: JobStatus. Required. Known values are: "Submitting", "Received", "Checking",
     "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed",
     "Failed", "Deleted", "Killed", and "Rescheduled".
    :vartype status: str or ~generated.models.JobStatus
    :ivar minor_status: Minorstatus. Required.
    :vartype minor_status: str
    :ivar application_status: Applicationstatus. Required.
    :vartype application_status: str
    """

    _validation = {
        "status": {"required": True},
        "minor_status": {"required": True},
        "application_status": {"required": True},
    }

    _attribute_map = {
        "status": {"key": "Status", "type": "str"},
        "minor_status": {"key": "MinorStatus", "type": "str"},
        "application_status": {"key": "ApplicationStatus", "type": "str"},
    }

    def __init__(
        self,
        *,
        status: Union[str, "_models.JobStatus"],
        minor_status: str,
        application_status: str,
        **kwargs: Any,
    ) -> None:
        """
        :keyword status: JobStatus. Required. Known values are: "Submitting", "Received", "Checking",
         "Staging", "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed",
         "Failed", "Deleted", "Killed", and "Rescheduled".
        :paramtype status: str or ~generated.models.JobStatus
        :keyword minor_status: Minorstatus. Required.
        :paramtype minor_status: str
        :keyword application_status: Applicationstatus. Required.
        :paramtype application_status: str
        """
        super().__init__(**kwargs)
        self.status = status
        self.minor_status = minor_status
        self.application_status = application_status


class SandboxDownloadResponse(_serialization.Model):
    """SandboxDownloadResponse.

    All required parameters must be populated in order to send to server.

    :ivar url: Url. Required.
    :vartype url: str
    :ivar expires_in: Expires In. Required.
    :vartype expires_in: int
    """

    _validation = {
        "url": {"required": True},
        "expires_in": {"required": True},
    }

    _attribute_map = {
        "url": {"key": "url", "type": "str"},
        "expires_in": {"key": "expires_in", "type": "int"},
    }

    def __init__(self, *, url: str, expires_in: int, **kwargs: Any) -> None:
        """
        :keyword url: Url. Required.
        :paramtype url: str
        :keyword expires_in: Expires In. Required.
        :paramtype expires_in: int
        """
        super().__init__(**kwargs)
        self.url = url
        self.expires_in = expires_in


class SandboxInfo(_serialization.Model):
    """SandboxInfo.

    All required parameters must be populated in order to send to server.

    :ivar checksum_algorithm: ChecksumAlgorithm. Required. "sha256"
    :vartype checksum_algorithm: str or ~generated.models.ChecksumAlgorithm
    :ivar checksum: Checksum. Required.
    :vartype checksum: str
    :ivar size: Size. Required.
    :vartype size: int
    :ivar format: SandboxFormat. Required. "tar.bz2"
    :vartype format: str or ~generated.models.SandboxFormat
    """

    # `checksum` must be a 64-char hex digest (matches a sha256 hexdigest);
    # `size` must be at least 1 byte.
    _validation = {
        "checksum_algorithm": {"required": True},
        "checksum": {"required": True, "pattern": r"^[0-9a-fA-F]{64}$"},
        "size": {"required": True, "minimum": 1},
        "format": {"required": True},
    }

    _attribute_map = {
        "checksum_algorithm": {"key": "checksum_algorithm", "type": "str"},
        "checksum": {"key": "checksum", "type": "str"},
        "size": {"key": "size", "type": "int"},
        "format": {"key": "format", "type": "str"},
    }

    def __init__(
        self,
        *,
        checksum_algorithm: Union[str, "_models.ChecksumAlgorithm"],
        checksum: str,
        size: int,
        format: Union[str, "_models.SandboxFormat"],
        **kwargs: Any,
    ) -> None:
        """
        :keyword checksum_algorithm: ChecksumAlgorithm. Required. "sha256"
        :paramtype checksum_algorithm: str or ~generated.models.ChecksumAlgorithm
        :keyword checksum: Checksum. Required.
        :paramtype checksum: str
        :keyword size: Size. Required.
        :paramtype size: int
        :keyword format: SandboxFormat. Required. "tar.bz2"
        :paramtype format: str or ~generated.models.SandboxFormat
        """
        super().__init__(**kwargs)
        self.checksum_algorithm = checksum_algorithm
        self.checksum = checksum
        self.size = size
        self.format = format
class SandboxUploadResponse(_serialization.Model):
    """SandboxUploadResponse.

    All required parameters must be populated in order to send to server.

    :ivar pfn: Pfn. Required.
    :vartype pfn: str
    :ivar url: Url.
    :vartype url: str
    :ivar fields: Fields.
    :vartype fields: dict[str, str]
    """

    # Serializer metadata (AutoRest convention), read by the
    # `_serialization.Model` base class: required fields and the mapping from
    # Python attribute names to wire/JSON keys and serialization types.
    _validation = {
        "pfn": {"required": True},
    }

    _attribute_map = {
        "pfn": {"key": "pfn", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "fields": {"key": "fields", "type": "{str}"},
    }

    def __init__(
        self,
        *,
        pfn: str,
        url: Optional[str] = None,
        fields: Optional[Dict[str, str]] = None,
        **kwargs: Any,
    ) -> None:
        """
        :keyword pfn: Pfn. Required.
        :paramtype pfn: str
        :keyword url: Url.
        :paramtype url: str
        :keyword fields: Fields.
        :paramtype fields: dict[str, str]
        """
        super().__init__(**kwargs)
        self.pfn = pfn
        self.url = url
        self.fields = fields


class ScalarSearchSpec(_serialization.Model):
    """ScalarSearchSpec.

    One scalar comparison (parameter <operator> value) in a search request.

    All required parameters must be populated in order to send to server.

    :ivar parameter: Parameter. Required.
    :vartype parameter: str
    :ivar operator: ScalarSearchOperator. Required. Known values are: "eq", "neq", "gt", "lt", and
     "like".
    :vartype operator: str or ~generated.models.ScalarSearchOperator
    :ivar value: Value. Required.
    :vartype value: ~generated.models.ScalarSearchSpecValue
    """

    _validation = {
        "parameter": {"required": True},
        "operator": {"required": True},
        "value": {"required": True},
    }

    _attribute_map = {
        "parameter": {"key": "parameter", "type": "str"},
        "operator": {"key": "operator", "type": "str"},
        "value": {"key": "value", "type": "ScalarSearchSpecValue"},
    }

    def __init__(
        self,
        *,
        parameter: str,
        operator: Union[str, "_models.ScalarSearchOperator"],
        value: "_models.ScalarSearchSpecValue",
        **kwargs: Any,
    ) -> None:
        """
        :keyword parameter: Parameter. Required.
        :paramtype parameter: str
        :keyword operator: ScalarSearchOperator. Required. Known values are: "eq", "neq", "gt", "lt",
         and "like".
        :paramtype operator: str or ~generated.models.ScalarSearchOperator
        :keyword value: Value. Required.
        :paramtype value: ~generated.models.ScalarSearchSpecValue
        """
        super().__init__(**kwargs)
        self.parameter = parameter
        self.operator = operator
        self.value = value


class ScalarSearchSpecValue(_serialization.Model):
    """Value."""


class SetJobStatusReturn(_serialization.Model):
    """SetJobStatusReturn.

    :ivar status: JobStatus. Known values are: "Submitting", "Received", "Checking", "Staging",
     "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed",
     "Deleted", "Killed", and "Rescheduled".
    :vartype status: str or ~generated.models.JobStatus
    :ivar minor_status: Minorstatus.
    :vartype minor_status: str
    :ivar application_status: Applicationstatus.
    :vartype application_status: str
    :ivar heart_beat_time: Heartbeattime.
    :vartype heart_beat_time: ~datetime.datetime
    :ivar start_exec_time: Startexectime.
    :vartype start_exec_time: ~datetime.datetime
    :ivar end_exec_time: Endexectime.
    :vartype end_exec_time: ~datetime.datetime
    :ivar last_update_time: Lastupdatetime.
    :vartype last_update_time: ~datetime.datetime
    """

    # All timestamp fields travel on the wire as ISO-8601 strings
    # (serializer type "iso-8601") and are exposed as datetime objects.
    _attribute_map = {
        "status": {"key": "Status", "type": "str"},
        "minor_status": {"key": "MinorStatus", "type": "str"},
        "application_status": {"key": "ApplicationStatus", "type": "str"},
        "heart_beat_time": {"key": "HeartBeatTime", "type": "iso-8601"},
        "start_exec_time": {"key": "StartExecTime", "type": "iso-8601"},
        "end_exec_time": {"key": "EndExecTime", "type": "iso-8601"},
        "last_update_time": {"key": "LastUpdateTime", "type": "iso-8601"},
    }

    def __init__(
        self,
        *,
        status: Optional[Union[str, "_models.JobStatus"]] = None,
        minor_status: Optional[str] = None,
        application_status: Optional[str] = None,
        heart_beat_time: Optional[datetime.datetime] = None,
        start_exec_time: Optional[datetime.datetime] = None,
        end_exec_time: Optional[datetime.datetime] = None,
        last_update_time: Optional[datetime.datetime] = None,
        **kwargs: Any,
    ) -> None:
        """
        :keyword status: JobStatus. Known values are: "Submitting", "Received", "Checking", "Staging",
         "Waiting", "Matched", "Running", "Stalled", "Completing", "Done", "Completed", "Failed",
         "Deleted", "Killed", and "Rescheduled".
        :paramtype status: str or ~generated.models.JobStatus
        :keyword minor_status: Minorstatus.
        :paramtype minor_status: str
        :keyword application_status: Applicationstatus.
        :paramtype application_status: str
        :keyword heart_beat_time: Heartbeattime.
        :paramtype heart_beat_time: ~datetime.datetime
        :keyword start_exec_time: Startexectime.
        :paramtype start_exec_time: ~datetime.datetime
        :keyword end_exec_time: Endexectime.
        :paramtype end_exec_time: ~datetime.datetime
        :keyword last_update_time: Lastupdatetime.
        :paramtype last_update_time: ~datetime.datetime
        """
        super().__init__(**kwargs)
        self.status = status
        self.minor_status = minor_status
        self.application_status = application_status
        self.heart_beat_time = heart_beat_time
        self.start_exec_time = start_exec_time
        self.end_exec_time = end_exec_time
        self.last_update_time = last_update_time


class SortSpec(_serialization.Model):
    """SortSpec.

    Orders search results by one parameter, ascending or descending.

    All required parameters must be populated in order to send to server.

    :ivar parameter: Parameter. Required.
    :vartype parameter: str
    :ivar direction: SortDirection. Required. Known values are: "asc" and "desc".
    :vartype direction: str or ~generated.models.SortDirection
    """

    _validation = {
        "parameter": {"required": True},
        "direction": {"required": True},
    }

    _attribute_map = {
        "parameter": {"key": "parameter", "type": "str"},
        "direction": {"key": "direction", "type": "str"},
    }

    def __init__(
        self,
        *,
        parameter: str,
        direction: Union[str, "_models.SortDirection"],
        **kwargs: Any,
    ) -> None:
        """
        :keyword parameter: Parameter. Required.
        :paramtype parameter: str
        :keyword direction: SortDirection. Required. Known values are: "asc" and "desc".
        :paramtype direction: str or ~generated.models.SortDirection
        """
        super().__init__(**kwargs)
        self.parameter = parameter
        self.direction = direction


class SupportInfo(_serialization.Model):
    """SupportInfo.

    Contact points (message, web page, e-mail) for a virtual organisation.

    All required parameters must be populated in order to send to server.

    :ivar message: Message. Required.
    :vartype message: str
    :ivar webpage: Webpage. Required.
    :vartype webpage: str
    :ivar email: Email. Required.
    :vartype email: str
    """

    _validation = {
        "message": {"required": True},
        "webpage": {"required": True},
        "email": {"required": True},
    }

    _attribute_map = {
        "message": {"key": "message", "type": "str"},
        "webpage": {"key": "webpage", "type": "str"},
        "email": {"key": "email", "type": "str"},
    }

    def __init__(
        self, *, message: str, webpage: str, email: str, **kwargs: Any
    ) -> None:
        """
        :keyword message: Message. Required.
        :paramtype message: str
        :keyword webpage: Webpage. Required.
        :paramtype webpage: str
        :keyword email: Email. Required.
        :paramtype email: str
        """
        super().__init__(**kwargs)
        self.message = message
        self.webpage = webpage
        self.email = email
class TokenResponse(_serialization.Model):
    """TokenResponse.

    OAuth2-style token payload returned by the auth endpoints.

    All required parameters must be populated in order to send to server.

    :ivar access_token: Access Token. Required.
    :vartype access_token: str
    :ivar expires_in: Expires In. Required.
    :vartype expires_in: int
    :ivar token_type: Token Type.
    :vartype token_type: str
    :ivar refresh_token: Refresh Token.
    :vartype refresh_token: str
    """

    # Serializer metadata (AutoRest convention), read by the
    # `_serialization.Model` base class: required fields and the mapping from
    # Python attribute names to wire/JSON keys and serialization types.
    _validation = {
        "access_token": {"required": True},
        "expires_in": {"required": True},
    }

    _attribute_map = {
        "access_token": {"key": "access_token", "type": "str"},
        "expires_in": {"key": "expires_in", "type": "int"},
        "token_type": {"key": "token_type", "type": "str"},
        "refresh_token": {"key": "refresh_token", "type": "str"},
    }

    def __init__(
        self,
        *,
        access_token: str,
        expires_in: int,
        token_type: str = "Bearer",  # default token type per the generated schema
        refresh_token: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        """
        :keyword access_token: Access Token. Required.
        :paramtype access_token: str
        :keyword expires_in: Expires In. Required.
        :paramtype expires_in: int
        :keyword token_type: Token Type.
        :paramtype token_type: str
        :keyword refresh_token: Refresh Token.
        :paramtype refresh_token: str
        """
        super().__init__(**kwargs)
        self.access_token = access_token
        self.expires_in = expires_in
        self.token_type = token_type
        self.refresh_token = refresh_token


class UserInfoResponse(_serialization.Model):
    """Response for the userinfo endpoint.

    All required parameters must be populated in order to send to server.

    :ivar sub: Sub. Required.
    :vartype sub: str
    :ivar vo: Vo. Required.
    :vartype vo: str
    :ivar dirac_group: Dirac Group. Required.
    :vartype dirac_group: str
    :ivar policies: Policies. Required.
    :vartype policies: JSON
    :ivar properties: Properties. Required.
    :vartype properties: list[str]
    :ivar preferred_username: Preferred Username. Required.
    :vartype preferred_username: str
    """

    _validation = {
        "sub": {"required": True},
        "vo": {"required": True},
        "dirac_group": {"required": True},
        "policies": {"required": True},
        "properties": {"required": True},
        "preferred_username": {"required": True},
    }

    # `policies` is an opaque JSON object ("object" serializer type); its
    # schema is defined server-side and not constrained here.
    _attribute_map = {
        "sub": {"key": "sub", "type": "str"},
        "vo": {"key": "vo", "type": "str"},
        "dirac_group": {"key": "dirac_group", "type": "str"},
        "policies": {"key": "policies", "type": "object"},
        "properties": {"key": "properties", "type": "[str]"},
        "preferred_username": {"key": "preferred_username", "type": "str"},
    }

    def __init__(
        self,
        *,
        sub: str,
        vo: str,
        dirac_group: str,
        policies: JSON,
        properties: List[str],
        preferred_username: str,
        **kwargs: Any,
    ) -> None:
        """
        :keyword sub: Sub. Required.
        :paramtype sub: str
        :keyword vo: Vo. Required.
        :paramtype vo: str
        :keyword dirac_group: Dirac Group. Required.
        :paramtype dirac_group: str
        :keyword policies: Policies. Required.
        :paramtype policies: JSON
        :keyword properties: Properties. Required.
        :paramtype properties: list[str]
        :keyword preferred_username: Preferred Username. Required.
        :paramtype preferred_username: str
        """
        super().__init__(**kwargs)
        self.sub = sub
        self.vo = vo
        self.dirac_group = dirac_group
        self.policies = policies
        self.properties = properties
        self.preferred_username = preferred_username


class ValidationError(_serialization.Model):
    """ValidationError.

    One entry of a FastAPI-style validation error report (location, message, type).

    All required parameters must be populated in order to send to server.

    :ivar loc: Location. Required.
    :vartype loc: list[~generated.models.ValidationErrorLocItem]
    :ivar msg: Message. Required.
    :vartype msg: str
    :ivar type: Error Type. Required.
    :vartype type: str
    """

    _validation = {
        "loc": {"required": True},
        "msg": {"required": True},
        "type": {"required": True},
    }

    _attribute_map = {
        "loc": {"key": "loc", "type": "[ValidationErrorLocItem]"},
        "msg": {"key": "msg", "type": "str"},
        "type": {"key": "type", "type": "str"},
    }

    def __init__(
        self,
        *,
        loc: List["_models.ValidationErrorLocItem"],
        msg: str,
        type: str,
        **kwargs: Any,
    ) -> None:
        """
        :keyword loc: Location. Required.
        :paramtype loc: list[~generated.models.ValidationErrorLocItem]
        :keyword msg: Message. Required.
        :paramtype msg: str
        :keyword type: Error Type. Required.
        :paramtype type: str
        """
        super().__init__(**kwargs)
        self.loc = loc
        self.msg = msg
        self.type = type


class ValidationErrorLocItem(_serialization.Model):
    """ValidationErrorLocItem."""


class VectorSearchSpec(_serialization.Model):
    """VectorSearchSpec.

    One set-membership comparison (parameter in/not in values) in a search request.

    All required parameters must be populated in order to send to server.

    :ivar parameter: Parameter. Required.
    :vartype parameter: str
    :ivar operator: VectorSearchOperator. Required. Known values are: "in" and "not in".
    :vartype operator: str or ~generated.models.VectorSearchOperator
    :ivar values: Values. Required.
    :vartype values: ~generated.models.VectorSearchSpecValues
    """

    _validation = {
        "parameter": {"required": True},
        "operator": {"required": True},
        "values": {"required": True},
    }

    _attribute_map = {
        "parameter": {"key": "parameter", "type": "str"},
        "operator": {"key": "operator", "type": "str"},
        "values": {"key": "values", "type": "VectorSearchSpecValues"},
    }

    def __init__(
        self,
        *,
        parameter: str,
        operator: Union[str, "_models.VectorSearchOperator"],
        values: "_models.VectorSearchSpecValues",
        **kwargs: Any,
    ) -> None:
        """
        :keyword parameter: Parameter. Required.
        :paramtype parameter: str
        :keyword operator: VectorSearchOperator. Required. Known values are: "in" and "not in".
        :paramtype operator: str or ~generated.models.VectorSearchOperator
        :keyword values: Values. Required.
        :paramtype values: ~generated.models.VectorSearchSpecValues
        """
        super().__init__(**kwargs)
        self.parameter = parameter
        self.operator = operator
        self.values = values


class VectorSearchSpecValues(_serialization.Model):
    """Values."""


class VOInfo(_serialization.Model):
    """VOInfo.

    Per-VO metadata: known groups, support contacts, and the default group.

    All required parameters must be populated in order to send to server.

    :ivar groups: Groups. Required.
    :vartype groups: dict[str, ~generated.models.GroupInfo]
    :ivar support: SupportInfo. Required.
    :vartype support: ~generated.models.SupportInfo
    :ivar default_group: Default Group. Required.
    :vartype default_group: str
    """

    _validation = {
        "groups": {"required": True},
        "support": {"required": True},
        "default_group": {"required": True},
    }

    # "{GroupInfo}" means a str -> GroupInfo mapping in the wire format.
    _attribute_map = {
        "groups": {"key": "groups", "type": "{GroupInfo}"},
        "support": {"key": "support", "type": "SupportInfo"},
        "default_group": {"key": "default_group", "type": "str"},
    }

    def __init__(
        self,
        *,
        groups: Dict[str, "_models.GroupInfo"],
        support: "_models.SupportInfo",
        default_group: str,
        **kwargs: Any,
    ) -> None:
        """
        :keyword groups: Groups. Required.
        :paramtype groups: dict[str, ~generated.models.GroupInfo]
        :keyword support: SupportInfo. Required.
        :paramtype support: ~generated.models.SupportInfo
        :keyword default_group: Default Group. Required.
        :paramtype default_group: str
        """
        super().__init__(**kwargs)
        self.groups = groups
        self.support = support
        self.default_group = default_group
# === file: extensions/gubbins/gubbins-client/src/gubbins/client/generated/models/_patch.py ===
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
from typing import List

# Names re-exported to users of the generated models package; empty because no
# model customizations have been added yet.
__all__: List[str] = (
    []
)  # Add all objects you want publicly available to users at this package level


def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """


# === file: extensions/gubbins/gubbins-client/src/gubbins/client/generated/operations/__init__.py ===
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.0, generator: @autorest/python@6.26.0)
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

# Sync operation groups generated for the gubbins client, including the
# extension-specific LollygagOperations on top of the vanilla DiracX groups.
from ._operations import WellKnownOperations
from ._operations import AuthOperations
from ._operations import ConfigOperations
from ._operations import JobsOperations
from ._operations import LollygagOperations

from ._patch import __all__ as _patch_all
from ._patch import *  # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk

__all__ = [
    "WellKnownOperations",
    "AuthOperations",
    "ConfigOperations",
    "JobsOperations",
    "LollygagOperations",
]
# Hand-written customizations from _patch.py are appended to the public API.
__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
_patch_sdk()
def build_lollygag_insert_owner_object_request(  # pylint: disable=name-too-long
    owner_name: str, **kwargs: Any
) -> HttpRequest:
    """Build the POST request for ``/api/lollygag/insert_owner/{owner_name}``.

    :param owner_name: Owner name substituted into the URL path.
    :return: An :class:`~azure.core.rest.HttpRequest` ready to be sent by the pipeline.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})

    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/api/lollygag/insert_owner/{owner_name}"
    path_format_arguments = {
        "owner_name": _SERIALIZER.url("owner_name", owner_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)


def build_lollygag_get_owner_object_request(**kwargs: Any) -> HttpRequest:
    """Build the GET request for ``/api/lollygag/get_owners``.

    :return: An :class:`~azure.core.rest.HttpRequest` ready to be sent by the pipeline.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})

    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/api/lollygag/get_owners"

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs)


# FIX: the pylint disable was previously attached to the `) -> HttpRequest:`
# line; a line-scoped `# pylint: disable=name-too-long` there does not
# suppress the message, which pylint reports at the `def` line.  Moved to the
# `def` line, consistent with build_lollygag_insert_owner_object_request.
def build_lollygag_get_gubbins_secrets_request(  # pylint: disable=name-too-long
    **kwargs: Any,
) -> HttpRequest:
    """Build the GET request for ``/api/lollygag/gubbins_sensei``.

    :return: An :class:`~azure.core.rest.HttpRequest` ready to be sent by the pipeline.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})

    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/api/lollygag/gubbins_sensei"

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs)
""" @@ -943,7 +983,7 @@ def openid_configuration(self, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -972,14 +1012,12 @@ def openid_configuration(self, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -987,16 +1025,16 @@ def openid_configuration(self, **kwargs: Any) -> Any: return deserialized # type: ignore @distributed_trace - def installation_metadata(self, **kwargs: Any) -> _models.Metadata: + def installation_metadata(self, **kwargs: Any) -> _models.ExtendedMetadata: """Installation Metadata. - Get metadata about the dirac installation. + Installation Metadata. 
- :return: Metadata - :rtype: ~client.models.Metadata + :return: ExtendedMetadata + :rtype: ~generated.models.ExtendedMetadata :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1007,7 +1045,7 @@ def installation_metadata(self, **kwargs: Any) -> _models.Metadata: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.Metadata] = kwargs.pop("cls", None) + cls: ClsType[_models.ExtendedMetadata] = kwargs.pop("cls", None) _request = build_well_known_installation_metadata_request( headers=_headers, @@ -1025,14 +1063,14 @@ def installation_metadata(self, **kwargs: Any) -> _models.Metadata: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("Metadata", pipeline_response) + deserialized = self._deserialize( + "ExtendedMetadata", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1046,7 +1084,7 @@ class AuthOperations: # pylint: disable=abstract-class-instantiated **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~client.Dirac`'s + :class:`~generated.Dirac`'s :attr:`auth` attribute. """ @@ -1077,7 +1115,7 @@ def initiate_device_flow( Initiate the device flow against DIRAC authorization Server. Scope must have exactly up to one ``group`` (otherwise default) and one or more ``property`` scope. - If no property, then get default one + If no property, then get default one. Offers the user to go with the browser to ``auth//device?user_code=XYZ``. 
@@ -1087,10 +1125,10 @@ def initiate_device_flow( :keyword scope: Required. :paramtype scope: str :return: InitiateDeviceFlowResponse - :rtype: ~client.models.InitiateDeviceFlowResponse + :rtype: ~generated.models.InitiateDeviceFlowResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1121,15 +1159,13 @@ def initiate_device_flow( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) deserialized = self._deserialize( - "InitiateDeviceFlowResponse", pipeline_response + "InitiateDeviceFlowResponse", pipeline_response.http_response ) if cls: @@ -1156,7 +1192,7 @@ def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1186,14 +1222,12 @@ def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1218,7 +1252,7 @@ def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: 
:rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1249,14 +1283,12 @@ def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1273,7 +1305,7 @@ def finished(self, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1302,14 +1334,12 @@ def finished(self, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1327,7 +1357,7 @@ def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: :rtype: list[any] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1356,14 +1386,12 @@ def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[object]", pipeline_response) + deserialized = self._deserialize("[object]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1383,7 +1411,7 @@ def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: :rtype: str :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1413,14 +1441,12 @@ def revoke_refresh_token(self, jti: str, **kwargs: Any) -> str: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("str", pipeline_response) + deserialized = self._deserialize("str", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1434,10 +1460,10 @@ def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: Get information about the user's identity. 
:return: UserInfoResponse - :rtype: ~client.models.UserInfoResponse + :rtype: ~generated.models.UserInfoResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1466,14 +1492,14 @@ def userinfo(self, **kwargs: Any) -> _models.UserInfoResponse: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("UserInfoResponse", pipeline_response) + deserialized = self._deserialize( + "UserInfoResponse", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1504,11 +1530,11 @@ def authorization_flow( user authorize flow. :keyword response_type: "code" Required. - :paramtype response_type: str or ~client.models.Enum0 + :paramtype response_type: str or ~generated.models.Enum0 :keyword code_challenge: Required. :paramtype code_challenge: str :keyword code_challenge_method: "S256" Required. - :paramtype code_challenge_method: str or ~client.models.Enum1 + :paramtype code_challenge_method: str or ~generated.models.Enum1 :keyword client_id: Required. :paramtype client_id: str :keyword redirect_uri: Required. 
@@ -1521,7 +1547,7 @@ def authorization_flow( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1557,14 +1583,12 @@ def authorization_flow( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1593,7 +1617,7 @@ def authorization_flow_complete( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1624,14 +1648,12 @@ def authorization_flow_complete( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1645,7 +1667,7 @@ class ConfigOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~client.Dirac`'s + :class:`~generated.Dirac`'s :attr:`config` attribute. 
""" @@ -1689,7 +1711,7 @@ def serve_config( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1727,14 +1749,12 @@ def serve_config( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1748,7 +1768,7 @@ class JobsOperations: # pylint: disable=too-many-public-methods **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~client.Dirac`'s + :class:`~generated.Dirac`'s :attr:`jobs` attribute. """ @@ -1782,12 +1802,12 @@ def initiate_sandbox_upload( should be used to upload the sandbox to the storage backend. :param body: Required. - :type body: ~client.models.SandboxInfo + :type body: ~generated.models.SandboxInfo :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str :return: SandboxUploadResponse - :rtype: ~client.models.SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1811,7 +1831,7 @@ def initiate_sandbox_upload( Default value is "application/json". 
:paramtype content_type: str :return: SandboxUploadResponse - :rtype: ~client.models.SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1830,12 +1850,12 @@ def initiate_sandbox_upload( should be used to upload the sandbox to the storage backend. :param body: Is either a SandboxInfo type or a IO[bytes] type. Required. - :type body: ~client.models.SandboxInfo or IO[bytes] + :type body: ~generated.models.SandboxInfo or IO[bytes] :return: SandboxUploadResponse - :rtype: ~client.models.SandboxUploadResponse + :rtype: ~generated.models.SandboxUploadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1878,14 +1898,14 @@ def initiate_sandbox_upload( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("SandboxUploadResponse", pipeline_response) + deserialized = self._deserialize( + "SandboxUploadResponse", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1898,7 +1918,7 @@ def get_sandbox_file( ) -> _models.SandboxDownloadResponse: """Get Sandbox File. - Get a presigned URL to download a sandbox file + Get a presigned URL to download a sandbox file. This route cannot use a redirect response most clients will also send the authorization header when following a redirect. This is not desirable as @@ -1909,10 +1929,10 @@ def get_sandbox_file( :keyword pfn: Required. 
:paramtype pfn: str :return: SandboxDownloadResponse - :rtype: ~client.models.SandboxDownloadResponse + :rtype: ~generated.models.SandboxDownloadResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1942,14 +1962,14 @@ def get_sandbox_file( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("SandboxDownloadResponse", pipeline_response) + deserialized = self._deserialize( + "SandboxDownloadResponse", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1970,7 +1990,7 @@ def unassign_bulk_jobs_sandboxes( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2000,14 +2020,12 @@ def unassign_bulk_jobs_sandboxes( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2026,7 +2044,7 @@ def get_job_sandboxes(self, job_id: int, **kwargs: Any) -> Dict[str, List[Any]]: :rtype: dict[str, list[any]] :raises 
~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2056,14 +2074,12 @@ def get_job_sandboxes(self, job_id: int, **kwargs: Any) -> Dict[str, List[Any]]: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{[object]}", pipeline_response) + deserialized = self._deserialize("{[object]}", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2082,7 +2098,7 @@ def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2112,14 +2128,12 @@ def unassign_job_sandboxes(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2137,12 +2151,12 @@ def get_job_sandbox( :param job_id: Required. :type job_id: int :param sandbox_type: Known values are: "input" and "output". Required. 
- :type sandbox_type: str or ~client.models.SandboxType + :type sandbox_type: str or ~generated.models.SandboxType :return: list of any :rtype: list[any] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2173,14 +2187,12 @@ def get_job_sandbox( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[object]", pipeline_response) + deserialized = self._deserialize("[object]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2201,7 +2213,7 @@ def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2238,14 +2250,12 @@ def assign_sandbox_to_job(self, job_id: int, body: str, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2266,7 +2276,7 @@ def submit_bulk_jobs( Default value is "application/json". 
:paramtype content_type: str :return: list of InsertedJob - :rtype: list[~client.models.InsertedJob] + :rtype: list[~generated.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2284,7 +2294,7 @@ def submit_bulk_jobs( Default value is "application/json". :paramtype content_type: str :return: list of InsertedJob - :rtype: list[~client.models.InsertedJob] + :rtype: list[~generated.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2299,10 +2309,10 @@ def submit_bulk_jobs( :param body: Is either a [str] type or a IO[bytes] type. Required. :type body: list[str] or IO[bytes] :return: list of InsertedJob - :rtype: list[~client.models.InsertedJob] + :rtype: list[~generated.models.InsertedJob] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2345,14 +2355,14 @@ def submit_bulk_jobs( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[InsertedJob]", pipeline_response) + deserialized = self._deserialize( + "[InsertedJob]", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2371,7 +2381,7 @@ def delete_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2401,14 +2411,12 @@ def delete_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: response = 
pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2427,7 +2435,7 @@ def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2457,14 +2465,12 @@ def kill_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2487,7 +2493,7 @@ def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2517,14 +2523,12 @@ def remove_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in 
memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2542,10 +2546,10 @@ def get_job_status_bulk( :keyword job_ids: Required. :paramtype job_ids: list[int] :return: dict mapping str to LimitedJobStatusReturn - :rtype: dict[str, ~client.models.LimitedJobStatusReturn] + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2577,14 +2581,14 @@ def get_job_status_bulk( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{LimitedJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2605,14 +2609,14 @@ def set_job_status_bulk( Set Job Status Bulk. :param body: Required. - :type body: dict[str, dict[str, ~client.models.JobStatusUpdate]] + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] :keyword force: Default value is False. :paramtype force: bool :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2637,7 +2641,7 @@ def set_job_status_bulk( Default value is "application/json". :paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2654,14 +2658,14 @@ def set_job_status_bulk( Set Job Status Bulk. :param body: Is either a {str: {str: JobStatusUpdate}} type or a IO[bytes] type. Required. - :type body: dict[str, dict[str, ~client.models.JobStatusUpdate]] or IO[bytes] + :type body: dict[str, dict[str, ~generated.models.JobStatusUpdate]] or IO[bytes] :keyword force: Default value is False. :paramtype force: bool :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2705,14 +2709,14 @@ def set_job_status_bulk( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{SetJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2730,10 +2734,10 @@ def get_job_status_history_bulk( 
:keyword job_ids: Required. :paramtype job_ids: list[int] :return: dict mapping str to list of JobStatusReturn - :rtype: dict[str, list[~client.models.JobStatusReturn]] + :rtype: dict[str, list[~generated.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2763,14 +2767,14 @@ def get_job_status_history_bulk( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{[JobStatusReturn]}", pipeline_response) + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2789,7 +2793,7 @@ def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2819,14 +2823,12 @@ def reschedule_bulk_jobs(self, *, job_ids: List[int], **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: 
ignore @@ -2845,7 +2847,7 @@ def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2875,14 +2877,12 @@ def reschedule_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2906,7 +2906,7 @@ def search( **TODO: Add more docs**. :param body: Default value is None. - :type body: ~client.models.JobSearchParams + :type body: ~generated.models.JobSearchParams :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. @@ -2965,7 +2965,7 @@ def search( **TODO: Add more docs**. :param body: Is either a JobSearchParams type or a IO[bytes] type. Default value is None. - :type body: ~client.models.JobSearchParams or IO[bytes] + :type body: ~generated.models.JobSearchParams or IO[bytes] :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. 
@@ -2974,7 +2974,7 @@ def search( :rtype: list[JSON] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3022,23 +3022,18 @@ def search( response = pipeline_response.http_response if response.status_code not in [200, 206]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("[object]", pipeline_response) - if response.status_code == 206: response_headers["Content-Range"] = self._deserialize( "str", response.headers.get("Content-Range") ) - deserialized = self._deserialize("[object]", pipeline_response) + deserialized = self._deserialize("[object]", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3058,7 +3053,7 @@ def summary( Show information suitable for plotting. :param body: Required. - :type body: ~client.models.JobSummaryParams + :type body: ~generated.models.JobSummaryParams :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -3094,12 +3089,12 @@ def summary( Show information suitable for plotting. :param body: Is either a JobSummaryParams type or a IO[bytes] type. Required. 
- :type body: ~client.models.JobSummaryParams or IO[bytes] + :type body: ~generated.models.JobSummaryParams or IO[bytes] :return: any :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3142,14 +3137,12 @@ def summary( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3168,7 +3161,7 @@ def get_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3198,14 +3191,12 @@ def get_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3224,7 +3215,7 @@ def delete_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: 
MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3254,14 +3245,12 @@ def delete_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3286,7 +3275,7 @@ def set_single_job_properties( :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3324,14 +3313,12 @@ def set_single_job_properties( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3350,7 +3337,7 @@ def kill_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3380,14 +3367,12 @@ def kill_single_job(self, 
job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3410,7 +3395,7 @@ def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: :rtype: any :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3440,14 +3425,12 @@ def remove_single_job(self, job_id: int, **kwargs: Any) -> Any: response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("object", pipeline_response) + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3465,10 +3448,10 @@ def get_single_job_status( :param job_id: Required. 
:type job_id: int :return: dict mapping str to LimitedJobStatusReturn - :rtype: dict[str, ~client.models.LimitedJobStatusReturn] + :rtype: dict[str, ~generated.models.LimitedJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3500,14 +3483,14 @@ def get_single_job_status( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{LimitedJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{LimitedJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3531,14 +3514,14 @@ def set_single_job_status( :param job_id: Required. :type job_id: int :param body: Required. - :type body: dict[str, ~client.models.JobStatusUpdate] + :type body: dict[str, ~generated.models.JobStatusUpdate] :keyword force: Default value is False. :paramtype force: bool :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -3566,7 +3549,7 @@ def set_single_job_status( Default value is "application/json". 
:paramtype content_type: str :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -3586,14 +3569,14 @@ def set_single_job_status( :param job_id: Required. :type job_id: int :param body: Is either a {str: JobStatusUpdate} type or a IO[bytes] type. Required. - :type body: dict[str, ~client.models.JobStatusUpdate] or IO[bytes] + :type body: dict[str, ~generated.models.JobStatusUpdate] or IO[bytes] :keyword force: Default value is False. :paramtype force: bool :return: dict mapping str to SetJobStatusReturn - :rtype: dict[str, ~client.models.SetJobStatusReturn] + :rtype: dict[str, ~generated.models.SetJobStatusReturn] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3638,14 +3621,14 @@ def set_single_job_status( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{SetJobStatusReturn}", pipeline_response) + deserialized = self._deserialize( + "{SetJobStatusReturn}", pipeline_response.http_response + ) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3663,10 +3646,10 @@ def get_single_job_status_history( :param job_id: Required. 
:type job_id: int :return: dict mapping str to list of JobStatusReturn - :rtype: dict[str, list[~client.models.JobStatusReturn]] + :rtype: dict[str, list[~generated.models.JobStatusReturn]] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -3696,14 +3679,192 @@ def get_single_job_status_history( response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error( status_code=response.status_code, response=response, error_map=error_map ) raise HttpResponseError(response=response) - deserialized = self._deserialize("{[JobStatusReturn]}", pipeline_response) + deserialized = self._deserialize( + "{[JobStatusReturn]}", pipeline_response.http_response + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class LollygagOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~generated.Dirac`'s + :attr:`lollygag` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = ( + input_args.pop(0) if input_args else kwargs.pop("deserializer") + ) + + @distributed_trace + def insert_owner_object(self, owner_name: str, **kwargs: Any) -> Any: + """Insert Owner Object. + + Insert Owner Object. + + :param owner_name: Required. 
+ :type owner_name: str + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_lollygag_insert_owner_object_request( + owner_name=owner_name, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_owner_object(self, **kwargs: Any) -> Any: + """Get Owner Object. + + Get Owner Object. 
+ + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_lollygag_get_owner_object_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_gubbins_secrets(self, **kwargs: Any) -> Any: + """Get Gubbins Secrets. + + Does nothing but expects a GUBBINS_SENSEI permission. 
+ + :return: any + :rtype: any + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Any] = kwargs.pop("cls", None) + + _request = build_lollygag_get_gubbins_secrets_request( + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = ( + self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error( + status_code=response.status_code, response=response, error_map=error_map + ) + raise HttpResponseError(response=response) + + deserialized = self._deserialize("object", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/operations/_patch.py b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/operations/_patch.py new file mode 100644 index 00000000..abf56120 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/operations/_patch.py @@ -0,0 +1,22 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = ( + [] +) # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/generated/py.typed b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/py.typed new file mode 100644 index 00000000..1242d432 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/generated/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/models.py b/extensions/gubbins/gubbins-client/src/gubbins/client/models.py new file mode 100644 index 00000000..84d0aba1 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/models.py @@ -0,0 +1 @@ +from .generated.models import * # pylint: disable=unused-wildcard-import # noqa diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/patches/__init__.py b/extensions/gubbins/gubbins-client/src/gubbins/client/patches/__init__.py new file mode 100644 index 00000000..8dbe8d5d --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/patches/__init__.py @@ -0,0 +1,10 @@ +""" +This shows how to define your extension client +""" + +from diracx.client.patches.utils import DiracClientMixin + +from gubbins.client.generated._client import Dirac as GubbinsGenerated + + +class GubbinsClient(DiracClientMixin, GubbinsGenerated): ... 
diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/patches/aio/__init__.py b/extensions/gubbins/gubbins-client/src/gubbins/client/patches/aio/__init__.py new file mode 100644 index 00000000..bf918799 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/patches/aio/__init__.py @@ -0,0 +1,12 @@ +""" +This shows how to define your extension aio client +""" + +from __future__ import annotations + +from diracx.client.patches.aio.utils import DiracClientMixin + +from gubbins.client.generated.aio._client import Dirac as GubbinsGenerated + + +class GubbinsClient(DiracClientMixin, GubbinsGenerated): ... diff --git a/extensions/gubbins/gubbins-client/src/gubbins/client/py.typed b/extensions/gubbins/gubbins-client/src/gubbins/client/py.typed new file mode 100644 index 00000000..1242d432 --- /dev/null +++ b/extensions/gubbins/gubbins-client/src/gubbins/client/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. diff --git a/extensions/gubbins/gubbins-client/tests/test_gubbins_client.py b/extensions/gubbins/gubbins-client/tests/test_gubbins_client.py new file mode 100644 index 00000000..cbb9d3b3 --- /dev/null +++ b/extensions/gubbins/gubbins-client/tests/test_gubbins_client.py @@ -0,0 +1,178 @@ +""" +These tests make sure that we can access all the original client as well as the extension +We do it in subprocesses to avoid conflict between the MetaPathFinder and pytest test discovery +""" + +import os +import shlex +import shutil +import subprocess +from pathlib import Path + +import pytest + + +@pytest.fixture +def fake_cli_env(monkeypatch, tmp_path): + from diracx.core.preferences import get_diracx_preferences + + env = { + "DIRACX_URL": "http://localhost:9999", + "DIRACX_CA_PATH": str(tmp_path), + "HOME": str(tmp_path), + } + + for key, value in env.items(): + monkeypatch.setenv(key, value) + + data_dir = ( + Path(__file__).parents[2] / "gubbins-routers/tests/data/lhcb-auth.web.cern.ch" + ) + + run_server_cmd = f"{shutil.which('python')} -m 
http.server -d {data_dir} 9999" + proc = subprocess.Popen(shlex.split(run_server_cmd)) # noqa + print(proc) + yield + proc.kill() + + get_diracx_preferences.cache_clear() + + +def test_client_extension(fake_cli_env, tmp_path): + """ + Make sure that the DiracClient can call gubbins routes + + We run the test as a separate python script to make sure that MetaPathFinder + behaves as expected in a normal python code, and not inside pytest + """ + test_code = """ +from diracx.client import DiracClient +with DiracClient() as api: + print(f"{api.jobs=}") + assert "diracx.client.generated.operations._patch.JobsOperations" in str(api.jobs) + print(f"{api.lollygag=}") + assert "gubbins.client.generated.operations._operations.LollygagOperations" in str(api.lollygag) + +""" + with open(tmp_path / "test_client_ext.py", "wt") as f: + f.write(test_code) + try: + with open(tmp_path / "std.out", "wt") as f: + + subprocess.run( # noqa + [shutil.which("python"), tmp_path / "test_client_ext.py"], + env=os.environ, + text=True, + stdout=f, + stderr=f, + check=True, + ) + except subprocess.CalledProcessError as e: + raise AssertionError(Path(tmp_path / "std.out").read_text()) from e + + +def test_gubbins_client(fake_cli_env, tmp_path): + """Make sure that we can use the GubbinsClient directly + + We run the test as a separate python script to make sure that MetaPathFinder + behaves as expected in a normal python code, and not inside pytest + """ + + test_code = """ +from gubbins.client import GubbinsClient +with GubbinsClient() as api: + print(f"{api.jobs=}") + assert "diracx.client.generated.operations._patch.JobsOperations" in str(api.jobs) + print(f"{api.lollygag=}") + assert "gubbins.client.generated.operations._operations.LollygagOperations" in str(api.lollygag) + +""" + with open(tmp_path / "test_client_ext.py", "wt") as f: + f.write(test_code) + try: + with open(tmp_path / "std.out", "wt") as f: + subprocess.run( # noqa + [shutil.which("python"), tmp_path / 
"test_client_ext.py"], + env=os.environ, + text=True, + stdout=f, + stderr=f, + check=True, + ) + except subprocess.CalledProcessError as e: + raise AssertionError(Path(tmp_path / "std.out").read_text()) from e + + +def test_async_client_extension(fake_cli_env, tmp_path): + """ + Make sure that the DiracClient can call gubbins routes + + We run the test as a separate python script to make sure that MetaPathFinder + behaves as expected in a normal python code, and not inside pytest + """ + test_code = """ + +import asyncio + +async def main(): + from diracx.client.aio import DiracClient + async with DiracClient() as api: + print(f"{api.jobs=}") + assert "diracx.client.generated.aio.operations._patch.JobsOperations" in str(api.jobs) + print(f"{api.lollygag=}") + assert "gubbins.client.generated.aio.operations._operations.LollygagOperations" in str(api.lollygag) +asyncio.run(main()) + +""" + with open(tmp_path / "test_client_ext.py", "wt") as f: + f.write(test_code) + try: + with open(tmp_path / "std.out", "wt") as f: + + subprocess.run( # noqa + [shutil.which("python"), tmp_path / "test_client_ext.py"], + env=os.environ, + text=True, + stdout=f, + stderr=f, + check=True, + ) + except subprocess.CalledProcessError as e: + raise AssertionError(Path(tmp_path / "std.out").read_text()) from e + + +def test_async_gubbins_client(fake_cli_env, tmp_path): + """Make sure that we can use the GubbinsClient directly + + We run the test as a separate python script to make sure that MetaPathFinder + behaves as expected in a normal python code, and not inside pytest + """ + + test_code = """ + +import asyncio + +async def main(): + from gubbins.client.aio import GubbinsClient + async with GubbinsClient() as api: + print(f"{api.jobs=}") + assert "diracx.client.generated.aio.operations._patch.JobsOperations" in str(api.jobs) + print(f"{api.lollygag=}") + assert "gubbins.client.generated.aio.operations._operations.LollygagOperations" in str(api.lollygag) +asyncio.run(main()) + +""" + 
with open(tmp_path / "test_client_ext.py", "wt") as f: + f.write(test_code) + try: + with open(tmp_path / "std.out", "wt") as f: + subprocess.run( # noqa + [shutil.which("python"), tmp_path / "test_client_ext.py"], + env=os.environ, + text=True, + stdout=f, + stderr=f, + check=True, + ) + except subprocess.CalledProcessError as e: + raise AssertionError(Path(tmp_path / "std.out").read_text()) from e diff --git a/extensions/gubbins/gubbins-client/tests/test_regenerate.py b/extensions/gubbins/gubbins-client/tests/test_regenerate.py new file mode 100644 index 00000000..45f7b0bb --- /dev/null +++ b/extensions/gubbins/gubbins-client/tests/test_regenerate.py @@ -0,0 +1,90 @@ +""" +Regenerate gubbins-client. +You should have something like that too, however the fact of having +gubbins a subdirectory of diracx means the path are slightly different. +It is better to look at the origin `test_regenerate.py`. +""" + +import subprocess +from pathlib import Path + +import git +import pytest + +import gubbins.client + +pytestmark = pytest.mark.enabled_dependencies([]) + + +AUTOREST_VERSION = "6.13.7" + + +@pytest.fixture +def test_client(client_factory): + with client_factory.unauthenticated() as client: + yield client + + +def test_regenerate_client(test_client, tmp_path): + """Regenerate the AutoREST client and run pre-commit checks on it. + + This test is skipped by default, and can be enabled by passing + --regenerate-client to pytest. It is intended to be run manually + when the API changes. + + The reason this is a test is that it is the only way to get access to the + test_client fixture, which is required to get the OpenAPI spec. + + WARNING: This test will modify the source code of the client! 
+ """ + r = test_client.get("/api/openapi.json") + r.raise_for_status() + openapi_spec = tmp_path / "openapi.json" + openapi_spec.write_text(r.text) + + output_folder = Path(gubbins.client.generated.__file__).parent.parent + assert (output_folder).is_dir() + repo_root = output_folder.parents[5] + assert (repo_root / ".git").is_dir() + + repo = git.Repo(repo_root) + + if repo.is_dirty( + path=repo_root + / "extensions" + / "gubbins" + / "gubbins-client" + / "src" + / "gubbins" + / "client" + ): + raise AssertionError( + "Client is currently in a modified state, skipping regeneration" + ) + cmd = [ + "autorest", + "--python", + f"--input-file={openapi_spec}", + "--models-mode=msrest", + "--namespace=generated", + f"--output-folder={output_folder}", + ] + + # This is required to be able to work offline + # TODO: if offline, find the version already installed + # and use it + # cmd += [f"--use=@autorest/python@{AUTOREST_VERSION}"] + + subprocess.run(cmd, check=True) # noqa + + cmd = ["pre-commit", "run", "--all-files"] + print("Running pre-commit...") + subprocess.run(cmd, check=False, cwd=repo_root) # noqa + print("Re-running pre-commit...") + subprocess.run(cmd, check=True, cwd=repo_root) # noqa + if repo.is_dirty(path=repo_root / "src" / "gubbins" / "client"): + raise AssertionError("Client was regenerated with changes") + + +if __name__ == "__main__": + print(AUTOREST_VERSION) diff --git a/extensions/gubbins/gubbins-core/pyproject.toml b/extensions/gubbins/gubbins-core/pyproject.toml new file mode 100644 index 00000000..648c94d7 --- /dev/null +++ b/extensions/gubbins/gubbins-core/pyproject.toml @@ -0,0 +1,53 @@ +[project] +name = "gubbins-core" +description = "Common code used by all Gubbins packages" +readme = "README.md" +requires-python = ">=3.11" +keywords = [] +license = { text = "GPL-3.0-only" } +classifiers = [ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 
3", + "Topic :: Scientific/Engineering", + "Topic :: System :: Distributed Computing", +] +dependencies = ["diracx-core"] +dynamic = ["version"] + +[project.optional-dependencies] +testing = ["gubbins-testing", "diracx-testing"] + +types = [ + "types-cachetools", + "types-PyYAML", +] + +[project.entry-points."diracx"] +properties_module = "gubbins.core.properties" +config = "gubbins.core.config.schema:Config" + +[tool.setuptools.packages.find] +where = ["src"] + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +root = "../../.." + +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = [ + "-v", + "--cov=gubbins.core", + "--cov-report=term-missing", + "-pgubbins.testing", + "-pdiracx.testing", + "--import-mode=importlib", +] +asyncio_mode = "auto" +markers = [ + "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", +] diff --git a/extensions/gubbins/gubbins-core/src/gubbins/core/__init__.py b/extensions/gubbins/gubbins-core/src/gubbins/core/__init__.py new file mode 100644 index 00000000..b05c9dee --- /dev/null +++ b/extensions/gubbins/gubbins-core/src/gubbins/core/__init__.py @@ -0,0 +1 @@ +__all__ = ("config", "properties") diff --git a/extensions/gubbins/gubbins-core/src/gubbins/core/config/__init__.py b/extensions/gubbins/gubbins-core/src/gubbins/core/config/__init__.py new file mode 100644 index 00000000..1d6a9037 --- /dev/null +++ b/extensions/gubbins/gubbins-core/src/gubbins/core/config/__init__.py @@ -0,0 +1 @@ +__all__ = ("schema",) diff --git a/extensions/gubbins/gubbins-core/src/gubbins/core/config/schema.py b/extensions/gubbins/gubbins-core/src/gubbins/core/config/schema.py new file mode 100644 index 00000000..ccc38396 --- /dev/null +++ b/extensions/gubbins/gubbins-core/src/gubbins/core/config/schema.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from typing import MutableMapping + +from 
diracx.core.config.schema import ( + Config as _Config, +) +from diracx.core.config.schema import ( + RegistryConfig as _RegistryConfig, +) +from diracx.core.config.schema import ( + UserConfig as _UserConfig, +) + +""" +In order to add extra config, you need to redefine +the whole tree down to the point you are interested in changing +""" + + +class UserConfig(_UserConfig): + GubbinsSpecificInfo: str | None = None + + +class RegistryConfig(_RegistryConfig): + Users: MutableMapping[str, UserConfig] # type: ignore[assignment] + + +class Config(_Config): + Registry: MutableMapping[str, RegistryConfig] # type: ignore[assignment] diff --git a/extensions/gubbins/gubbins-core/src/gubbins/core/properties.py b/extensions/gubbins/gubbins-core/src/gubbins/core/properties.py new file mode 100644 index 00000000..a14a2337 --- /dev/null +++ b/extensions/gubbins/gubbins-core/src/gubbins/core/properties.py @@ -0,0 +1,5 @@ +from __future__ import annotations + +from diracx.core.properties import SecurityProperty + +GUBBINS_SENSEI = SecurityProperty("GubbinsSensei") diff --git a/extensions/gubbins/gubbins-core/src/gubbins/core/py.typed b/extensions/gubbins/gubbins-core/src/gubbins/core/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/extensions/gubbins/gubbins-core/tests/test_config.py b/extensions/gubbins/gubbins-core/tests/test_config.py new file mode 100644 index 00000000..d4b2e453 --- /dev/null +++ b/extensions/gubbins/gubbins-core/tests/test_config.py @@ -0,0 +1,39 @@ +""" +This test makes sure that we are getting the configuration as a GubbinsConfig +""" + +import datetime +from urllib import request + +import pytest +from diracx.core.config import ConfigSource, RemoteGitConfigSource + +from gubbins.core.config.schema import Config + +# The diracx-chart contains a CS example +TEST_REPO = "git+https://github.com/DIRACGrid/diracx-charts/" + + +def github_is_down(): + try: + request.urlopen("https://github.com", timeout=1) + return False + except Exception: + 
return True + + +@pytest.mark.skipif(github_is_down(), reason="Github unavailble") +def test_remote_git_config_source(monkeypatch): + + monkeypatch.setattr( + "diracx.core.config.DEFAULT_CONFIG_FILE", + "k3s/examples/cs.yaml", + ) + remote_conf = ConfigSource.create_from_url(backend_url=TEST_REPO) + assert isinstance(remote_conf, RemoteGitConfigSource) + + hexsha, modified = remote_conf.latest_revision() + assert isinstance(hexsha, str) + assert isinstance(modified, datetime.datetime) + result = remote_conf.read_raw(hexsha, modified) + assert isinstance(result, Config) diff --git a/extensions/gubbins/gubbins-core/tests/test_properties.py b/extensions/gubbins/gubbins-core/tests/test_properties.py new file mode 100644 index 00000000..9a1df1b9 --- /dev/null +++ b/extensions/gubbins/gubbins-core/tests/test_properties.py @@ -0,0 +1,9 @@ +from diracx.core.properties import SecurityProperty + + +def test_properties(): + """Checks that both gubbins and diracx properties are available""" + all_properties = SecurityProperty.available_properties() + + assert "GubbinsSensei" in all_properties + assert "NormalUser" in all_properties diff --git a/extensions/gubbins/gubbins-db/pyproject.toml b/extensions/gubbins/gubbins-db/pyproject.toml new file mode 100644 index 00000000..ced03e27 --- /dev/null +++ b/extensions/gubbins/gubbins-db/pyproject.toml @@ -0,0 +1,58 @@ +[project] +name = "gubbins-db" +description = "DB classes for the Gubbins diracx extension" +readme = "README.md" +requires-python = ">=3.10" +keywords = [] +license = { text = "GPL-3.0-only" } +classifiers = [ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering", + "Topic :: System :: Distributed Computing", +] +dependencies = [ + # This is obvious + "diracx-db", + # We should add something else +] +dynamic = ["version"] + +[project.optional-dependencies] +testing = 
["gubbins-testing", "diracx-testing"] + +[project.entry-points."diracx.db.sql"] +LollygagDB = "gubbins.db.sql:LollygagDB" +JobDB = "gubbins.db.sql:GubbinsJobDB" + +[tool.setuptools.packages.find] +where = ["src"] + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + +# This should not be in your extension ! +# It is just because we have this demo extension +# in a subfolder of our git repo +[tool.setuptools_scm] +root = "../../.." + +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = [ + "-v", + "--cov=gubbins.db", + "--cov-report=term-missing", + "-pgubbins.testing", + # Both gubbins and diracx are needed here + "-pgubbins.testing", + "-pdiracx.testing", + "-pdiracx.testing.osdb", + "--import-mode=importlib", +] +asyncio_mode = "auto" +markers = [ + "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", +] diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/__init__.py b/extensions/gubbins/gubbins-db/src/gubbins/db/__init__.py new file mode 100644 index 00000000..7a32a33b --- /dev/null +++ b/extensions/gubbins/gubbins-db/src/gubbins/db/__init__.py @@ -0,0 +1,6 @@ +from __future__ import annotations + +# Do not forget that, as otherwise diracx won't find your DBs +__all__ = ("sql",) + +from . 
import sql diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/py.typed b/extensions/gubbins/gubbins-db/src/gubbins/db/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/sql/__init__.py b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/__init__.py new file mode 100644 index 00000000..a2ae6f84 --- /dev/null +++ b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/__init__.py @@ -0,0 +1,6 @@ +from __future__ import annotations + +__all__ = ("LollygagDB", "GubbinsJobDB") + +from .jobs.db import GubbinsJobDB +from .lollygag.db import LollygagDB diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/__init__.py b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/db.py b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/db.py new file mode 100644 index 00000000..e89d1b85 --- /dev/null +++ b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/db.py @@ -0,0 +1,53 @@ +from diracx.db.sql.job.db import JobDB +from sqlalchemy import insert, select + +from .schema import GubbinsInfo, JobDBBase + + +class GubbinsJobDB(JobDB): + """ + This DB extends the diracx JobDB. + All methods from the parent DB are accessible + + """ + + metadata = JobDBBase.metadata + + async def insert_gubbins_info(self, job_id: int, info: str): + """ + This is a new method that makes use of a new table. 
+ """ + stmt = insert(GubbinsInfo).values(JobID=job_id, Info=info) + await self.conn.execute(stmt) + + async def getJobJDL( # type: ignore[override] + self, job_id: int, original: bool = False, with_info=False + ) -> str | dict[str, str]: + """ + This method modifes the one in the parent class: + * adds an extra argument + * changes the return type + + Note that this requires to disable mypy error with + # type: ignore[override] + """ + jdl = await super().getJobJDL(job_id, original=original) + if not with_info: + return jdl + + stmt = select(GubbinsInfo.Info).where(GubbinsInfo.JobID == job_id) + + info = (await self.conn.execute(stmt)).scalar_one() + return {"JDL": jdl, "Info": info} + + async def setJobAttributes(self, job_id, jobData): + """ + This method modified the one in the parent class, + without changing the argument nor the return type + + Also, this method is called by the router via the status_utility + so we can test in test_gubbins_job_router that the behavior + is altered without even redefining a gubbins specific router + """ + # We do nothing + ... 
diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/schema.py b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/schema.py new file mode 100644 index 00000000..ac5cd039 --- /dev/null +++ b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/jobs/schema.py @@ -0,0 +1,19 @@ +from diracx.db.sql.job.db import JobDBBase +from diracx.db.sql.utils import Column +from sqlalchemy import ( + ForeignKey, + Integer, + String, +) + + +# You need to inherit from the declarative_base of the parent DB +class GubbinsInfo(JobDBBase): + """An extra table with respect to Vanilla diracx JobDB""" + + __tablename__ = "GubbinsJobs" + + JobID = Column( + Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True + ) + Info = Column(String(255), default="", primary_key=True) diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/__init__.py b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/db.py b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/db.py new file mode 100644 index 00000000..354356de --- /dev/null +++ b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/db.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from uuid import UUID + +from diracx.db.sql.utils import BaseSQLDB, apply_search_filters +from sqlalchemy import func, insert, select + +from .schema import Base as LollygagDBBase +from .schema import Cars, Owners + + +class LollygagDB(BaseSQLDB): + """ + This LollygagDB is just to illustrate some important aspect of writing + DB classes in DiracX. 
+ + It is mostly pure SQLAlchemy, with a few conventions + + Document the secrets + """ + + # This needs to be here for the BaseSQLDB to create the engine + metadata = LollygagDBBase.metadata + + async def summary(self, group_by, search) -> list[dict[str, str | int]]: + columns = [Cars.__table__.columns[x] for x in group_by] + + stmt = select(*columns, func.count(Cars.licensePlate).label("count")) + stmt = apply_search_filters(Cars.__table__.columns.__getitem__, stmt, search) + stmt = stmt.group_by(*columns) + + # Execute the query + return [ + dict(row._mapping) + async for row in (await self.conn.stream(stmt)) + if row.count > 0 # type: ignore + ] + + async def insert_owner(self, name: str) -> int: + stmt = insert(Owners).values(name=name) + result = await self.conn.execute(stmt) + # await self.engine.commit() + return result.lastrowid + + async def get_owner(self) -> list[str]: + stmt = select(Owners.name) + result = await self.conn.execute(stmt) + # await self.engine.commit() + return [row[0] for row in result] + + async def insert_car(self, license_plate: UUID, model: str, owner_id: int) -> int: + stmt = insert(Cars).values( + licensePlate=license_plate, model=model, ownerID=owner_id + ) + + result = await self.conn.execute(stmt) + # await self.engine.commit() + return result.lastrowid + + async def get_car(self) -> list[str]: + stmt = select(Cars.model) + result = await self.conn.execute(stmt) + # await self.engine.commit() + return [row[0] for row in result] diff --git a/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/schema.py b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/schema.py new file mode 100644 index 00000000..9e7b4eba --- /dev/null +++ b/extensions/gubbins/gubbins-db/src/gubbins/db/sql/lollygag/schema.py @@ -0,0 +1,21 @@ +# The utils class defines some boilerplate types that should be used +# in place of the SQLAlchemy ones. 
Have a look at them +from diracx.db.sql.utils import Column, DateNowColumn +from sqlalchemy import ForeignKey, Integer, String, Uuid +from sqlalchemy.orm import declarative_base + +Base = declarative_base() + + +class Owners(Base): + __tablename__ = "Owners" + ownerID = Column(Integer, primary_key=True, autoincrement=True) + creation_time = DateNowColumn() + name = Column(String(255)) + + +class Cars(Base): + __tablename__ = "Cars" + licensePlate = Column(Uuid(), primary_key=True) + model = Column(String(255)) + ownerID = Column(Integer, ForeignKey(Owners.ownerID)) diff --git a/extensions/gubbins/gubbins-db/tests/test_gubbinsJobDB.py b/extensions/gubbins/gubbins-db/tests/test_gubbinsJobDB.py new file mode 100644 index 00000000..1dd095b0 --- /dev/null +++ b/extensions/gubbins/gubbins-db/tests/test_gubbinsJobDB.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from typing import AsyncGenerator + +import pytest + +from gubbins.db.sql import GubbinsJobDB + + +@pytest.fixture +async def gubbins_db() -> AsyncGenerator[GubbinsJobDB, None]: + gubbins_db = GubbinsJobDB("sqlite+aiosqlite:///:memory:") + async with gubbins_db.engine_context(): + async with gubbins_db.engine.begin() as conn: + # set PRAGMA foreign_keys=ON if sqlite + if gubbins_db._db_url.startswith("sqlite"): + await conn.exec_driver_sql("PRAGMA foreign_keys=ON") + await conn.run_sync(gubbins_db.metadata.create_all) + yield gubbins_db + + +async def test_gubbins_info(gubbins_db): + """ + This test makes sure that we can: + * use a method from the parent db (insert) + * use a method from a child db (insert_gubbins_info) + * use a method modified in the child db (getJobJDL) + """ + async with gubbins_db as gubbins_db: + result = await gubbins_db.insert( + "JDL", + "owner_toto", + "owner_group1", + "New", + "dfdfds", + "lhcb", + ) + + job_id = result["JobID"] + + await gubbins_db.insert_gubbins_info(job_id, "info") + + result = await gubbins_db.getJobJDL(job_id, original=True) + assert result == "[JDL]" 
+ + result = await gubbins_db.getJobJDL(job_id, with_info=True) + assert "JDL" in result + assert result["Info"] == "info" diff --git a/extensions/gubbins/gubbins-db/tests/test_lollygagDB.py b/extensions/gubbins/gubbins-db/tests/test_lollygagDB.py new file mode 100644 index 00000000..f963ded1 --- /dev/null +++ b/extensions/gubbins/gubbins-db/tests/test_lollygagDB.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +import asyncio +from typing import TYPE_CHECKING +from uuid import uuid4 + +import pytest +from diracx.core.exceptions import InvalidQueryError +from diracx.db.sql.utils import SQLDBUnavailable + +from gubbins.db.sql.lollygag.db import LollygagDB + +if TYPE_CHECKING: + from typing import AsyncGenerator + +# Each DB test class must define a fixture looking like this one +# It allows getting an instance of an in-memory DB, + + +@pytest.fixture +async def lollygag_db(tmp_path) -> AsyncGenerator[LollygagDB, None]: + lollygag_db = LollygagDB("sqlite+aiosqlite:///:memory:") + async with lollygag_db.engine_context(): + async with lollygag_db.engine.begin() as conn: + await conn.run_sync(lollygag_db.metadata.create_all) + yield lollygag_db + + +async def test_insert_and_summary(lollygag_db: LollygagDB): + # Each context manager creates a transaction + # So it is important to write tests this way + async with lollygag_db as lollygag_db: + # First we check that the DB is empty + result = await lollygag_db.summary(["model"], []) + assert not result + + # Now we add some data in the DB + async with lollygag_db as lollygag_db: + # Add a car owner + owner_id = await lollygag_db.insert_owner(name="Magnum") + assert owner_id + + # Add cars, belonging to the same guy + result = await asyncio.gather( + *( + lollygag_db.insert_car(uuid4(), f"model_{i}", owner_id) + for i in range(10) + ) + ) + assert result + + # Check that there are now 10 cars assigned to a single driver + async with lollygag_db as lollygag_db: + result = await lollygag_db.summary(["ownerID"], []) + 
+ assert result[0]["count"] == 10 + + # Test the selection + async with lollygag_db as lollygag_db: + result = await lollygag_db.summary( + ["ownerID"], [{"parameter": "model", "operator": "eq", "value": "model_1"}] + ) + + assert result[0]["count"] == 1 + + async with lollygag_db as lollygag_db: + with pytest.raises(InvalidQueryError): + result = await lollygag_db.summary( + ["ownerID"], + [ + { + "parameter": "model", + "operator": "BADSELECTION", + "value": "model_1", + } + ], + ) + + +async def test_bad_connection(): + lollygag_db = LollygagDB("mysql+aiomysql://tata:yoyo@db.invalid:3306/name") + async with lollygag_db.engine_context(): + with pytest.raises(SQLDBUnavailable): + async with lollygag_db: + lollygag_db.ping() diff --git a/extensions/gubbins/gubbins-routers/pyproject.toml b/extensions/gubbins/gubbins-routers/pyproject.toml new file mode 100644 index 00000000..2ab09e95 --- /dev/null +++ b/extensions/gubbins/gubbins-routers/pyproject.toml @@ -0,0 +1,68 @@ +[project] +name = "gubbins-routers" +description = "TODO" +readme = "README.md" +requires-python = ">=3.10" +keywords = [] +license = { text = "GPL-3.0-only" } +classifiers = [ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering", + "Topic :: System :: Distributed Computing", +] + +dependencies = [ + # This is obvious + "diracx-routers", + # We should add something else +] + +dynamic = ["version"] + +[project.optional-dependencies] +testing = ["diracx-testing", "moto[server]", "pytest-httpx"] +types = [ + "boto3-stubs", + "types-aiobotocore[essential]", + "types-aiobotocore-s3", + "types-cachetools", + "types-python-dateutil", + "types-PyYAML", + "types-requests", +] + +[project.entry-points."diracx.services"] +lollygag = "gubbins.routers.lollygag:router" +".well-known" = "gubbins.routers.well_known:router" + +[project.entry-points."diracx.access_policies"] 
+lollygagAccessPolicy = "gubbins.routers.lollygag.access_policy:LollygagAccessPolicy" + + +[tool.setuptools.packages.find] +where = ["src"] + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +root = "../../.." + +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = [ + "-v", + "--cov=gubbins.routers", + "--cov-report=term-missing", + # Both gubbins and diracx are needed here + "-pgubbins.testing", + "-pdiracx.testing", + "--import-mode=importlib", +] +asyncio_mode = "auto" +markers = [ + "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", +] diff --git a/extensions/gubbins/gubbins-routers/src/gubbins/routers/__init__.py b/extensions/gubbins/gubbins-routers/src/gubbins/routers/__init__.py new file mode 100644 index 00000000..c6c689af --- /dev/null +++ b/extensions/gubbins/gubbins-routers/src/gubbins/routers/__init__.py @@ -0,0 +1 @@ +__all__ = ("dependencies", "lollygag", "well_known") diff --git a/extensions/gubbins/gubbins-routers/src/gubbins/routers/dependencies.py b/extensions/gubbins/gubbins-routers/src/gubbins/routers/dependencies.py new file mode 100644 index 00000000..9a27fd9e --- /dev/null +++ b/extensions/gubbins/gubbins-routers/src/gubbins/routers/dependencies.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +__all__ = ("Config",) + +from typing import Annotated + +from diracx.core.config import ConfigSource +from fastapi import Depends + +from gubbins.core.config.schema import Config as _Config + +# Overwrite the Config dependency such that gubbins routers +# can use it +Config = Annotated[_Config, Depends(ConfigSource.create)] diff --git a/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/__init__.py b/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/__init__.py new file mode 100644 index 00000000..fdbeb7ea --- /dev/null +++ 
b/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/__init__.py @@ -0,0 +1,2 @@ +__all__ = ("router",) +from .lollygag import router diff --git a/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/access_policy.py b/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/access_policy.py new file mode 100644 index 00000000..870727b1 --- /dev/null +++ b/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/access_policy.py @@ -0,0 +1,45 @@ +""" +Lollygag dummy AccessPolicy +Makes sure we can use Gubbins specific property + +""" + +from __future__ import annotations + +from collections.abc import Callable +from enum import StrEnum, auto +from typing import Annotated + +from diracx.routers.access_policies import BaseAccessPolicy +from diracx.routers.utils.users import AuthorizedUserInfo +from fastapi import Depends, HTTPException, status + +from gubbins.core.properties import GUBBINS_SENSEI + + +class ActionType(StrEnum): + + CREATE = auto() + + READ = auto() + + MANAGE = auto() + + +class LollygagAccessPolicy(BaseAccessPolicy): + + @staticmethod + async def policy( + policy_name: str, + user_info: AuthorizedUserInfo, + /, + *, + action: ActionType | None = None, + ): + assert action, "action is a mandatory parameter" + + if action == ActionType.MANAGE and GUBBINS_SENSEI not in user_info.properties: + raise HTTPException(status.HTTP_403_FORBIDDEN, detail="Streng verboten !!") + + +CheckLollygagPolicyCallable = Annotated[Callable, Depends(LollygagAccessPolicy.check)] diff --git a/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/lollygag.py b/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/lollygag.py new file mode 100644 index 00000000..1e69c06c --- /dev/null +++ b/extensions/gubbins/gubbins-routers/src/gubbins/routers/lollygag/lollygag.py @@ -0,0 +1,50 @@ +""" +This router makes use of the new LollygagDB. 
+It uses the Lollygag AccessPolicy (which itself requires the Gubbins property) +""" + +from __future__ import annotations + +from typing import Annotated + +from diracx.routers.fastapi_classes import DiracxRouter +from fastapi import Depends + +from gubbins.db.sql import LollygagDB as _LollygagDB + +from .access_policy import ActionType, CheckLollygagPolicyCallable + +# Define the dependency at the top, so you don't have to +# be so verbose in your routes +LollygagDB = Annotated[_LollygagDB, Depends(_LollygagDB.transaction)] + +router = DiracxRouter() + + +@router.post("/insert_owner/{owner_name}") +async def insert_owner_object( + lollygag_db: LollygagDB, + owner_name: str, + check_permission: CheckLollygagPolicyCallable, +): + await check_permission(action=ActionType.CREATE) + return await lollygag_db.insert_owner(owner_name) + + +@router.get("/get_owners") +async def get_owner_object( + lollygag_db: LollygagDB, + check_permission: CheckLollygagPolicyCallable, +): + await check_permission(action=ActionType.READ) + return await lollygag_db.get_owner() + + +@router.get("/gubbins_sensei") +async def get_gubbins_secrets( + lollygag_db: LollygagDB, + check_permission: CheckLollygagPolicyCallable, +): + """Does nothing but expects a GUBBINS_SENSEI permission""" + await check_permission(action=ActionType.MANAGE) + return await lollygag_db.get_owner() diff --git a/extensions/gubbins/gubbins-routers/src/gubbins/routers/py.typed b/extensions/gubbins/gubbins-routers/src/gubbins/routers/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/extensions/gubbins/gubbins-routers/src/gubbins/routers/well_known.py b/extensions/gubbins/gubbins-routers/src/gubbins/routers/well_known.py new file mode 100644 index 00000000..c7035362 --- /dev/null +++ b/extensions/gubbins/gubbins-routers/src/gubbins/routers/well_known.py @@ -0,0 +1,52 @@ +""" +Illustrate how to extend/overwrite a diracx router. 
+It : +* changes slightly the return type +* uses the Gubbins specific configuration +* uses the Gubbins dependencies +""" + +from diracx.routers.auth.well_known import Metadata +from diracx.routers.auth.well_known import ( + installation_metadata as _installation_metadata, +) +from diracx.routers.auth.well_known import router as diracx_wellknown_router +from diracx.routers.dependencies import DevelopmentSettings +from diracx.routers.fastapi_classes import DiracxRouter + +from gubbins.routers.dependencies import Config + +router = DiracxRouter(require_auth=False, path_root="") +router.include_router(diracx_wellknown_router) + + +# Change slightly the return type +class ExtendedMetadata(Metadata): + gubbins_secrets: str + gubbins_user_info: dict[str, list[str | None]] + + +# Overwrite the dirac-metadata endpoint and add an extra metadata +# This also makes sure that we can get Config as a GubbinsConfig +@router.get("/dirac-metadata") +async def installation_metadata( + config: Config, + dev_settings: DevelopmentSettings, +) -> ExtendedMetadata: + original_metadata = await _installation_metadata(config, dev_settings) + + gubbins_user_info: dict[str, list[str | None]] = {} + for vo in config.Registry: + vo_gubbins = [ + user.GubbinsSpecificInfo for user in config.Registry[vo].Users.values() + ] + gubbins_user_info[vo] = vo_gubbins + + gubbins_metadata = ExtendedMetadata( + gubbins_secrets="hush!", + virtual_organizations=original_metadata["virtual_organizations"], + development_settings=original_metadata["development_settings"], + gubbins_user_info=gubbins_user_info, + ) + + return gubbins_metadata diff --git a/extensions/gubbins/gubbins-routers/tests/data/lhcb-auth.web.cern.ch/.well-known/openid-configuration b/extensions/gubbins/gubbins-routers/tests/data/lhcb-auth.web.cern.ch/.well-known/openid-configuration new file mode 100644 index 00000000..92a0465b --- /dev/null +++ 
b/extensions/gubbins/gubbins-routers/tests/data/lhcb-auth.web.cern.ch/.well-known/openid-configuration @@ -0,0 +1 @@ +{"request_parameter_supported":true,"introspection_endpoint":"https://lhcb-auth.web.cern.ch/introspect","claims_parameter_supported":false,"scopes_supported":["openid","profile","email","address","phone","offline_access","eduperson_scoped_affiliation","eduperson_entitlement","eduperson_assurance","wlcg","wlcg.groups","entitlements"],"issuer":"https://lhcb-auth.web.cern.ch/","userinfo_encryption_enc_values_supported":["XC20P","A256CBC+HS512","A256GCM","A192GCM","A128GCM","A128CBC-HS256","A192CBC-HS384","A256CBC-HS512","A128CBC+HS256"],"id_token_encryption_enc_values_supported":["XC20P","A256CBC+HS512","A256GCM","A192GCM","A128GCM","A128CBC-HS256","A192CBC-HS384","A256CBC-HS512","A128CBC+HS256"],"authorization_endpoint":"https://lhcb-auth.web.cern.ch/authorize","request_object_encryption_enc_values_supported":["XC20P","A256CBC+HS512","A256GCM","A192GCM","A128GCM","A128CBC-HS256","A192CBC-HS384","A256CBC-HS512","A128CBC+HS256"],"device_authorization_endpoint":"https://lhcb-auth.web.cern.ch/devicecode","userinfo_signing_alg_values_supported":["HS256","HS384","HS512","RS256","RS384","RS512","ES256","ES384","ES512","PS256","PS384","PS512"],"claims_supported":["sub","name","preferred_username","given_name","family_name","middle_name","nickname","profile","picture","zoneinfo","locale","updated_at","email","email_verified","organisation_name","groups","wlcg.groups","external_authn"],"op_policy_uri":"https://lhcb-auth.web.cern.ch/about","claim_types_supported":["normal"],"token_endpoint_auth_methods_supported":["client_secret_basic","client_secret_post","client_secret_jwt","private_key_jwt","none"],"token_endpoint":"https://lhcb-auth.web.cern.ch/token","response_types_supported":["code","token"],"request_uri_parameter_supported":false,"userinfo_encryption_alg_values_supported":["RSA-OAEP-512","RSA-OAEP","RSA-OAEP-256","RSA1_5","RSA-OAEP-384"],"grant_types_supp
orted":["authorization_code","implicit","refresh_token","client_credentials","password","urn:ietf:params:oauth:grant-type:token-exchange","urn:ietf:params:oauth:grant-type:device_code"],"revocation_endpoint":"https://lhcb-auth.web.cern.ch/revoke","userinfo_endpoint":"https://lhcb-auth.web.cern.ch/userinfo","op_tos_uri":"https://lhcb-auth.web.cern.ch/about","token_endpoint_auth_signing_alg_values_supported":["HS256","HS384","HS512","RS256","RS384","RS512","ES256","ES384","ES512","PS256","PS384","PS512"],"require_request_uri_registration":false,"code_challenge_methods_supported":["plain","S256"],"id_token_encryption_alg_values_supported":["RSA-OAEP-512","RSA-OAEP","RSA-OAEP-256","RSA1_5","RSA-OAEP-384"],"jwks_uri":"https://lhcb-auth.web.cern.ch/jwk","subject_types_supported":["public","pairwise"],"id_token_signing_alg_values_supported":["HS256","HS384","HS512","RS256","RS384","RS512","ES256","ES384","ES512","PS256","PS384","PS512","none"],"registration_endpoint":"https://lhcb-auth.web.cern.ch/iam/api/client-registration","request_object_signing_alg_values_supported":["HS256","HS384","HS512","RS256","RS384","RS512","ES256","ES384","ES512","PS256","PS384","PS512"],"request_object_encryption_alg_values_supported":["RSA-OAEP-512","RSA-OAEP","RSA-OAEP-256","RSA1_5","RSA-OAEP-384"]} diff --git a/extensions/gubbins/gubbins-routers/tests/test_gubbins_job_manager.py b/extensions/gubbins/gubbins-routers/tests/test_gubbins_job_manager.py new file mode 100644 index 00000000..73e320ff --- /dev/null +++ b/extensions/gubbins/gubbins-routers/tests/test_gubbins_job_manager.py @@ -0,0 +1,86 @@ +""" +Just repete the diracx tests to make sure they still pass +""" + +import pytest +from diracx.core.models import JobStatus +from fastapi.testclient import TestClient + +pytestmark = pytest.mark.enabled_dependencies( + [ + "AuthSettings", + # CAUTION !!! 
+ # You need to put both the original AND your extended one + "JobDB", + "GubbinsJobDB", + # + "JobLoggingDB", + "WMSAccessPolicy", + "ConfigSource", + "TaskQueueDB", + "DevelopmentSettings", + ] +) + + +TEST_JDL = """ + Arguments = "jobDescription.xml -o LogLevel=INFO"; + Executable = "dirac-jobexec"; + JobGroup = jobGroup; + JobName = jobName; + JobType = User; + LogLevel = INFO; + OutputSandbox = + { + Script1_CodeOutput.log, + std.err, + std.out + }; + Priority = 1; + Site = ANY; + StdError = std.err; + StdOutput = std.out; +""" + + +@pytest.fixture +def normal_user_client(client_factory): + with client_factory.normal_user() as client: + yield client + + +@pytest.fixture +def valid_job_id(normal_user_client: TestClient): + """ + Copied from the vanila tests + This ensures that the submission route works + + """ + job_definitions = [TEST_JDL] + r = normal_user_client.post("/api/jobs/", json=job_definitions) + assert r.status_code == 200, r.json() + assert len(r.json()) == 1 + return r.json()[0]["JobID"] + + +def test_gubbins_job_router(normal_user_client, valid_job_id): + """ + Basically like diracx test_delete_job_valid_job_id + except that the job does not go into DELETED status, + as the method is intercepted by the DB + """ + + # We search for the job + r = normal_user_client.get(f"/api/jobs/{valid_job_id}/status") + assert r.status_code == 200, r.json() + assert r.json()[str(valid_job_id)]["Status"] == JobStatus.RECEIVED + + # We delete the job, and here we expect that nothing + # actually happened + r = normal_user_client.delete(f"/api/jobs/{valid_job_id}") + assert r.status_code == 200, r.json() + + r = normal_user_client.get(f"/api/jobs/{valid_job_id}/status") + assert r.status_code == 200, r.json() + # The job would normally be deleted + assert r.json()[str(valid_job_id)]["Status"] == JobStatus.RECEIVED diff --git a/extensions/gubbins/gubbins-routers/tests/test_lollybag.py b/extensions/gubbins/gubbins-routers/tests/test_lollybag.py new file mode 100644 
index 00000000..83349f6c --- /dev/null +++ b/extensions/gubbins/gubbins-routers/tests/test_lollybag.py @@ -0,0 +1,34 @@ +""" +Test lollygag router as a normal router +""" + +import pytest + +pytestmark = pytest.mark.enabled_dependencies( + [ + "AuthSettings", + "LollygagDB", + "LollygagAccessPolicy", + "DevelopmentSettings", + ] +) + + +@pytest.fixture +def normal_user_client(client_factory): + with client_factory.normal_user() as client: + yield client + + +def test_lollygag(normal_user_client): + r = normal_user_client.get("/api/lollygag/get_owners") + assert r.status_code == 200 + assert len(r.json()) == 0 + + r = normal_user_client.post("/api/lollygag/insert_owner/username") + assert r.status_code == 200 + assert r.json() + + r = normal_user_client.get("/api/lollygag/get_owners") + assert r.status_code == 200 + assert r.json() == ["username"] diff --git a/extensions/gubbins/gubbins-routers/tests/test_wellknown.py b/extensions/gubbins/gubbins-routers/tests/test_wellknown.py new file mode 100644 index 00000000..850a8614 --- /dev/null +++ b/extensions/gubbins/gubbins-routers/tests/test_wellknown.py @@ -0,0 +1,37 @@ +""" +Test the extended well_known endpoint +""" + +import pytest +from fastapi import status + +pytestmark = pytest.mark.enabled_dependencies( + ["AuthSettings", "ConfigSource", "BaseAccessPolicy", "DevelopmentSettings"] +) + + +@pytest.fixture +def test_client(client_factory): + with client_factory.unauthenticated() as client: + yield client + + +async def test_dirac_metadata_is_overwriten(test_client): + """ + Makes sure that the dirac-metadata endpoint is properly overwriten + """ + r = test_client.get( + "/.well-known/dirac-metadata", + ) + assert r.status_code == 200, r.json() + assert "gubbins_secrets" in r.json(), r.json() + + +async def test_openid_configuration_is_not_changed(test_client): + """test that the endpoint still exists and is unchanged""" + + r = test_client.get( + "/.well-known/openid-configuration", + ) + assert r.status_code == 
status.HTTP_200_OK, r.json() + assert "authorization_endpoint" in r.json(), r.json() diff --git a/extensions/gubbins/gubbins-testing/pyproject.toml b/extensions/gubbins/gubbins-testing/pyproject.toml new file mode 100644 index 00000000..112227eb --- /dev/null +++ b/extensions/gubbins/gubbins-testing/pyproject.toml @@ -0,0 +1,29 @@ +[project] +name = "gubbins-testing" +description = "TODO" +readme = "README.md" +requires-python = ">=3.10" +keywords = [] +license = {text = "GPL-3.0-only"} +classifiers = [ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering", + "Topic :: System :: Distributed Computing", +] +dependencies = [ + "diracx-testing", +] +dynamic = ["version"] + + +[tool.setuptools.packages.find] +where = ["src"] + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +root = "../../.." diff --git a/extensions/gubbins/gubbins-testing/src/gubbins/testing/__init__.py b/extensions/gubbins/gubbins-testing/src/gubbins/testing/__init__.py new file mode 100644 index 00000000..aa9109b1 --- /dev/null +++ b/extensions/gubbins/gubbins-testing/src/gubbins/testing/__init__.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +import os + +import pytest + +# This fixture makes sure the extension variable is set correctly +# during the tests. 
+# You really should define one like that, it will save you some headache + + +@pytest.fixture(scope="session", autouse=True) +def check_extension_env(): + if os.environ.get("DIRACX_EXTENSIONS") != "gubbins,diracx": + pytest.fail( + "You must set the DIRACX_EXTENSIONS environment variable to 'gubbins,diracx'" + ) diff --git a/extensions/gubbins/pyproject.toml b/extensions/gubbins/pyproject.toml new file mode 100644 index 00000000..0e618a10 --- /dev/null +++ b/extensions/gubbins/pyproject.toml @@ -0,0 +1,130 @@ +[project] +name = "gubbins" +description = "Client installation for users of DiracX installations" +readme = "README.md" +requires-python = ">=3.10" +keywords = [] +license = { text = "GPL-3.0-only" } +classifiers = [ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering", + "Topic :: System :: Distributed Computing", +] +dependencies = [ + # "gubbins-api", + # "gubbins-cli", + # "gubbins-client", + # "gubbins-core", +] +dynamic = ["version"] + +[project.optional-dependencies] +testing = ["gubbins-testing"] + +[tool.setuptools] +packages = [] + +[build-system] +requires = ["setuptools>=61", "wheel", "setuptools_scm>=8"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +root = "../.." 
+ +[tool.ruff] +line-length = 120 +src = ["gubbins-*/src", "gubbins-*/tests"] +exclude = ["gubbins-client/src/gubbins/client/generated"] + +[tool.isort] +profile = "black" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errrors + "F", # pyflakes + "B", # flake8-bugbear + "I", # isort + "PLE", # pylint errors + # "UP", # pyUpgrade + "FLY", # flynt + "DTZ", # flake8-datetimez + "S", # flake8-bandit +] + +ignore = [ + "B905", + "B008", + "B006", + "S101", # bandit: use of assert https://docs.astral.sh/ruff/rules/assert/ +] + + + +[tool.ruff.lint.flake8-bugbear] +# Allow default arguments like, e.g., `data: List[str] = fastapi.Query(None)`. +extend-immutable-calls = [ + "fastapi.Depends", + "fastapi.Query", + "fastapi.Path", + "fastapi.Body", + "fastapi.Header", +] + +[tool.mypy] +files = [ +# "gubbins-api/src/**/*.py", + "gubbins-cli/src/**/*.py", + "gubbins-client/src/**/_patch.py", + "gubbins-client/src/gubbins/client/patches/**/*.py", +# "gubbins-core/src/**/*.py", + "gubbins-db/src/**/*.py", + "gubbins-routers/src/**/*.py", +] +mypy_path = [ +# "$MYPY_CONFIG_FILE_DIR/gubbins-api/src", + "$MYPY_CONFIG_FILE_DIR/gubbins-cli/src", + "$MYPY_CONFIG_FILE_DIR/gubbins-client/src", +# "$MYPY_CONFIG_FILE_DIR/gubbins-core/src", + "$MYPY_CONFIG_FILE_DIR/gubbins-db/src", + "$MYPY_CONFIG_FILE_DIR/gubbins-routers/src", +] +plugins = ["sqlalchemy.ext.mypy.plugin", "pydantic.mypy"] +allow_redefinition = true +explicit_package_bases = true +# disallow_untyped_defs = true +# strict = true +enable_error_code = ["import", "attr-defined"] + +[[tool.mypy.overrides]] +module = 'DIRAC.*' +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = 'authlib.*' +ignore_missing_imports = true + +[tool.pytest.ini_options] +testpaths = [ + "gubbins-api/tests", + "gubbins-cli/tests", + "gubbins-client/tests", + "gubbins-core/tests", + "gubbins-db/tests", + "gubbins-routers/tests", +] +addopts = [ + "-v", + "--cov=gubbins", + "--cov-report=term-missing", + "-pgubbins.testing", + 
"-pdiracx.testing", + "-pdiracx.testing.osdb", + "--import-mode=importlib", +] +asyncio_mode = "auto" +markers = [ + "enabled_dependencies: List of dependencies which should be available to the FastAPI test client", +] diff --git a/extensions/gubbins/release.notes b/extensions/gubbins/release.notes new file mode 100644 index 00000000..341e0a5c --- /dev/null +++ b/extensions/gubbins/release.notes @@ -0,0 +1,2 @@ +[0.0.1] +There was myDIRAC... and a prototype diff --git a/extensions/gubbins/requirements-dev.txt b/extensions/gubbins/requirements-dev.txt new file mode 100644 index 00000000..37a24f36 --- /dev/null +++ b/extensions/gubbins/requirements-dev.txt @@ -0,0 +1,6 @@ +-e ./gubbins-routers/[testing] +-e ./gubbins-db/[testing] +-e ./gubbins-testing/[testing] +-e ./gubbins-client/[testing] +-e ./gubbins-cli/[testing] +-e ./gubbins-core/[testing] diff --git a/extensions/gubbins_values.yaml b/extensions/gubbins_values.yaml new file mode 100644 index 00000000..0d974913 --- /dev/null +++ b/extensions/gubbins_values.yaml @@ -0,0 +1,12 @@ +# values specific to testing gubbins +global: + # Needed to be able to force pre-load the image in kind + # see https://kind.sigs.k8s.io/docs/user/quick-start/#loading-an-image-into-your-cluster + # and https://iximiuz.com/en/posts/kubernetes-kind-load-docker-image/ + imagePullPolicy: IfNotPresent + images: + services: gubbins/services +diracx: + sqlDbs: + dbs: + LollygagDB: diff --git a/pyproject.toml b/pyproject.toml index ab8f291c..4109f0cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,9 @@ build-backend = "setuptools.build_meta" [tool.ruff] line-length = 120 src = ["diracx-*/src", "diracx-*/tests"] -exclude = ["diracx-client/src/diracx/client/"] +exclude = [ + "diracx-client/src/diracx/client/", +] [tool.ruff.lint] select = [