From 5707eec10701285f386ffc96a74e2a23cdffdd98 Mon Sep 17 00:00:00 2001 From: Arnaud Lheureux Date: Sun, 2 Jun 2024 18:49:48 -0700 Subject: [PATCH] Simplify workflows --- .../{pr_dispatch.yaml => pr_lint.yaml} | 25 +-- .github/workflows/pr_tests-azuread.yaml | 95 +++++++++++ .github/workflows/pr_tests-compute.yaml | 95 +++++++++++ .github/workflows/pr_tests-dataplat.yaml | 101 +++++++++++ .github/workflows/pr_tests-networking.yaml | 95 +++++++++++ .github/workflows/pr_tests-scenarios.yaml | 91 ++++++++++ ...s-azuread.json => standalone-azuread.json} | 0 .github/workflows/standalone-compute.json | 5 +- .github/workflows/standalone-compute.yaml | 152 ----------------- .github/workflows/standalone-dataplat.json | 76 +++++++++ .github/workflows/standalone-networking.json | 15 +- .github/workflows/standalone-networking.yaml | 152 ----------------- .../workflows/standalone-regressor-tf100.yaml | 3 +- .../standalone-scenarios-additional.json | 5 - .../standalone-scenarios-longrunners.json | 41 ----- .github/workflows/standalone-scenarios.json | 109 ++++-------- .github/workflows/standalone-tf100.yaml | 161 ------------------ .github/workflows/weekly_dispatch.yaml | 34 ++++ ...{pr_workflow.yaml => weekly_workflow.yaml} | 23 ++- 19 files changed, 648 insertions(+), 630 deletions(-) rename .github/workflows/{pr_dispatch.yaml => pr_lint.yaml} (58%) create mode 100644 .github/workflows/pr_tests-azuread.yaml create mode 100644 .github/workflows/pr_tests-compute.yaml create mode 100644 .github/workflows/pr_tests-dataplat.yaml create mode 100644 .github/workflows/pr_tests-networking.yaml create mode 100644 .github/workflows/pr_tests-scenarios.yaml rename .github/workflows/{standalone-scenarios-azuread.json => standalone-azuread.json} (100%) delete mode 100644 .github/workflows/standalone-compute.yaml create mode 100644 .github/workflows/standalone-dataplat.json delete mode 100644 .github/workflows/standalone-networking.yaml delete mode 100644 .github/workflows/standalone-scenarios-longrunners.json delete mode 100644 .github/workflows/standalone-tf100.yaml create mode 100644 .github/workflows/weekly_dispatch.yaml rename .github/workflows/{pr_workflow.yaml => weekly_workflow.yaml} (97%) diff --git a/.github/workflows/pr_dispatch.yaml b/.github/workflows/pr_lint.yaml similarity index 58% rename from .github/workflows/pr_dispatch.yaml rename to .github/workflows/pr_lint.yaml index f478bd51ff..8e0d16a3d4 100644 --- a/.github/workflows/pr_dispatch.yaml +++ b/.github/workflows/pr_lint.yaml @@ -3,7 +3,7 @@ # Licensed under the MIT License. 
# -name: PR Dispatch Workflow +name: PR Tflint on: pull_request: @@ -12,6 +12,7 @@ on: env: TF_VERSION: "1.8.4" TF_LINT_VERSION: "v0.50.3" + jobs: linting: name: Format and Lint Checks @@ -35,30 +36,10 @@ jobs: with: tflint_version: ${{ env.TF_LINT_VERSION }} - - name: Run TFLint with reviewdog uses: reviewdog/action-tflint@v1 with: github_token: ${{ secrets.GITHUB_TOKEN }} reporter: github-pr-check level: info - tflint_init: true - - dispatch: - runs-on: ubuntu-latest - strategy: - matrix: - scenario: - - standalone-scenarios-azuread.json - - standalone-scenarios.json - - standalone-compute.json - - standalone-networking.json - - standalone-scenarios-longrunners.json - - steps: - - name: Repository Dispatch - uses: peter-evans/repository-dispatch@v3 - with: - token: ${{ secrets.GITHUB_TOKEN }} - event-type: pr-${{ matrix.scenario }} - client-payload: '{"scenario": "${{ (matrix.scenario) }}", "sha": "${{ github.event.pull_request.head.sha }}"}' + tflint_init: true \ No newline at end of file diff --git a/.github/workflows/pr_tests-azuread.yaml b/.github/workflows/pr_tests-azuread.yaml new file mode 100644 index 0000000000..60640e52ae --- /dev/null +++ b/.github/workflows/pr_tests-azuread.yaml @@ -0,0 +1,95 @@ +# +# Copyright (c) Microsoft Corporation +# Licensed under the MIT License. +# + +name: PR azuread-tests + +on: + pull_request: + types: [opened, synchronize, reopened] + paths: + - 'azuread*' + - 'modules/azuread/**' + - 'examples/azuread/**' + - '.github/workflows/*azuread.*' + +env: + scenario: standalone-azuread.json + TF_VERSION: "1.8.4" + TF_LINT_VERSION: "v0.50.3" + +jobs: + load_scenarios: + name: Load Test Scenarios Matrix + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.load_scenarios.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + + - id: load_scenarios + run: | + cases=$(cat ./.github/workflows/${{ env.SCENARIO }} | jq -c .) 
+ echo "matrix=${cases}" >> $GITHUB_OUTPUT + + mock_plan_scenarios: + name: ${{ matrix.config_files }} + runs-on: ubuntu-latest + needs: load_scenarios + + strategy: + fail-fast: false + matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Create environment variables + run: | + cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} + FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) + echo STATE_FILE=${HOME}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV + echo PLAN_FILE=${HOME}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV + echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV + echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV + + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: '20.x' + + - name: Install Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${{ env.TF_VERSION }} + + - name: Configure Terraform plugin cache + run: | + echo "TF_PLUGIN_CACHE_DIR=$HOME/.terraform.d/plugin-cache" >>"$GITHUB_ENV" + mkdir --parents "$HOME/.terraform.d/plugin-cache" + + - name: Cache Terraform + uses: actions/cache@v4 + with: + path: | + ~/.terraform.d/plugin-cache + key: ${{ runner.os }}-terraform-${{ hashFiles('**/.terraform.lock.hcl') }} + restore-keys: | + ${{ runner.os }}-terraform- + + - name: Terraform Init example + id: tf_init + run: | + terraform -chdir=examples \ + init + + - name: Terraform Test example + id: tf_test + run: | + terraform -chdir=examples \ + test \ + -test-directory=./tests/mock \ + ${{ env.PARAMETER_FILES }} \ + -verbose \ No newline at end of file diff --git a/.github/workflows/pr_tests-compute.yaml b/.github/workflows/pr_tests-compute.yaml new file mode 100644 index 0000000000..993d2269a0 --- /dev/null +++ b/.github/workflows/pr_tests-compute.yaml @@ -0,0 +1,95 @@ +# +# Copyright (c) Microsoft Corporation +# Licensed under the MIT License. +# + +name: PR compute-tests + +on: + pull_request: + types: [opened, synchronize, reopened] + paths: + - 'compute_*' + - 'modules/compute/**' + - 'examples/compute/**' + - '.github/workflows/*compute.*' + +env: + scenario: standalone-compute.json + TF_VERSION: "1.8.4" + TF_LINT_VERSION: "v0.50.3" + +jobs: + load_scenarios: + name: Load Test Scenarios Matrix + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.load_scenarios.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + + - id: load_scenarios + run: | + cases=$(cat ./.github/workflows/${{ env.SCENARIO }} | jq -c .) 
+ echo "matrix=${cases}" >> $GITHUB_OUTPUT + + mock_plan_scenarios: + name: ${{ matrix.config_files }} + runs-on: ubuntu-latest + needs: load_scenarios + + strategy: + fail-fast: false + matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Create environment variables + run: | + cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} + FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) + echo STATE_FILE=${HOME}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV + echo PLAN_FILE=${HOME}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV + echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV + echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV + + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: '20.x' + + - name: Install Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${{ env.TF_VERSION }} + + - name: Configure Terraform plugin cache + run: | + echo "TF_PLUGIN_CACHE_DIR=$HOME/.terraform.d/plugin-cache" >>"$GITHUB_ENV" + mkdir --parents "$HOME/.terraform.d/plugin-cache" + + - name: Cache Terraform + uses: actions/cache@v4 + with: + path: | + ~/.terraform.d/plugin-cache + key: ${{ runner.os }}-terraform-${{ hashFiles('**/.terraform.lock.hcl') }} + restore-keys: | + ${{ runner.os }}-terraform- + + - name: Terraform Init example + id: tf_init + run: | + terraform -chdir=examples \ + init + + - name: Terraform Test example + id: tf_test + run: | + terraform -chdir=examples \ + test \ + -test-directory=./tests/mock \ + ${{ env.PARAMETER_FILES }} \ + -verbose \ No newline at end of file diff --git a/.github/workflows/pr_tests-dataplat.yaml b/.github/workflows/pr_tests-dataplat.yaml new file mode 100644 index 0000000000..5aaabd1a60 --- /dev/null +++ b/.github/workflows/pr_tests-dataplat.yaml @@ -0,0 +1,101 @@ +# +# Copyright (c) Microsoft Corporation +# Licensed under the MIT License. +# + +name: PR dataplat-tests + +on: + pull_request: + types: [opened, synchronize, reopened] + paths: + - 'mssql*' + - 'modules/analytics/**' + - 'modules/databases/**' + - 'modules/data_factory/**' + - 'modules/purview/**' + - 'examples/analytics/**' + - 'examples/databases/**' + - 'examples/data_factory/**' + - 'examples/purview/**' + - '.github/workflows/*dataplat.*' + +env: + scenario: standalone-dataplat.json + TF_VERSION: "1.8.4" + TF_LINT_VERSION: "v0.50.3" + +jobs: + load_scenarios: + name: Load Test Scenarios Matrix + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.load_scenarios.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + + - id: load_scenarios + run: | + cases=$(cat ./.github/workflows/${{ env.SCENARIO }} | jq -c .) 
+ echo "matrix=${cases}" >> $GITHUB_OUTPUT + + mock_plan_scenarios: + name: ${{ matrix.config_files }} + runs-on: ubuntu-latest + needs: load_scenarios + + strategy: + fail-fast: false + matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Create environment variables + run: | + cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} + FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) + echo STATE_FILE=${HOME}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV + echo PLAN_FILE=${HOME}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV + echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV + echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV + + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: '20.x' + + - name: Install Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${{ env.TF_VERSION }} + + - name: Configure Terraform plugin cache + run: | + echo "TF_PLUGIN_CACHE_DIR=$HOME/.terraform.d/plugin-cache" >>"$GITHUB_ENV" + mkdir --parents "$HOME/.terraform.d/plugin-cache" + + - name: Cache Terraform + uses: actions/cache@v4 + with: + path: | + ~/.terraform.d/plugin-cache + key: ${{ runner.os }}-terraform-${{ hashFiles('**/.terraform.lock.hcl') }} + restore-keys: | + ${{ runner.os }}-terraform- + + - name: Terraform Init example + id: tf_init + run: | + terraform -chdir=examples \ + init + + - name: Terraform Test example + id: tf_test + run: | + terraform -chdir=examples \ + test \ + -test-directory=./tests/mock \ + ${{ env.PARAMETER_FILES }} \ + -verbose \ No newline at end of file diff --git a/.github/workflows/pr_tests-networking.yaml b/.github/workflows/pr_tests-networking.yaml new file mode 100644 index 0000000000..24a07e0855 --- /dev/null +++ b/.github/workflows/pr_tests-networking.yaml @@ -0,0 +1,95 @@ +# +# Copyright (c) Microsoft Corporation +# Licensed under the MIT License. +# + +name: PR networking-tests + +on: + pull_request: + types: [opened, synchronize, reopened] + paths: + - 'network*' + - 'modules/networking/**' + - 'examples/networking/**' + - '.github/workflows/*networking.*' + +env: + scenario: standalone-networking.json + TF_VERSION: "1.8.4" + TF_LINT_VERSION: "v0.50.3" + +jobs: + load_scenarios: + name: Load Test Scenarios Matrix + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.load_scenarios.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + + - id: load_scenarios + run: | + cases=$(cat ./.github/workflows/${{ env.SCENARIO }} | jq -c .) 
+ echo "matrix=${cases}" >> $GITHUB_OUTPUT + + mock_plan_scenarios: + name: ${{ matrix.config_files }} + runs-on: ubuntu-latest + needs: load_scenarios + + strategy: + fail-fast: false + matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Create environment variables + run: | + cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} + FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) + echo STATE_FILE=${HOME}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV + echo PLAN_FILE=${HOME}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV + echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV + echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV + + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: '20.x' + + - name: Install Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${{ env.TF_VERSION }} + + - name: Configure Terraform plugin cache + run: | + echo "TF_PLUGIN_CACHE_DIR=$HOME/.terraform.d/plugin-cache" >>"$GITHUB_ENV" + mkdir --parents "$HOME/.terraform.d/plugin-cache" + + - name: Cache Terraform + uses: actions/cache@v4 + with: + path: | + ~/.terraform.d/plugin-cache + key: ${{ runner.os }}-terraform-${{ hashFiles('**/.terraform.lock.hcl') }} + restore-keys: | + ${{ runner.os }}-terraform- + + - name: Terraform Init example + id: tf_init + run: | + terraform -chdir=examples \ + init + + - name: Terraform Test example + id: tf_test + run: | + terraform -chdir=examples \ + test \ + -test-directory=./tests/mock \ + ${{ env.PARAMETER_FILES }} \ + -verbose \ No newline at end of file diff --git a/.github/workflows/pr_tests-scenarios.yaml b/.github/workflows/pr_tests-scenarios.yaml new file mode 100644 index 0000000000..8bd8c90965 --- /dev/null +++ b/.github/workflows/pr_tests-scenarios.yaml @@ -0,0 +1,91 @@ +# +# Copyright (c) Microsoft Corporation +# Licensed under the MIT License. +# + +name: PR all-tests + +on: + pull_request: + types: [opened, synchronize, reopened] + +env: + scenario: standalone-scenarios.json + TF_VERSION: "1.8.4" + TF_LINT_VERSION: "v0.50.3" + +jobs: + load_scenarios: + name: Load Test Scenarios Matrix + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.load_scenarios.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + + + - id: load_scenarios + run: | + cases=$(cat ./.github/workflows/${{ env.SCENARIO }} | jq -c .) 
+ echo "matrix=${cases}" >> $GITHUB_OUTPUT + + mock_plan_scenarios: + name: ${{ matrix.config_files }} + runs-on: ubuntu-latest + needs: load_scenarios + + strategy: + fail-fast: false + matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Create environment variables + run: | + cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} + FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) + echo STATE_FILE=${HOME}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV + echo PLAN_FILE=${HOME}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV + echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV + echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV + + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: '20.x' + + - name: Install Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${{ env.TF_VERSION }} + + - name: Configure Terraform plugin cache + run: | + echo "TF_PLUGIN_CACHE_DIR=$HOME/.terraform.d/plugin-cache" >>"$GITHUB_ENV" + mkdir --parents "$HOME/.terraform.d/plugin-cache" + + - name: Cache Terraform + uses: actions/cache@v4 + with: + path: | + ~/.terraform.d/plugin-cache + key: ${{ runner.os }}-terraform-${{ hashFiles('**/.terraform.lock.hcl') }} + restore-keys: | + ${{ runner.os }}-terraform- + + - name: Terraform Init example + id: tf_init + run: | + terraform -chdir=examples \ + init + + - name: Terraform Test example + id: tf_test + run: | + terraform -chdir=examples \ + test \ + -test-directory=./tests/mock \ + ${{ env.PARAMETER_FILES }} \ + -verbose \ No newline at end of file diff --git a/.github/workflows/standalone-scenarios-azuread.json b/.github/workflows/standalone-azuread.json similarity index 100% rename from .github/workflows/standalone-scenarios-azuread.json rename to .github/workflows/standalone-azuread.json diff --git a/.github/workflows/standalone-compute.json b/.github/workflows/standalone-compute.json index 0ef269d56b..a1effb7e50 100644 --- a/.github/workflows/standalone-compute.json +++ b/.github/workflows/standalone-compute.json @@ -2,9 +2,9 @@ "config_files": [ "compute/availability_set/100-simple-availabilityset", "compute/availability_set/101-availabilityset-with-proximity-placement-group", - "compute/azure_virtual_desktop/wvd_resources", "compute/azure_redhat_openshift/101_basic_private_cluster", "compute/azure_redhat_openshift/102_basic_public_cluster", + "compute/azure_virtual_desktop/wvd_resources", "compute/batch/batch_account/100-batch-account-storage", "compute/batch/batch_account/100-batch-account", "compute/batch/batch_account/200-batch-account-private-endpoint", @@ -49,6 +49,7 @@ "compute/virtual_machine/214-vm-generic_extensions_complex", "compute/virtual_machine/215-vm-keyvault-for-windows-extension", "compute/virtual_machine/216-vm-linux_diagnostic_extensions", - "compute/virtual_machine/217-vm-disk-encryption-set-msi" + "compute/virtual_machine/217-vm-disk-encryption-set-msi", + "compute/vmware_cluster/101-vmware_cluster" ] } diff --git a/.github/workflows/standalone-compute.yaml b/.github/workflows/standalone-compute.yaml deleted file mode 100644 index 17696aa0e1..0000000000 --- a/.github/workflows/standalone-compute.yaml +++ /dev/null @@ -1,152 +0,0 @@ -# -# Copyright (c) Microsoft Corporation -# Licensed under the MIT License. 
-# - -name: standalone-compute - -on: - push: - paths: - - 'compute_*' - - 'modules/compute/**' - - 'examples/compute/**' - - '.github/workflows/*compute.*' - -env: - TF_CLI_ARGS: "-no-color" - TF_CLI_ARGS_destroy: "-auto-approve -refresh=false" - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} - ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} - TF_REGISTRY_DISCOVERY_RETRY: 5 - TF_REGISTRY_CLIENT_TIMEOUT: 15 - ROVER_RUNNER: true - -jobs: - load_scenarios: - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.load_scenarios.outputs.matrix }} - steps: - - uses: actions/checkout@v4 - - id: load_scenarios - run: | - cases=$(cat ./.github/workflows/standalone-compute.json | jq -c .) - echo "matrix=${cases}" >> $GITHUB_OUTPUT - - testcases: - name: test - runs-on: ubuntu-latest - needs: load_scenarios - - strategy: - fail-fast: false - matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} - - container: - image: aztfmod/rover:1.8.4-2405.2306 - options: --user 0 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Create environment variables - run: | - cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} - FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) - echo STATE_FILE=${TF_DATA_DIR}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV - echo PLAN_FILE=${TF_DATA_DIR}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV - echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV - echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV - - - name: Login azure - run: | - az login --service-principal -u '${{ env.ARM_CLIENT_ID }}' -p '${{ env.ARM_CLIENT_SECRET }}' --tenant '${{ env.ARM_TENANT_ID }}' - az account set -s ${{ env.ARM_SUBSCRIPTION_ID }} - - - name: Terraform Init example - id: tf_init - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - init -upgrade=true | grep -P '^- (?=Downloading|Using|Finding|Installing)|^[^-]' - - - name: Terraform Plan example - id: tf_plan - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - plan \ - ${{ env.PARAMETER_FILES }} \ - -var tags='{testing_job_id='"${{ github.run_id }}"'}' \ - -var var_folder_path=${{ env.CURRENT_FOLDER }} \ - -refresh=true \ - -input=false \ - -state=${{ env.STATE_FILE }} \ - -out=${{ env.PLAN_FILE }} - - - name: Terraform Apply example - id: tf_apply - if: steps.tf_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - apply \ - -parallelism=30 \ - -state=${{ env.STATE_FILE }} \ - ${{ env.PLAN_FILE }} - - - name: Terraform Destroy planning example - id: tf_destroy_plan - if: steps.tf_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - plan \ - ${{ env.PARAMETER_FILES }} \ - -var tags='{testing_job_id='"${{ github.run_id }}"'}' \ - -var var_folder_path=${{ env.CURRENT_FOLDER }} \ - -refresh=true \ - -input=false \ - -destroy \ - -state=${{ env.STATE_FILE }} \ - -out=${{ env.PLAN_FILE }}-destroy - - - name: Terraform Destroy apply example - id: tf_destroy_apply - if: steps.tf_destroy_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - apply \ - -refresh=false \ - -parallelism=30 \ - -auto-approve \ - -state=${{ env.STATE_FILE }} \ - ${{ env.PLAN_FILE }}-destroy - - purge: - name: purge - runs-on: ubuntu-latest - if: ${{ failure() || cancelled() }} - - needs: 
[testcases] - - container: - image: aztfmod/rover:1.8.4-2405.2306 - options: --user 0 - - steps: - - name: Login azure - run: | - az login --service-principal -u '${{ env.ARM_CLIENT_ID }}' -p '${{ env.ARM_CLIENT_SECRET }}' --tenant '${{ env.ARM_TENANT_ID }}' - az account set -s ${{ env.ARM_SUBSCRIPTION_ID }} - - - name: Complete purge - run: | - for i in `az monitor diagnostic-settings subscription list -o tsv --query "value[?contains(name, '${{ github.run_id }}' )].name"`; do echo "purging subscription diagnostic-settings: $i" && $(az monitor diagnostic-settings subscription delete --name $i --yes); done - for i in `az monitor log-profiles list -o tsv --query '[].name'`; do az monitor log-profiles delete --name $i; done - for i in `az ad group list --query "[?contains(displayName, '${{ github.run_id }}')].id" -o tsv`; do echo "purging Azure AD group: $i" && $(az ad group delete --verbose --group $i || true); done - for i in `az ad app list --query "[?contains(displayName, '${{ github.run_id }}')].appId" -o tsv`; do echo "purging Azure AD app: $i" && $(az ad app delete --verbose --id $i || true); done - for i in `az keyvault list-deleted --query "[?tags.testing_job_id=='${{ github.run_id }}'].name" -o tsv`; do az keyvault purge --name $i; done - for i in `az group list --query "[?tags.testing_job_id=='${{ github.run_id }}'].name" -o tsv`; do echo "purging resource group: $i" && $(az group delete -n $i -y --no-wait || true); done - for i in `az role assignment list --query "[?contains(roleDefinitionName, '${{ github.run_id }}')].roleDefinitionName" -o tsv`; do echo "purging role assignment: $i" && $(az role assignment delete --role $i || true); done - for i in `az role definition list --query "[?contains(roleName, '${{ github.run_id }}')].roleName" -o tsv`; do echo "purging custom role definition: $i" && $(az role definition delete --name $i || true); done diff --git a/.github/workflows/standalone-dataplat.json b/.github/workflows/standalone-dataplat.json new file mode 100644 index 0000000000..1a1a565a55 --- /dev/null +++ b/.github/workflows/standalone-dataplat.json @@ -0,0 +1,76 @@ +{ + "config_files": [ + "cosmos_db/100-cosmos-db-sql-role-mapping", + "cosmos_db/100-simple-cosmos-db-cassandra", + "cosmos_db/100-simple-cosmos-db-gremlin", + "cosmos_db/100-simple-cosmos-db-mongo", + "cosmos_db/100-simple-cosmos-db-sql", + "cosmos_db/100-simple-cosmos-db-table", + "cosmos_db/101-decomposed-cosmosdb-sql", + "cosmos_db/101-private-endpoint-cosmos-db", + "data_explorer/101-kusto_clusters_basic", + "data_explorer/102-kusto_clusters_vnet", + "data_explorer/103-kusto_clusters_identity", + "data_explorer/104-kusto_cluster_database", + "data_explorer/105-kusto_attached_database_configuration", + "data_explorer/106-database_principal_assignment", + "data_explorer/107-private-endpoint", + "data_factory/101-data_factory", + "data_factory/102-data_factory_pipeline", + "data_factory/103-data_factory_trigger_schedule", + "data_factory/104-data_factory_dataset_azure_blob", + "data_factory/105-data_factory_dataset_cosmosdb_sqlapi", + "data_factory/106-data_factory_dataset_delimited_text", + "data_factory/107-data_factory_dataset_http", + "data_factory/108-data_factory_dataset_json", + "data_factory/109-data_factory_dataset_mysql", + "data_factory/110-data_factory_dataset_postgresql", + "data_factory/111-data_factory_dataset_sql_server_table", + "data_factory/112-data_factory_integration_runtime_azure_ssis", + "data_factory/113-data_factory_integration_runtime_azure_ssis_mssql_server", + 
"data_factory/114-data_factory_integration_runtime_self_hosted", + "data_factory/115-data_factory_runtime_self_hoste_databricks", + "data_factory/116-data_factory_linked_service_azure_databricks", + "data_protection/100-backup-vault-blob-storage", + "data_protection/101-backup-vault-disk", + "database_migration_services/100-dms", + "databricks/100-standard-databricks-no-vnet", + "databricks/101-standard-databricks-vnet", + "databricks/102-premium-aml", + "databricks/102-premium-databricks-vnet-private-endpoint", + "datalake/101-datalake-storage", + "machine_learning/100-aml", + "machine_learning/101-aml-vnet", + "machine_learning/102-aml-compute_instance", + "mariadb_server/100-simple-mariadb", + "mariadb_server/101-vnet-rule-mariadb", + "mariadb_server/102-private-endpoint-mariadb", + "mariadb_server/103-private-endpoint-with-fw-rule-mariadb", + "mssql_mi/200-mi", + "mssql_server/101-sqlserver-simple", + "mssql_server/102-sqlserver-extend", + "mssql_server/104-sqlserver-elastic_pools", + "mssql_server/105-sqlserver-failover_groups", + "mssql_server/107-sqlserver-db-retention-policy", + "mssql_server/108-sqlserver-db-diagnostics", + "mssql_server/109-sqlserver-network-firewall-rule", + "mysql_flexible_server/100-simple-mysql-flexible", + "mysql_flexible_server/101-delegated-subnet-with-fw-rule", + "mysql_flexible_server/102-advanced-mysql-flexible", + "mysql_server/100-simple-mysql", + "mysql_server/101-vnet-rule-mysql", + "mysql_server/102-private-endpoint-mysql", + "mysql_server/103-private-endpoint-with-fw-rule-mysql", + "postgresql_flexible_server/100-simple-postgresql-flexible", + "postgresql_flexible_server/101-delegated-subnet-with-fw-rule", + "postgresql_flexible_server/102-advanced-postgresql-flexible", + "postgresql_flexible_server/104-private-endpoint", + "postgresql_server/100-simple-postgresql", + "postgresql_server/101-vnet-rule-postgresql", + "postgresql_server/102-private-endpoint-postgresql", + "postgresql_server/103-private-endpoint-with-fw-rule", + "powerbi_embedded/100-simple-powerbi", + "purview/100-purview_account", + "purview/101-purview_account_private_link" + ] +} diff --git a/.github/workflows/standalone-networking.json b/.github/workflows/standalone-networking.json index c1a3a787c9..0697afeee1 100644 --- a/.github/workflows/standalone-networking.json +++ b/.github/workflows/standalone-networking.json @@ -36,6 +36,10 @@ "networking/private_dns/100-private-dns-vnet-links", "networking/private_links/endpoints/centralized", "networking/private_links/endpoints/static_ip", + "networking/virtual_network_gateway/100-expressroute-gateway", + "networking/virtual_network_gateway/101-vpn-site-to-site", + "networking/virtual_network_gateway/102-vpn-site-to-site-active-active", + "networking/virtual_network_gateway/103-vpn-site-to-site-connection", "networking/virtual_network/100-import-rg", "networking/virtual_network/100-simple-vnet-subnets-nsgs", "networking/virtual_network/100-subnet-delegation", @@ -44,7 +48,16 @@ "networking/virtual_network/201-nsg-flow-logs-v1", "networking/virtual_subnets/100-simple-subnet-rbac", "networking/virtual_wan/100-vwan-multi-hubs", + "networking/virtual_wan/101-vwan-hub-firewall-legacy", + "networking/virtual_wan/102-vwan-hub-firewall-secured-vhub", + "networking/virtual_wan/102a-vwan-hub-firewall-secured-vhub-fw-mgr", + "networking/virtual_wan/103-vwan-hub-gw-legacy", + "networking/virtual_wan/104-vwan-hub-gw-spp", + "networking/virtual_wan/105-vwan-hub-route-table", "networking/virtual_wan/106-vwan-hub-routes", - 
"networking/virtual_wan/108-vwan-vpn-site" + "networking/virtual_wan/108-vwan-vpn-site", + "networking/virtual_wan/109-vwan-vpn-gateway-connection", + "networking/virtual_wan/110-vwan-hub-gw-p2s-keyvault-cert", + "networking/virtual_wan/111-vwan-vpn-gateway-connection-with-nat" ] } diff --git a/.github/workflows/standalone-networking.yaml b/.github/workflows/standalone-networking.yaml deleted file mode 100644 index 3ae0243874..0000000000 --- a/.github/workflows/standalone-networking.yaml +++ /dev/null @@ -1,152 +0,0 @@ -# -# Copyright (c) Microsoft Corporation -# Licensed under the MIT License. -# - -name: standalone-networking - -on: - push: - paths: - - 'network*' - - 'modules/networking/**' - - 'examples/networking/**' - - '.github/workflows/*networking.*' - -env: - TF_CLI_ARGS: "-no-color" - TF_CLI_ARGS_destroy: "-auto-approve -refresh=false" - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} - ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} - TF_REGISTRY_DISCOVERY_RETRY: 5 - TF_REGISTRY_CLIENT_TIMEOUT: 15 - ROVER_RUNNER: true - -jobs: - load_scenarios: - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.load_scenarios.outputs.matrix }} - steps: - - uses: actions/checkout@v4 - - id: load_scenarios - run: | - cases=$(cat ./.github/workflows/standalone-networking.json | jq -c .) - echo "matrix=${cases}" >> $GITHUB_OUTPUT - - testcases: - name: test - runs-on: ubuntu-latest - needs: load_scenarios - - strategy: - fail-fast: false - matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} - - container: - image: aztfmod/rover:1.8.4-2405.2306 - options: --user 0 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Create environment variables - run: | - cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} - FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) - echo STATE_FILE=${TF_DATA_DIR}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV - echo PLAN_FILE=${TF_DATA_DIR}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV - echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV - echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV - - - name: Login azure - run: | - az login --service-principal -u '${{ env.ARM_CLIENT_ID }}' -p '${{ env.ARM_CLIENT_SECRET }}' --tenant '${{ env.ARM_TENANT_ID }}' - az account set -s ${{ env.ARM_SUBSCRIPTION_ID }} - - - name: Terraform Init example - id: tf_init - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - init -upgrade=true | grep -P '^- (?=Downloading|Using|Finding|Installing)|^[^-]' - - - name: Terraform Plan example - id: tf_plan - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - plan \ - ${{ env.PARAMETER_FILES }} \ - -var tags='{testing_job_id='"${{ github.run_id }}"'}' \ - -var var_folder_path=${{ env.CURRENT_FOLDER }} \ - -refresh=true \ - -input=false \ - -state=${{ env.STATE_FILE }} \ - -out=${{ env.PLAN_FILE }} - - - name: Terraform Apply example - id: tf_apply - if: steps.tf_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - apply \ - -parallelism=30 \ - -state=${{ env.STATE_FILE }} \ - ${{ env.PLAN_FILE }} - - - name: Terraform Destroy planning example - id: tf_destroy_plan - if: steps.tf_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - plan \ - ${{ env.PARAMETER_FILES }} \ - -var 
tags='{testing_job_id='"${{ github.run_id }}"'}' \ - -var var_folder_path=${{ env.CURRENT_FOLDER }} \ - -refresh=true \ - -input=false \ - -destroy \ - -state=${{ env.STATE_FILE }} \ - -out=${{ env.PLAN_FILE }}-destroy - - - name: Terraform Destroy apply example - id: tf_destroy_apply - if: steps.tf_destroy_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - apply \ - -refresh=false \ - -parallelism=30 \ - -auto-approve \ - -state=${{ env.STATE_FILE }} \ - ${{ env.PLAN_FILE }}-destroy - - purge: - name: purge - runs-on: ubuntu-latest - if: ${{ failure() || cancelled() }} - - needs: [testcases] - - container: - image: aztfmod/rover:1.8.0-2405.0203 - options: --user 0 - - steps: - - name: Login azure - run: | - az login --service-principal -u '${{ env.ARM_CLIENT_ID }}' -p '${{ env.ARM_CLIENT_SECRET }}' --tenant '${{ env.ARM_TENANT_ID }}' - az account set -s ${{ env.ARM_SUBSCRIPTION_ID }} - - - name: Complete purge - run: | - for i in `az monitor diagnostic-settings subscription list -o tsv --query "value[?contains(name, '${{ github.run_id }}' )].name"`; do echo "purging subscription diagnostic-settings: $i" && $(az monitor diagnostic-settings subscription delete --name $i --yes); done - for i in `az monitor log-profiles list -o tsv --query '[].name'`; do az monitor log-profiles delete --name $i; done - for i in `az ad group list --query "[?contains(displayName, '${{ github.run_id }}')].id" -o tsv`; do echo "purging Azure AD group: $i" && $(az ad group delete --verbose --group $i || true); done - for i in `az ad app list --query "[?contains(displayName, '${{ github.run_id }}')].appId" -o tsv`; do echo "purging Azure AD app: $i" && $(az ad app delete --verbose --id $i || true); done - for i in `az keyvault list-deleted --query "[?tags.testing_job_id=='${{ github.run_id }}'].name" -o tsv`; do az keyvault purge --name $i; done - for i in `az group list --query "[?tags.testing_job_id=='${{ github.run_id }}'].name" -o tsv`; do echo "purging resource group: $i" && $(az group delete -n $i -y --no-wait || true); done - for i in `az role assignment list --query "[?contains(roleDefinitionName, '${{ github.run_id }}')].roleDefinitionName" -o tsv`; do echo "purging role assignment: $i" && $(az role assignment delete --role $i || true); done - for i in `az role definition list --query "[?contains(roleName, '${{ github.run_id }}')].roleName" -o tsv`; do echo "purging custom role definition: $i" && $(az role definition delete --name $i || true); done diff --git a/.github/workflows/standalone-regressor-tf100.yaml b/.github/workflows/standalone-regressor-tf100.yaml index 22a66c6458..60bf4d639b 100644 --- a/.github/workflows/standalone-regressor-tf100.yaml +++ b/.github/workflows/standalone-regressor-tf100.yaml @@ -18,11 +18,10 @@ on: type: choice default: 'standalone-scenarios.json' options: - - standalone-scenarios-azuread.json + - standalone-azuread.json - standalone-scenarios.json - standalone-compute.json - standalone-networking.json - - standalone-scenarios-longrunners.json env: TF_CLI_ARGS: '-no-color' diff --git a/.github/workflows/standalone-scenarios-additional.json b/.github/workflows/standalone-scenarios-additional.json index 2e0450829d..5c141c24bd 100644 --- a/.github/workflows/standalone-scenarios-additional.json +++ b/.github/workflows/standalone-scenarios-additional.json @@ -14,11 +14,6 @@ "consumption_budget/105-consumption-budget-subscription-aks", "messaging/signalr/100-signalr-simple", "mssql_mi/200-mi-two-regions", - 
"networking/virtual_network_gateway/100-expressroute-gateway", - "networking/virtual_network_gateway/101-vpn-site-to-site", - "networking/virtual_network_gateway/102-vpn-site-to-site-active-active", - "networking/virtual_network_gateway/103-vpn-site-to-site-connection", - "networking/virtual_wan/100-vwan-multi-hubs", "networking/virtual_wan/101-vwan-hub-firewall-legacy", "networking/virtual_wan/102-vwan-hub-firewall-new", "networking/virtual_wan/103-vwan-hub-gw", diff --git a/.github/workflows/standalone-scenarios-longrunners.json b/.github/workflows/standalone-scenarios-longrunners.json deleted file mode 100644 index 5712cd8f61..0000000000 --- a/.github/workflows/standalone-scenarios-longrunners.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "config_files": [ - "apim/100-basic", - "apim/101-api", - "apim/102-diagnostics", - "apim/103-api_operations", - "apim/104-backend", - "apim/105-api_policy", - "apim/106-api_operation_tag", - "apim/107-api_operation_policy", - "apim/108-api_management_user", - "apim/110-api_management_diagnostic", - "apim/111-api_management_certificate", - "apim/112-api_management_gateway", - "apim/113-api_management_gateway_api", - "apim/114-api-management-group", - "apim/115-api_management_private_virtual_network", - "apim/116-api_management_subscription", - "apim/117-api_management_product", - "apim/118-api_management_platform_stv2", - "compute/vmware_cluster/101-vmware_cluster", - "mssql_mi/200-mi", - "networking/virtual_network_gateway/100-expressroute-gateway", - "networking/virtual_network_gateway/101-vpn-site-to-site", - "networking/virtual_network_gateway/102-vpn-site-to-site-active-active", - "networking/virtual_network_gateway/103-vpn-site-to-site-connection", - "networking/virtual_wan/101-vwan-hub-firewall-legacy", - "networking/virtual_wan/102-vwan-hub-firewall-secured-vhub", - "networking/virtual_wan/102a-vwan-hub-firewall-secured-vhub-fw-mgr", - "networking/virtual_wan/103-vwan-hub-gw-legacy", - "networking/virtual_wan/104-vwan-hub-gw-spp", - "networking/virtual_wan/105-vwan-hub-route-table", - "networking/virtual_wan/109-vwan-vpn-gateway-connection", - "networking/virtual_wan/110-vwan-hub-gw-p2s-keyvault-cert", - "networking/virtual_wan/111-vwan-vpn-gateway-connection-with-nat", - "redis_cache/100-redis-standard", - "redis_cache/101-redis-diagnostics", - "redis_cache/102-redis-private", - "webapps/appservice-environment/102-simple_asev3" - ] -} diff --git a/.github/workflows/standalone-scenarios.json b/.github/workflows/standalone-scenarios.json index a7f95055b5..c0e560b63d 100644 --- a/.github/workflows/standalone-scenarios.json +++ b/.github/workflows/standalone-scenarios.json @@ -1,5 +1,23 @@ { "config_files": [ + "apim/100-basic", + "apim/101-api", + "apim/102-diagnostics", + "apim/103-api_operations", + "apim/104-backend", + "apim/105-api_policy", + "apim/106-api_operation_tag", + "apim/107-api_operation_policy", + "apim/108-api_management_user", + "apim/110-api_management_diagnostic", + "apim/111-api_management_certificate", + "apim/112-api_management_gateway", + "apim/113-api_management_gateway_api", + "apim/114-api-management-group", + "apim/115-api_management_private_virtual_network", + "apim/116-api_management_subscription", + "apim/117-api_management_product", + "apim/118-api_management_platform_stv2", "app_config/100-simple", "app_config/101-private-link", "app_insights/100-all-attributes", @@ -12,69 +30,30 @@ "automation/103-automation-private-endpoints", "automation/104-automation-schedule-runbook", 
"communication/communication_services/101-communication_service", - "cosmos_db/100-simple-cosmos-db-cassandra", - "cosmos_db/100-simple-cosmos-db-gremlin", - "cosmos_db/100-simple-cosmos-db-mongo", - "cosmos_db/100-simple-cosmos-db-sql", - "cosmos_db/100-simple-cosmos-db-table", - "cosmos_db/100-cosmos-db-sql-role-mapping", - "cosmos_db/101-decomposed-cosmosdb-sql", - "cosmos_db/101-private-endpoint-cosmos-db", - "data_explorer/101-kusto_clusters_basic", - "data_explorer/102-kusto_clusters_vnet", - "data_explorer/103-kusto_clusters_identity", - "data_explorer/104-kusto_cluster_database", - "data_explorer/105-kusto_attached_database_configuration", - "data_explorer/106-database_principal_assignment", - "data_explorer/107-private-endpoint", - "data_factory/101-data_factory", - "data_factory/102-data_factory_pipeline", - "data_factory/103-data_factory_trigger_schedule", - "data_factory/104-data_factory_dataset_azure_blob", - "data_factory/105-data_factory_dataset_cosmosdb_sqlapi", - "data_factory/106-data_factory_dataset_delimited_text", - "data_factory/107-data_factory_dataset_http", - "data_factory/108-data_factory_dataset_json", - "data_factory/109-data_factory_dataset_mysql", - "data_factory/110-data_factory_dataset_postgresql", - "data_factory/111-data_factory_dataset_sql_server_table", - "data_factory/112-data_factory_integration_runtime_azure_ssis", - "data_factory/113-data_factory_integration_runtime_azure_ssis_mssql_server", - "data_factory/114-data_factory_integration_runtime_self_hosted", - "data_factory/115-data_factory_runtime_self_hoste_databricks", - "data_factory/116-data_factory_linked_service_azure_databricks", - "data_protection/100-backup-vault-blob-storage", - "data_protection/101-backup-vault-disk", - "database_migration_services/100-dms", - "databricks/100-standard-databricks-no-vnet", - "databricks/101-standard-databricks-vnet", - "databricks/102-premium-aml", - "databricks/102-premium-databricks-vnet-private-endpoint", - "datalake/101-datalake-storage", + "diagnostics_profiles/100-multiple-destinations", "diagnostics_profiles/100-multiple-destinations", "diagnostics_profiles/101-log-analytics-destination-type-profile", "diagnostics_profiles/200-diagnostics-eventhub-namespaces", + "diagnostics_profiles/200-diagnostics-eventhub-namespaces", + "diagnostics_profiles/201-multi-eventhub-diagnostics", "diagnostics_profiles/201-multi-eventhub-diagnostics", "digital_twins/100-basic", "digital_twins/101-adt-servicebus", "digital_twins/102-digital_twins_instance_eventhub", - "diagnostics_profiles/100-multiple-destinations", - "diagnostics_profiles/200-diagnostics-eventhub-namespaces", - "diagnostics_profiles/201-multi-eventhub-diagnostics", "eventhub/100-simple-eventhub-namespace", "eventhub/101-evh-namespace-with-private-endpoint", "eventhub/102-namespace-and-evh-with-auth-rules", "eventhub/103-eventhub-consumer-groups", "eventhub/104-namespace-and-evh-with-storage", - "keyvault/101-keyvault-policies", - "keyvault/102-keyvault-cert-issuer", - "keyvault/104-keyvault-dynamic-secret", - "keyvault/105-keyvault-dynamic-certificate", "iot/100-iot-hub", "iot/101-iot-hub-endpoints-and-file-upload", "iot/103-iot-hub-with-dps", "iot/110-iot-central-application", "iot/111-iot-security-solution", + "keyvault/101-keyvault-policies", + "keyvault/102-keyvault-cert-issuer", + "keyvault/104-keyvault-dynamic-secret", + "keyvault/105-keyvault-dynamic-certificate", "load_test/100-load-test", "logic_app/100-logic_app_workflow", "logic_app/102-logic_app_integration_account", @@ -84,19 +63,12 @@ 
"logic_app/106-logic_app_trigger_recurrence", "logic_app/107-logic_app_trigger_custom", "logic_app/109-logic_app_standard_vnet_integration", - "machine_learning/100-aml", - "machine_learning/101-aml-vnet", - "machine_learning/102-aml-compute_instance", "maintenance_configuration/100-maintenance-configuration", "maintenance_configuration/101-maintenance-configuration-schedule", "maintenance_configuration/200-maintenance-configuration-assignment-vm-windows", "maintenance_configuration/201-maintenance-configuration-assignment-vm-linux", - "maps/101-azure-maps-account", "managed_service_identity/100-msi-levels", - "mariadb_server/100-simple-mariadb", - "mariadb_server/101-vnet-rule-mariadb", - "mariadb_server/102-private-endpoint-mariadb", - "mariadb_server/103-private-endpoint-with-fw-rule-mariadb", + "maps/101-azure-maps-account", "messaging/eventgrid/100-simple-eventgrid-topic", "messaging/eventgrid/101-simple-eventgrid-topic-private-endpoint", "messaging/eventgrid/102-eventgrid_subscription", @@ -111,33 +83,8 @@ "monitoring/102-monitor_activity_log_alert", "monitoring/103-monitor_metric_alert", "monitoring/104-log_analytics_storage_insights", - "mssql_server/101-sqlserver-simple", - "mssql_server/102-sqlserver-extend", - "mssql_server/104-sqlserver-elastic_pools", - "mssql_server/105-sqlserver-failover_groups", - "mssql_server/107-sqlserver-db-retention-policy", - "mssql_server/108-sqlserver-db-diagnostics", - "mssql_server/109-sqlserver-network-firewall-rule", - "mysql_flexible_server/100-simple-mysql-flexible", - "mysql_flexible_server/101-delegated-subnet-with-fw-rule", - "mysql_flexible_server/102-advanced-mysql-flexible", - "mysql_server/100-simple-mysql", - "mysql_server/101-vnet-rule-mysql", - "mysql_server/102-private-endpoint-mysql", - "mysql_server/103-private-endpoint-with-fw-rule-mysql", "netapp/101-nfs", "netapp/102-nfs-export-policy", - "postgresql_flexible_server/100-simple-postgresql-flexible", - "postgresql_flexible_server/101-delegated-subnet-with-fw-rule", - "postgresql_flexible_server/102-advanced-postgresql-flexible", - "postgresql_flexible_server/104-private-endpoint", - "postgresql_server/100-simple-postgresql", - "postgresql_server/101-vnet-rule-postgresql", - "postgresql_server/102-private-endpoint-postgresql", - "postgresql_server/103-private-endpoint-with-fw-rule", - "powerbi_embedded/100-simple-powerbi", - "purview/100-purview_account", - "purview/101-purview_account_private_link", "recovery_vault/101-simple-asrv", "recovery_vault/102-asr-protection", "recovery_vault/103-asr-with-private-endpoint", @@ -145,6 +92,9 @@ "recovery_vault/105-asr-with-network-mapping", "recovery_vault/106-backupvault-with-sqldatabase-saphana", "recovery_vault/107-asr-diagnostics", + "redis_cache/100-redis-standard", + "redis_cache/101-redis-diagnostics", + "redis_cache/102-redis-private", "redis_cache/103-redis-private-endpoints", "role_mapping/100-simple-role-mapping", "role_mapping/101-function-app-managed-identity", @@ -171,6 +121,7 @@ "storage_container/101-storage_container", "synapse_analytics/100-synapse", "synapse_analytics/101-synapse-sparkpool", + "webapps/appservice-environment/102-simple_asev3", "webapps/appservice/101-appservice-simple", "webapps/appservice/102-appservice-slots", "webapps/appservice/103-appservice-extend", diff --git a/.github/workflows/standalone-tf100.yaml b/.github/workflows/standalone-tf100.yaml deleted file mode 100644 index 3ba94fd09e..0000000000 --- a/.github/workflows/standalone-tf100.yaml +++ /dev/null @@ -1,161 +0,0 @@ -# -# Copyright (c) 
Microsoft Corporation -# Licensed under the MIT License. -# - -name: standalone-tf100 - -on: - workflow_dispatch: - inputs: - scenario: - description: "Select the scenario you want to run:" - required: false - type: choice - default: "standalone-scenarios.json" - options: - - standalone-scenarios-azuread.json - - standalone-scenarios.json - - standalone-compute.json - - standalone-networking.json - - standalone-scenarios-longrunners.json - -env: - TF_CLI_ARGS: "-no-color" - TF_CLI_ARGS_destroy: "-auto-approve -refresh=false" - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} - ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} - TF_REGISTRY_DISCOVERY_RETRY: 5 - TF_REGISTRY_CLIENT_TIMEOUT: 15 - ROVER_RUNNER: true - -jobs: - load_scenarios: - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.load_scenarios.outputs.matrix }} - steps: - - uses: actions/checkout@v4 - - id: load_scenarios - run: | - cases=$(( - cat ./.github/workflows/${{ github.event.inputs.scenario }}) | jq -c .) - echo "matrix=${cases}" >> $GITHUB_OUTPUT - - testcases: - name: test - runs-on: ubuntu-latest - needs: load_scenarios - - strategy: - max-parallel: 20 - fail-fast: false - matrix: ${{fromJSON(needs.load_scenarios.outputs.matrix)}} - - container: - image: aztfmod/rover:1.8.4-2405.2306 - options: --user 0 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Create environment variables - run: | - cd ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} - FILE_NAME=$(echo ${{ matrix.config_files }} | sed 's./..g' | xargs) - echo STATE_FILE=${TF_DATA_DIR}/tfstates/${FILE_NAME}.tfstate >> $GITHUB_ENV - echo PLAN_FILE=${TF_DATA_DIR}/tfstates/${FILE_NAME}.plan >> $GITHUB_ENV - echo CURRENT_FOLDER=${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} >> $GITHUB_ENV - echo PARAMETER_FILES=$(find ${GITHUB_WORKSPACE}/examples/${{ matrix.config_files }} | grep .tfvars | sed 's/.*/-var-file=&/' | xargs) >> $GITHUB_ENV - - - name: Login azure - run: | - az login --service-principal -u '${{ env.ARM_CLIENT_ID }}' -p '${{ env.ARM_CLIENT_SECRET }}' --tenant '${{ env.ARM_TENANT_ID }}' - az account set -s ${{ env.ARM_SUBSCRIPTION_ID }} - - - name: Terraform Init example - id: tf_init - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - init -upgrade=true | grep -P '^- (?=Downloading|Using|Finding|Installing)|^[^-]' - - - name: Terraform Plan example - id: tf_plan - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - plan \ - ${{ env.PARAMETER_FILES }} \ - -var tags='{testing_job_id='"${{ github.run_id }}"'}' \ - -var var_folder_path=${{ env.CURRENT_FOLDER }} \ - -refresh=true \ - -input=false \ - -state=${{ env.STATE_FILE }} \ - -out=${{ env.PLAN_FILE }} - - - name: Terraform Apply example - id: tf_apply - if: steps.tf_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - apply \ - -parallelism=30 \ - -state=${{ env.STATE_FILE }} \ - ${{ env.PLAN_FILE }} - - - name: Terraform Destroy planning example - id: tf_destroy_plan - if: steps.tf_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - plan \ - ${{ env.PARAMETER_FILES }} \ - -var tags='{testing_job_id='"${{ github.run_id }}"'}' \ - -var var_folder_path=${{ env.CURRENT_FOLDER }} \ - -refresh=true \ - -input=false \ - -destroy \ - -state=${{ env.STATE_FILE }} \ - -out=${{ env.PLAN_FILE }}-destroy - - - name: Terraform Destroy apply example - id: tf_destroy_apply - if: 
steps.tf_destroy_plan.outcome == 'success' - run: | - terraform -chdir=${GITHUB_WORKSPACE}/examples \ - apply \ - -refresh=false \ - -parallelism=30 \ - -auto-approve \ - -state=${{ env.STATE_FILE }} \ - ${{ env.PLAN_FILE }}-destroy - - purge: - name: purge - runs-on: ubuntu-latest - if: ${{ failure() || cancelled() }} - - needs: [testcases] - - container: - image: aztfmod/rover:1.8.4-2405.2306 - options: --user 0 - - steps: - - name: Login azure - run: | - az login --service-principal -u '${{ env.ARM_CLIENT_ID }}' -p '${{ env.ARM_CLIENT_SECRET }}' --tenant '${{ env.ARM_TENANT_ID }}' - az account set -s ${{ env.ARM_SUBSCRIPTION_ID }} - - - name: Complete purge - run: | - for i in `az monitor diagnostic-settings subscription list -o tsv --query "value[?contains(name, '${{ github.run_id }}' )].name"`; do echo "purging subscription diagnostic-settings: $i" && $(az monitor diagnostic-settings subscription delete --name $i --yes); done - for i in `az monitor log-profiles list -o tsv --query '[].name'`; do az monitor log-profiles delete --name $i; done - for i in `az ad group list --query "[?contains(displayName, '${{ github.run_id }}')].id" -o tsv`; do echo "purging Azure AD group: $i" && $(az ad group delete --verbose --group $i || true); done - for i in `az ad app list --query "[?contains(displayName, '${{ github.run_id }}')].appId" -o tsv`; do echo "purging Azure AD app: $i" && $(az ad app delete --verbose --id $i || true); done - for i in `az keyvault list-deleted --query "[?tags.testing_job_id=='${{ github.run_id }}'].name" -o tsv`; do az keyvault purge --name $i; done - for i in `az group list --query "[?tags.testing_job_id=='${{ github.run_id }}'].name" -o tsv`; do echo "purging resource group: $i" && $(az group delete -n $i -y --no-wait || true); done - for i in `az role assignment list --query "[?contains(roleDefinitionName, '${{ github.run_id }}')].roleDefinitionName" -o tsv`; do echo "purging role assignment: $i" && $(az role assignment delete --role $i || true); done - for i in `az role definition list --query "[?contains(roleName, '${{ github.run_id }}')].roleName" -o tsv`; do echo "purging custom role definition: $i" && $(az role definition delete --name $i || true); done diff --git a/.github/workflows/weekly_dispatch.yaml b/.github/workflows/weekly_dispatch.yaml new file mode 100644 index 0000000000..fc601ff4d7 --- /dev/null +++ b/.github/workflows/weekly_dispatch.yaml @@ -0,0 +1,34 @@ +# +# Copyright (c) Microsoft Corporation +# Licensed under the MIT License. 
+# + +name: PR Dispatch Workflow + +on: + schedule: + - cron: '0 5 * * 5' + +env: + TF_VERSION: "1.8.4" + TF_LINT_VERSION: "v0.50.3" + +jobs: + dispatch: + runs-on: ubuntu-latest + strategy: + matrix: + scenario: + - standalone-scenarios-azuread.json + - standalone-scenarios.json + - standalone-compute.json + - standalone-networking.json + - standalone-dataplat.json + + steps: + - name: Repository Dispatch + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + event-type: int-${{ matrix.scenario }} + client-payload: '{"scenario": "${{ (matrix.scenario) }}", "sha": "${{ github.event.pull_request.head.sha }}"}' diff --git a/.github/workflows/pr_workflow.yaml b/.github/workflows/weekly_workflow.yaml similarity index 97% rename from .github/workflows/pr_workflow.yaml rename to .github/workflows/weekly_workflow.yaml index b5b6710518..ff2d792d7e 100644 --- a/.github/workflows/pr_workflow.yaml +++ b/.github/workflows/weekly_workflow.yaml @@ -7,7 +7,7 @@ name: PR tests on: repository_dispatch: - types: [pr-*] + types: [int-*] workflow_dispatch: inputs: scenario: @@ -22,10 +22,6 @@ on: - standalone-networking.json - standalone-scenarios-longrunners.json -permissions: - id-token: write - contents: read - env: DEFAULT_SCENARIO: "standalone-networking.json" TF_VERSION: "1.8.4" @@ -39,9 +35,6 @@ jobs: matrix: ${{ steps.load_scenarios.outputs.matrix }} steps: - uses: actions/checkout@v4 - with: - ref: ${{ github.event.client_payload.sha }} - - id: load_scenarios run: | echo "Scenario: ${{ github.event.client_payload.scenario }}" @@ -64,8 +57,6 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - with: - ref: ${{ github.event.client_payload.sha }} - name: Create environment variables run: | @@ -116,6 +107,10 @@ jobs: -verbose terraform_integration_tests: + permissions: + id-token: write + contents: read + name: Integration-${{ matrix.config_files }} runs-on: ubuntu-latest if: always() @@ -131,8 +126,6 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - with: - ref: ${{ github.event.client_payload.sha }} - name: Create environment variables run: | @@ -161,7 +154,7 @@ jobs: key: ${{ runner.os }}-terraform-${{ hashFiles('**/.terraform.lock.hcl') }} restore-keys: | ${{ runner.os }}-terraform- - + - name: Azure Login uses: azure/login@v2 with: @@ -221,6 +214,10 @@ jobs: ${{ env.PLAN_FILE }}-destroy purge: + permissions: + id-token: write + contents: read + name: Purge Integration Environment runs-on: ubuntu-latest if: ${{ failure() || cancelled() }}
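Each of the new pr_tests-*.yaml workflows above follows the same scenario-matrix pattern: a JSON file under .github/workflows/ lists example folders, a load_scenarios job compacts that file with jq and publishes it as a job output, and the test job expands the output with fromJSON so that every config_files entry becomes its own job. The sketch below condenses that pattern; it is a minimal illustration rather than a copy of any file in the patch, and the scenario-file contents shown in the comment and the placeholder echo step are assumptions.

# Hypothetical scenario file .github/workflows/standalone-azuread.json:
#   { "config_files": [ "azuread/100-azuread-users", "azuread/102-azuread-applications" ] }

name: scenario-matrix-sketch
on: [pull_request]

env:
  SCENARIO: standalone-azuread.json

jobs:
  load_scenarios:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.load.outputs.matrix }}
    steps:
      - uses: actions/checkout@v4
      - id: load
        run: |
          # jq -c . compacts the JSON onto a single line so it fits into a step output
          echo "matrix=$(jq -c . ./.github/workflows/${{ env.SCENARIO }})" >> "$GITHUB_OUTPUT"

  mock_plan_scenarios:
    name: ${{ matrix.config_files }}
    needs: load_scenarios
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      # fromJSON turns {"config_files": [...]} into one matrix job per entry
      matrix: ${{ fromJSON(needs.load_scenarios.outputs.matrix) }}
    steps:
      - run: echo "would run the mock terraform test against examples/${{ matrix.config_files }}"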
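The weekly flow splits the old pr_dispatch job in two. weekly_dispatch.yaml fires one repository_dispatch event per scenario file on a Friday schedule, prefixing the event type with int-, and the renamed weekly_workflow.yaml subscribes to those events with types: [int-*] and reads the scenario file name from the client payload. A condensed sketch of that pairing follows; the two files are shown as a single YAML stream for brevity, and the fallback to DEFAULT_SCENARIO for runs without a payload is an assumption (the patch only shows the DEFAULT_SCENARIO variable, not the selection logic).

# weekly_dispatch.yaml (sender): one int-* event per scenario file
on:
  schedule:
    - cron: '0 5 * * 5'
jobs:
  dispatch:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        scenario: [standalone-scenarios.json, standalone-dataplat.json]
    steps:
      - uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          event-type: int-${{ matrix.scenario }}
          client-payload: '{"scenario": "${{ matrix.scenario }}"}'
---
# weekly_workflow.yaml (receiver): matches any int-* event type
on:
  repository_dispatch:
    types: [int-*]
env:
  DEFAULT_SCENARIO: standalone-networking.json
jobs:
  load_scenarios:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: |
          # assumed fallback: use DEFAULT_SCENARIO when no dispatch payload is present
          scenario="${{ github.event.client_payload.scenario || env.DEFAULT_SCENARIO }}"
          echo "dispatched scenario: ${scenario}"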