name: Weekly Testing

on:
  schedule:
    # Weekly on Monday at 10:18 am GMT (2:18 am PST, 3:18 am PDT).
    - cron: '18 10 * * 1'
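  # workflow_dispatch additionally allows this workflow to be started manually from the Actions tab.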
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        jdk: ['11', '8']
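        # One build per JDK; each publishes its own artifact (build-jar-file-8 and
        # build-jar-file-11), which the downstream test jobs pick by name.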
    steps:
      - name: Checkout the project
        uses: actions/checkout@v2
      - uses: actions/setup-java@v3
        with:
          distribution: 'temurin'
          java-version: ${{ matrix.jdk }}
      - name: Build the project
        run: cd connector && sbt package
      - name: Upload the build artifact
        uses: actions/upload-artifact@v3
        with:
          name: build-jar-file-${{ matrix.jdk }}
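          # The absolute prefix below is the hosted runner's default workspace,
          # i.e. ${{ github.workspace }} = /home/runner/work/<repo>/<repo>.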
          path: /home/runner/work/spark-connector/spark-connector/connector/target/scala-2.12/spark-vertica-connector_2.12-*.jar
  run-spark-integration-tests-jdk8:
    name: Run regression tests against different versions of Vertica
    runs-on: ubuntu-latest
    needs: build
    strategy:
      fail-fast: false
      matrix:
        runtime: [ { spark: "[3.0.0, 3.1.0)", hadoop: "2.7.4" },
                   { spark: "[3.1.0, 3.2.0)", hadoop: "3.2.0" },
                   { spark: "[3.2.0, 3.3.0)", hadoop: "3.3.1" },
                   { spark: "[3.3.0, 3.4.0)", hadoop: "3.3.2" } ]
vertica: ["11.1.1-2", "12.0.4-0" ] | |
steps: | |
- name: Checkout the project | |
uses: actions/checkout@v2 | |
- name: Remove existing docker containers | |
run: cd docker && docker-compose down | |
- name: Run docker compose | |
run: cd docker && docker-compose up -d | |
env: | |
SPARK_VERSION: ${{matrix.runtime.spark}} | |
HADOOP_VERSION: ${{matrix.runtime.hadoop}} | |
VERTICA_VERSION: ${{matrix.vertica}} | |
      - name: Download the build artifact
        uses: actions/download-artifact@v2
        with:
          name: build-jar-file-8
          path: ./functional-tests/lib/
      - name: Wait for Vertica to be available
        uses: nick-invision/retry@v2
        with:
          timeout_seconds: 20
          max_attempts: 10
          retry_on: error
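          # grep exits non-zero until the startup message appears in the container
          # log, which counts as an error and triggers another attempt (up to 10
          # attempts with a 20-second timeout each).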
          command: docker logs docker_vertica_1 | grep "Vertica container is now running" >/dev/null
      - name: Run the integration tests
        run: docker exec -w /spark-connector/functional-tests docker_client_1 sbt run
      - name: Remove docker containers
        run: cd docker && docker-compose down
  run-spark-integration-tests-json:
    name: Run regression tests using JSON export
    runs-on: ubuntu-latest
    needs: build
    strategy:
      fail-fast: false
      matrix:
        runtime: [ { spark: "[3.0.0, 3.1.0)", hadoop: "2.7.4" },
                   { spark: "[3.1.0, 3.2.0)", hadoop: "3.2.0" },
                   { spark: "[3.2.0, 3.3.0)", hadoop: "3.3.1" },
                   { spark: "[3.3.0, 3.4.0)", hadoop: "3.3.2" } ]
    steps:
      - name: Checkout the project
        uses: actions/checkout@v2
      - name: Remove existing docker containers
        run: cd docker && docker-compose down
      - name: Run docker compose
        run: cd docker && docker-compose up -d
        env:
          SPARK_VERSION: ${{ matrix.runtime.spark }}
          HADOOP_VERSION: ${{ matrix.runtime.hadoop }}
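          # NOTE: this job's matrix has no 'vertica' axis, so the expression below
          # evaluates to an empty string; the compose file's default Vertica
          # version presumably applies.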
          VERTICA_VERSION: ${{ matrix.vertica }}
      - name: Download the build artifact
        uses: actions/download-artifact@v2
        with:
          name: build-jar-file-11
          path: ./functional-tests/lib/
      - name: Wait for Vertica to be available
        uses: nick-invision/retry@v2
        with:
          timeout_seconds: 20
          max_attempts: 10
          retry_on: error
          command: docker logs docker_vertica_1 | grep "Vertica container is now running" >/dev/null
      - name: Run the integration tests with JSON export
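        # Per the job name, the -j flag presumably switches the functional-test
        # runner to JSON export instead of the default export path.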
        run: docker exec -w /spark-connector/functional-tests docker_client_1 sbt "run -j"
      - name: Remove docker containers
        run: cd docker && docker-compose down
  run-spark-integration-tests-vertica-10:
    name: Run integration tests against Vertica 10
    runs-on: ubuntu-latest
    needs: build
    strategy:
      fail-fast: false
      matrix:
        runtime: [ { spark: "[3.0.0, 3.1.0)", hadoop: "2.7.4" },
                   { spark: "[3.1.0, 3.2.0)", hadoop: "3.2.0" },
                   { spark: "[3.2.0, 3.3.0)", hadoop: "3.3.1" },
                   { spark: "[3.3.0, 3.4.0)", hadoop: "3.3.2" } ]
    steps:
      - name: Checkout the project
        uses: actions/checkout@v2
      - name: Remove existing docker containers
        run: cd docker && docker-compose down
      - name: Run docker compose
        run: cd docker && docker-compose up -d
        env:
          SPARK_VERSION: ${{ matrix.runtime.spark }}
          HADOOP_VERSION: ${{ matrix.runtime.hadoop }}
          VERTICA_VERSION: 10.1.1-0
      - name: Download the build artifact
        uses: actions/download-artifact@v2
        with:
          name: build-jar-file-11
          path: ./functional-tests/lib/
      - name: Wait for Vertica to be available
        uses: nick-invision/retry@v2
        with:
          timeout_seconds: 20
          max_attempts: 10
          retry_on: error
          command: docker logs docker_vertica_1 | grep "Vertica container is now running" >/dev/null
      - name: Run the integration tests against Vertica 10
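        # Per the job name, the --v10 flag presumably restricts the suite to
        # tests compatible with Vertica 10.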
        run: docker exec -w /spark-connector/functional-tests docker_client_1 sbt "run --v10"
      - name: Remove docker containers
        run: cd docker && docker-compose down
  run-integration-tests-s3:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout the project
        uses: actions/checkout@v2
      - name: Download the build artifact
        uses: actions/download-artifact@v2
        with:
          name: build-jar-file-11
          path: ./functional-tests/lib/
      - name: Add config options to application.conf
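        # pipeline-s3-config.sh presumably writes the S3 path from the secret
        # into the functional-tests application.conf; the GCS job below does the
        # same with its own script.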
        run: cd functional-tests && ./pipeline-s3-config.sh
        env:
          S3_FILEPATH: ${{ secrets.S3_FILEPATH }}
      - name: Build client image
        run: docker build -t client ./docker/client
      - name: Run docker compose
        run: cd docker && docker-compose up -d
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      - name: Wait for Vertica to be available
        uses: nick-invision/retry@v2
        with:
          timeout_seconds: 20
          max_attempts: 10
          retry_on: error
          command: docker logs docker_vertica_1 | grep "Vertica container is now running" >/dev/null
      - name: Run the integration tests against S3
        run: docker exec -w /spark-connector/functional-tests docker_client_1 sbt run
      - name: Remove docker containers
        run: cd docker && docker-compose down
  run-integration-tests-gcs:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout the project
        uses: actions/checkout@v2
      - name: Download the build artifact
        uses: actions/download-artifact@v2
        with:
          name: build-jar-file-11
          path: ./functional-tests/lib/
      - name: Add config options to application.conf
        run: cd functional-tests && ./pipeline-gcs-config.sh
        env:
          GCS_FILEPATH: ${{ secrets.GCS_FILEPATH }}
      - name: Build client image
        run: docker build -t client ./docker/client
      - name: Run docker compose
        run: cd docker && docker-compose up -d
        env:
          GCS_HMAC_KEY_ID: ${{ secrets.GCS_HMAC_KEY_ID }}
          GCS_HMAC_KEY_SECRET: ${{ secrets.GCS_HMAC_KEY_SECRET }}
          GCS_SERVICE_KEY_ID: ${{ secrets.GCS_SERVICE_KEY_ID }}
          GCS_SERVICE_KEY: ${{ secrets.GCS_SERVICE_KEY }}
          GCS_SERVICE_EMAIL: ${{ secrets.GCS_SERVICE_EMAIL }}
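          # Both HMAC (S3-interoperability) credentials and a service-account key
          # are supplied above, presumably because the connector and Vertica
          # authenticate to GCS through different mechanisms.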
      - name: Wait for Vertica to be available
        uses: nick-invision/retry@v2
        with:
          timeout_seconds: 20
          max_attempts: 10
          retry_on: error
          command: docker logs docker_vertica_1 | grep "Vertica container is now running" >/dev/null
      - name: Run the integration tests against GCS
        run: docker exec -w /spark-connector/functional-tests docker_client_1 sbt run
      - name: Remove docker containers
        run: cd docker && docker-compose down
  slack-workflow-status:
    name: Post Workflow Status To Slack
    runs-on: ubuntu-latest
    needs: [build,
            run-spark-integration-tests-jdk8,
            run-spark-integration-tests-json,
            run-spark-integration-tests-vertica-10,
            run-integration-tests-gcs,
            run-integration-tests-s3]
    if: failure()
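    # failure() gates the whole job on at least one needed job failing; the
    # step-level always() below then lets the notification step run regardless
    # of the status of any earlier step in this job.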
    steps:
      - name: Slack Workflow Notification
        uses: Gamesight/slack-workflow-status@master
        if: always()
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_CHANNEL_WEBHOOK }}
          name: 'Weekly tests failed'
          icon_emoji: ':sadge:'
          include_jobs: false