Skip to content

Commit

Permalink
add 'test template integrity' ci workflow (#2)
Browse files Browse the repository at this point in the history
New CI workflow that runs:
- Copier rendering
- dbt debug
- dbt compile
- dbt-coves generate airflow-dags
- pre-commit
  • Loading branch information
BAntonellini authored Jul 1, 2024
1 parent 1bc92f4 commit 7077fa0
Show file tree
Hide file tree
Showing 5 changed files with 44 additions and 31 deletions.
44 changes: 31 additions & 13 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,19 +3,41 @@ name: Test template
on:
workflow_dispatch:
pull_request:
branches: [main]

jobs:
build:
Test_Template_Integrity:
env:
DBT_PROFILES_DIR: /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/generated/automate/dbt
DATACOVES__DBT_HOME: /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/generated

DATACOVES__MAIN__ACCOUNT: ${{ vars.DATACOVES__MAIN__ACCOUNT }}

DATACOVES__MAIN__DATABASE: ${{ vars.DATACOVES__MAIN__DATABASE }}
DATACOVES__MAIN__SCHEMA: ${{ vars.DATACOVES__MAIN__SCHEMA }}

DATACOVES__MAIN__ROLE: ${{ vars.DATACOVES__MAIN__ROLE }}
DATACOVES__MAIN__WAREHOUSE: ${{ vars.DATACOVES__MAIN__WAREHOUSE }}

DATACOVES__MAIN__USER: ${{ vars.DATACOVES__MAIN__USER }}
DATACOVES__MAIN__PASSWORD: ${{ secrets.DATACOVES__MAIN__PASSWORD }}
runs-on: ubuntu-latest
container: datacoves/ci-airflow-dbt-snowflake:2

steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Checkout branch
uses: actions/checkout@v4
with:
ref: ${{ github.event.push.head.sha }}
fetch-depth: 0

- name: Set Secure Directory
run: git config --global --add safe.directory /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}

- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: 3.10
python-version: "3.10"

- name: Install dependencies
run: |
Expand All @@ -24,10 +46,7 @@ jobs:
- name: Run Copier
run: |
copier -d setup_dbt_project=True -d setup_dbt_profile=True -d setup_ci_cd=True -d setup_precommit=True
-d setup_airflow_dag=True -d dbt_project_dir="." -d dbt_project_name="balboa" -d is_new_project=True
-d airflow_profile_path="automate/dbt" -d yml_dags_path="orchestrate/dag_yml_definitions"
-d dags_path="orchestrate/dags" -a ci_answers.yml copy [email protected]:datacoves/setup_template.git ./generated/
copier --defaults -d setup_dbt_project=True -d setup_dbt_profile=True -d setup_ci_cd=True -d setup_precommit=True -d setup_airflow_dag=True -d dbt_project_dir="." -d dbt_project_name="balboa" -d is_new_project=True -d airflow_profile_path="automate/dbt" -d yml_dags_path="orchestrate/dag_yml_definitions" -d dags_path="orchestrate/dags" -d setup_precommit=True -d use_sqlfluff=True -d use_yamllint=True -d use_dbt_checkpoint=True -a ../copier-answers.yml copy ./ generated/
# run dbt debug and dbt compile
- name: Run dbt commands
Expand All @@ -39,12 +58,11 @@ jobs:
# run `dbt-coves generate airflow dags`
- name: Generate dags
run: |
dbt-coves generate airflow-dags --yml-path generated/orchestrate/dag_yml_definitions/sample_dag.yml
--dags-path generated/orchestrate/dags
dbt-coves generate airflow-dags --yml-path generated/orchestrate/dag_yml_definitions/sample_dag.yml --dags-path generated/orchestrate/dags
# fill Airflow DagBag with the result of generated dags
- name: Load dags
run: python test_load_dagbag.py
# - name: Load dags
# run: python test_load_dagbag.py #TODO: uncomment once absence of Datacoves Operators is fixed

# run pre-commit
- name: Run pre-commit
Expand Down
3 changes: 1 addition & 2 deletions ci_requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# dbt-coves==1.8.0
git+https://github.com/datacoves/dbt-coves.git@DCV-2515-dbt-coves-setup-datacoves
dbt-coves==1.7.10
dbt-core==1.7.17
dbt-snowflake==1.7.5
pre-commit==3.7.1
Expand Down
File renamed without changes.
4 changes: 1 addition & 3 deletions copier.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
_exclude:
- ci_answers.yml
- copier-answers.yml
- .github/
- ci_requirements.txt
- test_load_dagbag.py
Expand All @@ -16,14 +16,12 @@ ci_provider:
dags_path:
type: str
when: "{{airflow_dags_confirm_path}}"
# when: "{% if airflow_dags_confirm_path %}true{%endif%}"
default: "{{tentative_dags_path}}"
help: Airflow DAGs path

yml_dags_path:
type: str
when: "{{yml_dags_confirm_path}}"
# when: "{% if yml_dags_confirm_path %}true{%endif%}"
default: "{{tentative_yml_dags_path}}"
help: Airflow YML Dags path

Expand Down
Original file line number Diff line number Diff line change
@@ -1,31 +1,29 @@
generate:
sources:
database: RAW # Database where to look for source tables
sources_destination: "models/L1_staging/{{schema}}/_{{schema}}.yml" # Where sources yml files will be generated
models_destination: "models/L1_staging/{{schema}}/{{relation}}.sql" # Where models sql files will be generated
model_props_destination: "models/L1_staging/{{schema}}/{{relation}}.yml" # Where models yml files will be generated
database: RAW # Database where to look for source tables
sources_destination: "models/L1_staging/{{schema}}/_{{schema}}.yml" # Where sources yml files will be generated
models_destination: "models/L1_staging/{{schema}}/{{relation}}.sql" # Where models sql files will be generated
model_props_destination: "models/L1_staging/{{schema}}/{{relation}}.yml" # Where models yml files will be generated
update_strategy: update # Action to perform when a property file exists. Options: update, recreate, fail, ask
templates_folder: ".dbt_coves/templates" # Folder where source generation jinja templates are located.
templates_folder: ".dbt_coves/templates" # Folder where source generation jinja templates are located.
flatten_json_fields: "no" # Action to perform when VARIANT / JSON field is encountered

properties:
destination: "{{model_folder_path}}/{{model_file_name}}.yml" # Where models yml files will be generated
destination: "{{model_folder_path}}/{{model_file_name}}.yml" # Where models yml files will be generated
# You can specify a different path by declaring it explicitly, i.e.: "models/staging/{{model_file_name}}.yml"
update_strategy: ask # Action to perform when a property file already exists. Options: update, recreate, fail, ask
models: "models/" # Model(s) path where 'generate properties' will look for models for generation
update_strategy: ask # Action to perform when a property file already exists. Options: update, recreate, fail, ask
models: "models/" # Model(s) path where 'generate properties' will look for models for generation

metadata:
database: RAW
# destination: "metadata.csv"

airflow_dags:

yml_path: "/config/workspace/{{ env_var('DATACOVES__AIRFLOW_DAGS_YML_PATH') }}"
dags_path: "/config/workspace/{{ env_var('DATACOVES__AIRFLOW_DAGS_PATH') }}"
# yml_path: "/config/workspace/{{ env_var('DATACOVES__AIRFLOW_DAGS_YML_PATH') }}"
# dags_path: "/config/workspace/{{ env_var('DATACOVES__AIRFLOW_DAGS_PATH') }}"

generators_params:
AirbyteDbtGenerator:

host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"
airbyte_conn_id: airbyte_connection
Expand All @@ -41,6 +39,6 @@ generate:

extract:
airbyte:
path: /config/workspace/load/airbyte
# path: /config/workspace/load/airbyte
host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"

0 comments on commit 7077fa0

Please sign in to comment.