
Commit

update dags
Noel Gomez committed Feb 7, 2024
1 parent bddc799 commit 23bce86
Showing 5 changed files with 10 additions and 24 deletions.
8 changes: 0 additions & 8 deletions .github/workflows/merge_to_main.yml
@@ -9,8 +9,6 @@ on: # yamllint disable-line rule:truthy
paths:
- .github/workflows/*
- automate/*
- visualize/*
- orchestrate/*
- transform/*
- transform/**/*

@@ -57,12 +55,6 @@ jobs:
# deployments, most likely you don't want to set this, we use it for demos
DATACOVES__DROP_DB_ON_FAIL: ${{ vars.DATACOVES__DROP_DB_ON_FAIL }}

# this is only here temporarily until we update dbt-coves
DATACOVES__AIRFLOW_DAGS_YML_PATH: ''
DATACOVES__AIRFLOW_DAGS_PATH: ''
DATACOVES__AIRBYTE_HOST_NAME: ''
DATACOVES__AIRBYTE_PORT: ''

steps:
- name: Checkout branch
uses: actions/checkout@v2
10 changes: 1 addition & 9 deletions .github/workflows/pull_request_build.yml
@@ -53,13 +53,6 @@ jobs:
# cannot be applied when using the Datacoves permifrost security model.
DATACOVES__DROP_DB_ON_FAIL: ${{ vars.DATACOVES__DROP_DB_ON_FAIL }}


# this is only here temporarily until we update dbt-coves
DATACOVES__AIRFLOW_DAGS_YML_PATH: ''
DATACOVES__AIRFLOW_DAGS_PATH: ''
DATACOVES__AIRBYTE_HOST_NAME: ''
DATACOVES__AIRBYTE_PORT: ''

steps:
- name: Checkout branch
uses: actions/[email protected]
@@ -96,8 +89,7 @@ jobs:
run: "dbt --no-write-json run-operation grant_access_to_pr_database"

- name: Generate Docs Combining Prod and branch catalog.json
#TODO: remove profiles-dir when dbt-coves is fixed
run: "dbt-coves generate docs --merge-deferred --state logs --profiles-dir /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/automate/dbt"
run: "dbt-coves generate docs --merge-deferred --state logs"

- name: Run governance checks
run: "pre-commit run --from-ref origin/${{ github.event.pull_request.base.ref }} --to-ref HEAD"
6 changes: 5 additions & 1 deletion orchestrate/dags/daily_loan_run.py
@@ -24,6 +24,11 @@ def extract_and_load_airbyte():
connection_id="902432a8-cbed-4602-870f-33617fda6859",
airbyte_conn_id="airbyte_connection",
)
zip_coordinates_datacoves_snowflake = AirbyteTriggerSyncOperator(
task_id="zip_coordinates_datacoves_snowflake",
connection_id="b09075d9-6b33-4265-8660-4e8cab10bd70",
airbyte_conn_id="airbyte_connection",
)
country_populations_datacoves_snowflake = AirbyteTriggerSyncOperator(
task_id="country_populations_datacoves_snowflake",
connection_id="ac02ea96-58a1-4061-be67-78900bb5aaf6",
@@ -56,7 +61,6 @@ def extract_and_load_fivetran():
tg_extract_and_load_fivetran = extract_and_load_fivetran()
extract_and_load_dlt = DatacovesBashOperator(
task_id="extract_and_load_dlt",
activate_venv=True,
tooltip="dlt Extract and Load",
bash_command="python load/dlt/csv_to_snowflake/load_csv_data.py",
)
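For reference, the new zip_coordinates sync follows the same pattern as the other Airbyte tasks in this DAG. A minimal, self-contained sketch of that pattern is shown below; the DAG name, schedule, and start date are placeholder assumptions, while the task_id, connection UUID, and airbyte_connection ID come from the diff above.

from airflow.decorators import dag, task_group
from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator
from pendulum import datetime


@dag(start_date=datetime(2024, 1, 1), schedule="@daily", catchup=False)
def airbyte_sync_sketch():
    @task_group(group_id="extract_and_load_airbyte", tooltip="Airbyte Extract and Load")
    def extract_and_load_airbyte():
        # Triggers the Airbyte connection and waits for the sync to complete
        AirbyteTriggerSyncOperator(
            task_id="zip_coordinates_datacoves_snowflake",
            connection_id="b09075d9-6b33-4265-8660-4e8cab10bd70",
            airbyte_conn_id="airbyte_connection",
        )

    extract_and_load_airbyte()


airbyte_sync_sketch()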
7 changes: 2 additions & 5 deletions orchestrate/dags/datacoves_sample_dag.py
@@ -33,17 +33,14 @@ def datacoves_sample_dag():
dbt_task = DatacovesDbtOperator(
task_id = "run_dbt_task",
bash_command = "dbt debug",
doc_md = """\
#### Task Documentation
This task leveraged the DatacovesDbtOperator
"""
)

# This is calling an external Python file after activating the venv
# use this instead of the Python Operator
python_task = DatacovesBashOperator(
task_id = "run_python_script",
activate_venv=True,
# Virtual Environment is automatically activated
# activate_venv=True,
bash_command = "python orchestrate/python_scripts/sample_script.py"
)

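The same simplification applies here: activate_venv is no longer passed because the Datacoves operators activate the project virtual environment automatically. A minimal sketch of the resulting task, assuming the operators.datacoves.bash import path shown in the YAML definition below; the surrounding DAG wrapper is a placeholder, not the actual datacoves_sample_dag.py.

from airflow.decorators import dag
from operators.datacoves.bash import DatacovesBashOperator
from pendulum import datetime


@dag(start_date=datetime(2024, 1, 1), schedule=None, catchup=False)
def run_python_script_sketch():
    DatacovesBashOperator(
        task_id="run_python_script",
        # activate_venv=True is no longer needed; the venv is activated automatically
        bash_command="python orchestrate/python_scripts/sample_script.py",
    )


run_python_script_sketch()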
3 changes: 2 additions & 1 deletion orchestrate/dags_yml_definitions/daily_loan_run.yml
@@ -26,7 +26,8 @@ nodes:
extract_and_load_dlt:
type: task
operator: operators.datacoves.bash.DatacovesBashOperator
activate_venv: true
# activate_venv: true
# Virtual Environment is automatically activated

tooltip: "dlt Extract and Load"
bash_command: "python load/dlt/csv_to_snowflake/load_csv_data.py"
