Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/10_feature_airflow_checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ jobs:
airflow:
name: Pull Request Airflow Tests
runs-on: ubuntu-latest
container: datacoves/ci-airflow-dbt-snowflake:3.4
container: datacoves/ci-airflow-dbt-snowflake:4.0
needs: [validate-branch]

env:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/10_feature_dbt_checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ jobs:
# environment: PR_ENV

# most people should use this one
container: datacoves/ci-basic-dbt-snowflake:3.4
container: datacoves/ci-basic-dbt-snowflake:4.0

defaults:
run:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/15_drop_feature_db.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ jobs:
# Alternatively, You can define multiple ENV for different workflows.
# https://github.com/<org>/<repo>/settings/environments
# environment: PR_ENV
container: datacoves/ci-basic-dbt-snowflake:3.4
container: datacoves/ci-basic-dbt-snowflake:4.0

defaults:
run:
Expand Down
12 changes: 6 additions & 6 deletions .github/workflows/20_release_dbt_checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ jobs:
# environment: PR_ENV

# Most people should use this docker image
container: datacoves/ci-basic-dbt-snowflake:3.4
container: datacoves/ci-basic-dbt-snowflake:4.0

defaults:
run:
Expand Down Expand Up @@ -213,17 +213,17 @@ jobs:
if: ${{ steps.prod_manifest.outputs.manifest_found == 'false' }}
run: "dbt build --fail-fast ${{ env.FULL_REFRESH_FLAG }}"

- name: Grant access to QA_TEMP database
if: steps.check_qa_created_today.outputs.qa_created_today != 'true'
id: grant-access-to-database
run: "dbt run-operation grant_prd_usage --args '{db_name: ${{ env.DATACOVES__MAIN__DATABASE }}}'"

- name: Swap QA with QA_TEMP database and drop old QA db
if: steps.check_qa_created_today.outputs.qa_created_today != 'true'
run: |
dbt --no-write-json run-operation swap_database --args '{db1: ${{ env.DATACOVES__MAIN__DATABASE }}, db2: ${{ env.DATACOVES__MAIN__DATABASE_QA }}, create_missing_db: true}'
dbt --no-write-json run-operation drop_recreate_db --args '{db_name: ${{ env.DATACOVES__MAIN__DATABASE }}, recreate: False}'

- name: Grant access to QA database
if: steps.check_qa_created_today.outputs.qa_created_today != 'true'
id: grant-access-to-qa-database
run: "dbt run-operation grant_db_usage --args '{db_name: ${{ env.DATACOVES__MAIN__DATABASE_QA }}}'"


# # We drop the database when there is a failure to grant access to the db because
# # most likely the schema was not set properly in dbt_project.yml so models built to default schema
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/30_deploy_changes_to_production.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ jobs:
# Alternatively, You can define multiple ENV for different workflows.
# https://github.com/<org>/<repo>/settings/environments
# environment: PR_ENV
container: datacoves/ci-basic-dbt-snowflake:3.4
container: datacoves/ci-basic-dbt-snowflake:4.0

defaults:
run:
Expand Down
1 change: 0 additions & 1 deletion automate/dbt/get_artifacts.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ then
if [ -n "$GITHUB_OUTPUT" ]; then
echo "manifest_found=false" >> $GITHUB_OUTPUT
fi
echo $manifest_found
# This is used by Jenkins
# echo "false" > temp_MANIFEST_FOUND.txt
else
Expand Down
46 changes: 46 additions & 0 deletions orchestrate/dags/other_examples/empty_task.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
"""Example DAG: two parallel ``datacoves_dbt`` tasks bracketed by
EmptyOperator start/end markers.

The Empty tasks create no pods — they run in the scheduler and exist only
to give the graph clear entry/exit points.
"""
from airflow.decorators import dag, task
from airflow.operators.empty import EmptyOperator

from datetime import datetime

# Defaults applied to every task in this DAG unless overridden per-task.
default_args = {
    "owner": "airflow",
    "depends_on_past": False,
    "email_on_failure": True,
    "email": "DL-JRDUS-RDI-WORKFLOW-NOTIFICATION@ITS.JNJ.com",
    "retries": 2,
}


@dag(
    # Rendered in the Airflow UI "DAG Docs" panel; now non-empty because the
    # module has a docstring (previously __doc__ was None).
    doc_md=__doc__,
    catchup=False,
    start_date=datetime(2024, 7, 7),
    default_args=default_args,
    # 06:45 UTC, Monday through Friday.
    schedule='45 6 * * 1-5',
    # NOTE(review): the original description ("To load the L3 models for
    # product cmc data package daily") looks copy-pasted from another DAG —
    # confirm intent; typo "datya" fixed here.
    description='To load the L3 models for product cmc data package daily',
    tags=['transform'],
    dag_id='empty_task',
)
def general_job():
    """Wire up: start -> [task_a, task_b] -> end."""

    # No pod created — runs in the scheduler.
    start = EmptyOperator(task_id='start')

    @task.datacoves_dbt(
        connection_id="main_key_pair"
    )
    def run_dbt(dbt_command):
        # The decorator executes the returned command string via dbt.
        return dbt_command

    # No pod created — runs in the scheduler.
    end = EmptyOperator(task_id='end')

    # Specify task_id when calling the function so the two invocations
    # of the same decorated task get distinct ids.
    task_a = run_dbt.override(task_id='task_a')("dbt debug")
    task_b = run_dbt.override(task_id='task_b')("dbt debug")

    start >> [task_a, task_b] >> end


general_job()
9 changes: 3 additions & 6 deletions secure/permifrost/create_snowflake_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import glob
import json
import subprocess
import os
import yaml
import argparse
import logging
Expand Down Expand Up @@ -108,7 +107,7 @@ def run_macro(macro_name, args, target):
def main(is_dry_run, snowflake_objects, target):

if (snowflake_objects == 'all') or (snowflake_objects == 'roles'):
roles_file = find_file("roles.yml", sub_path='secure')
roles_file = find_file("roles.yml", sub_path='secure/permifrost')

permifrost_roles = get_role_names(roles_file)

Expand All @@ -124,8 +123,7 @@ def main(is_dry_run, snowflake_objects, target):
)

if (snowflake_objects == 'all') or (snowflake_objects == 'schemas'):
schemas_file = find_file("databases.yml", sub_path='secure')
permifrost_schemas = get_schemas_names(schemas_file)
permifrost_schemas = get_schemas_names(find_file("databases.yml", sub_path='secure/permifrost'))

permifrost_schemas_args = {
"permifrost_schema_list": ",".join(permifrost_schemas),
Expand All @@ -139,8 +137,7 @@ def main(is_dry_run, snowflake_objects, target):
)

if (snowflake_objects == 'all') or (snowflake_objects == 'warehouses'):
warehouses_file = find_file("warehouses.yml", sub_path='secure')
permifrost_warehouses = get_warehouses(find_file("warehouses.yml"))
permifrost_warehouses = get_warehouses(find_file("warehouses.yml", sub_path='secure/permifrost'))

permifrost_warehouses_args = {
"permifrost_warehouse_list": permifrost_warehouses,
Expand Down
Loading