Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add tox support for testing redshift, bigquery and snowflake #320

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# **what?**
# Run tests for dbt-external-tables against supported adapters

# **why?**
# To ensure that dbt-external-tables works as expected with all supported adapters

# **when?**
# On every PR, on every push to main, and when manually triggered

name: Package Integration Tests

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

jobs:
  run-tests:
    # Delegates to the shared dbt-labs reusable workflow, which invokes tox
    # (see tox.ini) once per adapter listed in supported_adapters.env.
    uses: dbt-labs/dbt-package-testing/.github/workflows/run_tox.yml@v1
    with:
      # redshift
      REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
      REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
      REDSHIFT_PORT: ${{ vars.REDSHIFT_PORT }}
      REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
      # Schema names embed the run number so concurrent CI runs cannot
      # clobber each other's objects.
      REDSHIFT_SCHEMA: "integration_tests_redshift_${{ github.run_number }}"
      # snowflake
      SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
      SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
      SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
      SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
      SNOWFLAKE_SCHEMA: "integration_tests_snowflake_${{ github.run_number }}"
      # bigquery
      BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
      BIGQUERY_SCHEMA: "integration_tests_bigquery_${{ github.run_number }}"

    secrets:
      DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
      SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
      DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }}
      BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
      # NOTE(review): integration_tests.yml also exports
      # REDSHIFT_SPECTRUM_IAM_ROLE — confirm whether the reusable workflow
      # needs it passed here as well for the redshift spectrum tests.
38 changes: 21 additions & 17 deletions .github/workflows/integration_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,21 +41,25 @@ jobs:
dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target ${{ matrix.data-platform }}
dbt run-operation dbt_external_tables.stage_external_sources --target ${{ matrix.data-platform }}
dbt test --target ${{ matrix.data-platform }}
env:
REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
env:
# redshift
REDSHIFT_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
REDSHIFT_USER: ${{ secrets.REDSHIFT_TEST_USER }}
DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
REDSHIFT_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
REDSHIFT_DATABASE: ${{ secrets.REDSHIFT_TEST_DBNAME }}
# TODO: does the value below need to be an env var, or could we hard-code it in tox and here?
REDSHIFT_SPECTRUM_IAM_ROLE: ${{ secrets.REDSHIFT_SPECTRUM_IAM_ROLE }}
SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
SNOWFLAKE_TEST_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
SNOWFLAKE_TEST_WHNAME: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
SNOWFLAKE_TEST_DBNAME: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
BIGQUERY_TEST_PROJECT: ${{ secrets.BIGQUERY_TEST_PROJECT }}
BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
BIGQUERY_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_PRIVATE_KEY_ID }}
BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
BIGQUERY_CLIENT_ID: ${{ secrets.BIGQUERY_CLIENT_ID }}
REDSHIFT_SCHEMA: "dbt_external_tables_integration_tests_redshift"
# snowflake
SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
SNOWFLAKE_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
SNOWFLAKE_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
SNOWFLAKE_SCHEMA: "dbt_external_tables_integration_tests_snowflake"
# bigquery
BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
BIGQUERY_SCHEMA: "dbt_external_tables_integration_tests_bigquery"
4 changes: 4 additions & 0 deletions integration_tests/dbt_project.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ clean-targets:
- "target"
- "dbt_packages"

flags:
send_anonymous_usage_stats: False
use_colors: True

dispatch:
- macro_namespace: dbt_external_tables
search_order: ['dbt_external_tables_integration_tests', 'dbt_external_tables']
Expand Down
3 changes: 1 addition & 2 deletions integration_tests/packages.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@

packages:
- local: ../
- package: dbt-labs/dbt_utils
version: 0.8.0
- package: dbt-labs/dbt_utils
60 changes: 24 additions & 36 deletions integration_tests/profiles.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,52 +2,40 @@
# HEY! This file is used in the dbt-external-tables integrations tests with CircleCI.
# You should __NEVER__ check credentials into version control. Thanks for reading :)

config:
send_anonymous_usage_stats: False
use_colors: True

integration_tests:
target: postgres
outputs:

redshift:
type: redshift
host: "{{ env_var('REDSHIFT_TEST_HOST') }}"
user: "{{ env_var('REDSHIFT_TEST_USER') }}"
pass: "{{ env_var('REDSHIFT_TEST_PASS') }}"
dbname: "{{ env_var('REDSHIFT_TEST_DBNAME') }}"
port: "{{ env_var('REDSHIFT_TEST_PORT') | as_number }}"
schema: dbt_external_tables_integration_tests_redshift
threads: 1
type: "redshift"
host: "{{ env_var('REDSHIFT_HOST') }}"
user: "{{ env_var('REDSHIFT_USER') }}"
pass: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_PASS') }}"
dbname: "{{ env_var('REDSHIFT_DATABASE') }}"
port: "{{ env_var('REDSHIFT_PORT') | as_number }}"
schema: "{{ env_var('REDSHIFT_SCHEMA') }}"
threads: 5

snowflake:
type: snowflake
account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
password: "{{ env_var('SNOWFLAKE_TEST_PASS') }}"
role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
database: "{{ env_var('SNOWFLAKE_TEST_DBNAME') }}"
warehouse: "{{ env_var('SNOWFLAKE_TEST_WHNAME') }}"
schema: dbt_external_tables_integration_tests_snowflake
threads: 1
type: "snowflake"
account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
user: "{{ env_var('SNOWFLAKE_USER') }}"
password: "{{ env_var('DBT_ENV_SECRET_SNOWFLAKE_PASS') }}"
role: "{{ env_var('SNOWFLAKE_ROLE') }}"
database: "{{ env_var('SNOWFLAKE_DATABASE') }}"
warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}"
schema: "{{ env_var('SNOWFLAKE_SCHEMA') }}"
threads: 10

bigquery:
type: bigquery
method: service-account-json
type: "bigquery"
method: "service-account-json"
project: "{{ env_var('BIGQUERY_PROJECT') }}"
dataset: "{{ env_var('BIGQUERY_SCHEMA') }}"
threads: 10
keyfile_json:
type: "service_account"
project_id: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
private_key: "{{ env_var('BIGQUERY_PRIVATE_KEY') }}"
private_key_id: "{{ env_var('BIGQUERY_PRIVATE_KEY_ID') }}"
client_email: "{{ env_var('BIGQUERY_CLIENT_EMAIL') }}"
client_id: "{{ env_var('BIGQUERY_CLIENT_ID') }}"
auth_uri: "https://accounts.google.com/o/oauth2/auth"
token_uri: "https://oauth2.googleapis.com/token"
auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs"
client_x509_cert_url: https://www.googleapis.com/robot/v1/metadata/x509/{{ env_var('BIGQUERY_CLIENT_EMAIL') | urlencode }}"
project: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
schema: dbt_external_tables_integration_tests_bigquery
threads: 1
"{{ env_var('BIGQUERY_KEYFILE_JSON') | as_native }}"
job_retries: 3

databricks:
type: spark
Expand Down
22 changes: 11 additions & 11 deletions integration_tests/test.env.sample
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
# gh secret set -f integration_tests/test.env -e ci_testing

# redshift
REDSHIFT_TEST_HOST=
REDSHIFT_TEST_USER=
REDSHIFT_TEST_PASS=
REDSHIFT_TEST_DBNAME=
REDSHIFT_TEST_PORT=
REDSHIFT_HOST=
REDSHIFT_USER=
DBT_ENV_SECRET_REDSHIFT_PASS=
REDSHIFT_DATABASE=
REDSHIFT_PORT=
REDSHIFT_SPECTRUM_IAM_ROLE=

# snowflake

SNOWFLAKE_TEST_ACCOUNT=
SNOWFLAKE_TEST_USER=
SNOWFLAKE_TEST_PASS=
SNOWFLAKE_TEST_ROLE=
SNOWFLAKE_TEST_DBNAME=
SNOWFLAKE_TEST_WHNAME=
SNOWFLAKE_ACCOUNT=
SNOWFLAKE_USER=
DBT_ENV_SECRET_SNOWFLAKE_PASS=
SNOWFLAKE_ROLE=
SNOWFLAKE_DATABASE=
SNOWFLAKE_WAREHOUSE=

# bigquery
BIGQUERY_PRIVATE_KEY=
Expand Down
1 change: 1 addition & 0 deletions supported_adapters.env
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
SUPPORTED_ADAPTERS=snowflake,redshift,bigquery
69 changes: 69 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
[tox]
skipsdist = True
envlist = lint_all, testenv

[testenv]
# Environment variables forwarded from CI into every test environment.
passenv =
    # redshift
    REDSHIFT_HOST
    REDSHIFT_USER
    DBT_ENV_SECRET_REDSHIFT_PASS
    REDSHIFT_DATABASE
    REDSHIFT_SCHEMA
    REDSHIFT_PORT
    # exported by the workflow for the spectrum tests
    REDSHIFT_SPECTRUM_IAM_ROLE
    # snowflake
    SNOWFLAKE_ACCOUNT
    SNOWFLAKE_USER
    DBT_ENV_SECRET_SNOWFLAKE_PASS
    SNOWFLAKE_ROLE
    SNOWFLAKE_DATABASE
    SNOWFLAKE_WAREHOUSE
    SNOWFLAKE_SCHEMA
    # bigquery — without these the bigquery testenv never receives its
    # project, schema, or credentials
    BIGQUERY_PROJECT
    BIGQUERY_SCHEMA
    BIGQUERY_KEYFILE_JSON

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_redshift]
changedir = integration_tests
allowlist_externals =
    dbt
skip_install = true
commands =
    dbt deps --target redshift
    dbt seed --full-refresh --target redshift
    dbt run --target redshift
    dbt run-operation prep_external --target redshift
    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target redshift
    dbt run-operation dbt_external_tables.stage_external_sources --target redshift
    dbt test --target redshift

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_snowflake]
changedir = integration_tests
allowlist_externals =
    dbt
skip_install = true
commands =
    dbt deps --target snowflake
    dbt seed --full-refresh --target snowflake
    dbt run --target snowflake
    dbt run-operation prep_external --target snowflake
    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target snowflake
    dbt run-operation dbt_external_tables.stage_external_sources --target snowflake
    dbt test --target snowflake

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_bigquery]
changedir = integration_tests
allowlist_externals =
    dbt
skip_install = true
commands =
    dbt deps --target bigquery
    dbt seed --full-refresh --target bigquery
    dbt run --target bigquery
    dbt run-operation prep_external --target bigquery
    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target bigquery
    dbt run-operation dbt_external_tables.stage_external_sources --target bigquery
    dbt test --target bigquery
Loading