From 2112761fac2ec66df4db9673df4e014aad18c500 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 10 Jan 2023 12:50:22 -0800 Subject: [PATCH 001/113] fix release.yml pypi steps (#259) --- .github/workflows/release.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 91c3114e4..5fd0291e9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -193,7 +193,7 @@ jobs: pypi-release: name: Pypi release # only release to PyPi if we're not testing - will release to PyPi test when workflow gets rewritten - if: inputs.test_run == 'false' + if: ${{ inputs.test_run == false }} runs-on: ubuntu-latest @@ -201,10 +201,11 @@ jobs: environment: PypiProd steps: - - uses: actions/download-artifact@v2 + - name: "Download Build Artifact - ${{ inputs.version_number }}" + uses: actions/download-artifact@v3 with: - name: dist - path: 'dist' + name: ${{ inputs.version_number }} + path: dist/ - name: Publish distribution to PyPI uses: pypa/gh-action-pypi-publish@v1.4.2 From d042d7bf973fb44cf074c6dba526b5520f4db278 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 10 Jan 2023 18:25:20 -0600 Subject: [PATCH 002/113] Rename exceptions (#258) * rename exceptions * add changelog * fix unit test * point back to main * Update Under the Hood-20230110-100647.yaml --- .changes/unreleased/Under the Hood-20230110-100647.yaml | 7 +++++++ dbt/adapters/redshift/connections.py | 8 ++++---- dbt/adapters/redshift/impl.py | 6 +++--- tests/unit/test_context.py | 2 +- tests/unit/test_redshift_adapter.py | 6 +++--- 5 files changed, 18 insertions(+), 11 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20230110-100647.yaml diff --git a/.changes/unreleased/Under the Hood-20230110-100647.yaml b/.changes/unreleased/Under the Hood-20230110-100647.yaml new file mode 100644 index 000000000..a0dad2abf --- /dev/null +++ 
b/.changes/unreleased/Under the Hood-20230110-100647.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Rename exceptions to match dbt-core +time: 2023-01-10T10:06:47.570088-06:00 +custom: + Author: emmyoop + Issue: "250" + PR: "258" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index be4d626d3..a13353950 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -114,7 +114,7 @@ def fetch_cluster_credentials( ) except boto_client.exceptions.ClientError as e: - raise dbt.exceptions.FailedToConnectException( + raise dbt.exceptions.FailedToConnectError( "Unable to get temporary Redshift cluster credentials: {}".format(e) ) @@ -127,7 +127,7 @@ def get_tmp_iam_cluster_credentials(cls, credentials): iam_duration_s = credentials.iam_duration_seconds if not cluster_id: - raise dbt.exceptions.FailedToConnectException( + raise dbt.exceptions.FailedToConnectError( "'cluster_id' must be provided in profile if IAM " "authentication method selected" ) @@ -156,7 +156,7 @@ def get_credentials(cls, credentials): # this requirement is really annoying to encode into json schema, # so validate it here if credentials.password is None: - raise dbt.exceptions.FailedToConnectException( + raise dbt.exceptions.FailedToConnectError( "'password' field is required for 'database' credentials" ) return credentials @@ -166,6 +166,6 @@ def get_credentials(cls, credentials): return cls.get_tmp_iam_cluster_credentials(credentials) else: - raise dbt.exceptions.FailedToConnectException( + raise dbt.exceptions.FailedToConnectError( "Invalid 'method' in profile: '{}'".format(method) ) diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 206185f57..4ece2ff51 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -72,7 +72,7 @@ def verify_database(self, database): ra3_node = self.config.credentials.ra3_node if database.lower() != expected.lower() and not ra3_node: - raise 
dbt.exceptions.NotImplementedException( + raise dbt.exceptions.NotImplementedError( "Cross-db references allowed only in RA3.* node. ({} vs {})".format( database, expected ) @@ -85,8 +85,8 @@ def _get_catalog_schemas(self, manifest): schemas = super(SQLAdapter, self)._get_catalog_schemas(manifest) try: return schemas.flatten(allow_multiple_databases=self.config.credentials.ra3_node) - except dbt.exceptions.RuntimeException as exc: - dbt.exceptions.raise_compiler_error( + except dbt.exceptions.DbtRuntimeError as exc: + raise dbt.exceptions.CompilationError( "Cross-db references allowed only in {} RA3.* node. Got {}".format( self.type(), exc.msg ) diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py index 5b975d029..5170fcfbf 100644 --- a/tests/unit/test_context.py +++ b/tests/unit/test_context.py @@ -212,7 +212,7 @@ def test_resolve_specific(config, manifest_extended, redshift_adapter, get_inclu ctx['adapter'].config.dispatch # macro_a exists, but default__macro_a and redshift__macro_a do not - with pytest.raises(dbt.exceptions.CompilationException): + with pytest.raises(dbt.exceptions.CompilationError): ctx['adapter'].dispatch('macro_a').macro # root namespace is always preferred, unless search order is explicitly defined in 'dispatch' config diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index 33c3dc1aa..92fd9cbd8 100644 --- a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -10,7 +10,7 @@ Plugin as RedshiftPlugin, ) from dbt.clients import agate_helper -from dbt.exceptions import FailedToConnectException +from dbt.exceptions import FailedToConnectError from .utils import config_from_parts_or_dicts, mock_connection, TestAdapterConversions, inject_adapter @@ -115,7 +115,7 @@ def test_invalid_auth_method(self): # we have to set method this way, otherwise it won't validate self.config.credentials.method = 'badmethod' - with self.assertRaises(FailedToConnectException) as context: + 
with self.assertRaises(FailedToConnectError) as context: with mock.patch.object( RedshiftAdapter.ConnectionManager, 'fetch_cluster_credentials', @@ -127,7 +127,7 @@ def test_invalid_auth_method(self): def test_invalid_iam_no_cluster_id(self): self.config.credentials = self.config.credentials.replace(method='iam') - with self.assertRaises(FailedToConnectException) as context: + with self.assertRaises(FailedToConnectError) as context: with mock.patch.object( RedshiftAdapter.ConnectionManager, 'fetch_cluster_credentials', From ef443bb9ccfeb0e95a47ef0c48776b64a685397d Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Wed, 11 Jan 2023 12:34:09 -0800 Subject: [PATCH 003/113] remove setup.py from version cfg (#261) --- .bumpversion.cfg | 2 -- 1 file changed, 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 1d6478936..f31129a06 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -22,6 +22,4 @@ values = [bumpversion:part:num] first_value = 1 -[bumpversion:file:setup.py] - [bumpversion:file:dbt/adapters/redshift/__version__.py] From 4bb289d37cf23dfb117a410d13c091770931a6c9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 12 Jan 2023 11:04:33 -0800 Subject: [PATCH 004/113] Bumping version to 1.5.0a1 and generate changelog (#265) * Bumping version to 1.5.0a1 and generate CHANGELOG * manual changelog updates Co-authored-by: Github Build Bot Co-authored-by: Colin --- .bumpversion.cfg | 2 +- .changes/0.0.0.md | 1 + .changes/1.4.0-b1.md | 3 --- .changes/1.4.0/Under the Hood-20220926-101606.yaml | 7 ------- .changes/unreleased/Dependencies-20221209-233905.yaml | 7 ------- .changes/unreleased/Under the Hood-20221219-163610.yaml | 7 ------- .changes/unreleased/Under the Hood-20230110-100647.yaml | 7 ------- CHANGELOG.md | 5 +---- dbt/adapters/redshift/__version__.py | 2 +- 9 files changed, 4 insertions(+), 37 deletions(-) delete 
mode 100644 .changes/1.4.0-b1.md delete mode 100644 .changes/1.4.0/Under the Hood-20220926-101606.yaml delete mode 100644 .changes/unreleased/Dependencies-20221209-233905.yaml delete mode 100644 .changes/unreleased/Under the Hood-20221219-163610.yaml delete mode 100644 .changes/unreleased/Under the Hood-20230110-100647.yaml diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f31129a06..a3e0f8c88 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.4.0b1 +current_version = 1.5.0a1 parse = (?P\d+) \.(?P\d+) \.(?P\d+) diff --git a/.changes/0.0.0.md b/.changes/0.0.0.md index 25dc29408..ed4a3e9be 100644 --- a/.changes/0.0.0.md +++ b/.changes/0.0.0.md @@ -1,5 +1,6 @@ ## Previous Releases For information on prior major and minor releases, see their changelogs: +- [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-redshift/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-redshift/blob/1.2.latest/CHANGELOG.md) - [1.1](https://github.com/dbt-labs/dbt-redshift/blob/1.1.latest/CHANGELOG.md) diff --git a/.changes/1.4.0-b1.md b/.changes/1.4.0-b1.md deleted file mode 100644 index 2376b7759..000000000 --- a/.changes/1.4.0-b1.md +++ /dev/null @@ -1,3 +0,0 @@ -## dbt-redshift 1.4.0-b1 - December 15, 2022 -### Features -- Migrate dbt-utils current_timestamp macros into core + adapters ([#194](https://github.com/dbt-labs/dbt-redshift/issues/194), [#191](https://github.com/dbt-labs/dbt-redshift/pull/191)) diff --git a/.changes/1.4.0/Under the Hood-20220926-101606.yaml b/.changes/1.4.0/Under the Hood-20220926-101606.yaml deleted file mode 100644 index 3b48d2315..000000000 --- a/.changes/1.4.0/Under the Hood-20220926-101606.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Migrate dbt-utils current_timestamp macros into core + adapters -time: 2022-09-26T10:16:06.676737-07:00 -custom: - Author: colin-rogers-dbt - Issue: "194" - PR: "191" diff 
--git a/.changes/unreleased/Dependencies-20221209-233905.yaml b/.changes/unreleased/Dependencies-20221209-233905.yaml deleted file mode 100644 index 31b06b6ac..000000000 --- a/.changes/unreleased/Dependencies-20221209-233905.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Dependencies -body: Add support for python 3.11 -time: 2022-12-09T23:39:05.296196-05:00 -custom: - Author: mikealfare - Issue: "225" - PR: "236" diff --git a/.changes/unreleased/Under the Hood-20221219-163610.yaml b/.changes/unreleased/Under the Hood-20221219-163610.yaml deleted file mode 100644 index 131d55986..000000000 --- a/.changes/unreleased/Under the Hood-20221219-163610.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Consistent capitalization for `CONTRIBUTING.md` -time: 2022-12-19T16:36:10.416838-07:00 -custom: - Author: dbeatty10 - Issue: "252" - PR: "252" diff --git a/.changes/unreleased/Under the Hood-20230110-100647.yaml b/.changes/unreleased/Under the Hood-20230110-100647.yaml deleted file mode 100644 index a0dad2abf..000000000 --- a/.changes/unreleased/Under the Hood-20230110-100647.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Rename exceptions to match dbt-core -time: 2023-01-10T10:06:47.570088-06:00 -custom: - Author: emmyoop - Issue: "250" - PR: "258" diff --git a/CHANGELOG.md b/CHANGELOG.md index 83c2e942f..bb9d7a4bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,9 @@ - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) -## dbt-redshift 1.4.0-b1 - December 15, 2022 -### Features -- Migrate dbt-utils current_timestamp macros into core + adapters ([#194](https://github.com/dbt-labs/dbt-redshift/issues/194), [#191](https://github.com/dbt-labs/dbt-redshift/pull/191)) - ## Previous Releases For information on prior major and minor releases, see their changelogs: +- [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-redshift/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-redshift/blob/1.2.latest/CHANGELOG.md) - [1.1](https://github.com/dbt-labs/dbt-redshift/blob/1.1.latest/CHANGELOG.md) diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 27cfeecd9..219c289b1 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.4.0b1" +version = "1.5.0a1" From 57ab31d06e55de649320ce1d873303f753e4d7ec Mon Sep 17 00:00:00 2001 From: Jeremy Cohen Date: Thu, 19 Jan 2023 13:20:47 +0100 Subject: [PATCH 005/113] Convert incremental on_schema_change tests (#269) * Convert incremental on_schema_change tests * Switch to dbt-core main --- .../test_incremental_on_schema_change.py | 4 + .../test_incremental_unique_id.py | 0 .../models/incremental_append_new_columns.sql | 29 ---- ...remental_append_new_columns_remove_one.sql | 28 ---- ...l_append_new_columns_remove_one_target.sql | 19 --- .../incremental_append_new_columns_target.sql | 19 --- .../models/incremental_fail.sql | 19 --- .../models/incremental_ignore.sql | 19 --- .../models/incremental_ignore_target.sql | 15 -- .../models/incremental_sync_all_columns.sql | 31 ---- .../incremental_sync_all_columns_target.sql | 20 --- .../models/model_a.sql | 22 --- .../models/schema.yml | 68 -------- 
.../test_incremental_schema.py | 148 ------------------ .../tests/select_from_a.sql | 1 - ...ct_from_incremental_append_new_columns.sql | 1 - ...remental_append_new_columns_remove_one.sql | 1 - ...l_append_new_columns_remove_one_target.sql | 1 - ..._incremental_append_new_columns_target.sql | 1 - .../tests/select_from_incremental_ignore.sql | 1 - .../select_from_incremental_ignore_target.sql | 1 - ...lect_from_incremental_sync_all_columns.sql | 1 - ...om_incremental_sync_all_columns_target.sql | 1 - 23 files changed, 4 insertions(+), 446 deletions(-) create mode 100644 tests/functional/adapter/incremental/test_incremental_on_schema_change.py rename tests/functional/adapter/{ => incremental}/test_incremental_unique_id.py (100%) delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_fail.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_ignore.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/model_a.sql delete mode 100644 tests/integration/incremental_schema_tests/models/schema.yml delete mode 100644 tests/integration/incremental_schema_tests/test_incremental_schema.py delete mode 100644 
tests/integration/incremental_schema_tests/tests/select_from_a.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql diff --git a/tests/functional/adapter/incremental/test_incremental_on_schema_change.py b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py new file mode 100644 index 000000000..192097bc5 --- /dev/null +++ b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py @@ -0,0 +1,4 @@ +from dbt.tests.adapter.incremental.test_incremental_on_schema_change import BaseIncrementalOnSchemaChange + +class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange): + pass diff --git a/tests/functional/adapter/test_incremental_unique_id.py b/tests/functional/adapter/incremental/test_incremental_unique_id.py similarity index 100% rename from tests/functional/adapter/test_incremental_unique_id.py rename to tests/functional/adapter/incremental/test_incremental_unique_id.py diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql 
b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql deleted file mode 100644 index 18d0d5d88..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql +++ /dev/null @@ -1,29 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='append_new_columns' - ) -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2, - cast(field3 as {{string_type}}) as field3, - cast(field4 as {{string_type}}) as field4 -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 -FROM source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql deleted file mode 100644 index 19c8ea616..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql +++ /dev/null @@ -1,28 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='append_new_columns' - ) -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field3 as {{string_type}}) as field3, - cast(field4 as {{string_type}}) as field4 -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 
-FROM source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql deleted file mode 100644 index 419fdf96b..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config(materialized='table') -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id, - cast(field1 as {{string_type}}) as field1, - cast(CASE WHEN id > 3 THEN NULL ELSE field2 END as {{string_type}}) AS field2, - cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3, - cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql deleted file mode 100644 index 55ed7b2c5..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config(materialized='table') -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id - ,cast(field1 as {{string_type}}) as field1 - ,cast(field2 as {{string_type}}) as field2 - ,cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3 - ,cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 - -from source_data \ No newline at end of file diff --git 
a/tests/integration/incremental_schema_tests/models/incremental_fail.sql b/tests/integration/incremental_schema_tests/models/incremental_fail.sql deleted file mode 100644 index 590f5b56d..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_fail.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='fail' - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, field1, field2 FROM source_data - -{% else %} - -SELECT id, field1, field3 FROm source_data - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_ignore.sql b/tests/integration/incremental_schema_tests/models/incremental_ignore.sql deleted file mode 100644 index 51dee6022..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_ignore.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='ignore' - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, field1, field2, field3, field4 FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, field1, field2 FROM source_data LIMIT 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql b/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql deleted file mode 100644 index 92d4564e0..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql +++ /dev/null @@ -1,15 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id - ,field1 - ,field2 - -from source_data \ No newline at end of file diff --git 
a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql b/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql deleted file mode 100644 index 56a3e3c0f..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql +++ /dev/null @@ -1,31 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='sync_all_columns' - - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field3 as {{string_type}}) as field3, -- to validate new fields - cast(field4 as {{string_type}}) AS field4 -- to validate new fields - -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -select id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 - -from source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql b/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql deleted file mode 100644 index abffbf746..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql +++ /dev/null @@ -1,20 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -select id - ,cast(field1 as {{string_type}}) as field1 - --,field2 - ,cast(case when id <= 3 then null else field3 end as {{string_type}}) as field3 - ,cast(case when id <= 3 then null else field4 end as {{string_type}}) as field4 - -from source_data -order by id \ No newline at end of file diff --git 
a/tests/integration/incremental_schema_tests/models/model_a.sql b/tests/integration/incremental_schema_tests/models/model_a.sql deleted file mode 100644 index 2a0b2ddaf..000000000 --- a/tests/integration/incremental_schema_tests/models/model_a.sql +++ /dev/null @@ -1,22 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4 - union all select 2 as id, 'ccc' as field1, 'ddd' as field2, 222 as field3, 'UUU' as field4 - union all select 3 as id, 'eee' as field1, 'fff' as field2, 333 as field3, 'VVV' as field4 - union all select 4 as id, 'ggg' as field1, 'hhh' as field2, 444 as field3, 'WWW' as field4 - union all select 5 as id, 'iii' as field1, 'jjj' as field2, 555 as field3, 'XXX' as field4 - union all select 6 as id, 'kkk' as field1, 'lll' as field2, 666 as field3, 'YYY' as field4 - -) - -select id - ,field1 - ,field2 - ,field3 - ,field4 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/schema.yml b/tests/integration/incremental_schema_tests/models/schema.yml deleted file mode 100644 index 6d2a85bea..000000000 --- a/tests/integration/incremental_schema_tests/models/schema.yml +++ /dev/null @@ -1,68 +0,0 @@ -version: 2 - -models: - - name: model_a - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_ignore - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_ignore_target - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_append_new_columns - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_append_new_columns_target - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_append_new_columns_remove_one - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: 
incremental_append_new_columns_remove_one_target - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_sync_all_columns - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_sync_all_columns_target - columns: - - name: id - tags: [column_leveL_tag] - tests: - - unique - - - \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/test_incremental_schema.py b/tests/integration/incremental_schema_tests/test_incremental_schema.py deleted file mode 100644 index 7dff20c1e..000000000 --- a/tests/integration/incremental_schema_tests/test_incremental_schema.py +++ /dev/null @@ -1,148 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, FakeArgs, use_profile - - -class TestSelectionExpansion(DBTIntegrationTest): - @property - def schema(self): - return "test_incremental_schema" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - "config-version": 2, - "test-paths": ["tests"] - } - - def list_tests_and_assert(self, include, exclude, expected_tests): - list_args = ['ls', '--resource-type', 'test'] - if include: - list_args.extend(('--select', include)) - if exclude: - list_args.extend(('--exclude', exclude)) - - listed = self.run_dbt(list_args) - print(listed) - assert len(listed) == len(expected_tests) - - test_names = [name.split('.')[-1] for name in listed] - assert sorted(test_names) == sorted(expected_tests) - - def run_tests_and_assert( - self, include, exclude, expected_tests, compare_source, compare_target - ): - - run_args = ['run'] - if include: - run_args.extend(('--models', include)) - - results_one = self.run_dbt(run_args) - results_two = self.run_dbt(run_args) - - self.assertEqual(len(results_one), 3) - self.assertEqual(len(results_two), 3) - - test_args = ['test'] - if include: - test_args.extend(('--models', include)) - if exclude: - test_args.extend(('--exclude', exclude)) - - results = 
self.run_dbt(test_args) - tests_run = [r.node.name for r in results] - assert len(tests_run) == len(expected_tests) - assert sorted(tests_run) == sorted(expected_tests) - self.assertTablesEqual(compare_source, compare_target) - - def run_incremental_ignore(self): - select = 'model_a incremental_ignore incremental_ignore_target' - compare_source = 'incremental_ignore' - compare_target = 'incremental_ignore_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_ignore', - 'select_from_incremental_ignore_target', - 'unique_model_a_id', - 'unique_incremental_ignore_id', - 'unique_incremental_ignore_target_id' - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_append_new_columns(self): - select = 'model_a incremental_append_new_columns incremental_append_new_columns_target' - compare_source = 'incremental_append_new_columns' - compare_target = 'incremental_append_new_columns_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_append_new_columns', - 'select_from_incremental_append_new_columns_target', - 'unique_model_a_id', - 'unique_incremental_append_new_columns_id', - 'unique_incremental_append_new_columns_target_id' - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_append_new_columns_remove_one(self): - select = 'model_a incremental_append_new_columns_remove_one incremental_append_new_columns_remove_one_target' - compare_source = 'incremental_append_new_columns_remove_one' - compare_target = 'incremental_append_new_columns_remove_one_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_append_new_columns_remove_one', - 'select_from_incremental_append_new_columns_remove_one_target', - 'unique_model_a_id', - 
'unique_incremental_append_new_columns_remove_one_id', - 'unique_incremental_append_new_columns_remove_one_target_id' - ] - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_sync_all_columns(self): - select = 'model_a incremental_sync_all_columns incremental_sync_all_columns_target' - compare_source = 'incremental_sync_all_columns' - compare_target = 'incremental_sync_all_columns_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_sync_all_columns', - 'select_from_incremental_sync_all_columns_target', - 'unique_model_a_id', - 'unique_incremental_sync_all_columns_id', - 'unique_incremental_sync_all_columns_target_id' - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_fail_on_schema_change(self): - select = 'model_a incremental_fail' - self.run_dbt(['run', '--models', select, '--full-refresh']) - results = self.run_dbt(['run', '--models', select], expect_pass=False) - self.assertIn('Compilation Error', results[1].message) - - @use_profile('redshift') - def test__redshift__run_incremental_ignore(self): - self.run_incremental_ignore() - - @use_profile('redshift') - def test__redshift__run_incremental_append_new_columns(self): - self.run_incremental_append_new_columns() - self.run_incremental_append_new_columns_remove_one() - - @use_profile('redshift') - def test__redshift__run_incremental_sync_all_columns(self): - self.run_incremental_sync_all_columns() - - @use_profile('redshift') - def test__redshift__run_incremental_fail_on_schema_change(self): - self.run_incremental_fail_on_schema_change() - diff --git a/tests/integration/incremental_schema_tests/tests/select_from_a.sql b/tests/integration/incremental_schema_tests/tests/select_from_a.sql deleted file mode 100644 index 3dc8f2857..000000000 --- 
a/tests/integration/incremental_schema_tests/tests/select_from_a.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('model_a') }} where false diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql deleted file mode 100644 index 947e84588..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql deleted file mode 100644 index 06d52c6d6..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns_remove_one') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql deleted file mode 100644 index 07d2412b0..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns_remove_one_target') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql deleted file mode 100644 index 8b86eddd7..000000000 
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns_target') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql deleted file mode 100644 index d565c8464..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_ignore') }} where false diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql deleted file mode 100644 index 35d535c5c..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_ignore_target') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql deleted file mode 100644 index aedc9f803..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_sync_all_columns') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql deleted file mode 100644 index 4b703c988..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql +++ /dev/null @@ -1 
+0,0 @@ -select * from {{ ref('incremental_sync_all_columns_target') }} where false \ No newline at end of file From 53d59eb72366be5f5bee32a2f82b8732db869242 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Thu, 26 Jan 2023 01:20:56 -0800 Subject: [PATCH 006/113] Remove test for CT-1629 in dbt-core. (#272) Co-authored-by: Mila Page --- .../column_quoting/models-unquoted/model.sql | 19 ----- .../column_quoting/models/model.sql | 16 ---- .../integration/column_quoting/seeds/seed.csv | 4 - .../column_quoting/test_column_quotes.py | 78 ------------------- 4 files changed, 117 deletions(-) delete mode 100644 tests/integration/column_quoting/models-unquoted/model.sql delete mode 100644 tests/integration/column_quoting/models/model.sql delete mode 100644 tests/integration/column_quoting/seeds/seed.csv delete mode 100644 tests/integration/column_quoting/test_column_quotes.py diff --git a/tests/integration/column_quoting/models-unquoted/model.sql b/tests/integration/column_quoting/models-unquoted/model.sql deleted file mode 100644 index e6862aa2e..000000000 --- a/tests/integration/column_quoting/models-unquoted/model.sql +++ /dev/null @@ -1,19 +0,0 @@ -{% set col_a = '"col_a"' %} -{% set col_b = '"col_b"' %} -{% if adapter.type() == 'bigquery' %} - {% set col_a = '`col_a`' %} - {% set col_b = '`col_b`' %} -{% elif adapter.type() == 'snowflake' %} - {% set col_a = '"COL_A"' %} - {% set col_b = '"COL_B"' %} -{% endif %} - -{{config( - materialized = 'incremental', - unique_key = col_a, - incremental_strategy = var('strategy') - )}} - -select -{{ col_a }}, {{ col_b }} -from {{ref('seed')}} diff --git a/tests/integration/column_quoting/models/model.sql b/tests/integration/column_quoting/models/model.sql deleted file mode 100644 index 8c19c6546..000000000 --- a/tests/integration/column_quoting/models/model.sql +++ /dev/null @@ -1,16 +0,0 @@ -{% set col_a = '"col_A"' %} -{% set col_b = '"col_B"' %} -{% if adapter.type() == 'bigquery' 
%} - {% set col_a = '`col_A`' %} - {% set col_b = '`col_B`' %} -{% endif %} - -{{config( - materialized = 'incremental', - unique_key = col_a, - incremental_strategy = var('strategy') - )}} - -select -{{ col_a }}, {{ col_b }} -from {{ref('seed')}} diff --git a/tests/integration/column_quoting/seeds/seed.csv b/tests/integration/column_quoting/seeds/seed.csv deleted file mode 100644 index d4a1e26ee..000000000 --- a/tests/integration/column_quoting/seeds/seed.csv +++ /dev/null @@ -1,4 +0,0 @@ -col_A,col_B -1,2 -3,4 -5,6 diff --git a/tests/integration/column_quoting/test_column_quotes.py b/tests/integration/column_quoting/test_column_quotes.py deleted file mode 100644 index 2d15ee7de..000000000 --- a/tests/integration/column_quoting/test_column_quotes.py +++ /dev/null @@ -1,78 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import os - - -class BaseColumnQuotingTest(DBTIntegrationTest): - def column_quoting(self): - raise NotImplementedError('column_quoting not implemented') - - @property - def schema(self): - return 'dbt_column_quoting' - - @staticmethod - def dir(value): - return os.path.normpath(value) - - def _run_columnn_quotes(self, strategy='delete+insert'): - strategy_vars = '{{"strategy": "{}"}}'.format(strategy) - self.run_dbt(['seed', '--vars', strategy_vars]) - self.run_dbt(['run', '--vars', strategy_vars]) - self.run_dbt(['run', '--vars', strategy_vars]) - - -class TestColumnQuotingDefault(BaseColumnQuotingTest): - @property - def project_config(self): - return { - 'config-version': 2 - } - - @property - def models(self): - return self.dir('models-unquoted') - - def run_dbt(self, *args, **kwargs): - return super().run_dbt(*args, **kwargs) - - @use_profile('redshift') - def test_redshift_column_quotes(self): - self._run_columnn_quotes() - - -class TestColumnQuotingDisabled(BaseColumnQuotingTest): - @property - def models(self): - return self.dir('models-unquoted') - - @property - def project_config(self): - return { - 
'config-version': 2, - 'seeds': { - 'quote_columns': False, - }, - } - - @use_profile('redshift') - def test_redshift_column_quotes(self): - self._run_columnn_quotes() - - -class TestColumnQuotingEnabled(BaseColumnQuotingTest): - @property - def models(self): - return self.dir('models') - - @property - def project_config(self): - return { - 'config-version': 2, - 'seeds': { - 'quote_columns': True, - }, - } - - @use_profile('redshift') - def test_redshift_column_quotes(self): - self._run_columnn_quotes() From 22959d086eea49489bbc9c538adaf860a7156614 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Thu, 26 Jan 2023 11:19:14 -0800 Subject: [PATCH 007/113] remove sources integration tests (#279) --- .../sources_test/error_models/model.sql | 1 - .../sources_test/error_models/schema.yml | 12 -- .../sources_test/filtered_models/schema.yml | 18 -- .../integration/sources_test/macros/macro.sql | 14 -- .../malformed_models/descendant_model.sql | 1 - .../sources_test/malformed_models/schema.yml | 14 -- .../malformed_schema_tests/model.sql | 1 - .../malformed_schema_tests/schema.yml | 14 -- .../sources_test/models/descendant_model.sql | 1 - .../sources_test/models/ephemeral_model.sql | 3 - .../models/multi_source_model.sql | 2 - .../models/nonsource_descendant.sql | 1 - .../sources_test/models/schema.yml | 77 ------- .../sources_test/models/view_model.sql | 3 - tests/integration/sources_test/seed.sql | 113 ---------- .../seeds/expected_multi_source.csv | 4 - .../sources_test/seeds/other_source_table.csv | 4 - .../sources_test/seeds/other_table.csv | 4 - .../integration/sources_test/seeds/source.csv | 101 --------- .../integration/sources_test/test_sources.py | 203 ------------------ 20 files changed, 591 deletions(-) delete mode 100644 tests/integration/sources_test/error_models/model.sql delete mode 100644 tests/integration/sources_test/error_models/schema.yml delete mode 100644 
tests/integration/sources_test/filtered_models/schema.yml delete mode 100644 tests/integration/sources_test/macros/macro.sql delete mode 100644 tests/integration/sources_test/malformed_models/descendant_model.sql delete mode 100644 tests/integration/sources_test/malformed_models/schema.yml delete mode 100644 tests/integration/sources_test/malformed_schema_tests/model.sql delete mode 100644 tests/integration/sources_test/malformed_schema_tests/schema.yml delete mode 100644 tests/integration/sources_test/models/descendant_model.sql delete mode 100644 tests/integration/sources_test/models/ephemeral_model.sql delete mode 100644 tests/integration/sources_test/models/multi_source_model.sql delete mode 100644 tests/integration/sources_test/models/nonsource_descendant.sql delete mode 100644 tests/integration/sources_test/models/schema.yml delete mode 100644 tests/integration/sources_test/models/view_model.sql delete mode 100644 tests/integration/sources_test/seed.sql delete mode 100644 tests/integration/sources_test/seeds/expected_multi_source.csv delete mode 100644 tests/integration/sources_test/seeds/other_source_table.csv delete mode 100644 tests/integration/sources_test/seeds/other_table.csv delete mode 100644 tests/integration/sources_test/seeds/source.csv delete mode 100644 tests/integration/sources_test/test_sources.py diff --git a/tests/integration/sources_test/error_models/model.sql b/tests/integration/sources_test/error_models/model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/error_models/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/error_models/schema.yml b/tests/integration/sources_test/error_models/schema.yml deleted file mode 100644 index 69cf1f304..000000000 --- a/tests/integration/sources_test/error_models/schema.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: 2 -sources: - - name: test_source - loader: custom - 
freshness: - warn_after: {count: 10, period: hour} - error_after: {count: 1, period: day} - schema: invalid - tables: - - name: test_table - identifier: source - loaded_at_field: updated_at diff --git a/tests/integration/sources_test/filtered_models/schema.yml b/tests/integration/sources_test/filtered_models/schema.yml deleted file mode 100644 index edad7f6ec..000000000 --- a/tests/integration/sources_test/filtered_models/schema.yml +++ /dev/null @@ -1,18 +0,0 @@ -version: 2 -sources: - - name: test_source - loader: custom - freshness: - warn_after: {count: 10, period: hour} - error_after: {count: 1, period: day} - filter: id > 1 - schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}" - quoting: - identifier: True - tables: - - name: test_table - identifier: source - loaded_at_field: updated_at - freshness: - error_after: {count: 18, period: hour} - filter: id > 101 diff --git a/tests/integration/sources_test/macros/macro.sql b/tests/integration/sources_test/macros/macro.sql deleted file mode 100644 index a607a6e4c..000000000 --- a/tests/integration/sources_test/macros/macro.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro override_me() -%} - {{ exceptions.raise_compiler_error('this is a bad macro') }} -{%- endmacro %} - -{% macro happy_little_macro() -%} - {{ override_me() }} -{%- endmacro %} - - -{% macro vacuum_source(source_name, table_name) -%} - {% call statement('stmt', auto_begin=false, fetch_result=false) %} - vacuum {{ source(source_name, table_name) }} - {% endcall %} -{%- endmacro %} diff --git a/tests/integration/sources_test/malformed_models/descendant_model.sql b/tests/integration/sources_test/malformed_models/descendant_model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/malformed_models/descendant_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/malformed_models/schema.yml 
b/tests/integration/sources_test/malformed_models/schema.yml deleted file mode 100644 index 544d18d65..000000000 --- a/tests/integration/sources_test/malformed_models/schema.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: 2 -sources: - - name: test_source - loader: custom - schema: "{{ var('test_run_schema') }}" - tables: - - name: test_table - identifier: source - tests: - - relationships: - # this is invalid (list of 3 1-key dicts instead of a single 3-key dict) - - column_name: favorite_color - - to: ref('descendant_model') - - field: favorite_color diff --git a/tests/integration/sources_test/malformed_schema_tests/model.sql b/tests/integration/sources_test/malformed_schema_tests/model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/malformed_schema_tests/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/malformed_schema_tests/schema.yml b/tests/integration/sources_test/malformed_schema_tests/schema.yml deleted file mode 100644 index d72ab2eee..000000000 --- a/tests/integration/sources_test/malformed_schema_tests/schema.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: 2 -sources: - - name: test_source - schema: "{{ var('test_run_schema') }}" - tables: - - name: test_table - identifier: source - columns: - - name: favorite_color - tests: - - relationships: - to: ref('model') - # this will get rendered as its literal - field: "{{ 'favorite' ~ 'color' }}" diff --git a/tests/integration/sources_test/models/descendant_model.sql b/tests/integration/sources_test/models/descendant_model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/models/descendant_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/models/ephemeral_model.sql b/tests/integration/sources_test/models/ephemeral_model.sql deleted file mode 100644 
index 8de35cd3e..000000000 --- a/tests/integration/sources_test/models/ephemeral_model.sql +++ /dev/null @@ -1,3 +0,0 @@ -{{ config(materialized='ephemeral') }} - -select 1 as id diff --git a/tests/integration/sources_test/models/multi_source_model.sql b/tests/integration/sources_test/models/multi_source_model.sql deleted file mode 100644 index e310206b0..000000000 --- a/tests/integration/sources_test/models/multi_source_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -select * from {{ source('test_source', 'other_test_table')}} - join {{ source('other_source', 'test_table')}} using (id) diff --git a/tests/integration/sources_test/models/nonsource_descendant.sql b/tests/integration/sources_test/models/nonsource_descendant.sql deleted file mode 100644 index 97f2151c7..000000000 --- a/tests/integration/sources_test/models/nonsource_descendant.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ schema }}.source diff --git a/tests/integration/sources_test/models/schema.yml b/tests/integration/sources_test/models/schema.yml deleted file mode 100644 index f02eb1345..000000000 --- a/tests/integration/sources_test/models/schema.yml +++ /dev/null @@ -1,77 +0,0 @@ -version: 2 -models: - - name: descendant_model - columns: - - name: favorite_color - tests: - - relationships: - to: source('test_source', 'test_table') - field: favorite_color - -sources: - - name: test_source - loader: custom - freshness: - warn_after: {count: 10, period: hour} - error_after: {count: 1, period: day} - schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}" - quoting: - identifier: True - tags: - - my_test_source_tag - tables: - - name: test_table - identifier: source - loaded_at_field: "{{ var('test_loaded_at') | as_text }}" - freshness: - error_after: {count: 18, period: hour} - tags: - - my_test_source_table_tag - columns: - - name: favorite_color - description: The favorite color - - name: id - description: The user ID - tests: - - unique - - not_null - tags: - - id_column - - name: first_name - 
description: The first name of the user - tests: [] - - name: email - description: The email address of the user - - name: ip_address - description: The last IP address the user logged in from - - name: updated_at - description: The last update time for this user - tests: - - relationships: - # do this as a table-level test, just to test out that aspect - column_name: favorite_color - to: ref('descendant_model') - field: favorite_color - - name: other_test_table - identifier: other_table - columns: - - name: id - tests: - - not_null - - unique - tags: - - id_column - - name: disabled_test_table - freshness: null - loaded_at_field: "{{ var('test_loaded_at') | as_text }}" - - name: other_source - schema: "{{ var('test_run_schema') }}" - quoting: - identifier: True - tables: - - name: test_table - identifier: other_source_table - - name: external_source - schema: "{{ var('test_run_alt_schema', var('test_run_schema')) }}" - tables: - - name: table diff --git a/tests/integration/sources_test/models/view_model.sql b/tests/integration/sources_test/models/view_model.sql deleted file mode 100644 index ecb330804..000000000 --- a/tests/integration/sources_test/models/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ -{# See here: https://github.com/dbt-labs/dbt/pull/1729 #} - -select * from {{ ref('ephemeral_model') }} diff --git a/tests/integration/sources_test/seed.sql b/tests/integration/sources_test/seed.sql deleted file mode 100644 index 40110b990..000000000 --- a/tests/integration/sources_test/seed.sql +++ /dev/null @@ -1,113 +0,0 @@ -create table {schema}.seed_expected ( - favorite_color TEXT, - id INTEGER, - first_name TEXT, - email TEXT, - ip_address TEXT, - updated_at TIMESTAMP WITHOUT TIME ZONE -); - - -INSERT INTO {schema}.seed_expected - ("favorite_color","id","first_name","email","ip_address","updated_at") -VALUES - ('blue',1,'Larry','lking0@miitbeian.gov.cn','''69.135.206.194''','2008-09-12 19:08:31'), - 
('blue',2,'Larry','lperkins1@toplist.cz','''64.210.133.162''','1978-05-09 04:15:14'), - ('blue',3,'Anna','amontgomery2@miitbeian.gov.cn','''168.104.64.114''','2011-10-16 04:07:57'), - ('blue',4,'Sandra','sgeorge3@livejournal.com','''229.235.252.98''','1973-07-19 10:52:43'), - ('blue',5,'Fred','fwoods4@google.cn','''78.229.170.124''','2012-09-30 16:38:29'), - ('blue',6,'Stephen','shanson5@livejournal.com','''182.227.157.105''','1995-11-07 21:40:50'), - ('blue',7,'William','wmartinez6@upenn.edu','''135.139.249.50''','1982-09-05 03:11:59'), - ('blue',8,'Jessica','jlong7@hao123.com','''203.62.178.210''','1991-10-16 11:03:15'), - ('blue',9,'Douglas','dwhite8@tamu.edu','''178.187.247.1''','1979-10-01 09:49:48'), - ('blue',10,'Lisa','lcoleman9@nydailynews.com','''168.234.128.249''','2011-05-26 07:45:49'), - ('blue',11,'Ralph','rfieldsa@home.pl','''55.152.163.149''','1972-11-18 19:06:11'), - ('blue',12,'Louise','lnicholsb@samsung.com','''141.116.153.154''','2014-11-25 20:56:14'), - ('blue',13,'Clarence','cduncanc@sfgate.com','''81.171.31.133''','2011-11-17 07:02:36'), - ('blue',14,'Daniel','dfranklind@omniture.com','''8.204.211.37''','1980-09-13 00:09:04'), - ('blue',15,'Katherine','klanee@auda.org.au','''176.96.134.59''','1997-08-22 19:36:56'), - ('blue',16,'Billy','bwardf@wikia.com','''214.108.78.85''','2003-10-19 02:14:47'), - ('blue',17,'Annie','agarzag@ocn.ne.jp','''190.108.42.70''','1988-10-28 15:12:35'), - ('blue',18,'Shirley','scolemanh@fastcompany.com','''109.251.164.84''','1988-08-24 10:50:57'), - ('blue',19,'Roger','rfrazieri@scribd.com','''38.145.218.108''','1985-12-31 15:17:15'), - ('blue',20,'Lillian','lstanleyj@goodreads.com','''47.57.236.17''','1970-06-08 02:09:05'), - ('blue',21,'Aaron','arodriguezk@nps.gov','''205.245.118.221''','1985-10-11 23:07:49'), - ('blue',22,'Patrick','pparkerl@techcrunch.com','''19.8.100.182''','2006-03-29 12:53:56'), - ('blue',23,'Phillip','pmorenom@intel.com','''41.38.254.103''','2011-11-07 15:35:43'), - 
('blue',24,'Henry','hgarcian@newsvine.com','''1.191.216.252''','2008-08-28 08:30:44'), - ('blue',25,'Irene','iturnero@opera.com','''50.17.60.190''','1994-04-01 07:15:02'), - ('blue',26,'Andrew','adunnp@pen.io','''123.52.253.176''','2000-11-01 06:03:25'), - ('blue',27,'David','dgutierrezq@wp.com','''238.23.203.42''','1988-01-25 07:29:18'), - ('blue',28,'Henry','hsanchezr@cyberchimps.com','''248.102.2.185''','1983-01-01 13:36:37'), - ('blue',29,'Evelyn','epetersons@gizmodo.com','''32.80.46.119''','1979-07-16 17:24:12'), - ('blue',30,'Tammy','tmitchellt@purevolume.com','''249.246.167.88''','2001-04-03 10:00:23'), - ('blue',31,'Jacqueline','jlittleu@domainmarket.com','''127.181.97.47''','1986-02-11 21:35:50'), - ('blue',32,'Earl','eortizv@opera.com','''166.47.248.240''','1996-07-06 08:16:27'), - ('blue',33,'Juan','jgordonw@sciencedirect.com','''71.77.2.200''','1987-01-31 03:46:44'), - ('blue',34,'Diane','dhowellx@nyu.edu','''140.94.133.12''','1994-06-11 02:30:05'), - ('blue',35,'Randy','rkennedyy@microsoft.com','''73.255.34.196''','2005-05-26 20:28:39'), - ('blue',36,'Janice','jriveraz@time.com','''22.214.227.32''','1990-02-09 04:16:52'), - ('blue',37,'Laura','lperry10@diigo.com','''159.148.145.73''','2015-03-17 05:59:25'), - ('blue',38,'Gary','gray11@statcounter.com','''40.193.124.56''','1970-01-27 10:04:51'), - ('blue',39,'Jesse','jmcdonald12@typepad.com','''31.7.86.103''','2009-03-14 08:14:29'), - ('blue',40,'Sandra','sgonzalez13@goodreads.com','''223.80.168.239''','1993-05-21 14:08:54'), - ('blue',41,'Scott','smoore14@archive.org','''38.238.46.83''','1980-08-30 11:16:56'), - ('blue',42,'Phillip','pevans15@cisco.com','''158.234.59.34''','2011-12-15 23:26:31'), - ('blue',43,'Steven','sriley16@google.ca','''90.247.57.68''','2011-10-29 19:03:28'), - ('blue',44,'Deborah','dbrown17@hexun.com','''179.125.143.240''','1995-04-10 14:36:07'), - ('blue',45,'Lori','lross18@ow.ly','''64.80.162.180''','1980-12-27 16:49:15'), - 
('blue',46,'Sean','sjackson19@tumblr.com','''240.116.183.69''','1988-06-12 21:24:45'), - ('blue',47,'Terry','tbarnes1a@163.com','''118.38.213.137''','1997-09-22 16:43:19'), - ('blue',48,'Dorothy','dross1b@ebay.com','''116.81.76.49''','2005-02-28 13:33:24'), - ('blue',49,'Samuel','swashington1c@house.gov','''38.191.253.40''','1989-01-19 21:15:48'), - ('blue',50,'Ralph','rcarter1d@tinyurl.com','''104.84.60.174''','2007-08-11 10:21:49'), - ('green',51,'Wayne','whudson1e@princeton.edu','''90.61.24.102''','1983-07-03 16:58:12'), - ('green',52,'Rose','rjames1f@plala.or.jp','''240.83.81.10''','1995-06-08 11:46:23'), - ('green',53,'Louise','lcox1g@theglobeandmail.com','''105.11.82.145''','2016-09-19 14:45:51'), - ('green',54,'Kenneth','kjohnson1h@independent.co.uk','''139.5.45.94''','1976-08-17 11:26:19'), - ('green',55,'Donna','dbrown1i@amazon.co.uk','''19.45.169.45''','2006-05-27 16:51:40'), - ('green',56,'Johnny','jvasquez1j@trellian.com','''118.202.238.23''','1975-11-17 08:42:32'), - ('green',57,'Patrick','pramirez1k@tamu.edu','''231.25.153.198''','1997-08-06 11:51:09'), - ('green',58,'Helen','hlarson1l@prweb.com','''8.40.21.39''','1993-08-04 19:53:40'), - ('green',59,'Patricia','pspencer1m@gmpg.org','''212.198.40.15''','1977-08-03 16:37:27'), - ('green',60,'Joseph','jspencer1n@marriott.com','''13.15.63.238''','2005-07-23 20:22:06'), - ('green',61,'Phillip','pschmidt1o@blogtalkradio.com','''177.98.201.190''','1976-05-19 21:47:44'), - ('green',62,'Joan','jwebb1p@google.ru','''105.229.170.71''','1972-09-07 17:53:47'), - ('green',63,'Phyllis','pkennedy1q@imgur.com','''35.145.8.244''','2000-01-01 22:33:37'), - ('green',64,'Katherine','khunter1r@smh.com.au','''248.168.205.32''','1991-01-09 06:40:24'), - ('green',65,'Laura','lvasquez1s@wiley.com','''128.129.115.152''','1997-10-23 12:04:56'), - ('green',66,'Juan','jdunn1t@state.gov','''44.228.124.51''','2004-11-10 05:07:35'), - ('green',67,'Judith','jholmes1u@wiley.com','''40.227.179.115''','1977-08-02 17:01:45'), - 
('green',68,'Beverly','bbaker1v@wufoo.com','''208.34.84.59''','2016-03-06 20:07:23'), - ('green',69,'Lawrence','lcarr1w@flickr.com','''59.158.212.223''','1988-09-13 06:07:21'), - ('green',70,'Gloria','gwilliams1x@mtv.com','''245.231.88.33''','1995-03-18 22:32:46'), - ('green',71,'Steven','ssims1y@cbslocal.com','''104.50.58.255''','2001-08-05 21:26:20'), - ('green',72,'Betty','bmills1z@arstechnica.com','''103.177.214.220''','1981-12-14 21:26:54'), - ('green',73,'Mildred','mfuller20@prnewswire.com','''151.158.8.130''','2000-04-19 10:13:55'), - ('green',74,'Donald','dday21@icq.com','''9.178.102.255''','1972-12-03 00:58:24'), - ('green',75,'Eric','ethomas22@addtoany.com','''85.2.241.227''','1992-11-01 05:59:30'), - ('green',76,'Joyce','jarmstrong23@sitemeter.com','''169.224.20.36''','1985-10-24 06:50:01'), - ('green',77,'Maria','mmartinez24@amazonaws.com','''143.189.167.135''','2005-10-05 05:17:42'), - ('green',78,'Harry','hburton25@youtube.com','''156.47.176.237''','1978-03-26 05:53:33'), - ('green',79,'Kevin','klawrence26@hao123.com','''79.136.183.83''','1994-10-12 04:38:52'), - ('green',80,'David','dhall27@prweb.com','''133.149.172.153''','1976-12-15 16:24:24'), - ('green',81,'Kathy','kperry28@twitter.com','''229.242.72.228''','1979-03-04 02:58:56'), - ('green',82,'Adam','aprice29@elegantthemes.com','''13.145.21.10''','1982-11-07 11:46:59'), - ('green',83,'Brandon','bgriffin2a@va.gov','''73.249.128.212''','2013-10-30 05:30:36'), - ('green',84,'Henry','hnguyen2b@discovery.com','''211.36.214.242''','1985-01-09 06:37:27'), - ('green',85,'Eric','esanchez2c@edublogs.org','''191.166.188.251''','2004-05-01 23:21:42'), - ('green',86,'Jason','jlee2d@jimdo.com','''193.92.16.182''','1973-01-08 09:05:39'), - ('green',87,'Diana','drichards2e@istockphoto.com','''19.130.175.245''','1994-10-05 22:50:49'), - ('green',88,'Andrea','awelch2f@abc.net.au','''94.155.233.96''','2002-04-26 08:41:44'), - ('green',89,'Louis','lwagner2g@miitbeian.gov.cn','''26.217.34.111''','2003-08-25 
07:56:39'), - ('green',90,'Jane','jsims2h@seesaa.net','''43.4.220.135''','1987-03-20 20:39:04'), - ('green',91,'Larry','lgrant2i@si.edu','''97.126.79.34''','2000-09-07 20:26:19'), - ('green',92,'Louis','ldean2j@prnewswire.com','''37.148.40.127''','2011-09-16 20:12:14'), - ('green',93,'Jennifer','jcampbell2k@xing.com','''38.106.254.142''','1988-07-15 05:06:49'), - ('green',94,'Wayne','wcunningham2l@google.com.hk','''223.28.26.187''','2009-12-15 06:16:54'), - ('green',95,'Lori','lstevens2m@icq.com','''181.250.181.58''','1984-10-28 03:29:19'), - ('green',96,'Judy','jsimpson2n@marriott.com','''180.121.239.219''','1986-02-07 15:18:10'), - ('green',97,'Phillip','phoward2o@usa.gov','''255.247.0.175''','2002-12-26 08:44:45'), - ('green',98,'Gloria','gwalker2p@usa.gov','''156.140.7.128''','1997-10-04 07:58:58'), - ('green',99,'Paul','pjohnson2q@umn.edu','''183.59.198.197''','1991-11-14 12:33:55'), - ('green',100,'Frank','fgreene2r@blogspot.com','''150.143.68.121''','2010-06-12 23:55:39'); diff --git a/tests/integration/sources_test/seeds/expected_multi_source.csv b/tests/integration/sources_test/seeds/expected_multi_source.csv deleted file mode 100644 index de9c1c01d..000000000 --- a/tests/integration/sources_test/seeds/expected_multi_source.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name,color -1,Larry,blue -2,Curly,red -3,Moe,green diff --git a/tests/integration/sources_test/seeds/other_source_table.csv b/tests/integration/sources_test/seeds/other_source_table.csv deleted file mode 100644 index a92b2cb8e..000000000 --- a/tests/integration/sources_test/seeds/other_source_table.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,color -1,blue -2,red -3,green diff --git a/tests/integration/sources_test/seeds/other_table.csv b/tests/integration/sources_test/seeds/other_table.csv deleted file mode 100644 index 56bdda92b..000000000 --- a/tests/integration/sources_test/seeds/other_table.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name -1,Larry -2,Curly -3,Moe diff --git 
a/tests/integration/sources_test/seeds/source.csv b/tests/integration/sources_test/seeds/source.csv deleted file mode 100644 index a8f87412e..000000000 --- a/tests/integration/sources_test/seeds/source.csv +++ /dev/null @@ -1,101 +0,0 @@ -favorite_color,id,first_name,email,ip_address,updated_at -blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31 -blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14 -blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57 -blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43 -blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29 -blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50 -blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59 -blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15 -blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48 -blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49 -blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11 -blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14 -blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36 -blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04 -blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56 -blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47 -blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35 -blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57 -blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15 -blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05 -blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49 -blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56 -blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43 
-blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44 -blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02 -blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25 -blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18 -blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37 -blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12 -blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23 -blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50 -blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27 -blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44 -blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05 -blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39 -blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52 -blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25 -blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51 -blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29 -blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54 -blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56 -blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31 -blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28 -blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07 -blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15 -blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45 -blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19 -blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24 -blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48 -blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49 
-green,51,Wayne,whudson1e@princeton.edu,'90.61.24.102',1983-07-03 16:58:12 -green,52,Rose,rjames1f@plala.or.jp,'240.83.81.10',1995-06-08 11:46:23 -green,53,Louise,lcox1g@theglobeandmail.com,'105.11.82.145',2016-09-19 14:45:51 -green,54,Kenneth,kjohnson1h@independent.co.uk,'139.5.45.94',1976-08-17 11:26:19 -green,55,Donna,dbrown1i@amazon.co.uk,'19.45.169.45',2006-05-27 16:51:40 -green,56,Johnny,jvasquez1j@trellian.com,'118.202.238.23',1975-11-17 08:42:32 -green,57,Patrick,pramirez1k@tamu.edu,'231.25.153.198',1997-08-06 11:51:09 -green,58,Helen,hlarson1l@prweb.com,'8.40.21.39',1993-08-04 19:53:40 -green,59,Patricia,pspencer1m@gmpg.org,'212.198.40.15',1977-08-03 16:37:27 -green,60,Joseph,jspencer1n@marriott.com,'13.15.63.238',2005-07-23 20:22:06 -green,61,Phillip,pschmidt1o@blogtalkradio.com,'177.98.201.190',1976-05-19 21:47:44 -green,62,Joan,jwebb1p@google.ru,'105.229.170.71',1972-09-07 17:53:47 -green,63,Phyllis,pkennedy1q@imgur.com,'35.145.8.244',2000-01-01 22:33:37 -green,64,Katherine,khunter1r@smh.com.au,'248.168.205.32',1991-01-09 06:40:24 -green,65,Laura,lvasquez1s@wiley.com,'128.129.115.152',1997-10-23 12:04:56 -green,66,Juan,jdunn1t@state.gov,'44.228.124.51',2004-11-10 05:07:35 -green,67,Judith,jholmes1u@wiley.com,'40.227.179.115',1977-08-02 17:01:45 -green,68,Beverly,bbaker1v@wufoo.com,'208.34.84.59',2016-03-06 20:07:23 -green,69,Lawrence,lcarr1w@flickr.com,'59.158.212.223',1988-09-13 06:07:21 -green,70,Gloria,gwilliams1x@mtv.com,'245.231.88.33',1995-03-18 22:32:46 -green,71,Steven,ssims1y@cbslocal.com,'104.50.58.255',2001-08-05 21:26:20 -green,72,Betty,bmills1z@arstechnica.com,'103.177.214.220',1981-12-14 21:26:54 -green,73,Mildred,mfuller20@prnewswire.com,'151.158.8.130',2000-04-19 10:13:55 -green,74,Donald,dday21@icq.com,'9.178.102.255',1972-12-03 00:58:24 -green,75,Eric,ethomas22@addtoany.com,'85.2.241.227',1992-11-01 05:59:30 -green,76,Joyce,jarmstrong23@sitemeter.com,'169.224.20.36',1985-10-24 06:50:01 
-green,77,Maria,mmartinez24@amazonaws.com,'143.189.167.135',2005-10-05 05:17:42 -green,78,Harry,hburton25@youtube.com,'156.47.176.237',1978-03-26 05:53:33 -green,79,Kevin,klawrence26@hao123.com,'79.136.183.83',1994-10-12 04:38:52 -green,80,David,dhall27@prweb.com,'133.149.172.153',1976-12-15 16:24:24 -green,81,Kathy,kperry28@twitter.com,'229.242.72.228',1979-03-04 02:58:56 -green,82,Adam,aprice29@elegantthemes.com,'13.145.21.10',1982-11-07 11:46:59 -green,83,Brandon,bgriffin2a@va.gov,'73.249.128.212',2013-10-30 05:30:36 -green,84,Henry,hnguyen2b@discovery.com,'211.36.214.242',1985-01-09 06:37:27 -green,85,Eric,esanchez2c@edublogs.org,'191.166.188.251',2004-05-01 23:21:42 -green,86,Jason,jlee2d@jimdo.com,'193.92.16.182',1973-01-08 09:05:39 -green,87,Diana,drichards2e@istockphoto.com,'19.130.175.245',1994-10-05 22:50:49 -green,88,Andrea,awelch2f@abc.net.au,'94.155.233.96',2002-04-26 08:41:44 -green,89,Louis,lwagner2g@miitbeian.gov.cn,'26.217.34.111',2003-08-25 07:56:39 -green,90,Jane,jsims2h@seesaa.net,'43.4.220.135',1987-03-20 20:39:04 -green,91,Larry,lgrant2i@si.edu,'97.126.79.34',2000-09-07 20:26:19 -green,92,Louis,ldean2j@prnewswire.com,'37.148.40.127',2011-09-16 20:12:14 -green,93,Jennifer,jcampbell2k@xing.com,'38.106.254.142',1988-07-15 05:06:49 -green,94,Wayne,wcunningham2l@google.com.hk,'223.28.26.187',2009-12-15 06:16:54 -green,95,Lori,lstevens2m@icq.com,'181.250.181.58',1984-10-28 03:29:19 -green,96,Judy,jsimpson2n@marriott.com,'180.121.239.219',1986-02-07 15:18:10 -green,97,Phillip,phoward2o@usa.gov,'255.247.0.175',2002-12-26 08:44:45 -green,98,Gloria,gwalker2p@usa.gov,'156.140.7.128',1997-10-04 07:58:58 -green,99,Paul,pjohnson2q@umn.edu,'183.59.198.197',1991-11-14 12:33:55 -green,100,Frank,fgreene2r@blogspot.com,'150.143.68.121',2010-06-12 23:55:39 diff --git a/tests/integration/sources_test/test_sources.py b/tests/integration/sources_test/test_sources.py deleted file mode 100644 index a8b7017b9..000000000 --- 
a/tests/integration/sources_test/test_sources.py +++ /dev/null @@ -1,203 +0,0 @@ -import json -import os -from datetime import datetime, timedelta - -import yaml - -import dbt.tracking -import dbt.version -from dbt.events.functions import reset_metadata_vars -from tests.integration.base import DBTIntegrationTest, use_profile, AnyFloat, \ - AnyStringWith - - -class BaseSourcesTest(DBTIntegrationTest): - @property - def schema(self): - return "sources" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'quoting': {'database': True, 'schema': True, 'identifier': True}, - 'seeds': { - 'quote_columns': True, - }, - } - - def setUp(self): - super().setUp() - os.environ['DBT_TEST_SCHEMA_NAME_VARIABLE'] = 'test_run_schema' - - def tearDown(self): - del os.environ['DBT_TEST_SCHEMA_NAME_VARIABLE'] - super().tearDown() - - def run_dbt_with_vars(self, cmd, *args, **kwargs): - vars_dict = { - 'test_run_schema': self.unique_schema(), - 'test_loaded_at': self.adapter.quote('updated_at'), - } - cmd.extend(['--vars', yaml.safe_dump(vars_dict)]) - return self.run_dbt(cmd, *args, **kwargs) - - -class SuccessfulSourcesTest(BaseSourcesTest): - def setUp(self): - super().setUp() - self.run_dbt_with_vars(['seed']) - self.maxDiff = None - self._id = 101 - # this is the db initial value - self.last_inserted_time = "2016-09-19T14:45:51+00:00" - os.environ['DBT_ENV_CUSTOM_ENV_key'] = 'value' - - def tearDown(self): - super().tearDown() - del os.environ['DBT_ENV_CUSTOM_ENV_key'] - - def _set_updated_at_to(self, delta): - insert_time = datetime.utcnow() + delta - timestr = insert_time.strftime("%Y-%m-%d %H:%M:%S") - # favorite_color,id,first_name,email,ip_address,updated_at - insert_id = self._id - self._id += 1 - raw_sql = """INSERT INTO {schema}.{source} - ({quoted_columns}) - VALUES ( - 'blue',{id},'Jake','abc@example.com','192.168.1.1','{time}' - )""" - quoted_columns = ','.join( - 
self.adapter.quote(c) if self.adapter_type != 'bigquery' else c - for c in - ('favorite_color', 'id', 'first_name', - 'email', 'ip_address', 'updated_at') - ) - self.run_sql( - raw_sql, - kwargs={ - 'schema': self.unique_schema(), - 'time': timestr, - 'id': insert_id, - 'source': self.adapter.quote('source'), - 'quoted_columns': quoted_columns, - } - ) - self.last_inserted_time = insert_time.strftime( - "%Y-%m-%dT%H:%M:%S+00:00") - - -class TestSourceFreshness(SuccessfulSourcesTest): - - def _assert_freshness_results(self, path, state): - self.assertTrue(os.path.exists(path)) - with open(path) as fp: - data = json.load(fp) - - assert set(data) == {'metadata', 'results', 'elapsed_time'} - assert 'generated_at' in data['metadata'] - assert isinstance(data['elapsed_time'], float) - self.assertBetween(data['metadata']['generated_at'], - self.freshness_start_time) - assert data['metadata']['dbt_schema_version'] == 'https://schemas.getdbt.com/dbt/sources/v3.json' - assert data['metadata']['dbt_version'] == dbt.version.__version__ - assert data['metadata']['invocation_id'] == dbt.tracking.active_user.invocation_id - key = 'key' - if os.name == 'nt': - key = key.upper() - assert data['metadata']['env'] == {key: 'value'} - - last_inserted_time = self.last_inserted_time - - self.assertEqual(len(data['results']), 1) - - self.assertEqual(data['results'], [ - { - 'unique_id': 'source.test.test_source.test_table', - 'max_loaded_at': last_inserted_time, - 'snapshotted_at': AnyStringWith(), - 'max_loaded_at_time_ago_in_s': AnyFloat(), - 'status': state, - 'criteria': { - 'filter': None, - 'warn_after': {'count': 10, 'period': 'hour'}, - 'error_after': {'count': 18, 'period': 'hour'}, - }, - 'adapter_response': {}, - 'thread_id': AnyStringWith('Thread-'), - 'execution_time': AnyFloat(), - 'timing': [ - { - 'name': 'compile', - 'started_at': AnyStringWith(), - 'completed_at': AnyStringWith(), - }, - { - 'name': 'execute', - 'started_at': AnyStringWith(), - 'completed_at': 
AnyStringWith(), - } - ] - } - ]) - - def _run_source_freshness(self): - # test_source.test_table should have a loaded_at field of `updated_at` - # and a freshness of warn_after: 10 hours, error_after: 18 hours - # by default, our data set is way out of date! - self.freshness_start_time = datetime.utcnow() - results = self.run_dbt_with_vars( - ['source', 'freshness', '-o', 'target/error_source.json'], - expect_pass=False - ) - self.assertEqual(len(results), 1) - self.assertEqual(results[0].status, 'error') - self._assert_freshness_results('target/error_source.json', 'error') - - self._set_updated_at_to(timedelta(hours=-12)) - self.freshness_start_time = datetime.utcnow() - results = self.run_dbt_with_vars( - ['source', 'freshness', '-o', 'target/warn_source.json'], - ) - self.assertEqual(len(results), 1) - self.assertEqual(results[0].status, 'warn') - self._assert_freshness_results('target/warn_source.json', 'warn') - - self._set_updated_at_to(timedelta(hours=-2)) - self.freshness_start_time = datetime.utcnow() - results = self.run_dbt_with_vars( - ['source', 'freshness', '-o', 'target/pass_source.json'], - ) - self.assertEqual(len(results), 1) - self.assertEqual(results[0].status, 'pass') - self._assert_freshness_results('target/pass_source.json', 'pass') - - @use_profile('redshift') - def test_redshift_source_freshness(self): - reset_metadata_vars() - self._run_source_freshness() - - -class TestUnquotedSources(SuccessfulSourcesTest): - @property - def project_config(self): - cfg = super().project_config - cfg['quoting'] = { - 'identifier': False, - 'schema': False, - 'database': False, - } - return cfg - - @use_profile('redshift') - def test_redshift_catalog(self): - reset_metadata_vars() - self.run_dbt_with_vars(['run']) - reset_metadata_vars() - self.run_dbt_with_vars(['docs', 'generate']) From 047b5c2356809e481b9a1fdce08069a3abc19297 Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Thu, 26 Jan 2023 15:58:35 
-0600 Subject: [PATCH 008/113] [CT-1630] Convert column_types test for dbt-redshift (#276) * init conversion of columns_types test for redshift, removal of old test, dev_requirements pin changed * reset dev-requirement pointer --- .../adapter/column_types/test_column_types.py | 59 +++++++++++++++ .../macros/test_alter_column_type.sql | 5 -- .../column_type_tests/macros/test_is_type.sql | 72 ------------------- .../column_type_tests/rs_models/model.sql | 17 ----- .../column_type_tests/rs_models/schema.yml | 22 ------ .../column_type_tests/test_column_types.py | 21 ------ 6 files changed, 59 insertions(+), 137 deletions(-) create mode 100644 tests/functional/adapter/column_types/test_column_types.py delete mode 100644 tests/integration/column_type_tests/macros/test_alter_column_type.sql delete mode 100644 tests/integration/column_type_tests/macros/test_is_type.sql delete mode 100644 tests/integration/column_type_tests/rs_models/model.sql delete mode 100644 tests/integration/column_type_tests/rs_models/schema.yml delete mode 100644 tests/integration/column_type_tests/test_column_types.py diff --git a/tests/functional/adapter/column_types/test_column_types.py b/tests/functional/adapter/column_types/test_column_types.py new file mode 100644 index 000000000..81d5ca0fa --- /dev/null +++ b/tests/functional/adapter/column_types/test_column_types.py @@ -0,0 +1,59 @@ +import pytest +from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes + +_MODEL_SQL = """ +select + 1::smallint as smallint_col, + 2::int as int_col, + 3::bigint as bigint_col, + 4::int2 as int2_col, + 5::int4 as int4_col, + 6::int8 as int8_col, + 7::integer as integer_col, + 8.0::real as real_col, + 9.0::float4 as float4_col, + 10.0::float8 as float8_col, + 11.0::float as float_col, + 12.0::double precision as double_col, + 13.0::numeric as numeric_col, + 14.0::decimal as decimal_col, + '15'::varchar(20) as varchar_col, + '16'::text as text_col +""" + +_SCHEMA_YML = """ +version: 2 
+models: + - name: model + tests: + - is_type: + column_map: + smallint_col: ['integer', 'number'] + int_col: ['integer', 'number'] + bigint_col: ['integer', 'number'] + int2_col: ['integer', 'number'] + int4_col: ['integer', 'number'] + int8_col: ['integer', 'number'] + integer_col: ['integer', 'number'] + real_col: ['float', 'number'] + double_col: ['float', 'number'] + float4_col: ['float', 'number'] + float8_col: ['float', 'number'] + float_col: ['float', 'number'] + numeric_col: ['numeric', 'number'] + decimal_col: ['numeric', 'number'] + varchar_col: ['string', 'not number'] + text_col: ['string', 'not number'] +""" + +class TestRedshiftColumnTypes(BaseColumnTypes): + + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": _MODEL_SQL, + "schema.yml": _SCHEMA_YML + } + + def test_run_and_test(self, project): + self.run_and_test() \ No newline at end of file diff --git a/tests/integration/column_type_tests/macros/test_alter_column_type.sql b/tests/integration/column_type_tests/macros/test_alter_column_type.sql deleted file mode 100644 index 133d59fad..000000000 --- a/tests/integration/column_type_tests/macros/test_alter_column_type.sql +++ /dev/null @@ -1,5 +0,0 @@ --- Macro to alter a column type -{% macro test_alter_column_type(model_name, column_name, new_column_type) %} - {% set relation = ref(model_name) %} - {{ alter_column_type(relation, column_name, new_column_type) }} -{% endmacro %} diff --git a/tests/integration/column_type_tests/macros/test_is_type.sql b/tests/integration/column_type_tests/macros/test_is_type.sql deleted file mode 100644 index 2f1ffde2b..000000000 --- a/tests/integration/column_type_tests/macros/test_is_type.sql +++ /dev/null @@ -1,72 +0,0 @@ - -{% macro simple_type_check_column(column, check) %} - {% if check == 'string' %} - {{ return(column.is_string()) }} - {% elif check == 'float' %} - {{ return(column.is_float()) }} - {% elif check == 'number' %} - {{ return(column.is_number()) }} - {% elif check == 
'numeric' %} - {{ return(column.is_numeric()) }} - {% elif check == 'integer' %} - {{ return(column.is_integer()) }} - {% else %} - {% do exceptions.raise_compiler_error('invalid type check value: ' ~ check) %} - {% endif %} -{% endmacro %} - -{% macro type_check_column(column, type_checks) %} - {% set failures = [] %} - {% for type_check in type_checks %} - {% if type_check.startswith('not ') %} - {% if simple_type_check_column(column, type_check[4:]) %} - {% do log('simple_type_check_column got ', True) %} - {% do failures.append(type_check) %} - {% endif %} - {% else %} - {% if not simple_type_check_column(column, type_check) %} - {% do failures.append(type_check) %} - {% endif %} - {% endif %} - {% endfor %} - {% if (failures | length) > 0 %} - {% do log('column ' ~ column.name ~ ' had failures: ' ~ failures, info=True) %} - {% endif %} - {% do return((failures | length) == 0) %} -{% endmacro %} - -{% test is_type(model, column_map) %} - {% if not execute %} - {{ return(None) }} - {% endif %} - {% if not column_map %} - {% do exceptions.raise_compiler_error('test_is_type must have a column name') %} - {% endif %} - {% set columns = adapter.get_columns_in_relation(model) %} - {% if (column_map | length) != (columns | length) %} - {% set column_map_keys = (column_map | list | string) %} - {% set column_names = (columns | map(attribute='name') | list | string) %} - {% do exceptions.raise_compiler_error('did not get all the columns/all columns not specified:\n' ~ column_map_keys ~ '\nvs\n' ~ column_names) %} - {% endif %} - {% set bad_columns = [] %} - {% for column in columns %} - {% set column_key = (column.name | lower) %} - {% if column_key in column_map %} - {% set type_checks = column_map[column_key] %} - {% if not type_checks %} - {% do exceptions.raise_compiler_error('no type checks?') %} - {% endif %} - {% if not type_check_column(column, type_checks) %} - {% do bad_columns.append(column.name) %} - {% endif %} - {% else %} - {% do 
exceptions.raise_compiler_error('column key ' ~ column_key ~ ' not found in ' ~ (column_map | list | string)) %} - {% endif %} - {% endfor %} - {% do log('bad columns: ' ~ bad_columns, info=True) %} - {% for bad_column in bad_columns %} - select '{{ bad_column }}' as bad_column - {{ 'union all' if not loop.last }} - {% endfor %} - select * from (select 1 limit 0) as nothing -{% endtest %} diff --git a/tests/integration/column_type_tests/rs_models/model.sql b/tests/integration/column_type_tests/rs_models/model.sql deleted file mode 100644 index f8e972103..000000000 --- a/tests/integration/column_type_tests/rs_models/model.sql +++ /dev/null @@ -1,17 +0,0 @@ -select - 1::smallint as smallint_col, - 2::int as int_col, - 3::bigint as bigint_col, - 4::int2 as int2_col, - 5::int4 as int4_col, - 6::int8 as int8_col, - 7::integer as integer_col, - 8.0::real as real_col, - 9.0::float4 as float4_col, - 10.0::float8 as float8_col, - 11.0::float as float_col, - 12.0::double precision as double_col, - 13.0::numeric as numeric_col, - 14.0::decimal as decimal_col, - '15'::varchar(20) as varchar_col, - '16'::text as text_col diff --git a/tests/integration/column_type_tests/rs_models/schema.yml b/tests/integration/column_type_tests/rs_models/schema.yml deleted file mode 100644 index 5b35ce025..000000000 --- a/tests/integration/column_type_tests/rs_models/schema.yml +++ /dev/null @@ -1,22 +0,0 @@ -version: 2 -models: - - name: model - tests: - - is_type: - column_map: - smallint_col: ['integer', 'number'] - int_col: ['integer', 'number'] - bigint_col: ['integer', 'number'] - int2_col: ['integer', 'number'] - int4_col: ['integer', 'number'] - int8_col: ['integer', 'number'] - integer_col: ['integer', 'number'] - real_col: ['float', 'number'] - double_col: ['float', 'number'] - float4_col: ['float', 'number'] - float8_col: ['float', 'number'] - float_col: ['float', 'number'] - numeric_col: ['numeric', 'number'] - decimal_col: ['numeric', 'number'] - varchar_col: ['string', 'not 
number'] - text_col: ['string', 'not number'] diff --git a/tests/integration/column_type_tests/test_column_types.py b/tests/integration/column_type_tests/test_column_types.py deleted file mode 100644 index 52e4c2f5b..000000000 --- a/tests/integration/column_type_tests/test_column_types.py +++ /dev/null @@ -1,21 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestColumnTypes(DBTIntegrationTest): - @property - def schema(self): - return 'column_types' - - def run_and_test(self): - self.assertEqual(len(self.run_dbt(['run'])), 1) - self.assertEqual(len(self.run_dbt(['test'])), 1) - - -class TestRedshiftColumnTypes(TestColumnTypes): - @property - def models(self): - return 'rs_models' - - @use_profile('redshift') - def test_redshift_column_types(self): - self.run_and_test() From 007edf94ec7862f124f74ee2d23d502eca72ef11 Mon Sep 17 00:00:00 2001 From: Alexander Smolyakov Date: Fri, 27 Jan 2023 19:05:05 +0400 Subject: [PATCH 009/113] [CI/CD] Update release workflow and introduce workflow for nightly releases (#266) * Add workflows * Set default `test_run` value to `true` * Update .bumpversion.cfg * Resolve review comments - Update workflow docs - Change workflow name - Set `test_run` default value to `true` * Update Slack secret * Resolve review comments --- .bumpversion.cfg | 8 +- .github/workflows/nightly-release.yml | 109 +++++++++ .github/workflows/release.yml | 309 ++++++++++++++------------ scripts/env-setup.sh | 10 + 4 files changed, 288 insertions(+), 148 deletions(-) create mode 100644 .github/workflows/nightly-release.yml create mode 100644 scripts/env-setup.sh diff --git a/.bumpversion.cfg b/.bumpversion.cfg index a3e0f8c88..933bfdfc7 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -3,8 +3,12 @@ current_version = 1.5.0a1 parse = (?P\d+) \.(?P\d+) \.(?P\d+) - ((?Pa|b|rc)(?P\d+))? + ((?Pa|b|rc) + (?P\d+) # pre-release version num + )(\.(?P[a-z..0-9]+) + )? 
serialize = + {major}.{minor}.{patch}{prerelease}{num}.{nightly} {major}.{minor}.{patch}{prerelease}{num} {major}.{minor}.{patch} commit = False @@ -22,4 +26,6 @@ values = [bumpversion:part:num] first_value = 1 +[bumpversion:part:nightly] + [bumpversion:file:dbt/adapters/redshift/__version__.py] diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml new file mode 100644 index 000000000..b668d62ec --- /dev/null +++ b/.github/workflows/nightly-release.yml @@ -0,0 +1,109 @@ +# **what?** +# Nightly releases to GitHub and PyPI. This workflow produces the following outcome: +# - generate and validate data for night release (commit SHA, version number, release branch); +# - pass data to release workflow; +# - night release will be pushed to GitHub as a draft release; +# - night build will be pushed to test PyPI; +# +# **why?** +# Ensure an automated and tested release process for nightly builds +# +# **when?** +# This workflow runs on schedule or can be run manually on demand. 
+ +name: Nightly Test Release to GitHub and PyPI + +on: + workflow_dispatch: # for manual triggering + schedule: + - cron: 0 9 * * * + +permissions: + contents: write # this is the permission that allows creating a new release + +defaults: + run: + shell: bash + +env: + RELEASE_BRANCH: "main" + +jobs: + aggregate-release-data: + runs-on: ubuntu-latest + + outputs: + commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }} + version_number: ${{ steps.nightly-release-version.outputs.number }} + release_branch: ${{ steps.release-branch.outputs.name }} + + steps: + - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}" + uses: actions/checkout@v3 + with: + ref: ${{ env.RELEASE_BRANCH }} + + - name: "Resolve Commit To Release" + id: resolve-commit-sha + run: | + commit_sha=$(git rev-parse HEAD) + echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT + + - name: "Get Current Version Number" + id: version-number-sources + run: | + current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '` + echo "current_version=$current_version" >> $GITHUB_OUTPUT + + - name: "Audit Version And Parse Into Parts" + id: semver + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ steps.version-number-sources.outputs.current_version }} + + - name: "Get Current Date" + id: current-date + run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT + + - name: "Generate Nightly Release Version Number" + id: nightly-release-version + run: | + number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}+nightly" + echo "number=$number" >> $GITHUB_OUTPUT + + - name: "Audit Nightly Release Version And Parse Into Parts" + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ steps.nightly-release-version.outputs.number }} + + - name: "Set Release Branch" + id: release-branch + run: | + echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT + + log-outputs-aggregate-release-data: + runs-on: 
ubuntu-latest + needs: [aggregate-release-data] + + steps: + - name: "[DEBUG] Log Outputs" + run: | + echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }} + echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }} + echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} + + release-github-pypi: + needs: [aggregate-release-data] + + uses: ./.github/workflows/release.yml + with: + sha: ${{ needs.aggregate-release-data.outputs.commit_sha }} + target_branch: ${{ needs.aggregate-release-data.outputs.release-branch }} + version_number: ${{ needs.aggregate-release-data.outputs.version_number }} + build_script_path: "scripts/build-dist.sh" + env_setup_script_path: "scripts/env-setup.sh" + s3_bucket_name: "core-team-artifacts" + package_test_command: "dbt --version" + test_run: true + nightly_release: true + secrets: inherit diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5fd0291e9..a9f60734b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,14 +1,19 @@ # **what?** -# Take the given commit, run unit tests specifically on that sha, build and -# package it, and then release to GitHub with that specific build (PyPi to follow later) - +# Release workflow provides the following steps: +# - checkout the given commit; +# - validate version in sources and changelog file for given version; +# - bump the version and generate a changelog if needed; +# - merge all changes to the target branch if needed; +# - run unit and integration tests against given commit; +# - build and package that SHA; +# - release it to GitHub and PyPI with that specific build; +# # **why?** # Ensure an automated and tested release process - +# # **when?** -# This will only run manually with a given sha and version - -name: Build, Test, and Package +# This workflow can be run manually on demand or can be called by other workflows +name: Release to GitHub and PyPI on: 
workflow_dispatch: @@ -17,17 +22,85 @@ on: description: "The last commit sha in the release" type: string required: true - changelog_path: - description: "Path to changes log" + target_branch: + description: "The branch to release from" + type: string + required: true + version_number: + description: "The release version number (i.e. 1.0.0b1)" + type: string + required: true + build_script_path: + description: "Build script path" + type: string + default: "scripts/build-dist.sh" + required: true + env_setup_script_path: + description: "Environment setup script path" type: string - default: "./CHANGELOG.md" + default: "scripts/env-setup.sh" + required: false + s3_bucket_name: + description: "AWS S3 bucket name" + type: string + default: "core-team-artifacts" + required: true + package_test_command: + description: "Package test command" + type: string + default: "dbt --version" + required: true + test_run: + description: "Test run (Publish release as draft)" + type: boolean + default: true + required: false + nightly_release: + description: "Nightly release to dev environment" + type: boolean + default: false required: false + workflow_call: + inputs: + sha: + description: "The last commit sha in the release" + type: string + required: true + target_branch: + description: "The branch to release from" + type: string + required: true version_number: description: "The release version number (i.e. 
1.0.0b1)" type: string required: true + build_script_path: + description: "Build script path" + type: string + default: "scripts/build-dist.sh" + required: true + env_setup_script_path: + description: "Environment setup script path" + type: string + default: "scripts/env-setup.sh" + required: false + s3_bucket_name: + description: "AWS S3 bucket name" + type: string + default: "core-team-artifacts" + required: true + package_test_command: + description: "Package test command" + type: string + default: "dbt --version" + required: true test_run: - description: "Test run (Publish release as draft to GitHub)" + description: "Test run (Publish release as draft)" + type: boolean + default: true + required: false + nightly_release: + description: "Nightly release to dev environment" type: boolean default: false required: false @@ -35,10 +108,6 @@ on: permissions: contents: write # this is the permission that allows creating a new release -env: - PYTHON_TARGET_VERSION: 3.8 - ARTIFACT_RETENTION_DAYS: 2 - defaults: run: shell: bash @@ -50,164 +119,110 @@ jobs: steps: - name: "[DEBUG] Print Variables" run: | - echo The last commit sha in the release: ${{ inputs.sha }} - echo The release version number: ${{ inputs.version_number }} - echo The path to the changelog markdpown: ${{ inputs.changelog_path }} - echo This is a test run: ${{ inputs.test_run }} - echo Python target version: ${{ env.PYTHON_TARGET_VERSION }} - echo Artifact retention days: ${{ env.ARTIFACT_RETENTION_DAYS }} - - unit: - name: Unit Test - runs-on: ubuntu-latest + echo The last commit sha in the release: ${{ inputs.sha }} + echo The branch to release from: ${{ inputs.target_branch }} + echo The release version number: ${{ inputs.version_number }} + echo Build script path: ${{ inputs.build_script_path }} + echo Environment setup script path: ${{ inputs.env_setup_script_path }} + echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }} + echo Package test command: ${{ inputs.package_test_command }} + echo Test 
run: ${{ inputs.test_run }} + echo Nightly release: ${{ inputs.nightly_release }} - env: - TOXENV: "unit" + bump-version-generate-changelog: + name: Bump package version, Generate changelog - steps: - - name: "Checkout Commit - ${{ inputs.sha }}" - uses: actions/checkout@v3 - with: - persist-credentials: false - ref: ${{ github.event.inputs.sha }} - - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" - run: | - python -m pip install --user --upgrade pip - python -m pip install tox - python -m pip --version - python -m tox --version + uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main - - name: "Run Tox" - run: tox - - build: - name: Build Packages - - runs-on: ubuntu-latest - - steps: - - name: "Checkout Commit - ${{ inputs.sha }}" - uses: actions/checkout@v3 - with: - persist-credentials: false - ref: ${{ inputs.sha }} - - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" - run: | - python -m pip install --user --upgrade pip - python -m pip install --upgrade setuptools wheel twine check-wheel-contents - python -m pip --version - - - name: "Build Distributions" - run: ./scripts/build-dist.sh - - - name: "[DEBUG] Show Distributions" - run: ls -lh dist/ - - - name: "Check Distribution Descriptions" - run: | - twine check dist/* - - - name: "[DEBUG] Check Wheel Contents" - run: | - check-wheel-contents dist/*.whl --ignore W007,W008 + with: + sha: ${{ inputs.sha }} + version_number: ${{ inputs.version_number }} + target_branch: ${{ inputs.target_branch }} + env_setup_script_path: ${{ inputs.env_setup_script_path }} + test_run: ${{ inputs.test_run }} + nightly_release: ${{ inputs.nightly_release }} - - name: "Upload Build Artifact - ${{ inputs.version_number }}" - uses: 
actions/upload-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: | - dist/ - !dist/dbt-${{ inputs.version_number }}.tar.gz - retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }} + secrets: inherit - test-build: - name: Verify Packages + log-outputs-bump-version-generate-changelog: + name: "[Log output] Bump package version, Generate changelog" + if: ${{ !failure() && !cancelled() }} - needs: [unit, build] + needs: [bump-version-generate-changelog] runs-on: ubuntu-latest steps: - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" + - name: Print variables run: | - python -m pip install --user --upgrade pip - python -m pip install --upgrade wheel - python -m pip --version - - - name: "Download Build Artifact - ${{ inputs.version_number }}" - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: dist/ + echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }} + echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} - - name: "[DEBUG] Show Distributions" - run: ls -lh dist/ - - - name: "Install Wheel Distributions" - run: | - find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + build-test-package: + name: Build, Test, Package + if: ${{ !failure() && !cancelled() }} + needs: [bump-version-generate-changelog] - - name: "[DEBUG] Check Wheel Distributions" - run: | - dbt --version + uses: dbt-labs/dbt-release/.github/workflows/build.yml@main - - name: "Install Source Distributions" - run: | - find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + with: + sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} + version_number: ${{ inputs.version_number }} + changelog_path: ${{ 
needs.bump-version-generate-changelog.outputs.changelog_path }} + build_script_path: ${{ inputs.build_script_path }} + s3_bucket_name: ${{ inputs.s3_bucket_name }} + package_test_command: ${{ inputs.package_test_command }} + test_run: ${{ inputs.test_run }} + nightly_release: ${{ inputs.nightly_release }} - - name: "[DEBUG] Check Source Distributions" - run: | - dbt --version + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.PRODUCTION_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.PRODUCTION_AWS_SECRET_ACCESS_KEY }} github-release: name: GitHub Release if: ${{ !failure() && !cancelled() }} - needs: test-build - # pin to commit since this is workflow is WIP but this commit has been tested as working - uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@7b6e01d73d2c8454e06302cc66ef4c2dbd4dbe4e + needs: [bump-version-generate-changelog, build-test-package] + + uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main with: - sha: ${{ inputs.sha }} + sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} version_number: ${{ inputs.version_number }} - changelog_path: ${{ inputs.changelog_path }} + changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} test_run: ${{ inputs.test_run }} pypi-release: - name: Pypi release - # only release to PyPi if we're not testing - will release to PyPi test when workflow gets rewritten - if: ${{ inputs.test_run == false }} + name: PyPI Release - runs-on: ubuntu-latest + needs: [github-release] - needs: github-release + uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main - environment: PypiProd - steps: - - name: "Download Build Artifact - ${{ inputs.version_number }}" - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: dist/ - - - name: Publish distribution to PyPI - uses: pypa/gh-action-pypi-publish@v1.4.2 - with: - password: ${{ secrets.PYPI_API_TOKEN }} + with: + version_number: ${{ inputs.version_number }} + 
test_run: ${{ inputs.test_run }} + + secrets: + PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + + slack-notification: + name: Slack Notification + if: ${{ failure() }} + + needs: + [ + bump-version-generate-changelog, + build-test-package, + github-release, + pypi-release, + ] + + uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main + with: + status: "failure" + + secrets: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }} diff --git a/scripts/env-setup.sh b/scripts/env-setup.sh new file mode 100644 index 000000000..866d8f749 --- /dev/null +++ b/scripts/env-setup.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# Set TOXENV environment variable for subsequent steps +echo "TOXENV=integration-redshift" >> $GITHUB_ENV +# Set INTEGRATION_TESTS_SECRETS_PREFIX environment variable for subsequent steps +# All GH secrets that have this prefix will be set as environment variables +echo "INTEGRATION_TESTS_SECRETS_PREFIX=REDSHIFT_TEST" >> $GITHUB_ENV +# Set environment variables required for integration tests +echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV +echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV +echo "DBT_TEST_USER_3=dbt_test_user_3" >> $GITHUB_ENV From 16b122b4f3f0ef433987ab44e6b09789eeddc20a Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Fri, 27 Jan 2023 15:43:30 -0600 Subject: [PATCH 010/113] Remove PR requirement from Changelog (#280) * remove PR requirement for changelogs, remove issue for dependency/security * update comment * add comment about multiple issues --- .changie.yaml | 96 ++++++++++++++++++++++------- .github/workflows/bot-changelog.yml | 5 +- 2 files changed, 76 insertions(+), 25 deletions(-) diff --git a/.changie.yaml b/.changie.yaml index 77d5422e4..dc8bfef5b 100644 --- a/.changie.yaml +++ b/.changie.yaml @@ -6,57 +6,109 @@ changelogPath: CHANGELOG.md versionExt: md versionFormat: '## dbt-redshift {{.Version}} - {{.Time.Format "January 02, 2006"}}' 
kindFormat: '### {{.Kind}}' -changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-redshift/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-redshift/pull/{{.Custom.PR}}))' +changeFormat: |- + {{- $IssueList := list }} + {{- $changes := splitList " " $.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}}) kinds: - label: Breaking Changes - label: Features - label: Fixes - label: Under the Hood - label: Dependencies - changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-redshift/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-redshift/pull/{{.Custom.PR}}))' + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " $.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 - label: Security - changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-redshift/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-redshift/pull/{{.Custom.PR}}))' + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " 
$.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 + +newlines: + afterChangelogHeader: 1 + afterKind: 1 + afterChangelogVersion: 1 + beforeKind: 1 + endOfVersion: 1 + custom: - key: Author label: GitHub Username(s) (separated by a single space if multiple) type: string minLength: 3 - key: Issue - label: GitHub Issue Number - type: int - minLength: 4 -- key: PR - label: GitHub Pull Request Number + label: GitHub Issue Number (separated by a single space if multiple) type: int minLength: 4 + footerFormat: | {{- $contributorDict := dict }} {{- /* any names added to this list should be all lowercase for later matching purposes */}} - {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "mikealfare" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }} + {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }} {{- range $change := .Changes }} {{- $authorList := splitList " " $change.Custom.Author }} - {{- /* loop through all authors for a PR */}} + {{- /* loop through all authors for a single changelog */}} {{- range $author := $authorList }} {{- $authorLower := lower $author }} {{- /* we only want to include non-core team 
contributors */}} {{- if not (has $authorLower $core_team)}} - {{- $pr := $change.Custom.PR }} - {{- /* check if this contributor has other PRs associated with them already */}} - {{- if hasKey $contributorDict $author }} - {{- $prList := get $contributorDict $author }} - {{- $prList = append $prList $pr }} - {{- $contributorDict := set $contributorDict $author $prList }} - {{- else }} - {{- $prList := list $change.Custom.PR }} - {{- $contributorDict := set $contributorDict $author $prList }} - {{- end }} - {{- end}} + {{- $changeList := splitList " " $change.Custom.Author }} + {{- $changeLink := $change.Kind }} + {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }} + {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $change.Custom.PR }} + {{- else }} + {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-redshift/issues/nbr)" | replace "nbr" $change.Custom.Issue }} + {{- end }} + {{- /* check if this contributor has other changes associated with them already */}} + {{- if hasKey $contributorDict $author }} + {{- $contributionList := get $contributorDict $author }} + {{- $contributionList = append $contributionList $changeLink }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- else }} + {{- $contributionList := list $changeLink }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- end }} + {{- end}} {{- end}} {{- end }} {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}} {{- if $contributorDict}} ### Contributors {{- range $k,$v := $contributorDict }} - - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-redshift/pull/{{$element}}){{end}}) + - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}}) {{- end }} {{- end }} diff --git 
a/.github/workflows/bot-changelog.yml b/.github/workflows/bot-changelog.yml index 1df5573bb..8122ab8b4 100644 --- a/.github/workflows/bot-changelog.yml +++ b/.github/workflows/bot-changelog.yml @@ -9,7 +9,6 @@ # time: # custom: # Author: -# Issue: 4904 # PR: # # **why?** @@ -40,7 +39,7 @@ jobs: matrix: include: - label: "dependencies" - changie_kind: "Dependency" + changie_kind: "Dependencies" - label: "snyk" changie_kind: "Security" runs-on: ubuntu-latest @@ -58,4 +57,4 @@ jobs: commit_message: "Add automated changelog yaml from template for bot PR" changie_kind: ${{ matrix.changie_kind }} label: ${{ matrix.label }} - custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: 150\n PR: ${{ github.event.pull_request.number }}\n" + custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}\n" From 2541018ab477e56907fbf64a9fd55cf272c69c24 Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Fri, 27 Jan 2023 17:01:18 -0800 Subject: [PATCH 011/113] [CTE-1941] Stand-alone Python module for RedshiftColumn (#291) --- .changes/unreleased/Features-20230127-155317.yaml | 6 ++++++ dbt/adapters/redshift/__init__.py | 2 +- dbt/adapters/redshift/column.py | 5 +++++ dbt/adapters/redshift/impl.py | 2 +- dbt/adapters/redshift/relation.py | 5 ----- 5 files changed, 13 insertions(+), 7 deletions(-) create mode 100644 .changes/unreleased/Features-20230127-155317.yaml create mode 100644 dbt/adapters/redshift/column.py diff --git a/.changes/unreleased/Features-20230127-155317.yaml b/.changes/unreleased/Features-20230127-155317.yaml new file mode 100644 index 000000000..c18c00a32 --- /dev/null +++ b/.changes/unreleased/Features-20230127-155317.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Stand-alone Python module for RedshiftColumn +time: 2023-01-27T15:53:17.999882-08:00 +custom: + Author: nssalian + Issue: "290" diff --git a/dbt/adapters/redshift/__init__.py 
b/dbt/adapters/redshift/__init__.py index be348f6d5..64ac384fe 100644 --- a/dbt/adapters/redshift/__init__.py +++ b/dbt/adapters/redshift/__init__.py @@ -1,6 +1,6 @@ from dbt.adapters.redshift.connections import RedshiftConnectionManager # noqa from dbt.adapters.redshift.connections import RedshiftCredentials -from dbt.adapters.redshift.relation import RedshiftColumn # noqa +from dbt.adapters.redshift.column import RedshiftColumn # noqa from dbt.adapters.redshift.relation import RedshiftRelation # noqa: F401 from dbt.adapters.redshift.impl import RedshiftAdapter diff --git a/dbt/adapters/redshift/column.py b/dbt/adapters/redshift/column.py new file mode 100644 index 000000000..4d48746a6 --- /dev/null +++ b/dbt/adapters/redshift/column.py @@ -0,0 +1,5 @@ +from dbt.adapters.base import Column + + +class RedshiftColumn(Column): + pass # redshift does not inherit from postgres here diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 4ece2ff51..45c983b3e 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -5,7 +5,7 @@ from dbt.adapters.base.meta import available from dbt.adapters.postgres import PostgresAdapter from dbt.adapters.redshift import RedshiftConnectionManager -from dbt.adapters.redshift import RedshiftColumn +from dbt.adapters.redshift.column import RedshiftColumn from dbt.adapters.redshift import RedshiftRelation from dbt.events import AdapterLogger import dbt.exceptions diff --git a/dbt/adapters/redshift/relation.py b/dbt/adapters/redshift/relation.py index 33e7c6897..fa9b8e92f 100644 --- a/dbt/adapters/redshift/relation.py +++ b/dbt/adapters/redshift/relation.py @@ -1,4 +1,3 @@ -from dbt.adapters.base import Column from dataclasses import dataclass from dbt.adapters.postgres.relation import PostgresRelation @@ -12,7 +11,3 @@ class RedshiftRelation(PostgresRelation): # see: https://docs.aws.amazon.com/redshift/latest/dg/r_names.html def relation_max_name_length(self): return 127 - - -class 
RedshiftColumn(Column): - pass # redshift does not inherit from postgres here From 52972789aab99bec9e82e758cc7c4e62da13de83 Mon Sep 17 00:00:00 2001 From: Alexander Smolyakov Date: Mon, 30 Jan 2023 19:18:56 +0400 Subject: [PATCH 012/113] Update release workflow (#292) - Update AWS secrets - Rework condition for Slack notification --- .github/workflows/release.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a9f60734b..1c0885001 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -176,8 +176,8 @@ jobs: nightly_release: ${{ inputs.nightly_release }} secrets: - AWS_ACCESS_KEY_ID: ${{ secrets.PRODUCTION_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.PRODUCTION_AWS_SECRET_ACCESS_KEY }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} github-release: name: GitHub Release @@ -210,7 +210,7 @@ jobs: slack-notification: name: Slack Notification - if: ${{ failure() }} + if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }} needs: [ From 14ac78af0afe0da6b433a5cf2e2846367c9cd619 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 31 Jan 2023 12:01:24 -0600 Subject: [PATCH 013/113] update changie type to be string to allow multiple issues (#289) * update type to be string to allow multiple issues * update reqired length to 1 * fix logic to list contributor issues --- .changie.yaml | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/.changie.yaml b/.changie.yaml index dc8bfef5b..7b961180e 100644 --- a/.changie.yaml +++ b/.changie.yaml @@ -72,8 +72,8 @@ custom: minLength: 3 - key: Issue label: GitHub Issue Number (separated by a single space if multiple) - type: int - minLength: 4 + type: string + minLength: 1 footerFormat: | {{- $contributorDict := dict }} @@ -87,19 +87,28 @@ footerFormat: | {{- /* we only want to include 
non-core team contributors */}} {{- if not (has $authorLower $core_team)}} {{- $changeList := splitList " " $change.Custom.Author }} + {{- $IssueList := list }} {{- $changeLink := $change.Kind }} {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }} - {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $change.Custom.PR }} + {{- $changes := splitList " " $change.Custom.PR }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} {{- else }} - {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-redshift/issues/nbr)" | replace "nbr" $change.Custom.Issue }} + {{- $changes := splitList " " $change.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} {{- end }} {{- /* check if this contributor has other changes associated with them already */}} {{- if hasKey $contributorDict $author }} {{- $contributionList := get $contributorDict $author }} - {{- $contributionList = append $contributionList $changeLink }} + {{- $contributionList = concat $contributionList $IssueList }} {{- $contributorDict := set $contributorDict $author $contributionList }} {{- else }} - {{- $contributionList := list $changeLink }} + {{- $contributionList := $IssueList }} {{- $contributorDict := set $contributorDict $author $contributionList }} {{- end }} {{- end}} From 471bb1404536f8f55b18afba30db84443c17dbcb Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Tue, 31 Jan 2023 13:16:44 -0800 Subject: [PATCH 014/113] Migrate persist docs to an adapter zone variant. (#283) * Migrate persist docs to an adapter zone variant. 
* Move sha and names to match new sha in core * Remove Shas now that base conversion has been merged in --------- Co-authored-by: Mila Page --- tests/functional/adapter/test_persist_docs.py | 57 ++++++++ .../models-column-missing/missing_column.sql | 2 - .../models-column-missing/schema.yml | 8 -- .../persist_docs_tests/models/my_fun_docs.md | 10 -- .../models/no_docs_model.sql | 1 - .../persist_docs_tests/models/schema.yml | 70 ---------- .../persist_docs_tests/models/table_model.sql | 2 - .../persist_docs_tests/models/view_model.sql | 2 - .../persist_docs_tests/seeds/seed.csv | 3 - .../persist_docs_tests/test_persist_docs.py | 132 ------------------ 10 files changed, 57 insertions(+), 230 deletions(-) create mode 100644 tests/functional/adapter/test_persist_docs.py delete mode 100644 tests/integration/persist_docs_tests/models-column-missing/missing_column.sql delete mode 100644 tests/integration/persist_docs_tests/models-column-missing/schema.yml delete mode 100644 tests/integration/persist_docs_tests/models/my_fun_docs.md delete mode 100644 tests/integration/persist_docs_tests/models/no_docs_model.sql delete mode 100644 tests/integration/persist_docs_tests/models/schema.yml delete mode 100644 tests/integration/persist_docs_tests/models/table_model.sql delete mode 100644 tests/integration/persist_docs_tests/models/view_model.sql delete mode 100644 tests/integration/persist_docs_tests/seeds/seed.csv delete mode 100644 tests/integration/persist_docs_tests/test_persist_docs.py diff --git a/tests/functional/adapter/test_persist_docs.py b/tests/functional/adapter/test_persist_docs.py new file mode 100644 index 000000000..4d18f8ec6 --- /dev/null +++ b/tests/functional/adapter/test_persist_docs.py @@ -0,0 +1,57 @@ +import json +import pytest + +from dbt.tests.util import run_dbt + +from dbt.tests.adapter.persist_docs.test_persist_docs import ( + BasePersistDocsBase, + BasePersistDocs, + BasePersistDocsColumnMissing, + BasePersistDocsCommentOnQuotedColumn, +) + + +class 
TestPersistDocs(BasePersistDocs): + pass + + +class TestPersistDocsColumnMissing(BasePersistDocsColumnMissing): + pass + + +class TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn): + pass + + +class TestPersistDocsLateBinding(BasePersistDocsBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + 'models': { + 'test': { + '+persist_docs': { + "relation": True, + "columns": True, + }, + 'view_model': { + 'bind': False, + } + } + } + } + + def test_comment_on_late_binding_view(self, project): + run_dbt() + run_dbt(['docs', 'generate']) + with open('target/catalog.json') as fp: + catalog_data = json.load(fp) + assert 'nodes' in catalog_data + assert len(catalog_data['nodes']) == 4 + table_node = catalog_data['nodes']['model.test.table_model'] + view_node = self._assert_has_table_comments(table_node) + + view_node = catalog_data['nodes']['model.test.view_model'] + self._assert_has_view_comments(view_node, False, False) + + no_docs_node = catalog_data['nodes']['model.test.no_docs_model'] + self._assert_has_view_comments(no_docs_node, False, False) diff --git a/tests/integration/persist_docs_tests/models-column-missing/missing_column.sql b/tests/integration/persist_docs_tests/models-column-missing/missing_column.sql deleted file mode 100644 index 642b0f14a..000000000 --- a/tests/integration/persist_docs_tests/models-column-missing/missing_column.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='table') }} -select 1 as id, 'Ed' as name diff --git a/tests/integration/persist_docs_tests/models-column-missing/schema.yml b/tests/integration/persist_docs_tests/models-column-missing/schema.yml deleted file mode 100644 index aa7b4f888..000000000 --- a/tests/integration/persist_docs_tests/models-column-missing/schema.yml +++ /dev/null @@ -1,8 +0,0 @@ -version: 2 -models: - - name: missing_column - columns: - - name: id - description: "test id column description" - - name: column_that_does_not_exist - description: 
"comment that cannot be created" diff --git a/tests/integration/persist_docs_tests/models/my_fun_docs.md b/tests/integration/persist_docs_tests/models/my_fun_docs.md deleted file mode 100644 index f3c0fbf55..000000000 --- a/tests/integration/persist_docs_tests/models/my_fun_docs.md +++ /dev/null @@ -1,10 +0,0 @@ -{% docs my_fun_doc %} -name Column description "with double quotes" -and with 'single quotes' as welll as other; -'''abc123''' -reserved -- characters --- -/* comment */ -Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - -{% enddocs %} diff --git a/tests/integration/persist_docs_tests/models/no_docs_model.sql b/tests/integration/persist_docs_tests/models/no_docs_model.sql deleted file mode 100644 index e39a7a156..000000000 --- a/tests/integration/persist_docs_tests/models/no_docs_model.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as id, 'Alice' as name diff --git a/tests/integration/persist_docs_tests/models/schema.yml b/tests/integration/persist_docs_tests/models/schema.yml deleted file mode 100644 index 5a9091624..000000000 --- a/tests/integration/persist_docs_tests/models/schema.yml +++ /dev/null @@ -1,70 +0,0 @@ -version: 2 - -models: - - name: table_model - description: | - Table model description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - columns: - - name: id - description: | - id Column description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - - name: name - description: | - Some stuff here and then a call to - {{ doc('my_fun_doc')}} - - name: view_model - description: | - View model description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ 
and $$ unlabeled $$ dollar-quoting - columns: - - name: id - description: | - id Column description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - -seeds: - - name: seed - description: | - Seed model description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - columns: - - name: id - description: | - id Column description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - - name: name - description: | - Some stuff here and then a call to - {{ doc('my_fun_doc')}} diff --git a/tests/integration/persist_docs_tests/models/table_model.sql b/tests/integration/persist_docs_tests/models/table_model.sql deleted file mode 100644 index c0e93c3f3..000000000 --- a/tests/integration/persist_docs_tests/models/table_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='table') }} -select 1 as id, 'Joe' as name diff --git a/tests/integration/persist_docs_tests/models/view_model.sql b/tests/integration/persist_docs_tests/models/view_model.sql deleted file mode 100644 index a6f96a16d..000000000 --- a/tests/integration/persist_docs_tests/models/view_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='view') }} -select 2 as id, 'Bob' as name diff --git a/tests/integration/persist_docs_tests/seeds/seed.csv b/tests/integration/persist_docs_tests/seeds/seed.csv deleted file mode 100644 index 1a728c8ab..000000000 --- a/tests/integration/persist_docs_tests/seeds/seed.csv +++ /dev/null @@ -1,3 +0,0 @@ -id,name -1,Alice -2,Bob diff --git a/tests/integration/persist_docs_tests/test_persist_docs.py 
b/tests/integration/persist_docs_tests/test_persist_docs.py deleted file mode 100644 index f0374fb42..000000000 --- a/tests/integration/persist_docs_tests/test_persist_docs.py +++ /dev/null @@ -1,132 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import os - -import json - - -class BasePersistDocsTest(DBTIntegrationTest): - @property - def schema(self): - return "persist_docs" - - @property - def models(self): - return "models" - - def _assert_common_comments(self, *comments): - for comment in comments: - assert '"with double quotes"' in comment - assert """'''abc123'''""" in comment - assert '\n' in comment - assert 'Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting' in comment - assert '/* comment */' in comment - if os.name == 'nt': - assert '--\r\n' in comment or '--\n' in comment - else: - assert '--\n' in comment - - def _assert_has_table_comments(self, table_node): - table_comment = table_node['metadata']['comment'] - assert table_comment.startswith('Table model description') - - table_id_comment = table_node['columns']['id']['comment'] - assert table_id_comment.startswith('id Column description') - - table_name_comment = table_node['columns']['name']['comment'] - assert table_name_comment.startswith( - 'Some stuff here and then a call to') - - self._assert_common_comments( - table_comment, table_id_comment, table_name_comment - ) - - def _assert_has_view_comments(self, view_node, has_node_comments=True, - has_column_comments=True): - view_comment = view_node['metadata']['comment'] - if has_node_comments: - assert view_comment.startswith('View model description') - self._assert_common_comments(view_comment) - else: - assert view_comment is None - - view_id_comment = view_node['columns']['id']['comment'] - if has_column_comments: - assert view_id_comment.startswith('id Column description') - self._assert_common_comments(view_id_comment) - else: - assert view_id_comment is None - - view_name_comment = 
view_node['columns']['name']['comment'] - assert view_name_comment is None - - -class TestPersistDocs(BasePersistDocsTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'models': { - 'test': { - '+persist_docs': { - "relation": True, - "columns": True, - }, - } - } - } - - def run_has_comments_pglike(self): - self.run_dbt() - self.run_dbt(['docs', 'generate']) - with open('target/catalog.json') as fp: - catalog_data = json.load(fp) - assert 'nodes' in catalog_data - assert len(catalog_data['nodes']) == 3 - table_node = catalog_data['nodes']['model.test.table_model'] - view_node = self._assert_has_table_comments(table_node) - - view_node = catalog_data['nodes']['model.test.view_model'] - self._assert_has_view_comments(view_node) - - no_docs_node = catalog_data['nodes']['model.test.no_docs_model'] - self._assert_has_view_comments(no_docs_node, False, False) - - @use_profile('redshift') - def test_redshift_comments(self): - self.run_has_comments_pglike() - - -class TestPersistDocsLateBinding(BasePersistDocsTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'models': { - 'test': { - '+persist_docs': { - "relation": True, - "columns": True, - }, - 'view_model': { - 'bind': False, - } - } - } - } - - @use_profile('redshift') - def test_redshift_late_binding_view(self): - self.run_dbt() - self.run_dbt(['docs', 'generate']) - with open('target/catalog.json') as fp: - catalog_data = json.load(fp) - assert 'nodes' in catalog_data - assert len(catalog_data['nodes']) == 3 - table_node = catalog_data['nodes']['model.test.table_model'] - view_node = self._assert_has_table_comments(table_node) - - view_node = catalog_data['nodes']['model.test.view_model'] - self._assert_has_view_comments(view_node, False, False) - - no_docs_node = catalog_data['nodes']['model.test.no_docs_model'] - self._assert_has_view_comments(no_docs_node, False, False) From cc4859146eed9d2f844e35fc40e3e10e4d465979 Mon Sep 17 00:00:00 2001 From: 
Emily Rockman Date: Thu, 2 Feb 2023 12:21:48 -0600 Subject: [PATCH 015/113] update regex for version bump (#304) --- .bumpversion.cfg | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 933bfdfc7..aa0eb468b 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,11 +1,17 @@ [bumpversion] current_version = 1.5.0a1 -parse = (?P\d+) - \.(?P\d+) - \.(?P\d+) - ((?Pa|b|rc) - (?P\d+) # pre-release version num - )(\.(?P[a-z..0-9]+) + +# `parse` allows parsing the version into the parts we need to check. There are some +# unnamed groups and that's okay because they do not need to be audited. If any part +# of the version passed and does not match the regex, it will fail. +# expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457+nightly` +# excepted failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0` +parse = (?P[\d]+) # major version number + \.(?P[\d]+) # minor version number + \.(?P[\d]+) # patch version number + (((?Pa|b|rc) # optional pre-release type + ?(?P[\d]+?)) # optional pre-release version number + \.?(?P[a-z0-9]+\+[a-z]+)? # optional nightly release indicator )? 
serialize = {major}.{minor}.{patch}{prerelease}{num}.{nightly} From 44ae0880fd3e25cc225a8995651a5f33f91d238c Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Thu, 2 Feb 2023 16:46:51 -0800 Subject: [PATCH 016/113] [CT-1953]: convert redshift_test to functional test (#306) * convert redshift_test to functional * PR comments --- .../redshift_test/test_late_binding_view.py | 52 +++++++++++++++++++ .../redshift_test/models/model.sql | 7 --- tests/integration/redshift_test/seed/seed.csv | 2 - .../redshift_test/test_late_binding_view.py | 39 -------------- 4 files changed, 52 insertions(+), 48 deletions(-) create mode 100644 tests/functional/adapter/redshift_test/test_late_binding_view.py delete mode 100644 tests/integration/redshift_test/models/model.sql delete mode 100644 tests/integration/redshift_test/seed/seed.csv delete mode 100644 tests/integration/redshift_test/test_late_binding_view.py diff --git a/tests/functional/adapter/redshift_test/test_late_binding_view.py b/tests/functional/adapter/redshift_test/test_late_binding_view.py new file mode 100644 index 000000000..7c7bfa69d --- /dev/null +++ b/tests/functional/adapter/redshift_test/test_late_binding_view.py @@ -0,0 +1,52 @@ +import pytest + +from dbt.tests.util import run_dbt, run_sql_with_adapter + +_MODEL_SQL = """{{ + config( + materialized='view', + bind=False + ) +}} +select * from {{ ref('seed') }} +""" + +_SEED_CSV = """ +id,first_name,email,ip_address,updated_at +1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 +""".lstrip() + + +class TestLateBindingView: + + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": _MODEL_SQL, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "seed.csv": _SEED_CSV + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + 'seeds': { + 'quote_columns': False, + } + } + + def test_late_binding_view_query(self, project): + seed_run_result = run_dbt(['seed']) + assert 
len(seed_run_result) == 1 + run_result = run_dbt() + assert len(run_result) == 1 + # drop the table. Use 'cascade' here so that if late-binding views + # didn't work as advertised, the following dbt run will fail. + drop_query = """drop table if exists {}.seed cascade""".format(project.test_schema) + run_sql_with_adapter(project.adapter, drop_query) + run_result = run_dbt() + assert len(run_result) == 1 diff --git a/tests/integration/redshift_test/models/model.sql b/tests/integration/redshift_test/models/model.sql deleted file mode 100644 index 8ca73c7a0..000000000 --- a/tests/integration/redshift_test/models/model.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='view', bind=False - ) -}} - -select * from {{ ref('seed') }} diff --git a/tests/integration/redshift_test/seed/seed.csv b/tests/integration/redshift_test/seed/seed.csv deleted file mode 100644 index ef154f552..000000000 --- a/tests/integration/redshift_test/seed/seed.csv +++ /dev/null @@ -1,2 +0,0 @@ -id,first_name,email,ip_address,updated_at -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 diff --git a/tests/integration/redshift_test/test_late_binding_view.py b/tests/integration/redshift_test/test_late_binding_view.py deleted file mode 100644 index 8098c572b..000000000 --- a/tests/integration/redshift_test/test_late_binding_view.py +++ /dev/null @@ -1,39 +0,0 @@ -import os - -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestLateBindingView(DBTIntegrationTest): - @property - def schema(self): - return 'late_binding_view' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': [self.dir('seed')], - 'seeds': { - 'quote_columns': False, - } - } - - @use_profile('redshift') - def test__redshift_late_binding_view_query(self): - self.assertEqual(len(self.run_dbt(["seed"])), 1) - 
self.assertEqual(len(self.run_dbt()), 1) - # remove the table. Use 'cascade' here so that if late-binding views - # didn't work as advertised, the following dbt run will fail. - drop = 'drop table if exists {}.seed cascade'.format( - self.unique_schema() - ) - self.run_sql(drop) - self.assertEqual(len(self.run_dbt()), 1) From ffa44f8f6e14467f61937f2d61734153402e8e49 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Fri, 3 Feb 2023 10:03:09 -0600 Subject: [PATCH 017/113] update prerelease -> prekind (#305) * update regex for version bump * update to prekind * more renaming --- .bumpversion.cfg | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index aa0eb468b..99c37bce7 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -14,13 +14,13 @@ parse = (?P[\d]+) # major version number \.?(?P[a-z0-9]+\+[a-z]+)? # optional nightly release indicator )? serialize = - {major}.{minor}.{patch}{prerelease}{num}.{nightly} - {major}.{minor}.{patch}{prerelease}{num} + {major}.{minor}.{patch}{prekind}{num}.{nightly} + {major}.{minor}.{patch}{prekind}{num} {major}.{minor}.{patch} commit = False tag = False -[bumpversion:part:prerelease] +[bumpversion:part:prekind] first_value = a optional_value = final values = From e463f006b6c160c8193c9ad0c0bfd1e3e7aeda9e Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Fri, 3 Feb 2023 13:07:50 -0600 Subject: [PATCH 018/113] init push for macro_test conversion (#308) --- .../adapter/test_macros.py} | 33 +++++++++++++++---- .../dispatch-inheritance-models/model.sql | 2 -- .../macro_tests/macros/my_macros.sql | 30 ----------------- tests/integration/macro_tests/models/.gitkeep | 0 .../macro_tests/models/dep_macro.sql | 4 --- .../macro_tests/models/local_macro.sql | 12 ------- .../macro_tests/models/ref_macro.sql | 1 - .../override-get-columns-macros/macros.sql | 3 -- .../override-get-columns-models/model.sql | 5 --- 
.../package_macro_overrides/dbt_project.yml | 7 ---- .../package_macro_overrides/macros/macros.sql | 3 -- tests/integration/macro_tests/seed.sql | 24 -------------- tests/integration/macro_tests/test_macros.py | 15 --------- 13 files changed, 26 insertions(+), 113 deletions(-) rename tests/{integration/macro_tests/no-default-macros/my_macros.sql => functional/adapter/test_macros.py} (55%) delete mode 100644 tests/integration/macro_tests/dispatch-inheritance-models/model.sql delete mode 100644 tests/integration/macro_tests/macros/my_macros.sql delete mode 100644 tests/integration/macro_tests/models/.gitkeep delete mode 100644 tests/integration/macro_tests/models/dep_macro.sql delete mode 100644 tests/integration/macro_tests/models/local_macro.sql delete mode 100644 tests/integration/macro_tests/models/ref_macro.sql delete mode 100644 tests/integration/macro_tests/override-get-columns-macros/macros.sql delete mode 100644 tests/integration/macro_tests/override-get-columns-models/model.sql delete mode 100644 tests/integration/macro_tests/package_macro_overrides/dbt_project.yml delete mode 100644 tests/integration/macro_tests/package_macro_overrides/macros/macros.sql delete mode 100644 tests/integration/macro_tests/seed.sql delete mode 100644 tests/integration/macro_tests/test_macros.py diff --git a/tests/integration/macro_tests/no-default-macros/my_macros.sql b/tests/functional/adapter/test_macros.py similarity index 55% rename from tests/integration/macro_tests/no-default-macros/my_macros.sql rename to tests/functional/adapter/test_macros.py index 8980c6c56..0994cae28 100644 --- a/tests/integration/macro_tests/no-default-macros/my_macros.sql +++ b/tests/functional/adapter/test_macros.py @@ -1,4 +1,12 @@ +import pytest +from dbt.tests.util import run_dbt +_MODEL_SQL = """ +{{ dispatch_to_parent() }} +select 1 as id +""" + +_MACRO_SQL = """ {% macro do_something2(foo2, bar2) %} select @@ -7,19 +15,12 @@ {% endmacro %} - {% macro with_ref() %} {{ ref('table_model') }} {% 
endmacro %} -{# there is no default__dispatch_to_nowhere! #} -{% macro dispatch_to_nowhere() %} - {% set macro = adapter.dispatch('dispatch_to_nowhere') %} - {{ macro() }} -{% endmacro %} - {% macro dispatch_to_parent() %} {% set macro = adapter.dispatch('dispatch_to_parent') %} {{ macro() }} @@ -33,3 +34,21 @@ {% macro postgres__dispatch_to_parent() %} {{ return('') }} {% endmacro %} +""" + +class TestRedshift: + + @pytest.fixture(scope="class") + def macros(self): + return { + "macro.sql": _MACRO_SQL + } + + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": _MODEL_SQL + } + + def test_inherited_macro(self, project): + run_dbt() \ No newline at end of file diff --git a/tests/integration/macro_tests/dispatch-inheritance-models/model.sql b/tests/integration/macro_tests/dispatch-inheritance-models/model.sql deleted file mode 100644 index 7b8c49be3..000000000 --- a/tests/integration/macro_tests/dispatch-inheritance-models/model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ dispatch_to_parent() }} -select 1 as id diff --git a/tests/integration/macro_tests/macros/my_macros.sql b/tests/integration/macro_tests/macros/my_macros.sql deleted file mode 100644 index 827f3f09f..000000000 --- a/tests/integration/macro_tests/macros/my_macros.sql +++ /dev/null @@ -1,30 +0,0 @@ - -{% macro do_something2(foo2, bar2) %} - - select - '{{ foo2 }}' as foo2, - '{{ bar2 }}' as bar2 - -{% endmacro %} - - -{% macro with_ref() %} - - {{ ref('table_model') }} - -{% endmacro %} - - -{% macro dispatch_to_parent() %} - {% set macro = adapter.dispatch('dispatch_to_parent') %} - {{ macro() }} -{% endmacro %} - -{% macro default__dispatch_to_parent() %} - {% set msg = 'No default implementation of dispatch_to_parent' %} - {{ exceptions.raise_compiler_error(msg) }} -{% endmacro %} - -{% macro postgres__dispatch_to_parent() %} - {{ return('') }} -{% endmacro %} diff --git a/tests/integration/macro_tests/models/.gitkeep b/tests/integration/macro_tests/models/.gitkeep deleted file 
mode 100644 index e69de29bb..000000000 diff --git a/tests/integration/macro_tests/models/dep_macro.sql b/tests/integration/macro_tests/models/dep_macro.sql deleted file mode 100644 index 45cfa0040..000000000 --- a/tests/integration/macro_tests/models/dep_macro.sql +++ /dev/null @@ -1,4 +0,0 @@ - -{{ - dbt_integration_project.do_something("arg1", "arg2") -}} diff --git a/tests/integration/macro_tests/models/local_macro.sql b/tests/integration/macro_tests/models/local_macro.sql deleted file mode 100644 index edb66b7a3..000000000 --- a/tests/integration/macro_tests/models/local_macro.sql +++ /dev/null @@ -1,12 +0,0 @@ - -{{ - do_something2("arg1", "arg2") -}} - -union all - -{{ - test.do_something2("arg3", "arg4") -}} - - diff --git a/tests/integration/macro_tests/models/ref_macro.sql b/tests/integration/macro_tests/models/ref_macro.sql deleted file mode 100644 index 0085f6954..000000000 --- a/tests/integration/macro_tests/models/ref_macro.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ with_ref() }} diff --git a/tests/integration/macro_tests/override-get-columns-macros/macros.sql b/tests/integration/macro_tests/override-get-columns-macros/macros.sql deleted file mode 100644 index 73fe0ccfb..000000000 --- a/tests/integration/macro_tests/override-get-columns-macros/macros.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro get_columns_in_relation(relation) %} - {{ return('a string') }} -{% endmacro %} diff --git a/tests/integration/macro_tests/override-get-columns-models/model.sql b/tests/integration/macro_tests/override-get-columns-models/model.sql deleted file mode 100644 index 7be007e24..000000000 --- a/tests/integration/macro_tests/override-get-columns-models/model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{% set result = adapter.get_columns_in_relation(this) %} -{% if execute and result != 'a string' %} - {% do exceptions.raise_compiler_error('overriding get_columns_in_relation failed') %} -{% endif %} -select 1 as id diff --git 
a/tests/integration/macro_tests/package_macro_overrides/dbt_project.yml b/tests/integration/macro_tests/package_macro_overrides/dbt_project.yml deleted file mode 100644 index bcf5c9285..000000000 --- a/tests/integration/macro_tests/package_macro_overrides/dbt_project.yml +++ /dev/null @@ -1,7 +0,0 @@ -name: 'package_macro_overrides' -version: '1.0' -config-version: 2 - -profile: 'default' - -macro-paths: ["macros"] diff --git a/tests/integration/macro_tests/package_macro_overrides/macros/macros.sql b/tests/integration/macro_tests/package_macro_overrides/macros/macros.sql deleted file mode 100644 index 73fe0ccfb..000000000 --- a/tests/integration/macro_tests/package_macro_overrides/macros/macros.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro get_columns_in_relation(relation) %} - {{ return('a string') }} -{% endmacro %} diff --git a/tests/integration/macro_tests/seed.sql b/tests/integration/macro_tests/seed.sql deleted file mode 100644 index 8556efdec..000000000 --- a/tests/integration/macro_tests/seed.sql +++ /dev/null @@ -1,24 +0,0 @@ -create table {schema}.expected_dep_macro ( - foo TEXT, - bar TEXT -); - -create table {schema}.expected_local_macro ( - foo2 TEXT, - bar2 TEXT -); - -create table {schema}.seed ( - id integer, - updated_at timestamp -); - -insert into {schema}.expected_dep_macro (foo, bar) -values ('arg1', 'arg2'); - -insert into {schema}.expected_local_macro (foo2, bar2) -values ('arg1', 'arg2'), ('arg3', 'arg4'); - -insert into {schema}.seed (id, updated_at) -values (1, '2017-01-01'), (2, '2017-01-02'); - diff --git a/tests/integration/macro_tests/test_macros.py b/tests/integration/macro_tests/test_macros.py deleted file mode 100644 index 2817332ac..000000000 --- a/tests/integration/macro_tests/test_macros.py +++ /dev/null @@ -1,15 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestDispatchMacroUseParent(DBTIntegrationTest): - @property - def schema(self): - return "test_macros" - - @property - def 
models(self): - return "dispatch-inheritance-models" - - @use_profile('redshift') - def test_redshift_inherited_macro(self): - self.run_dbt(['run']) From 28d22bd8abf53882c7f4ddde6d4b611a5c924a9d Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Fri, 3 Feb 2023 16:39:19 -0800 Subject: [PATCH 019/113] convert test_store_test_failures to functional test (#301) * convert test_store_test_failures to functional test * convert test_store_test_failures to functional test * remove dev requirements override --- .../test_store_test_failures.py | 5 + .../models/fine_model.sql | 1 - ...odel_but_with_a_no_good_very_long_name.sql | 1 - .../models/problematic_model.sql | 11 --- .../models/schema.yml | 40 -------- .../expected/expected_accepted_values.csv | 3 - .../seeds/expected/expected_failing_test.csv | 11 --- ...expected_not_null_problematic_model_id.csv | 3 - .../expected_unique_problematic_model_id.csv | 3 - .../seeds/people.csv | 11 --- .../test_store_test_failures.py | 93 ------------------- .../tests/failing_test.sql | 1 - .../tests/passing_test.sql | 2 - 13 files changed, 5 insertions(+), 180 deletions(-) create mode 100644 tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py delete mode 100644 tests/integration/store_test_failures_tests/models/fine_model.sql delete mode 100644 tests/integration/store_test_failures_tests/models/fine_model_but_with_a_no_good_very_long_name.sql delete mode 100644 tests/integration/store_test_failures_tests/models/problematic_model.sql delete mode 100644 tests/integration/store_test_failures_tests/models/schema.yml delete mode 100644 tests/integration/store_test_failures_tests/seeds/expected/expected_accepted_values.csv delete mode 100644 tests/integration/store_test_failures_tests/seeds/expected/expected_failing_test.csv delete mode 100644 tests/integration/store_test_failures_tests/seeds/expected/expected_not_null_problematic_model_id.csv delete mode 
100644 tests/integration/store_test_failures_tests/seeds/expected/expected_unique_problematic_model_id.csv delete mode 100644 tests/integration/store_test_failures_tests/seeds/people.csv delete mode 100644 tests/integration/store_test_failures_tests/test_store_test_failures.py delete mode 100644 tests/integration/store_test_failures_tests/tests/failing_test.sql delete mode 100644 tests/integration/store_test_failures_tests/tests/passing_test.sql diff --git a/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py new file mode 100644 index 000000000..e6c0f38b4 --- /dev/null +++ b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py @@ -0,0 +1,5 @@ +from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import TestStoreTestFailures + + +class RedshiftTestStoreTestFailures(TestStoreTestFailures): + pass diff --git a/tests/integration/store_test_failures_tests/models/fine_model.sql b/tests/integration/store_test_failures_tests/models/fine_model.sql deleted file mode 100644 index 94b923a17..000000000 --- a/tests/integration/store_test_failures_tests/models/fine_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('people') }} diff --git a/tests/integration/store_test_failures_tests/models/fine_model_but_with_a_no_good_very_long_name.sql b/tests/integration/store_test_failures_tests/models/fine_model_but_with_a_no_good_very_long_name.sql deleted file mode 100644 index 97536ffaf..000000000 --- a/tests/integration/store_test_failures_tests/models/fine_model_but_with_a_no_good_very_long_name.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as quite_long_column_name diff --git a/tests/integration/store_test_failures_tests/models/problematic_model.sql b/tests/integration/store_test_failures_tests/models/problematic_model.sql deleted file mode 100644 index e780d6b00..000000000 --- 
a/tests/integration/store_test_failures_tests/models/problematic_model.sql +++ /dev/null @@ -1,11 +0,0 @@ -select * from {{ ref('people') }} - -union all - -select * from {{ ref('people') }} -where id in (1,2) - -union all - -select null as id, first_name, last_name, email, gender, ip_address from {{ ref('people') }} -where id in (3,4) diff --git a/tests/integration/store_test_failures_tests/models/schema.yml b/tests/integration/store_test_failures_tests/models/schema.yml deleted file mode 100644 index f01a9e350..000000000 --- a/tests/integration/store_test_failures_tests/models/schema.yml +++ /dev/null @@ -1,40 +0,0 @@ -version: 2 - -models: - - - name: fine_model - columns: - - name: id - tests: - - unique - - not_null - - - name: problematic_model - columns: - - name: id - tests: - - unique: - store_failures: true - - not_null - - name: first_name - tests: - # test truncation of really long test name - - accepted_values: - values: - - Jack - - Kathryn - - Gerald - - Bonnie - - Harold - - Jacqueline - - Wanda - - Craig - # - Gary - # - Rose - - - name: fine_model_but_with_a_no_good_very_long_name - columns: - - name: quite_long_column_name - tests: - # test truncation of really long test name with builtin - - unique diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_accepted_values.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_accepted_values.csv deleted file mode 100644 index 02f28435b..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_accepted_values.csv +++ /dev/null @@ -1,3 +0,0 @@ -value_field,n_records -Gary,1 -Rose,1 diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_failing_test.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_failing_test.csv deleted file mode 100644 index d9e7257f1..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_failing_test.csv +++ /dev/null @@ -1,11 +0,0 
@@ -id,first_name,last_name,email,gender,ip_address -1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 -2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 -3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 -4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 -5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 -6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 -7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 -8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 -9,Gary,Day,gday8@nih.gov,Male,35.81.68.186 -10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_not_null_problematic_model_id.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_not_null_problematic_model_id.csv deleted file mode 100644 index 95fef8a25..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_not_null_problematic_model_id.csv +++ /dev/null @@ -1,3 +0,0 @@ -id,first_name,last_name,email,gender,ip_address -,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 -,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_unique_problematic_model_id.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_unique_problematic_model_id.csv deleted file mode 100644 index 431d54ef8..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_unique_problematic_model_id.csv +++ /dev/null @@ -1,3 +0,0 @@ -unique_field,n_records -2,2 -1,2 \ No newline at end of file diff --git a/tests/integration/store_test_failures_tests/seeds/people.csv b/tests/integration/store_test_failures_tests/seeds/people.csv deleted file mode 100644 index d9e7257f1..000000000 --- a/tests/integration/store_test_failures_tests/seeds/people.csv +++ /dev/null @@ -1,11 +0,0 @@ -id,first_name,last_name,email,gender,ip_address 
-1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 -2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 -3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 -4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 -5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 -6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 -7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 -8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 -9,Gary,Day,gday8@nih.gov,Male,35.81.68.186 -10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 diff --git a/tests/integration/store_test_failures_tests/test_store_test_failures.py b/tests/integration/store_test_failures_tests/test_store_test_failures.py deleted file mode 100644 index 9cc05c211..000000000 --- a/tests/integration/store_test_failures_tests/test_store_test_failures.py +++ /dev/null @@ -1,93 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, FakeArgs, use_profile - - -class TestStoreTestFailures(DBTIntegrationTest): - @property - def schema(self): - return "test_store_test_failures" - - def tearDown(self): - test_audit_schema = self.unique_schema() + "_dbt_test__audit" - with self.adapter.connection_named('__test'): - self._drop_schema_named(self.default_database, test_audit_schema) - - super().tearDown() - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - "config-version": 2, - "test-paths": ["tests"], - "seeds": { - "quote_columns": False, - "test": { - "expected": self.column_type_overrides() - }, - }, - } - - def column_type_overrides(self): - return {} - - def run_tests_store_one_failure(self): - test_audit_schema = self.unique_schema() + "_dbt_test__audit" - - self.run_dbt(["seed"]) - self.run_dbt(["run"]) - self.run_dbt(["test"], expect_pass=False) - - # one test is configured with store_failures: true, make sure it worked - self.assertTablesEqual("unique_problematic_model_id", "expected_unique_problematic_model_id", 
test_audit_schema) - - def run_tests_store_failures_and_assert(self): - test_audit_schema = self.unique_schema() + "_dbt_test__audit" - - self.run_dbt(["seed"]) - self.run_dbt(["run"]) - # make sure this works idempotently for all tests - self.run_dbt(["test", "--store-failures"], expect_pass=False) - results = self.run_dbt(["test", "--store-failures"], expect_pass=False) - - # compare test results - actual = [(r.status, r.failures) for r in results] - expected = [('pass', 0), ('pass', 0), ('pass', 0), ('pass', 0), - ('fail', 2), ('fail', 2), ('fail', 2), ('fail', 10),] - self.assertEqual(sorted(actual), sorted(expected)) - - # compare test results stored in database - self.assertTablesEqual("failing_test", "expected_failing_test", test_audit_schema) - self.assertTablesEqual("not_null_problematic_model_id", "expected_not_null_problematic_model_id", test_audit_schema) - self.assertTablesEqual("unique_problematic_model_id", "expected_unique_problematic_model_id", test_audit_schema) - self.assertTablesEqual("accepted_values_problematic_mo_c533ab4ca65c1a9dbf14f79ded49b628", "expected_accepted_values", test_audit_schema) - - -class RedshiftTestStoreTestFailures(TestStoreTestFailures): - - def column_type_overrides(self): - return { - "expected_not_null_problematic_model_id": { - "+column_types": { - "email": "varchar(26)", - "first_name": "varchar(10)", - }, - }, - "expected_unique_problematic_model_id": { - "+column_types": { - "n_records": "bigint", - }, - }, - "expected_accepted_values": { - "+column_types": { - "value_field": "varchar(10)", - "n_records": "bigint", - }, - }, - } - - @use_profile('redshift') - def test__redshift__store_and_assert(self): - self.run_tests_store_failures_and_assert() diff --git a/tests/integration/store_test_failures_tests/tests/failing_test.sql b/tests/integration/store_test_failures_tests/tests/failing_test.sql deleted file mode 100644 index 1bb5ae5ba..000000000 --- a/tests/integration/store_test_failures_tests/tests/failing_test.sql 
+++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('fine_model') }} diff --git a/tests/integration/store_test_failures_tests/tests/passing_test.sql b/tests/integration/store_test_failures_tests/tests/passing_test.sql deleted file mode 100644 index 15c9a7a64..000000000 --- a/tests/integration/store_test_failures_tests/tests/passing_test.sql +++ /dev/null @@ -1,2 +0,0 @@ -select * from {{ ref('fine_model') }} -where false From 678335ac79e06088ab1e5ce534abbc3ba6a3dc84 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Mon, 6 Feb 2023 19:00:01 -0800 Subject: [PATCH 020/113] Ct 1955/remove column comments tests (#307) * Migrate persist docs to an adapter zone variant. * Move sha and names to match new sha in core * Remove Shas now that base conversion has been merged in * Remove unneeded integration test. --------- Co-authored-by: Mila Page --- .../models/quote_model.sql | 1 - .../column_comments_tests/models/schema.yml | 9 ---- .../test_column_comments.py | 43 ------------------- 3 files changed, 53 deletions(-) delete mode 100644 tests/integration/column_comments_tests/models/quote_model.sql delete mode 100644 tests/integration/column_comments_tests/models/schema.yml delete mode 100644 tests/integration/column_comments_tests/test_column_comments.py diff --git a/tests/integration/column_comments_tests/models/quote_model.sql b/tests/integration/column_comments_tests/models/quote_model.sql deleted file mode 100644 index 2255b4bd7..000000000 --- a/tests/integration/column_comments_tests/models/quote_model.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as {{ adapter.quote("2id") }} diff --git a/tests/integration/column_comments_tests/models/schema.yml b/tests/integration/column_comments_tests/models/schema.yml deleted file mode 100644 index 1e82165fa..000000000 --- a/tests/integration/column_comments_tests/models/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: quote_model - description: "model to test column 
quotes and comments" - columns: - - name: 2id - description: "XXX My description" - quote: true - diff --git a/tests/integration/column_comments_tests/test_column_comments.py b/tests/integration/column_comments_tests/test_column_comments.py deleted file mode 100644 index 0cd8c2940..000000000 --- a/tests/integration/column_comments_tests/test_column_comments.py +++ /dev/null @@ -1,43 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - -import json - - -class TestColumnComment(DBTIntegrationTest): - @property - def schema(self): - return "column_comment" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 'config-version': 2, - 'models': { - 'test': { - 'materialized': 'table', - '+persist_docs': { - "relation": True, - "columns": True, - }, - } - } - } - - def run_has_comments(self): - self.run_dbt() - self.run_dbt(['docs', 'generate']) - with open('target/catalog.json') as fp: - catalog_data = json.load(fp) - assert 'nodes' in catalog_data - assert len(catalog_data['nodes']) == 1 - column_node = catalog_data['nodes']['model.test.quote_model'] - column_comment = column_node['columns']['2id']['comment'] - assert column_comment.startswith('XXX') - - @use_profile('redshift') - def test_redshift_comments(self): - self.run_has_comments() From 3600e179567168dc2d66a5baf18ab2b5e9450d6e Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Tue, 7 Feb 2023 13:04:54 -0800 Subject: [PATCH 021/113] [CT-2029]:Convert simple seed to a functional test (#310) * Convert simple seed to functional.WIP * Change to using BaseSimpleSeedColumnOverride class * Remove integ test completely * Fixes * Schema fix * Rename to schema_yml * Remove branch from dev-req * Simplify --- tests/functional/adapter/test_simple_seed.py | 86 +++ .../simple_seed_test/macros/schema_test.sql | 22 - .../models-downstream-seed/model.sql | 1 - .../simple_seed_test/models-exist/model.sql | 1 - .../simple_seed_test/models-rs/schema.yml | 
47 -- tests/integration/simple_seed_test/seed.sql | 512 ------------------ .../seeds-config/seed_disabled.csv | 21 - .../seeds-config/seed_enabled.csv | 21 - .../seeds-config/seed_tricky.csv | 7 - .../simple_seed_test/seeds/seed_actual.csv | 501 ----------------- .../test_seed_type_override.py | 58 -- 11 files changed, 86 insertions(+), 1191 deletions(-) create mode 100644 tests/functional/adapter/test_simple_seed.py delete mode 100644 tests/integration/simple_seed_test/macros/schema_test.sql delete mode 100644 tests/integration/simple_seed_test/models-downstream-seed/model.sql delete mode 100644 tests/integration/simple_seed_test/models-exist/model.sql delete mode 100644 tests/integration/simple_seed_test/models-rs/schema.yml delete mode 100644 tests/integration/simple_seed_test/seed.sql delete mode 100644 tests/integration/simple_seed_test/seeds-config/seed_disabled.csv delete mode 100644 tests/integration/simple_seed_test/seeds-config/seed_enabled.csv delete mode 100644 tests/integration/simple_seed_test/seeds-config/seed_tricky.csv delete mode 100644 tests/integration/simple_seed_test/seeds/seed_actual.csv delete mode 100644 tests/integration/simple_seed_test/test_seed_type_override.py diff --git a/tests/functional/adapter/test_simple_seed.py b/tests/functional/adapter/test_simple_seed.py new file mode 100644 index 000000000..5c57f1895 --- /dev/null +++ b/tests/functional/adapter/test_simple_seed.py @@ -0,0 +1,86 @@ +import pytest +from dbt.tests.adapter.simple_seed.test_seed_type_override import BaseSimpleSeedColumnOverride +from dbt.tests.adapter.utils.base_utils import run_dbt + +_SCHEMA_YML = """ +version: 2 +seeds: +- name: seed_enabled + columns: + - name: birthday + tests: + - column_type: + type: date + - name: seed_id + tests: + - column_type: + type: character varying(256) + +- name: seed_tricky + columns: + - name: seed_id + tests: + - column_type: + type: integer + - name: seed_id_str + tests: + - column_type: + type: character varying(256) + - 
name: a_bool + tests: + - column_type: + type: boolean + - name: looks_like_a_bool + tests: + - column_type: + type: character varying(256) + - name: a_date + tests: + - column_type: + type: timestamp without time zone + - name: looks_like_a_date + tests: + - column_type: + type: character varying(256) + - name: relative + tests: + - column_type: + type: character varying(9) + - name: weekday + tests: + - column_type: + type: character varying(8) +""".lstrip() + + +class TestSimpleSeedColumnOverride(BaseSimpleSeedColumnOverride): + @pytest.fixture(scope="class") + def schema(self): + return "simple_seed" + + @pytest.fixture(scope="class") + def models(self): + return { + "models-rs.yml": _SCHEMA_YML + } + + @staticmethod + def seed_enabled_types(): + return { + "seed_id": "text", + "birthday": "date", + } + + @staticmethod + def seed_tricky_types(): + return { + 'seed_id_str': 'text', + 'looks_like_a_bool': 'text', + 'looks_like_a_date': 'text', + } + + def test_redshift_simple_seed_with_column_override_redshift(self, project): + seed_results = run_dbt(["seed"]) + assert len(seed_results) == 2 + test_results = run_dbt(["test"]) + assert len(test_results) == 10 diff --git a/tests/integration/simple_seed_test/macros/schema_test.sql b/tests/integration/simple_seed_test/macros/schema_test.sql deleted file mode 100644 index 5c7f25964..000000000 --- a/tests/integration/simple_seed_test/macros/schema_test.sql +++ /dev/null @@ -1,22 +0,0 @@ - -{% test column_type(model, column_name, type) %} - - {% set cols = adapter.get_columns_in_relation(model) %} - - {% set col_types = {} %} - {% for col in cols %} - {% do col_types.update({col.name: col.data_type}) %} - {% endfor %} - - {% set validation_message = 'Got a column type of ' ~ col_types.get(column_name) ~ ', expected ' ~ type %} - - {% set val = 0 if col_types.get(column_name) == type else 1 %} - {% if val == 1 and execute %} - {{ log(validation_message, info=True) }} - {% endif %} - - select '{{ validation_message }}' as 
validation_error - from (select true) as nothing - where {{ val }} = 1 - -{% endtest %} diff --git a/tests/integration/simple_seed_test/models-downstream-seed/model.sql b/tests/integration/simple_seed_test/models-downstream-seed/model.sql deleted file mode 100644 index 6d56d7de5..000000000 --- a/tests/integration/simple_seed_test/models-downstream-seed/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('seed_actual') }} diff --git a/tests/integration/simple_seed_test/models-exist/model.sql b/tests/integration/simple_seed_test/models-exist/model.sql deleted file mode 100644 index 809a05ba8..000000000 --- a/tests/integration/simple_seed_test/models-exist/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ this.schema }}.seed_expected diff --git a/tests/integration/simple_seed_test/models-rs/schema.yml b/tests/integration/simple_seed_test/models-rs/schema.yml deleted file mode 100644 index 00a79bf93..000000000 --- a/tests/integration/simple_seed_test/models-rs/schema.yml +++ /dev/null @@ -1,47 +0,0 @@ -version: 2 -seeds: -- name: seed_enabled - columns: - - name: birthday - tests: - - column_type: - type: date - - name: id - tests: - - column_type: - type: character varying(256) - -- name: seed_tricky - columns: - - name: id - tests: - - column_type: - type: integer - - name: id_str - tests: - - column_type: - type: character varying(256) - - name: a_bool - tests: - - column_type: - type: boolean - - name: looks_like_a_bool - tests: - - column_type: - type: character varying(256) - - name: a_date - tests: - - column_type: - type: timestamp without time zone - - name: looks_like_a_date - tests: - - column_type: - type: character varying(256) - - name: relative - tests: - - column_type: - type: character varying(9) - - name: weekday - tests: - - column_type: - type: character varying(8) diff --git a/tests/integration/simple_seed_test/seed.sql b/tests/integration/simple_seed_test/seed.sql deleted file mode 100644 index 0f35d90ef..000000000 --- 
a/tests/integration/simple_seed_test/seed.sql +++ /dev/null @@ -1,512 +0,0 @@ -create table {schema}.seed_expected ( - id INTEGER, - first_name TEXT, - email TEXT, - ip_address TEXT, - birthday TIMESTAMP WITHOUT TIME ZONE -); - - -INSERT INTO {schema}.seed_expected - ("id","first_name","email","ip_address","birthday") -VALUES - (1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194','2008-09-12 19:08:31'), - (2,'Larry','lperkins1@toplist.cz','64.210.133.162','1978-05-09 04:15:14'), - (3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), - (4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), - (5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), - (6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), - (7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), - (8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), - (9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), - (10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), - (11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), - (12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), - (13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), - (14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), - (15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), - (16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), - (17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), - (18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), - (19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), - (20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), - (21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), - 
(22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), - (23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), - (24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), - (25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), - (26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), - (27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), - (28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), - (29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), - (30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), - (31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), - (32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), - (33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), - (34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), - (35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), - (36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), - (37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), - (38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), - (39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), - (40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), - (41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), - (42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), - (43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), - (44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), - (45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), - (46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), - 
(47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), - (48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), - (49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), - (50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), - (51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), - (52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), - (53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), - (54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), - (55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), - (56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), - (57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), - (58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), - (59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), - (60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), - (61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), - (62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), - (63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), - (64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), - (65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), - (66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), - (67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), - (68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), - (69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), - (70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), - (71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), - 
(72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), - (73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), - (74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), - (75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), - (76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), - (77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), - (78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), - (79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), - (80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), - (81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), - (82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), - (83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), - (84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), - (85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), - (86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), - (87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), - (88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), - (89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), - (90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), - (91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), - (92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), - (93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), - (94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), - (95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), - (96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), - 
(97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), - (98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), - (99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), - (100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'), - (101,'Deborah','dknight2s@reverbnation.com','222.131.211.191','1970-07-08 08:54:23'), - (102,'Sandra','sblack2t@tripadvisor.com','254.183.128.254','2000-04-12 02:39:36'), - (103,'Edward','eburns2u@dailymotion.com','253.89.118.18','1993-10-10 10:54:01'), - (104,'Anthony','ayoung2v@ustream.tv','118.4.193.176','1978-08-26 17:07:29'), - (105,'Donald','dlawrence2w@wp.com','139.200.159.227','2007-07-21 20:56:20'), - (106,'Matthew','mfreeman2x@google.fr','205.26.239.92','2014-12-05 17:05:39'), - (107,'Sean','ssanders2y@trellian.com','143.89.82.108','1993-07-14 21:45:02'), - (108,'Sharon','srobinson2z@soundcloud.com','66.234.247.54','1977-04-06 19:07:03'), - (109,'Jennifer','jwatson30@t-online.de','196.102.127.7','1998-03-07 05:12:23'), - (110,'Clarence','cbrooks31@si.edu','218.93.234.73','2002-11-06 17:22:25'), - (111,'Jose','jflores32@goo.gl','185.105.244.231','1995-01-05 06:32:21'), - (112,'George','glee33@adobe.com','173.82.249.196','2015-01-04 02:47:46'), - (113,'Larry','lhill34@linkedin.com','66.5.206.195','2010-11-02 10:21:17'), - (114,'Marie','mmeyer35@mysql.com','151.152.88.107','1990-05-22 20:52:51'), - (115,'Clarence','cwebb36@skype.com','130.198.55.217','1972-10-27 07:38:54'), - (116,'Sarah','scarter37@answers.com','80.89.18.153','1971-08-24 19:29:30'), - (117,'Henry','hhughes38@webeden.co.uk','152.60.114.174','1973-01-27 09:00:42'), - (118,'Teresa','thenry39@hao123.com','32.187.239.106','2015-11-06 01:48:44'), - (119,'Billy','bgutierrez3a@sun.com','52.37.70.134','2002-03-19 03:20:19'), - (120,'Anthony','agibson3b@github.io','154.251.232.213','1991-04-19 01:08:15'), - (121,'Sandra','sromero3c@wikia.com','44.124.171.2','1998-09-06 20:30:34'), - 
(122,'Paula','pandrews3d@blogs.com','153.142.118.226','2003-06-24 16:31:24'), - (123,'Terry','tbaker3e@csmonitor.com','99.120.45.219','1970-12-09 23:57:21'), - (124,'Lois','lwilson3f@reuters.com','147.44.171.83','1971-01-09 22:28:51'), - (125,'Sara','smorgan3g@nature.com','197.67.192.230','1992-01-28 20:33:24'), - (126,'Charles','ctorres3h@china.com.cn','156.115.216.2','1993-10-02 19:36:34'), - (127,'Richard','ralexander3i@marriott.com','248.235.180.59','1999-02-03 18:40:55'), - (128,'Christina','charper3j@cocolog-nifty.com','152.114.116.129','1978-09-13 00:37:32'), - (129,'Steve','sadams3k@economist.com','112.248.91.98','2004-03-21 09:07:43'), - (130,'Katherine','krobertson3l@ow.ly','37.220.107.28','1977-03-18 19:28:50'), - (131,'Donna','dgibson3m@state.gov','222.218.76.221','1999-02-01 06:46:16'), - (132,'Christina','cwest3n@mlb.com','152.114.6.160','1979-12-24 15:30:35'), - (133,'Sandra','swillis3o@meetup.com','180.71.49.34','1984-09-27 08:05:54'), - (134,'Clarence','cedwards3p@smugmug.com','10.64.180.186','1979-04-16 16:52:10'), - (135,'Ruby','rjames3q@wp.com','98.61.54.20','2007-01-13 14:25:52'), - (136,'Sarah','smontgomery3r@tripod.com','91.45.164.172','2009-07-25 04:34:30'), - (137,'Sarah','soliver3s@eventbrite.com','30.106.39.146','2012-05-09 22:12:33'), - (138,'Deborah','dwheeler3t@biblegateway.com','59.105.213.173','1999-11-09 08:08:44'), - (139,'Deborah','dray3u@i2i.jp','11.108.186.217','2014-02-04 03:15:19'), - (140,'Paul','parmstrong3v@alexa.com','6.250.59.43','2009-12-21 10:08:53'), - (141,'Aaron','abishop3w@opera.com','207.145.249.62','1996-04-25 23:20:23'), - (142,'Henry','hsanders3x@google.ru','140.215.203.171','2012-01-29 11:52:32'), - (143,'Anne','aanderson3y@1688.com','74.150.102.118','1982-04-03 13:46:17'), - (144,'Victor','vmurphy3z@hugedomains.com','222.155.99.152','1987-11-03 19:58:41'), - (145,'Evelyn','ereid40@pbs.org','249.122.33.117','1977-12-14 17:09:57'), - (146,'Brian','bgonzalez41@wikia.com','246.254.235.141','1991-02-24 00:45:58'), 
- (147,'Sandra','sgray42@squarespace.com','150.73.28.159','1972-07-28 17:26:32'), - (148,'Alice','ajones43@a8.net','78.253.12.177','2002-12-05 16:57:46'), - (149,'Jessica','jhanson44@mapquest.com','87.229.30.160','1994-01-30 11:40:04'), - (150,'Louise','lbailey45@reuters.com','191.219.31.101','2011-09-07 21:11:45'), - (151,'Christopher','cgonzalez46@printfriendly.com','83.137.213.239','1984-10-24 14:58:04'), - (152,'Gregory','gcollins47@yandex.ru','28.176.10.115','1998-07-25 17:17:10'), - (153,'Jane','jperkins48@usnews.com','46.53.164.159','1979-08-19 15:25:00'), - (154,'Phyllis','plong49@yahoo.co.jp','208.140.88.2','1985-07-06 02:16:36'), - (155,'Adam','acarter4a@scribd.com','78.48.148.204','2005-07-20 03:31:09'), - (156,'Frank','fweaver4b@angelfire.com','199.180.255.224','2011-03-04 23:07:54'), - (157,'Ronald','rmurphy4c@cloudflare.com','73.42.97.231','1991-01-11 10:39:41'), - (158,'Richard','rmorris4d@e-recht24.de','91.9.97.223','2009-01-17 21:05:15'), - (159,'Rose','rfoster4e@woothemes.com','203.169.53.16','1991-04-21 02:09:38'), - (160,'George','ggarrett4f@uiuc.edu','186.61.5.167','1989-11-11 11:29:42'), - (161,'Victor','vhamilton4g@biblegateway.com','121.229.138.38','2012-06-22 18:01:23'), - (162,'Mark','mbennett4h@businessinsider.com','209.184.29.203','1980-04-16 15:26:34'), - (163,'Martin','mwells4i@ifeng.com','97.223.55.105','2010-05-26 14:08:18'), - (164,'Diana','dstone4j@google.ru','90.155.52.47','2013-02-11 00:14:54'), - (165,'Walter','wferguson4k@blogger.com','30.63.212.44','1986-02-20 17:46:46'), - (166,'Denise','dcoleman4l@vistaprint.com','10.209.153.77','1992-05-13 20:14:14'), - (167,'Philip','pknight4m@xing.com','15.28.135.167','2000-09-11 18:41:13'), - (168,'Russell','rcarr4n@youtube.com','113.55.165.50','2008-07-10 17:49:27'), - (169,'Donna','dburke4o@dion.ne.jp','70.0.105.111','1992-02-10 17:24:58'), - (170,'Anne','along4p@squidoo.com','36.154.58.107','2012-08-19 23:35:31'), - (171,'Clarence','cbanks4q@webeden.co.uk','94.57.53.114','1972-03-11 
21:46:44'), - (172,'Betty','bbowman4r@cyberchimps.com','178.115.209.69','2013-01-13 21:34:51'), - (173,'Andrew','ahudson4s@nytimes.com','84.32.252.144','1998-09-15 14:20:04'), - (174,'Keith','kgordon4t@cam.ac.uk','189.237.211.102','2009-01-22 05:34:38'), - (175,'Patrick','pwheeler4u@mysql.com','47.22.117.226','1984-09-05 22:33:15'), - (176,'Jesse','jfoster4v@mapquest.com','229.95.131.46','1990-01-20 12:19:15'), - (177,'Arthur','afisher4w@jugem.jp','107.255.244.98','1983-10-13 11:08:46'), - (178,'Nicole','nryan4x@wsj.com','243.211.33.221','1974-05-30 23:19:14'), - (179,'Bruce','bjohnson4y@sfgate.com','17.41.200.101','1992-09-23 02:02:19'), - (180,'Terry','tcox4z@reference.com','20.189.120.106','1982-02-13 12:43:14'), - (181,'Ashley','astanley50@kickstarter.com','86.3.56.98','1976-05-09 01:27:16'), - (182,'Michael','mrivera51@about.me','72.118.249.0','1971-11-11 17:28:37'), - (183,'Steven','sgonzalez52@mozilla.org','169.112.247.47','2002-08-24 14:59:25'), - (184,'Kathleen','kfuller53@bloglovin.com','80.93.59.30','2002-03-11 13:41:29'), - (185,'Nicole','nhenderson54@usda.gov','39.253.60.30','1995-04-24 05:55:07'), - (186,'Ralph','rharper55@purevolume.com','167.147.142.189','1980-02-10 18:35:45'), - (187,'Heather','hcunningham56@photobucket.com','96.222.196.229','2007-06-15 05:37:50'), - (188,'Nancy','nlittle57@cbc.ca','241.53.255.175','2007-07-12 23:42:48'), - (189,'Juan','jramirez58@pinterest.com','190.128.84.27','1978-11-07 23:37:37'), - (190,'Beverly','bfowler59@chronoengine.com','54.144.230.49','1979-03-31 23:27:28'), - (191,'Shirley','sstevens5a@prlog.org','200.97.231.248','2011-12-06 07:08:50'), - (192,'Annie','areyes5b@squidoo.com','223.32.182.101','2011-05-28 02:42:09'), - (193,'Jack','jkelley5c@tiny.cc','47.34.118.150','1981-12-05 17:31:40'), - (194,'Keith','krobinson5d@1und1.de','170.210.209.31','1999-03-09 11:05:43'), - (195,'Joseph','jmiller5e@google.com.au','136.74.212.139','1984-10-08 13:18:20'), - 
(196,'Annie','aday5f@blogspot.com','71.99.186.69','1986-02-18 12:27:34'), - (197,'Nancy','nperez5g@liveinternet.ru','28.160.6.107','1983-10-20 17:51:20'), - (198,'Tammy','tward5h@ucoz.ru','141.43.164.70','1980-03-31 04:45:29'), - (199,'Doris','dryan5i@ted.com','239.117.202.188','1985-07-03 03:17:53'), - (200,'Rose','rmendoza5j@photobucket.com','150.200.206.79','1973-04-21 21:36:40'), - (201,'Cynthia','cbutler5k@hubpages.com','80.153.174.161','2001-01-20 01:42:26'), - (202,'Samuel','soliver5l@people.com.cn','86.127.246.140','1970-09-02 02:19:00'), - (203,'Carl','csanchez5m@mysql.com','50.149.237.107','1993-12-01 07:02:09'), - (204,'Kathryn','kowens5n@geocities.jp','145.166.205.201','2004-07-06 18:39:33'), - (205,'Nicholas','nnichols5o@parallels.com','190.240.66.170','2014-11-11 18:52:19'), - (206,'Keith','kwillis5p@youtube.com','181.43.206.100','1998-06-13 06:30:51'), - (207,'Justin','jwebb5q@intel.com','211.54.245.74','2000-11-04 16:58:26'), - (208,'Gary','ghicks5r@wikipedia.org','196.154.213.104','1992-12-01 19:48:28'), - (209,'Martin','mpowell5s@flickr.com','153.67.12.241','1983-06-30 06:24:32'), - (210,'Brenda','bkelley5t@xinhuanet.com','113.100.5.172','2005-01-08 20:50:22'), - (211,'Edward','eray5u@a8.net','205.187.246.65','2011-09-26 08:04:44'), - (212,'Steven','slawson5v@senate.gov','238.150.250.36','1978-11-22 02:48:09'), - (213,'Robert','rthompson5w@furl.net','70.7.89.236','2001-09-12 08:52:07'), - (214,'Jack','jporter5x@diigo.com','220.172.29.99','1976-07-26 14:29:21'), - (215,'Lisa','ljenkins5y@oakley.com','150.151.170.180','2010-03-20 19:21:16'), - (216,'Theresa','tbell5z@mayoclinic.com','247.25.53.173','2001-03-11 05:36:40'), - (217,'Jimmy','jstephens60@weather.com','145.101.93.235','1983-04-12 09:35:30'), - (218,'Louis','lhunt61@amazon.co.jp','78.137.6.253','1997-08-29 19:34:34'), - (219,'Lawrence','lgilbert62@ted.com','243.132.8.78','2015-04-08 22:06:56'), - (220,'David','dgardner63@4shared.com','204.40.46.136','1971-07-09 03:29:11'), - 
(221,'Charles','ckennedy64@gmpg.org','211.83.233.2','2011-02-26 11:55:04'), - (222,'Lillian','lbanks65@msu.edu','124.233.12.80','2010-05-16 20:29:02'), - (223,'Ernest','enguyen66@baidu.com','82.45.128.148','1996-07-04 10:07:04'), - (224,'Ryan','rrussell67@cloudflare.com','202.53.240.223','1983-08-05 12:36:29'), - (225,'Donald','ddavis68@ustream.tv','47.39.218.137','1989-05-27 02:30:56'), - (226,'Joe','jscott69@blogspot.com','140.23.131.75','1973-03-16 12:21:31'), - (227,'Anne','amarshall6a@google.ca','113.162.200.197','1988-12-09 03:38:29'), - (228,'Willie','wturner6b@constantcontact.com','85.83.182.249','1991-10-06 01:51:10'), - (229,'Nicole','nwilson6c@sogou.com','30.223.51.135','1977-05-29 19:54:56'), - (230,'Janet','jwheeler6d@stumbleupon.com','153.194.27.144','2011-03-13 12:48:47'), - (231,'Lois','lcarr6e@statcounter.com','0.41.36.53','1993-02-06 04:52:01'), - (232,'Shirley','scruz6f@tmall.com','37.156.39.223','2007-02-18 17:47:01'), - (233,'Patrick','pford6g@reverbnation.com','36.198.200.89','1977-03-06 15:47:24'), - (234,'Lisa','lhudson6h@usatoday.com','134.213.58.137','2014-10-28 01:56:56'), - (235,'Pamela','pmartinez6i@opensource.org','5.151.127.202','1987-11-30 16:44:47'), - (236,'Larry','lperez6j@infoseek.co.jp','235.122.96.148','1979-01-18 06:33:45'), - (237,'Pamela','pramirez6k@census.gov','138.233.34.163','2012-01-29 10:35:20'), - (238,'Daniel','dcarr6l@php.net','146.21.152.242','1984-11-17 08:22:59'), - (239,'Patrick','psmith6m@indiegogo.com','136.222.199.36','2001-05-30 22:16:44'), - (240,'Raymond','rhenderson6n@hc360.com','116.31.112.38','2000-01-05 20:35:41'), - (241,'Teresa','treynolds6o@miitbeian.gov.cn','198.126.205.220','1996-11-08 01:27:31'), - (242,'Johnny','jmason6p@flickr.com','192.8.232.114','2013-05-14 05:35:50'), - (243,'Angela','akelly6q@guardian.co.uk','234.116.60.197','1977-08-20 02:05:17'), - (244,'Douglas','dcole6r@cmu.edu','128.135.212.69','2016-10-26 17:40:36'), - 
(245,'Frances','fcampbell6s@twitpic.com','94.22.243.235','1987-04-26 07:07:13'), - (246,'Donna','dgreen6t@chron.com','227.116.46.107','2011-07-25 12:59:54'), - (247,'Benjamin','bfranklin6u@redcross.org','89.141.142.89','1974-05-03 20:28:18'), - (248,'Randy','rpalmer6v@rambler.ru','70.173.63.178','2011-12-20 17:40:18'), - (249,'Melissa','mmurray6w@bbb.org','114.234.118.137','1991-02-26 12:45:44'), - (250,'Jean','jlittle6x@epa.gov','141.21.163.254','1991-08-16 04:57:09'), - (251,'Daniel','dolson6y@nature.com','125.75.104.97','2010-04-23 06:25:54'), - (252,'Kathryn','kwells6z@eventbrite.com','225.104.28.249','2015-01-31 02:21:50'), - (253,'Theresa','tgonzalez70@ox.ac.uk','91.93.156.26','1971-12-11 10:31:31'), - (254,'Beverly','broberts71@bluehost.com','244.40.158.89','2013-09-21 13:02:31'), - (255,'Pamela','pmurray72@netscape.com','218.54.95.216','1985-04-16 00:34:00'), - (256,'Timothy','trichardson73@amazonaws.com','235.49.24.229','2000-11-11 09:48:28'), - (257,'Mildred','mpalmer74@is.gd','234.125.95.132','1992-05-25 02:25:02'), - (258,'Jessica','jcampbell75@google.it','55.98.30.140','2014-08-26 00:26:34'), - (259,'Beverly','bthomas76@cpanel.net','48.78.228.176','1970-08-18 10:40:05'), - (260,'Eugene','eward77@cargocollective.com','139.226.204.2','1996-12-04 23:17:00'), - (261,'Andrea','aallen78@webnode.com','160.31.214.38','2009-07-06 07:22:37'), - (262,'Justin','jruiz79@merriam-webster.com','150.149.246.122','2005-06-06 11:44:19'), - (263,'Kenneth','kedwards7a@networksolutions.com','98.82.193.128','2001-07-03 02:00:10'), - (264,'Rachel','rday7b@miibeian.gov.cn','114.15.247.221','1994-08-18 19:45:40'), - (265,'Russell','rmiller7c@instagram.com','184.130.152.253','1977-11-06 01:58:12'), - (266,'Bonnie','bhudson7d@cornell.edu','235.180.186.206','1990-12-03 22:45:24'), - (267,'Raymond','rknight7e@yandex.ru','161.2.44.252','1995-08-25 04:31:19'), - (268,'Bonnie','brussell7f@elpais.com','199.237.57.207','1991-03-29 08:32:06'), - 
(269,'Marie','mhenderson7g@elpais.com','52.203.131.144','2004-06-04 21:50:28'), - (270,'Alan','acarr7h@trellian.com','147.51.205.72','2005-03-03 10:51:31'), - (271,'Barbara','bturner7i@hugedomains.com','103.160.110.226','2004-08-04 13:42:40'), - (272,'Christina','cdaniels7j@census.gov','0.238.61.251','1972-10-18 12:47:33'), - (273,'Jeremy','jgomez7k@reuters.com','111.26.65.56','2013-01-13 10:41:35'), - (274,'Laura','lwood7l@icio.us','149.153.38.205','2011-06-25 09:33:59'), - (275,'Matthew','mbowman7m@auda.org.au','182.138.206.172','1999-03-05 03:25:36'), - (276,'Denise','dparker7n@icq.com','0.213.88.138','2011-11-04 09:43:06'), - (277,'Phillip','pparker7o@discuz.net','219.242.165.240','1973-10-19 04:22:29'), - (278,'Joan','jpierce7p@salon.com','63.31.213.202','1989-04-09 22:06:24'), - (279,'Irene','ibaker7q@cbc.ca','102.33.235.114','1992-09-04 13:00:57'), - (280,'Betty','bbowman7r@ted.com','170.91.249.242','2015-09-28 08:14:22'), - (281,'Teresa','truiz7s@boston.com','82.108.158.207','1999-07-18 05:17:09'), - (282,'Helen','hbrooks7t@slideshare.net','102.87.162.187','2003-01-06 15:45:29'), - (283,'Karen','kgriffin7u@wunderground.com','43.82.44.184','2010-05-28 01:56:37'), - (284,'Lisa','lfernandez7v@mtv.com','200.238.218.220','1993-04-03 20:33:51'), - (285,'Jesse','jlawrence7w@timesonline.co.uk','95.122.105.78','1990-01-05 17:28:43'), - (286,'Terry','tross7x@macromedia.com','29.112.114.133','2009-08-29 21:32:17'), - (287,'Angela','abradley7y@icq.com','177.44.27.72','1989-10-04 21:46:06'), - (288,'Maria','mhart7z@dailymotion.com','55.27.55.202','1975-01-21 01:22:57'), - (289,'Raymond','randrews80@pinterest.com','88.90.78.67','1992-03-16 21:37:40'), - (290,'Kathy','krice81@bluehost.com','212.63.196.102','2000-12-14 03:06:44'), - (291,'Cynthia','cramos82@nymag.com','107.89.190.6','2005-06-28 02:02:33'), - (292,'Kimberly','kjones83@mysql.com','86.169.101.101','2007-06-13 22:56:49'), - (293,'Timothy','thansen84@microsoft.com','108.100.254.90','2003-04-04 10:31:57'), - 
(294,'Carol','cspencer85@berkeley.edu','75.118.144.187','1999-03-30 14:53:21'), - (295,'Louis','lmedina86@latimes.com','141.147.163.24','1991-04-11 17:53:13'), - (296,'Margaret','mcole87@google.fr','53.184.26.83','1991-12-19 01:54:10'), - (297,'Mary','mgomez88@yellowpages.com','208.56.57.99','1976-05-21 18:05:08'), - (298,'Amanda','aanderson89@geocities.com','147.73.15.252','1987-08-22 15:05:28'), - (299,'Kathryn','kgarrett8a@nature.com','27.29.177.220','1976-07-15 04:25:04'), - (300,'Dorothy','dmason8b@shareasale.com','106.210.99.193','1990-09-03 21:39:31'), - (301,'Lois','lkennedy8c@amazon.de','194.169.29.187','2007-07-29 14:09:31'), - (302,'Irene','iburton8d@washingtonpost.com','196.143.110.249','2013-09-05 11:32:46'), - (303,'Betty','belliott8e@wired.com','183.105.222.199','1979-09-19 19:29:13'), - (304,'Bobby','bmeyer8f@census.gov','36.13.161.145','2014-05-24 14:34:39'), - (305,'Ann','amorrison8g@sfgate.com','72.154.54.137','1978-10-05 14:22:34'), - (306,'Daniel','djackson8h@wunderground.com','144.95.32.34','1990-07-27 13:23:05'), - (307,'Joe','jboyd8i@alibaba.com','187.105.86.178','2011-09-28 16:46:32'), - (308,'Ralph','rdunn8j@fc2.com','3.19.87.255','1984-10-18 08:00:40'), - (309,'Craig','ccarter8k@gizmodo.com','235.152.76.215','1998-07-04 12:15:21'), - (310,'Paula','pdean8l@hhs.gov','161.100.173.197','1973-02-13 09:38:55'), - (311,'Andrew','agarrett8m@behance.net','199.253.123.218','1991-02-14 13:36:32'), - (312,'Janet','jhowell8n@alexa.com','39.189.139.79','2012-11-24 20:17:33'), - (313,'Keith','khansen8o@godaddy.com','116.186.223.196','1987-08-23 21:22:05'), - (314,'Nicholas','nedwards8p@state.gov','142.175.142.11','1977-03-28 18:27:27'), - (315,'Jacqueline','jallen8q@oaic.gov.au','189.66.135.192','1994-10-26 11:44:26'), - (316,'Frank','fgardner8r@mapy.cz','154.77.119.169','1983-01-29 19:19:51'), - (317,'Eric','eharrison8s@google.cn','245.139.65.123','1984-02-04 09:54:36'), - (318,'Gregory','gcooper8t@go.com','171.147.0.221','2004-06-14 05:22:08'), - 
(319,'Jean','jfreeman8u@rakuten.co.jp','67.243.121.5','1977-01-07 18:23:43'), - (320,'Juan','jlewis8v@shinystat.com','216.181.171.189','2001-08-23 17:32:43'), - (321,'Randy','rwilliams8w@shinystat.com','105.152.146.28','1983-02-17 00:05:50'), - (322,'Stephen','shart8x@sciencedirect.com','196.131.205.148','2004-02-15 10:12:03'), - (323,'Annie','ahunter8y@example.com','63.36.34.103','2003-07-23 21:15:25'), - (324,'Melissa','mflores8z@cbc.ca','151.230.217.90','1983-11-02 14:53:56'), - (325,'Jane','jweaver90@about.me','0.167.235.217','1987-07-29 00:13:44'), - (326,'Anthony','asmith91@oracle.com','97.87.48.41','2001-05-31 18:44:11'), - (327,'Terry','tdavis92@buzzfeed.com','46.20.12.51','2015-09-12 23:13:55'), - (328,'Brandon','bmontgomery93@gravatar.com','252.101.48.186','2010-10-28 08:26:27'), - (329,'Chris','cmurray94@bluehost.com','25.158.167.97','2004-05-05 16:10:31'), - (330,'Denise','dfuller95@hugedomains.com','216.210.149.28','1979-04-20 08:57:24'), - (331,'Arthur','amcdonald96@sakura.ne.jp','206.42.36.213','2009-08-15 03:26:16'), - (332,'Jesse','jhoward97@google.cn','46.181.118.30','1974-04-18 14:08:41'), - (333,'Frank','fsimpson98@domainmarket.com','163.220.211.87','2006-06-30 14:46:52'), - (334,'Janice','jwoods99@pen.io','229.245.237.182','1988-04-06 11:52:58'), - (335,'Rebecca','rroberts9a@huffingtonpost.com','148.96.15.80','1976-10-05 08:44:16'), - (336,'Joshua','jray9b@opensource.org','192.253.12.198','1971-12-25 22:27:07'), - (337,'Joyce','jcarpenter9c@statcounter.com','125.171.46.215','2001-12-31 22:08:13'), - (338,'Andrea','awest9d@privacy.gov.au','79.101.180.201','1983-02-18 20:07:47'), - (339,'Christine','chudson9e@yelp.com','64.198.43.56','1997-09-08 08:03:43'), - (340,'Joe','jparker9f@earthlink.net','251.215.148.153','1973-11-04 05:08:18'), - (341,'Thomas','tkim9g@answers.com','49.187.34.47','1991-08-07 21:13:48'), - (342,'Janice','jdean9h@scientificamerican.com','4.197.117.16','2009-12-08 02:35:49'), - 
(343,'James','jmitchell9i@umich.edu','43.121.18.147','2011-04-28 17:04:09'), - (344,'Charles','cgardner9j@purevolume.com','197.78.240.240','1998-02-11 06:47:07'), - (345,'Robert','rhenderson9k@friendfeed.com','215.84.180.88','2002-05-10 15:33:14'), - (346,'Chris','cgray9l@4shared.com','249.70.192.240','1998-10-03 16:43:42'), - (347,'Gloria','ghayes9m@hibu.com','81.103.138.26','1999-12-26 11:23:13'), - (348,'Edward','eramirez9n@shareasale.com','38.136.90.136','2010-08-19 08:01:06'), - (349,'Cheryl','cbutler9o@google.ca','172.180.78.172','1995-05-27 20:03:52'), - (350,'Margaret','mwatkins9p@sfgate.com','3.20.198.6','2014-10-21 01:42:58'), - (351,'Rebecca','rwelch9q@examiner.com','45.81.42.208','2001-02-08 12:19:06'), - (352,'Joe','jpalmer9r@phpbb.com','163.202.92.190','1970-01-05 11:29:12'), - (353,'Sandra','slewis9s@dyndns.org','77.215.201.236','1974-01-05 07:04:04'), - (354,'Todd','tfranklin9t@g.co','167.125.181.82','2009-09-28 10:13:58'), - (355,'Joseph','jlewis9u@webmd.com','244.204.6.11','1990-10-21 15:49:57'), - (356,'Alan','aknight9v@nydailynews.com','152.197.95.83','1996-03-08 08:43:17'), - (357,'Sharon','sdean9w@123-reg.co.uk','237.46.40.26','1985-11-30 12:09:24'), - (358,'Annie','awright9x@cafepress.com','190.45.231.111','2000-08-24 11:56:06'), - (359,'Diane','dhamilton9y@youtube.com','85.146.171.196','2015-02-24 02:03:57'), - (360,'Antonio','alane9z@auda.org.au','61.63.146.203','2001-05-13 03:43:34'), - (361,'Matthew','mallena0@hhs.gov','29.97.32.19','1973-02-19 23:43:32'), - (362,'Bonnie','bfowlera1@soup.io','251.216.99.53','2013-08-01 15:35:41'), - (363,'Margaret','mgraya2@examiner.com','69.255.151.79','1998-01-23 22:24:59'), - (364,'Joan','jwagnera3@printfriendly.com','192.166.120.61','1973-07-13 00:30:22'), - (365,'Catherine','cperkinsa4@nytimes.com','58.21.24.214','2006-11-19 11:52:26'), - (366,'Mark','mcartera5@cpanel.net','220.33.102.142','2007-09-09 09:43:27'), - (367,'Paula','ppricea6@msn.com','36.182.238.124','2009-11-11 09:13:05'), - 
(368,'Catherine','cgreena7@army.mil','228.203.58.19','2005-08-09 16:52:15'), - (369,'Helen','hhamiltona8@symantec.com','155.56.194.99','2005-02-01 05:40:36'), - (370,'Jane','jmeyera9@ezinearticles.com','133.244.113.213','2013-11-06 22:10:23'), - (371,'Wanda','wevansaa@bloglovin.com','233.125.192.48','1994-12-26 23:43:42'), - (372,'Mark','mmarshallab@tumblr.com','114.74.60.47','2016-09-29 18:03:01'), - (373,'Andrew','amartinezac@google.cn','182.54.37.130','1976-06-06 17:04:17'), - (374,'Helen','hmoralesad@e-recht24.de','42.45.4.123','1977-03-28 19:06:59'), - (375,'Bonnie','bstoneae@php.net','196.149.79.137','1970-02-05 17:05:58'), - (376,'Douglas','dfreemanaf@nasa.gov','215.65.124.218','2008-11-20 21:51:55'), - (377,'Willie','wwestag@army.mil','35.189.92.118','1992-07-24 05:08:08'), - (378,'Cheryl','cwagnerah@upenn.edu','228.239.222.141','2010-01-25 06:29:01'), - (379,'Sandra','swardai@baidu.com','63.11.113.240','1985-05-23 08:07:37'), - (380,'Julie','jrobinsonaj@jugem.jp','110.58.202.50','2015-03-05 09:42:07'), - (381,'Larry','lwagnerak@shop-pro.jp','98.234.25.24','1975-07-22 22:22:02'), - (382,'Juan','jcastilloal@yelp.com','24.174.74.202','2007-01-17 09:32:43'), - (383,'Donna','dfrazieram@artisteer.com','205.26.147.45','1990-02-11 20:55:46'), - (384,'Rachel','rfloresan@w3.org','109.60.216.162','1983-05-22 22:42:18'), - (385,'Robert','rreynoldsao@theguardian.com','122.65.209.130','2009-05-01 18:02:51'), - (386,'Donald','dbradleyap@etsy.com','42.54.35.126','1997-01-16 16:31:52'), - (387,'Rachel','rfisheraq@nih.gov','160.243.250.45','2006-02-17 22:05:49'), - (388,'Nicholas','nhamiltonar@princeton.edu','156.211.37.111','1976-06-21 03:36:29'), - (389,'Timothy','twhiteas@ca.gov','36.128.23.70','1975-09-24 03:51:18'), - (390,'Diana','dbradleyat@odnoklassniki.ru','44.102.120.184','1983-04-27 09:02:50'), - (391,'Billy','bfowlerau@jimdo.com','91.200.68.196','1995-01-29 06:57:35'), - (392,'Bruce','bandrewsav@ucoz.com','48.12.101.125','1992-10-27 04:31:39'), - 
(393,'Linda','lromeroaw@usa.gov','100.71.233.19','1992-06-08 15:13:18'), - (394,'Debra','dwatkinsax@ucoz.ru','52.160.233.193','2001-11-11 06:51:01'), - (395,'Katherine','kburkeay@wix.com','151.156.242.141','2010-06-14 19:54:28'), - (396,'Martha','mharrisonaz@youku.com','21.222.10.199','1989-10-16 14:17:55'), - (397,'Dennis','dwellsb0@youtu.be','103.16.29.3','1985-12-21 06:05:51'), - (398,'Gloria','grichardsb1@bloglines.com','90.147.120.234','1982-08-27 01:04:43'), - (399,'Brenda','bfullerb2@t.co','33.253.63.90','2011-04-20 05:00:35'), - (400,'Larry','lhendersonb3@disqus.com','88.95.132.128','1982-08-31 02:15:12'), - (401,'Richard','rlarsonb4@wisc.edu','13.48.231.150','1979-04-15 14:08:09'), - (402,'Terry','thuntb5@usa.gov','65.91.103.240','1998-05-15 11:50:49'), - (403,'Harry','hburnsb6@nasa.gov','33.38.21.244','1981-04-12 14:02:20'), - (404,'Diana','dellisb7@mlb.com','218.229.81.135','1997-01-29 00:17:25'), - (405,'Jack','jburkeb8@tripadvisor.com','210.227.182.216','1984-03-09 17:24:03'), - (406,'Julia','jlongb9@fotki.com','10.210.12.104','2005-10-26 03:54:13'), - (407,'Lois','lscottba@msu.edu','188.79.136.138','1973-02-02 18:40:39'), - (408,'Sandra','shendersonbb@shareasale.com','114.171.220.108','2012-06-09 18:22:26'), - (409,'Irene','isanchezbc@cdbaby.com','109.255.50.119','1983-09-28 21:11:27'), - (410,'Emily','ebrooksbd@bandcamp.com','227.81.93.79','1970-08-31 21:08:01'), - (411,'Michelle','mdiazbe@businessweek.com','236.249.6.226','1993-05-22 08:07:07'), - (412,'Tammy','tbennettbf@wisc.edu','145.253.239.152','1978-12-31 20:24:51'), - (413,'Christine','cgreenebg@flickr.com','97.25.140.118','1978-07-17 12:55:30'), - (414,'Patricia','pgarzabh@tuttocitta.it','139.246.192.211','1984-02-27 13:40:08'), - (415,'Kimberly','kromerobi@aol.com','73.56.88.247','1976-09-16 14:22:04'), - (416,'George','gjohnstonbj@fda.gov','240.36.245.185','1979-07-24 14:36:02'), - (417,'Eugene','efullerbk@sciencedaily.com','42.38.105.140','2012-09-12 01:56:41'), - 
(418,'Andrea','astevensbl@goo.gl','31.152.207.204','1979-05-24 11:06:21'), - (419,'Shirley','sreidbm@scientificamerican.com','103.60.31.241','1984-02-23 04:07:41'), - (420,'Terry','tmorenobn@blinklist.com','92.161.34.42','1994-06-25 14:01:35'), - (421,'Christopher','cmorenobo@go.com','158.86.176.82','1973-09-05 09:18:47'), - (422,'Dennis','dhansonbp@ning.com','40.160.81.75','1982-01-20 10:19:41'), - (423,'Beverly','brussellbq@de.vu','138.32.56.204','1997-11-06 07:20:19'), - (424,'Howard','hparkerbr@163.com','103.171.134.171','2015-06-24 15:37:10'), - (425,'Helen','hmccoybs@fema.gov','61.200.4.71','1995-06-20 08:59:10'), - (426,'Ann','ahudsonbt@cafepress.com','239.187.71.125','1977-04-11 07:59:28'), - (427,'Tina','twestbu@nhs.uk','80.213.117.74','1992-08-19 05:54:44'), - (428,'Terry','tnguyenbv@noaa.gov','21.93.118.95','1991-09-19 23:22:55'), - (429,'Ashley','aburtonbw@wix.com','233.176.205.109','2009-11-10 05:01:20'), - (430,'Eric','emyersbx@1und1.de','168.91.212.67','1987-08-10 07:16:20'), - (431,'Barbara','blittleby@lycos.com','242.14.189.239','2008-08-02 12:13:04'), - (432,'Sean','sevansbz@instagram.com','14.39.177.13','2007-04-16 17:28:49'), - (433,'Shirley','sburtonc0@newsvine.com','34.107.138.76','1980-12-10 02:19:29'), - (434,'Patricia','pfreemanc1@so-net.ne.jp','219.213.142.117','1987-03-01 02:25:45'), - (435,'Paula','pfosterc2@vkontakte.ru','227.14.138.141','1972-09-22 12:59:34'), - (436,'Nicole','nstewartc3@1688.com','8.164.23.115','1998-10-27 00:10:17'), - (437,'Earl','ekimc4@ovh.net','100.26.244.177','2013-01-22 10:05:46'), - (438,'Beverly','breedc5@reuters.com','174.12.226.27','1974-09-22 07:29:36'), - (439,'Lawrence','lbutlerc6@a8.net','105.164.42.164','1992-06-05 00:43:40'), - (440,'Charles','cmoorec7@ucoz.com','252.197.131.69','1990-04-09 02:34:05'), - (441,'Alice','alawsonc8@live.com','183.73.220.232','1989-02-28 09:11:04'), - (442,'Dorothy','dcarpenterc9@arstechnica.com','241.47.200.14','2005-05-02 19:57:21'), - 
(443,'Carolyn','cfowlerca@go.com','213.109.55.202','1978-09-10 20:18:20'), - (444,'Anthony','alongcb@free.fr','169.221.158.204','1984-09-13 01:59:23'), - (445,'Annie','amoorecc@e-recht24.de','50.34.148.61','2009-03-26 03:41:07'), - (446,'Carlos','candrewscd@ihg.com','236.69.59.212','1972-03-29 22:42:48'), - (447,'Beverly','bramosce@google.ca','164.250.184.49','1982-11-10 04:34:01'), - (448,'Teresa','tlongcf@umich.edu','174.88.53.223','1987-05-17 12:48:00'), - (449,'Roy','rboydcg@uol.com.br','91.58.243.215','1974-06-16 17:59:54'), - (450,'Ashley','afieldsch@tamu.edu','130.138.11.126','1983-09-15 05:52:36'), - (451,'Judith','jhawkinsci@cmu.edu','200.187.103.245','2003-10-22 12:24:03'), - (452,'Rebecca','rwestcj@ocn.ne.jp','72.85.3.103','1980-11-13 11:01:26'), - (453,'Raymond','rporterck@infoseek.co.jp','146.33.216.151','1982-05-17 23:58:03'), - (454,'Janet','jmarshallcl@odnoklassniki.ru','52.46.193.166','1998-10-04 00:02:21'), - (455,'Shirley','speterscm@salon.com','248.126.31.15','1987-01-30 06:04:59'), - (456,'Annie','abowmancn@economist.com','222.213.248.59','2006-03-14 23:52:59'), - (457,'Jean','jlarsonco@blogspot.com','71.41.25.195','2007-09-08 23:49:45'), - (458,'Phillip','pmoralescp@stanford.edu','74.119.87.28','2011-03-14 20:25:40'), - (459,'Norma','nrobinsoncq@economist.com','28.225.21.54','1989-10-21 01:22:43'), - (460,'Kimberly','kclarkcr@dion.ne.jp','149.171.132.153','2008-06-27 02:27:30'), - (461,'Ruby','rmorriscs@ucla.edu','177.85.163.249','2016-01-28 16:43:44'), - (462,'Jonathan','jcastilloct@tripod.com','78.4.28.77','2000-05-24 17:33:06'), - (463,'Edward','ebryantcu@jigsy.com','140.31.98.193','1992-12-17 08:32:47'), - (464,'Chris','chamiltoncv@eepurl.com','195.171.234.206','1970-12-05 03:42:19'), - (465,'Michael','mweavercw@reference.com','7.233.133.213','1987-03-29 02:30:54'), - (466,'Howard','hlawrencecx@businessweek.com','113.225.124.224','1990-07-30 07:20:57'), - (467,'Philip','phowardcy@comsenz.com','159.170.247.249','2010-10-15 10:18:37'), - 
(468,'Mary','mmarshallcz@xing.com','125.132.189.70','2007-07-19 13:48:47'), - (469,'Scott','salvarezd0@theguardian.com','78.49.103.230','1987-10-31 06:10:44'), - (470,'Wayne','wcarrolld1@blog.com','238.1.120.204','1980-11-19 03:26:10'), - (471,'Jennifer','jwoodsd2@multiply.com','92.20.224.49','2010-05-06 22:17:04'), - (472,'Raymond','rwelchd3@toplist.cz','176.158.35.240','2007-12-12 19:02:51'), - (473,'Steven','sdixond4@wisc.edu','167.55.237.52','1984-05-05 11:44:37'), - (474,'Ralph','rjamesd5@ameblo.jp','241.190.50.133','2000-07-06 08:44:37'), - (475,'Jason','jrobinsond6@hexun.com','138.119.139.56','2006-02-03 05:27:45'), - (476,'Doris','dwoodd7@fema.gov','180.220.156.190','1978-05-11 20:14:20'), - (477,'Elizabeth','eberryd8@youtu.be','74.188.53.229','2006-11-18 08:29:06'), - (478,'Irene','igilbertd9@privacy.gov.au','194.152.218.1','1985-09-17 02:46:52'), - (479,'Jessica','jdeanda@ameblo.jp','178.103.93.118','1974-06-07 19:04:05'), - (480,'Rachel','ralvarezdb@phoca.cz','17.22.223.174','1999-03-08 02:43:25'), - (481,'Kenneth','kthompsondc@shinystat.com','229.119.91.234','2007-05-15 13:17:32'), - (482,'Harold','hmurraydd@parallels.com','133.26.188.80','1993-11-15 03:42:07'), - (483,'Paula','phowellde@samsung.com','34.215.28.216','1993-11-29 15:55:00'), - (484,'Ruth','rpiercedf@tripadvisor.com','111.30.130.123','1986-08-17 10:19:38'), - (485,'Phyllis','paustindg@vk.com','50.84.34.178','1994-04-13 03:05:24'), - (486,'Laura','lfosterdh@usnews.com','37.8.101.33','2001-06-30 08:58:59'), - (487,'Eric','etaylordi@com.com','103.183.253.45','2006-09-15 20:18:46'), - (488,'Doris','driveradj@prweb.com','247.16.2.199','1989-05-08 09:27:09'), - (489,'Ryan','rhughesdk@elegantthemes.com','103.234.153.232','1989-08-01 18:36:06'), - (490,'Steve','smoralesdl@jigsy.com','3.76.84.207','2011-03-13 17:01:05'), - (491,'Louis','lsullivandm@who.int','78.135.44.208','1975-11-26 16:01:23'), - (492,'Catherine','ctuckerdn@seattletimes.com','93.137.106.21','1990-03-13 16:14:56'), - 
(493,'Ann','adixondo@gmpg.org','191.136.222.111','2002-06-05 14:22:18'), - (494,'Johnny','jhartdp@amazon.com','103.252.198.39','1988-07-30 23:54:49'), - (495,'Susan','srichardsdq@skype.com','126.247.192.11','2005-01-09 12:08:14'), - (496,'Brenda','bparkerdr@skype.com','63.232.216.86','1974-05-18 05:58:29'), - (497,'Tammy','tmurphyds@constantcontact.com','56.56.37.112','2014-08-05 18:22:25'), - (498,'Larry','lhayesdt@wordpress.com','162.146.13.46','1997-02-26 14:01:53'), - (499,NULL,'ethomasdu@hhs.gov','6.241.88.250','2007-09-14 13:03:34'), - (500,'Paula','pshawdv@networksolutions.com','123.27.47.249','2003-10-30 21:19:20'); diff --git a/tests/integration/simple_seed_test/seeds-config/seed_disabled.csv b/tests/integration/simple_seed_test/seeds-config/seed_disabled.csv deleted file mode 100644 index 0227dd609..000000000 --- a/tests/integration/simple_seed_test/seeds-config/seed_disabled.csv +++ /dev/null @@ -1,21 +0,0 @@ -id,first_name,email,ip_address,birthday -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 -2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 -3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57 -4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43 -5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29 -6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50 -7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59 -8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15 -9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48 -10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49 -11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11 -12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14 -13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36 -14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04 -15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56 
-16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47 -17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35 -18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57 -19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15 -20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05 diff --git a/tests/integration/simple_seed_test/seeds-config/seed_enabled.csv b/tests/integration/simple_seed_test/seeds-config/seed_enabled.csv deleted file mode 100644 index 0227dd609..000000000 --- a/tests/integration/simple_seed_test/seeds-config/seed_enabled.csv +++ /dev/null @@ -1,21 +0,0 @@ -id,first_name,email,ip_address,birthday -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 -2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 -3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57 -4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43 -5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29 -6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50 -7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59 -8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15 -9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48 -10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49 -11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11 -12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14 -13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36 -14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04 -15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56 -16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47 -17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35 -18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57 -19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15 -20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 
02:09:05 diff --git a/tests/integration/simple_seed_test/seeds-config/seed_tricky.csv b/tests/integration/simple_seed_test/seeds-config/seed_tricky.csv deleted file mode 100644 index 3e90a18e3..000000000 --- a/tests/integration/simple_seed_test/seeds-config/seed_tricky.csv +++ /dev/null @@ -1,7 +0,0 @@ -id,id_str,a_bool,looks_like_a_bool,a_date,looks_like_a_date,relative,weekday -1,1,true,true,2019-01-01 12:32:30,2019-01-01 12:32:30,tomorrow,Saturday -2,2,True,True,2019-01-01 12:32:31,2019-01-01 12:32:31,today,Sunday -3,3,TRUE,TRUE,2019-01-01 12:32:32,2019-01-01 12:32:32,yesterday,Monday -4,4,false,false,2019-01-01 01:32:32,2019-01-01 01:32:32,tomorrow,Saturday -5,5,False,False,2019-01-01 01:32:32,2019-01-01 01:32:32,today,Sunday -6,6,FALSE,FALSE,2019-01-01 01:32:32,2019-01-01 01:32:32,yesterday,Monday diff --git a/tests/integration/simple_seed_test/seeds/seed_actual.csv b/tests/integration/simple_seed_test/seeds/seed_actual.csv deleted file mode 100644 index fc1b80145..000000000 --- a/tests/integration/simple_seed_test/seeds/seed_actual.csv +++ /dev/null @@ -1,501 +0,0 @@ -id,first_name,email,ip_address,birthday -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 -2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 -3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57 -4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43 -5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29 -6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50 -7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59 -8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15 -9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48 -10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49 -11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11 -12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14 -13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 
07:02:36 -14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04 -15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56 -16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47 -17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35 -18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57 -19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15 -20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05 -21,Aaron,arodriguezk@nps.gov,205.245.118.221,1985-10-11 23:07:49 -22,Patrick,pparkerl@techcrunch.com,19.8.100.182,2006-03-29 12:53:56 -23,Phillip,pmorenom@intel.com,41.38.254.103,2011-11-07 15:35:43 -24,Henry,hgarcian@newsvine.com,1.191.216.252,2008-08-28 08:30:44 -25,Irene,iturnero@opera.com,50.17.60.190,1994-04-01 07:15:02 -26,Andrew,adunnp@pen.io,123.52.253.176,2000-11-01 06:03:25 -27,David,dgutierrezq@wp.com,238.23.203.42,1988-01-25 07:29:18 -28,Henry,hsanchezr@cyberchimps.com,248.102.2.185,1983-01-01 13:36:37 -29,Evelyn,epetersons@gizmodo.com,32.80.46.119,1979-07-16 17:24:12 -30,Tammy,tmitchellt@purevolume.com,249.246.167.88,2001-04-03 10:00:23 -31,Jacqueline,jlittleu@domainmarket.com,127.181.97.47,1986-02-11 21:35:50 -32,Earl,eortizv@opera.com,166.47.248.240,1996-07-06 08:16:27 -33,Juan,jgordonw@sciencedirect.com,71.77.2.200,1987-01-31 03:46:44 -34,Diane,dhowellx@nyu.edu,140.94.133.12,1994-06-11 02:30:05 -35,Randy,rkennedyy@microsoft.com,73.255.34.196,2005-05-26 20:28:39 -36,Janice,jriveraz@time.com,22.214.227.32,1990-02-09 04:16:52 -37,Laura,lperry10@diigo.com,159.148.145.73,2015-03-17 05:59:25 -38,Gary,gray11@statcounter.com,40.193.124.56,1970-01-27 10:04:51 -39,Jesse,jmcdonald12@typepad.com,31.7.86.103,2009-03-14 08:14:29 -40,Sandra,sgonzalez13@goodreads.com,223.80.168.239,1993-05-21 14:08:54 -41,Scott,smoore14@archive.org,38.238.46.83,1980-08-30 11:16:56 -42,Phillip,pevans15@cisco.com,158.234.59.34,2011-12-15 23:26:31 -43,Steven,sriley16@google.ca,90.247.57.68,2011-10-29 19:03:28 
-44,Deborah,dbrown17@hexun.com,179.125.143.240,1995-04-10 14:36:07 -45,Lori,lross18@ow.ly,64.80.162.180,1980-12-27 16:49:15 -46,Sean,sjackson19@tumblr.com,240.116.183.69,1988-06-12 21:24:45 -47,Terry,tbarnes1a@163.com,118.38.213.137,1997-09-22 16:43:19 -48,Dorothy,dross1b@ebay.com,116.81.76.49,2005-02-28 13:33:24 -49,Samuel,swashington1c@house.gov,38.191.253.40,1989-01-19 21:15:48 -50,Ralph,rcarter1d@tinyurl.com,104.84.60.174,2007-08-11 10:21:49 -51,Wayne,whudson1e@princeton.edu,90.61.24.102,1983-07-03 16:58:12 -52,Rose,rjames1f@plala.or.jp,240.83.81.10,1995-06-08 11:46:23 -53,Louise,lcox1g@theglobeandmail.com,105.11.82.145,2016-09-19 14:45:51 -54,Kenneth,kjohnson1h@independent.co.uk,139.5.45.94,1976-08-17 11:26:19 -55,Donna,dbrown1i@amazon.co.uk,19.45.169.45,2006-05-27 16:51:40 -56,Johnny,jvasquez1j@trellian.com,118.202.238.23,1975-11-17 08:42:32 -57,Patrick,pramirez1k@tamu.edu,231.25.153.198,1997-08-06 11:51:09 -58,Helen,hlarson1l@prweb.com,8.40.21.39,1993-08-04 19:53:40 -59,Patricia,pspencer1m@gmpg.org,212.198.40.15,1977-08-03 16:37:27 -60,Joseph,jspencer1n@marriott.com,13.15.63.238,2005-07-23 20:22:06 -61,Phillip,pschmidt1o@blogtalkradio.com,177.98.201.190,1976-05-19 21:47:44 -62,Joan,jwebb1p@google.ru,105.229.170.71,1972-09-07 17:53:47 -63,Phyllis,pkennedy1q@imgur.com,35.145.8.244,2000-01-01 22:33:37 -64,Katherine,khunter1r@smh.com.au,248.168.205.32,1991-01-09 06:40:24 -65,Laura,lvasquez1s@wiley.com,128.129.115.152,1997-10-23 12:04:56 -66,Juan,jdunn1t@state.gov,44.228.124.51,2004-11-10 05:07:35 -67,Judith,jholmes1u@wiley.com,40.227.179.115,1977-08-02 17:01:45 -68,Beverly,bbaker1v@wufoo.com,208.34.84.59,2016-03-06 20:07:23 -69,Lawrence,lcarr1w@flickr.com,59.158.212.223,1988-09-13 06:07:21 -70,Gloria,gwilliams1x@mtv.com,245.231.88.33,1995-03-18 22:32:46 -71,Steven,ssims1y@cbslocal.com,104.50.58.255,2001-08-05 21:26:20 -72,Betty,bmills1z@arstechnica.com,103.177.214.220,1981-12-14 21:26:54 -73,Mildred,mfuller20@prnewswire.com,151.158.8.130,2000-04-19 10:13:55 
-74,Donald,dday21@icq.com,9.178.102.255,1972-12-03 00:58:24 -75,Eric,ethomas22@addtoany.com,85.2.241.227,1992-11-01 05:59:30 -76,Joyce,jarmstrong23@sitemeter.com,169.224.20.36,1985-10-24 06:50:01 -77,Maria,mmartinez24@amazonaws.com,143.189.167.135,2005-10-05 05:17:42 -78,Harry,hburton25@youtube.com,156.47.176.237,1978-03-26 05:53:33 -79,Kevin,klawrence26@hao123.com,79.136.183.83,1994-10-12 04:38:52 -80,David,dhall27@prweb.com,133.149.172.153,1976-12-15 16:24:24 -81,Kathy,kperry28@twitter.com,229.242.72.228,1979-03-04 02:58:56 -82,Adam,aprice29@elegantthemes.com,13.145.21.10,1982-11-07 11:46:59 -83,Brandon,bgriffin2a@va.gov,73.249.128.212,2013-10-30 05:30:36 -84,Henry,hnguyen2b@discovery.com,211.36.214.242,1985-01-09 06:37:27 -85,Eric,esanchez2c@edublogs.org,191.166.188.251,2004-05-01 23:21:42 -86,Jason,jlee2d@jimdo.com,193.92.16.182,1973-01-08 09:05:39 -87,Diana,drichards2e@istockphoto.com,19.130.175.245,1994-10-05 22:50:49 -88,Andrea,awelch2f@abc.net.au,94.155.233.96,2002-04-26 08:41:44 -89,Louis,lwagner2g@miitbeian.gov.cn,26.217.34.111,2003-08-25 07:56:39 -90,Jane,jsims2h@seesaa.net,43.4.220.135,1987-03-20 20:39:04 -91,Larry,lgrant2i@si.edu,97.126.79.34,2000-09-07 20:26:19 -92,Louis,ldean2j@prnewswire.com,37.148.40.127,2011-09-16 20:12:14 -93,Jennifer,jcampbell2k@xing.com,38.106.254.142,1988-07-15 05:06:49 -94,Wayne,wcunningham2l@google.com.hk,223.28.26.187,2009-12-15 06:16:54 -95,Lori,lstevens2m@icq.com,181.250.181.58,1984-10-28 03:29:19 -96,Judy,jsimpson2n@marriott.com,180.121.239.219,1986-02-07 15:18:10 -97,Phillip,phoward2o@usa.gov,255.247.0.175,2002-12-26 08:44:45 -98,Gloria,gwalker2p@usa.gov,156.140.7.128,1997-10-04 07:58:58 -99,Paul,pjohnson2q@umn.edu,183.59.198.197,1991-11-14 12:33:55 -100,Frank,fgreene2r@blogspot.com,150.143.68.121,2010-06-12 23:55:39 -101,Deborah,dknight2s@reverbnation.com,222.131.211.191,1970-07-08 08:54:23 -102,Sandra,sblack2t@tripadvisor.com,254.183.128.254,2000-04-12 02:39:36 
-103,Edward,eburns2u@dailymotion.com,253.89.118.18,1993-10-10 10:54:01 -104,Anthony,ayoung2v@ustream.tv,118.4.193.176,1978-08-26 17:07:29 -105,Donald,dlawrence2w@wp.com,139.200.159.227,2007-07-21 20:56:20 -106,Matthew,mfreeman2x@google.fr,205.26.239.92,2014-12-05 17:05:39 -107,Sean,ssanders2y@trellian.com,143.89.82.108,1993-07-14 21:45:02 -108,Sharon,srobinson2z@soundcloud.com,66.234.247.54,1977-04-06 19:07:03 -109,Jennifer,jwatson30@t-online.de,196.102.127.7,1998-03-07 05:12:23 -110,Clarence,cbrooks31@si.edu,218.93.234.73,2002-11-06 17:22:25 -111,Jose,jflores32@goo.gl,185.105.244.231,1995-01-05 06:32:21 -112,George,glee33@adobe.com,173.82.249.196,2015-01-04 02:47:46 -113,Larry,lhill34@linkedin.com,66.5.206.195,2010-11-02 10:21:17 -114,Marie,mmeyer35@mysql.com,151.152.88.107,1990-05-22 20:52:51 -115,Clarence,cwebb36@skype.com,130.198.55.217,1972-10-27 07:38:54 -116,Sarah,scarter37@answers.com,80.89.18.153,1971-08-24 19:29:30 -117,Henry,hhughes38@webeden.co.uk,152.60.114.174,1973-01-27 09:00:42 -118,Teresa,thenry39@hao123.com,32.187.239.106,2015-11-06 01:48:44 -119,Billy,bgutierrez3a@sun.com,52.37.70.134,2002-03-19 03:20:19 -120,Anthony,agibson3b@github.io,154.251.232.213,1991-04-19 01:08:15 -121,Sandra,sromero3c@wikia.com,44.124.171.2,1998-09-06 20:30:34 -122,Paula,pandrews3d@blogs.com,153.142.118.226,2003-06-24 16:31:24 -123,Terry,tbaker3e@csmonitor.com,99.120.45.219,1970-12-09 23:57:21 -124,Lois,lwilson3f@reuters.com,147.44.171.83,1971-01-09 22:28:51 -125,Sara,smorgan3g@nature.com,197.67.192.230,1992-01-28 20:33:24 -126,Charles,ctorres3h@china.com.cn,156.115.216.2,1993-10-02 19:36:34 -127,Richard,ralexander3i@marriott.com,248.235.180.59,1999-02-03 18:40:55 -128,Christina,charper3j@cocolog-nifty.com,152.114.116.129,1978-09-13 00:37:32 -129,Steve,sadams3k@economist.com,112.248.91.98,2004-03-21 09:07:43 -130,Katherine,krobertson3l@ow.ly,37.220.107.28,1977-03-18 19:28:50 -131,Donna,dgibson3m@state.gov,222.218.76.221,1999-02-01 06:46:16 
-132,Christina,cwest3n@mlb.com,152.114.6.160,1979-12-24 15:30:35 -133,Sandra,swillis3o@meetup.com,180.71.49.34,1984-09-27 08:05:54 -134,Clarence,cedwards3p@smugmug.com,10.64.180.186,1979-04-16 16:52:10 -135,Ruby,rjames3q@wp.com,98.61.54.20,2007-01-13 14:25:52 -136,Sarah,smontgomery3r@tripod.com,91.45.164.172,2009-07-25 04:34:30 -137,Sarah,soliver3s@eventbrite.com,30.106.39.146,2012-05-09 22:12:33 -138,Deborah,dwheeler3t@biblegateway.com,59.105.213.173,1999-11-09 08:08:44 -139,Deborah,dray3u@i2i.jp,11.108.186.217,2014-02-04 03:15:19 -140,Paul,parmstrong3v@alexa.com,6.250.59.43,2009-12-21 10:08:53 -141,Aaron,abishop3w@opera.com,207.145.249.62,1996-04-25 23:20:23 -142,Henry,hsanders3x@google.ru,140.215.203.171,2012-01-29 11:52:32 -143,Anne,aanderson3y@1688.com,74.150.102.118,1982-04-03 13:46:17 -144,Victor,vmurphy3z@hugedomains.com,222.155.99.152,1987-11-03 19:58:41 -145,Evelyn,ereid40@pbs.org,249.122.33.117,1977-12-14 17:09:57 -146,Brian,bgonzalez41@wikia.com,246.254.235.141,1991-02-24 00:45:58 -147,Sandra,sgray42@squarespace.com,150.73.28.159,1972-07-28 17:26:32 -148,Alice,ajones43@a8.net,78.253.12.177,2002-12-05 16:57:46 -149,Jessica,jhanson44@mapquest.com,87.229.30.160,1994-01-30 11:40:04 -150,Louise,lbailey45@reuters.com,191.219.31.101,2011-09-07 21:11:45 -151,Christopher,cgonzalez46@printfriendly.com,83.137.213.239,1984-10-24 14:58:04 -152,Gregory,gcollins47@yandex.ru,28.176.10.115,1998-07-25 17:17:10 -153,Jane,jperkins48@usnews.com,46.53.164.159,1979-08-19 15:25:00 -154,Phyllis,plong49@yahoo.co.jp,208.140.88.2,1985-07-06 02:16:36 -155,Adam,acarter4a@scribd.com,78.48.148.204,2005-07-20 03:31:09 -156,Frank,fweaver4b@angelfire.com,199.180.255.224,2011-03-04 23:07:54 -157,Ronald,rmurphy4c@cloudflare.com,73.42.97.231,1991-01-11 10:39:41 -158,Richard,rmorris4d@e-recht24.de,91.9.97.223,2009-01-17 21:05:15 -159,Rose,rfoster4e@woothemes.com,203.169.53.16,1991-04-21 02:09:38 -160,George,ggarrett4f@uiuc.edu,186.61.5.167,1989-11-11 11:29:42 
-161,Victor,vhamilton4g@biblegateway.com,121.229.138.38,2012-06-22 18:01:23 -162,Mark,mbennett4h@businessinsider.com,209.184.29.203,1980-04-16 15:26:34 -163,Martin,mwells4i@ifeng.com,97.223.55.105,2010-05-26 14:08:18 -164,Diana,dstone4j@google.ru,90.155.52.47,2013-02-11 00:14:54 -165,Walter,wferguson4k@blogger.com,30.63.212.44,1986-02-20 17:46:46 -166,Denise,dcoleman4l@vistaprint.com,10.209.153.77,1992-05-13 20:14:14 -167,Philip,pknight4m@xing.com,15.28.135.167,2000-09-11 18:41:13 -168,Russell,rcarr4n@youtube.com,113.55.165.50,2008-07-10 17:49:27 -169,Donna,dburke4o@dion.ne.jp,70.0.105.111,1992-02-10 17:24:58 -170,Anne,along4p@squidoo.com,36.154.58.107,2012-08-19 23:35:31 -171,Clarence,cbanks4q@webeden.co.uk,94.57.53.114,1972-03-11 21:46:44 -172,Betty,bbowman4r@cyberchimps.com,178.115.209.69,2013-01-13 21:34:51 -173,Andrew,ahudson4s@nytimes.com,84.32.252.144,1998-09-15 14:20:04 -174,Keith,kgordon4t@cam.ac.uk,189.237.211.102,2009-01-22 05:34:38 -175,Patrick,pwheeler4u@mysql.com,47.22.117.226,1984-09-05 22:33:15 -176,Jesse,jfoster4v@mapquest.com,229.95.131.46,1990-01-20 12:19:15 -177,Arthur,afisher4w@jugem.jp,107.255.244.98,1983-10-13 11:08:46 -178,Nicole,nryan4x@wsj.com,243.211.33.221,1974-05-30 23:19:14 -179,Bruce,bjohnson4y@sfgate.com,17.41.200.101,1992-09-23 02:02:19 -180,Terry,tcox4z@reference.com,20.189.120.106,1982-02-13 12:43:14 -181,Ashley,astanley50@kickstarter.com,86.3.56.98,1976-05-09 01:27:16 -182,Michael,mrivera51@about.me,72.118.249.0,1971-11-11 17:28:37 -183,Steven,sgonzalez52@mozilla.org,169.112.247.47,2002-08-24 14:59:25 -184,Kathleen,kfuller53@bloglovin.com,80.93.59.30,2002-03-11 13:41:29 -185,Nicole,nhenderson54@usda.gov,39.253.60.30,1995-04-24 05:55:07 -186,Ralph,rharper55@purevolume.com,167.147.142.189,1980-02-10 18:35:45 -187,Heather,hcunningham56@photobucket.com,96.222.196.229,2007-06-15 05:37:50 -188,Nancy,nlittle57@cbc.ca,241.53.255.175,2007-07-12 23:42:48 -189,Juan,jramirez58@pinterest.com,190.128.84.27,1978-11-07 23:37:37 
-190,Beverly,bfowler59@chronoengine.com,54.144.230.49,1979-03-31 23:27:28 -191,Shirley,sstevens5a@prlog.org,200.97.231.248,2011-12-06 07:08:50 -192,Annie,areyes5b@squidoo.com,223.32.182.101,2011-05-28 02:42:09 -193,Jack,jkelley5c@tiny.cc,47.34.118.150,1981-12-05 17:31:40 -194,Keith,krobinson5d@1und1.de,170.210.209.31,1999-03-09 11:05:43 -195,Joseph,jmiller5e@google.com.au,136.74.212.139,1984-10-08 13:18:20 -196,Annie,aday5f@blogspot.com,71.99.186.69,1986-02-18 12:27:34 -197,Nancy,nperez5g@liveinternet.ru,28.160.6.107,1983-10-20 17:51:20 -198,Tammy,tward5h@ucoz.ru,141.43.164.70,1980-03-31 04:45:29 -199,Doris,dryan5i@ted.com,239.117.202.188,1985-07-03 03:17:53 -200,Rose,rmendoza5j@photobucket.com,150.200.206.79,1973-04-21 21:36:40 -201,Cynthia,cbutler5k@hubpages.com,80.153.174.161,2001-01-20 01:42:26 -202,Samuel,soliver5l@people.com.cn,86.127.246.140,1970-09-02 02:19:00 -203,Carl,csanchez5m@mysql.com,50.149.237.107,1993-12-01 07:02:09 -204,Kathryn,kowens5n@geocities.jp,145.166.205.201,2004-07-06 18:39:33 -205,Nicholas,nnichols5o@parallels.com,190.240.66.170,2014-11-11 18:52:19 -206,Keith,kwillis5p@youtube.com,181.43.206.100,1998-06-13 06:30:51 -207,Justin,jwebb5q@intel.com,211.54.245.74,2000-11-04 16:58:26 -208,Gary,ghicks5r@wikipedia.org,196.154.213.104,1992-12-01 19:48:28 -209,Martin,mpowell5s@flickr.com,153.67.12.241,1983-06-30 06:24:32 -210,Brenda,bkelley5t@xinhuanet.com,113.100.5.172,2005-01-08 20:50:22 -211,Edward,eray5u@a8.net,205.187.246.65,2011-09-26 08:04:44 -212,Steven,slawson5v@senate.gov,238.150.250.36,1978-11-22 02:48:09 -213,Robert,rthompson5w@furl.net,70.7.89.236,2001-09-12 08:52:07 -214,Jack,jporter5x@diigo.com,220.172.29.99,1976-07-26 14:29:21 -215,Lisa,ljenkins5y@oakley.com,150.151.170.180,2010-03-20 19:21:16 -216,Theresa,tbell5z@mayoclinic.com,247.25.53.173,2001-03-11 05:36:40 -217,Jimmy,jstephens60@weather.com,145.101.93.235,1983-04-12 09:35:30 -218,Louis,lhunt61@amazon.co.jp,78.137.6.253,1997-08-29 19:34:34 
-219,Lawrence,lgilbert62@ted.com,243.132.8.78,2015-04-08 22:06:56 -220,David,dgardner63@4shared.com,204.40.46.136,1971-07-09 03:29:11 -221,Charles,ckennedy64@gmpg.org,211.83.233.2,2011-02-26 11:55:04 -222,Lillian,lbanks65@msu.edu,124.233.12.80,2010-05-16 20:29:02 -223,Ernest,enguyen66@baidu.com,82.45.128.148,1996-07-04 10:07:04 -224,Ryan,rrussell67@cloudflare.com,202.53.240.223,1983-08-05 12:36:29 -225,Donald,ddavis68@ustream.tv,47.39.218.137,1989-05-27 02:30:56 -226,Joe,jscott69@blogspot.com,140.23.131.75,1973-03-16 12:21:31 -227,Anne,amarshall6a@google.ca,113.162.200.197,1988-12-09 03:38:29 -228,Willie,wturner6b@constantcontact.com,85.83.182.249,1991-10-06 01:51:10 -229,Nicole,nwilson6c@sogou.com,30.223.51.135,1977-05-29 19:54:56 -230,Janet,jwheeler6d@stumbleupon.com,153.194.27.144,2011-03-13 12:48:47 -231,Lois,lcarr6e@statcounter.com,0.41.36.53,1993-02-06 04:52:01 -232,Shirley,scruz6f@tmall.com,37.156.39.223,2007-02-18 17:47:01 -233,Patrick,pford6g@reverbnation.com,36.198.200.89,1977-03-06 15:47:24 -234,Lisa,lhudson6h@usatoday.com,134.213.58.137,2014-10-28 01:56:56 -235,Pamela,pmartinez6i@opensource.org,5.151.127.202,1987-11-30 16:44:47 -236,Larry,lperez6j@infoseek.co.jp,235.122.96.148,1979-01-18 06:33:45 -237,Pamela,pramirez6k@census.gov,138.233.34.163,2012-01-29 10:35:20 -238,Daniel,dcarr6l@php.net,146.21.152.242,1984-11-17 08:22:59 -239,Patrick,psmith6m@indiegogo.com,136.222.199.36,2001-05-30 22:16:44 -240,Raymond,rhenderson6n@hc360.com,116.31.112.38,2000-01-05 20:35:41 -241,Teresa,treynolds6o@miitbeian.gov.cn,198.126.205.220,1996-11-08 01:27:31 -242,Johnny,jmason6p@flickr.com,192.8.232.114,2013-05-14 05:35:50 -243,Angela,akelly6q@guardian.co.uk,234.116.60.197,1977-08-20 02:05:17 -244,Douglas,dcole6r@cmu.edu,128.135.212.69,2016-10-26 17:40:36 -245,Frances,fcampbell6s@twitpic.com,94.22.243.235,1987-04-26 07:07:13 -246,Donna,dgreen6t@chron.com,227.116.46.107,2011-07-25 12:59:54 -247,Benjamin,bfranklin6u@redcross.org,89.141.142.89,1974-05-03 20:28:18 
-248,Randy,rpalmer6v@rambler.ru,70.173.63.178,2011-12-20 17:40:18 -249,Melissa,mmurray6w@bbb.org,114.234.118.137,1991-02-26 12:45:44 -250,Jean,jlittle6x@epa.gov,141.21.163.254,1991-08-16 04:57:09 -251,Daniel,dolson6y@nature.com,125.75.104.97,2010-04-23 06:25:54 -252,Kathryn,kwells6z@eventbrite.com,225.104.28.249,2015-01-31 02:21:50 -253,Theresa,tgonzalez70@ox.ac.uk,91.93.156.26,1971-12-11 10:31:31 -254,Beverly,broberts71@bluehost.com,244.40.158.89,2013-09-21 13:02:31 -255,Pamela,pmurray72@netscape.com,218.54.95.216,1985-04-16 00:34:00 -256,Timothy,trichardson73@amazonaws.com,235.49.24.229,2000-11-11 09:48:28 -257,Mildred,mpalmer74@is.gd,234.125.95.132,1992-05-25 02:25:02 -258,Jessica,jcampbell75@google.it,55.98.30.140,2014-08-26 00:26:34 -259,Beverly,bthomas76@cpanel.net,48.78.228.176,1970-08-18 10:40:05 -260,Eugene,eward77@cargocollective.com,139.226.204.2,1996-12-04 23:17:00 -261,Andrea,aallen78@webnode.com,160.31.214.38,2009-07-06 07:22:37 -262,Justin,jruiz79@merriam-webster.com,150.149.246.122,2005-06-06 11:44:19 -263,Kenneth,kedwards7a@networksolutions.com,98.82.193.128,2001-07-03 02:00:10 -264,Rachel,rday7b@miibeian.gov.cn,114.15.247.221,1994-08-18 19:45:40 -265,Russell,rmiller7c@instagram.com,184.130.152.253,1977-11-06 01:58:12 -266,Bonnie,bhudson7d@cornell.edu,235.180.186.206,1990-12-03 22:45:24 -267,Raymond,rknight7e@yandex.ru,161.2.44.252,1995-08-25 04:31:19 -268,Bonnie,brussell7f@elpais.com,199.237.57.207,1991-03-29 08:32:06 -269,Marie,mhenderson7g@elpais.com,52.203.131.144,2004-06-04 21:50:28 -270,Alan,acarr7h@trellian.com,147.51.205.72,2005-03-03 10:51:31 -271,Barbara,bturner7i@hugedomains.com,103.160.110.226,2004-08-04 13:42:40 -272,Christina,cdaniels7j@census.gov,0.238.61.251,1972-10-18 12:47:33 -273,Jeremy,jgomez7k@reuters.com,111.26.65.56,2013-01-13 10:41:35 -274,Laura,lwood7l@icio.us,149.153.38.205,2011-06-25 09:33:59 -275,Matthew,mbowman7m@auda.org.au,182.138.206.172,1999-03-05 03:25:36 -276,Denise,dparker7n@icq.com,0.213.88.138,2011-11-04 
09:43:06 -277,Phillip,pparker7o@discuz.net,219.242.165.240,1973-10-19 04:22:29 -278,Joan,jpierce7p@salon.com,63.31.213.202,1989-04-09 22:06:24 -279,Irene,ibaker7q@cbc.ca,102.33.235.114,1992-09-04 13:00:57 -280,Betty,bbowman7r@ted.com,170.91.249.242,2015-09-28 08:14:22 -281,Teresa,truiz7s@boston.com,82.108.158.207,1999-07-18 05:17:09 -282,Helen,hbrooks7t@slideshare.net,102.87.162.187,2003-01-06 15:45:29 -283,Karen,kgriffin7u@wunderground.com,43.82.44.184,2010-05-28 01:56:37 -284,Lisa,lfernandez7v@mtv.com,200.238.218.220,1993-04-03 20:33:51 -285,Jesse,jlawrence7w@timesonline.co.uk,95.122.105.78,1990-01-05 17:28:43 -286,Terry,tross7x@macromedia.com,29.112.114.133,2009-08-29 21:32:17 -287,Angela,abradley7y@icq.com,177.44.27.72,1989-10-04 21:46:06 -288,Maria,mhart7z@dailymotion.com,55.27.55.202,1975-01-21 01:22:57 -289,Raymond,randrews80@pinterest.com,88.90.78.67,1992-03-16 21:37:40 -290,Kathy,krice81@bluehost.com,212.63.196.102,2000-12-14 03:06:44 -291,Cynthia,cramos82@nymag.com,107.89.190.6,2005-06-28 02:02:33 -292,Kimberly,kjones83@mysql.com,86.169.101.101,2007-06-13 22:56:49 -293,Timothy,thansen84@microsoft.com,108.100.254.90,2003-04-04 10:31:57 -294,Carol,cspencer85@berkeley.edu,75.118.144.187,1999-03-30 14:53:21 -295,Louis,lmedina86@latimes.com,141.147.163.24,1991-04-11 17:53:13 -296,Margaret,mcole87@google.fr,53.184.26.83,1991-12-19 01:54:10 -297,Mary,mgomez88@yellowpages.com,208.56.57.99,1976-05-21 18:05:08 -298,Amanda,aanderson89@geocities.com,147.73.15.252,1987-08-22 15:05:28 -299,Kathryn,kgarrett8a@nature.com,27.29.177.220,1976-07-15 04:25:04 -300,Dorothy,dmason8b@shareasale.com,106.210.99.193,1990-09-03 21:39:31 -301,Lois,lkennedy8c@amazon.de,194.169.29.187,2007-07-29 14:09:31 -302,Irene,iburton8d@washingtonpost.com,196.143.110.249,2013-09-05 11:32:46 -303,Betty,belliott8e@wired.com,183.105.222.199,1979-09-19 19:29:13 -304,Bobby,bmeyer8f@census.gov,36.13.161.145,2014-05-24 14:34:39 -305,Ann,amorrison8g@sfgate.com,72.154.54.137,1978-10-05 14:22:34 
-306,Daniel,djackson8h@wunderground.com,144.95.32.34,1990-07-27 13:23:05 -307,Joe,jboyd8i@alibaba.com,187.105.86.178,2011-09-28 16:46:32 -308,Ralph,rdunn8j@fc2.com,3.19.87.255,1984-10-18 08:00:40 -309,Craig,ccarter8k@gizmodo.com,235.152.76.215,1998-07-04 12:15:21 -310,Paula,pdean8l@hhs.gov,161.100.173.197,1973-02-13 09:38:55 -311,Andrew,agarrett8m@behance.net,199.253.123.218,1991-02-14 13:36:32 -312,Janet,jhowell8n@alexa.com,39.189.139.79,2012-11-24 20:17:33 -313,Keith,khansen8o@godaddy.com,116.186.223.196,1987-08-23 21:22:05 -314,Nicholas,nedwards8p@state.gov,142.175.142.11,1977-03-28 18:27:27 -315,Jacqueline,jallen8q@oaic.gov.au,189.66.135.192,1994-10-26 11:44:26 -316,Frank,fgardner8r@mapy.cz,154.77.119.169,1983-01-29 19:19:51 -317,Eric,eharrison8s@google.cn,245.139.65.123,1984-02-04 09:54:36 -318,Gregory,gcooper8t@go.com,171.147.0.221,2004-06-14 05:22:08 -319,Jean,jfreeman8u@rakuten.co.jp,67.243.121.5,1977-01-07 18:23:43 -320,Juan,jlewis8v@shinystat.com,216.181.171.189,2001-08-23 17:32:43 -321,Randy,rwilliams8w@shinystat.com,105.152.146.28,1983-02-17 00:05:50 -322,Stephen,shart8x@sciencedirect.com,196.131.205.148,2004-02-15 10:12:03 -323,Annie,ahunter8y@example.com,63.36.34.103,2003-07-23 21:15:25 -324,Melissa,mflores8z@cbc.ca,151.230.217.90,1983-11-02 14:53:56 -325,Jane,jweaver90@about.me,0.167.235.217,1987-07-29 00:13:44 -326,Anthony,asmith91@oracle.com,97.87.48.41,2001-05-31 18:44:11 -327,Terry,tdavis92@buzzfeed.com,46.20.12.51,2015-09-12 23:13:55 -328,Brandon,bmontgomery93@gravatar.com,252.101.48.186,2010-10-28 08:26:27 -329,Chris,cmurray94@bluehost.com,25.158.167.97,2004-05-05 16:10:31 -330,Denise,dfuller95@hugedomains.com,216.210.149.28,1979-04-20 08:57:24 -331,Arthur,amcdonald96@sakura.ne.jp,206.42.36.213,2009-08-15 03:26:16 -332,Jesse,jhoward97@google.cn,46.181.118.30,1974-04-18 14:08:41 -333,Frank,fsimpson98@domainmarket.com,163.220.211.87,2006-06-30 14:46:52 -334,Janice,jwoods99@pen.io,229.245.237.182,1988-04-06 11:52:58 
-335,Rebecca,rroberts9a@huffingtonpost.com,148.96.15.80,1976-10-05 08:44:16 -336,Joshua,jray9b@opensource.org,192.253.12.198,1971-12-25 22:27:07 -337,Joyce,jcarpenter9c@statcounter.com,125.171.46.215,2001-12-31 22:08:13 -338,Andrea,awest9d@privacy.gov.au,79.101.180.201,1983-02-18 20:07:47 -339,Christine,chudson9e@yelp.com,64.198.43.56,1997-09-08 08:03:43 -340,Joe,jparker9f@earthlink.net,251.215.148.153,1973-11-04 05:08:18 -341,Thomas,tkim9g@answers.com,49.187.34.47,1991-08-07 21:13:48 -342,Janice,jdean9h@scientificamerican.com,4.197.117.16,2009-12-08 02:35:49 -343,James,jmitchell9i@umich.edu,43.121.18.147,2011-04-28 17:04:09 -344,Charles,cgardner9j@purevolume.com,197.78.240.240,1998-02-11 06:47:07 -345,Robert,rhenderson9k@friendfeed.com,215.84.180.88,2002-05-10 15:33:14 -346,Chris,cgray9l@4shared.com,249.70.192.240,1998-10-03 16:43:42 -347,Gloria,ghayes9m@hibu.com,81.103.138.26,1999-12-26 11:23:13 -348,Edward,eramirez9n@shareasale.com,38.136.90.136,2010-08-19 08:01:06 -349,Cheryl,cbutler9o@google.ca,172.180.78.172,1995-05-27 20:03:52 -350,Margaret,mwatkins9p@sfgate.com,3.20.198.6,2014-10-21 01:42:58 -351,Rebecca,rwelch9q@examiner.com,45.81.42.208,2001-02-08 12:19:06 -352,Joe,jpalmer9r@phpbb.com,163.202.92.190,1970-01-05 11:29:12 -353,Sandra,slewis9s@dyndns.org,77.215.201.236,1974-01-05 07:04:04 -354,Todd,tfranklin9t@g.co,167.125.181.82,2009-09-28 10:13:58 -355,Joseph,jlewis9u@webmd.com,244.204.6.11,1990-10-21 15:49:57 -356,Alan,aknight9v@nydailynews.com,152.197.95.83,1996-03-08 08:43:17 -357,Sharon,sdean9w@123-reg.co.uk,237.46.40.26,1985-11-30 12:09:24 -358,Annie,awright9x@cafepress.com,190.45.231.111,2000-08-24 11:56:06 -359,Diane,dhamilton9y@youtube.com,85.146.171.196,2015-02-24 02:03:57 -360,Antonio,alane9z@auda.org.au,61.63.146.203,2001-05-13 03:43:34 -361,Matthew,mallena0@hhs.gov,29.97.32.19,1973-02-19 23:43:32 -362,Bonnie,bfowlera1@soup.io,251.216.99.53,2013-08-01 15:35:41 -363,Margaret,mgraya2@examiner.com,69.255.151.79,1998-01-23 22:24:59 
-364,Joan,jwagnera3@printfriendly.com,192.166.120.61,1973-07-13 00:30:22 -365,Catherine,cperkinsa4@nytimes.com,58.21.24.214,2006-11-19 11:52:26 -366,Mark,mcartera5@cpanel.net,220.33.102.142,2007-09-09 09:43:27 -367,Paula,ppricea6@msn.com,36.182.238.124,2009-11-11 09:13:05 -368,Catherine,cgreena7@army.mil,228.203.58.19,2005-08-09 16:52:15 -369,Helen,hhamiltona8@symantec.com,155.56.194.99,2005-02-01 05:40:36 -370,Jane,jmeyera9@ezinearticles.com,133.244.113.213,2013-11-06 22:10:23 -371,Wanda,wevansaa@bloglovin.com,233.125.192.48,1994-12-26 23:43:42 -372,Mark,mmarshallab@tumblr.com,114.74.60.47,2016-09-29 18:03:01 -373,Andrew,amartinezac@google.cn,182.54.37.130,1976-06-06 17:04:17 -374,Helen,hmoralesad@e-recht24.de,42.45.4.123,1977-03-28 19:06:59 -375,Bonnie,bstoneae@php.net,196.149.79.137,1970-02-05 17:05:58 -376,Douglas,dfreemanaf@nasa.gov,215.65.124.218,2008-11-20 21:51:55 -377,Willie,wwestag@army.mil,35.189.92.118,1992-07-24 05:08:08 -378,Cheryl,cwagnerah@upenn.edu,228.239.222.141,2010-01-25 06:29:01 -379,Sandra,swardai@baidu.com,63.11.113.240,1985-05-23 08:07:37 -380,Julie,jrobinsonaj@jugem.jp,110.58.202.50,2015-03-05 09:42:07 -381,Larry,lwagnerak@shop-pro.jp,98.234.25.24,1975-07-22 22:22:02 -382,Juan,jcastilloal@yelp.com,24.174.74.202,2007-01-17 09:32:43 -383,Donna,dfrazieram@artisteer.com,205.26.147.45,1990-02-11 20:55:46 -384,Rachel,rfloresan@w3.org,109.60.216.162,1983-05-22 22:42:18 -385,Robert,rreynoldsao@theguardian.com,122.65.209.130,2009-05-01 18:02:51 -386,Donald,dbradleyap@etsy.com,42.54.35.126,1997-01-16 16:31:52 -387,Rachel,rfisheraq@nih.gov,160.243.250.45,2006-02-17 22:05:49 -388,Nicholas,nhamiltonar@princeton.edu,156.211.37.111,1976-06-21 03:36:29 -389,Timothy,twhiteas@ca.gov,36.128.23.70,1975-09-24 03:51:18 -390,Diana,dbradleyat@odnoklassniki.ru,44.102.120.184,1983-04-27 09:02:50 -391,Billy,bfowlerau@jimdo.com,91.200.68.196,1995-01-29 06:57:35 -392,Bruce,bandrewsav@ucoz.com,48.12.101.125,1992-10-27 04:31:39 
-393,Linda,lromeroaw@usa.gov,100.71.233.19,1992-06-08 15:13:18 -394,Debra,dwatkinsax@ucoz.ru,52.160.233.193,2001-11-11 06:51:01 -395,Katherine,kburkeay@wix.com,151.156.242.141,2010-06-14 19:54:28 -396,Martha,mharrisonaz@youku.com,21.222.10.199,1989-10-16 14:17:55 -397,Dennis,dwellsb0@youtu.be,103.16.29.3,1985-12-21 06:05:51 -398,Gloria,grichardsb1@bloglines.com,90.147.120.234,1982-08-27 01:04:43 -399,Brenda,bfullerb2@t.co,33.253.63.90,2011-04-20 05:00:35 -400,Larry,lhendersonb3@disqus.com,88.95.132.128,1982-08-31 02:15:12 -401,Richard,rlarsonb4@wisc.edu,13.48.231.150,1979-04-15 14:08:09 -402,Terry,thuntb5@usa.gov,65.91.103.240,1998-05-15 11:50:49 -403,Harry,hburnsb6@nasa.gov,33.38.21.244,1981-04-12 14:02:20 -404,Diana,dellisb7@mlb.com,218.229.81.135,1997-01-29 00:17:25 -405,Jack,jburkeb8@tripadvisor.com,210.227.182.216,1984-03-09 17:24:03 -406,Julia,jlongb9@fotki.com,10.210.12.104,2005-10-26 03:54:13 -407,Lois,lscottba@msu.edu,188.79.136.138,1973-02-02 18:40:39 -408,Sandra,shendersonbb@shareasale.com,114.171.220.108,2012-06-09 18:22:26 -409,Irene,isanchezbc@cdbaby.com,109.255.50.119,1983-09-28 21:11:27 -410,Emily,ebrooksbd@bandcamp.com,227.81.93.79,1970-08-31 21:08:01 -411,Michelle,mdiazbe@businessweek.com,236.249.6.226,1993-05-22 08:07:07 -412,Tammy,tbennettbf@wisc.edu,145.253.239.152,1978-12-31 20:24:51 -413,Christine,cgreenebg@flickr.com,97.25.140.118,1978-07-17 12:55:30 -414,Patricia,pgarzabh@tuttocitta.it,139.246.192.211,1984-02-27 13:40:08 -415,Kimberly,kromerobi@aol.com,73.56.88.247,1976-09-16 14:22:04 -416,George,gjohnstonbj@fda.gov,240.36.245.185,1979-07-24 14:36:02 -417,Eugene,efullerbk@sciencedaily.com,42.38.105.140,2012-09-12 01:56:41 -418,Andrea,astevensbl@goo.gl,31.152.207.204,1979-05-24 11:06:21 -419,Shirley,sreidbm@scientificamerican.com,103.60.31.241,1984-02-23 04:07:41 -420,Terry,tmorenobn@blinklist.com,92.161.34.42,1994-06-25 14:01:35 -421,Christopher,cmorenobo@go.com,158.86.176.82,1973-09-05 09:18:47 
-422,Dennis,dhansonbp@ning.com,40.160.81.75,1982-01-20 10:19:41 -423,Beverly,brussellbq@de.vu,138.32.56.204,1997-11-06 07:20:19 -424,Howard,hparkerbr@163.com,103.171.134.171,2015-06-24 15:37:10 -425,Helen,hmccoybs@fema.gov,61.200.4.71,1995-06-20 08:59:10 -426,Ann,ahudsonbt@cafepress.com,239.187.71.125,1977-04-11 07:59:28 -427,Tina,twestbu@nhs.uk,80.213.117.74,1992-08-19 05:54:44 -428,Terry,tnguyenbv@noaa.gov,21.93.118.95,1991-09-19 23:22:55 -429,Ashley,aburtonbw@wix.com,233.176.205.109,2009-11-10 05:01:20 -430,Eric,emyersbx@1und1.de,168.91.212.67,1987-08-10 07:16:20 -431,Barbara,blittleby@lycos.com,242.14.189.239,2008-08-02 12:13:04 -432,Sean,sevansbz@instagram.com,14.39.177.13,2007-04-16 17:28:49 -433,Shirley,sburtonc0@newsvine.com,34.107.138.76,1980-12-10 02:19:29 -434,Patricia,pfreemanc1@so-net.ne.jp,219.213.142.117,1987-03-01 02:25:45 -435,Paula,pfosterc2@vkontakte.ru,227.14.138.141,1972-09-22 12:59:34 -436,Nicole,nstewartc3@1688.com,8.164.23.115,1998-10-27 00:10:17 -437,Earl,ekimc4@ovh.net,100.26.244.177,2013-01-22 10:05:46 -438,Beverly,breedc5@reuters.com,174.12.226.27,1974-09-22 07:29:36 -439,Lawrence,lbutlerc6@a8.net,105.164.42.164,1992-06-05 00:43:40 -440,Charles,cmoorec7@ucoz.com,252.197.131.69,1990-04-09 02:34:05 -441,Alice,alawsonc8@live.com,183.73.220.232,1989-02-28 09:11:04 -442,Dorothy,dcarpenterc9@arstechnica.com,241.47.200.14,2005-05-02 19:57:21 -443,Carolyn,cfowlerca@go.com,213.109.55.202,1978-09-10 20:18:20 -444,Anthony,alongcb@free.fr,169.221.158.204,1984-09-13 01:59:23 -445,Annie,amoorecc@e-recht24.de,50.34.148.61,2009-03-26 03:41:07 -446,Carlos,candrewscd@ihg.com,236.69.59.212,1972-03-29 22:42:48 -447,Beverly,bramosce@google.ca,164.250.184.49,1982-11-10 04:34:01 -448,Teresa,tlongcf@umich.edu,174.88.53.223,1987-05-17 12:48:00 -449,Roy,rboydcg@uol.com.br,91.58.243.215,1974-06-16 17:59:54 -450,Ashley,afieldsch@tamu.edu,130.138.11.126,1983-09-15 05:52:36 -451,Judith,jhawkinsci@cmu.edu,200.187.103.245,2003-10-22 12:24:03 
-452,Rebecca,rwestcj@ocn.ne.jp,72.85.3.103,1980-11-13 11:01:26 -453,Raymond,rporterck@infoseek.co.jp,146.33.216.151,1982-05-17 23:58:03 -454,Janet,jmarshallcl@odnoklassniki.ru,52.46.193.166,1998-10-04 00:02:21 -455,Shirley,speterscm@salon.com,248.126.31.15,1987-01-30 06:04:59 -456,Annie,abowmancn@economist.com,222.213.248.59,2006-03-14 23:52:59 -457,Jean,jlarsonco@blogspot.com,71.41.25.195,2007-09-08 23:49:45 -458,Phillip,pmoralescp@stanford.edu,74.119.87.28,2011-03-14 20:25:40 -459,Norma,nrobinsoncq@economist.com,28.225.21.54,1989-10-21 01:22:43 -460,Kimberly,kclarkcr@dion.ne.jp,149.171.132.153,2008-06-27 02:27:30 -461,Ruby,rmorriscs@ucla.edu,177.85.163.249,2016-01-28 16:43:44 -462,Jonathan,jcastilloct@tripod.com,78.4.28.77,2000-05-24 17:33:06 -463,Edward,ebryantcu@jigsy.com,140.31.98.193,1992-12-17 08:32:47 -464,Chris,chamiltoncv@eepurl.com,195.171.234.206,1970-12-05 03:42:19 -465,Michael,mweavercw@reference.com,7.233.133.213,1987-03-29 02:30:54 -466,Howard,hlawrencecx@businessweek.com,113.225.124.224,1990-07-30 07:20:57 -467,Philip,phowardcy@comsenz.com,159.170.247.249,2010-10-15 10:18:37 -468,Mary,mmarshallcz@xing.com,125.132.189.70,2007-07-19 13:48:47 -469,Scott,salvarezd0@theguardian.com,78.49.103.230,1987-10-31 06:10:44 -470,Wayne,wcarrolld1@blog.com,238.1.120.204,1980-11-19 03:26:10 -471,Jennifer,jwoodsd2@multiply.com,92.20.224.49,2010-05-06 22:17:04 -472,Raymond,rwelchd3@toplist.cz,176.158.35.240,2007-12-12 19:02:51 -473,Steven,sdixond4@wisc.edu,167.55.237.52,1984-05-05 11:44:37 -474,Ralph,rjamesd5@ameblo.jp,241.190.50.133,2000-07-06 08:44:37 -475,Jason,jrobinsond6@hexun.com,138.119.139.56,2006-02-03 05:27:45 -476,Doris,dwoodd7@fema.gov,180.220.156.190,1978-05-11 20:14:20 -477,Elizabeth,eberryd8@youtu.be,74.188.53.229,2006-11-18 08:29:06 -478,Irene,igilbertd9@privacy.gov.au,194.152.218.1,1985-09-17 02:46:52 -479,Jessica,jdeanda@ameblo.jp,178.103.93.118,1974-06-07 19:04:05 -480,Rachel,ralvarezdb@phoca.cz,17.22.223.174,1999-03-08 02:43:25 
-481,Kenneth,kthompsondc@shinystat.com,229.119.91.234,2007-05-15 13:17:32 -482,Harold,hmurraydd@parallels.com,133.26.188.80,1993-11-15 03:42:07 -483,Paula,phowellde@samsung.com,34.215.28.216,1993-11-29 15:55:00 -484,Ruth,rpiercedf@tripadvisor.com,111.30.130.123,1986-08-17 10:19:38 -485,Phyllis,paustindg@vk.com,50.84.34.178,1994-04-13 03:05:24 -486,Laura,lfosterdh@usnews.com,37.8.101.33,2001-06-30 08:58:59 -487,Eric,etaylordi@com.com,103.183.253.45,2006-09-15 20:18:46 -488,Doris,driveradj@prweb.com,247.16.2.199,1989-05-08 09:27:09 -489,Ryan,rhughesdk@elegantthemes.com,103.234.153.232,1989-08-01 18:36:06 -490,Steve,smoralesdl@jigsy.com,3.76.84.207,2011-03-13 17:01:05 -491,Louis,lsullivandm@who.int,78.135.44.208,1975-11-26 16:01:23 -492,Catherine,ctuckerdn@seattletimes.com,93.137.106.21,1990-03-13 16:14:56 -493,Ann,adixondo@gmpg.org,191.136.222.111,2002-06-05 14:22:18 -494,Johnny,jhartdp@amazon.com,103.252.198.39,1988-07-30 23:54:49 -495,Susan,srichardsdq@skype.com,126.247.192.11,2005-01-09 12:08:14 -496,Brenda,bparkerdr@skype.com,63.232.216.86,1974-05-18 05:58:29 -497,Tammy,tmurphyds@constantcontact.com,56.56.37.112,2014-08-05 18:22:25 -498,Larry,lhayesdt@wordpress.com,162.146.13.46,1997-02-26 14:01:53 -499,,ethomasdu@hhs.gov,6.241.88.250,2007-09-14 13:03:34 -500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20 diff --git a/tests/integration/simple_seed_test/test_seed_type_override.py b/tests/integration/simple_seed_test/test_seed_type_override.py deleted file mode 100644 index aafb2f3c6..000000000 --- a/tests/integration/simple_seed_test/test_seed_type_override.py +++ /dev/null @@ -1,58 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestSimpleSeedColumnOverride(DBTIntegrationTest): - - @property - def schema(self): - return "simple_seed" - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds-config'], - 'macro-paths': ['macros'], - 'seeds': { - 'test': { - 
'enabled': False, - 'quote_columns': True, - 'seed_enabled': { - 'enabled': True, - '+column_types': self.seed_enabled_types() - }, - 'seed_tricky': { - 'enabled': True, - '+column_types': self.seed_tricky_types(), - }, - }, - }, - } - - @property - def models(self): - return "models-rs" - - @property - def profile_config(self): - return self.redshift_profile() - - def seed_enabled_types(self): - return { - "id": "text", - "birthday": "date", - } - - def seed_tricky_types(self): - return { - 'id_str': 'text', - 'looks_like_a_bool': 'text', - 'looks_like_a_date': 'text', - } - - @use_profile('redshift') - def test_redshift_simple_seed_with_column_override_redshift(self): - results = self.run_dbt(["seed", "--show"]) - self.assertEqual(len(results), 2) - results = self.run_dbt(["test"]) - self.assertEqual(len(results), 10) From 34f9212249aeeeff980b53c3b5026b6fd37c83b3 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 7 Feb 2023 22:11:35 -0500 Subject: [PATCH 022/113] CT-1948 - convert backup table integration tests to functional tests (#295) * convert backup table integration tests to functional tests * better use of pytest fixtures * moved generic fixtures into `functional/adapter/conftest.py` --- .../Under the Hood-20230130-205628.yaml | 6 + tests/functional/adapter/conftest.py | 25 +++ tests/functional/adapter/test_backup_table.py | 168 ++++++++++++++++++ .../models/model_backup_false.sql | 7 - .../model_backup_param_before_distkey.sql | 7 - .../model_backup_param_before_sortkey.sql | 7 - .../models/model_backup_true.sql | 7 - .../models/model_backup_true_view.sql | 7 - .../models/model_backup_undefined.sql | 7 - .../test_backup_table_option.py | 133 -------------- 10 files changed, 199 insertions(+), 175 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20230130-205628.yaml create mode 100644 tests/functional/adapter/conftest.py create mode 100644 tests/functional/adapter/test_backup_table.py 
delete mode 100644 tests/integration/backup_table_tests/models/model_backup_false.sql delete mode 100644 tests/integration/backup_table_tests/models/model_backup_param_before_distkey.sql delete mode 100644 tests/integration/backup_table_tests/models/model_backup_param_before_sortkey.sql delete mode 100644 tests/integration/backup_table_tests/models/model_backup_true.sql delete mode 100644 tests/integration/backup_table_tests/models/model_backup_true_view.sql delete mode 100644 tests/integration/backup_table_tests/models/model_backup_undefined.sql delete mode 100644 tests/integration/backup_table_tests/test_backup_table_option.py diff --git a/.changes/unreleased/Under the Hood-20230130-205628.yaml b/.changes/unreleased/Under the Hood-20230130-205628.yaml new file mode 100644 index 000000000..c3ec867f5 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230130-205628.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Convert Backup Table tests +time: 2023-01-30T20:56:28.642573-05:00 +custom: + Author: mikealfare + Issue: "293" diff --git a/tests/functional/adapter/conftest.py b/tests/functional/adapter/conftest.py new file mode 100644 index 000000000..e4aa4fe31 --- /dev/null +++ b/tests/functional/adapter/conftest.py @@ -0,0 +1,25 @@ +import pytest + + +@pytest.fixture +def model_ddl(request) -> str: + """ + Returns the contents of the DDL file for the model provided. Use with pytest parameterization. + + Example: + === + @pytest.mark.parametrize( + "model_ddl,backup_expected", + [("backup_is_false", False)], + indirect=["model_ddl"] + ) + def test_setting_reflects_config_option(self, model_ddl: str, backup_expected: bool): + backup_will_occur = "backup no" not in model_ddl.lower() + assert backup_will_occur == backup_expected + === + + In this example, the fixture returns the contents of the backup_is_false DDL file as a string. + This string is then referenced in the test as model_ddl. 
+ """ + with open(f"target/run/test/models/{request.param}.sql", 'r') as ddl_file: + yield '\n'.join(ddl_file.readlines()) diff --git a/tests/functional/adapter/test_backup_table.py b/tests/functional/adapter/test_backup_table.py new file mode 100644 index 000000000..ee58615b3 --- /dev/null +++ b/tests/functional/adapter/test_backup_table.py @@ -0,0 +1,168 @@ +import pytest + +from dbt.tests.util import run_dbt + + +_MODEL_BACKUP_IS_FALSE = """ +{{ config( + materialized='table', + backup=False +) }} +select 1 as my_col +""" + + +_MODEL_BACKUP_IS_TRUE = """ +{{ config( + materialized='table', + backup=True +) }} +select 1 as my_col +""" + + +_MODEL_IS_UNDEFINED = """ +{{ config( + materialized='table' +) }} +select 1 as my_col +""" + + +_MODEL_IS_TRUE_VIEW = """ +{{ config( + materialized='view', + backup=True +) }} +select 1 as my_col +""" + + +_MODEL_SYNTAX_WITH_DISTKEY = """ +{{ config( + materialized='table', + backup=False, + dist='my_col' +) }} +select 1 as my_col +""" + + +_MODEL_SYNTAX_WITH_SORTKEY = """ +{{ config( + materialized='table', + backup=False, + sort='my_col' +) }} +select 1 as my_col +""" + + +class BackupTableBase: + + @pytest.fixture(scope="class", autouse=True) + def _run_dbt(self, project): + run_dbt(["run"]) + + +class TestBackupTableOption(BackupTableBase): + + @pytest.fixture(scope="class") + def models(self): + return { + "backup_is_false.sql": _MODEL_BACKUP_IS_FALSE, + "backup_is_true.sql": _MODEL_BACKUP_IS_TRUE, + "backup_is_undefined.sql": _MODEL_IS_UNDEFINED, + "backup_is_true_view.sql": _MODEL_IS_TRUE_VIEW, + } + + @pytest.mark.parametrize( + "model_ddl,backup_expected", + [ + ("backup_is_false", False), + ("backup_is_true", True), + ("backup_is_undefined", True), + ("backup_is_true_view", True), + ], + indirect=["model_ddl"] + ) + def test_setting_reflects_config_option(self, model_ddl: str, backup_expected: bool): + """ + Test different scenarios of configuration at the MODEL level and verify the expected setting for backup + + 
This test looks for whether `backup no` appears in the DDL file. If it does, then the table will not be backed + up. If it does not appear, the table will be backed up. + + Args: + model_ddl: the DDL for each model as a string + backup_expected: whether backup is expected for this model + """ + backup_will_occur = "backup no" not in model_ddl.lower() + assert backup_will_occur == backup_expected + + +class TestBackupTableSyntax(BackupTableBase): + + @pytest.fixture(scope="class") + def models(self): + return { + "syntax_with_distkey.sql": _MODEL_SYNTAX_WITH_DISTKEY, + "syntax_with_sortkey.sql": _MODEL_SYNTAX_WITH_SORTKEY, + } + + @pytest.mark.parametrize( + "model_ddl,search_phrase", + [ + ("syntax_with_distkey", "diststyle key distkey"), + ("syntax_with_sortkey", "compound sortkey"), + ], + indirect=["model_ddl"] + ) + def test_backup_predicate_precedes_secondary_predicates(self, model_ddl, search_phrase): + """ + Test whether `backup no` appears roughly in the correct spot in the DDL + + This test verifies that the backup predicate comes before the secondary predicates. + This test does not guarantee that the resulting DDL is properly formed. 
+ + Args: + model_ddl: the DDL for each model as a string + search_phrase: the string within the DDL that indicates the distkey or sortkey + """ + assert model_ddl.find("backup no") < model_ddl.find(search_phrase) + + +class TestBackupTableProjectDefault(BackupTableBase): + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"backup": False}} + + @pytest.fixture(scope="class") + def models(self): + return { + "backup_is_true.sql": _MODEL_BACKUP_IS_TRUE, + "backup_is_undefined.sql": _MODEL_IS_UNDEFINED, + } + + @pytest.mark.parametrize( + "model_ddl,backup_expected", + [ + ("backup_is_true", True), + ("backup_is_undefined", False) + ], + indirect=["model_ddl"] + ) + def test_setting_defaults_to_project_option(self, model_ddl: str, backup_expected: bool): + """ + Test different scenarios of configuration at the PROJECT level and verify the expected setting for backup + + This test looks for whether `backup no` appears in the DDL file. If it does, then the table will not be backed + up. If it does not appear, the table will be backed up. 
+ + Args: + model_ddl: the DDL for each model as a string + backup_expected: whether backup is expected for this model + """ + backup_will_occur = "backup no" not in model_ddl.lower() + assert backup_will_occur == backup_expected diff --git a/tests/integration/backup_table_tests/models/model_backup_false.sql b/tests/integration/backup_table_tests/models/model_backup_false.sql deleted file mode 100644 index 67900ac06..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_false.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=False - ) -}} - -select 1 diff --git a/tests/integration/backup_table_tests/models/model_backup_param_before_distkey.sql b/tests/integration/backup_table_tests/models/model_backup_param_before_distkey.sql deleted file mode 100644 index 87c586265..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_param_before_distkey.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=False, dist='distkey' - ) -}} - -select 1 as distkey \ No newline at end of file diff --git a/tests/integration/backup_table_tests/models/model_backup_param_before_sortkey.sql b/tests/integration/backup_table_tests/models/model_backup_param_before_sortkey.sql deleted file mode 100644 index 380aacb5c..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_param_before_sortkey.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=False, sort='sortkey' - ) -}} - -select 1 as sortkey \ No newline at end of file diff --git a/tests/integration/backup_table_tests/models/model_backup_true.sql b/tests/integration/backup_table_tests/models/model_backup_true.sql deleted file mode 100644 index 882522da9..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_true.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=True - ) -}} - -select 2 diff --git 
a/tests/integration/backup_table_tests/models/model_backup_true_view.sql b/tests/integration/backup_table_tests/models/model_backup_true_view.sql deleted file mode 100644 index 841070a0a..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_true_view.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='view', backup=True - ) -}} - -select 3 diff --git a/tests/integration/backup_table_tests/models/model_backup_undefined.sql b/tests/integration/backup_table_tests/models/model_backup_undefined.sql deleted file mode 100644 index 54468d510..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_undefined.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table' - ) -}} - -select 4 diff --git a/tests/integration/backup_table_tests/test_backup_table_option.py b/tests/integration/backup_table_tests/test_backup_table_option.py deleted file mode 100644 index e32bca803..000000000 --- a/tests/integration/backup_table_tests/test_backup_table_option.py +++ /dev/null @@ -1,133 +0,0 @@ -import os - -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestBackupTableOption(DBTIntegrationTest): - @property - def schema(self): - return 'backup_table_tests' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - return { - 'config-version': 2 - } - - def check_backup_param_template(self, test_table_name, backup_is_expected): - # Use raw DDL statement to confirm backup is set correctly on new table - with open('target/run/test/models/{}.sql'.format(test_table_name), 'r') as ddl_file: - ddl_statement = ddl_file.readlines() - lowercase_statement = ' '.join(ddl_statement).lower() - self.assertEqual('backup no' not in lowercase_statement, backup_is_expected) - - @use_profile('redshift') - def test__redshift_backup_table_option(self): - self.assertEqual(len(self.run_dbt()), 6) - - # 
model_backup_undefined should not contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_undefined', True) - - # model_backup_true should not contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_true', True) - - # model_backup_false should contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_false', False) - - # Any view should not contain a BACKUP NO parameter, regardless of the specified config (create will fail) - self.check_backup_param_template('model_backup_true_view', True) - -class TestBackupTableOptionProjectFalse(DBTIntegrationTest): - @property - def schema(self): - return 'backup_table_tests' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - # Update project config to set backup to False. - # This should make the 'model_backup_undefined' switch to BACKUP NO - return { - 'config-version': 2, - 'models': {'backup': False} - } - - def check_backup_param_template(self, test_table_name, backup_is_expected): - # Use raw DDL statement to confirm backup is set correctly on new table - with open('target/run/test/models/{}.sql'.format(test_table_name), 'r') as ddl_file: - ddl_statement = ddl_file.readlines() - lowercase_statement = ' '.join(ddl_statement).lower() - self.assertEqual('backup no' not in lowercase_statement, backup_is_expected) - - @use_profile('redshift') - def test__redshift_backup_table_option_project_config_false(self): - self.assertEqual(len(self.run_dbt()), 6) - - # model_backup_undefined should contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_undefined', False) - - # model_backup_true should not contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_true', True) - - # model_backup_false should contain a BACKUP 
NO parameter in the table DDL - self.check_backup_param_template('model_backup_false', False) - - # Any view should not contain a BACKUP NO parameter, regardless of the specified config (create will fail) - self.check_backup_param_template('model_backup_true_view', True) - -class TestBackupTableOptionOrder(DBTIntegrationTest): - @property - def schema(self): - return 'backup_table_tests' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - return { - 'config-version': 2 - } - - def check_backup_param_template(self, test_table_name, backup_flag_is_expected): - # Use raw DDL statement to confirm backup is set correctly on new table - with open('target/run/test/models/{}.sql'.format(test_table_name), 'r') as ddl_file: - ddl_statement = ddl_file.readlines() - lowercase_statement = ' '.join(ddl_statement).lower() - self.assertEqual('backup no' not in lowercase_statement, backup_flag_is_expected) - if backup_flag_is_expected: - distkey_index = lowercase_statement.find('distkey') - sortkey_index = lowercase_statement.find('sortkey') - backup_index = lowercase_statement.find('backup no') - self.assertEqual((backup_index < distkey_index) or distkey_index == -1, backup_flag_is_expected) - self.assertEqual((backup_index < sortkey_index) or sortkey_index == -1, backup_flag_is_expected) - - @use_profile('redshift') - def test__redshift_backup_table_option_project_config_false(self): - self.assertEqual(len(self.run_dbt()), 6) - - # model_backup_param_before_distkey should contain a BACKUP NO parameter which precedes a DISTKEY in the table ddl - self.check_backup_param_template('model_backup_param_before_distkey', False) - - # model_backup_param_before_sortkey should contain a BACKUP NO parameter which precedes a SORTKEY in the table ddl - self.check_backup_param_template('model_backup_param_before_sortkey', False) \ No newline at end of file From 
0ba78b299287133dfa69f38b4a116247258a1a61 Mon Sep 17 00:00:00 2001 From: Jeremy Cohen Date: Thu, 9 Feb 2023 22:48:20 +0100 Subject: [PATCH 023/113] Rm defer_state_test (#268) Co-authored-by: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> --- .../changed_models/ephemeral_model.sql | 2 - .../changed_models/schema.yml | 9 -- .../changed_models/table_model.sql | 5 - .../changed_models/view_model.sql | 1 - .../changed_models_bad/ephemeral_model.sql | 2 - .../changed_models_bad/schema.yml | 9 -- .../changed_models_bad/table_model.sql | 5 - .../changed_models_bad/view_model.sql | 1 - .../changed_models_missing/schema.yml | 9 -- .../changed_models_missing/table_model.sql | 2 - .../changed_models_missing/view_model.sql | 1 - .../defer_state_test/macros/macros.sql | 3 - .../models/ephemeral_model.sql | 2 - .../defer_state_test/models/exposures.yml | 8 - .../defer_state_test/models/schema.yml | 9 -- .../defer_state_test/models/table_model.sql | 5 - .../defer_state_test/models/view_model.sql | 1 - .../defer_state_test/seeds/seed.csv | 3 - .../snapshots/my_snapshot.sql | 14 -- .../defer_state_test/test_defer_state.py | 153 ------------------ 20 files changed, 244 deletions(-) delete mode 100644 tests/integration/defer_state_test/changed_models/ephemeral_model.sql delete mode 100644 tests/integration/defer_state_test/changed_models/schema.yml delete mode 100644 tests/integration/defer_state_test/changed_models/table_model.sql delete mode 100644 tests/integration/defer_state_test/changed_models/view_model.sql delete mode 100644 tests/integration/defer_state_test/changed_models_bad/ephemeral_model.sql delete mode 100644 tests/integration/defer_state_test/changed_models_bad/schema.yml delete mode 100644 tests/integration/defer_state_test/changed_models_bad/table_model.sql delete mode 100644 tests/integration/defer_state_test/changed_models_bad/view_model.sql delete mode 100644 tests/integration/defer_state_test/changed_models_missing/schema.yml delete mode 100644 
tests/integration/defer_state_test/changed_models_missing/table_model.sql delete mode 100644 tests/integration/defer_state_test/changed_models_missing/view_model.sql delete mode 100644 tests/integration/defer_state_test/macros/macros.sql delete mode 100644 tests/integration/defer_state_test/models/ephemeral_model.sql delete mode 100644 tests/integration/defer_state_test/models/exposures.yml delete mode 100644 tests/integration/defer_state_test/models/schema.yml delete mode 100644 tests/integration/defer_state_test/models/table_model.sql delete mode 100644 tests/integration/defer_state_test/models/view_model.sql delete mode 100644 tests/integration/defer_state_test/seeds/seed.csv delete mode 100644 tests/integration/defer_state_test/snapshots/my_snapshot.sql delete mode 100644 tests/integration/defer_state_test/test_defer_state.py diff --git a/tests/integration/defer_state_test/changed_models/ephemeral_model.sql b/tests/integration/defer_state_test/changed_models/ephemeral_model.sql deleted file mode 100644 index 2f976e3a9..000000000 --- a/tests/integration/defer_state_test/changed_models/ephemeral_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='ephemeral') }} -select * from {{ ref('view_model') }} diff --git a/tests/integration/defer_state_test/changed_models/schema.yml b/tests/integration/defer_state_test/changed_models/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/changed_models/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/changed_models/table_model.sql b/tests/integration/defer_state_test/changed_models/table_model.sql deleted file mode 100644 index 65909318b..000000000 --- a/tests/integration/defer_state_test/changed_models/table_model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='table') }} -select * from {{ 
ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} \ No newline at end of file diff --git a/tests/integration/defer_state_test/changed_models/view_model.sql b/tests/integration/defer_state_test/changed_models/view_model.sql deleted file mode 100644 index bddbbb23c..000000000 --- a/tests/integration/defer_state_test/changed_models/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from no.such.table diff --git a/tests/integration/defer_state_test/changed_models_bad/ephemeral_model.sql b/tests/integration/defer_state_test/changed_models_bad/ephemeral_model.sql deleted file mode 100644 index 5155dfa47..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/ephemeral_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='ephemeral') }} -select * from no.such.table diff --git a/tests/integration/defer_state_test/changed_models_bad/schema.yml b/tests/integration/defer_state_test/changed_models_bad/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/changed_models_bad/table_model.sql b/tests/integration/defer_state_test/changed_models_bad/table_model.sql deleted file mode 100644 index 65909318b..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/table_model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='table') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} \ No newline at end of file diff --git a/tests/integration/defer_state_test/changed_models_bad/view_model.sql b/tests/integration/defer_state_test/changed_models_bad/view_model.sql deleted file mode 100644 index 
bddbbb23c..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from no.such.table diff --git a/tests/integration/defer_state_test/changed_models_missing/schema.yml b/tests/integration/defer_state_test/changed_models_missing/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/changed_models_missing/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/changed_models_missing/table_model.sql b/tests/integration/defer_state_test/changed_models_missing/table_model.sql deleted file mode 100644 index 22b040d2c..000000000 --- a/tests/integration/defer_state_test/changed_models_missing/table_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='table') }} -select 1 as fun diff --git a/tests/integration/defer_state_test/changed_models_missing/view_model.sql b/tests/integration/defer_state_test/changed_models_missing/view_model.sql deleted file mode 100644 index 4b91aa0f2..000000000 --- a/tests/integration/defer_state_test/changed_models_missing/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('seed') }} diff --git a/tests/integration/defer_state_test/macros/macros.sql b/tests/integration/defer_state_test/macros/macros.sql deleted file mode 100644 index 79519c1b6..000000000 --- a/tests/integration/defer_state_test/macros/macros.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro my_macro() %} - {% do log('in a macro' ) %} -{% endmacro %} diff --git a/tests/integration/defer_state_test/models/ephemeral_model.sql b/tests/integration/defer_state_test/models/ephemeral_model.sql deleted file mode 100644 index 2f976e3a9..000000000 --- a/tests/integration/defer_state_test/models/ephemeral_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='ephemeral') }} -select * from {{ 
ref('view_model') }} diff --git a/tests/integration/defer_state_test/models/exposures.yml b/tests/integration/defer_state_test/models/exposures.yml deleted file mode 100644 index 489dec3c3..000000000 --- a/tests/integration/defer_state_test/models/exposures.yml +++ /dev/null @@ -1,8 +0,0 @@ -version: 2 -exposures: - - name: my_exposure - type: application - depends_on: - - ref('view_model') - owner: - email: test@example.com diff --git a/tests/integration/defer_state_test/models/schema.yml b/tests/integration/defer_state_test/models/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/models/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/models/table_model.sql b/tests/integration/defer_state_test/models/table_model.sql deleted file mode 100644 index 65909318b..000000000 --- a/tests/integration/defer_state_test/models/table_model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='table') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} \ No newline at end of file diff --git a/tests/integration/defer_state_test/models/view_model.sql b/tests/integration/defer_state_test/models/view_model.sql deleted file mode 100644 index 4b91aa0f2..000000000 --- a/tests/integration/defer_state_test/models/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('seed') }} diff --git a/tests/integration/defer_state_test/seeds/seed.csv b/tests/integration/defer_state_test/seeds/seed.csv deleted file mode 100644 index 1a728c8ab..000000000 --- a/tests/integration/defer_state_test/seeds/seed.csv +++ /dev/null @@ -1,3 +0,0 @@ -id,name -1,Alice -2,Bob diff --git a/tests/integration/defer_state_test/snapshots/my_snapshot.sql 
b/tests/integration/defer_state_test/snapshots/my_snapshot.sql deleted file mode 100644 index 6a7d2b31b..000000000 --- a/tests/integration/defer_state_test/snapshots/my_snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot my_cool_snapshot %} - - {{ - config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['id'], - ) - }} - select * from {{ ref('view_model') }} - -{% endsnapshot %} diff --git a/tests/integration/defer_state_test/test_defer_state.py b/tests/integration/defer_state_test/test_defer_state.py deleted file mode 100644 index fc942abc0..000000000 --- a/tests/integration/defer_state_test/test_defer_state.py +++ /dev/null @@ -1,153 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import copy -import json -import os -import shutil - - -class TestDeferState(DBTIntegrationTest): - @property - def schema(self): - return "defer_state" - - @property - def models(self): - return "models" - - def setUp(self): - self.other_schema = None - super().setUp() - self._created_schemas.add(self.other_schema) - - @property - def project_config(self): - return { - 'config-version': 2, - 'seeds': { - 'test': { - 'quote_columns': False, - } - } - } - - def get_profile(self, adapter_type): - if self.other_schema is None: - self.other_schema = self.unique_schema() + '_other' - if self.adapter_type == 'snowflake': - self.other_schema = self.other_schema.upper() - profile = super().get_profile(adapter_type) - default_name = profile['test']['target'] - profile['test']['outputs']['otherschema'] = copy.deepcopy(profile['test']['outputs'][default_name]) - profile['test']['outputs']['otherschema']['schema'] = self.other_schema - return profile - - def copy_state(self): - assert not os.path.exists('state') - os.makedirs('state') - shutil.copyfile('target/manifest.json', 'state/manifest.json') - - def run_and_defer(self): - results = self.run_dbt(['seed']) - assert len(results) == 1 - assert not 
any(r.node.deferred for r in results) - results = self.run_dbt(['run']) - assert len(results) == 2 - assert not any(r.node.deferred for r in results) - results = self.run_dbt(['test']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - - # test tests first, because run will change things - # no state, wrong schema, failure. - self.run_dbt(['test', '--target', 'otherschema'], expect_pass=False) - - # no state, run also fails - self.run_dbt(['run', '--target', 'otherschema'], expect_pass=False) - - # defer test, it succeeds - results = self.run_dbt(['test', '-m', 'view_model+', '--state', 'state', '--defer', '--target', 'otherschema']) - - # with state it should work though - results = self.run_dbt(['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema']) - assert self.other_schema not in results[0].node.compiled_code - assert self.unique_schema() in results[0].node.compiled_code - - with open('target/manifest.json') as fp: - data = json.load(fp) - assert data['nodes']['seed.test.seed']['deferred'] - - assert len(results) == 1 - - def run_switchdirs_defer(self): - results = self.run_dbt(['seed']) - assert len(results) == 1 - results = self.run_dbt(['run']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - - self.use_default_project({'model-paths': ['changed_models']}) - # the sql here is just wrong, so it should fail - self.run_dbt( - ['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=False, - ) - # but this should work since we just use the old happy model - self.run_dbt( - ['run', '-m', 'table_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=True, - ) - - self.use_default_project({'model-paths': ['changed_models_bad']}) - # this should fail because the table model refs a broken ephemeral - # model, which it should see - 
self.run_dbt( - ['run', '-m', 'table_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=False, - ) - - def run_defer_iff_not_exists(self): - results = self.run_dbt(['seed', '--target', 'otherschema']) - assert len(results) == 1 - results = self.run_dbt(['run', '--target', 'otherschema']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - results = self.run_dbt(['seed']) - assert len(results) == 1 - results = self.run_dbt(['run', '--state', 'state', '--defer']) - assert len(results) == 2 - - # because the seed now exists in our schema, we shouldn't defer it - assert self.other_schema not in results[0].node.compiled_code - assert self.unique_schema() in results[0].node.compiled_code - - def run_defer_deleted_upstream(self): - results = self.run_dbt(['seed']) - assert len(results) == 1 - results = self.run_dbt(['run']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - - self.use_default_project({'model-paths': ['changed_models_missing']}) - # ephemeral_model is now gone. 
previously this caused a - # keyerror (dbt#2875), now it should pass - self.run_dbt( - ['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=True, - ) - - # despite deferral, test should use models just created in our schema - results = self.run_dbt(['test', '--state', 'state', '--defer']) - assert self.other_schema not in results[0].node.compiled_code - assert self.unique_schema() in results[0].node.compiled_code - - @use_profile('redshift') - def test_redshift_state_changetarget(self): - self.run_and_defer() From b2de21b61f2536d7f75d27ca6fb7b21c4d06e1f0 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Thu, 9 Feb 2023 16:34:16 -0800 Subject: [PATCH 024/113] remove invocation of integration tests in tox (#294) * remove invocation of integration tests in tox * add changie * update CONTRIBUTING.md and changie * update CONTRIBUTING.md --- .../unreleased/Under the Hood-20230130-171158.yaml | 6 ++++++ CONTRIBUTING.md | 10 +++++----- tox.ini | 1 - 3 files changed, 11 insertions(+), 6 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20230130-171158.yaml diff --git a/.changes/unreleased/Under the Hood-20230130-171158.yaml b/.changes/unreleased/Under the Hood-20230130-171158.yaml new file mode 100644 index 000000000..7f68bac4e --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230130-171158.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: 'remove tox call to integration tests' +time: 2023-01-30T17:11:58.554584-08:00 +custom: + Author: colin-rogers-dbt + Issue: "257" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b33972697..7e4bc28f0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -55,9 +55,9 @@ To confirm you have correct `dbt-core` and adapter versions installed please run ### Initial Setup -`dbt-redshift` contains [unit](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/unit) and 
[integration](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/integration) tests. Integration tests require testing against an actual Redshift warehouse. We have CI set up to test against a Redshift warehouse during PR checks. +`dbt-redshift` contains [unit](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/unit) and [functional](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/functional) tests. Functional tests require testing against an actual Redshift warehouse. We have CI set up to test against a Redshift warehouse during PR checks. -In order to run integration tests locally, you will need a `test.env` file in the root of the repository that contains credentials for your Redshift warehouse. +In order to run functional tests locally, you will need a `test.env` file in the root of the repository that contains credentials for your Redshift warehouse. Note: This `test.env` file is git-ignored, but please be extra careful to never check in credentials or other sensitive information when developing. To create your `test.env` file, copy the provided example file, then supply your relevant credentials. @@ -76,8 +76,8 @@ There are a few methods for running tests locally. Finally, you can also run a specific test or group of tests using `pytest` directly. 
With a Python virtualenv active and dev dependencies installed you can do things like: ```sh -# run specific redshift integration tests -python -m pytest -m profile_redshift tests/integration/simple_seed_test +# run specific redshift functional tests +python -m pytest tests/functional/adapter/concurrent_transactions # run specific redshift functional tests in a file python -m pytest tests/functional/adapter/test_basic.py # run all unit tests in a file @@ -110,6 +110,6 @@ You don't need to worry about which `dbt-redshift` version your change will go i dbt Labs provides a CI environment to test changes to the `dbt-redshift` adapter and periodic checks against the development version of `dbt-core` through Github Actions. -A `dbt-redshift` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. +A `dbt-redshift` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or functional test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. Once all tests are passing and your PR has been approved, a `dbt-redshift` maintainer will merge your changes into the active development branch. And that's it! 
Happy developing :tada: diff --git a/tox.ini b/tox.ini index 85d20f595..81625647a 100644 --- a/tox.ini +++ b/tox.ini @@ -21,7 +21,6 @@ passenv = REDSHIFT_TEST_* PYTEST_ADDOPTS commands = - redshift: {envpython} -m pytest {posargs} -m profile_redshift tests/integration redshift: {envpython} -m pytest {posargs} tests/functional deps = -rdev-requirements.txt From 3e956a68bbbee311f0611928374271e13554a55b Mon Sep 17 00:00:00 2001 From: Michelle Ark Date: Fri, 10 Feb 2023 11:54:15 -0500 Subject: [PATCH 025/113] parse vars in unit test utils (#313) * test against feature/click-cli core branch * fix unit tests * remove tests/integration tests * done testing against feature branch * test feature branch * revert requs to use main branch --------- Co-authored-by: Chenyu Li Co-authored-by: Ian Knox --- tests/unit/utils.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/unit/utils.py b/tests/unit/utils.py index a2a0147ac..e09b7fc69 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -45,6 +45,12 @@ def profile_from_dict(profile, profile_name, cli_vars='{}'): cli_vars = parse_cli_vars(cli_vars) renderer = ProfileRenderer(cli_vars) + + # in order to call dbt's internal profile rendering, we need to set the + # flags global. This is a bit of a hack, but it's the best way to do it. 
+ from dbt.flags import set_from_args + from argparse import Namespace + set_from_args(Namespace(), None) return Profile.from_raw_profile_info( profile, profile_name, @@ -73,8 +79,12 @@ def project_from_dict(project, profile, packages=None, selectors=None, cli_vars= def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars='{}'): from dbt.config import Project, Profile, RuntimeConfig + from dbt.config.utils import parse_cli_vars from copy import deepcopy + if not isinstance(cli_vars, dict): + cli_vars = parse_cli_vars(cli_vars) + if isinstance(project, Project): profile_name = project.profile_name else: From e0598b896d1e2eed53bf32ba7a39b3c40ebb85cc Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Mon, 13 Feb 2023 14:10:22 -0600 Subject: [PATCH 026/113] Sync Changelog team to GitHub team (#302) * update to dynamically determine core team members * tweak team logic a to make it more intuitive * update config comments * temporarily point to branch * fix comments so they are not overwritten * point workflow back tp main --- .bumpversion.cfg | 7 +------ .changie.yaml | 13 ++++++++++--- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 99c37bce7..7fe7b8e67 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,18 +1,13 @@ [bumpversion] current_version = 1.5.0a1 -# `parse` allows parsing the version into the parts we need to check. There are some -# unnamed groups and that's okay because they do not need to be audited. If any part -# of the version passed and does not match the regex, it will fail. -# expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457+nightly` -# excepted failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0` parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number (((?Pa|b|rc) # optional pre-release type ?(?P[\d]+?)) # optional pre-release version number \.?(?P[a-z0-9]+\+[a-z]+)? 
# optional nightly release indicator - )? + )? # expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457+nightly`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0` serialize = {major}.{minor}.{patch}{prekind}{num}.{nightly} {major}.{minor}.{patch}{prekind}{num} diff --git a/.changie.yaml b/.changie.yaml index 7b961180e..620305ac4 100644 --- a/.changie.yaml +++ b/.changie.yaml @@ -4,6 +4,7 @@ headerPath: header.tpl.md versionHeaderPath: "" changelogPath: CHANGELOG.md versionExt: md +envPrefix: "CHANGIE_" versionFormat: '## dbt-redshift {{.Version}} - {{.Time.Format "January 02, 2006"}}' kindFormat: '### {{.Kind}}' changeFormat: |- @@ -77,15 +78,21 @@ custom: footerFormat: | {{- $contributorDict := dict }} - {{- /* any names added to this list should be all lowercase for later matching purposes */}} - {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }} + {{- /* ensure all names in this list are all lowercase for later matching purposes */}} + {{- $core_team := splitList " " .Env.CORE_TEAM }} + {{- /* ensure we always skip snyk and dependabot in addition to the core team */}} + {{- $maintainers := list "dependabot[bot]" "snyk-bot"}} + {{- range $team_member := $core_team }} + {{- $team_member_lower := lower $team_member }} + {{- $maintainers = append $maintainers $team_member_lower }} + {{- end }} {{- range $change := .Changes }} {{- $authorList := splitList " " $change.Custom.Author }} {{- /* loop through all authors for a single changelog */}} {{- range $author := $authorList }} {{- $authorLower := lower $author }} {{- /* we only want to include non-core team contributors */}} - {{- if not (has $authorLower $core_team)}} + {{- if not (has $authorLower $maintainers)}} {{- $changeList := splitList " " $change.Custom.Author }} {{- $IssueList := list }} {{- $changeLink := $change.Kind }} 
From 2cc47bb2d226ee7268df31a59edf72bb75ef0bda Mon Sep 17 00:00:00 2001 From: sathiish-kumar <118481523+sathiish-kumar@users.noreply.github.com> Date: Wed, 15 Feb 2023 12:07:02 -0800 Subject: [PATCH 027/113] Modify RedshiftConnectionManager to extend from SQLConnectionManager, migrate from psycopg2 to redshift python connector (#251) * Change RedshiftConnectionManager to extend from SQLConnectionManager, define a _get_connect_method method to leverage Redshift python connector to retrieve the connect method * Add/fix unit tests, create RedshiftConnectMethodFactory to vend connect_method * Fix _connection_keys to mimic PostgresConnectionManager * Remove unneeded functions for tmp_cluster_creds and env_var creds auth due to in-built support in Redshift Python Connector * Resolve some TODOs * Fix references to old exceptions, add changelog * Fix errors with functional tests by overriding add_query & execute and modifying multi statement execution * Attempt to fix integration tests by adding `valid_incremental_strategies` in impl.py * Fix unit tests * Attempt to fix integration tests * add unit tests for execute * add unit tests for add_query * make get_connection_method work with serverless * add unit tests for serverless iam connections * add redshift connector version, remove sslmode, connection time out, role, application_name * change redshift_connector version --------- Co-authored-by: jiezhec Co-authored-by: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> --- .../Under the Hood-20230118-071542.yaml | 8 + dbt/adapters/redshift/connections.py | 272 +++++++++----- dbt/adapters/redshift/impl.py | 12 +- setup.py | 1 + tests/unit/test_redshift_adapter.py | 337 ++++++++++-------- 5 files changed, 395 insertions(+), 235 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20230118-071542.yaml diff --git a/.changes/unreleased/Under the Hood-20230118-071542.yaml b/.changes/unreleased/Under the Hood-20230118-071542.yaml new file mode 
100644 index 000000000..afa2f05f6 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230118-071542.yaml @@ -0,0 +1,8 @@ +kind: Under the Hood +body: Replace psycopg2 connector with Redshift python connector when connecting to + Redshift +time: 2023-01-18T07:15:42.183304-08:00 +custom: + Author: sathiish-kumar + Issue: "219" + PR: "251" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index a13353950..cf2cbf8cc 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -1,20 +1,24 @@ +import re from multiprocessing import Lock from contextlib import contextmanager -from typing import NewType +from typing import NewType, Tuple -from dbt.adapters.postgres import PostgresConnectionManager -from dbt.adapters.postgres import PostgresCredentials +import agate +import sqlparse +from dbt.adapters.sql import SQLConnectionManager +from dbt.contracts.connection import AdapterResponse, Connection, Credentials from dbt.events import AdapterLogger import dbt.exceptions import dbt.flags - -import boto3 - +import redshift_connector from dbt.dataclass_schema import FieldEncoder, dbtClassMixin, StrEnum from dataclasses import dataclass, field from typing import Optional, List +from dbt.helper_types import Port +from redshift_connector import OperationalError, DatabaseError, DataError + logger = AdapterLogger("Redshift") drop_lock: Lock = dbt.flags.MP_CONTEXT.Lock() # type: ignore @@ -38,7 +42,10 @@ class RedshiftConnectionMethod(StrEnum): @dataclass -class RedshiftCredentials(PostgresCredentials): +class RedshiftCredentials(Credentials): + host: str + user: str + port: Port method: str = RedshiftConnectionMethod.DATABASE # type: ignore password: Optional[str] = None # type: ignore cluster_id: Optional[str] = field( @@ -46,25 +53,143 @@ class RedshiftCredentials(PostgresCredentials): metadata={"description": "If using IAM auth, the name of the cluster"}, ) iam_profile: Optional[str] = None - 
iam_duration_seconds: int = 900 - search_path: Optional[str] = None - keepalives_idle: int = 4 autocreate: bool = False db_groups: List[str] = field(default_factory=list) ra3_node: Optional[bool] = False + connect_timeout: int = 30 + role: Optional[str] = None + sslmode: Optional[str] = None + retries: int = 1 + + _ALIASES = {"dbname": "database", "pass": "password"} @property def type(self): return "redshift" def _connection_keys(self): - keys = super()._connection_keys() - return keys + ("method", "cluster_id", "iam_profile", "iam_duration_seconds") + return "host", "port", "user", "database", "schema", "method", "cluster_id", "iam_profile" + + @property + def unique_field(self) -> str: + return self.host + + +class RedshiftConnectMethodFactory: + credentials: RedshiftCredentials + + def __init__(self, credentials): + self.credentials = credentials + + def get_connect_method(self): + method = self.credentials.method + kwargs = { + "host": self.credentials.host, + "database": self.credentials.database, + "port": self.credentials.port if self.credentials.port else 5439, + "auto_create": self.credentials.autocreate, + "db_groups": self.credentials.db_groups, + "region": self.credentials.host.split(".")[2], + "timeout": self.credentials.connect_timeout, + } + if self.credentials.sslmode: + kwargs["sslmode"] = self.credentials.sslmode + + # Support missing 'method' for backwards compatibility + if method == RedshiftConnectionMethod.DATABASE or method is None: + # this requirement is really annoying to encode into json schema, + # so validate it here + if self.credentials.password is None: + raise dbt.exceptions.FailedToConnectError( + "'password' field is required for 'database' credentials" + ) + + def connect(): + logger.debug("Connecting to redshift with username/password based auth...") + c = redshift_connector.connect( + user=self.credentials.user, password=self.credentials.password, **kwargs + ) + if self.credentials.role: + c.cursor().execute("set role 
{}".format(self.credentials.role)) + return c + return connect -class RedshiftConnectionManager(PostgresConnectionManager): + elif method == RedshiftConnectionMethod.IAM: + if not self.credentials.cluster_id and "serverless" not in self.credentials.host: + raise dbt.exceptions.FailedToConnectError( + "Failed to use IAM method. 'cluster_id' must be provided for provisioned cluster. " + "'host' must be provided for serverless endpoint." + ) + + def connect(): + logger.debug("Connecting to redshift with IAM based auth...") + c = redshift_connector.connect( + iam=True, + db_user=self.credentials.user, + password="", + user="", + cluster_identifier=self.credentials.cluster_id, + profile=self.credentials.iam_profile, + **kwargs, + ) + if self.credentials.role: + c.cursor().execute("set role {}".format(self.credentials.role)) + return c + + return connect + else: + raise dbt.exceptions.FailedToConnectError( + "Invalid 'method' in profile: '{}'".format(method) + ) + + +class RedshiftConnectionManager(SQLConnectionManager): TYPE = "redshift" + def _get_backend_pid(self): + sql = "select pg_backend_pid()" + _, cursor = self.add_query(sql) + res = cursor.fetchone() + return res + + def cancel(self, connection: Connection): + connection_name = connection.name + try: + pid = self._get_backend_pid() + sql = "select pg_terminate_backend({})".format(pid) + _, cursor = self.add_query(sql) + res = cursor.fetchone() + logger.debug("Cancel query '{}': {}".format(connection_name, res)) + except redshift_connector.error.InterfaceError as e: + if "is closed" in str(e): + logger.debug(f"Connection {connection_name} was already closed") + return + raise + + @classmethod + def get_response(cls, cursor: redshift_connector.Cursor) -> AdapterResponse: + rows = cursor.rowcount + message = f"cursor.rowcount = {rows}" + return AdapterResponse(_message=message, rows_affected=rows) + + @contextmanager + def exception_handler(self, sql): + try: + yield + except redshift_connector.error.DatabaseError 
as e: + logger.debug(f"Redshift error: {str(e)}") + self.rollback_if_open() + raise dbt.exceptions.DbtDatabaseError(str(e)) + except Exception as e: + logger.debug("Error running SQL: {}", sql) + logger.debug("Rolling back transaction.") + self.rollback_if_open() + # Raise DBT native exceptions as is. + if isinstance(e, dbt.exceptions.Exception): + raise + raise dbt.exceptions.DbtRuntimeError(str(e)) from e + @contextmanager def fresh_transaction(self, name=None): """On entrance to this context manager, hold an exclusive lock and @@ -89,83 +214,68 @@ def fresh_transaction(self, name=None): self.begin() @classmethod - def fetch_cluster_credentials( - cls, db_user, db_name, cluster_id, iam_profile, duration_s, autocreate, db_groups - ): - """Fetches temporary login credentials from AWS. The specified user - must already exist in the database, or else an error will occur""" - - if iam_profile is None: - session = boto3.Session() - boto_client = session.client("redshift") + def open(cls, connection): + if connection.state == "open": + logger.debug("Connection is already open, skipping open.") + return connection + + credentials = connection.credentials + connect_method_factory = RedshiftConnectMethodFactory(credentials) + + def exponential_backoff(attempt: int): + return attempt * attempt + + retryable_exceptions = [OperationalError, DatabaseError, DataError] + + return cls.retry_connection( + connection, + connect=connect_method_factory.get_connect_method(), + logger=logger, + retry_limit=credentials.retries, + retry_timeout=exponential_backoff, + retryable_exceptions=retryable_exceptions, + ) + + def execute( + self, sql: str, auto_begin: bool = False, fetch: bool = False + ) -> Tuple[AdapterResponse, agate.Table]: + _, cursor = self.add_query(sql, auto_begin) + response = self.get_response(cursor) + if fetch: + table = self.get_result_from_cursor(cursor) else: - logger.debug("Connecting to Redshift using 'IAM'" + f"with profile {iam_profile}") - boto_session = 
boto3.Session(profile_name=iam_profile) - boto_client = boto_session.client("redshift") + table = dbt.clients.agate_helper.empty_table() + return response, table - try: - return boto_client.get_cluster_credentials( - DbUser=db_user, - DbName=db_name, - ClusterIdentifier=cluster_id, - DurationSeconds=duration_s, - AutoCreate=autocreate, - DbGroups=db_groups, - ) + def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False): - except boto_client.exceptions.ClientError as e: - raise dbt.exceptions.FailedToConnectError( - "Unable to get temporary Redshift cluster credentials: {}".format(e) - ) + connection = None + cursor = None - @classmethod - def get_tmp_iam_cluster_credentials(cls, credentials): - cluster_id = credentials.cluster_id + queries = sqlparse.split(sql) - # default via: - # boto3.readthedocs.io/en/latest/reference/services/redshift.html - iam_duration_s = credentials.iam_duration_seconds + for query in queries: + # Strip off comments from the current query + without_comments = re.sub( + re.compile(r"(\".*?\"|\'.*?\')|(/\*.*?\*/|--[^\r\n]*$)", re.MULTILINE), + "", + query, + ).strip() - if not cluster_id: - raise dbt.exceptions.FailedToConnectError( - "'cluster_id' must be provided in profile if IAM " "authentication method selected" + if without_comments == "": + continue + + connection, cursor = super().add_query( + query, auto_begin, bindings=bindings, abridge_sql_log=abridge_sql_log ) - cluster_creds = cls.fetch_cluster_credentials( - credentials.user, - credentials.database, - credentials.cluster_id, - credentials.iam_profile, - iam_duration_s, - credentials.autocreate, - credentials.db_groups, - ) + if cursor is None: + conn = self.get_thread_connection() + conn_name = conn.name if conn and conn.name else "" + raise dbt.exceptions.DbtRuntimeError(f"Tried to run invalid SQL: {sql} on {conn_name}") - # replace username and password with temporary redshift credentials - return credentials.replace( - user=cluster_creds.get("DbUser"), 
password=cluster_creds.get("DbPassword") - ) + return connection, cursor @classmethod def get_credentials(cls, credentials): - method = credentials.method - - # Support missing 'method' for backwards compatibility - if method == "database" or method is None: - logger.debug("Connecting to Redshift using 'database' credentials") - # this requirement is really annoying to encode into json schema, - # so validate it here - if credentials.password is None: - raise dbt.exceptions.FailedToConnectError( - "'password' field is required for 'database' credentials" - ) - return credentials - - elif method == "iam": - logger.debug("Connecting to Redshift using 'IAM' credentials") - return cls.get_tmp_iam_cluster_credentials(credentials) - - else: - raise dbt.exceptions.FailedToConnectError( - "Invalid 'method' in profile: '{}'".format(method) - ) + return credentials diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 45c983b3e..8b8ba66fd 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -3,7 +3,6 @@ from dbt.adapters.base.impl import AdapterConfig from dbt.adapters.sql import SQLAdapter from dbt.adapters.base.meta import available -from dbt.adapters.postgres import PostgresAdapter from dbt.adapters.redshift import RedshiftConnectionManager from dbt.adapters.redshift.column import RedshiftColumn from dbt.adapters.redshift import RedshiftRelation @@ -22,7 +21,7 @@ class RedshiftConfig(AdapterConfig): backup: Optional[bool] = True -class RedshiftAdapter(PostgresAdapter, SQLAdapter): +class RedshiftAdapter(SQLAdapter): Relation = RedshiftRelation ConnectionManager = RedshiftConnectionManager Column = RedshiftColumn # type: ignore @@ -91,3 +90,12 @@ def _get_catalog_schemas(self, manifest): self.type(), exc.msg ) ) + + def valid_incremental_strategies(self): + """The set of standard builtin strategies which this adapter supports out-of-the-box. + Not used to validate custom strategies defined by end users. 
+ """ + return ["append", "delete+insert"] + + def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: + return f"{add_to} + interval '{number} {interval}'" diff --git a/setup.py b/setup.py index 6f82944de..db8cd45b4 100644 --- a/setup.py +++ b/setup.py @@ -84,6 +84,7 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: f"dbt-core~={_core_version()}", f"dbt-postgres~={_core_version()}", "boto3~=1.26.26", + "redshift-connector~=2.0.910", ], zip_safe=False, classifiers=[ diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index 92fd9cbd8..ba5361b0b 100644 --- a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -1,9 +1,11 @@ import unittest from unittest import mock -from unittest.mock import Mock +from unittest.mock import Mock, call import agate import boto3 +import dbt +import redshift_connector from dbt.adapters.redshift import ( RedshiftAdapter, @@ -11,17 +13,10 @@ ) from dbt.clients import agate_helper from dbt.exceptions import FailedToConnectError - +from dbt.adapters.redshift.connections import RedshiftConnectMethodFactory from .utils import config_from_parts_or_dicts, mock_connection, TestAdapterConversions, inject_adapter -def fetch_cluster_credentials(*args, **kwargs): - return { - 'DbUser': 'root', - 'DbPassword': 'tmp_password' - } - - class TestRedshiftAdapter(unittest.TestCase): def setUp(self): @@ -31,7 +26,7 @@ def setUp(self): 'type': 'redshift', 'dbname': 'redshift', 'user': 'root', - 'host': 'thishostshouldnotexist', + 'host': 'thishostshouldnotexist.test.us-east-1', 'pass': 'password', 'port': 5439, 'schema': 'public' @@ -62,32 +57,146 @@ def adapter(self): inject_adapter(self._adapter, RedshiftPlugin) return self._adapter + @mock.patch("redshift_connector.connect", Mock()) def test_implicit_database_conn(self): - creds = RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials) - self.assertEqual(creds, 
self.config.credentials) + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + host='thishostshouldnotexist.test.us-east-1', + database='redshift', + user='root', + password='password', + port=5439, + auto_create=False, + db_groups=[], + timeout=30, + region='us-east-1' + ) + @mock.patch("redshift_connector.connect", Mock()) def test_explicit_database_conn(self): self.config.method = 'database' - creds = RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials) - self.assertEqual(creds, self.config.credentials) + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + host='thishostshouldnotexist.test.us-east-1', + database='redshift', + user='root', + password='password', + port=5439, + auto_create=False, + db_groups=[], + region='us-east-1', + timeout=30 + ) + + @mock.patch("redshift_connector.connect", Mock()) + def test_explicit_iam_conn_without_profile(self): + self.config.credentials = self.config.credentials.replace( + method='iam', + cluster_id='my_redshift', + host='thishostshouldnotexist.test.us-east-1' + ) + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + iam=True, + host='thishostshouldnotexist.test.us-east-1', + database='redshift', + db_user='root', + password='', + user='', + cluster_identifier='my_redshift', + region='us-east-1', + auto_create=False, + db_groups=[], + profile=None, + timeout=30, + port=5439 + ) - def test_explicit_iam_conn(self): + @mock.patch('redshift_connector.connect', Mock()) + @mock.patch('boto3.Session', Mock()) + def test_explicit_iam_conn_with_profile(self): self.config.credentials = self.config.credentials.replace( method='iam', cluster_id='my_redshift', - iam_duration_seconds=1200 + iam_profile='test', + host='thishostshouldnotexist.test.us-east-1' + ) + connection = 
self.adapter.acquire_connection("dummy") + connection.handle + + redshift_connector.connect.assert_called_once_with( + iam=True, + host='thishostshouldnotexist.test.us-east-1', + database='redshift', + cluster_identifier='my_redshift', + region='us-east-1', + auto_create=False, + db_groups=[], + db_user='root', + password='', + user='', + profile='test', + timeout=30, + port=5439 ) - with mock.patch.object( - RedshiftAdapter.ConnectionManager, - 'fetch_cluster_credentials', - new=fetch_cluster_credentials - ): - creds = RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials) + @mock.patch('redshift_connector.connect', Mock()) + @mock.patch('boto3.Session', Mock()) + def test_explicit_iam_serverless_with_profile(self): + self.config.credentials = self.config.credentials.replace( + method='iam', + iam_profile='test', + host='doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com' + ) + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + iam=True, + host='doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com', + database='redshift', + cluster_identifier=None, + region='us-east-2', + auto_create=False, + db_groups=[], + db_user='root', + password='', + user='', + profile='test', + timeout=30, + port=5439 + ) - expected_creds = self.config.credentials.replace(password='tmp_password') - self.assertEqual(creds, expected_creds) + @mock.patch('redshift_connector.connect', Mock()) + @mock.patch('boto3.Session', Mock()) + def test_serverless_iam_failure(self): + self.config.credentials = self.config.credentials.replace( + method='iam', + iam_profile='test', + host='doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com' + ) + with self.assertRaises(dbt.exceptions.FailedToConnectError) as context: + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + iam=True, + 
host='doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com', + database='redshift', + cluster_identifier=None, + region='us-east-2', + auto_create=False, + db_groups=[], + db_user='root', + password='', + user='', + profile='test', + port=5439, + timeout=30, + ) + self.assertTrue("'host' must be provided" in context.exception.msg) def test_iam_conn_optionals(self): @@ -114,53 +223,19 @@ def test_iam_conn_optionals(self): def test_invalid_auth_method(self): # we have to set method this way, otherwise it won't validate self.config.credentials.method = 'badmethod' - with self.assertRaises(FailedToConnectError) as context: - with mock.patch.object( - RedshiftAdapter.ConnectionManager, - 'fetch_cluster_credentials', - new=fetch_cluster_credentials - ): - RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials) - + connect_method_factory = RedshiftConnectMethodFactory(self.config.credentials) + connect_method_factory.get_connect_method() self.assertTrue('badmethod' in context.exception.msg) def test_invalid_iam_no_cluster_id(self): self.config.credentials = self.config.credentials.replace(method='iam') with self.assertRaises(FailedToConnectError) as context: - with mock.patch.object( - RedshiftAdapter.ConnectionManager, - 'fetch_cluster_credentials', - new=fetch_cluster_credentials - ): - RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials) + connect_method_factory = RedshiftConnectMethodFactory(self.config.credentials) + connect_method_factory.get_connect_method() self.assertTrue("'cluster_id' must be provided" in context.exception.msg) - def test_default_session_is_not_used_when_iam_used(self): - boto3.DEFAULT_SESSION = Mock() - self.config.credentials = self.config.credentials.replace(method='iam') - self.config.credentials.cluster_id = 'clusterid' - with mock.patch('dbt.adapters.redshift.connections.boto3.Session'): - RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials) - self.assertEqual( - 
boto3.DEFAULT_SESSION.client.call_count, - 0, - "The redshift client should not be created using " - "the default session because the session object is not thread-safe" - ) - - def test_default_session_is_not_used_when_iam_not_used(self): - boto3.DEFAULT_SESSION = Mock() - self.config.credentials = self.config.credentials.replace(method=None) - with mock.patch('dbt.adapters.redshift.connections.boto3.Session'): - RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials) - self.assertEqual( - boto3.DEFAULT_SESSION.client.call_count, 0, - "The redshift client should not be created using " - "the default session because the session object is not thread-safe" - ) - def test_cancel_open_connections_empty(self): self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) @@ -172,7 +247,6 @@ def test_cancel_open_connections_master(self): def test_cancel_open_connections_single(self): master = mock_connection('master') model = mock_connection('model') - model.handle.get_backend_pid.return_value = 42 key = self.adapter.connections.get_thread_identifier() self.adapter.connections.thread_connections.update({ @@ -181,100 +255,15 @@ def test_cancel_open_connections_single(self): }) with mock.patch.object(self.adapter.connections, 'add_query') as add_query: query_result = mock.MagicMock() - add_query.return_value = (None, query_result) + cursor = mock.Mock() + cursor.fetchone.return_value = 42 + add_query.side_effect = [(None, cursor), (None, query_result)] self.assertEqual(len(list(self.adapter.cancel_open_connections())), 1) - - add_query.assert_called_once_with('select pg_terminate_backend(42)') + add_query.assert_has_calls([call('select pg_backend_pid()'), call('select pg_terminate_backend(42)')]) master.handle.get_backend_pid.assert_not_called() - @mock.patch('dbt.adapters.postgres.connections.psycopg2') - def test_default_keepalive(self, psycopg2): - connection = self.adapter.acquire_connection('dummy') - - psycopg2.connect.assert_not_called() - 
connection.handle # this "property" changes the state of the class - psycopg2.connect.assert_called_once_with( - dbname='redshift', - user='root', - host='thishostshouldnotexist', - password='password', - port=5439, - connect_timeout=10, - keepalives_idle=4, - application_name='dbt' - ) - - @mock.patch('dbt.adapters.postgres.connections.psycopg2') - def test_changed_keepalive(self, psycopg2): - self.config.credentials = self.config.credentials.replace(keepalives_idle=5) - connection = self.adapter.acquire_connection('dummy') - - psycopg2.connect.assert_not_called() - connection.handle # this "property" changes the state of the class - psycopg2.connect.assert_called_once_with( - dbname='redshift', - user='root', - host='thishostshouldnotexist', - password='password', - port=5439, - connect_timeout=10, - keepalives_idle=5, - application_name='dbt') - - @mock.patch('dbt.adapters.postgres.connections.psycopg2') - def test_search_path(self, psycopg2): - self.config.credentials = self.config.credentials.replace(search_path="test") - connection = self.adapter.acquire_connection('dummy') - - psycopg2.connect.assert_not_called() - connection.handle # this "property" changes the state of the class - psycopg2.connect.assert_called_once_with( - dbname='redshift', - user='root', - host='thishostshouldnotexist', - password='password', - port=5439, - connect_timeout=10, - options="-c search_path=test", - keepalives_idle=4, - application_name='dbt') - - @mock.patch('dbt.adapters.postgres.connections.psycopg2') - def test_search_path_with_space(self, psycopg2): - self.config.credentials = self.config.credentials.replace(search_path="test test") - connection = self.adapter.acquire_connection('dummy') - - psycopg2.connect.assert_not_called() - connection.handle # this "property" changes the state of the class - psycopg2.connect.assert_called_once_with( - dbname='redshift', - user='root', - host='thishostshouldnotexist', - password='password', - port=5439, - connect_timeout=10, - 
options=r"-c search_path=test\ test", - keepalives_idle=4, - application_name='dbt') - - @mock.patch('dbt.adapters.postgres.connections.psycopg2') - def test_set_zero_keepalive(self, psycopg2): - self.config.credentials = self.config.credentials.replace(keepalives_idle=0) - connection = self.adapter.acquire_connection('dummy') - - psycopg2.connect.assert_not_called() - connection.handle # this "property" changes the state of the class - psycopg2.connect.assert_called_once_with( - dbname='redshift', - user='root', - host='thishostshouldnotexist', - password='password', - port=5439, - connect_timeout=10, - application_name='dbt') - def test_dbname_verification_is_case_insensitive(self): # Override adapter settings from setUp() profile_cfg = { @@ -308,6 +297,50 @@ def test_dbname_verification_is_case_insensitive(self): self._adapter = RedshiftAdapter(self.config) self.adapter.verify_database('redshift') + def test_execute_with_fetch(self): + cursor = mock.Mock() + table = dbt.clients.agate_helper.empty_table() + with mock.patch.object(self.adapter.connections, 'add_query') as mock_add_query: + mock_add_query.return_value = ( + None, cursor) # when mock_add_query is called, it will always return None, cursor + with mock.patch.object(self.adapter.connections, 'get_response') as mock_get_response: + mock_get_response.return_value = None + with mock.patch.object(self.adapter.connections, + 'get_result_from_cursor') as mock_get_result_from_cursor: + mock_get_result_from_cursor.return_value = table + self.adapter.connections.execute(sql="select * from test", fetch=True) + mock_add_query.assert_called_once_with('select * from test', False) + mock_get_result_from_cursor.assert_called_once_with(cursor) + mock_get_response.assert_called_once_with(cursor) + + def test_execute_without_fetch(self): + cursor = mock.Mock() + with mock.patch.object(self.adapter.connections, 'add_query') as mock_add_query: + mock_add_query.return_value = ( + None, cursor) # when mock_add_query is 
called, it will always return None, cursor + with mock.patch.object(self.adapter.connections, 'get_response') as mock_get_response: + mock_get_response.return_value = None + with mock.patch.object(self.adapter.connections, + 'get_result_from_cursor') as mock_get_result_from_cursor: + self.adapter.connections.execute(sql="select * from test2", fetch=False) + mock_add_query.assert_called_once_with('select * from test2', False) + mock_get_result_from_cursor.assert_not_called() + mock_get_response.assert_called_once_with(cursor) + + def test_add_query_with_no_cursor(self): + with mock.patch.object(self.adapter.connections, 'get_thread_connection') as mock_get_thread_connection: + mock_get_thread_connection.return_value = None + with self.assertRaisesRegex(dbt.exceptions.DbtRuntimeError, + 'Tried to run invalid SQL: on '): + self.adapter.connections.add_query(sql="") + mock_get_thread_connection.assert_called_once() + + def test_add_query_success(self): + cursor = mock.Mock() + with mock.patch.object(dbt.adapters.redshift.connections.SQLConnectionManager, 'add_query') as mock_add_query: + mock_add_query.return_value = None, cursor + self.adapter.connections.add_query('select * from test3') + mock_add_query.assert_called_once_with('select * from test3', True, bindings=None, abridge_sql_log=False) class TestRedshiftAdapterConversions(TestAdapterConversions): def test_convert_text_type(self): From 2c3f7829a293e1f588ace5098a00d4b80d4a342a Mon Sep 17 00:00:00 2001 From: dave-connors-3 <73915542+dave-connors-3@users.noreply.github.com> Date: Thu, 16 Feb 2023 09:15:49 -0600 Subject: [PATCH 028/113] dbt Constraints / model contracts (#229) * redshift__get_columns_spec_ddl with just column names * create and insert option * fix check warning * extend postgres adapter tests * do not extend * pin core branch for pytest * changie entry * reverse conditional statement, add explicit transactions * add column name to log output, control whitespace * add DDL test, some whitespace 
control * ran precommit, samll edit to spacing on DDL * update datatype to match DDL * dynamic database name * add test for attempting to insert a null * update commands to be more specific in existing tests * update check to constraints_check * update project yml setting * add column check * remove redundant if statement * fix code formatting check * update redshift tests * Small cleanup * Reset to dbt-core main --------- Co-authored-by: Jeremy Cohen Co-authored-by: Michelle Ark --- .../unreleased/Features-20221209-105640.yaml | 7 ++++ dbt/include/redshift/macros/adapters.sql | 21 ++++++++++ .../macros/utils/get_columns_spec_ddl.sql | 42 +++++++++++++++++++ tests/functional/adapter/test_constraints.py | 40 ++++++++++++++++++ 4 files changed, 110 insertions(+) create mode 100644 .changes/unreleased/Features-20221209-105640.yaml create mode 100644 dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql create mode 100644 tests/functional/adapter/test_constraints.py diff --git a/.changes/unreleased/Features-20221209-105640.yaml b/.changes/unreleased/Features-20221209-105640.yaml new file mode 100644 index 000000000..da32fb499 --- /dev/null +++ b/.changes/unreleased/Features-20221209-105640.yaml @@ -0,0 +1,7 @@ +kind: Features +body: dbt-constraints for redshift +time: 2022-12-09T10:56:40.808781-06:00 +custom: + Author: dave-connors-3 + Issue: "227" + PR: "229" diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index ca888d566..1872ce1cc 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -43,6 +43,25 @@ {{ sql_header if sql_header is not none }} + {%- if config.get('constraints_enabled', False) %} + + create {% if temporary -%}temporary{%- endif %} table + {{ relation.include(database=(not temporary), schema=(not temporary)) }} + {{ get_columns_spec_ddl() }} + {{ get_assert_columns_equivalent(sql) }} + {% if backup == false -%}backup no{%- endif %} + {{ dist(_dist) }} 
+ {{ sort(_sort_type, _sort) }} + ; + + insert into {{ relation.include(database=(not temporary), schema=(not temporary)) }} + ( + {{ sql }} + ) + ; + + {%- else %} + create {% if temporary -%}temporary{%- endif %} table {{ relation.include(database=(not temporary), schema=(not temporary)) }} {% if backup == false -%}backup no{%- endif %} @@ -51,6 +70,8 @@ as ( {{ sql }} ); + + {%- endif %} {%- endmacro %} diff --git a/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql b/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql new file mode 100644 index 000000000..48fc5f576 --- /dev/null +++ b/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql @@ -0,0 +1,42 @@ +{% macro redshift__get_columns_spec_ddl() %} + {#- loop through user_provided_columns to create DDL with data types and constraints -#} + {%- set user_provided_columns = model['columns'] -%} + {%- set primary_keys = [] -%} + {%- set ddl_lines = [] -%} + + {%- for i in user_provided_columns -%} + {%- set col = user_provided_columns[i] -%} + {%- set constraints = col['constraints'] -%} + {%- set ns = namespace(not_null_line = '') -%} + + {%- for constraint in constraints -%} + {%- if constraint == 'primary key' -%} + {%- do primary_keys.append(col['name']) -%} + {%- elif constraint == 'not null' %} + {%- set ns.not_null_line = " not null" -%} + {%- endif -%} + {%- endfor -%} + + {%- set not_null_line = " not null" if not_null_col else "" -%} + + {%- set check = col['constraints_check'] -%} + {%- if check -%} + {{ exceptions.warn("We noticed you have `constraints_check` in your configs, these are NOT compatible with Redshift and will be ignored. 
See column `" ~ col['name'] ~ "`") }} + {%- endif -%} + + {%- set col_line = col['name'] ~ " " ~ col['data_type'] ~ ns.not_null_line -%} + {%- do ddl_lines.append(col_line) -%} + {%- endfor -%} + + {%- if primary_keys -%} + {%- set primary_key_line = "primary key(" ~ primary_keys | join(", ") ~")" -%} + {%- do ddl_lines.append(primary_key_line) -%} + {%- endif %} + + ( + {%- for line in ddl_lines %} + {{ line }}{{ "," if not loop.last }} + {%- endfor %} + ) + +{% endmacro %} diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py new file mode 100644 index 000000000..9c213c85d --- /dev/null +++ b/tests/functional/adapter/test_constraints.py @@ -0,0 +1,40 @@ +import pytest +from dbt.tests.util import relation_from_name +from dbt.tests.adapter.constraints.test_constraints import ( + BaseConstraintsColumnsEqual, + BaseConstraintsRuntimeEnforcement +) + +_expected_sql_redshift = """ +create table {0} ( + id integer not null, + color text, + date_day date, + primary key(id) +) ; +insert into {0} +( + select + 1 as id, + 'blue' as color, + cast('2019-01-01' as date) as date_day +) +; +""" + +class TestRedshiftConstraintsColumnsEqual(BaseConstraintsColumnsEqual): + pass + + +class TestRedshiftConstraintsRuntimeEnforcement(BaseConstraintsRuntimeEnforcement): + @pytest.fixture(scope="class") + def expected_sql(self, project): + relation = relation_from_name(project.adapter, "my_model") + tmp_relation = relation.incorporate( + path={"identifier": relation.identifier + "__dbt_tmp"} + ) + return _expected_sql_redshift.format(tmp_relation) + + @pytest.fixture(scope="class") + def expected_error_messages(self): + return ['Cannot insert a NULL value into column id'] From 52385f158f93a5c14e04573bc03cfa7fd63e07d9 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 21 Feb 2023 09:42:26 -0600 Subject: [PATCH 029/113] add new workflow (#321) * add new workflow * rename workflow extension, update comments * update PR body --- 
.github/workflows/cut-release-branch.yml | 42 ++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 .github/workflows/cut-release-branch.yml diff --git a/.github/workflows/cut-release-branch.yml b/.github/workflows/cut-release-branch.yml new file mode 100644 index 000000000..0d4702c41 --- /dev/null +++ b/.github/workflows/cut-release-branch.yml @@ -0,0 +1,42 @@ +# **what?** +# Calls a centralize3d workflow that will: +# 1. Cut a new branch (generally `*.latest`) +# 2. Also cleans up all files in `.changes/unreleased` and `.changes/previous version on +# `main` and bumps `main` to the input version. + +# **why?** +# Generally reduces the workload of engineers and reduces error. Allow automation. + +# **when?** +# This will run when called manually. + +name: Cut new release branch + +on: + workflow_dispatch: + inputs: + version_to_bump_main: + description: 'The alpha version main should bump to (ex. 1.6.0a1)' + required: true + new_branch_name: + description: 'The full name of the new branch (ex. 1.5.latest)' + required: true + +defaults: + run: + shell: bash + +permissions: + contents: write + +jobs: + cut_branch: + name: "Cut branch and clean up main for dbt-redshift" + uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main + with: + version_to_bump_main: ${{ inputs.version_to_bump_main }} + new_branch_name: ${{ inputs.new_branch_name }} + PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch" + PR_body: "This PR will fail CI until the dbt-core PR has been merged due to release version conflicts. dev-requirements.txt needs to be updated to have the dbt-core dependencies point to this new branch." 
+ secrets: + FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }} From 2f0e9f13d908730a06b683a2fea5fbd4c49b8d21 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 21 Feb 2023 14:36:28 -0600 Subject: [PATCH 030/113] fix release bugs on redshift for version and workflow (#326) (#328) * fix release bugs on redshift for version and workflow * fix the serialize pattern * point to main # Conflicts: # .bumpversion.cfg Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .bumpversion.cfg | 12 ++++++++---- .github/workflows/nightly-release.yml | 15 +++++++++++---- setup.py | 3 ++- 3 files changed, 21 insertions(+), 9 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 7fe7b8e67..f34375db8 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -4,12 +4,16 @@ current_version = 1.5.0a1 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number - (((?Pa|b|rc) # optional pre-release type - ?(?P[\d]+?)) # optional pre-release version number - \.?(?P[a-z0-9]+\+[a-z]+)? # optional nightly release indicator - )? # expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457+nightly`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0` + (?P # optional pre-release - ex: a1, b2, rc25 + (?Pa|b|rc) # pre-release type + (?P[\d]+) # pre-release version number + )? + ( # optional nightly release indicator + \.(?Pdev[0-9]+) # ex: .dev02142023 + )? 
# expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0` serialize = {major}.{minor}.{patch}{prekind}{num}.{nightly} + {major}.{minor}.{patch}.{nightly} {major}.{minor}.{patch}{prekind}{num} {major}.{minor}.{patch} commit = False diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml index b668d62ec..54c5fdc69 100644 --- a/.github/workflows/nightly-release.yml +++ b/.github/workflows/nightly-release.yml @@ -26,7 +26,7 @@ defaults: shell: bash env: - RELEASE_BRANCH: "main" + RELEASE_BRANCH: "1.4.latest" jobs: aggregate-release-data: @@ -65,10 +65,17 @@ jobs: id: current-date run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT + # Bump to the next patch because when this is a previously released patch, the changelog + # markdown will already exist and cause a failure in another step + - name: "Bump Patch Number" + id: bump_patch + run: | + echo "patch=$((${{ steps.semver.outputs.patch }}+1))" >> $GITHUB_OUTPUT + - name: "Generate Nightly Release Version Number" id: nightly-release-version run: | - number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}+nightly" + number="${{ steps.semver.outputs.major }}.${{ steps.semver.outputs.minor }}.${{ steps.bump_patch.outputs.patch }}.dev${{ steps.current-date.outputs.date }}" echo "number=$number" >> $GITHUB_OUTPUT - name: "Audit Nightly Release Version And Parse Into Parts" @@ -98,12 +105,12 @@ jobs: uses: ./.github/workflows/release.yml with: sha: ${{ needs.aggregate-release-data.outputs.commit_sha }} - target_branch: ${{ needs.aggregate-release-data.outputs.release-branch }} + target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} version_number: ${{ needs.aggregate-release-data.outputs.version_number }} build_script_path: "scripts/build-dist.sh" env_setup_script_path: "scripts/env-setup.sh" s3_bucket_name: "core-team-artifacts" - package_test_command: "dbt 
--version" + package_test_command: "dbt -h" test_run: true nightly_release: true secrets: inherit diff --git a/setup.py b/setup.py index db8cd45b4..f938f8e31 100644 --- a/setup.py +++ b/setup.py @@ -62,7 +62,8 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: plugin_version: the version of this plugin, this is an argument in case we ever want to unit test this """ try: - major, minor, plugin_patch = plugin_version.split(".") + # *_ may indicate a dev release which won't affect the core version needed + major, minor, plugin_patch, *_ = plugin_version.split(".", maxsplit=3) except ValueError: raise ValueError(f"Invalid version: {plugin_version}") From 6e1e2396cd848ae4cd1f76baf97f7c4147044b94 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 21 Feb 2023 14:55:20 -0800 Subject: [PATCH 031/113] Test updating dev-requirements.txt / setup.py on workflow dispatch (#329) * convert test_store_test_failures to functional test * Test updating dev-requirements.txt on workflow dispatch * Test updating dev-requirements.txt on workflow dispatch * add bump version call * install bump version * replace tests/postgres branches * remove `sh` * Update .github/workflows/integration.yml Co-authored-by: Emily Rockman * rename shell script and improve sed pattern to handle existing branch * fix sed cmd * fix sed cmd * fix sed cmd * add debug * change sed delimiter * unncomment version bump --------- Co-authored-by: Emily Rockman --- .github/scripts/update_dbt_core_branch.sh | 20 ++++++++++++++++++++ .github/workflows/integration.yml | 11 +++++++++++ 2 files changed, 31 insertions(+) create mode 100755 .github/scripts/update_dbt_core_branch.sh diff --git a/.github/scripts/update_dbt_core_branch.sh b/.github/scripts/update_dbt_core_branch.sh new file mode 100755 index 000000000..d28a40c35 --- /dev/null +++ b/.github/scripts/update_dbt_core_branch.sh @@ -0,0 +1,20 @@ +#!/bin/bash -e +set -e + +git_branch=$1 
+target_req_file="dev-requirements.txt" +core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${git_branch}#egg=dbt-core|g" +postgres_req_sed_pattern="s|dbt-core.git.*#egg=dbt-postgres|dbt-core.git@${git_branch}#egg=dbt-postgres|g" +tests_req_sed_pattern="s|dbt-core.git.*#egg=dbt-tests|dbt-core.git@${git_branch}#egg=dbt-tests|g" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$core_req_sed_pattern" $target_req_file + sed -i "" "$postgres_req_sed_pattern" $target_req_file + sed -i "" "$tests_req_sed_pattern" $target_req_file +else + sed -i "$core_req_sed_pattern" $target_req_file + sed -i "$postgres_req_sed_pattern" $target_req_file + sed -i "$tests_req_sed_pattern" $target_req_file +fi +core_version=$(curl "https://raw.githubusercontent.com/dbt-labs/dbt-core/${git_branch}/core/dbt/version.py" | grep "__version__ = *"|cut -d'=' -f2) +bumpversion --allow-dirty --new-version "$core_version" major diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 1fe33e148..1dcbee15e 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -33,6 +33,11 @@ on: pull_request_target: # manual trigger workflow_dispatch: + inputs: + dbt-core-branch: + description: "branch of dbt-core to use in dev-requirements.txt" + required: false + type: string # run this once per night to ensure no regressions from latest dbt-core changes schedule: - cron: '0 5 * * *' # 5 UTC @@ -160,6 +165,12 @@ jobs: python -m pip --version tox --version + - name: Update dev_requirements.txt + if: inputs.dbt-core-branch != '' + run: | + pip install bumpversion + ./.github/scripts/update_dbt_core_branch.sh ${{ inputs.dbt-core-branch }} + - name: Run tox (redshift) if: matrix.adapter == 'redshift' env: From d039e52ad0e0fa689f2f7cca8e7ee21c4c1e3e5c Mon Sep 17 00:00:00 2001 From: Github Build Bot Date: Wed, 22 Feb 2023 19:10:27 +0000 Subject: [PATCH 032/113] 
Bumping version to 1.5.0b1 and generate changelog --- .bumpversion.cfg | 3 +-- .changes/1.5.0-b1.md | 16 ++++++++++++++++ .../Features-20221209-105640.yaml | 0 .../Features-20230127-155317.yaml | 0 .../Under the Hood-20230118-071542.yaml | 0 .../Under the Hood-20230130-171158.yaml | 0 .../Under the Hood-20230130-205628.yaml | 0 CHANGELOG.md | 19 +++++++++++++++++++ dbt/adapters/redshift/__version__.py | 2 +- 9 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 .changes/1.5.0-b1.md rename .changes/{unreleased => 1.5.0}/Features-20221209-105640.yaml (100%) rename .changes/{unreleased => 1.5.0}/Features-20230127-155317.yaml (100%) rename .changes/{unreleased => 1.5.0}/Under the Hood-20230118-071542.yaml (100%) rename .changes/{unreleased => 1.5.0}/Under the Hood-20230130-171158.yaml (100%) rename .changes/{unreleased => 1.5.0}/Under the Hood-20230130-205628.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f34375db8..8c66bdf30 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,5 @@ [bumpversion] -current_version = 1.5.0a1 - +current_version = 1.5.0b1 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.5.0-b1.md b/.changes/1.5.0-b1.md new file mode 100644 index 000000000..0f47774a8 --- /dev/null +++ b/.changes/1.5.0-b1.md @@ -0,0 +1,16 @@ +## dbt-redshift 1.5.0-b1 - February 22, 2023 + +### Features + +- dbt-constraints for redshift ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) +- Stand-alone Python module for RedshiftColumn ([#290](https://github.com/dbt-labs/dbt-redshift/issues/290)) + +### Under the Hood + +- Replace psycopg2 connector with Redshift python connector when connecting to Redshift ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) +- remove tox call to integration tests ([#257](https://github.com/dbt-labs/dbt-redshift/issues/257)) +- Convert Backup Table tests 
([#293](https://github.com/dbt-labs/dbt-redshift/issues/293)) + +### Contributors +- [@dave-connors-3](https://github.com/dave-connors-3) ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) +- [@sathiish-kumar](https://github.com/sathiish-kumar) ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) diff --git a/.changes/unreleased/Features-20221209-105640.yaml b/.changes/1.5.0/Features-20221209-105640.yaml similarity index 100% rename from .changes/unreleased/Features-20221209-105640.yaml rename to .changes/1.5.0/Features-20221209-105640.yaml diff --git a/.changes/unreleased/Features-20230127-155317.yaml b/.changes/1.5.0/Features-20230127-155317.yaml similarity index 100% rename from .changes/unreleased/Features-20230127-155317.yaml rename to .changes/1.5.0/Features-20230127-155317.yaml diff --git a/.changes/unreleased/Under the Hood-20230118-071542.yaml b/.changes/1.5.0/Under the Hood-20230118-071542.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20230118-071542.yaml rename to .changes/1.5.0/Under the Hood-20230118-071542.yaml diff --git a/.changes/unreleased/Under the Hood-20230130-171158.yaml b/.changes/1.5.0/Under the Hood-20230130-171158.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20230130-171158.yaml rename to .changes/1.5.0/Under the Hood-20230130-171158.yaml diff --git a/.changes/unreleased/Under the Hood-20230130-205628.yaml b/.changes/1.5.0/Under the Hood-20230130-205628.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20230130-205628.yaml rename to .changes/1.5.0/Under the Hood-20230130-205628.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index bb9d7a4bb..d23b8f543 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,25 @@ - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. 
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) + +## dbt-redshift 1.5.0-b1 - February 22, 2023 + +### Features + +- dbt-constraints for redshift ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) +- Stand-alone Python module for RedshiftColumn ([#290](https://github.com/dbt-labs/dbt-redshift/issues/290)) + +### Under the Hood + +- Replace psycopg2 connector with Redshift python connector when connecting to Redshift ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) +- remove tox call to integration tests ([#257](https://github.com/dbt-labs/dbt-redshift/issues/257)) +- Convert Backup Table tests ([#293](https://github.com/dbt-labs/dbt-redshift/issues/293)) + +### Contributors +- [@dave-connors-3](https://github.com/dave-connors-3) ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) +- [@sathiish-kumar](https://github.com/sathiish-kumar) ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) + + ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 219c289b1..c3758128c 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.5.0a1" +version = "1.5.0b1" From af2edb410eddcf4ceb648d1e97ccf79e8cf0838a Mon Sep 17 00:00:00 2001 From: Gerda Shank Date: Thu, 23 Feb 2023 14:48:47 -0500 Subject: [PATCH 033/113] Rename "constraints_enabled" to "contract" (#336) --- .changes/unreleased/Under the 
Hood-20230223-110314.yaml | 6 ++++++ dbt/include/redshift/macros/adapters.sql | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Under the Hood-20230223-110314.yaml diff --git a/.changes/unreleased/Under the Hood-20230223-110314.yaml b/.changes/unreleased/Under the Hood-20230223-110314.yaml new file mode 100644 index 000000000..ef1e35744 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230223-110314.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Rename constraints_enabled to contract +time: 2023-02-23T11:03:14.344028-05:00 +custom: + Author: gshank + Issue: "330" diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index 1872ce1cc..3d81e7e92 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -43,7 +43,7 @@ {{ sql_header if sql_header is not none }} - {%- if config.get('constraints_enabled', False) %} + {%- if config.get('contract', False) %} create {% if temporary -%}temporary{%- endif %} table {{ relation.include(database=(not temporary), schema=(not temporary)) }} From b50de6bdd181e001733f67c5674454f431f09a4c Mon Sep 17 00:00:00 2001 From: Nathaniel May Date: Thu, 23 Feb 2023 17:41:59 -0500 Subject: [PATCH 034/113] add codeowners file (#337) --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..f6283d123 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +# This codeowners file is used to ensure all PRs require reviews from the adapters team + +* @dbt-labs/core-adapters From 5a70005c22efefeead23e696c7e65b6ef31d302f Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 23 Feb 2023 18:12:29 -0500 Subject: [PATCH 035/113] CT-1960: test conversion - simple snapshot tests (#309) * simple snapshot tests complete * move single-module 
test packages up to adapter test root --- tests/functional/adapter/common.py | 135 ++++++ .../adapter/snapshot_tests/seeds.py | 33 ++ .../adapter/snapshot_tests/snapshots.py | 27 ++ .../adapter/snapshot_tests/test_snapshot.py | 206 +++++++++ .../{column_types => }/test_column_types.py | 0 .../test_late_binding_view.py | 0 .../test_query_comment.py | 0 .../test_relation_name.py | 0 .../test_store_test_failures.py | 0 .../add_column_to_source_bq.sql | 56 --- .../check_snapshots_test_current.sql | 51 --- .../check-snapshots/check_cols_cycle.sql | 33 -- .../custom-snapshot-macros/custom.sql | 18 - .../invalidate_bigquery.sql | 12 - .../invalidate_postgres.sql | 27 -- .../invalidate_snowflake.sql | 12 - .../macros/test_no_overlaps.sql | 85 ---- .../models-collision/snapshot_actual.sql | 1 - .../simple_snapshot_test/models-slow/gen.sql | 44 -- .../simple_snapshot_test/models/.gitkeep | 0 .../models/ref_snapshot.sql | 1 - .../simple_snapshot_test/models/schema.yml | 5 - .../integration/simple_snapshot_test/seed.sql | 220 ---------- .../simple_snapshot_test/seed_bq.sql | 81 ---- .../simple_snapshot_test/seed_longtext.sql | 9 - .../simple_snapshot_test/seed_pg.sql | 223 ---------- .../simple_snapshot_test/seeds/seed.csv | 4 - .../seeds/seed_newcol.csv | 4 - .../test-check-col-snapshots-bq/snapshot.sql | 29 -- .../snapshot.sql | 9 - .../test-check-col-snapshots/snapshot.sql | 28 -- .../test-snapshots-bq/snapshot.sql | 19 - .../test_snapshot.sql | 32 -- .../snapshot.sql | 55 --- .../test-snapshots-checkall/snapshot.sql | 4 - .../test-snapshots-invalid/snapshot.sql | 13 - .../test-snapshots-longtext/snapshot.sql | 12 - .../snapshot.sql | 14 - .../snapshot.sql | 14 - .../test-snapshots-pg-custom/snapshot.sql | 14 - .../test-snapshots-pg/snapshot.sql | 19 - .../snapshot.sql | 41 -- .../test-snapshots-select/snapshot.sql | 44 -- .../test_timestamps.sql | 23 - .../test-snapshots-slow/snapshot.sql | 21 - .../test_simple_snapshot.py | 397 ------------------ 
.../test_snapshot_check_cols.py | 40 -- .../simple_snapshot_test/update.sql | 261 ------------ .../simple_snapshot_test/update_bq.sql | 78 ---- 49 files changed, 401 insertions(+), 2053 deletions(-) create mode 100644 tests/functional/adapter/common.py create mode 100644 tests/functional/adapter/snapshot_tests/seeds.py create mode 100644 tests/functional/adapter/snapshot_tests/snapshots.py create mode 100644 tests/functional/adapter/snapshot_tests/test_snapshot.py rename tests/functional/adapter/{column_types => }/test_column_types.py (100%) rename tests/functional/adapter/{redshift_test => }/test_late_binding_view.py (100%) rename tests/functional/adapter/{query_comment_tests => }/test_query_comment.py (100%) rename tests/functional/adapter/{relation_name_tests => }/test_relation_name.py (100%) rename tests/functional/adapter/{store_test_failures_tests => }/test_store_test_failures.py (100%) delete mode 100644 tests/integration/simple_snapshot_test/add_column_to_source_bq.sql delete mode 100644 tests/integration/simple_snapshot_test/check-snapshots-expected/check_snapshots_test_current.sql delete mode 100644 tests/integration/simple_snapshot_test/check-snapshots/check_cols_cycle.sql delete mode 100644 tests/integration/simple_snapshot_test/custom-snapshot-macros/custom.sql delete mode 100644 tests/integration/simple_snapshot_test/invalidate_bigquery.sql delete mode 100644 tests/integration/simple_snapshot_test/invalidate_postgres.sql delete mode 100644 tests/integration/simple_snapshot_test/invalidate_snowflake.sql delete mode 100644 tests/integration/simple_snapshot_test/macros/test_no_overlaps.sql delete mode 100644 tests/integration/simple_snapshot_test/models-collision/snapshot_actual.sql delete mode 100644 tests/integration/simple_snapshot_test/models-slow/gen.sql delete mode 100644 tests/integration/simple_snapshot_test/models/.gitkeep delete mode 100644 tests/integration/simple_snapshot_test/models/ref_snapshot.sql delete mode 100644 
tests/integration/simple_snapshot_test/models/schema.yml delete mode 100644 tests/integration/simple_snapshot_test/seed.sql delete mode 100644 tests/integration/simple_snapshot_test/seed_bq.sql delete mode 100644 tests/integration/simple_snapshot_test/seed_longtext.sql delete mode 100644 tests/integration/simple_snapshot_test/seed_pg.sql delete mode 100644 tests/integration/simple_snapshot_test/seeds/seed.csv delete mode 100644 tests/integration/simple_snapshot_test/seeds/seed_newcol.csv delete mode 100644 tests/integration/simple_snapshot_test/test-check-col-snapshots-bq/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-check-col-snapshots-noconfig/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-check-col-snapshots/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-bq/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-changing-strategy-tests/test_snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-changing-strategy/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-checkall/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-invalid/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-longtext/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-pg-custom-invalid/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-pg-custom-namespaced/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-pg-custom/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-pg/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-select-noconfig/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-select/snapshot.sql delete mode 100644 
tests/integration/simple_snapshot_test/test-snapshots-slow-tests/test_timestamps.sql delete mode 100644 tests/integration/simple_snapshot_test/test-snapshots-slow/snapshot.sql delete mode 100644 tests/integration/simple_snapshot_test/test_simple_snapshot.py delete mode 100644 tests/integration/simple_snapshot_test/test_snapshot_check_cols.py delete mode 100644 tests/integration/simple_snapshot_test/update.sql delete mode 100644 tests/integration/simple_snapshot_test/update_bq.sql diff --git a/tests/functional/adapter/common.py b/tests/functional/adapter/common.py new file mode 100644 index 000000000..914e3fcf8 --- /dev/null +++ b/tests/functional/adapter/common.py @@ -0,0 +1,135 @@ +from typing import Dict, List + +from dbt.tests.util import relation_from_name +from dbt.tests.fixtures.project import TestProjInfo + + +def get_records(project: TestProjInfo, table: str, select: str = None, where: str = None) -> List[tuple]: + """ + Gets records from a single table in a dbt project + + Args: + project: the dbt project that contains the table + table: the name of the table without a schema + select: the selection clause; defaults to all columns (*) + where: the where clause to apply, if any; defaults to all records + + Returns: + A list of records with each record as a tuple + """ + table_name = relation_from_name(project.adapter, table) + select_clause = select or "*" + where_clause = where or "1 = 1" + sql = f""" + select {select_clause} + from {table_name} + where {where_clause} + """ + return [tuple(record) for record in project.run_sql(sql, fetch="all")] + + +def update_records(project: TestProjInfo, table: str, updates: Dict[str, str], where: str = None): + """ + Applies updates to a table in a dbt project + + Args: + project: the dbt project that contains the table + table: the name of the table without a schema + updates: the updates to be applied in the form {'field_name': 'expression to be applied'} + where: the where clause to apply, if any; defaults to all 
records + """ + table_name = relation_from_name(project.adapter, table) + set_clause = ', '.join([' = '.join([field, expression]) for field, expression in updates.items()]) + where_clause = where or "1 = 1" + sql = f""" + update {table_name} + set {set_clause} + where {where_clause} + """ + project.run_sql(sql) + + +def insert_records(project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None): + """ + Inserts records from one table into another table in a dbt project + + Args: + project: the dbt project that contains the table + to_table: the name of the table, without a schema, in which the records will be inserted + from_table: the name of the table, without a schema, which contains the records to be inserted + select: the selection clause to apply on `from_table`; defaults to all columns (*) + where: the where clause to apply on `from_table`, if any; defaults to all records + """ + to_table_name = relation_from_name(project.adapter, to_table) + from_table_name = relation_from_name(project.adapter, from_table) + select_clause = select or "*" + where_clause = where or "1 = 1" + sql = f""" + insert into {to_table_name} + select {select_clause} + from {from_table_name} + where {where_clause} + """ + project.run_sql(sql) + + +def delete_records(project: TestProjInfo, table: str, where: str = None): + """ + Deletes records from a table in a dbt project + + Args: + project: the dbt project that contains the table + table: the name of the table without a schema + where: the where clause to apply, if any; defaults to all records + """ + table_name = relation_from_name(project.adapter, table) + where_clause = where or "1 = 1" + sql = f""" + delete from {table_name} + where {where_clause} + """ + project.run_sql(sql) + + +def clone_table(project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None): + """ + Creates a new table based on another table in a dbt project + + Args: + project: the dbt project that contains the 
 table + to_table: the name of the table, without a schema, to be created + from_table: the name of the table, without a schema, to be cloned + select: the selection clause to apply on `from_table`; defaults to all columns (*) + where: the where clause to apply on `from_table`, if any; defaults to all records + """ + to_table_name = relation_from_name(project.adapter, to_table) + from_table_name = relation_from_name(project.adapter, from_table) + select_clause = select or "*" + where_clause = where or "1 = 1" + sql = f"drop table if exists {to_table_name}" + project.run_sql(sql) + sql = f""" + create table {to_table_name} as + select {select_clause} + from {from_table_name} + where {where_clause} + """ + project.run_sql(sql) + + +def add_column(project: TestProjInfo, table: str, column: str, definition: str): + """ + Adds a column to a table in a dbt project + + Args: + project: the dbt project that contains the table + table: the name of the table without a schema + column: the name of the new column + definition: the definition of the new column, e.g.
'varchar(20) default null' + """ + table_name = relation_from_name(project.adapter, table) + sql = f""" + alter table {table_name} + add column {column} {definition} + """ + project.run_sql(sql) diff --git a/tests/functional/adapter/snapshot_tests/seeds.py b/tests/functional/adapter/snapshot_tests/seeds.py new file mode 100644 index 000000000..262ddd2f3 --- /dev/null +++ b/tests/functional/adapter/snapshot_tests/seeds.py @@ -0,0 +1,33 @@ +SEED_CSV = """ +id,first_name,last_name,email,gender,ip_address,updated_at +1,Judith,Kennedy,jkennedy0@phpbb.com,Female,54.60.24.128,2015-12-24 +2,Arthur,Kelly,akelly1@eepurl.com,Male,62.56.24.215,2015-10-28 +3,Rachel,Moreno,rmoreno2@msu.edu,Female,31.222.249.23,2016-04-05 +4,Ralph,Turner,rturner3@hp.com,Male,157.83.76.114,2016-08-08 +5,Laura,Gonzales,lgonzales4@howstuffworks.com,Female,30.54.105.168,2016-09-01 +6,Katherine,Lopez,klopez5@yahoo.co.jp,Female,169.138.46.89,2016-08-30 +7,Jeremy,Hamilton,jhamilton6@mozilla.org,Male,231.189.13.133,2016-07-17 +8,Heather,Rose,hrose7@goodreads.com,Female,87.165.201.65,2015-12-29 +9,Gregory,Kelly,gkelly8@trellian.com,Male,154.209.99.7,2016-03-24 +10,Rachel,Lopez,rlopez9@themeforest.net,Female,237.165.82.71,2016-08-20 +11,Donna,Welch,dwelcha@shutterfly.com,Female,103.33.110.138,2016-02-27 +12,Russell,Lawrence,rlawrenceb@qq.com,Male,189.115.73.4,2016-06-11 +13,Michelle,Montgomery,mmontgomeryc@scientificamerican.com,Female,243.220.95.82,2016-06-18 +14,Walter,Castillo,wcastillod@pagesperso-orange.fr,Male,71.159.238.196,2016-10-06 +15,Robin,Mills,rmillse@vkontakte.ru,Female,172.190.5.50,2016-10-31 +16,Raymond,Holmes,rholmesf@usgs.gov,Male,148.153.166.95,2016-10-03 +17,Gary,Bishop,gbishopg@plala.or.jp,Male,161.108.182.13,2016-08-29 +18,Anna,Riley,arileyh@nasa.gov,Female,253.31.108.22,2015-12-11 +19,Sarah,Knight,sknighti@foxnews.com,Female,222.220.123.177,2016-09-26 +20,Phyllis,Fox,null,Female,163.191.232.95,2016-08-21 +21,Judy,Robinson,jrobinsonk@blogs.com,Female,208.21.192.232,2016-09-18 
+22,Kevin,Alvarez,kalvarezl@buzzfeed.com,Male,228.106.146.9,2016-07-29 +23,Barbara,Carr,bcarrm@pen.io,Female,106.165.140.17,2015-09-24 +24,William,Watkins,wwatkinsn@guardian.co.uk,Male,78.155.84.6,2016-03-08 +25,Judy,Cooper,jcoopero@google.com.au,Female,24.149.123.184,2016-10-05 +26,Shirley,Castillo,scastillop@samsung.com,Female,129.252.181.12,2016-06-20 +27,Justin,Harper,jharperq@opera.com,Male,131.172.103.218,2016-05-21 +28,Marie,Medina,mmedinar@nhs.uk,Female,188.119.125.67,2015-10-08 +29,Kelly,Edwards,kedwardss@phoca.cz,Female,47.121.157.66,2015-09-15 +30,Carl,Coleman,ccolemant@wikipedia.org,Male,82.227.154.83,2016-05-26 +""".strip() diff --git a/tests/functional/adapter/snapshot_tests/snapshots.py b/tests/functional/adapter/snapshot_tests/snapshots.py new file mode 100644 index 000000000..e8ea5a7e2 --- /dev/null +++ b/tests/functional/adapter/snapshot_tests/snapshots.py @@ -0,0 +1,27 @@ +SNAPSHOT_TIMESTAMP_SQL = """ +{% snapshot snapshot %} + {{ config( + target_database=database, + target_schema=schema, + unique_key='id', + strategy='timestamp', + updated_at='updated_at', + invalidate_hard_deletes=True, + ) }} + select * from {{ ref('fact') }} +{% endsnapshot %} +""" + + +SNAPSHOT_CHECK_SQL = """ +{% snapshot snapshot %} + {{ config( + target_database=database, + target_schema=schema, + unique_key='id', + strategy='check', + check_cols=['email'], + ) }} + select * from {{ ref('fact') }} +{% endsnapshot %} +""" diff --git a/tests/functional/adapter/snapshot_tests/test_snapshot.py b/tests/functional/adapter/snapshot_tests/test_snapshot.py new file mode 100644 index 000000000..0f6153f47 --- /dev/null +++ b/tests/functional/adapter/snapshot_tests/test_snapshot.py @@ -0,0 +1,206 @@ +from typing import Dict, List, Iterable + +import pytest + +from dbt.tests.util import run_dbt + +from tests.functional.adapter import common +from tests.functional.adapter.snapshot_tests import seeds, snapshots + + +MODEL_FACT_SQL = """ +{{ config(materialized="table") }} +select * 
from {{ ref('seed') }} +where id between 1 and 20 +""" + + +class SnapshotBase: + + @pytest.fixture(scope="class") + def seeds(self): + """ + This seed file contains all records needed for tests, including records which will be inserted after the + initial snapshot. This makes it so that Redshift creates the correct size varchar columns. This table + will only need to be loaded once at the class level. It will never be altered, hence requires no further + setup or teardown. + """ + return {"seed.csv": seeds.SEED_CSV} + + @pytest.fixture(scope="class") + def models(self): + """ + This will be the working base table. It will be altered by each test, hence will require setup and + teardown at the test case level. See `self._setup_method(self, project)`. + """ + return {"fact.sql": MODEL_FACT_SQL} + + @pytest.fixture(scope="class", autouse=True) + def _setup_class(self, project): + """ + Load `seed` once for the whole class + """ + run_dbt(["seed"]) + + @pytest.fixture(scope="function", autouse=True) + def _setup_method(self, project): + """ + Initialize `fact` and `snapshot` for every test case. + Only load the first 20 `seed` records into `fact`; withhold 10 records as "new" (e.g. to test inserts). + + Make the project a class variable to simplify function calls and make the code more readable. + For some reason this doesn't work in the class-scoped fixture, but does in the function-scoped fixture. 
+ """ + self.project = project + self.create_fact_from_seed("id between 1 and 20") + run_dbt(["snapshot"]) + yield + self.delete_snapshot_records() + self.delete_fact_records() + + def update_fact_records(self, updates: Dict[str, str], where: str = None): + common.update_records(self.project, "fact", updates, where) + + def insert_fact_records(self, where: str = None): + common.insert_records(self.project, "fact", "seed", "*", where) + + def delete_fact_records(self, where: str = None): + common.delete_records(self.project, "fact", where) + + def add_fact_column(self, column: str = None, definition: str = None): + common.add_column(self.project, "fact", column, definition) + + def create_fact_from_seed(self, where: str = None): + common.clone_table(self.project, "fact", "seed", "*", where) + + def get_snapshot_records(self, select: str = None, where: str = None) -> List[tuple]: + return common.get_records(self.project, "snapshot", select, where) + + def delete_snapshot_records(self): + common.delete_records(self.project, "snapshot") + + def _assert_results( + self, + ids_with_current_snapshot_records: Iterable, + ids_with_closed_out_snapshot_records: Iterable + ): + """ + All test cases are checked by considering whether a source record's id has a value in `dbt_valid_to` + in `snapshot`. Each id can fall into one of the following cases: + + - The id has only one record in `snapshot`; it has a value in `dbt_valid_to` + - the record was hard deleted in the source + - The id has only one record in `snapshot`; it does not have a value in `dbt_valid_to` + - the record was not updated in the source + - the record was updated in the source, but not in a way that is tracked (e.g. 
via `strategy='check'`) + - The id has two records in `snapshot`; one has a value in `dbt_valid_to`, the other does not + - the record was altered in the source in a way that is tracked + - the record was hard deleted and revived + + Note: Because of the third scenario, ids may show up in both arguments of this method. + + Args: + ids_with_current_snapshot_records: a list/set/etc. of ids which are not end-dated + ids_with_closed_out_snapshot_records: a list/set/etc. of ids which are end-dated + """ + records = set(self.get_snapshot_records("id, dbt_valid_to is null as is_current")) + expected_records = set().union( + {(i, True) for i in ids_with_current_snapshot_records}, + {(i, False) for i in ids_with_closed_out_snapshot_records} + ) + assert records == expected_records + + +class TestSnapshot(SnapshotBase): + + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots.SNAPSHOT_TIMESTAMP_SQL} + + def test_updates_are_captured_by_snapshot(self, project): + """ + Update the last 5 records. Show that all ids are current, but the last 5 reflect updates. + """ + self.update_fact_records({"updated_at": "updated_at + interval '1 day'"}, "id between 16 and 20") + run_dbt(["snapshot"]) + self._assert_results( + ids_with_current_snapshot_records=range(1, 21), + ids_with_closed_out_snapshot_records=range(16, 21) + ) + + def test_inserts_are_captured_by_snapshot(self, project): + """ + Insert 10 records. Show that there are 30 records in `snapshot`, all of which are current. + """ + self.insert_fact_records("id between 21 and 30") + run_dbt(["snapshot"]) + self._assert_results( + ids_with_current_snapshot_records=range(1, 31), + ids_with_closed_out_snapshot_records=[] + ) + + def test_deletes_are_captured_by_snapshot(self, project): + """ + Hard delete the last five records. Show that there are now only 15 current records and 5 expired records. 
+ """ + self.delete_fact_records("id between 16 and 20") + run_dbt(["snapshot"]) + self._assert_results( + ids_with_current_snapshot_records=range(1, 16), + ids_with_closed_out_snapshot_records=range(16, 21) + ) + + def test_revives_are_captured_by_snapshot(self, project): + """ + Delete the last five records and run snapshot to collect that information, then revive 3 of those records. + Show that there are now 18 current records and 5 expired records. + """ + self.delete_fact_records("id between 16 and 20") + run_dbt(["snapshot"]) + self.insert_fact_records("id between 16 and 18") + run_dbt(["snapshot"]) + self._assert_results( + ids_with_current_snapshot_records=range(1, 19), + ids_with_closed_out_snapshot_records=range(16, 21) + ) + + def test_new_column_captured_by_snapshot(self, project): + """ + Add a column to `fact` and populate the last 10 records with a non-null value. + Show that all ids are current, but the last 10 reflect updates and the first 10 do not. + i.e. if the column is added, but not updated, the record does not reflect that it's updated + """ + self.add_fact_column("full_name", "varchar(200) default null") + self.update_fact_records( + { + "full_name": "first_name || ' ' || last_name", + "updated_at": "updated_at + interval '1 day'", + }, + "id between 11 and 20" + ) + run_dbt(["snapshot"]) + self._assert_results( + ids_with_current_snapshot_records=range(1, 21), + ids_with_closed_out_snapshot_records=range(11, 21) + ) + + +class TestSnapshotCheck(SnapshotBase): + + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots.SNAPSHOT_CHECK_SQL} + + def test_column_selection_is_reflected_in_snapshot(self, project): + """ + Update the first 10 records on a non-tracked column. + Update the middle 10 records on a tracked column. (hence records 6-10 are updated on both) + Show that all ids are current, and only the tracked column updates are reflected in `snapshot`. 
+ """ + self.update_fact_records({"last_name": "left(last_name, 3)"}, "id between 1 and 10") # not tracked + self.update_fact_records({"email": "left(email, 3)"}, "id between 6 and 15") # tracked + run_dbt(["snapshot"]) + self._assert_results( + ids_with_current_snapshot_records=range(1, 21), + ids_with_closed_out_snapshot_records=range(6, 16) + ) diff --git a/tests/functional/adapter/column_types/test_column_types.py b/tests/functional/adapter/test_column_types.py similarity index 100% rename from tests/functional/adapter/column_types/test_column_types.py rename to tests/functional/adapter/test_column_types.py diff --git a/tests/functional/adapter/redshift_test/test_late_binding_view.py b/tests/functional/adapter/test_late_binding_view.py similarity index 100% rename from tests/functional/adapter/redshift_test/test_late_binding_view.py rename to tests/functional/adapter/test_late_binding_view.py diff --git a/tests/functional/adapter/query_comment_tests/test_query_comment.py b/tests/functional/adapter/test_query_comment.py similarity index 100% rename from tests/functional/adapter/query_comment_tests/test_query_comment.py rename to tests/functional/adapter/test_query_comment.py diff --git a/tests/functional/adapter/relation_name_tests/test_relation_name.py b/tests/functional/adapter/test_relation_name.py similarity index 100% rename from tests/functional/adapter/relation_name_tests/test_relation_name.py rename to tests/functional/adapter/test_relation_name.py diff --git a/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py b/tests/functional/adapter/test_store_test_failures.py similarity index 100% rename from tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py rename to tests/functional/adapter/test_store_test_failures.py diff --git a/tests/integration/simple_snapshot_test/add_column_to_source_bq.sql b/tests/integration/simple_snapshot_test/add_column_to_source_bq.sql deleted file mode 100644 index 
e1babb82c..000000000 --- a/tests/integration/simple_snapshot_test/add_column_to_source_bq.sql +++ /dev/null @@ -1,56 +0,0 @@ - -create or replace table {schema}.seed as ( - - select *, - [ - struct( - 1 as field_1, - 2 as field_2 - ), - struct( - 3 as field_1, - 4 as field_2 - ) - ] as repeated_nested_field, - - struct( - 1 as field_1, - 2 as field_2 - ) as nested_field, - - [ - 1, - 2 - ] as repeated_field - - from {schema}.seed - -); - -create or replace table {schema}.snapshot_expected as ( - - select *, - [ - struct( - 1 as field_1, - 2 as field_2 - ), - struct( - 3 as field_1, - 4 as field_2 - ) - ] as repeated_nested_field, - - struct( - 1 as field_1, - 2 as field_2 - ) as nested_field, - - [ - 1, - 2 - ] as repeated_field - - from {schema}.snapshot_expected - -); diff --git a/tests/integration/simple_snapshot_test/check-snapshots-expected/check_snapshots_test_current.sql b/tests/integration/simple_snapshot_test/check-snapshots-expected/check_snapshots_test_current.sql deleted file mode 100644 index 414afb472..000000000 --- a/tests/integration/simple_snapshot_test/check-snapshots-expected/check_snapshots_test_current.sql +++ /dev/null @@ -1,51 +0,0 @@ - - -with query as ( - - -- check that the current value for id=1 is red - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 1 and color = 'red' and dbt_valid_to is null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that the previous 'red' value for id=1 is invalidated - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 1 and color = 'red' and dbt_valid_to is not null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that there's only one current record for id=2 - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 2 and color = 'pink' and dbt_valid_to is null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that the previous value for id=2 is represented 
- select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 2 and color = 'green' and dbt_valid_to is not null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that there are 5 records total in the table - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - ) = 5 then 0 else 1 end as failures - -) - -select * -from query -where failures = 1 diff --git a/tests/integration/simple_snapshot_test/check-snapshots/check_cols_cycle.sql b/tests/integration/simple_snapshot_test/check-snapshots/check_cols_cycle.sql deleted file mode 100644 index 8b36f35a1..000000000 --- a/tests/integration/simple_snapshot_test/check-snapshots/check_cols_cycle.sql +++ /dev/null @@ -1,33 +0,0 @@ - -{% snapshot check_cols_cycle %} - - {{ - config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['color'] - ) - }} - - {% if var('version') == 1 %} - - select 1 as id, 'red' as color union all - select 2 as id, 'green' as color - - {% elif var('version') == 2 %} - - select 1 as id, 'blue' as color union all - select 2 as id, 'green' as color - - {% elif var('version') == 3 %} - - select 1 as id, 'red' as color union all - select 2 as id, 'pink' as color - - {% else %} - {% do exceptions.raise_compiler_error("Got bad version: " ~ var('version')) %} - {% endif %} - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/custom-snapshot-macros/custom.sql b/tests/integration/simple_snapshot_test/custom-snapshot-macros/custom.sql deleted file mode 100644 index 4347088e4..000000000 --- a/tests/integration/simple_snapshot_test/custom-snapshot-macros/custom.sql +++ /dev/null @@ -1,18 +0,0 @@ -{# A "custom" strategy that's really just the timestamp one #} -{% macro snapshot_custom_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set primary_key = config['unique_key'] %} - {% set updated_at = config['updated_at'] %} - - {% set row_changed_expr 
-%} - ({{ snapshotted_rel }}.{{ updated_at }} < {{ current_rel }}.{{ updated_at }}) - {%- endset %} - - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} - - {% do return({ - "unique_key": primary_key, - "updated_at": updated_at, - "row_changed": row_changed_expr, - "scd_id": scd_id_expr - }) %} -{% endmacro %} diff --git a/tests/integration/simple_snapshot_test/invalidate_bigquery.sql b/tests/integration/simple_snapshot_test/invalidate_bigquery.sql deleted file mode 100644 index d4641d451..000000000 --- a/tests/integration/simple_snapshot_test/invalidate_bigquery.sql +++ /dev/null @@ -1,12 +0,0 @@ - --- update records 11 - 21. Change email and updated_at field -update {database}.{schema}.seed set - updated_at = timestamp_add(updated_at, interval 1 hour), - email = case when id = 20 then 'pfoxj@creativecommons.org' else concat('new_', email) end -where id >= 10 and id <= 20; - - --- invalidate records 11 - 21 -update {database}.{schema}.snapshot_expected set - dbt_valid_to = timestamp_add(updated_at, interval 1 hour) -where id >= 10 and id <= 20; diff --git a/tests/integration/simple_snapshot_test/invalidate_postgres.sql b/tests/integration/simple_snapshot_test/invalidate_postgres.sql deleted file mode 100644 index b0bef3c6c..000000000 --- a/tests/integration/simple_snapshot_test/invalidate_postgres.sql +++ /dev/null @@ -1,27 +0,0 @@ - --- update records 11 - 21. 
Change email and updated_at field -update {schema}.seed set - updated_at = updated_at + interval '1 hour', - email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end -where id >= 10 and id <= 20; - - --- invalidate records 11 - 21 -update {schema}.snapshot_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_castillo_expected set - dbt_valid_to = "1-updated_at" + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_alvarez_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_kelly_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; diff --git a/tests/integration/simple_snapshot_test/invalidate_snowflake.sql b/tests/integration/simple_snapshot_test/invalidate_snowflake.sql deleted file mode 100644 index 57c4b71d6..000000000 --- a/tests/integration/simple_snapshot_test/invalidate_snowflake.sql +++ /dev/null @@ -1,12 +0,0 @@ - --- update records 11 - 21. 
Change email and updated_at field -update {database}.{schema}.seed set - updated_at = DATEADD(hour, 1, updated_at), - email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end -where id >= 10 and id <= 20; - - --- invalidate records 11 - 21 -update {database}.{schema}.snapshot_expected set - dbt_valid_to = DATEADD(hour, 1, updated_at) -where id >= 10 and id <= 20; diff --git a/tests/integration/simple_snapshot_test/macros/test_no_overlaps.sql b/tests/integration/simple_snapshot_test/macros/test_no_overlaps.sql deleted file mode 100644 index 6d432193c..000000000 --- a/tests/integration/simple_snapshot_test/macros/test_no_overlaps.sql +++ /dev/null @@ -1,85 +0,0 @@ -{% macro get_snapshot_unique_id() -%} - {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} -{%- endmacro %} - -{% macro default__get_snapshot_unique_id() -%} - {% do return("id || '-' || first_name") %} -{%- endmacro %} - - -{% macro bigquery__get_snapshot_unique_id() -%} - {%- do return('concat(cast(id as string), "-", first_name)') -%} -{%- endmacro %} - -{# - mostly copy+pasted from dbt_utils, but I removed some parameters and added - a query that calls get_snapshot_unique_id -#} -{% test mutually_exclusive_ranges(model) %} - -with base as ( - select {{ get_snapshot_unique_id() }} as dbt_unique_id, - * - from {{ model }} -), -window_functions as ( - - select - dbt_valid_from as lower_bound, - coalesce(dbt_valid_to, '2099-1-1T00:00:01') as upper_bound, - - lead(dbt_valid_from) over ( - partition by dbt_unique_id - order by dbt_valid_from - ) as next_lower_bound, - - row_number() over ( - partition by dbt_unique_id - order by dbt_valid_from desc - ) = 1 as is_last_record - - from base - -), - -calc as ( - -- We want to return records where one of our assumptions fails, so we'll use - -- the `not` function with `and` statements so we can write our assumptions nore cleanly - select - *, - - -- For each record: lower_bound should be < upper_bound. 
- -- Coalesce it to return an error on the null case (implicit assumption - -- these columns are not_null) - coalesce( - lower_bound < upper_bound, - is_last_record - ) as lower_bound_less_than_upper_bound, - - -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound. - -- Coalesce it to handle null cases for the last record. - coalesce( - upper_bound = next_lower_bound, - is_last_record, - false - ) as upper_bound_equal_to_next_lower_bound - - from window_functions - -), - -validation_errors as ( - - select - * - from calc - - where not( - -- THE FOLLOWING SHOULD BE TRUE -- - lower_bound_less_than_upper_bound - and upper_bound_equal_to_next_lower_bound - ) -) - -select * from validation_errors -{% endtest %} diff --git a/tests/integration/simple_snapshot_test/models-collision/snapshot_actual.sql b/tests/integration/simple_snapshot_test/models-collision/snapshot_actual.sql deleted file mode 100644 index 43258a714..000000000 --- a/tests/integration/simple_snapshot_test/models-collision/snapshot_actual.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as id diff --git a/tests/integration/simple_snapshot_test/models-slow/gen.sql b/tests/integration/simple_snapshot_test/models-slow/gen.sql deleted file mode 100644 index 7e71a2bfd..000000000 --- a/tests/integration/simple_snapshot_test/models-slow/gen.sql +++ /dev/null @@ -1,44 +0,0 @@ - -{{ config(materialized='ephemeral') }} - - -/* - Generates 50 rows that "appear" to update every - second to a query-er. - - 1 2020-04-21 20:44:00-04 0 - 2 2020-04-21 20:43:59-04 59 - 3 2020-04-21 20:43:58-04 58 - 4 2020-04-21 20:43:57-04 57 - - .... 1 second later .... - - 1 2020-04-21 20:44:01-04 1 - 2 2020-04-21 20:44:00-04 0 - 3 2020-04-21 20:43:59-04 59 - 4 2020-04-21 20:43:58-04 58 - - This view uses pg_sleep(2) to make queries against - the view take a non-trivial amount of time - - Use statement_timestamp() as it changes during a transactions. 
- If we used now() or current_time or similar, then the timestamp - of the start of the transaction would be returned instead. -*/ - -with gen as ( - - select - id, - date_trunc('second', statement_timestamp()) - (interval '1 second' * id) as updated_at - - from generate_series(1, 10) id - -) - -select - id, - updated_at, - extract(seconds from updated_at)::int as seconds - -from gen, pg_sleep(2) diff --git a/tests/integration/simple_snapshot_test/models/.gitkeep b/tests/integration/simple_snapshot_test/models/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/integration/simple_snapshot_test/models/ref_snapshot.sql b/tests/integration/simple_snapshot_test/models/ref_snapshot.sql deleted file mode 100644 index c453929ce..000000000 --- a/tests/integration/simple_snapshot_test/models/ref_snapshot.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('snapshot_actual') }} diff --git a/tests/integration/simple_snapshot_test/models/schema.yml b/tests/integration/simple_snapshot_test/models/schema.yml deleted file mode 100644 index 259e55b95..000000000 --- a/tests/integration/simple_snapshot_test/models/schema.yml +++ /dev/null @@ -1,5 +0,0 @@ -version: 2 -snapshots: - - name: snapshot_actual - tests: - - mutually_exclusive_ranges diff --git a/tests/integration/simple_snapshot_test/seed.sql b/tests/integration/simple_snapshot_test/seed.sql deleted file mode 100644 index 8f3422e36..000000000 --- a/tests/integration/simple_snapshot_test/seed.sql +++ /dev/null @@ -1,220 +0,0 @@ -create table {database}.{schema}.seed ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -create table {database}.{schema}.snapshot_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME 
ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- seed inserts -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(1, 'Judith', 'Kennedy', 'jkennedy0@phpbb.com', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), -(2, 'Arthur', 'Kelly', 'akelly1@eepurl.com', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), -(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), -(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), -(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), -(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), -(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), -(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), -(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), -(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), -(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), -(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), -(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), -(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), -(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), -(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), -(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', 
'161.108.182.13', '2016-08-29 19:35:20'), -(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), -(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), -(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); - - --- populate snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed; - -create table {database}.{schema}.snapshot_castillo_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - "1-updated_at" TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE - -); - --- one entry -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Castillo'; - -create table {database}.{schema}.snapshot_alvarez_expected ( - id INTEGER, - first_name 
VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- 0 entries -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Alvarez'; - -create table {database}.{schema}.snapshot_kelly_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- 2 entries -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Kelly'; diff --git a/tests/integration/simple_snapshot_test/seed_bq.sql 
b/tests/integration/simple_snapshot_test/seed_bq.sql deleted file mode 100644 index 5ea93fee4..000000000 --- a/tests/integration/simple_snapshot_test/seed_bq.sql +++ /dev/null @@ -1,81 +0,0 @@ -create table {database}.{schema}.seed ( - id INT64, - first_name STRING, - last_name STRING, - email STRING, - gender STRING, - ip_address STRING, - updated_at TIMESTAMP -); - -create table {database}.{schema}.snapshot_expected ( - id INT64, - first_name STRING, - last_name STRING, - email STRING, - gender STRING, - ip_address STRING, - - -- snapshotting fields - updated_at TIMESTAMP, - dbt_valid_from TIMESTAMP, - dbt_valid_to TIMESTAMP, - dbt_scd_id STRING, - dbt_updated_at TIMESTAMP -); - - --- seed inserts -insert {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(1, 'Judith', 'Kennedy', 'jkennedy0@phpbb.com', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), -(2, 'Arthur', 'Kelly', 'akelly1@eepurl.com', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), -(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), -(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), -(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), -(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), -(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), -(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), -(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), -(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), -(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), -(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 
03:07:09'), -(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), -(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), -(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), -(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), -(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), -(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), -(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), -(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); - - --- populate snapshot table -insert {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - cast(null as timestamp) as dbt_valid_to, - updated_at as dbt_updated_at, - to_hex(md5(concat(cast(id as string), '-', first_name, '|', cast(updated_at as string)))) as dbt_scd_id -from {database}.{schema}.seed; - diff --git a/tests/integration/simple_snapshot_test/seed_longtext.sql b/tests/integration/simple_snapshot_test/seed_longtext.sql deleted file mode 100644 index a27008ea7..000000000 --- a/tests/integration/simple_snapshot_test/seed_longtext.sql +++ /dev/null @@ -1,9 +0,0 @@ -create table {database}.{schema}.super_long ( - id INTEGER, - longstring TEXT, - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -insert into {database}.{schema}.super_long (id, longstring, updated_at) VALUES -(1, 'short', current_timestamp), -(2, repeat('a', 500), current_timestamp); diff --git 
a/tests/integration/simple_snapshot_test/seed_pg.sql b/tests/integration/simple_snapshot_test/seed_pg.sql deleted file mode 100644 index a22a2359c..000000000 --- a/tests/integration/simple_snapshot_test/seed_pg.sql +++ /dev/null @@ -1,223 +0,0 @@ - create table {database}.{schema}.seed ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -create table {database}.{schema}.snapshot_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- seed inserts --- use the same email for two users to verify that duplicated check_cols values --- are handled appropriately -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), -(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), -(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), -(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), -(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), -(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), -(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), -(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), -(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 
21:18:16'), -(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), -(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), -(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), -(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), -(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), -(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), -(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), -(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), -(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), -(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), -(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); - - --- populate snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed; - - - -create table {database}.{schema}.snapshot_castillo_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - "1-updated_at" TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME 
ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- one entry -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Castillo'; - -create table {database}.{schema}.snapshot_alvarez_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- 0 entries -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Alvarez'; - -create table {database}.{schema}.snapshot_kelly_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from 
TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- 2 entries -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Kelly'; diff --git a/tests/integration/simple_snapshot_test/seeds/seed.csv b/tests/integration/simple_snapshot_test/seeds/seed.csv deleted file mode 100644 index 9da8d46ff..000000000 --- a/tests/integration/simple_snapshot_test/seeds/seed.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name -1,Judith -2,Arthur -3,Rachel diff --git a/tests/integration/simple_snapshot_test/seeds/seed_newcol.csv b/tests/integration/simple_snapshot_test/seeds/seed_newcol.csv deleted file mode 100644 index 005517bda..000000000 --- a/tests/integration/simple_snapshot_test/seeds/seed_newcol.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name,last_name -1,Judith,Kennedy -2,Arthur,Kelly -3,Rachel,Moreno diff --git a/tests/integration/simple_snapshot_test/test-check-col-snapshots-bq/snapshot.sql b/tests/integration/simple_snapshot_test/test-check-col-snapshots-bq/snapshot.sql deleted file mode 100644 index 9c8459756..000000000 --- a/tests/integration/simple_snapshot_test/test-check-col-snapshots-bq/snapshot.sql +++ /dev/null @@ -1,29 +0,0 @@ -{% snapshot snapshot_actual %} - {# this used to be check_cols=('email',), which ought to be totally valid, - but is not because type systems are hard. 
#} - {{ - config( - target_project=var('target_database', database), - target_dataset=var('target_schema', schema), - unique_key='concat(cast(id as string) , "-", first_name)', - strategy='check', - check_cols=['email'], - ) - }} - select * from `{{target.database}}`.`{{schema}}`.seed -{% endsnapshot %} - - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ - config( - target_project=var('target_database', database), - target_dataset=var('target_schema', schema), - unique_key='concat(cast(id as string) , "-", first_name)', - strategy='check', - check_cols='all', - ) - }} - select * from `{{target.database}}`.`{{schema}}`.seed -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-check-col-snapshots-noconfig/snapshot.sql b/tests/integration/simple_snapshot_test/test-check-col-snapshots-noconfig/snapshot.sql deleted file mode 100644 index daf4cf312..000000000 --- a/tests/integration/simple_snapshot_test/test-check-col-snapshots-noconfig/snapshot.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% snapshot snapshot_actual %} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ config(check_cols='all') }} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-check-col-snapshots/snapshot.sql b/tests/integration/simple_snapshot_test/test-check-col-snapshots/snapshot.sql deleted file mode 100644 index dd85ed753..000000000 --- a/tests/integration/simple_snapshot_test/test-check-col-snapshots/snapshot.sql +++ /dev/null @@ -1,28 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='check', - check_cols=['email'], - ) - }} - select * from {{target.database}}.{{schema}}.seed - -{% endsnapshot %} - -{# This should be exactly the same #} 
-{% snapshot snapshot_checkall %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='check', - check_cols='all', - ) - }} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-bq/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-bq/snapshot.sql deleted file mode 100644 index 7ffdedbcc..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-bq/snapshot.sql +++ /dev/null @@ -1,19 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_project=var('target_database', database), - target_dataset=var('target_schema', schema), - unique_key='concat(cast(id as string) , "-", first_name)', - strategy='timestamp', - updated_at='updated_at', - ) - }} - - {% if var('invalidate_hard_deletes', 'false') | as_bool %} - {{ config(invalidate_hard_deletes=True) }} - {% endif %} - - select * from `{{target.database}}`.`{{schema}}`.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy-tests/test_snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy-tests/test_snapshot.sql deleted file mode 100644 index e1184c353..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy-tests/test_snapshot.sql +++ /dev/null @@ -1,32 +0,0 @@ - -{# /* - Given the repro case for the snapshot build, we'd - expect to see both records have color='pink' - in their most recent rows. 
-*/ #} - -with expected as ( - - select 1 as id, 'pink' as color union all - select 2 as id, 'pink' as color - -), - -actual as ( - - select id, color - from {{ ref('my_snapshot') }} - where color = 'pink' - and dbt_valid_to is null - -) - -select * from expected -except -select * from actual - -union all - -select * from actual -except -select * from expected diff --git a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy/snapshot.sql deleted file mode 100644 index dd21fa63c..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy/snapshot.sql +++ /dev/null @@ -1,55 +0,0 @@ - -{# - REPRO: - 1. Run with check strategy - 2. Add a new ts column and run with check strategy - 3. Run with timestamp strategy on new ts column - - Expect: new entry is added for changed rows in (3) -#} - - -{% snapshot my_snapshot %} - - {#--------------- Configuration ------------ #} - - {{ config( - target_schema=schema, - unique_key='id' - ) }} - - {% if var('strategy') == 'timestamp' %} - {{ config(strategy='timestamp', updated_at='updated_at') }} - {% else %} - {{ config(strategy='check', check_cols=['color']) }} - {% endif %} - - {#--------------- Test setup ------------ #} - - {% if var('step') == 1 %} - - select 1 as id, 'blue' as color - union all - select 2 as id, 'red' as color - - {% elif var('step') == 2 %} - - -- change id=1 color from blue to green - -- id=2 is unchanged when using the check strategy - select 1 as id, 'green' as color, '2020-01-01'::date as updated_at - union all - select 2 as id, 'red' as color, '2020-01-01'::date as updated_at - - {% elif var('step') == 3 %} - - -- bump timestamp for both records. Expect that after this runs - -- using the timestamp strategy, both ids should have the color - -- 'pink' in the database. 
This should be in the future b/c we're - -- going to compare to the check timestamp, which will be _now_ - select 1 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at - union all - select 2 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at - - {% endif %} - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-checkall/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-checkall/snapshot.sql deleted file mode 100644 index b9cd002ca..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-checkall/snapshot.sql +++ /dev/null @@ -1,4 +0,0 @@ -{% snapshot my_snapshot %} - {{ config(check_cols='all', unique_key='id', strategy='check', target_database=database, target_schema=schema) }} - select * from {{ ref(var('seed_name', 'seed')) }} -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-invalid/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-invalid/snapshot.sql deleted file mode 100644 index 6d0561f62..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-invalid/snapshot.sql +++ /dev/null @@ -1,13 +0,0 @@ -{# make sure to never name this anything with `target_schema` in the name, or the test will be invalid! 
#} -{% snapshot missing_field_target_underscore_schema %} - {# missing the mandatory target_schema parameter #} - {{ - config( - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-longtext/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-longtext/snapshot.sql deleted file mode 100644 index 35a563e3f..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-longtext/snapshot.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% snapshot snapshot_actual %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.super_long -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-invalid/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-invalid/snapshot.sql deleted file mode 100644 index 2398e9e9a..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-invalid/snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot snapshot_actual %} - {# this custom strategy does not exist in the 'dbt' package #} - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='dbt.custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-namespaced/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-namespaced/snapshot.sql deleted file mode 100644 index 8b14b9d00..000000000 --- 
a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-namespaced/snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='test.custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg-custom/snapshot.sql deleted file mode 100644 index d59a0b60d..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom/snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg/snapshot.sql deleted file mode 100644 index ae5aac087..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-pg/snapshot.sql +++ /dev/null @@ -1,19 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - - {% if var('invalidate_hard_deletes', 'false') | as_bool %} - {{ config(invalidate_hard_deletes=True) }} - {% endif %} - - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-select-noconfig/snapshot.sql 
b/tests/integration/simple_snapshot_test/test-snapshots-select-noconfig/snapshot.sql deleted file mode 100644 index a62218b2c..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-select-noconfig/snapshot.sql +++ /dev/null @@ -1,41 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} - -{% snapshot snapshot_castillo %} - - {{ - config( - target_database=var('target_database', database), - updated_at='"1-updated_at"', - ) - }} - select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' - -{% endsnapshot %} - -{% snapshot snapshot_alvarez %} - - {{ - config( - target_database=var('target_database', database), - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' - -{% endsnapshot %} - - -{% snapshot snapshot_kelly %} - {# This has no target_database set, which is allowed! 
#} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-select/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-select/snapshot.sql deleted file mode 100644 index 06245f36f..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-select/snapshot.sql +++ /dev/null @@ -1,44 +0,0 @@ -{% snapshot snapshot_castillo %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='"1-updated_at"', - ) - }} - select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' - -{% endsnapshot %} - -{% snapshot snapshot_alvarez %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' - -{% endsnapshot %} - - -{% snapshot snapshot_kelly %} - {# This has no target_database set, which is allowed! 
#} - {{ - config( - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-slow-tests/test_timestamps.sql b/tests/integration/simple_snapshot_test/test-snapshots-slow-tests/test_timestamps.sql deleted file mode 100644 index c8687ceaf..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-slow-tests/test_timestamps.sql +++ /dev/null @@ -1,23 +0,0 @@ - -/* - Assert that the dbt_valid_from of the latest record - is equal to the dbt_valid_to of the previous record -*/ - -with snapshot as ( - - select * from {{ ref('my_slow_snapshot') }} - -) - -select - snap1.id, - snap1.dbt_valid_from as new_valid_from, - snap2.dbt_valid_from as old_valid_from, - snap2.dbt_valid_to as old_valid_to - -from snapshot as snap1 -join snapshot as snap2 on snap1.id = snap2.id -where snap1.dbt_valid_to is null - and snap2.dbt_valid_to is not null - and snap1.dbt_valid_from != snap2.dbt_valid_to diff --git a/tests/integration/simple_snapshot_test/test-snapshots-slow/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-slow/snapshot.sql deleted file mode 100644 index 260d0b967..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-slow/snapshot.sql +++ /dev/null @@ -1,21 +0,0 @@ - -{% snapshot my_slow_snapshot %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at' - ) - }} - - select - id, - updated_at, - seconds - - from {{ ref('gen') }} - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test_simple_snapshot.py b/tests/integration/simple_snapshot_test/test_simple_snapshot.py deleted file mode 100644 index d1423cb0a..000000000 --- 
a/tests/integration/simple_snapshot_test/test_simple_snapshot.py +++ /dev/null @@ -1,397 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -from datetime import datetime -import pytz - - -class BaseSimpleSnapshotTest(DBTIntegrationTest): - NUM_SNAPSHOT_MODELS = 1 - - @property - def schema(self): - return "simple_snapshot" - - @property - def models(self): - return "models" - - def run_snapshot(self): - return self.run_dbt(['snapshot']) - - def dbt_run_seed_snapshot(self): - self.run_sql_file('seed.sql') - - results = self.run_snapshot() - self.assertEqual(len(results), self.NUM_SNAPSHOT_MODELS) - - def assert_case_tables_equal(self, actual, expected): - self.assertTablesEqual(actual, expected) - - def assert_expected(self): - self.run_dbt(['test']) - self.assert_case_tables_equal('snapshot_actual', 'snapshot_expected') - - -class TestSimpleSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - "seed-paths": ['seeds'], - "snapshot-paths": ['test-snapshots-pg'], - 'macro-paths': ['macros'], - } - - @use_profile('redshift') - def test__redshift__simple_snapshot(self): - self.dbt_run_seed_snapshot() - - self.assert_expected() - - self.run_sql_file("invalidate_postgres.sql") - self.run_sql_file("update.sql") - - results = self.run_snapshot() - self.assertEqual(len(results), self.NUM_SNAPSHOT_MODELS) - - self.assert_expected() - - -class TestSimpleColumnSnapshotFiles(DBTIntegrationTest): - - @property - def schema(self): - return "simple_snapshot" - - @property - def models(self): - return "models-checkall" - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-checkall'], - 'seeds': { - 'quote_columns': False, - } - } - - def _run_snapshot_test(self): - self.run_dbt(['seed']) - self.run_dbt(['snapshot']) - database = self.default_database - if 
self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - results = self.run_sql( - 'select * from {}.{}.my_snapshot'.format(database, self.unique_schema()), - fetch='all' - ) - self.assertEqual(len(results), 3) - for result in results: - self.assertEqual(len(result), 6) - - self.run_dbt(['snapshot', '--vars', '{seed_name: seed_newcol}']) - results = self.run_sql( - 'select * from {}.{}.my_snapshot where last_name is not NULL'.format(database, self.unique_schema()), - fetch='all' - ) - self.assertEqual(len(results), 3) - - for result in results: - # new column - self.assertEqual(len(result), 7) - self.assertIsNotNone(result[-1]) - - results = self.run_sql( - 'select * from {}.{}.my_snapshot where last_name is NULL'.format(database, self.unique_schema()), - fetch='all' - ) - self.assertEqual(len(results), 3) - for result in results: - # new column - self.assertEqual(len(result), 7) - - @use_profile('redshift') - def test_redshift_renamed_source(self): - self._run_snapshot_test() - - -class TestCustomSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-pg-custom'], - } - - -class TestNamespacedCustomSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-pg-custom-namespaced'], - } - - -class TestInvalidNamespacedCustomSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-pg-custom-invalid'], - } - - def run_snapshot(self): - return self.run_dbt(['snapshot'], expect_pass=False) - - -class 
TestCheckCols(TestSimpleSnapshotFiles): - NUM_SNAPSHOT_MODELS = 2 - - def _assertTablesEqualSql(self, relation_a, relation_b, columns=None): - # When building the equality tests, only test columns that don't start - # with 'dbt_', because those are time-sensitive - if columns is None: - columns = [c for c in self.get_relation_columns(relation_a) if not c[0].lower().startswith('dbt_')] - return super()._assertTablesEqualSql(relation_a, relation_b, columns=columns) - - def assert_expected(self): - super().assert_expected() - self.assert_case_tables_equal('snapshot_checkall', 'snapshot_expected') - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": ['test-check-col-snapshots'], - 'macro-paths': ['macros'], - } - - -class TestConfiguredCheckCols(TestCheckCols): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": ['test-check-col-snapshots-noconfig'], - "snapshots": { - "test": { - "target_schema": self.unique_schema(), - "unique_key": "id || '-' || first_name", - "strategy": "check", - "check_cols": ["email"], - }, - }, - 'macro-paths': ['macros'], - } - - -class TestUpdatedAtCheckCols(TestCheckCols): - - def _assertTablesEqualSql(self, relation_a, relation_b, columns=None): - revived_records = self.run_sql( - ''' - select - id, - updated_at, - dbt_valid_from - from {} - '''.format(relation_b), - fetch='all' - ) - - for result in revived_records: - # result is a tuple, the updated_at is second and dbt_valid_from is latest - self.assertIsInstance(result[1], datetime) - self.assertIsInstance(result[2], datetime) - self.assertEqual(result[1].replace(tzinfo=pytz.UTC), result[2].replace(tzinfo=pytz.UTC)) - - if columns is None: - columns = [c for c in self.get_relation_columns(relation_a) if not c[0].lower().startswith('dbt_')] - return super()._assertTablesEqualSql(relation_a, relation_b, columns=columns) - - def assert_expected(self): 
- super().assert_expected() - self.assertTablesEqual('snapshot_checkall', 'snapshot_expected') - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": ['test-check-col-snapshots-noconfig'], - "snapshots": { - "test": { - "target_schema": self.unique_schema(), - "unique_key": "id || '-' || first_name", - "strategy": "check", - "check_cols": "all", - "updated_at": "updated_at", - }, - }, - 'macro-paths': ['macros'], - } - - -class TestSnapshotHardDelete(DBTIntegrationTest): - # These tests uses the same seed data, containing 20 records of which we hard delete the last 10. - # These deleted records set the dbt_valid_to to time the snapshot was ran. - NUM_SNAPSHOT_MODELS = 1 - - @property - def schema(self): - return "simple_snapshot_004" - - @property - def models(self): - return "models" - - @property - def project_config(self): - if self.adapter_type == 'bigquery': - paths = ['test-snapshots-bq'] - else: - paths = ['test-snapshots-pg'] - - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": paths, - 'macro-paths': ['macros'], - } - - def _test_snapshot_hard_delete(self): - self._snapshot() - - if self.adapter_type == 'snowflake': - self.assertTablesEqual("SNAPSHOT_EXPECTED", "SNAPSHOT_ACTUAL") - else: - self.assertTablesEqual("snapshot_expected", "snapshot_actual") - - self._invalidated_snapshot_datetime = None - self._revived_snapshot_datetime = None - - self._delete_records() - self._snapshot_and_assert_invalidated() - self._revive_records() - self._snapshot_and_assert_revived() - - def _snapshot(self): - begin_snapshot_datetime = datetime.now(pytz.UTC) - results = self.run_dbt(['snapshot', '--vars', '{invalidate_hard_deletes: true}']) - self.assertEqual(len(results), self.NUM_SNAPSHOT_MODELS) - - return begin_snapshot_datetime - - def _delete_records(self): - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) 
- - self.run_sql( - 'delete from {}.{}.seed where id >= 10;'.format(database, self.unique_schema()) - ) - - def _snapshot_and_assert_invalidated(self): - self._invalidated_snapshot_datetime = self._snapshot() - - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - - snapshotted = self.run_sql( - ''' - select - id, - dbt_valid_to - from {}.{}.snapshot_actual - order by id - '''.format(database, self.unique_schema()), - fetch='all' - ) - - self.assertEqual(len(snapshotted), 20) - for result in snapshotted[10:]: - # result is a tuple, the dbt_valid_to column is the latest - self.assertIsInstance(result[-1], datetime) - self.assertGreaterEqual(result[-1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime) - - def _revive_records(self): - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - - revival_timestamp = datetime.now(pytz.UTC).strftime(r'%Y-%m-%d %H:%M:%S') - self.run_sql( - ''' - insert into {}.{}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values - (10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '{}'), - (11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '{}') - '''.format(database, self.unique_schema(), revival_timestamp, revival_timestamp) - ) - - def _snapshot_and_assert_revived(self): - self._revived_snapshot_datetime = self._snapshot() - - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - - # records which weren't revived (id != 10, 11) - invalidated_records = self.run_sql( - ''' - select - id, - dbt_valid_to - from {}.{}.snapshot_actual - where dbt_valid_to is not null - order by id - '''.format(database, self.unique_schema()), - fetch='all' - ) - - self.assertEqual(len(invalidated_records), 11) - for result in invalidated_records: - # result is a tuple, the dbt_valid_to 
column is the latest - self.assertIsInstance(result[1], datetime) - self.assertGreaterEqual(result[1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime) - - # records which weren't revived (id != 10, 11) - revived_records = self.run_sql( - ''' - select - id, - dbt_valid_from, - dbt_valid_to - from {}.{}.snapshot_actual - where dbt_valid_to is null - and id IN (10, 11) - '''.format(database, self.unique_schema()), - fetch='all' - ) - - self.assertEqual(len(revived_records), 2) - for result in revived_records: - # result is a tuple, the dbt_valid_from is second and dbt_valid_to is latest - self.assertIsInstance(result[1], datetime) - # there are milliseconds (part of microseconds in datetime objects) in the - # invalidated_snapshot_datetime and not in result datetime so set the microseconds to 0 - self.assertGreaterEqual(result[1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime.replace(microsecond=0)) - self.assertIsNone(result[2]) - - @use_profile('redshift') - def test__redshift__snapshot_hard_delete(self): - self.run_sql_file('seed.sql') - self._test_snapshot_hard_delete() - diff --git a/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py b/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py deleted file mode 100644 index 4bc9262ed..000000000 --- a/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py +++ /dev/null @@ -1,40 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestSimpleSnapshotFiles(DBTIntegrationTest): - NUM_SNAPSHOT_MODELS = 1 - - @property - def schema(self): - return "simple_snapshot" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 'config-version': 2, - "snapshot-paths": ['check-snapshots'], - "test-paths": ['check-snapshots-expected'], - "model-paths": [], - } - - def snapshot_check_cols_cycle(self): - results = self.run_dbt(["snapshot", '--vars', 'version: 1']) - 
self.assertEqual(len(results), 1) - - results = self.run_dbt(["snapshot", '--vars', 'version: 2']) - self.assertEqual(len(results), 1) - - results = self.run_dbt(["snapshot", '--vars', 'version: 3']) - self.assertEqual(len(results), 1) - - def assert_expected(self): - self.run_dbt(['test', '--select', 'test_type:singular', '--vars', 'version: 3']) - - @use_profile('redshift') - def test__redshift__simple_snapshot(self): - self.snapshot_check_cols_cycle() - self.assert_expected() diff --git a/tests/integration/simple_snapshot_test/update.sql b/tests/integration/simple_snapshot_test/update.sql deleted file mode 100644 index 890959f32..000000000 --- a/tests/integration/simple_snapshot_test/update.sql +++ /dev/null @@ -1,261 +0,0 @@ --- insert v2 of the 11 - 21 records - -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20; - - -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Castillo'; - - -insert into {database}.{schema}.snapshot_alvarez_expected 
( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Alvarez'; - - -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Kelly'; - --- insert 10 new records -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(21, 'Judy', 'Robinson', 'jrobinsonk@blogs.com', 'Female', '208.21.192.232', '2016-09-18 08:27:38'), -(22, 'Kevin', 'Alvarez', 'kalvarezl@buzzfeed.com', 'Male', '228.106.146.9', '2016-07-29 03:07:37'), -(23, 'Barbara', 'Carr', 'bcarrm@pen.io', 'Female', '106.165.140.17', '2015-09-24 13:27:23'), -(24, 'William', 'Watkins', 'wwatkinsn@guardian.co.uk', 'Male', '78.155.84.6', '2016-03-08 19:13:08'), -(25, 'Judy', 'Cooper', 'jcoopero@google.com.au', 'Female', '24.149.123.184', '2016-10-05 20:49:33'), -(26, 'Shirley', 'Castillo', 'scastillop@samsung.com', 'Female', '129.252.181.12', '2016-06-20 21:12:21'), -(27, 'Justin', 'Harper', 'jharperq@opera.com', 'Male', '131.172.103.218', '2016-05-21 22:56:46'), -(28, 'Marie', 'Medina', 
'mmedinar@nhs.uk', 'Female', '188.119.125.67', '2015-10-08 13:44:33'), -(29, 'Kelly', 'Edwards', 'kedwardss@phoca.cz', 'Female', '47.121.157.66', '2015-09-15 06:33:37'), -(30, 'Carl', 'Coleman', 'ccolemant@wikipedia.org', 'Male', '82.227.154.83', '2016-05-26 16:46:40'); - - --- add these new records to the snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20; - - --- add these new records to the snapshot table -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Castillo'; - -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as 
dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Alvarez'; - -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Kelly'; diff --git a/tests/integration/simple_snapshot_test/update_bq.sql b/tests/integration/simple_snapshot_test/update_bq.sql deleted file mode 100644 index 5c972d8af..000000000 --- a/tests/integration/simple_snapshot_test/update_bq.sql +++ /dev/null @@ -1,78 +0,0 @@ --- insert v2 of the 11 - 21 records - -insert {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - cast(null as timestamp) as dbt_valid_to, - updated_at as dbt_updated_at, - to_hex(md5(concat(cast(id as string), '-', first_name, '|', cast(updated_at as string)))) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20; - - --- insert 10 new records -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(21, 'Judy', 'Robinson', 'jrobinsonk@blogs.com', 'Female', '208.21.192.232', '2016-09-18 08:27:38'), -(22, 'Kevin', 'Alvarez', 'kalvarezl@buzzfeed.com', 'Male', '228.106.146.9', '2016-07-29 03:07:37'), -(23, 'Barbara', 'Carr', 'bcarrm@pen.io', 'Female', '106.165.140.17', '2015-09-24 13:27:23'), -(24, 
'William', 'Watkins', 'wwatkinsn@guardian.co.uk', 'Male', '78.155.84.6', '2016-03-08 19:13:08'), -(25, 'Judy', 'Cooper', 'jcoopero@google.com.au', 'Female', '24.149.123.184', '2016-10-05 20:49:33'), -(26, 'Shirley', 'Castillo', 'scastillop@samsung.com', 'Female', '129.252.181.12', '2016-06-20 21:12:21'), -(27, 'Justin', 'Harper', 'jharperq@opera.com', 'Male', '131.172.103.218', '2016-05-21 22:56:46'), -(28, 'Marie', 'Medina', 'mmedinar@nhs.uk', 'Female', '188.119.125.67', '2015-10-08 13:44:33'), -(29, 'Kelly', 'Edwards', 'kedwardss@phoca.cz', 'Female', '47.121.157.66', '2015-09-15 06:33:37'), -(30, 'Carl', 'Coleman', 'ccolemant@wikipedia.org', 'Male', '82.227.154.83', '2016-05-26 16:46:40'); - - --- add these new records to the snapshot table -insert {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - cast(null as timestamp) as dbt_valid_to, - updated_at as dbt_updated_at, - to_hex(md5(concat(cast(id as string), '-', first_name, '|', cast(updated_at as string)))) as dbt_scd_id -from {database}.{schema}.seed -where id > 20; - From abc1619c65be49aa9193b81103944fc567840b76 Mon Sep 17 00:00:00 2001 From: Nathaniel May Date: Mon, 27 Feb 2023 12:25:08 -0500 Subject: [PATCH 036/113] mirror issues to ADAP jira project (#322) --- .github/workflows/jira-creation.yml | 4 +++- .github/workflows/jira-label.yml | 4 +++- .github/workflows/jira-transition.yml | 7 ++++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/jira-creation.yml b/.github/workflows/jira-creation.yml index b4016befc..2611a8bdd 100644 --- a/.github/workflows/jira-creation.yml +++ b/.github/workflows/jira-creation.yml @@ -19,7 +19,9 @@ permissions: jobs: call-label-action: - uses: 
dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main + uses: dbt-labs/actions/.github/workflows/jira-creation.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-label.yml b/.github/workflows/jira-label.yml index 3da2e3a38..1637cbe38 100644 --- a/.github/workflows/jira-label.yml +++ b/.github/workflows/jira-label.yml @@ -19,7 +19,9 @@ permissions: jobs: call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main + uses: dbt-labs/actions/.github/workflows/jira-label.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-transition.yml b/.github/workflows/jira-transition.yml index ed9f9cd4f..99158a15f 100644 --- a/.github/workflows/jira-transition.yml +++ b/.github/workflows/jira-transition.yml @@ -15,9 +15,14 @@ on: issues: types: [closed, deleted, reopened] +# no special access is needed +permissions: read-all + jobs: call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main + uses: dbt-labs/actions/.github/workflows/jira-transition.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} From fdfe8ec69b2bb13aa08ae0ae46716eee614931a6 Mon Sep 17 00:00:00 2001 From: Nathaniel May Date: Mon, 27 Feb 2023 12:31:00 -0500 Subject: [PATCH 037/113] Revert "mirror issues to ADAP jira project (#322)" (#338) This reverts commit abc1619c65be49aa9193b81103944fc567840b76. 
--- .github/workflows/jira-creation.yml | 4 +--- .github/workflows/jira-label.yml | 4 +--- .github/workflows/jira-transition.yml | 7 +------ 3 files changed, 3 insertions(+), 12 deletions(-) diff --git a/.github/workflows/jira-creation.yml b/.github/workflows/jira-creation.yml index 2611a8bdd..b4016befc 100644 --- a/.github/workflows/jira-creation.yml +++ b/.github/workflows/jira-creation.yml @@ -19,9 +19,7 @@ permissions: jobs: call-label-action: - uses: dbt-labs/actions/.github/workflows/jira-creation.yml@main - with: - project_key: ADAP + uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-label.yml b/.github/workflows/jira-label.yml index 1637cbe38..3da2e3a38 100644 --- a/.github/workflows/jira-label.yml +++ b/.github/workflows/jira-label.yml @@ -19,9 +19,7 @@ permissions: jobs: call-label-action: - uses: dbt-labs/actions/.github/workflows/jira-label.yml@main - with: - project_key: ADAP + uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-transition.yml b/.github/workflows/jira-transition.yml index 99158a15f..ed9f9cd4f 100644 --- a/.github/workflows/jira-transition.yml +++ b/.github/workflows/jira-transition.yml @@ -15,14 +15,9 @@ on: issues: types: [closed, deleted, reopened] -# no special access is needed -permissions: read-all - jobs: call-label-action: - uses: dbt-labs/actions/.github/workflows/jira-transition.yml@main - with: - project_key: ADAP + uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} From 48c0b5a5e319102e50a56bc954cb08887a41f482 Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Wed, 1 Mar 2023 08:13:01 -0800 Subject: 
[PATCH 038/113] [CT-2099]: Delete concurrent_transaction test and clean up integration tests (#325) * Convert concurrent transaction to functional.WIP * Fix query_state init.WIP * Nits * Include test README * Line nit * Testing the changes.WIP * Fix exception printing.WIP * Restoring query_state and checking exception type * Simplify and clean up * Test changes.WIP * autouse * Fixture to function * Changes after trying a few things.WIP * minor cleanup.wip * minor changes * Fixing the pytz version.WIP * Remove concurrent tests and integ tests dir * remove pytz fix --------- Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- tests/integration/__init__.py | 0 tests/integration/base.py | 955 ------------------ .../concurrent_transaction_test/README.md | 33 - .../macros/udfs.sql | 13 - .../models-incremental/model_1.sql | 9 - .../models-incremental/view_model.sql | 3 - .../models-table/model_1.sql | 5 - .../models-table/view_model.sql | 3 - .../models-view/model_1.sql | 5 - .../models-view/view_model.sql | 3 - .../test_concurrent_transaction.py | 136 --- 11 files changed, 1165 deletions(-) delete mode 100644 tests/integration/__init__.py delete mode 100644 tests/integration/base.py delete mode 100644 tests/integration/concurrent_transaction_test/README.md delete mode 100644 tests/integration/concurrent_transaction_test/macros/udfs.sql delete mode 100644 tests/integration/concurrent_transaction_test/models-incremental/model_1.sql delete mode 100644 tests/integration/concurrent_transaction_test/models-incremental/view_model.sql delete mode 100644 tests/integration/concurrent_transaction_test/models-table/model_1.sql delete mode 100644 tests/integration/concurrent_transaction_test/models-table/view_model.sql delete mode 100644 tests/integration/concurrent_transaction_test/models-view/model_1.sql delete mode 100644 tests/integration/concurrent_transaction_test/models-view/view_model.sql delete mode 100644 
tests/integration/concurrent_transaction_test/test_concurrent_transaction.py diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/integration/base.py b/tests/integration/base.py deleted file mode 100644 index 9222bade9..000000000 --- a/tests/integration/base.py +++ /dev/null @@ -1,955 +0,0 @@ -import os -import io -import random -import shutil -import sys -import tempfile -import traceback -import unittest -import warnings -from contextlib import contextmanager -from datetime import datetime -from functools import wraps - -import pytest -import yaml -from unittest.mock import patch - -import dbt.main as dbt -from dbt import flags -from dbt.deprecations import reset_deprecations -from dbt.adapters.factory import get_adapter, reset_adapters, register_adapter -from dbt.clients.jinja import template_cache -from dbt.config import RuntimeConfig -from dbt.context import providers -from dbt.logger import log_manager -from dbt.events.functions import ( - capture_stdout_logs, fire_event, setup_event_logger, stop_capture_stdout_logs -) -from dbt.events import AdapterLogger -from dbt.contracts.graph.manifest import Manifest - - -logger = AdapterLogger("Redshift") -INITIAL_ROOT = os.getcwd() - - -def normalize(path): - """On windows, neither is enough on its own: - - >>> normcase('C:\\documents/ALL CAPS/subdir\\..') - 'c:\\documents\\all caps\\subdir\\..' 
- >>> normpath('C:\\documents/ALL CAPS/subdir\\..') - 'C:\\documents\\ALL CAPS' - >>> normpath(normcase('C:\\documents/ALL CAPS/subdir\\..')) - 'c:\\documents\\all caps' - """ - return os.path.normcase(os.path.normpath(path)) - - -class Normalized: - def __init__(self, value): - self.value = value - - def __repr__(self): - return f'Normalized({self.value!r})' - - def __str__(self): - return f'Normalized({self.value!s})' - - def __eq__(self, other): - return normalize(self.value) == normalize(other) - - -class FakeArgs: - def __init__(self): - self.threads = 1 - self.defer = False - self.full_refresh = False - self.models = None - self.select = None - self.exclude = None - self.single_threaded = False - self.selector_name = None - self.state = None - self.defer = None - - -class TestArgs: - def __init__(self, kwargs): - self.which = 'run' - self.single_threaded = False - self.profiles_dir = None - self.project_dir = None - self.__dict__.update(kwargs) - - -def _profile_from_test_name(test_name): - adapter_names = ('redshift',) - adapters_in_name = sum(x in test_name for x in adapter_names) - if adapters_in_name != 1: - raise ValueError( - 'test names must have exactly 1 profile choice embedded, {} has {}' - .format(test_name, adapters_in_name) - ) - - for adapter_name in adapter_names: - if adapter_name in test_name: - return adapter_name - - raise ValueError( - 'could not find adapter name in test name {}'.format(test_name) - ) - - -def _pytest_test_name(): - return os.environ['PYTEST_CURRENT_TEST'].split()[0] - - -def _pytest_get_test_root(): - test_path = _pytest_test_name().split('::')[0] - relative_to = INITIAL_ROOT - head = os.path.relpath(test_path, relative_to) - - path_parts = [] - while head: - head, tail = os.path.split(head) - path_parts.append(tail) - path_parts.reverse() - # dbt tests are all of the form 'tests/integration/suite_name' - target = os.path.join(*path_parts[:3]) # TODO: try to not hard code this - return os.path.join(relative_to, target) - 
- -def _really_makedirs(path): - while not os.path.exists(path): - try: - os.makedirs(path) - except EnvironmentError: - raise - - -class DBTIntegrationTest(unittest.TestCase): - CREATE_SCHEMA_STATEMENT = 'CREATE SCHEMA {}' - DROP_SCHEMA_STATEMENT = 'DROP SCHEMA IF EXISTS {} CASCADE' - - _randint = random.randint(0, 9999) - _runtime_timedelta = (datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0)) - _runtime = ( - (int(_runtime_timedelta.total_seconds() * 1e6)) + - _runtime_timedelta.microseconds - ) - - prefix = f'test{_runtime}{_randint:04}' - setup_alternate_db = False - - def redshift_profile(self): - return { - 'config': { - 'send_anonymous_usage_stats': False - }, - 'test': { - 'outputs': { - 'default2': { - 'type': 'redshift', - 'threads': 1, - 'retries': 6, - 'host': os.getenv('REDSHIFT_TEST_HOST'), - 'port': int(os.getenv('REDSHIFT_TEST_PORT')), - 'user': os.getenv('REDSHIFT_TEST_USER'), - 'pass': os.getenv('REDSHIFT_TEST_PASS'), - 'dbname': os.getenv('REDSHIFT_TEST_DBNAME'), - 'schema': self.unique_schema() - } - }, - 'target': 'default2' - } - } - - @property - def packages_config(self): - return None - - @property - def selectors_config(self): - return None - - def unique_schema(self): - schema = self.schema - - to_return = "{}_{}".format(self.prefix, schema) - - return to_return.lower() - - @property - def default_database(self): - database = self.config.credentials.database - return database - - @property - def alternative_database(self): - return None - - def get_profile(self, adapter_type): - if adapter_type == 'redshift': - return self.redshift_profile() - else: - raise ValueError('invalid adapter type {}'.format(adapter_type)) - - def _pick_profile(self): - test_name = self.id().split('.')[-1] - return _profile_from_test_name(test_name) - - def _symlink_test_folders(self): - for entry in os.listdir(self.test_original_source_path): - src = os.path.join(self.test_original_source_path, entry) - tst = os.path.join(self.test_root_dir, entry) - if 
os.path.isdir(src) or src.endswith('.sql'): - # symlink all sql files and all directories. - os.symlink(src, tst) - os.symlink(self._logs_dir, os.path.join(self.test_root_dir, 'logs')) - - @property - def test_root_realpath(self): - if sys.platform == 'darwin': - return os.path.realpath(self.test_root_dir) - else: - return self.test_root_dir - - def _generate_test_root_dir(self): - return normalize(tempfile.mkdtemp(prefix='dbt-int-test-')) - - def setUp(self): - # Logbook warnings are ignored so we don't have to fork logbook to support python 3.10. - # This _only_ works for tests in `test/integration`. - warnings.filterwarnings( - "ignore", - category=DeprecationWarning, - module="logbook" - ) - self.dbt_core_install_root = os.path.dirname(dbt.__file__) - log_manager.reset_handlers() - self.initial_dir = INITIAL_ROOT - os.chdir(self.initial_dir) - # before we go anywhere, collect the initial path info - self._logs_dir = os.path.join(self.initial_dir, 'logs', self.prefix) - setup_event_logger(self._logs_dir) - _really_makedirs(self._logs_dir) - self.test_original_source_path = _pytest_get_test_root() - self.test_root_dir = self._generate_test_root_dir() - - os.chdir(self.test_root_dir) - try: - self._symlink_test_folders() - except Exception as exc: - msg = '\n\t'.join(( - 'Failed to symlink test folders!', - 'initial_dir={0.initial_dir}', - 'test_original_source_path={0.test_original_source_path}', - 'test_root_dir={0.test_root_dir}' - )).format(self) - logger.exception(msg) - - # if logging isn't set up, I still really want this message. 
- print(msg) - traceback.print_exc() - - raise - - self._created_schemas = set() - reset_deprecations() - template_cache.clear() - - self.use_profile(self._pick_profile()) - self.use_default_project() - self.set_packages() - self.set_selectors() - self.load_config() - - def use_default_project(self, overrides=None): - # create a dbt_project.yml - base_project_config = { - 'name': 'test', - 'version': '1.0', - 'config-version': 2, - 'test-paths': [], - 'model-paths': [self.models], - 'profile': 'test', - } - - project_config = {} - project_config.update(base_project_config) - project_config.update(self.project_config) - project_config.update(overrides or {}) - - with open("dbt_project.yml", 'w') as f: - yaml.safe_dump(project_config, f, default_flow_style=True) - - def use_profile(self, adapter_type): - self.adapter_type = adapter_type - - profile_config = {} - default_profile_config = self.get_profile(adapter_type) - - profile_config.update(default_profile_config) - profile_config.update(self.profile_config) - - if not os.path.exists(self.test_root_dir): - os.makedirs(self.test_root_dir) - - flags.PROFILES_DIR = self.test_root_dir - profiles_path = os.path.join(self.test_root_dir, 'profiles.yml') - with open(profiles_path, 'w') as f: - yaml.safe_dump(profile_config, f, default_flow_style=True) - self._profile_config = profile_config - - def set_packages(self): - if self.packages_config is not None: - with open('packages.yml', 'w') as f: - yaml.safe_dump(self.packages_config, f, default_flow_style=True) - - def set_selectors(self): - if self.selectors_config is not None: - with open('selectors.yml', 'w') as f: - yaml.safe_dump(self.selectors_config, f, default_flow_style=True) - - def load_config(self): - # we've written our profile and project. Now we want to instantiate a - # fresh adapter for the tests. 
- # it's important to use a different connection handle here so - # we don't look into an incomplete transaction - kwargs = { - 'profile': None, - 'profiles_dir': self.test_root_dir, - 'target': None, - } - - config = RuntimeConfig.from_args(TestArgs(kwargs)) - - register_adapter(config) - adapter = get_adapter(config) - adapter.cleanup_connections() - self.adapter_type = adapter.type() - self.adapter = adapter - self.config = config - - self._drop_schemas() - self._create_schemas() - - def quote_as_configured(self, value, quote_key): - return self.adapter.quote_as_configured(value, quote_key) - - def tearDown(self): - # get any current run adapter and clean up its connections before we - # reset them. It'll probably be different from ours because - # handle_and_check() calls reset_adapters(). - register_adapter(self.config) - adapter = get_adapter(self.config) - if adapter is not self.adapter: - adapter.cleanup_connections() - if not hasattr(self, 'adapter'): - self.adapter = adapter - - self._drop_schemas() - - self.adapter.cleanup_connections() - reset_adapters() - os.chdir(INITIAL_ROOT) - try: - shutil.rmtree(self.test_root_dir) - except EnvironmentError: - logger.exception('Could not clean up after test - {} not removable' - .format(self.test_root_dir)) - - def _get_schema_fqn(self, database, schema): - schema_fqn = self.quote_as_configured(schema, 'schema') - return schema_fqn - - def _create_schema_named(self, database, schema): - schema_fqn = self._get_schema_fqn(database, schema) - self.run_sql(self.CREATE_SCHEMA_STATEMENT.format(schema_fqn)) - self._created_schemas.add(schema_fqn) - - def _drop_schema_named(self, database, schema): - schema_fqn = self._get_schema_fqn(database, schema) - self.run_sql(self.DROP_SCHEMA_STATEMENT.format(schema_fqn)) - - def _create_schemas(self): - schema = self.unique_schema() - with self.adapter.connection_named('__test'): - self._create_schema_named(self.default_database, schema) - - def _drop_schemas_sql(self): - schema = 
self.unique_schema() - # we always want to drop these if necessary, we'll clear it soon. - self._created_schemas.add( - self._get_schema_fqn(self.default_database, schema) - ) - drop_alternative = ( - self.setup_alternate_db and - self.adapter_type not in {'redshift'} and - self.alternative_database - ) - if drop_alternative: - self._created_schemas.add( - self._get_schema_fqn(self.alternative_database, schema) - ) - - for schema_fqn in self._created_schemas: - self.run_sql(self.DROP_SCHEMA_STATEMENT.format(schema_fqn)) - - self._created_schemas.clear() - - def _drop_schemas(self): - with self.adapter.connection_named('__test'): - self._drop_schemas_sql() - - @property - def project_config(self): - return { - 'config-version': 2, - } - - @property - def profile_config(self): - return {} - - def run_dbt(self, args=None, expect_pass=True, profiles_dir=True): - res, success = self.run_dbt_and_check(args=args, profiles_dir=profiles_dir) - self.assertEqual( - success, expect_pass, - "dbt exit state did not match expected") - - return res - - - def run_dbt_and_capture(self, *args, **kwargs): - try: - stringbuf = capture_stdout_logs() - res = self.run_dbt(*args, **kwargs) - stdout = stringbuf.getvalue() - - finally: - stop_capture_stdout_logs() - - return res, stdout - - def run_dbt_and_check(self, args=None, profiles_dir=True): - log_manager.reset_handlers() - if args is None: - args = ["run"] - - final_args = [] - - if os.getenv('DBT_TEST_SINGLE_THREADED') in ('y', 'Y', '1'): - final_args.append('--single-threaded') - - final_args.extend(args) - - if profiles_dir: - final_args.extend(['--profiles-dir', self.test_root_dir]) - final_args.append('--log-cache-events') - - logger.info("Invoking dbt with {}".format(final_args)) - return dbt.handle_and_check(final_args) - - def run_sql_file(self, path, kwargs=None): - with open(path, 'r') as f: - statements = f.read().split(";") - for statement in statements: - self.run_sql(statement, kwargs=kwargs) - - def transform_sql(self, 
query, kwargs=None): - to_return = query - - base_kwargs = { - 'schema': self.unique_schema(), - 'database': self.adapter.quote(self.default_database), - } - if kwargs is None: - kwargs = {} - base_kwargs.update(kwargs) - - to_return = to_return.format(**base_kwargs) - - return to_return - - def run_sql_common(self, sql, fetch, conn): - with conn.handle.cursor() as cursor: - try: - cursor.execute(sql) - conn.handle.commit() - if fetch == 'one': - return cursor.fetchone() - elif fetch == 'all': - return cursor.fetchall() - else: - return - except BaseException as e: - if conn.handle and not getattr(conn.handle, 'closed', True): - conn.handle.rollback() - print(sql) - print(e) - raise - finally: - conn.transaction_open = False - - def run_sql(self, query, fetch='None', kwargs=None, connection_name=None): - if connection_name is None: - connection_name = '__test' - - if query.strip() == "": - return - - sql = self.transform_sql(query, kwargs=kwargs) - - with self.get_connection(connection_name) as conn: - logger.debug('test connection "{}" executing: {}'.format(conn.name, sql)) - return self.run_sql_common(sql, fetch, conn) - - def _ilike(self, target, value): - return "{} ilike '{}'".format(target, value) - - def get_many_table_columns_information_schema(self, tables, schema, database=None): - columns = 'table_name, column_name, data_type, character_maximum_length' - - sql = """ - select {columns} - from {db_string}information_schema.columns - where {schema_filter} - and ({table_filter}) - order by column_name asc""" - - db_string = '' - if database: - db_string = self.quote_as_configured(database, 'database') + '.' 
- - table_filters_s = " OR ".join( - self._ilike('table_name', table.replace('"', '')) - for table in tables - ) - schema_filter = self._ilike('table_schema', schema) - - sql = sql.format( - columns=columns, - schema_filter=schema_filter, - table_filter=table_filters_s, - db_string=db_string) - - columns = self.run_sql(sql, fetch='all') - return list(map(self.filter_many_columns, columns)) - - def get_many_table_columns(self, tables, schema, database=None): - result = self.get_many_table_columns_information_schema(tables, schema, database) - result.sort(key=lambda x: '{}.{}'.format(x[0], x[1])) - return result - - def filter_many_columns(self, column): - if len(column) == 3: - table_name, column_name, data_type = column - char_size = None - else: - table_name, column_name, data_type, char_size = column - return (table_name, column_name, data_type, char_size) - - @contextmanager - def get_connection(self, name=None): - """Create a test connection context where all executed macros, etc will - get self.adapter as the adapter. 
- - This allows tests to run normal adapter macros as if reset_adapters() - were not called by handle_and_check (for asserts, etc) - """ - if name is None: - name = '__test' - with patch.object(providers, 'get_adapter', return_value=self.adapter): - with self.adapter.connection_named(name): - conn = self.adapter.connections.get_thread_connection() - yield conn - - def get_relation_columns(self, relation): - with self.get_connection(): - columns = self.adapter.get_columns_in_relation(relation) - - return sorted(((c.name, c.dtype, c.char_size) for c in columns), - key=lambda x: x[0]) - - def get_table_columns(self, table, schema=None, database=None): - schema = self.unique_schema() if schema is None else schema - database = self.default_database if database is None else database - relation = self.adapter.Relation.create( - database=database, - schema=schema, - identifier=table, - type='table', - quote_policy=self.config.quoting - ) - return self.get_relation_columns(relation) - - def get_table_columns_as_dict(self, tables, schema=None): - col_matrix = self.get_many_table_columns(tables, schema) - res = {} - for row in col_matrix: - table_name = row[0] - col_def = row[1:] - if table_name not in res: - res[table_name] = [] - res[table_name].append(col_def) - return res - - def get_models_in_schema(self, schema=None): - schema = self.unique_schema() if schema is None else schema - sql = """ - select table_name, - case when table_type = 'BASE TABLE' then 'table' - when table_type = 'VIEW' then 'view' - else table_type - end as materialization - from information_schema.tables - where {} - order by table_name - """ - - sql = sql.format(self._ilike('table_schema', schema)) - result = self.run_sql(sql, fetch='all') - - return {model_name: materialization for (model_name, materialization) in result} - - def _assertTablesEqualSql(self, relation_a, relation_b, columns=None): - if columns is None: - columns = self.get_relation_columns(relation_a) - column_names = [c[0] for c in 
columns] - - sql = self.adapter.get_rows_different_sql( - relation_a, relation_b, column_names - ) - - return sql - - def assertTablesEqual(self, table_a, table_b, - table_a_schema=None, table_b_schema=None, - table_a_db=None, table_b_db=None): - if table_a_schema is None: - table_a_schema = self.unique_schema() - - if table_b_schema is None: - table_b_schema = self.unique_schema() - - if table_a_db is None: - table_a_db = self.default_database - - if table_b_db is None: - table_b_db = self.default_database - - relation_a = self._make_relation(table_a, table_a_schema, table_a_db) - relation_b = self._make_relation(table_b, table_b_schema, table_b_db) - - self._assertTableColumnsEqual(relation_a, relation_b) - - sql = self._assertTablesEqualSql(relation_a, relation_b) - result = self.run_sql(sql, fetch='one') - - self.assertEqual( - result[0], - 0, - 'row_count_difference nonzero: ' + sql - ) - self.assertEqual( - result[1], - 0, - 'num_mismatched nonzero: ' + sql - ) - - def _make_relation(self, identifier, schema=None, database=None): - if schema is None: - schema = self.unique_schema() - if database is None: - database = self.default_database - return self.adapter.Relation.create( - database=database, - schema=schema, - identifier=identifier, - quote_policy=self.config.quoting - ) - - def get_many_relation_columns(self, relations): - """Returns a dict of (datbase, schema) -> (dict of (table_name -> list of columns)) - """ - schema_fqns = {} - for rel in relations: - this_schema = schema_fqns.setdefault((rel.database, rel.schema), []) - this_schema.append(rel.identifier) - - column_specs = {} - for key, tables in schema_fqns.items(): - database, schema = key - columns = self.get_many_table_columns(tables, schema, database=database) - table_columns = {} - for col in columns: - table_columns.setdefault(col[0], []).append(col[1:]) - for rel_name, columns in table_columns.items(): - key = (database, schema, rel_name) - column_specs[key] = columns - - return 
column_specs - - def assertManyRelationsEqual(self, relations, default_schema=None, default_database=None): - if default_schema is None: - default_schema = self.unique_schema() - if default_database is None: - default_database = self.default_database - - specs = [] - for relation in relations: - if not isinstance(relation, (tuple, list)): - relation = [relation] - - assert len(relation) <= 3 - - if len(relation) == 3: - relation = self._make_relation(*relation) - elif len(relation) == 2: - relation = self._make_relation(relation[0], relation[1], default_database) - elif len(relation) == 1: - relation = self._make_relation(relation[0], default_schema, default_database) - else: - raise ValueError('relation must be a sequence of 1, 2, or 3 values') - - specs.append(relation) - - with self.get_connection(): - column_specs = self.get_many_relation_columns(specs) - - # make sure everyone has equal column definitions - first_columns = None - for relation in specs: - key = (relation.database, relation.schema, relation.identifier) - # get a good error here instead of a hard-to-diagnose KeyError - self.assertIn(key, column_specs, f'No columns found for {key}') - columns = column_specs[key] - if first_columns is None: - first_columns = columns - else: - self.assertEqual( - first_columns, columns, - '{} did not match {}'.format(str(specs[0]), str(relation)) - ) - - # make sure everyone has the same data. if we got here, everyone had - # the same column specs! 
- first_relation = None - for relation in specs: - if first_relation is None: - first_relation = relation - else: - sql = self._assertTablesEqualSql(first_relation, relation, - columns=first_columns) - result = self.run_sql(sql, fetch='one') - - self.assertEqual( - result[0], - 0, - 'row_count_difference nonzero: ' + sql - ) - self.assertEqual( - result[1], - 0, - 'num_mismatched nonzero: ' + sql - ) - - def assertManyTablesEqual(self, *args): - schema = self.unique_schema() - - all_tables = [] - for table_equivalencies in args: - all_tables += list(table_equivalencies) - - all_cols = self.get_table_columns_as_dict(all_tables, schema) - - for table_equivalencies in args: - first_table = table_equivalencies[0] - first_relation = self._make_relation(first_table) - - # assert that all tables have the same columns - base_result = all_cols[first_table] - self.assertTrue(len(base_result) > 0) - - for other_table in table_equivalencies[1:]: - other_result = all_cols[other_table] - self.assertTrue(len(other_result) > 0) - self.assertEqual(base_result, other_result) - - other_relation = self._make_relation(other_table) - sql = self._assertTablesEqualSql(first_relation, - other_relation, - columns=base_result) - result = self.run_sql(sql, fetch='one') - - self.assertEqual( - result[0], - 0, - 'row_count_difference nonzero: ' + sql - ) - self.assertEqual( - result[1], - 0, - 'num_mismatched nonzero: ' + sql - ) - - - def _assertTableRowCountsEqual(self, relation_a, relation_b): - cmp_query = """ - with table_a as ( - - select count(*) as num_rows from {} - - ), table_b as ( - - select count(*) as num_rows from {} - - ) - - select table_a.num_rows - table_b.num_rows as difference - from table_a, table_b - - """.format(str(relation_a), str(relation_b)) - - res = self.run_sql(cmp_query, fetch='one') - - self.assertEqual(int(res[0]), 0, "Row count of table {} doesn't match row count of table {}. 
({} rows different)".format( - relation_a.identifier, - relation_b.identifier, - res[0] - ) - ) - - def assertTableDoesNotExist(self, table, schema=None, database=None): - columns = self.get_table_columns(table, schema, database) - - self.assertEqual( - len(columns), - 0 - ) - - def assertTableDoesExist(self, table, schema=None, database=None): - columns = self.get_table_columns(table, schema, database) - - self.assertGreater( - len(columns), - 0 - ) - - def _assertTableColumnsEqual(self, relation_a, relation_b): - table_a_result = self.get_relation_columns(relation_a) - table_b_result = self.get_relation_columns(relation_b) - - text_types = {'text', 'character varying', 'character', 'varchar'} - - self.assertEqual(len(table_a_result), len(table_b_result)) - for a_column, b_column in zip(table_a_result, table_b_result): - a_name, a_type, a_size = a_column - b_name, b_type, b_size = b_column - self.assertEqual(a_name, b_name, - '{} vs {}: column "{}" != "{}"'.format( - relation_a, relation_b, a_name, b_name - )) - - self.assertEqual(a_type, b_type, - '{} vs {}: column "{}" has type "{}" != "{}"'.format( - relation_a, relation_b, a_name, a_type, b_type - )) - - self.assertEqual(a_size, b_size, - '{} vs {}: column "{}" has size "{}" != "{}"'.format( - relation_a, relation_b, a_name, a_size, b_size - )) - - def assertEquals(self, *args, **kwargs): - # assertEquals is deprecated. 
This makes the warnings less chatty - self.assertEqual(*args, **kwargs) - - def assertBetween(self, timestr, start, end=None): - datefmt = '%Y-%m-%dT%H:%M:%S.%fZ' - if end is None: - end = datetime.utcnow() - - parsed = datetime.strptime(timestr, datefmt) - - self.assertLessEqual(start, parsed, - 'parsed date {} happened before {}'.format( - parsed, - start.strftime(datefmt)) - ) - self.assertGreaterEqual(end, parsed, - 'parsed date {} happened after {}'.format( - parsed, - end.strftime(datefmt)) - ) - - -def use_profile(profile_name): - """A decorator to declare a test method as using a particular profile. - Handles both setting the nose attr and calling self.use_profile. - - Use like this: - - class TestSomething(DBIntegrationTest): - @use_profile('postgres') - def test_postgres_thing(self): - self.assertEqual(self.adapter_type, 'postgres') - - @use_profile('snowflake') - def test_snowflake_thing(self): - self.assertEqual(self.adapter_type, 'snowflake') - """ - def outer(wrapped): - @getattr(pytest.mark, 'profile_'+profile_name) - @wraps(wrapped) - def func(self, *args, **kwargs): - return wrapped(self, *args, **kwargs) - # sanity check at import time - assert _profile_from_test_name(wrapped.__name__) == profile_name - return func - return outer - - -class AnyFloat: - """Any float. Use this in assertEqual() calls to assert that it is a float. - """ - def __eq__(self, other): - return isinstance(other, float) - - -class AnyString: - """Any string. Use this in assertEqual() calls to assert that it is a string. 
- """ - def __eq__(self, other): - return isinstance(other, str) - - -class AnyStringWith: - def __init__(self, contains=None): - self.contains = contains - - def __eq__(self, other): - if not isinstance(other, str): - return False - - if self.contains is None: - return True - - return self.contains in other - - def __repr__(self): - return 'AnyStringWith<{!r}>'.format(self.contains) - - -def get_manifest(): - path = './target/partial_parse.msgpack' - if os.path.exists(path): - with open(path, 'rb') as fp: - manifest_mp = fp.read() - manifest: Manifest = Manifest.from_msgpack(manifest_mp) - return manifest - else: - return None diff --git a/tests/integration/concurrent_transaction_test/README.md b/tests/integration/concurrent_transaction_test/README.md deleted file mode 100644 index 48ece8615..000000000 --- a/tests/integration/concurrent_transaction_test/README.md +++ /dev/null @@ -1,33 +0,0 @@ - -This test warrants some explanation. In dbt <=0.10.1, Redshift table and view materializations suffered from issues around concurrent transactions. In order to reliably reproduce this error, a query needs to select from a dbt model as the table is being rebuilt. Critically, this concurrent select needs to query the table during the drop/swap portition of the materialization. This looks like: - -```sql -begin; -create table as (...); -drop table old_table cascade; -// <---- The concurrent query needs to be running here! -alter table new_table rename to old_table; -commit; -``` - -In order to reliably reproduce this failure, the model shown above needs to block for a long time between the `drop` and `alter` statements. We can't just stick a sleep() call in there, as this code is defined in the materialization. 
Instead, we can reliably reproduce the failure by: - -1) creating a view that depends on this model -2) issuing a long-running query on the view before `dbt run` is invoked -3) issuing _another_ long-running query against the original model - -Since long-running query (step 2) is selecting from the view, Redshift blocks on the `drop ... cascade`, of the materialization, which causes the query from step 3 time to overlap with the critical section of the materialization between the `drop` and `alter` statements. - -In dbt v0.10.1, this integration test results in: - -``` -====================================================================== -FAIL: test__redshift__concurrent_transaction (test_concurrent_transaction.TestConcurrentTransaction) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "/usr/src/app/test/integration/032_concurrent_transaction_test/test_concurrent_transaction.py", line 84, in test__redshift__concurrent_transaction - self.assertEqual(self.query_state['model_1'], 'good') -AssertionError: 'error: table 3379442 dropped by concurrent transaction\n' != 'good' -- error: table 3379442 dropped by concurrent transaction -+ good -``` diff --git a/tests/integration/concurrent_transaction_test/macros/udfs.sql b/tests/integration/concurrent_transaction_test/macros/udfs.sql deleted file mode 100644 index 8fc46d110..000000000 --- a/tests/integration/concurrent_transaction_test/macros/udfs.sql +++ /dev/null @@ -1,13 +0,0 @@ - -{% macro create_udfs() %} - -CREATE OR REPLACE FUNCTION {{ target.schema }}.f_sleep (x float) -RETURNS bool IMMUTABLE -AS -$$ - from time import sleep - sleep(x) - return True -$$ LANGUAGE plpythonu; - -{% endmacro %} diff --git a/tests/integration/concurrent_transaction_test/models-incremental/model_1.sql b/tests/integration/concurrent_transaction_test/models-incremental/model_1.sql deleted file mode 100644 index 3d8ac43ba..000000000 --- 
a/tests/integration/concurrent_transaction_test/models-incremental/model_1.sql +++ /dev/null @@ -1,9 +0,0 @@ - -{{ config(materialized='incremental', unique_key='id') }} - --- incremental model -select 1 as id - -{% if is_incremental() %} - where TRUE -{% endif %} diff --git a/tests/integration/concurrent_transaction_test/models-incremental/view_model.sql b/tests/integration/concurrent_transaction_test/models-incremental/view_model.sql deleted file mode 100644 index 40b85c8fc..000000000 --- a/tests/integration/concurrent_transaction_test/models-incremental/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ - - -select * from {{ ref('model_1') }} diff --git a/tests/integration/concurrent_transaction_test/models-table/model_1.sql b/tests/integration/concurrent_transaction_test/models-table/model_1.sql deleted file mode 100644 index 344a2e8bd..000000000 --- a/tests/integration/concurrent_transaction_test/models-table/model_1.sql +++ /dev/null @@ -1,5 +0,0 @@ - -{{ config(materialized='table') }} - --- table model -select 1 as id diff --git a/tests/integration/concurrent_transaction_test/models-table/view_model.sql b/tests/integration/concurrent_transaction_test/models-table/view_model.sql deleted file mode 100644 index 40b85c8fc..000000000 --- a/tests/integration/concurrent_transaction_test/models-table/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ - - -select * from {{ ref('model_1') }} diff --git a/tests/integration/concurrent_transaction_test/models-view/model_1.sql b/tests/integration/concurrent_transaction_test/models-view/model_1.sql deleted file mode 100644 index 21a96e98c..000000000 --- a/tests/integration/concurrent_transaction_test/models-view/model_1.sql +++ /dev/null @@ -1,5 +0,0 @@ - -{{ config(materialized='view') }} - --- view model -select 1 as id diff --git a/tests/integration/concurrent_transaction_test/models-view/view_model.sql b/tests/integration/concurrent_transaction_test/models-view/view_model.sql deleted file mode 100644 index 40b85c8fc..000000000 
--- a/tests/integration/concurrent_transaction_test/models-view/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ - - -select * from {{ ref('model_1') }} diff --git a/tests/integration/concurrent_transaction_test/test_concurrent_transaction.py b/tests/integration/concurrent_transaction_test/test_concurrent_transaction.py deleted file mode 100644 index 8da9f7fb9..000000000 --- a/tests/integration/concurrent_transaction_test/test_concurrent_transaction.py +++ /dev/null @@ -1,136 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import threading -from dbt.adapters.factory import FACTORY - - -def get_adapter_standalone(config): - plugin = FACTORY.plugins[config.credentials.type] - cls = plugin.adapter - return cls(config) - - -class BaseTestConcurrentTransaction(DBTIntegrationTest): - - def reset(self): - self.query_state = { - 'view_model': 'wait', - 'model_1': 'wait', - } - - def setUp(self): - super().setUp() - self._secret_adapter = get_adapter_standalone(self.config) - self.reset() - - def tearDown(self): - self._secret_adapter.cleanup_connections() - super().tearDown() - - @property - def schema(self): - return "concurrent_transaction" - - @property - def project_config(self): - return { - 'config-version': 2, - "macro-paths": ["macros"], - "on-run-start": [ - "{{ create_udfs() }}", - ], - } - - def run_select_and_check(self, rel, sql): - connection_name = '__test_{}'.format(id(threading.current_thread())) - try: - with self._secret_adapter.connection_named(connection_name): - conn = self._secret_adapter.connections.get_thread_connection() - res = self.run_sql_common(self.transform_sql(sql), 'one', conn) - - # The result is the output of f_sleep(), which is True - if res[0]: - self.query_state[rel] = 'good' - else: - self.query_state[rel] = 'bad' - - except Exception as e: - if 'concurrent transaction' in str(e): - self.query_state[rel] = 'error: {}'.format(e) - else: - self.query_state[rel] = 'error: {}'.format(e) - - def async_select(self, 
rel, sleep=10): - # Run the select statement in a thread. When the query returns, the global - # query_state will be update with a state of good/bad/error, and the associated - # error will be reported if one was raised. - - schema = self.unique_schema() - query = ''' - -- async_select: {rel} - select {schema}.f_sleep({sleep}) from {schema}.{rel} - '''.format( - schema=schema, - sleep=sleep, - rel=rel) - - thread = threading.Thread(target=self.run_select_and_check, args=(rel, query)) - thread.start() - return thread - - def run_test(self): - self.use_profile("redshift") - - # First run the project to make sure the models exist - results = self.run_dbt(args=['run']) - self.assertEqual(len(results), 2) - - # Execute long-running queries in threads - t1 = self.async_select('view_model', 10) - t2 = self.async_select('model_1', 5) - - # While the queries are executing, re-run the project - res = self.run_dbt(args=['run', '--threads', '8']) - self.assertEqual(len(res), 2) - - # Finally, wait for these threads to finish - t1.join() - t2.join() - - self.assertTrue(len(res) > 0) - - # If the query succeeded, the global query_state should be 'good' - self.assertEqual(self.query_state['view_model'], 'good') - self.assertEqual(self.query_state['model_1'], 'good') - - -class TableTestConcurrentTransaction(BaseTestConcurrentTransaction): - @property - def models(self): - return "models-table" - -# @use_profile("redshift") -# def test__redshift__concurrent_transaction_table(self): -# self.reset() -# self.run_test() - - -class ViewTestConcurrentTransaction(BaseTestConcurrentTransaction): - @property - def models(self): - return "models-view" - -# @use_profile("redshift") -# def test__redshift__concurrent_transaction_view(self): -# self.reset() -# self.run_test() - - -class IncrementalTestConcurrentTransaction(BaseTestConcurrentTransaction): - @property - def models(self): - return "models-incremental" - -# @use_profile("redshift") -# def 
test__redshift__concurrent_transaction_incremental(self): -# self.reset() -# self.run_test() From 6029336548fd707ff0ac83646086fddac0f850a6 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 1 Mar 2023 15:09:16 -0500 Subject: [PATCH 039/113] Standardize dev requirements (#339) * pinned dev requirements to major version, synced across adapters * hard pinned mypy * pinned wheel due to vulnerability in version 0.30 * added py37-specific pins where needed * updated requirements and pre-commit-config to standard for adapters * update flake8 config * black --- .flake8 | 12 ++++---- .pre-commit-config.yaml | 18 ++++++------ Makefile | 5 ++++ dbt/adapters/redshift/connections.py | 1 - dev-requirements.txt | 42 +++++++++++++++++----------- 5 files changed, 46 insertions(+), 32 deletions(-) diff --git a/.flake8 b/.flake8 index f39d154c0..bbc3202a0 100644 --- a/.flake8 +++ b/.flake8 @@ -4,9 +4,11 @@ select = W F ignore = - W503 # makes Flake8 work like black - W504 - E203 # makes Flake8 work like black - E741 - E501 + # makes Flake8 work like black + W503, + W504, + # makes Flake8 work like black + E203, + E741, + E501, exclude = test diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a4e34d870..1141ccc97 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,11 +5,11 @@ exclude: '^tests/.*' # Force all unspecified python hooks to run python 3.8 default_language_version: - python: python3.8 + python: python3 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.4.0 hooks: - id: check-yaml args: [--unsafe] @@ -18,31 +18,31 @@ repos: - id: trailing-whitespace - id: check-case-conflict - repo: https://github.com/psf/black - rev: 21.12b0 + rev: 23.1.0 hooks: - id: black - additional_dependencies: ['click==8.0.4'] + additional_dependencies: ['click~=8.1'] args: - "--line-length=99" - "--target-version=py38" - id: black alias: black-check stages: [manual] - 
additional_dependencies: ['click==8.0.4'] + additional_dependencies: ['click~=8.1'] args: - "--line-length=99" - "--target-version=py38" - "--check" - "--diff" - repo: https://github.com/pycqa/flake8 - rev: 4.0.1 + rev: 6.0.0 hooks: - id: flake8 - id: flake8 alias: flake8-check stages: [manual] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.942 + rev: v1.0.1 hooks: - id: mypy # N.B.: Mypy is... a bit fragile. @@ -55,12 +55,12 @@ repos: # of our control to the mix. Unfortunately, there's nothing we can # do about per pre-commit's author. # See https://github.com/pre-commit/pre-commit/issues/730 for details. - args: [--show-error-codes, --ignore-missing-imports] + args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases] files: ^dbt/adapters/.* language: system - id: mypy alias: mypy-check stages: [manual] - args: [--show-error-codes, --pretty, --ignore-missing-imports] + args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases] files: ^dbt/adapters language: system diff --git a/Makefile b/Makefile index 924c6a26f..0cc3a43d6 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,11 @@ dev: ## Installs adapter in develop mode along with development dependencies @\ pip install -e . -r dev-requirements.txt && pre-commit install +.PHONY: dev-uninstall +dev-uninstall: ## Uninstalls all packages while maintaining the virtual environment + ## Useful when updating versions, or if you accidentally installed into the system interpreter + pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y + .PHONY: mypy mypy: ## Runs mypy against staged changes for static type checking. 
@\ diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index cf2cbf8cc..465087d72 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -248,7 +248,6 @@ def execute( return response, table def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False): - connection = None cursor = None diff --git a/dev-requirements.txt b/dev-requirements.txt index 6eac3d2b4..1345b8abc 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -4,22 +4,30 @@ git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-postgres&subdirectory=plugins/postgres -black~=22.8.0 -click~=8.1.3 +# if version 1.x or greater -> pin to major version +# if version 0.x -> pin to minor +black~=23.1 bumpversion~=0.6.0 -flake8 -flaky~=3.7.0 -freezegun~=0.3.12 -ipdb~=0.13.9 -mypy~=0.971.0 -pip-tools~=6.11.0 -pre-commit~=2.20.0 -pytest~=7.2.0 +click~=8.1 +flake8~=5.0;python_version=="3.7" +flake8~=6.0;python_version>="3.8" +flaky~=3.7 +freezegun~=1.2 +ipdb~=0.13.11 +mypy==1.0.1 # patch updates have historically introduced breaking changes +pip-tools~=6.12 +pre-commit~=2.21;python_version=="3.7" +pre-commit~=3.1;python_version>="3.8" +pre-commit-hooks~=4.4 +pytest~=7.2 +pytest-csv~=3.0 pytest-dotenv~=0.5.2 -pytest-logbook~=1.2.0 -pytest-csv~=3.0.0 -pytest-xdist~=3.1.0 -pytz~=2022.6.0 -tox~=4.0.0 -twine~=4.0.2 -wheel~=0.37.1 +pytest-logbook~=1.2 +pytest-xdist~=3.2 +pytz~=2022.7 +tox~=3.0;python_version=="3.7" +tox~=4.4;python_version>="3.8" +types-pytz~=2022.7 +types-requests~=2.28 +twine~=4.0 +wheel~=0.38 From 92d752706ef2dad76663106fc90a94b4836d6b80 Mon Sep 17 00:00:00 2001 From: Nathaniel May Date: Wed, 1 Mar 2023 16:52:19 -0500 Subject: [PATCH 040/113] Revert "Revert "mirror issues to ADAP jira project (#322)" (#338)" (#353) This reverts 
commit fdfe8ec69b2bb13aa08ae0ae46716eee614931a6. --- .github/workflows/jira-creation.yml | 4 +++- .github/workflows/jira-label.yml | 4 +++- .github/workflows/jira-transition.yml | 7 ++++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/jira-creation.yml b/.github/workflows/jira-creation.yml index b4016befc..2611a8bdd 100644 --- a/.github/workflows/jira-creation.yml +++ b/.github/workflows/jira-creation.yml @@ -19,7 +19,9 @@ permissions: jobs: call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main + uses: dbt-labs/actions/.github/workflows/jira-creation.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-label.yml b/.github/workflows/jira-label.yml index 3da2e3a38..1637cbe38 100644 --- a/.github/workflows/jira-label.yml +++ b/.github/workflows/jira-label.yml @@ -19,7 +19,9 @@ permissions: jobs: call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main + uses: dbt-labs/actions/.github/workflows/jira-label.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-transition.yml b/.github/workflows/jira-transition.yml index ed9f9cd4f..99158a15f 100644 --- a/.github/workflows/jira-transition.yml +++ b/.github/workflows/jira-transition.yml @@ -15,9 +15,14 @@ on: issues: types: [closed, deleted, reopened] +# no special access is needed +permissions: read-all + jobs: call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main + uses: dbt-labs/actions/.github/workflows/jira-transition.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} From 7fe8328d4c2275d33fac0a09adb27d254d585b66 Mon Sep 17 00:00:00 2001 From: Github Build Bot 
Date: Thu, 2 Mar 2023 19:30:43 +0000 Subject: [PATCH 041/113] Bumping version to 1.5.0b2 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.5.0-b2.md | 5 +++++ .../Under the Hood-20230223-110314.yaml | 0 CHANGELOG.md | 9 ++++++++- dbt/adapters/redshift/__version__.py | 2 +- 5 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 .changes/1.5.0-b2.md rename .changes/{unreleased => 1.5.0}/Under the Hood-20230223-110314.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 8c66bdf30..40acefba6 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.5.0b1 +current_version = 1.5.0b2 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.5.0-b2.md b/.changes/1.5.0-b2.md new file mode 100644 index 000000000..c46ca4493 --- /dev/null +++ b/.changes/1.5.0-b2.md @@ -0,0 +1,5 @@ +## dbt-redshift 1.5.0-b2 - March 02, 2023 + +### Under the Hood + +- Rename constraints_enabled to contract ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330)) diff --git a/.changes/unreleased/Under the Hood-20230223-110314.yaml b/.changes/1.5.0/Under the Hood-20230223-110314.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20230223-110314.yaml rename to .changes/1.5.0/Under the Hood-20230223-110314.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index d23b8f543..dc87d0f99 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,14 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.5.0-b2 - March 02, 2023 + +### Under the Hood + +- Rename constraints_enabled to contract ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330)) + + + ## dbt-redshift 1.5.0-b1 - February 22, 2023 ### Features @@ -22,7 +30,6 @@ - [@dave-connors-3](https://github.com/dave-connors-3) ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) - [@sathiish-kumar](https://github.com/sathiish-kumar) ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) - ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index c3758128c..4f8b15313 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.5.0b1" +version = "1.5.0b2" From 6d55588ca74aae4339fecf8eb19b029d690a3922 Mon Sep 17 00:00:00 2001 From: Peter Webb Date: Fri, 3 Mar 2023 14:45:05 -0500 Subject: [PATCH 042/113] CT-2103: Implement data_type_code_to_name for redshift (#346) * CT-2103: Implement data_type_code_to_name for redshift * CT-2103: Hardcode dbt version for testing. 
* CT-2103: Update dev-requirements to use branch name Co-authored-by: Jeremy Cohen * CT-2103: Fix mypy complaint * CT-2103: Provide a Redshift specific override of test cases for new tests * CT-2103: Remove now-unneeded fixture overrides * CT-2103: Remove temporary branch hacks in dev-requirements.txt * CT-2103: Revert temporary branch changes to setup.py --------- Co-authored-by: Jeremy Cohen Co-authored-by: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .../unreleased/Features-20230301-113553.yaml | 6 ++++++ dbt/adapters/redshift/connections.py | 7 ++++++- tests/functional/adapter/test_constraints.py | 17 ++++++++++++++++- 3 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 .changes/unreleased/Features-20230301-113553.yaml diff --git a/.changes/unreleased/Features-20230301-113553.yaml b/.changes/unreleased/Features-20230301-113553.yaml new file mode 100644 index 000000000..8ed2087ef --- /dev/null +++ b/.changes/unreleased/Features-20230301-113553.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Implemented data_type_code_to_name for redshift +time: 2023-03-01T11:35:53.98885-05:00 +custom: + Author: peterallenwebb + Issue: "319" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 465087d72..d4804bd8b 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -1,7 +1,7 @@ import re from multiprocessing import Lock from contextlib import contextmanager -from typing import NewType, Tuple +from typing import NewType, Tuple, Union import agate import sqlparse @@ -18,6 +18,7 @@ from dbt.helper_types import Port from redshift_connector import OperationalError, DatabaseError, DataError +from redshift_connector.utils.oids import get_datatype_name logger = AdapterLogger("Redshift") @@ -278,3 +279,7 @@ def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False): 
@classmethod def get_credentials(cls, credentials): return credentials + + @classmethod + def data_type_code_to_name(cls, type_code: Union[int, str]) -> str: + return get_datatype_name(type_code) diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 9c213c85d..0b8091a78 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -22,8 +22,23 @@ ; """ + class TestRedshiftConstraintsColumnsEqual(BaseConstraintsColumnsEqual): - pass + @pytest.fixture + def data_types(self, schema_int_type, int_type, string_type): + # NOTE: Unlike some other adapters, we don't test array or JSON types here, because + # Redshift does not support them as materialized table column types. + + # sql_column_value, schema_data_type, error_data_type + return [ + ["1", schema_int_type, int_type], + ["'1'", string_type, string_type], + ["cast('2019-01-01' as date)", "date", "DATE"], + ["true", "bool", "BOOL"], + ["'2013-11-03 00:00:00-07'::timestamptz", "timestamptz", "TIMESTAMPTZ"], + ["'2013-11-03 00:00:00-07'::timestamp", "timestamp", "TIMESTAMP"], + ["'1'::numeric", "numeric", "NUMERIC"] + ] class TestRedshiftConstraintsRuntimeEnforcement(BaseConstraintsRuntimeEnforcement): From 5719a7e5c881e4746fb77f6cf5e15f09418dd8bb Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Wed, 8 Mar 2023 11:27:56 -0600 Subject: [PATCH 043/113] support contracts on models materialized as view #584 (#360) * first pass with contract check * rename test class * clean up test * point to branch * fix whitespace * fix class name * remove dbt-core pin --- .changes/unreleased/Features-20230301-113553.yaml | 6 +++--- dbt/include/redshift/macros/adapters.sql | 5 ++++- tests/functional/adapter/test_constraints.py | 12 ++++++++++-- 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/.changes/unreleased/Features-20230301-113553.yaml b/.changes/unreleased/Features-20230301-113553.yaml index 
8ed2087ef..40cc40ed9 100644 --- a/.changes/unreleased/Features-20230301-113553.yaml +++ b/.changes/unreleased/Features-20230301-113553.yaml @@ -1,6 +1,6 @@ kind: Features -body: Implemented data_type_code_to_name for redshift +body: Enforce contracts on models materialized as tables and views time: 2023-03-01T11:35:53.98885-05:00 custom: - Author: peterallenwebb - Issue: "319" + Author: peterallenwebb emmyoop + Issue: 319 340 diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index 3d81e7e92..ede52b353 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -83,7 +83,10 @@ {{ sql_header if sql_header is not none }} - create view {{ relation }} as ( + create view {{ relation }} + {% if config.get('contract', False) -%} + {{ get_assert_columns_equivalent(sql) }} + {%- endif %} as ( {{ sql }} ) {{ bind_qualifier }}; {% endmacro %} diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 0b8091a78..94283fc3e 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -1,7 +1,8 @@ import pytest from dbt.tests.util import relation_from_name from dbt.tests.adapter.constraints.test_constraints import ( - BaseConstraintsColumnsEqual, + BaseTableConstraintsColumnsEqual, + BaseViewConstraintsColumnsEqual, BaseConstraintsRuntimeEnforcement ) @@ -23,7 +24,7 @@ """ -class TestRedshiftConstraintsColumnsEqual(BaseConstraintsColumnsEqual): +class RedshiftColumnEqualSetup: @pytest.fixture def data_types(self, schema_int_type, int_type, string_type): # NOTE: Unlike some other adapters, we don't test array or JSON types here, because @@ -41,6 +42,13 @@ def data_types(self, schema_int_type, int_type, string_type): ] +class TestRedshiftTableConstraintsColumnsEqual(RedshiftColumnEqualSetup, BaseTableConstraintsColumnsEqual): + pass + + +class 
TestRedshiftViewConstraintsColumnsEqual(RedshiftColumnEqualSetup, BaseViewConstraintsColumnsEqual): + pass + class TestRedshiftConstraintsRuntimeEnforcement(BaseConstraintsRuntimeEnforcement): @pytest.fixture(scope="class") def expected_sql(self, project): From b46d91b99476112aac293ee6a20da73d3dda163d Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Wed, 8 Mar 2023 15:32:42 -0600 Subject: [PATCH 044/113] work on adapter cut-release and nightly-release update (#345) * work on adapter cut-release and nightly-release update * fix pre-commit issue, and fix varaible subsitution description * changes to regex * remove dummy value * fix regex for update_release_branch * update to main, and change pointer for testing in ci/cd * update pr body * updating remote * point back to main branch for action * change action pointer to test new logic * change action pointer back to main post testing * test slight change * point back to @main action branch post testing --- .github/scripts/update_dependencies.sh | 18 ++++++++++++++++++ .github/scripts/update_release_branch.sh | 11 +++++++++++ .github/workflows/cut-release-branch.yml | 2 +- 3 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 .github/scripts/update_dependencies.sh create mode 100644 .github/scripts/update_release_branch.sh diff --git a/.github/scripts/update_dependencies.sh b/.github/scripts/update_dependencies.sh new file mode 100644 index 000000000..6000b5006 --- /dev/null +++ b/.github/scripts/update_dependencies.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e +set -e + +git_branch=$1 +target_req_file="dev-requirements.txt" +core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${git_branch}#egg=dbt-core|g" +postgres_req_sed_pattern="s|dbt-core.git.*#egg=dbt-postgres|dbt-core.git@${git_branch}#egg=dbt-postgres|g" +tests_req_sed_pattern="s|dbt-core.git.*#egg=dbt-tests|dbt-core.git@${git_branch}#egg=dbt-tests|g" +if [[ "$OSTYPE" == darwin* ]]; then + # 
mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$core_req_sed_pattern" $target_req_file + sed -i "" "$postgres_req_sed_pattern" $target_req_file + sed -i "" "$tests_req_sed_pattern" $target_req_file +else + sed -i "$core_req_sed_pattern" $target_req_file + sed -i "$postgres_req_sed_pattern" $target_req_file + sed -i "$tests_req_sed_pattern" $target_req_file +fi diff --git a/.github/scripts/update_release_branch.sh b/.github/scripts/update_release_branch.sh new file mode 100644 index 000000000..75b9ccef6 --- /dev/null +++ b/.github/scripts/update_release_branch.sh @@ -0,0 +1,11 @@ +#!/bin/bash -e +set -e + +release_branch=$1 +target_req_file=".github/workflows/nightly-release.yml" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "s|[0-9].[0-9].latest|$release_branch|" $target_req_file +else + sed -i "s|[0-9].[0-9].latest|$release_branch|" $target_req_file +fi diff --git a/.github/workflows/cut-release-branch.yml b/.github/workflows/cut-release-branch.yml index 0d4702c41..1c487a8a8 100644 --- a/.github/workflows/cut-release-branch.yml +++ b/.github/workflows/cut-release-branch.yml @@ -37,6 +37,6 @@ jobs: version_to_bump_main: ${{ inputs.version_to_bump_main }} new_branch_name: ${{ inputs.new_branch_name }} PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch" - PR_body: "This PR will fail CI until the dbt-core PR has been merged due to release version conflicts. dev-requirements.txt needs to be updated to have the dbt-core dependencies point to this new branch." + PR_body: "This PR will fail CI until the dbt-core PR has been merged due to release version conflicts." 
secrets: FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }} From a6ee39e1d25246af66a09d54b1ae79c2823f1c73 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Mon, 13 Mar 2023 12:37:56 -0600 Subject: [PATCH 045/113] add triage-labels workflow (#366) * add triage-labels workflow * fix end of files --- .github/workflows/triage-labels.yml | 33 +++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 .github/workflows/triage-labels.yml diff --git a/.github/workflows/triage-labels.yml b/.github/workflows/triage-labels.yml new file mode 100644 index 000000000..a71dc5e1f --- /dev/null +++ b/.github/workflows/triage-labels.yml @@ -0,0 +1,33 @@ +# **what?** +# When the core team triages, we sometimes need more information from the issue creator. In +# those cases we remove the `triage` label and add the `awaiting_response` label. Once we +# recieve a response in the form of a comment, we want the `awaiting_response` label removed +# in favor of the `triage` label so we are aware that the issue needs action. + +# **why?** +# To help with out team triage issue tracking + +# **when?** +# This will run when a comment is added to an issue and that issue has to `awaiting_response` label. + +name: Update Triage Label + +on: issue_comment + +defaults: + run: + shell: bash + +permissions: + issues: write + +jobs: + triage_label: + if: contains(github.event.issue.labels.*.name, 'awaiting_response') + runs-on: ubuntu-latest + steps: + - name: initial labeling + uses: andymckay/labeler@master + with: + add-labels: "triage" + remove-labels: "awaiting_response" From b5d29c321ff33ef5854f694d90b063e81c979673 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Wed, 15 Mar 2023 10:43:26 -0700 Subject: [PATCH 046/113] Add tests to precommit hooks and so close out this repo for test conversions. 
(#371) Co-authored-by: Mila Page --- .pre-commit-config.yaml | 3 - pytest.ini | 1 - tests/conftest.py | 16 +- tests/functional/adapter/common.py | 16 +- tests/functional/adapter/conftest.py | 4 +- .../test_incremental_on_schema_change.py | 5 +- .../incremental/test_incremental_unique_id.py | 2 +- .../adapter/snapshot_tests/test_snapshot.py | 36 +- tests/functional/adapter/test_backup_table.py | 15 +- tests/functional/adapter/test_basic.py | 25 +- .../adapter/test_changing_relation_type.py | 3 +- tests/functional/adapter/test_column_types.py | 11 +- tests/functional/adapter/test_constraints.py | 21 +- tests/functional/adapter/test_grants.py | 2 - .../adapter/test_late_binding_view.py | 11 +- tests/functional/adapter/test_macros.py | 20 +- tests/functional/adapter/test_persist_docs.py | 26 +- .../functional/adapter/test_query_comment.py | 8 +- .../functional/adapter/test_relation_name.py | 18 +- tests/functional/adapter/test_simple_seed.py | 10 +- .../adapter/test_store_test_failures.py | 4 +- .../adapter/utils/test_data_types.py | 11 +- .../adapter/utils/test_timestamps.py | 2 +- tests/functional/adapter/utils/test_utils.py | 3 - tests/unit/mock_adapter.py | 6 +- tests/unit/test_context.py | 184 +++++---- tests/unit/test_redshift_adapter.py | 370 ++++++++++-------- tests/unit/utils.py | 52 +-- 28 files changed, 452 insertions(+), 433 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1141ccc97..b748e03ec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,5 @@ # For more on configuring pre-commit hooks (see https://pre-commit.com/) -# TODO: remove global exclusion of tests when testing overhaul is complete -exclude: '^tests/.*' - # Force all unspecified python hooks to run python 3.8 default_language_version: python: python3 diff --git a/pytest.ini b/pytest.ini index b04a6ccf3..b3d74bc14 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,5 +6,4 @@ env_files = test.env testpaths = tests/unit - tests/integration 
tests/functional diff --git a/tests/conftest.py b/tests/conftest.py index 18fcbb714..96f0d43e4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,12 +11,12 @@ @pytest.fixture(scope="class") def dbt_profile_target(): return { - 'type': 'redshift', - 'threads': 1, - 'retries': 6, - 'host': os.getenv('REDSHIFT_TEST_HOST'), - 'port': int(os.getenv('REDSHIFT_TEST_PORT')), - 'user': os.getenv('REDSHIFT_TEST_USER'), - 'pass': os.getenv('REDSHIFT_TEST_PASS'), - 'dbname': os.getenv('REDSHIFT_TEST_DBNAME'), + "type": "redshift", + "threads": 1, + "retries": 6, + "host": os.getenv("REDSHIFT_TEST_HOST"), + "port": int(os.getenv("REDSHIFT_TEST_PORT")), + "user": os.getenv("REDSHIFT_TEST_USER"), + "pass": os.getenv("REDSHIFT_TEST_PASS"), + "dbname": os.getenv("REDSHIFT_TEST_DBNAME"), } diff --git a/tests/functional/adapter/common.py b/tests/functional/adapter/common.py index 914e3fcf8..ce7c0903b 100644 --- a/tests/functional/adapter/common.py +++ b/tests/functional/adapter/common.py @@ -4,7 +4,9 @@ from dbt.tests.fixtures.project import TestProjInfo -def get_records(project: TestProjInfo, table: str, select: str = None, where: str = None) -> List[tuple]: +def get_records( + project: TestProjInfo, table: str, select: str = None, where: str = None +) -> List[tuple]: """ Gets records from a single table in a dbt project @@ -39,7 +41,9 @@ def update_records(project: TestProjInfo, table: str, updates: Dict[str, str], w where: the where clause to apply, if any; defaults to all records """ table_name = relation_from_name(project.adapter, table) - set_clause = ', '.join([' = '.join([field, expression]) for field, expression in updates.items()]) + set_clause = ", ".join( + [" = ".join([field, expression]) for field, expression in updates.items()] + ) where_clause = where or "1 = 1" sql = f""" update {table_name} @@ -49,7 +53,9 @@ def update_records(project: TestProjInfo, table: str, updates: Dict[str, str], w project.run_sql(sql) -def insert_records(project: TestProjInfo, 
to_table: str, from_table: str, select: str, where: str = None): +def insert_records( + project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None +): """ Inserts records from one table into another table in a dbt project @@ -91,7 +97,9 @@ def delete_records(project: TestProjInfo, table: str, where: str = None): project.run_sql(sql) -def clone_table(project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None): +def clone_table( + project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None +): """ Creates a new table based on another table in a dbt project diff --git a/tests/functional/adapter/conftest.py b/tests/functional/adapter/conftest.py index e4aa4fe31..c5c980154 100644 --- a/tests/functional/adapter/conftest.py +++ b/tests/functional/adapter/conftest.py @@ -21,5 +21,5 @@ def test_setting_reflects_config_option(self, model_ddl: str, backup_expected: b In this example, the fixture returns the contents of the backup_is_false DDL file as a string. This string is then referenced in the test as model_ddl. 
""" - with open(f"target/run/test/models/{request.param}.sql", 'r') as ddl_file: - yield '\n'.join(ddl_file.readlines()) + with open(f"target/run/test/models/{request.param}.sql", "r") as ddl_file: + yield "\n".join(ddl_file.readlines()) diff --git a/tests/functional/adapter/incremental/test_incremental_on_schema_change.py b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py index 192097bc5..7b73d212b 100644 --- a/tests/functional/adapter/incremental/test_incremental_on_schema_change.py +++ b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py @@ -1,4 +1,7 @@ -from dbt.tests.adapter.incremental.test_incremental_on_schema_change import BaseIncrementalOnSchemaChange +from dbt.tests.adapter.incremental.test_incremental_on_schema_change import ( + BaseIncrementalOnSchemaChange, +) + class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange): pass diff --git a/tests/functional/adapter/incremental/test_incremental_unique_id.py b/tests/functional/adapter/incremental/test_incremental_unique_id.py index 14e9b7ea8..5fcdfbe16 100644 --- a/tests/functional/adapter/incremental/test_incremental_unique_id.py +++ b/tests/functional/adapter/incremental/test_incremental_unique_id.py @@ -2,4 +2,4 @@ class TestUniqueKeyRedshift(BaseIncrementalUniqueKey): - pass \ No newline at end of file + pass diff --git a/tests/functional/adapter/snapshot_tests/test_snapshot.py b/tests/functional/adapter/snapshot_tests/test_snapshot.py index 0f6153f47..9a4ef7694 100644 --- a/tests/functional/adapter/snapshot_tests/test_snapshot.py +++ b/tests/functional/adapter/snapshot_tests/test_snapshot.py @@ -16,7 +16,6 @@ class SnapshotBase: - @pytest.fixture(scope="class") def seeds(self): """ @@ -80,9 +79,9 @@ def delete_snapshot_records(self): common.delete_records(self.project, "snapshot") def _assert_results( - self, - ids_with_current_snapshot_records: Iterable, - ids_with_closed_out_snapshot_records: Iterable + self, + 
ids_with_current_snapshot_records: Iterable, + ids_with_closed_out_snapshot_records: Iterable, ): """ All test cases are checked by considering whether a source record's id has a value in `dbt_valid_to` @@ -106,13 +105,12 @@ def _assert_results( records = set(self.get_snapshot_records("id, dbt_valid_to is null as is_current")) expected_records = set().union( {(i, True) for i in ids_with_current_snapshot_records}, - {(i, False) for i in ids_with_closed_out_snapshot_records} + {(i, False) for i in ids_with_closed_out_snapshot_records}, ) assert records == expected_records class TestSnapshot(SnapshotBase): - @pytest.fixture(scope="class") def snapshots(self): return {"snapshot.sql": snapshots.SNAPSHOT_TIMESTAMP_SQL} @@ -121,11 +119,13 @@ def test_updates_are_captured_by_snapshot(self, project): """ Update the last 5 records. Show that all ids are current, but the last 5 reflect updates. """ - self.update_fact_records({"updated_at": "updated_at + interval '1 day'"}, "id between 16 and 20") + self.update_fact_records( + {"updated_at": "updated_at + interval '1 day'"}, "id between 16 and 20" + ) run_dbt(["snapshot"]) self._assert_results( ids_with_current_snapshot_records=range(1, 21), - ids_with_closed_out_snapshot_records=range(16, 21) + ids_with_closed_out_snapshot_records=range(16, 21), ) def test_inserts_are_captured_by_snapshot(self, project): @@ -135,8 +135,7 @@ def test_inserts_are_captured_by_snapshot(self, project): self.insert_fact_records("id between 21 and 30") run_dbt(["snapshot"]) self._assert_results( - ids_with_current_snapshot_records=range(1, 31), - ids_with_closed_out_snapshot_records=[] + ids_with_current_snapshot_records=range(1, 31), ids_with_closed_out_snapshot_records=[] ) def test_deletes_are_captured_by_snapshot(self, project): @@ -147,7 +146,7 @@ def test_deletes_are_captured_by_snapshot(self, project): run_dbt(["snapshot"]) self._assert_results( ids_with_current_snapshot_records=range(1, 16), - ids_with_closed_out_snapshot_records=range(16, 
21) + ids_with_closed_out_snapshot_records=range(16, 21), ) def test_revives_are_captured_by_snapshot(self, project): @@ -161,7 +160,7 @@ def test_revives_are_captured_by_snapshot(self, project): run_dbt(["snapshot"]) self._assert_results( ids_with_current_snapshot_records=range(1, 19), - ids_with_closed_out_snapshot_records=range(16, 21) + ids_with_closed_out_snapshot_records=range(16, 21), ) def test_new_column_captured_by_snapshot(self, project): @@ -176,17 +175,16 @@ def test_new_column_captured_by_snapshot(self, project): "full_name": "first_name || ' ' || last_name", "updated_at": "updated_at + interval '1 day'", }, - "id between 11 and 20" + "id between 11 and 20", ) run_dbt(["snapshot"]) self._assert_results( ids_with_current_snapshot_records=range(1, 21), - ids_with_closed_out_snapshot_records=range(11, 21) + ids_with_closed_out_snapshot_records=range(11, 21), ) class TestSnapshotCheck(SnapshotBase): - @pytest.fixture(scope="class") def snapshots(self): return {"snapshot.sql": snapshots.SNAPSHOT_CHECK_SQL} @@ -197,10 +195,12 @@ def test_column_selection_is_reflected_in_snapshot(self, project): Update the middle 10 records on a tracked column. (hence records 6-10 are updated on both) Show that all ids are current, and only the tracked column updates are reflected in `snapshot`. 
""" - self.update_fact_records({"last_name": "left(last_name, 3)"}, "id between 1 and 10") # not tracked - self.update_fact_records({"email": "left(email, 3)"}, "id between 6 and 15") # tracked + self.update_fact_records( + {"last_name": "left(last_name, 3)"}, "id between 1 and 10" + ) # not tracked + self.update_fact_records({"email": "left(email, 3)"}, "id between 6 and 15") # tracked run_dbt(["snapshot"]) self._assert_results( ids_with_current_snapshot_records=range(1, 21), - ids_with_closed_out_snapshot_records=range(6, 16) + ids_with_closed_out_snapshot_records=range(6, 16), ) diff --git a/tests/functional/adapter/test_backup_table.py b/tests/functional/adapter/test_backup_table.py index ee58615b3..4c24250f1 100644 --- a/tests/functional/adapter/test_backup_table.py +++ b/tests/functional/adapter/test_backup_table.py @@ -59,14 +59,12 @@ class BackupTableBase: - @pytest.fixture(scope="class", autouse=True) def _run_dbt(self, project): run_dbt(["run"]) class TestBackupTableOption(BackupTableBase): - @pytest.fixture(scope="class") def models(self): return { @@ -84,7 +82,7 @@ def models(self): ("backup_is_undefined", True), ("backup_is_true_view", True), ], - indirect=["model_ddl"] + indirect=["model_ddl"], ) def test_setting_reflects_config_option(self, model_ddl: str, backup_expected: bool): """ @@ -102,7 +100,6 @@ def test_setting_reflects_config_option(self, model_ddl: str, backup_expected: b class TestBackupTableSyntax(BackupTableBase): - @pytest.fixture(scope="class") def models(self): return { @@ -116,7 +113,7 @@ def models(self): ("syntax_with_distkey", "diststyle key distkey"), ("syntax_with_sortkey", "compound sortkey"), ], - indirect=["model_ddl"] + indirect=["model_ddl"], ) def test_backup_predicate_precedes_secondary_predicates(self, model_ddl, search_phrase): """ @@ -133,7 +130,6 @@ def test_backup_predicate_precedes_secondary_predicates(self, model_ddl, search_ class TestBackupTableProjectDefault(BackupTableBase): - @pytest.fixture(scope="class") 
def project_config_update(self): return {"models": {"backup": False}} @@ -147,11 +143,8 @@ def models(self): @pytest.mark.parametrize( "model_ddl,backup_expected", - [ - ("backup_is_true", True), - ("backup_is_undefined", False) - ], - indirect=["model_ddl"] + [("backup_is_true", True), ("backup_is_undefined", False)], + indirect=["model_ddl"], ) def test_setting_defaults_to_project_option(self, model_ddl: str, backup_expected: bool): """ diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py index 06cf9948f..d2289efa3 100644 --- a/tests/functional/adapter/test_basic.py +++ b/tests/functional/adapter/test_basic.py @@ -12,10 +12,17 @@ from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod from dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate, BaseDocsGenReferences -from dbt.tests.adapter.basic.expected_catalog import base_expected_catalog, no_stats, expected_references_catalog +from dbt.tests.adapter.basic.expected_catalog import ( + base_expected_catalog, + no_stats, + expected_references_catalog, +) from dbt.tests.adapter.basic.files import seeds_base_csv, seeds_added_csv, seeds_newcolumns_csv -from tests.functional.adapter.expected_stats import redshift_stats, redshift_ephemeral_summary_stats +from tests.functional.adapter.expected_stats import ( + redshift_stats, + redshift_ephemeral_summary_stats, +) # set the datatype of the name column in the 'added' seed so that it can hold the '_update' that's added @@ -86,19 +93,19 @@ class TestBaseAdapterMethod(BaseAdapterMethod): class TestDocsGenerateRedshift(BaseDocsGenerate): - @pytest.fixture(scope="class") + @pytest.fixture(scope="class") def expected_catalog(self, project, profile_user): return base_expected_catalog( - project, - role=profile_user, - id_type="integer", + project, + role=profile_user, + id_type="integer", text_type=AnyStringWith("character 
varying"), time_type="timestamp without time zone", - view_type="VIEW", - table_type="BASE TABLE", + view_type="VIEW", + table_type="BASE TABLE", model_stats=no_stats(), seed_stats=redshift_stats(), - ) + ) # TODO: update this or delete it diff --git a/tests/functional/adapter/test_changing_relation_type.py b/tests/functional/adapter/test_changing_relation_type.py index 1f0ba15ad..81ba99918 100644 --- a/tests/functional/adapter/test_changing_relation_type.py +++ b/tests/functional/adapter/test_changing_relation_type.py @@ -1,4 +1,5 @@ from dbt.tests.adapter.relations.test_changing_relation_type import BaseChangeRelationTypeValidator + class TestRedshiftChangeRelationTypes(BaseChangeRelationTypeValidator): - pass \ No newline at end of file + pass diff --git a/tests/functional/adapter/test_column_types.py b/tests/functional/adapter/test_column_types.py index 81d5ca0fa..e24167456 100644 --- a/tests/functional/adapter/test_column_types.py +++ b/tests/functional/adapter/test_column_types.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes -_MODEL_SQL = """ +_MODEL_SQL = """ select 1::smallint as smallint_col, 2::int as int_col, @@ -46,14 +46,11 @@ text_col: ['string', 'not number'] """ -class TestRedshiftColumnTypes(BaseColumnTypes): +class TestRedshiftColumnTypes(BaseColumnTypes): @pytest.fixture(scope="class") def models(self): - return { - "model.sql": _MODEL_SQL, - "schema.yml": _SCHEMA_YML - } + return {"model.sql": _MODEL_SQL, "schema.yml": _SCHEMA_YML} def test_run_and_test(self, project): - self.run_and_test() \ No newline at end of file + self.run_and_test() diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 94283fc3e..9918b5037 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -3,7 +3,7 @@ from dbt.tests.adapter.constraints.test_constraints import ( BaseTableConstraintsColumnsEqual, 
BaseViewConstraintsColumnsEqual, - BaseConstraintsRuntimeEnforcement + BaseConstraintsRuntimeEnforcement, ) _expected_sql_redshift = """ @@ -38,26 +38,29 @@ def data_types(self, schema_int_type, int_type, string_type): ["true", "bool", "BOOL"], ["'2013-11-03 00:00:00-07'::timestamptz", "timestamptz", "TIMESTAMPTZ"], ["'2013-11-03 00:00:00-07'::timestamp", "timestamp", "TIMESTAMP"], - ["'1'::numeric", "numeric", "NUMERIC"] + ["'1'::numeric", "numeric", "NUMERIC"], ] -class TestRedshiftTableConstraintsColumnsEqual(RedshiftColumnEqualSetup, BaseTableConstraintsColumnsEqual): +class TestRedshiftTableConstraintsColumnsEqual( + RedshiftColumnEqualSetup, BaseTableConstraintsColumnsEqual +): pass -class TestRedshiftViewConstraintsColumnsEqual(RedshiftColumnEqualSetup, BaseViewConstraintsColumnsEqual): +class TestRedshiftViewConstraintsColumnsEqual( + RedshiftColumnEqualSetup, BaseViewConstraintsColumnsEqual +): pass + class TestRedshiftConstraintsRuntimeEnforcement(BaseConstraintsRuntimeEnforcement): @pytest.fixture(scope="class") def expected_sql(self, project): relation = relation_from_name(project.adapter, "my_model") - tmp_relation = relation.incorporate( - path={"identifier": relation.identifier + "__dbt_tmp"} - ) + tmp_relation = relation.incorporate(path={"identifier": relation.identifier + "__dbt_tmp"}) return _expected_sql_redshift.format(tmp_relation) - + @pytest.fixture(scope="class") def expected_error_messages(self): - return ['Cannot insert a NULL value into column id'] + return ["Cannot insert a NULL value into column id"] diff --git a/tests/functional/adapter/test_grants.py b/tests/functional/adapter/test_grants.py index bbad59f96..b627e450a 100644 --- a/tests/functional/adapter/test_grants.py +++ b/tests/functional/adapter/test_grants.py @@ -1,7 +1,5 @@ -import pytest from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants from dbt.tests.adapter.grants.test_incremental_grants import BaseIncrementalGrants -from 
dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants from dbt.tests.adapter.grants.test_seed_grants import BaseSeedGrants from dbt.tests.adapter.grants.test_snapshot_grants import BaseSnapshotGrants diff --git a/tests/functional/adapter/test_late_binding_view.py b/tests/functional/adapter/test_late_binding_view.py index 7c7bfa69d..013bf06be 100644 --- a/tests/functional/adapter/test_late_binding_view.py +++ b/tests/functional/adapter/test_late_binding_view.py @@ -18,7 +18,6 @@ class TestLateBindingView: - @pytest.fixture(scope="class") def models(self): return { @@ -27,20 +26,18 @@ def models(self): @pytest.fixture(scope="class") def seeds(self): - return { - "seed.csv": _SEED_CSV - } + return {"seed.csv": _SEED_CSV} @pytest.fixture(scope="class") def project_config_update(self): return { - 'seeds': { - 'quote_columns': False, + "seeds": { + "quote_columns": False, } } def test_late_binding_view_query(self, project): - seed_run_result = run_dbt(['seed']) + seed_run_result = run_dbt(["seed"]) assert len(seed_run_result) == 1 run_result = run_dbt() assert len(run_result) == 1 diff --git a/tests/functional/adapter/test_macros.py b/tests/functional/adapter/test_macros.py index 0994cae28..0596ab549 100644 --- a/tests/functional/adapter/test_macros.py +++ b/tests/functional/adapter/test_macros.py @@ -22,33 +22,29 @@ {% endmacro %} {% macro dispatch_to_parent() %} - {% set macro = adapter.dispatch('dispatch_to_parent') %} - {{ macro() }} + {% set macro = adapter.dispatch('dispatch_to_parent') %} + {{ macro() }} {% endmacro %} {% macro default__dispatch_to_parent() %} - {% set msg = 'No default implementation of dispatch_to_parent' %} + {% set msg = 'No default implementation of dispatch_to_parent' %} {{ exceptions.raise_compiler_error(msg) }} {% endmacro %} {% macro postgres__dispatch_to_parent() %} - {{ return('') }} + {{ return('') }} {% endmacro %} """ -class TestRedshift: +class TestRedshift: @pytest.fixture(scope="class") def macros(self): - return { 
- "macro.sql": _MACRO_SQL - } + return {"macro.sql": _MACRO_SQL} @pytest.fixture(scope="class") def models(self): - return { - "model.sql": _MODEL_SQL - } + return {"model.sql": _MODEL_SQL} def test_inherited_macro(self, project): - run_dbt() \ No newline at end of file + run_dbt() diff --git a/tests/functional/adapter/test_persist_docs.py b/tests/functional/adapter/test_persist_docs.py index 4d18f8ec6..61b8bd5a6 100644 --- a/tests/functional/adapter/test_persist_docs.py +++ b/tests/functional/adapter/test_persist_docs.py @@ -27,31 +27,31 @@ class TestPersistDocsLateBinding(BasePersistDocsBase): @pytest.fixture(scope="class") def project_config_update(self): return { - 'models': { - 'test': { - '+persist_docs': { + "models": { + "test": { + "+persist_docs": { "relation": True, "columns": True, }, - 'view_model': { - 'bind': False, - } + "view_model": { + "bind": False, + }, } } } def test_comment_on_late_binding_view(self, project): run_dbt() - run_dbt(['docs', 'generate']) - with open('target/catalog.json') as fp: + run_dbt(["docs", "generate"]) + with open("target/catalog.json") as fp: catalog_data = json.load(fp) - assert 'nodes' in catalog_data - assert len(catalog_data['nodes']) == 4 - table_node = catalog_data['nodes']['model.test.table_model'] + assert "nodes" in catalog_data + assert len(catalog_data["nodes"]) == 4 + table_node = catalog_data["nodes"]["model.test.table_model"] view_node = self._assert_has_table_comments(table_node) - view_node = catalog_data['nodes']['model.test.view_model'] + view_node = catalog_data["nodes"]["model.test.view_model"] self._assert_has_view_comments(view_node, False, False) - no_docs_node = catalog_data['nodes']['model.test.no_docs_model'] + no_docs_node = catalog_data["nodes"]["model.test.no_docs_model"] self._assert_has_view_comments(no_docs_node, False, False) diff --git a/tests/functional/adapter/test_query_comment.py b/tests/functional/adapter/test_query_comment.py index 281a90867..db6a440d7 100644 --- 
a/tests/functional/adapter/test_query_comment.py +++ b/tests/functional/adapter/test_query_comment.py @@ -1,4 +1,3 @@ -import pytest from dbt.tests.adapter.query_comment.test_query_comment import ( BaseQueryComments, BaseMacroQueryComments, @@ -12,17 +11,22 @@ class TestQueryCommentsRedshift(BaseQueryComments): pass + class TestMacroQueryCommentsRedshift(BaseMacroQueryComments): pass + class TestMacroArgsQueryCommentsRedshift(BaseMacroArgsQueryComments): pass + class TestMacroInvalidQueryCommentsRedshift(BaseMacroInvalidQueryComments): pass + class TestNullQueryCommentsRedshift(BaseNullQueryComments): pass + class TestEmptyQueryCommentsRedshift(BaseEmptyQueryComments): - pass \ No newline at end of file + pass diff --git a/tests/functional/adapter/test_relation_name.py b/tests/functional/adapter/test_relation_name.py index 733ef7f49..f17bbda63 100644 --- a/tests/functional/adapter/test_relation_name.py +++ b/tests/functional/adapter/test_relation_name.py @@ -68,9 +68,7 @@ def setUp(self, project): @pytest.fixture(scope="class") def seeds(self): - return { - "seed.csv": seeds__seed - } + return {"seed.csv": seeds__seed} @pytest.fixture(scope="class") def project_config_update(self): @@ -84,12 +82,8 @@ def project_config_update(self): class TestAdapterDDL(TestAdapterDDLBase): @pytest.fixture(scope="class") def models(self): - relname_51_chars_long = ( - "incremental_table_whose_name_is_51_characters_abcde.sql" - ) - relname_52_chars_long = ( - "relation_whose_name_is_52_chars_long_abcdefghijklmno.sql" - ) + relname_51_chars_long = "incremental_table_whose_name_is_51_characters_abcde.sql" + relname_52_chars_long = "relation_whose_name_is_52_chars_long_abcdefghijklmno.sql" relname_63_chars_long = ( "relation_whose_name_is_63_chars_long_abcdefghijklmnopqrstuvwxyz.sql" ) @@ -110,7 +104,7 @@ def models(self): relname_63_chars_long: models__relationname_63_chars_long, relname_63_chars_long_b: models__relationname_63_chars_long, relname_64_chars_long: 
models__relationname_64_chars_long, - relname_127_chars_long: models__relationname_127_chars_long + relname_127_chars_long: models__relationname_127_chars_long, } def test_long_name_succeeds(self, project): @@ -127,9 +121,7 @@ def models(self): "relation_whose_name_is_127_characters89012345678901234567890123456" "78901234567890123456789012345678901234567890123456789012345678.sql" ) - return { - relname_128_chars_long: models__relationname_127_chars_long - } + return {relname_128_chars_long: models__relationname_127_chars_long} def test_too_long_of_name_fails(self, project): results = run_dbt(["run"], expect_pass=False) diff --git a/tests/functional/adapter/test_simple_seed.py b/tests/functional/adapter/test_simple_seed.py index 5c57f1895..1e8cc1cd5 100644 --- a/tests/functional/adapter/test_simple_seed.py +++ b/tests/functional/adapter/test_simple_seed.py @@ -60,9 +60,7 @@ def schema(self): @pytest.fixture(scope="class") def models(self): - return { - "models-rs.yml": _SCHEMA_YML - } + return {"models-rs.yml": _SCHEMA_YML} @staticmethod def seed_enabled_types(): @@ -74,9 +72,9 @@ def seed_enabled_types(): @staticmethod def seed_tricky_types(): return { - 'seed_id_str': 'text', - 'looks_like_a_bool': 'text', - 'looks_like_a_date': 'text', + "seed_id_str": "text", + "looks_like_a_bool": "text", + "looks_like_a_date": "text", } def test_redshift_simple_seed_with_column_override_redshift(self, project): diff --git a/tests/functional/adapter/test_store_test_failures.py b/tests/functional/adapter/test_store_test_failures.py index e6c0f38b4..5d6b70fbb 100644 --- a/tests/functional/adapter/test_store_test_failures.py +++ b/tests/functional/adapter/test_store_test_failures.py @@ -1,4 +1,6 @@ -from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import TestStoreTestFailures +from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import ( + TestStoreTestFailures, +) class RedshiftTestStoreTestFailures(TestStoreTestFailures): diff --git 
a/tests/functional/adapter/utils/test_data_types.py b/tests/functional/adapter/utils/test_data_types.py index 147a962b5..3201afcfb 100644 --- a/tests/functional/adapter/utils/test_data_types.py +++ b/tests/functional/adapter/utils/test_data_types.py @@ -1,4 +1,3 @@ -import pytest from dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt from dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat from dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt @@ -11,23 +10,23 @@ class TestTypeBigInt(BaseTypeBigInt): pass - + class TestTypeFloat(BaseTypeFloat): pass - + class TestTypeInt(BaseTypeInt): pass - + class TestTypeNumeric(BaseTypeNumeric): pass - + class TestTypeString(BaseTypeString): pass - + class TestTypeTimestamp(BaseTypeTimestamp): pass diff --git a/tests/functional/adapter/utils/test_timestamps.py b/tests/functional/adapter/utils/test_timestamps.py index 417bfab2b..6c525be44 100644 --- a/tests/functional/adapter/utils/test_timestamps.py +++ b/tests/functional/adapter/utils/test_timestamps.py @@ -17,4 +17,4 @@ def expected_sql(self): select getdate() as current_timestamp, getdate() as current_timestamp_in_utc_backcompat, getdate() as current_timestamp_backcompat - """ \ No newline at end of file + """ diff --git a/tests/functional/adapter/utils/test_utils.py b/tests/functional/adapter/utils/test_utils.py index 03b9cc916..266103fbc 100644 --- a/tests/functional/adapter/utils/test_utils.py +++ b/tests/functional/adapter/utils/test_utils.py @@ -1,5 +1,3 @@ -import pytest - from dbt.tests.adapter.utils.test_array_append import BaseArrayAppend from dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct @@ -12,7 +10,6 @@ from dbt.tests.adapter.utils.test_datediff import BaseDateDiff from dbt.tests.adapter.utils.test_date_trunc import BaseDateTrunc from dbt.tests.adapter.utils.test_escape_single_quotes import 
BaseEscapeSingleQuotesQuote -from dbt.tests.adapter.utils.test_escape_single_quotes import BaseEscapeSingleQuotesBackslash from dbt.tests.adapter.utils.test_except import BaseExcept from dbt.tests.adapter.utils.test_hash import BaseHash from dbt.tests.adapter.utils.test_intersect import BaseIntersect diff --git a/tests/unit/mock_adapter.py b/tests/unit/mock_adapter.py index cc2861e4e..8547480d1 100644 --- a/tests/unit/mock_adapter.py +++ b/tests/unit/mock_adapter.py @@ -1,16 +1,16 @@ from unittest import mock from contextlib import contextmanager -from dbt.adapters.base import BaseAdapter, PythonJobHelper +from dbt.adapters.base import BaseAdapter def adapter_factory(): class MockAdapter(BaseAdapter): - ConnectionManager = mock.MagicMock(TYPE='mock') + ConnectionManager = mock.MagicMock(TYPE="mock") responder = mock.MagicMock() # some convenient defaults responder.quote.side_effect = lambda identifier: '"{}"'.format(identifier) - responder.date_function.side_effect = lambda: 'unitdate()' + responder.date_function.side_effect = lambda: "unitdate()" responder.is_cancelable.side_effect = lambda: False @contextmanager diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py index 5170fcfbf..542387c0d 100644 --- a/tests/unit/test_context.py +++ b/tests/unit/test_context.py @@ -15,11 +15,7 @@ from dbt.contracts.graph.model_config import ( NodeConfig, ) -from dbt.contracts.graph.nodes import ( - ModelNode, - DependsOn, - Macro -) +from dbt.contracts.graph.nodes import ModelNode, DependsOn, Macro from dbt.context import providers from dbt.node_types import NodeType @@ -27,73 +23,73 @@ class TestRuntimeWrapper(unittest.TestCase): def setUp(self): self.mock_config = mock.MagicMock() - self.mock_config.quoting = { - 'database': True, 'schema': True, 'identifier': True} + self.mock_config.quoting = {"database": True, "schema": True, "identifier": True} adapter_class = adapter_factory() self.mock_adapter = adapter_class(self.mock_config) self.namespace = 
mock.MagicMock() - self.wrapper = providers.RuntimeDatabaseWrapper( - self.mock_adapter, self.namespace) + self.wrapper = providers.RuntimeDatabaseWrapper(self.mock_adapter, self.namespace) self.responder = self.mock_adapter.responder PROFILE_DATA = { - 'target': 'test', - 'quoting': {}, - 'outputs': { - 'test': { - 'type': 'redshift', - 'host': 'localhost', - 'schema': 'analytics', - 'user': 'test', - 'pass': 'test', - 'dbname': 'test', - 'port': 1, + "target": "test", + "quoting": {}, + "outputs": { + "test": { + "type": "redshift", + "host": "localhost", + "schema": "analytics", + "user": "test", + "pass": "test", + "dbname": "test", + "port": 1, } }, } PROJECT_DATA = { - 'name': 'root', - 'version': '0.1', - 'profile': 'test', - 'project-root': os.getcwd(), - 'config-version': 2, + "name": "root", + "version": "0.1", + "profile": "test", + "project-root": os.getcwd(), + "config-version": 2, } def model(): return ModelNode( - alias='model_one', - name='model_one', - database='dbt', - schema='analytics', + alias="model_one", + name="model_one", + database="dbt", + schema="analytics", resource_type=NodeType.Model, - unique_id='model.root.model_one', - fqn=['root', 'model_one'], - package_name='root', - original_file_path='model_one.sql', - root_path='/usr/src/app', + unique_id="model.root.model_one", + fqn=["root", "model_one"], + package_name="root", + original_file_path="model_one.sql", + root_path="/usr/src/app", refs=[], sources=[], depends_on=DependsOn(), - config=NodeConfig.from_dict({ - 'enabled': True, - 'materialized': 'view', - 'persist_docs': {}, - 'post-hook': [], - 'pre-hook': [], - 'vars': {}, - 'quoting': {}, - 'column_types': {}, - 'tags': [], - }), + config=NodeConfig.from_dict( + { + "enabled": True, + "materialized": "view", + "persist_docs": {}, + "post-hook": [], + "pre-hook": [], + "vars": {}, + "quoting": {}, + "column_types": {}, + "tags": [], + } + ), tags=[], - path='model_one.sql', - raw_sql='', - description='', - columns={} + 
path="model_one.sql", + raw_sql="", + description="", + columns={}, ) @@ -101,8 +97,8 @@ def mock_macro(name, package_name): macro = mock.MagicMock( __class__=Macro, package_name=package_name, - resource_type='macro', - unique_id=f'macro.{package_name}.{name}', + resource_type="macro", + unique_id=f"macro.{package_name}.{name}", ) # Mock(name=...) does not set the `name` attribute, this does. macro.name = name @@ -111,7 +107,7 @@ def mock_macro(name, package_name): def mock_manifest(config): manifest_macros = {} - for name in ['macro_a', 'macro_b']: + for name in ["macro_a", "macro_b"]: macro = mock_macro(name, config.project_name) manifest_macros[macro.unique_id] = macro return mock.MagicMock(macros=manifest_macros) @@ -120,47 +116,49 @@ def mock_manifest(config): def mock_model(): return mock.MagicMock( __class__=ModelNode, - alias='model_one', - name='model_one', - database='dbt', - schema='analytics', + alias="model_one", + name="model_one", + database="dbt", + schema="analytics", resource_type=NodeType.Model, - unique_id='model.root.model_one', - fqn=['root', 'model_one'], - package_name='root', - original_file_path='model_one.sql', - root_path='/usr/src/app', + unique_id="model.root.model_one", + fqn=["root", "model_one"], + package_name="root", + original_file_path="model_one.sql", + root_path="/usr/src/app", refs=[], sources=[], depends_on=DependsOn(), - config=NodeConfig.from_dict({ - 'enabled': True, - 'materialized': 'view', - 'persist_docs': {}, - 'post-hook': [], - 'pre-hook': [], - 'vars': {}, - 'quoting': {}, - 'column_types': {}, - 'tags': [], - }), + config=NodeConfig.from_dict( + { + "enabled": True, + "materialized": "view", + "persist_docs": {}, + "post-hook": [], + "pre-hook": [], + "vars": {}, + "quoting": {}, + "column_types": {}, + "tags": [], + } + ), tags=[], - path='model_one.sql', - raw_sql='', - description='', + path="model_one.sql", + raw_sql="", + description="", columns={}, ) @pytest.fixture def get_adapter(): - with 
mock.patch.object(providers, 'get_adapter') as patch: + with mock.patch.object(providers, "get_adapter") as patch: yield patch @pytest.fixture def get_include_paths(): - with mock.patch.object(factory, 'get_include_paths') as patch: + with mock.patch.object(factory, "get_include_paths") as patch: patch.return_value = [] yield patch @@ -177,12 +175,12 @@ def manifest_fx(config): @pytest.fixture def manifest_extended(manifest_fx): - dbt_macro = mock_macro('default__some_macro', 'dbt') + dbt_macro = mock_macro("default__some_macro", "dbt") # same namespace, same name, different pkg! - rs_macro = mock_macro('redshift__some_macro', 'dbt_redshift') + rs_macro = mock_macro("redshift__some_macro", "dbt_redshift") # same name, different package - package_default_macro = mock_macro('default__some_macro', 'root') - package_rs_macro = mock_macro('redshift__some_macro', 'root') + package_default_macro = mock_macro("default__some_macro", "root") + package_rs_macro = mock_macro("redshift__some_macro", "root") manifest_fx.macros[dbt_macro.unique_id] = dbt_macro manifest_fx.macros[rs_macro.unique_id] = rs_macro manifest_fx.macros[package_default_macro.unique_id] = package_default_macro @@ -200,8 +198,8 @@ def redshift_adapter(config, get_adapter): def test_resolve_specific(config, manifest_extended, redshift_adapter, get_include_paths): - rs_macro = manifest_extended.macros['macro.dbt_redshift.redshift__some_macro'] - package_rs_macro = manifest_extended.macros['macro.root.redshift__some_macro'] + rs_macro = manifest_extended.macros["macro.dbt_redshift.redshift__some_macro"] + package_rs_macro = manifest_extended.macros["macro.root.redshift__some_macro"] ctx = providers.generate_runtime_model_context( model=mock_model(), @@ -209,24 +207,24 @@ def test_resolve_specific(config, manifest_extended, redshift_adapter, get_inclu manifest=manifest_extended, ) - ctx['adapter'].config.dispatch + ctx["adapter"].config.dispatch # macro_a exists, but default__macro_a and redshift__macro_a do 
not with pytest.raises(dbt.exceptions.CompilationError): - ctx['adapter'].dispatch('macro_a').macro + ctx["adapter"].dispatch("macro_a").macro # root namespace is always preferred, unless search order is explicitly defined in 'dispatch' config - assert ctx['adapter'].dispatch('some_macro').macro is package_rs_macro - assert ctx['adapter'].dispatch('some_macro', 'dbt').macro is package_rs_macro - assert ctx['adapter'].dispatch('some_macro', 'root').macro is package_rs_macro + assert ctx["adapter"].dispatch("some_macro").macro is package_rs_macro + assert ctx["adapter"].dispatch("some_macro", "dbt").macro is package_rs_macro + assert ctx["adapter"].dispatch("some_macro", "root").macro is package_rs_macro # override 'dbt' namespace search order, dispatch to 'root' first - ctx['adapter'].config.dispatch = [{'macro_namespace': 'dbt', 'search_order': ['root', 'dbt']}] - assert ctx['adapter'].dispatch('some_macro', macro_namespace='dbt').macro is package_rs_macro + ctx["adapter"].config.dispatch = [{"macro_namespace": "dbt", "search_order": ["root", "dbt"]}] + assert ctx["adapter"].dispatch("some_macro", macro_namespace="dbt").macro is package_rs_macro # override 'dbt' namespace search order, dispatch to 'dbt' only - ctx['adapter'].config.dispatch = [{'macro_namespace': 'dbt', 'search_order': ['dbt']}] - assert ctx['adapter'].dispatch('some_macro', macro_namespace='dbt').macro is rs_macro + ctx["adapter"].config.dispatch = [{"macro_namespace": "dbt", "search_order": ["dbt"]}] + assert ctx["adapter"].dispatch("some_macro", macro_namespace="dbt").macro is rs_macro # override 'root' namespace search order, dispatch to 'dbt' first - ctx['adapter'].config.dispatch = [{'macro_namespace': 'root', 'search_order': ['dbt', 'root']}] + ctx["adapter"].config.dispatch = [{"macro_namespace": "root", "search_order": ["dbt", "root"]}] diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index ba5361b0b..27bcd98f8 100644 --- 
a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -3,7 +3,6 @@ from unittest.mock import Mock, call import agate -import boto3 import dbt import redshift_connector @@ -14,37 +13,41 @@ from dbt.clients import agate_helper from dbt.exceptions import FailedToConnectError from dbt.adapters.redshift.connections import RedshiftConnectMethodFactory -from .utils import config_from_parts_or_dicts, mock_connection, TestAdapterConversions, inject_adapter +from .utils import ( + config_from_parts_or_dicts, + mock_connection, + TestAdapterConversions, + inject_adapter, +) class TestRedshiftAdapter(unittest.TestCase): - def setUp(self): profile_cfg = { - 'outputs': { - 'test': { - 'type': 'redshift', - 'dbname': 'redshift', - 'user': 'root', - 'host': 'thishostshouldnotexist.test.us-east-1', - 'pass': 'password', - 'port': 5439, - 'schema': 'public' + "outputs": { + "test": { + "type": "redshift", + "dbname": "redshift", + "user": "root", + "host": "thishostshouldnotexist.test.us-east-1", + "pass": "password", + "port": 5439, + "schema": "public", } }, - 'target': 'test' + "target": "test", } project_cfg = { - 'name': 'X', - 'version': '0.1', - 'profile': 'test', - 'project-root': '/tmp/dbt/does-not-exist', - 'quoting': { - 'identifier': False, - 'schema': True, + "name": "X", + "version": "0.1", + "profile": "test", + "project-root": "/tmp/dbt/does-not-exist", + "quoting": { + "identifier": False, + "schema": True, }, - 'config-version': 2, + "config-version": 2, } self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) @@ -62,174 +65,171 @@ def test_implicit_database_conn(self): connection = self.adapter.acquire_connection("dummy") connection.handle redshift_connector.connect.assert_called_once_with( - host='thishostshouldnotexist.test.us-east-1', - database='redshift', - user='root', - password='password', + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", port=5439, 
auto_create=False, db_groups=[], timeout=30, - region='us-east-1' + region="us-east-1", ) @mock.patch("redshift_connector.connect", Mock()) def test_explicit_database_conn(self): - self.config.method = 'database' + self.config.method = "database" connection = self.adapter.acquire_connection("dummy") connection.handle redshift_connector.connect.assert_called_once_with( - host='thishostshouldnotexist.test.us-east-1', - database='redshift', - user='root', - password='password', + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", port=5439, auto_create=False, db_groups=[], - region='us-east-1', - timeout=30 + region="us-east-1", + timeout=30, ) @mock.patch("redshift_connector.connect", Mock()) def test_explicit_iam_conn_without_profile(self): self.config.credentials = self.config.credentials.replace( - method='iam', - cluster_id='my_redshift', - host='thishostshouldnotexist.test.us-east-1' + method="iam", cluster_id="my_redshift", host="thishostshouldnotexist.test.us-east-1" ) connection = self.adapter.acquire_connection("dummy") connection.handle redshift_connector.connect.assert_called_once_with( iam=True, - host='thishostshouldnotexist.test.us-east-1', - database='redshift', - db_user='root', - password='', - user='', - cluster_identifier='my_redshift', - region='us-east-1', + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + db_user="root", + password="", + user="", + cluster_identifier="my_redshift", + region="us-east-1", auto_create=False, db_groups=[], profile=None, timeout=30, - port=5439 + port=5439, ) - @mock.patch('redshift_connector.connect', Mock()) - @mock.patch('boto3.Session', Mock()) + @mock.patch("redshift_connector.connect", Mock()) + @mock.patch("boto3.Session", Mock()) def test_explicit_iam_conn_with_profile(self): self.config.credentials = self.config.credentials.replace( - method='iam', - cluster_id='my_redshift', - iam_profile='test', - 
host='thishostshouldnotexist.test.us-east-1' + method="iam", + cluster_id="my_redshift", + iam_profile="test", + host="thishostshouldnotexist.test.us-east-1", ) connection = self.adapter.acquire_connection("dummy") connection.handle redshift_connector.connect.assert_called_once_with( iam=True, - host='thishostshouldnotexist.test.us-east-1', - database='redshift', - cluster_identifier='my_redshift', - region='us-east-1', + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + cluster_identifier="my_redshift", + region="us-east-1", auto_create=False, db_groups=[], - db_user='root', - password='', - user='', - profile='test', + db_user="root", + password="", + user="", + profile="test", timeout=30, - port=5439 + port=5439, ) - @mock.patch('redshift_connector.connect', Mock()) - @mock.patch('boto3.Session', Mock()) + @mock.patch("redshift_connector.connect", Mock()) + @mock.patch("boto3.Session", Mock()) def test_explicit_iam_serverless_with_profile(self): self.config.credentials = self.config.credentials.replace( - method='iam', - iam_profile='test', - host='doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com' + method="iam", + iam_profile="test", + host="doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com", ) connection = self.adapter.acquire_connection("dummy") connection.handle redshift_connector.connect.assert_called_once_with( iam=True, - host='doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com', - database='redshift', + host="doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com", + database="redshift", cluster_identifier=None, - region='us-east-2', + region="us-east-2", auto_create=False, db_groups=[], - db_user='root', - password='', - user='', - profile='test', + db_user="root", + password="", + user="", + profile="test", timeout=30, - port=5439 + port=5439, ) - @mock.patch('redshift_connector.connect', Mock()) - @mock.patch('boto3.Session', Mock()) + @mock.patch("redshift_connector.connect", Mock()) + 
@mock.patch("boto3.Session", Mock()) def test_serverless_iam_failure(self): self.config.credentials = self.config.credentials.replace( - method='iam', - iam_profile='test', - host='doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com' + method="iam", + iam_profile="test", + host="doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com", ) with self.assertRaises(dbt.exceptions.FailedToConnectError) as context: connection = self.adapter.acquire_connection("dummy") connection.handle redshift_connector.connect.assert_called_once_with( iam=True, - host='doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com', - database='redshift', + host="doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com", + database="redshift", cluster_identifier=None, - region='us-east-2', + region="us-east-2", auto_create=False, db_groups=[], - db_user='root', - password='', - user='', - profile='test', + db_user="root", + password="", + user="", + profile="test", port=5439, timeout=30, - ) + ) self.assertTrue("'host' must be provided" in context.exception.msg) def test_iam_conn_optionals(self): - profile_cfg = { - 'outputs': { - 'test': { - 'type': 'redshift', - 'dbname': 'redshift', - 'user': 'root', - 'host': 'thishostshouldnotexist', - 'port': 5439, - 'schema': 'public', - 'method': 'iam', - 'cluster_id': 'my_redshift', - 'db_groups': ["my_dbgroup"], - 'autocreate': True, + "outputs": { + "test": { + "type": "redshift", + "dbname": "redshift", + "user": "root", + "host": "thishostshouldnotexist", + "port": 5439, + "schema": "public", + "method": "iam", + "cluster_id": "my_redshift", + "db_groups": ["my_dbgroup"], + "autocreate": True, } }, - 'target': 'test' + "target": "test", } config_from_parts_or_dicts(self.config, profile_cfg) def test_invalid_auth_method(self): # we have to set method this way, otherwise it won't validate - self.config.credentials.method = 'badmethod' + self.config.credentials.method = "badmethod" with self.assertRaises(FailedToConnectError) as 
context: connect_method_factory = RedshiftConnectMethodFactory(self.config.credentials) connect_method_factory.get_connect_method() - self.assertTrue('badmethod' in context.exception.msg) + self.assertTrue("badmethod" in context.exception.msg) def test_invalid_iam_no_cluster_id(self): - self.config.credentials = self.config.credentials.replace(method='iam') + self.config.credentials = self.config.credentials.replace(method="iam") with self.assertRaises(FailedToConnectError) as context: connect_method_factory = RedshiftConnectMethodFactory(self.config.credentials) connect_method_factory.get_connect_method() @@ -241,171 +241,195 @@ def test_cancel_open_connections_empty(self): def test_cancel_open_connections_master(self): key = self.adapter.connections.get_thread_identifier() - self.adapter.connections.thread_connections[key] = mock_connection('master') + self.adapter.connections.thread_connections[key] = mock_connection("master") self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) def test_cancel_open_connections_single(self): - master = mock_connection('master') - model = mock_connection('model') + master = mock_connection("master") + model = mock_connection("model") key = self.adapter.connections.get_thread_identifier() - self.adapter.connections.thread_connections.update({ - key: master, - 1: model, - }) - with mock.patch.object(self.adapter.connections, 'add_query') as add_query: + self.adapter.connections.thread_connections.update( + { + key: master, + 1: model, + } + ) + with mock.patch.object(self.adapter.connections, "add_query") as add_query: query_result = mock.MagicMock() cursor = mock.Mock() cursor.fetchone.return_value = 42 add_query.side_effect = [(None, cursor), (None, query_result)] self.assertEqual(len(list(self.adapter.cancel_open_connections())), 1) - add_query.assert_has_calls([call('select pg_backend_pid()'), call('select pg_terminate_backend(42)')]) + add_query.assert_has_calls( + [call("select pg_backend_pid()"), 
call("select pg_terminate_backend(42)")] + ) master.handle.get_backend_pid.assert_not_called() def test_dbname_verification_is_case_insensitive(self): # Override adapter settings from setUp() profile_cfg = { - 'outputs': { - 'test': { - 'type': 'redshift', - 'dbname': 'Redshift', - 'user': 'root', - 'host': 'thishostshouldnotexist', - 'pass': 'password', - 'port': 5439, - 'schema': 'public' + "outputs": { + "test": { + "type": "redshift", + "dbname": "Redshift", + "user": "root", + "host": "thishostshouldnotexist", + "pass": "password", + "port": 5439, + "schema": "public", } }, - 'target': 'test' + "target": "test", } project_cfg = { - 'name': 'X', - 'version': '0.1', - 'profile': 'test', - 'project-root': '/tmp/dbt/does-not-exist', - 'quoting': { - 'identifier': False, - 'schema': True, + "name": "X", + "version": "0.1", + "profile": "test", + "project-root": "/tmp/dbt/does-not-exist", + "quoting": { + "identifier": False, + "schema": True, }, - 'config-version': 2, + "config-version": 2, } self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) self.adapter.cleanup_connections() self._adapter = RedshiftAdapter(self.config) - self.adapter.verify_database('redshift') + self.adapter.verify_database("redshift") def test_execute_with_fetch(self): cursor = mock.Mock() table = dbt.clients.agate_helper.empty_table() - with mock.patch.object(self.adapter.connections, 'add_query') as mock_add_query: + with mock.patch.object(self.adapter.connections, "add_query") as mock_add_query: mock_add_query.return_value = ( - None, cursor) # when mock_add_query is called, it will always return None, cursor - with mock.patch.object(self.adapter.connections, 'get_response') as mock_get_response: + None, + cursor, + ) # when mock_add_query is called, it will always return None, cursor + with mock.patch.object(self.adapter.connections, "get_response") as mock_get_response: mock_get_response.return_value = None - with mock.patch.object(self.adapter.connections, - 
'get_result_from_cursor') as mock_get_result_from_cursor: + with mock.patch.object( + self.adapter.connections, "get_result_from_cursor" + ) as mock_get_result_from_cursor: mock_get_result_from_cursor.return_value = table self.adapter.connections.execute(sql="select * from test", fetch=True) - mock_add_query.assert_called_once_with('select * from test', False) + mock_add_query.assert_called_once_with("select * from test", False) mock_get_result_from_cursor.assert_called_once_with(cursor) mock_get_response.assert_called_once_with(cursor) def test_execute_without_fetch(self): cursor = mock.Mock() - with mock.patch.object(self.adapter.connections, 'add_query') as mock_add_query: + with mock.patch.object(self.adapter.connections, "add_query") as mock_add_query: mock_add_query.return_value = ( - None, cursor) # when mock_add_query is called, it will always return None, cursor - with mock.patch.object(self.adapter.connections, 'get_response') as mock_get_response: + None, + cursor, + ) # when mock_add_query is called, it will always return None, cursor + with mock.patch.object(self.adapter.connections, "get_response") as mock_get_response: mock_get_response.return_value = None - with mock.patch.object(self.adapter.connections, - 'get_result_from_cursor') as mock_get_result_from_cursor: + with mock.patch.object( + self.adapter.connections, "get_result_from_cursor" + ) as mock_get_result_from_cursor: self.adapter.connections.execute(sql="select * from test2", fetch=False) - mock_add_query.assert_called_once_with('select * from test2', False) + mock_add_query.assert_called_once_with("select * from test2", False) mock_get_result_from_cursor.assert_not_called() mock_get_response.assert_called_once_with(cursor) def test_add_query_with_no_cursor(self): - with mock.patch.object(self.adapter.connections, 'get_thread_connection') as mock_get_thread_connection: + with mock.patch.object( + self.adapter.connections, "get_thread_connection" + ) as mock_get_thread_connection: 
mock_get_thread_connection.return_value = None - with self.assertRaisesRegex(dbt.exceptions.DbtRuntimeError, - 'Tried to run invalid SQL: on '): + with self.assertRaisesRegex( + dbt.exceptions.DbtRuntimeError, "Tried to run invalid SQL: on " + ): self.adapter.connections.add_query(sql="") mock_get_thread_connection.assert_called_once() def test_add_query_success(self): cursor = mock.Mock() - with mock.patch.object(dbt.adapters.redshift.connections.SQLConnectionManager, 'add_query') as mock_add_query: + with mock.patch.object( + dbt.adapters.redshift.connections.SQLConnectionManager, "add_query" + ) as mock_add_query: mock_add_query.return_value = None, cursor - self.adapter.connections.add_query('select * from test3') - mock_add_query.assert_called_once_with('select * from test3', True, bindings=None, abridge_sql_log=False) + self.adapter.connections.add_query("select * from test3") + mock_add_query.assert_called_once_with( + "select * from test3", True, bindings=None, abridge_sql_log=False + ) + class TestRedshiftAdapterConversions(TestAdapterConversions): def test_convert_text_type(self): rows = [ - ['', 'a1', 'stringval1'], - ['', 'a2', 'stringvalasdfasdfasdfa'], - ['', 'a3', 'stringval3'], + ["", "a1", "stringval1"], + ["", "a2", "stringvalasdfasdfasdfa"], + ["", "a3", "stringval3"], ] agate_table = self._make_table_of(rows, agate.Text) - expected = ['varchar(64)', 'varchar(2)', 'varchar(22)'] + expected = ["varchar(64)", "varchar(2)", "varchar(22)"] for col_idx, expect in enumerate(expected): assert RedshiftAdapter.convert_text_type(agate_table, col_idx) == expect def test_convert_number_type(self): rows = [ - ['', '23.98', '-1'], - ['', '12.78', '-2'], - ['', '79.41', '-3'], + ["", "23.98", "-1"], + ["", "12.78", "-2"], + ["", "79.41", "-3"], ] agate_table = self._make_table_of(rows, agate.Number) - expected = ['integer', 'float8', 'integer'] + expected = ["integer", "float8", "integer"] for col_idx, expect in enumerate(expected): assert 
RedshiftAdapter.convert_number_type(agate_table, col_idx) == expect def test_convert_boolean_type(self): rows = [ - ['', 'false', 'true'], - ['', 'false', 'false'], - ['', 'false', 'true'], + ["", "false", "true"], + ["", "false", "false"], + ["", "false", "true"], ] agate_table = self._make_table_of(rows, agate.Boolean) - expected = ['boolean', 'boolean', 'boolean'] + expected = ["boolean", "boolean", "boolean"] for col_idx, expect in enumerate(expected): assert RedshiftAdapter.convert_boolean_type(agate_table, col_idx) == expect def test_convert_datetime_type(self): rows = [ - ['', '20190101T01:01:01Z', '2019-01-01 01:01:01'], - ['', '20190102T01:01:01Z', '2019-01-01 01:01:01'], - ['', '20190103T01:01:01Z', '2019-01-01 01:01:01'], + ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"], + ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"], + ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"], + ] + agate_table = self._make_table_of( + rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime] + ) + expected = [ + "timestamp without time zone", + "timestamp without time zone", + "timestamp without time zone", ] - agate_table = self._make_table_of(rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime]) - expected = ['timestamp without time zone', 'timestamp without time zone', 'timestamp without time zone'] for col_idx, expect in enumerate(expected): assert RedshiftAdapter.convert_datetime_type(agate_table, col_idx) == expect def test_convert_date_type(self): rows = [ - ['', '2019-01-01', '2019-01-04'], - ['', '2019-01-02', '2019-01-04'], - ['', '2019-01-03', '2019-01-04'], + ["", "2019-01-01", "2019-01-04"], + ["", "2019-01-02", "2019-01-04"], + ["", "2019-01-03", "2019-01-04"], ] agate_table = self._make_table_of(rows, agate.Date) - expected = ['date', 'date', 'date'] + expected = ["date", "date", "date"] for col_idx, expect in enumerate(expected): assert RedshiftAdapter.convert_date_type(agate_table, col_idx) == expect def test_convert_time_type(self): 
# dbt's default type testers actually don't have a TimeDelta at all. rows = [ - ['', '120s', '10s'], - ['', '3m', '11s'], - ['', '1h', '12s'], + ["", "120s", "10s"], + ["", "3m", "11s"], + ["", "1h", "12s"], ] agate_table = self._make_table_of(rows, agate.TimeDelta) - expected = ['varchar(24)', 'varchar(24)', 'varchar(24)'] + expected = ["varchar(24)", "varchar(24)", "varchar(24)"] for col_idx, expect in enumerate(expected): assert RedshiftAdapter.convert_time_type(agate_table, col_idx) == expect diff --git a/tests/unit/utils.py b/tests/unit/utils.py index e09b7fc69..f2ca418e3 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -26,21 +26,22 @@ def normalize(path): class Obj: - which = 'blah' + which = "blah" single_threaded = False -def mock_connection(name, state='open'): +def mock_connection(name, state="open"): conn = mock.MagicMock() conn.name = name conn.state = state return conn -def profile_from_dict(profile, profile_name, cli_vars='{}'): +def profile_from_dict(profile, profile_name, cli_vars="{}"): from dbt.config import Profile from dbt.config.renderer import ProfileRenderer from dbt.config.utils import parse_cli_vars + if not isinstance(cli_vars, dict): cli_vars = parse_cli_vars(cli_vars) @@ -50,6 +51,7 @@ def profile_from_dict(profile, profile_name, cli_vars='{}'): # flags global. This is a bit of a hack, but it's the best way to do it. 
from dbt.flags import set_from_args from argparse import Namespace + set_from_args(Namespace(), None) return Profile.from_raw_profile_info( profile, @@ -58,15 +60,16 @@ def profile_from_dict(profile, profile_name, cli_vars='{}'): ) -def project_from_dict(project, profile, packages=None, selectors=None, cli_vars='{}'): +def project_from_dict(project, profile, packages=None, selectors=None, cli_vars="{}"): from dbt.config.renderer import DbtProjectYamlRenderer from dbt.config.utils import parse_cli_vars + if not isinstance(cli_vars, dict): cli_vars = parse_cli_vars(cli_vars) renderer = DbtProjectYamlRenderer(profile, cli_vars) - project_root = project.pop('project-root', os.getcwd()) + project_root = project.pop("project-root", os.getcwd()) partial = PartialProject.from_dicts( project_root=project_root, @@ -77,7 +80,7 @@ def project_from_dict(project, profile, packages=None, selectors=None, cli_vars= return partial.render(renderer) -def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars='{}'): +def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars="{}"): from dbt.config import Project, Profile, RuntimeConfig from dbt.config.utils import parse_cli_vars from copy import deepcopy @@ -88,7 +91,7 @@ def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, if isinstance(project, Project): profile_name = project.profile_name else: - profile_name = project.get('profile') + profile_name = project.get("profile") if not isinstance(profile, Profile): profile = profile_from_dict( @@ -108,16 +111,13 @@ def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, args = Obj() args.vars = cli_vars - args.profile_dir = '/dev/null' - return RuntimeConfig.from_parts( - project=project, - profile=profile, - args=args - ) + args.profile_dir = "/dev/null" + return RuntimeConfig.from_parts(project=project, profile=profile, args=args) def inject_plugin(plugin): from 
dbt.adapters.factory import FACTORY + key = plugin.adapter.type() FACTORY.plugins[key] = plugin @@ -125,8 +125,11 @@ def inject_plugin(plugin): def inject_plugin_for(config): # from dbt.adapters.postgres import Plugin, PostgresAdapter from dbt.adapters.factory import FACTORY + FACTORY.load_plugin(config.credentials.type) - adapter = FACTORY.get_adapter(config) # TODO: there's a get_adaptor function in factory.py, but no method on AdapterContainer + adapter = FACTORY.get_adapter( + config + ) # TODO: there's a get_adaptor function in factory.py, but no method on AdapterContainer return adapter @@ -136,12 +139,14 @@ def inject_adapter(value, plugin): """ inject_plugin(plugin) from dbt.adapters.factory import FACTORY + key = value.type() FACTORY.adapters[key] = value def clear_plugin(plugin): from dbt.adapters.factory import FACTORY + key = plugin.adapter.type() FACTORY.plugins.pop(key, None) FACTORY.adapters.pop(key, None) @@ -184,7 +189,7 @@ def compare_dicts(dict1, dict2): common_keys = set(first_set).intersection(set(second_set)) found_differences = False for key in common_keys: - if dict1[key] != dict2[key] : + if dict1[key] != dict2[key]: print(f"--- --- first dict: {key}: {str(dict1[key])}") print(f"--- --- second dict: {key}: {str(dict2[key])}") found_differences = True @@ -199,7 +204,7 @@ def assert_from_dict(obj, dct, cls=None): cls = obj.__class__ cls.validate(dct) obj_from_dict = cls.from_dict(dct) - if hasattr(obj, 'created_at'): + if hasattr(obj, "created_at"): obj_from_dict.created_at = 1 obj.created_at = 1 assert obj_from_dict == obj @@ -207,10 +212,10 @@ def assert_from_dict(obj, dct, cls=None): def assert_to_dict(obj, dct): obj_to_dict = obj.to_dict(omit_none=True) - if 'created_at' in obj_to_dict: - obj_to_dict['created_at'] = 1 - if 'created_at' in dct: - dct['created_at'] = 1 + if "created_at" in obj_to_dict: + obj_to_dict["created_at"] = 1 + if "created_at" in dct: + dct["created_at"] = 1 assert obj_to_dict == dct @@ -226,10 +231,10 @@ def 
assert_fails_validation(dct, cls): class TestAdapterConversions(TestCase): - @staticmethod def _get_tester_for(column_type): from dbt.clients import agate_helper + if column_type is agate.TimeDelta: # dbt never makes this! return agate.TimeDelta() @@ -237,10 +242,10 @@ def _get_tester_for(column_type): if isinstance(instance, column_type): return instance - raise ValueError(f'no tester for {column_type}') + raise ValueError(f"no tester for {column_type}") def _make_table_of(self, rows, column_types): - column_names = list(string.ascii_letters[:len(rows[0])]) + column_names = list(string.ascii_letters[: len(rows[0])]) if isinstance(column_types, type): column_types = [self._get_tester_for(column_types) for _ in column_names] else: @@ -251,6 +256,7 @@ def _make_table_of(self, rows, column_types): def load_internal_manifest_macros(config, macro_hook=lambda m: None): from dbt.parser.manifest import ManifestLoader + return ManifestLoader.load_macros(config, macro_hook) From f07fbda2f4244f8316b1852c96d7f484588e9937 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 16 Mar 2023 15:07:56 -0400 Subject: [PATCH 047/113] ADAP-366: View Rerun Bug (#375) * moved models out of test file (more than 20 lines); moved files to directory (more than 1 file) * created test case for #365 * added sslmode to `RedshiftCredentials._connection_keys` * moved code out of try block that would not trigger exception * added link relations in cache logic * pulled up abstract methods that were not implemented, but retained `NotImplementedError` * the macro `postgres_get_relations` only has one underscore in `dbt-core` instead of two, like `redshift__get_relations` * changie --- .../unreleased/Fixes-20230316-132120.yaml | 7 ++ dbt/adapters/redshift/connections.py | 66 ++++++++++------- dbt/adapters/redshift/impl.py | 73 ++++++++++++++++--- dbt/include/redshift/macros/relations.sql | 2 +- .../functional/adapter/backup_tests/models.py | 61 
++++++++++++++++ .../{ => backup_tests}/test_backup_table.py | 71 +++--------------- tests/functional/adapter/test_basic.py | 40 +++++++++- 7 files changed, 219 insertions(+), 101 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230316-132120.yaml create mode 100644 tests/functional/adapter/backup_tests/models.py rename tests/functional/adapter/{ => backup_tests}/test_backup_table.py (74%) diff --git a/.changes/unreleased/Fixes-20230316-132120.yaml b/.changes/unreleased/Fixes-20230316-132120.yaml new file mode 100644 index 000000000..ed36f8a30 --- /dev/null +++ b/.changes/unreleased/Fixes-20230316-132120.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Added methods to `RedshiftAdapter` that were inadvertantly dropped when migrating + from `PostgresAdapter` to `SQLAdapter` +time: 2023-03-16T13:21:20.306393-04:00 +custom: + Author: mikealfare + Issue: "365" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index d4804bd8b..f35a429da 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -1,27 +1,26 @@ import re from multiprocessing import Lock from contextlib import contextmanager -from typing import NewType, Tuple, Union +from typing import NewType, Tuple, Union, Optional, List +from dataclasses import dataclass, field import agate import sqlparse +import redshift_connector +from redshift_connector.utils.oids import get_datatype_name + from dbt.adapters.sql import SQLConnectionManager from dbt.contracts.connection import AdapterResponse, Connection, Credentials from dbt.events import AdapterLogger import dbt.exceptions import dbt.flags -import redshift_connector from dbt.dataclass_schema import FieldEncoder, dbtClassMixin, StrEnum - -from dataclasses import dataclass, field -from typing import Optional, List - from dbt.helper_types import Port -from redshift_connector import OperationalError, DatabaseError, DataError -from redshift_connector.utils.oids import get_datatype_name + 
logger = AdapterLogger("Redshift") + drop_lock: Lock = dbt.flags.MP_CONTEXT.Lock() # type: ignore @@ -69,7 +68,17 @@ def type(self): return "redshift" def _connection_keys(self): - return "host", "port", "user", "database", "schema", "method", "cluster_id", "iam_profile" + return ( + "host", + "port", + "user", + "database", + "schema", + "method", + "cluster_id", + "iam_profile", + "sslmode", + ) @property def unique_field(self) -> str: @@ -114,8 +123,6 @@ def connect(): c.cursor().execute("set role {}".format(self.credentials.role)) return c - return connect - elif method == RedshiftConnectionMethod.IAM: if not self.credentials.cluster_id and "serverless" not in self.credentials.host: raise dbt.exceptions.FailedToConnectError( @@ -138,12 +145,13 @@ def connect(): c.cursor().execute("set role {}".format(self.credentials.role)) return c - return connect else: raise dbt.exceptions.FailedToConnectError( "Invalid 'method' in profile: '{}'".format(method) ) + return connect + class RedshiftConnectionManager(SQLConnectionManager): TYPE = "redshift" @@ -155,19 +163,19 @@ def _get_backend_pid(self): return res def cancel(self, connection: Connection): - connection_name = connection.name try: pid = self._get_backend_pid() - sql = "select pg_terminate_backend({})".format(pid) - _, cursor = self.add_query(sql) - res = cursor.fetchone() - logger.debug("Cancel query '{}': {}".format(connection_name, res)) - except redshift_connector.error.InterfaceError as e: + except redshift_connector.InterfaceError as e: if "is closed" in str(e): - logger.debug(f"Connection {connection_name} was already closed") + logger.debug(f"Connection {connection.name} was already closed") return raise + sql = f"select pg_terminate_backend({pid})" + _, cursor = self.add_query(sql) + res = cursor.fetchone() + logger.debug(f"Cancel query '{connection.name}': {res}") + @classmethod def get_response(cls, cursor: redshift_connector.Cursor) -> AdapterResponse: rows = cursor.rowcount @@ -178,29 +186,27 @@ def 
get_response(cls, cursor: redshift_connector.Cursor) -> AdapterResponse: def exception_handler(self, sql): try: yield - except redshift_connector.error.DatabaseError as e: + except redshift_connector.DatabaseError as e: logger.debug(f"Redshift error: {str(e)}") self.rollback_if_open() - raise dbt.exceptions.DbtDatabaseError(str(e)) + raise dbt.exceptions.DbtDatabaseError(str(e).strip()) from e + except Exception as e: logger.debug("Error running SQL: {}", sql) logger.debug("Rolling back transaction.") self.rollback_if_open() # Raise DBT native exceptions as is. - if isinstance(e, dbt.exceptions.Exception): + if isinstance(e, dbt.exceptions.DbtRuntimeError): raise raise dbt.exceptions.DbtRuntimeError(str(e)) from e @contextmanager - def fresh_transaction(self, name=None): + def fresh_transaction(self): """On entrance to this context manager, hold an exclusive lock and create a fresh transaction for redshift, then commit and begin a new one before releasing the lock on exit. See drop_relation in RedshiftAdapter for more information. - - :param Optional[str] name: The name of the connection to use, or None - to use the default. 
""" with drop_lock: connection = self.get_thread_connection() @@ -210,8 +216,8 @@ def fresh_transaction(self, name=None): self.begin() yield - self.commit() + self.begin() @classmethod @@ -226,7 +232,11 @@ def open(cls, connection): def exponential_backoff(attempt: int): return attempt * attempt - retryable_exceptions = [OperationalError, DatabaseError, DataError] + retryable_exceptions = [ + redshift_connector.OperationalError, + redshift_connector.DatabaseError, + redshift_connector.DataError, + ] return cls.retry_connection( connection, diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 8b8ba66fd..718f14b08 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -1,17 +1,24 @@ from dataclasses import dataclass -from typing import Optional +from typing import Optional, Set, Any, Dict, Type +from collections import namedtuple + +from dbt.adapters.base import PythonJobHelper from dbt.adapters.base.impl import AdapterConfig from dbt.adapters.sql import SQLAdapter from dbt.adapters.base.meta import available -from dbt.adapters.redshift import RedshiftConnectionManager -from dbt.adapters.redshift.column import RedshiftColumn -from dbt.adapters.redshift import RedshiftRelation +from dbt.contracts.connection import AdapterResponse from dbt.events import AdapterLogger import dbt.exceptions +from dbt.adapters.redshift import RedshiftConnectionManager, RedshiftRelation, RedshiftColumn + + logger = AdapterLogger("Redshift") +GET_RELATIONS_MACRO_NAME = "redshift__get_relations" + + @dataclass class RedshiftConfig(AdapterConfig): sort_type: Optional[str] = None @@ -24,6 +31,7 @@ class RedshiftConfig(AdapterConfig): class RedshiftAdapter(SQLAdapter): Relation = RedshiftRelation ConnectionManager = RedshiftConnectionManager + connections: RedshiftConnectionManager Column = RedshiftColumn # type: ignore AdapterSpecificConfigs = RedshiftConfig # type: ignore @@ -85,11 +93,8 @@ def _get_catalog_schemas(self, manifest): try: 
return schemas.flatten(allow_multiple_databases=self.config.credentials.ra3_node) except dbt.exceptions.DbtRuntimeError as exc: - raise dbt.exceptions.CompilationError( - "Cross-db references allowed only in {} RA3.* node. Got {}".format( - self.type(), exc.msg - ) - ) + msg = f"Cross-db references allowed only in {self.type()} RA3.* node. Got {exc.msg}" + raise dbt.exceptions.CompilationError(msg) def valid_incremental_strategies(self): """The set of standard builtin strategies which this adapter supports out-of-the-box. @@ -99,3 +104,53 @@ def valid_incremental_strategies(self): def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: return f"{add_to} + interval '{number} {interval}'" + + def _link_cached_database_relations(self, schemas: Set[str]): + """ + :param schemas: The set of schemas that should have links added. + """ + database = self.config.credentials.database + _Relation = namedtuple("_Relation", "database schema identifier") + links = [ + ( + _Relation(database, dep_schema, dep_identifier), + _Relation(database, ref_schema, ref_identifier), + ) + for dep_schema, dep_identifier, ref_schema, ref_identifier in self.execute_macro( + GET_RELATIONS_MACRO_NAME + ) + # don't record in cache if this relation isn't in a relevant schema + if ref_schema in schemas + ] + + for dependent, referenced in links: + self.cache.add_link( + referenced=self.Relation.create(**referenced._asdict()), + dependent=self.Relation.create(**dependent._asdict()), + ) + + def _link_cached_relations(self, manifest): + schemas = set( + relation.schema.lower() + for relation in self._get_cache_schemas(manifest) + if self.verify_database(relation.database) == "" + ) + self._link_cached_database_relations(schemas) + + def _relations_cache_for_schemas(self, manifest, cache_schemas=None): + super()._relations_cache_for_schemas(manifest, cache_schemas) + self._link_cached_relations(manifest) + + # avoid non-implemented abstract methods warning + # make it 
clear what needs to be implemented while still raising the error in super() + # we can update these with Redshift-specific messages if needed + @property + def python_submission_helpers(self) -> Dict[str, Type[PythonJobHelper]]: + return super().python_submission_helpers + + @property + def default_python_submission_method(self) -> str: + return super().default_python_submission_method + + def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse: + return super().generate_python_submission_response(submission_result) diff --git a/dbt/include/redshift/macros/relations.sql b/dbt/include/redshift/macros/relations.sql index ed682ae7d..1a5cd34c4 100644 --- a/dbt/include/redshift/macros/relations.sql +++ b/dbt/include/redshift/macros/relations.sql @@ -1,3 +1,3 @@ {% macro redshift__get_relations () -%} - {{ return(dbt.postgres__get_relations()) }} + {{ return(dbt.postgres_get_relations()) }} {% endmacro %} diff --git a/tests/functional/adapter/backup_tests/models.py b/tests/functional/adapter/backup_tests/models.py new file mode 100644 index 000000000..6432e7319 --- /dev/null +++ b/tests/functional/adapter/backup_tests/models.py @@ -0,0 +1,61 @@ +BACKUP_IS_FALSE = """ +{{ config( + materialized='table', + backup=False +) }} +select 1 as my_col +""" + + +BACKUP_IS_TRUE = """ +{{ config( + materialized='table', + backup=True +) }} +select 1 as my_col +""" + + +BACKUP_IS_UNDEFINED = """ +{{ config( + materialized='table' +) }} +select 1 as my_col +""" + + +BACKUP_IS_TRUE_VIEW = """ +{{ config( + materialized='view', + backup=True +) }} +select 1 as my_col +""" + + +SYNTAX_WITH_DISTKEY = """ +{{ config( + materialized='table', + backup=False, + dist='my_col' +) }} +select 1 as my_col +""" + + +SYNTAX_WITH_SORTKEY = """ +{{ config( + materialized='table', + backup=False, + sort='my_col' +) }} +select 1 as my_col +""" + + +BACKUP_IS_UNDEFINED_DEPENDENT_VIEW = """ +{{ config( + materialized='view', +) }} +select * from {{ 
ref('backup_is_undefined') }} +""" diff --git a/tests/functional/adapter/test_backup_table.py b/tests/functional/adapter/backup_tests/test_backup_table.py similarity index 74% rename from tests/functional/adapter/test_backup_table.py rename to tests/functional/adapter/backup_tests/test_backup_table.py index 4c24250f1..6871b70a7 100644 --- a/tests/functional/adapter/test_backup_table.py +++ b/tests/functional/adapter/backup_tests/test_backup_table.py @@ -2,60 +2,7 @@ from dbt.tests.util import run_dbt - -_MODEL_BACKUP_IS_FALSE = """ -{{ config( - materialized='table', - backup=False -) }} -select 1 as my_col -""" - - -_MODEL_BACKUP_IS_TRUE = """ -{{ config( - materialized='table', - backup=True -) }} -select 1 as my_col -""" - - -_MODEL_IS_UNDEFINED = """ -{{ config( - materialized='table' -) }} -select 1 as my_col -""" - - -_MODEL_IS_TRUE_VIEW = """ -{{ config( - materialized='view', - backup=True -) }} -select 1 as my_col -""" - - -_MODEL_SYNTAX_WITH_DISTKEY = """ -{{ config( - materialized='table', - backup=False, - dist='my_col' -) }} -select 1 as my_col -""" - - -_MODEL_SYNTAX_WITH_SORTKEY = """ -{{ config( - materialized='table', - backup=False, - sort='my_col' -) }} -select 1 as my_col -""" +from tests.functional.adapter.backup_tests import models class BackupTableBase: @@ -68,10 +15,10 @@ class TestBackupTableOption(BackupTableBase): @pytest.fixture(scope="class") def models(self): return { - "backup_is_false.sql": _MODEL_BACKUP_IS_FALSE, - "backup_is_true.sql": _MODEL_BACKUP_IS_TRUE, - "backup_is_undefined.sql": _MODEL_IS_UNDEFINED, - "backup_is_true_view.sql": _MODEL_IS_TRUE_VIEW, + "backup_is_false.sql": models.BACKUP_IS_FALSE, + "backup_is_true.sql": models.BACKUP_IS_TRUE, + "backup_is_undefined.sql": models.BACKUP_IS_UNDEFINED, + "backup_is_true_view.sql": models.BACKUP_IS_TRUE_VIEW, } @pytest.mark.parametrize( @@ -103,8 +50,8 @@ class TestBackupTableSyntax(BackupTableBase): @pytest.fixture(scope="class") def models(self): return { - 
"syntax_with_distkey.sql": _MODEL_SYNTAX_WITH_DISTKEY, - "syntax_with_sortkey.sql": _MODEL_SYNTAX_WITH_SORTKEY, + "syntax_with_distkey.sql": models.SYNTAX_WITH_DISTKEY, + "syntax_with_sortkey.sql": models.SYNTAX_WITH_SORTKEY, } @pytest.mark.parametrize( @@ -137,8 +84,8 @@ def project_config_update(self): @pytest.fixture(scope="class") def models(self): return { - "backup_is_true.sql": _MODEL_BACKUP_IS_TRUE, - "backup_is_undefined.sql": _MODEL_IS_UNDEFINED, + "backup_is_true.sql": models.BACKUP_IS_TRUE, + "backup_is_undefined.sql": models.BACKUP_IS_UNDEFINED, } @pytest.mark.parametrize( diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py index d2289efa3..8f8198a27 100644 --- a/tests/functional/adapter/test_basic.py +++ b/tests/functional/adapter/test_basic.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import AnyStringWith +from dbt.tests.util import AnyStringWith, run_dbt from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests from dbt.tests.adapter.basic.test_singular_tests_ephemeral import BaseSingularTestsEphemeral @@ -127,3 +127,41 @@ def expected_catalog(self, project, profile_user): view_summary_stats=no_stats(), ephemeral_summary_stats=redshift_ephemeral_summary_stats(), ) + + +class TestViewRerun: + """ + This test addresses: https://github.com/dbt-labs/dbt-redshift/issues/365 + """ + + @pytest.fixture(scope="class") + def models(self): + return { + "base_table.sql": "{{ config(materialized='table') }} select 1 as id", + "base_view.sql": "{{ config(bind=True) }} select * from {{ ref('base_table') }}", + } + + def test_rerunning_dependent_view_refreshes(self, project): + """ + Assert that subsequent runs of `dbt run` will correctly recreate a view. 
+ """ + + def db_objects(): + check_objects_exist_sql = f""" + select tablename + from pg_tables + where schemaname ilike '{project.test_schema}' + union all + select viewname + from pg_views + where schemaname ilike '{project.test_schema}' + order by 1 + """ + return project.run_sql(check_objects_exist_sql, fetch="all") + + results = run_dbt(["run"]) + assert len(results) == 2 + assert db_objects() == (["base_table"], ["base_view"]) + results = run_dbt(["run"]) + assert len(results) == 2 + assert db_objects() == (["base_table"], ["base_view"]) From b3f8c3253a61880fb488eef72cd7555cb2542e99 Mon Sep 17 00:00:00 2001 From: Github Build Bot Date: Thu, 16 Mar 2023 20:11:56 +0000 Subject: [PATCH 048/113] Bumping version to 1.5.0b3 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.5.0-b3.md | 9 +++++++++ .../Features-20230301-113553.yaml | 0 .../Fixes-20230316-132120.yaml | 0 CHANGELOG.md | 14 ++++++++++++-- dbt/adapters/redshift/__version__.py | 2 +- 6 files changed, 23 insertions(+), 4 deletions(-) create mode 100644 .changes/1.5.0-b3.md rename .changes/{unreleased => 1.5.0}/Features-20230301-113553.yaml (100%) rename .changes/{unreleased => 1.5.0}/Fixes-20230316-132120.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 40acefba6..448efb95c 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.5.0b2 +current_version = 1.5.0b3 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.5.0-b3.md b/.changes/1.5.0-b3.md new file mode 100644 index 000000000..dda1f66d1 --- /dev/null +++ b/.changes/1.5.0-b3.md @@ -0,0 +1,9 @@ +## dbt-redshift 1.5.0-b3 - March 16, 2023 + +### Features + +- Enforce contracts on models materialized as tables and views ([#319](https://github.com/dbt-labs/dbt-redshift/issues/319), [#340](https://github.com/dbt-labs/dbt-redshift/issues/340)) + +### Fixes + +- Added methods to `RedshiftAdapter` that 
were inadvertantly dropped when migrating from `PostgresAdapter` to `SQLAdapter` ([#365](https://github.com/dbt-labs/dbt-redshift/issues/365)) diff --git a/.changes/unreleased/Features-20230301-113553.yaml b/.changes/1.5.0/Features-20230301-113553.yaml similarity index 100% rename from .changes/unreleased/Features-20230301-113553.yaml rename to .changes/1.5.0/Features-20230301-113553.yaml diff --git a/.changes/unreleased/Fixes-20230316-132120.yaml b/.changes/1.5.0/Fixes-20230316-132120.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230316-132120.yaml rename to .changes/1.5.0/Fixes-20230316-132120.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index dc87d0f99..4a583e587 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,14 +5,24 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.5.0-b3 - March 16, 2023 + +### Features + +- Enforce contracts on models materialized as tables and views ([#319](https://github.com/dbt-labs/dbt-redshift/issues/319), [#340](https://github.com/dbt-labs/dbt-redshift/issues/340)) + +### Fixes + +- Added methods to `RedshiftAdapter` that were inadvertantly dropped when migrating from `PostgresAdapter` to `SQLAdapter` ([#365](https://github.com/dbt-labs/dbt-redshift/issues/365)) + + + ## dbt-redshift 1.5.0-b2 - March 02, 2023 ### Under the Hood - Rename constraints_enabled to contract ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330)) - - ## dbt-redshift 1.5.0-b1 - February 22, 2023 ### Features diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 4f8b15313..649c005ac 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.5.0b2" +version = "1.5.0b3" From 08b99ea5dc17439e9a057b733b70699c30793648 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 19 Mar 2023 15:56:56 -0400 Subject: [PATCH 049/113] Update wheel requirement from ~=0.38 to ~=0.40 (#369) Updates the requirements on [wheel](https://github.com/pypa/wheel) to permit the latest version. - [Release notes](https://github.com/pypa/wheel/releases) - [Changelog](https://github.com/pypa/wheel/blob/main/docs/news.rst) - [Commits](https://github.com/pypa/wheel/compare/0.38.0...0.40.0) --- updated-dependencies: - dependency-name: wheel dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 1345b8abc..a65a03f59 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -30,4 +30,4 @@ tox~=4.4;python_version>="3.8" types-pytz~=2022.7 types-requests~=2.28 twine~=4.0 -wheel~=0.38 +wheel~=0.40 From e611fd4d4aafd1e0e1417da1e56dbd0f9bec942d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 19 Mar 2023 16:47:22 -0400 Subject: [PATCH 050/113] Update ipdb requirement from ~=0.13.11 to ~=0.13.13 (#364) Updates the requirements on [ipdb](https://github.com/gotcha/ipdb) to permit the latest version. - [Release notes](https://github.com/gotcha/ipdb/releases) - [Changelog](https://github.com/gotcha/ipdb/blob/master/HISTORY.txt) - [Commits](https://github.com/gotcha/ipdb/compare/0.13.11...0.13.13) --- updated-dependencies: - dependency-name: ipdb dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index a65a03f59..9a48a3315 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -13,7 +13,7 @@ flake8~=5.0;python_version=="3.7" flake8~=6.0;python_version>="3.8" flaky~=3.7 freezegun~=1.2 -ipdb~=0.13.11 +ipdb~=0.13.13 mypy==1.0.1 # patch updates have historically introduced breaking changes pip-tools~=6.12 pre-commit~=2.21;python_version=="3.7" From 31bae6ebf6006f31aa6ce92a0f1e8b09d77bdee1 Mon Sep 17 00:00:00 2001 From: Gerda Shank Date: Sun, 19 Mar 2023 19:54:02 -0400 Subject: [PATCH 051/113] Use contracted column order in create_table_as (#368) --- .changes/unreleased/Features-20230314-124314.yaml | 6 ++++++ dbt/include/redshift/macros/adapters.sql | 1 + tests/functional/adapter/test_constraints.py | 14 ++++++++++---- 3 files changed, 17 insertions(+), 4 deletions(-) create mode 100644 .changes/unreleased/Features-20230314-124314.yaml diff --git a/.changes/unreleased/Features-20230314-124314.yaml b/.changes/unreleased/Features-20230314-124314.yaml new file mode 100644 index 000000000..5141193ff --- /dev/null +++ b/.changes/unreleased/Features-20230314-124314.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Use contracted column order in create_table_as +time: 2023-03-14T12:43:14.104667-04:00 +custom: + Author: gshank + Issue: "356" diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index ede52b353..ebf2e16a5 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -49,6 +49,7 @@ {{ relation.include(database=(not temporary), schema=(not temporary)) }} {{ get_columns_spec_ddl() }} {{ get_assert_columns_equivalent(sql) }} + {%- set sql = 
get_select_subquery(sql) %} {% if backup == false -%}backup no{%- endif %} {{ dist(_dist) }} {{ sort(_sort_type, _sort) }} diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 9918b5037..64abae1b8 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -10,15 +10,21 @@ create table {0} ( id integer not null, color text, - date_day date, + date_day text, primary key(id) ) ; insert into {0} ( select - 1 as id, - 'blue' as color, - cast('2019-01-01' as date) as date_day + id, + color, + date_day from + ( + select + 'blue' as color, + 1 as id, + '2019-01-01' as date_day + ) as model_subq ) ; """ From b9da95179eb898802359fa48557957362b4e634c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Mar 2023 00:06:47 -0700 Subject: [PATCH 052/113] Bump mypy from 1.0.1 to 1.1.1 (#362) * Bump mypy from 1.0.1 to 1.1.1 Bumps [mypy](https://github.com/python/mypy) from 1.0.1 to 1.1.1. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v1.0.1...v1.1.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * update pre commit config --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Mila Page --- .pre-commit-config.yaml | 2 +- dev-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b748e03ec..3d80b955c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: alias: flake8-check stages: [manual] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.0.1 + rev: v1.1.1 hooks: - id: mypy # N.B.: Mypy is... a bit fragile. diff --git a/dev-requirements.txt b/dev-requirements.txt index 9a48a3315..3c5fd1729 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -14,7 +14,7 @@ flake8~=6.0;python_version>="3.8" flaky~=3.7 freezegun~=1.2 ipdb~=0.13.13 -mypy==1.0.1 # patch updates have historically introduced breaking changes +mypy==1.1.1 # patch updates have historically introduced breaking changes pip-tools~=6.12 pre-commit~=2.21;python_version=="3.7" pre-commit~=3.1;python_version>="3.8" From f0d92d2eecaffb48e99bbb63125c70f8e73f18e2 Mon Sep 17 00:00:00 2001 From: Peter Webb Date: Wed, 22 Mar 2023 13:29:25 -0400 Subject: [PATCH 053/113] Modify adapter to support unified constraint fields (#372) * CT-2214: Modify adapter to work with unified constraint fields * CT-2214: Add changelog entry * CT-2214: Add temporary branch pin * add triage-labels workflow (#366) * add triage-labels workflow * fix end of files * Add tests to precommit hooks and so close out this repo for test conversions. 
(#371) Co-authored-by: Mila Page * CT-2214: Remove unneeded line * CT-2214: Revert requirements in preparation for merge --------- Co-authored-by: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Co-authored-by: Mila Page <67295367+VersusFacit@users.noreply.github.com> Co-authored-by: Mila Page --- .changes/unreleased/Features-20230314-154621.yaml | 6 ++++++ .../macros/utils/get_columns_spec_ddl.sql | 15 +++++++-------- 2 files changed, 13 insertions(+), 8 deletions(-) create mode 100644 .changes/unreleased/Features-20230314-154621.yaml diff --git a/.changes/unreleased/Features-20230314-154621.yaml b/.changes/unreleased/Features-20230314-154621.yaml new file mode 100644 index 000000000..ac9b75bc2 --- /dev/null +++ b/.changes/unreleased/Features-20230314-154621.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Modify adapter to work with unified constraint fields +time: 2023-03-14T15:46:21.963751-04:00 +custom: + Author: peterallenwebb + Issue: "341" diff --git a/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql b/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql index 48fc5f576..5c9ba5dec 100644 --- a/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql +++ b/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql @@ -7,21 +7,20 @@ {%- for i in user_provided_columns -%} {%- set col = user_provided_columns[i] -%} {%- set constraints = col['constraints'] -%} - {%- set ns = namespace(not_null_line = '') -%} + {%- set ns = namespace(not_null_line='', has_check_constraints=False) -%} {%- for constraint in constraints -%} - {%- if constraint == 'primary key' -%} + {%- if constraint.type == 'primary_key' -%} {%- do primary_keys.append(col['name']) -%} - {%- elif constraint == 'not null' %} + {%- elif constraint.type == 'not_null' %} {%- set ns.not_null_line = " not null" -%} + {%- elif constraint.type == 'check' %} + {%- set ns.has_check_constraints = True -%} {%- endif -%} {%- endfor -%} - {%- set not_null_line = " not null" if not_null_col 
else "" -%} - - {%- set check = col['constraints_check'] -%} - {%- if check -%} - {{ exceptions.warn("We noticed you have `constraints_check` in your configs, these are NOT compatible with Redshift and will be ignored. See column `" ~ col['name'] ~ "`") }} + {%- if ns.has_check_constraints -%} + {{ exceptions.warn("There are check constraints in your configs, which Redshift does not support. They will be ignored. See column `" ~ col['name'] ~ "`") }} {%- endif -%} {%- set col_line = col['name'] ~ " " ~ col['data_type'] ~ ns.not_null_line -%} From 75b3d7cfea9d6ad1515039eeabf0b823efb60b45 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Fri, 24 Mar 2023 20:07:37 -0700 Subject: [PATCH 054/113] Adjust workflow. (#381) Co-authored-by: Mila Page --- .github/workflows/integration.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 1dcbee15e..3cf5ff711 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -98,6 +98,7 @@ jobs: redshift: - 'dbt/**' - 'tests/**' + - 'dev-requirements.txt' - name: Generate integration test matrix id: generate-matrix From 7ec738e2c9fa487f697b33975baaba81fef65753 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 25 Mar 2023 18:17:07 -0700 Subject: [PATCH 055/113] Update pre-commit requirement from ~=2.21 to ~=3.2 (#377) Updates the requirements on [pre-commit](https://github.com/pre-commit/pre-commit) to permit the latest version. - [Release notes](https://github.com/pre-commit/pre-commit/releases) - [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) - [Commits](https://github.com/pre-commit/pre-commit/commits/v3.2.0) --- updated-dependencies: - dependency-name: pre-commit dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mila Page <67295367+VersusFacit@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 3c5fd1729..31cf738ff 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -17,7 +17,7 @@ ipdb~=0.13.13 mypy==1.1.1 # patch updates have historically introduced breaking changes pip-tools~=6.12 pre-commit~=2.21;python_version=="3.7" -pre-commit~=3.1;python_version>="3.8" +pre-commit~=3.2;python_version>="3.8" pre-commit-hooks~=4.4 pytest~=7.2 pytest-csv~=3.0 From c7a42015a0fc50735a422d8119a920a90434cb53 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 28 Mar 2023 11:52:29 -0500 Subject: [PATCH 056/113] add test for enforcing contracts on incremental materializations (#374) * add test for enforcing contracts on incremental materializations * fix dependency double copy * reset dev reqs --- tests/functional/adapter/test_constraints.py | 41 +++++++++++++++----- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 64abae1b8..5dd5aad09 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -1,19 +1,22 @@ import pytest -from dbt.tests.util import relation_from_name from dbt.tests.adapter.constraints.test_constraints import ( BaseTableConstraintsColumnsEqual, BaseViewConstraintsColumnsEqual, - BaseConstraintsRuntimeEnforcement, + BaseIncrementalConstraintsColumnsEqual, + BaseConstraintsRuntimeDdlEnforcement, + BaseConstraintsRollback, + BaseIncrementalConstraintsRuntimeDdlEnforcement, + BaseIncrementalConstraintsRollback, ) _expected_sql_redshift = """ -create table {0} ( +create table ( id integer not null, color text, date_day text, primary key(id) ) ; -insert into {0} 
+insert into ( select id, @@ -60,13 +63,33 @@ class TestRedshiftViewConstraintsColumnsEqual( pass -class TestRedshiftConstraintsRuntimeEnforcement(BaseConstraintsRuntimeEnforcement): +class TestRedshiftIncrementalConstraintsColumnsEqual( + RedshiftColumnEqualSetup, BaseIncrementalConstraintsColumnsEqual +): + pass + + +class TestRedshiftTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement): + @pytest.fixture(scope="class") + def expected_sql(self): + return _expected_sql_redshift + + +class TestRedshiftTableConstraintsRollback(BaseConstraintsRollback): @pytest.fixture(scope="class") - def expected_sql(self, project): - relation = relation_from_name(project.adapter, "my_model") - tmp_relation = relation.incorporate(path={"identifier": relation.identifier + "__dbt_tmp"}) - return _expected_sql_redshift.format(tmp_relation) + def expected_error_messages(self): + return ["Cannot insert a NULL value into column id"] + + +class TestRedshiftIncrementalConstraintsRuntimeDdlEnforcement( + BaseIncrementalConstraintsRuntimeDdlEnforcement +): + @pytest.fixture(scope="class") + def expected_sql(self): + return _expected_sql_redshift + +class TestRedshiftIncrementalConstraintsRollback(BaseIncrementalConstraintsRollback): @pytest.fixture(scope="class") def expected_error_messages(self): return ["Cannot insert a NULL value into column id"] From c4d44b299b5a5bef24ae36df9a04f37f87f61c7b Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 28 Mar 2023 17:40:09 -0500 Subject: [PATCH 057/113] update to retrieve contract enforced from dict (#383) * update to retrieve contract enforced from dict * add dependecy on branch * change ref * Update Under the Hood-20230223-110314.yaml * remove extra get * revert branch dependency --- .changes/1.5.0/Under the Hood-20230223-110314.yaml | 6 ------ .changes/unreleased/Under the Hood-20230223-110314.yaml | 6 ++++++ dbt/include/redshift/macros/adapters.sql | 6 ++++-- 3 files changed, 10 insertions(+), 8 deletions(-) delete 
mode 100644 .changes/1.5.0/Under the Hood-20230223-110314.yaml create mode 100644 .changes/unreleased/Under the Hood-20230223-110314.yaml diff --git a/.changes/1.5.0/Under the Hood-20230223-110314.yaml b/.changes/1.5.0/Under the Hood-20230223-110314.yaml deleted file mode 100644 index ef1e35744..000000000 --- a/.changes/1.5.0/Under the Hood-20230223-110314.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Rename constraints_enabled to contract -time: 2023-02-23T11:03:14.344028-05:00 -custom: - Author: gshank - Issue: "330" diff --git a/.changes/unreleased/Under the Hood-20230223-110314.yaml b/.changes/unreleased/Under the Hood-20230223-110314.yaml new file mode 100644 index 000000000..a4f519569 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230223-110314.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Treat contract config as a python object +time: 2023-02-23T11:03:14.344028-05:00 +custom: + Author: gshank emmyoop + Issue: 330 382 diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index ebf2e16a5..c863ee362 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -43,7 +43,8 @@ {{ sql_header if sql_header is not none }} - {%- if config.get('contract', False) %} + {%- set contract_config = config.get('contract') -%} + {%- if contract_config.enforced -%} create {% if temporary -%}temporary{%- endif %} table {{ relation.include(database=(not temporary), schema=(not temporary)) }} @@ -85,7 +86,8 @@ {{ sql_header if sql_header is not none }} create view {{ relation }} - {% if config.get('contract', False) -%} + {%- set contract_config = config.get('contract') -%} + {%- if contract_config.enforced -%} {{ get_assert_columns_equivalent(sql) }} {%- endif %} as ( {{ sql }} From 97b734028f0b4939ce273aafa5f2d263ba36fdcf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 20:36:17 -0700 
Subject: [PATCH 058/113] Update pytz requirement from ~=2022.7 to ~=2023.2 (#384) Updates the requirements on [pytz](https://github.com/stub42/pytz) to permit the latest version. - [Release notes](https://github.com/stub42/pytz/releases) - [Commits](https://github.com/stub42/pytz/compare/release_2022.7...release_2023.2) --- updated-dependencies: - dependency-name: pytz dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mila Page <67295367+VersusFacit@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 31cf738ff..35e8b8237 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -24,7 +24,7 @@ pytest-csv~=3.0 pytest-dotenv~=0.5.2 pytest-logbook~=1.2 pytest-xdist~=3.2 -pytz~=2022.7 +pytz~=2023.2 tox~=3.0;python_version=="3.7" tox~=4.4;python_version>="3.8" types-pytz~=2022.7 From a9ae7ef2bcca245ea76a55b46f12be035f45b4f6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 20:58:33 -0700 Subject: [PATCH 059/113] Update types-pytz requirement from ~=2022.7 to ~=2023.2 (#385) Updates the requirements on [types-pytz](https://github.com/python/typeshed) to permit the latest version. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pytz dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mila Page <67295367+VersusFacit@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 35e8b8237..25f4a7690 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -27,7 +27,7 @@ pytest-xdist~=3.2 pytz~=2023.2 tox~=3.0;python_version=="3.7" tox~=4.4;python_version>="3.8" -types-pytz~=2022.7 +types-pytz~=2023.2 types-requests~=2.28 twine~=4.0 wheel~=0.40 From b2c803952409dd9263481a386e4df0a7bca637a6 Mon Sep 17 00:00:00 2001 From: Github Build Bot Date: Thu, 30 Mar 2023 19:00:21 +0000 Subject: [PATCH 060/113] Bumping version to 1.5.0b4 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.5.0-b4.md | 10 ++++++++++ .../Features-20230314-124314.yaml | 0 .../Features-20230314-154621.yaml | 0 .../Under the Hood-20230223-110314.yaml | 0 CHANGELOG.md | 15 +++++++++++++-- dbt/adapters/redshift/__version__.py | 2 +- 7 files changed, 25 insertions(+), 4 deletions(-) create mode 100644 .changes/1.5.0-b4.md rename .changes/{unreleased => 1.5.0}/Features-20230314-124314.yaml (100%) rename .changes/{unreleased => 1.5.0}/Features-20230314-154621.yaml (100%) rename .changes/{unreleased => 1.5.0}/Under the Hood-20230223-110314.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 448efb95c..40a1d919e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.5.0b3 +current_version = 1.5.0b4 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.5.0-b4.md b/.changes/1.5.0-b4.md new file mode 100644 index 000000000..5ab44a175 --- /dev/null +++ b/.changes/1.5.0-b4.md @@ -0,0 +1,10 @@ +## dbt-redshift 1.5.0-b4 - March 30, 2023 + +### Features + +- Use contracted column order in create_table_as 
([#356](https://github.com/dbt-labs/dbt-redshift/issues/356)) +- Modify adapter to work with unified constraint fields ([#341](https://github.com/dbt-labs/dbt-redshift/issues/341)) + +### Under the Hood + +- Treat contract config as a python object ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330), [#382](https://github.com/dbt-labs/dbt-redshift/issues/382)) diff --git a/.changes/unreleased/Features-20230314-124314.yaml b/.changes/1.5.0/Features-20230314-124314.yaml similarity index 100% rename from .changes/unreleased/Features-20230314-124314.yaml rename to .changes/1.5.0/Features-20230314-124314.yaml diff --git a/.changes/unreleased/Features-20230314-154621.yaml b/.changes/1.5.0/Features-20230314-154621.yaml similarity index 100% rename from .changes/unreleased/Features-20230314-154621.yaml rename to .changes/1.5.0/Features-20230314-154621.yaml diff --git a/.changes/unreleased/Under the Hood-20230223-110314.yaml b/.changes/1.5.0/Under the Hood-20230223-110314.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20230223-110314.yaml rename to .changes/1.5.0/Under the Hood-20230223-110314.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 4a583e587..aedf8599c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,19 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.5.0-b4 - March 30, 2023 + +### Features + +- Use contracted column order in create_table_as ([#356](https://github.com/dbt-labs/dbt-redshift/issues/356)) +- Modify adapter to work with unified constraint fields ([#341](https://github.com/dbt-labs/dbt-redshift/issues/341)) + +### Under the Hood + +- Treat contract config as a python object ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330), [#382](https://github.com/dbt-labs/dbt-redshift/issues/382)) + + + ## dbt-redshift 1.5.0-b3 - March 16, 2023 ### Features @@ -15,8 +28,6 @@ - Added methods to `RedshiftAdapter` that were inadvertantly dropped when migrating from `PostgresAdapter` to `SQLAdapter` ([#365](https://github.com/dbt-labs/dbt-redshift/issues/365)) - - ## dbt-redshift 1.5.0-b2 - March 02, 2023 ### Under the Hood diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 649c005ac..4a7a8147e 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.5.0b3" +version = "1.5.0b4" From 068295901c73c347781660939a44cb1eb5e928a9 Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Fri, 7 Apr 2023 08:50:04 -0700 Subject: [PATCH 061/113] Test conversion for simple snapshot into a functional test (#397) * Test conversion Simple snapshot * Remove branch change --- tests/functional/adapter/common.py | 143 ------------ .../adapter/snapshot_tests/seeds.py | 33 --- .../adapter/snapshot_tests/snapshots.py | 27 --- .../adapter/snapshot_tests/test_snapshot.py | 206 ------------------ .../adapter/test_simple_snapshot.py | 9 + 5 files changed, 9 insertions(+), 409 deletions(-) delete mode 100644 tests/functional/adapter/common.py delete mode 100644 tests/functional/adapter/snapshot_tests/seeds.py delete mode 100644 
tests/functional/adapter/snapshot_tests/snapshots.py delete mode 100644 tests/functional/adapter/snapshot_tests/test_snapshot.py create mode 100644 tests/functional/adapter/test_simple_snapshot.py diff --git a/tests/functional/adapter/common.py b/tests/functional/adapter/common.py deleted file mode 100644 index ce7c0903b..000000000 --- a/tests/functional/adapter/common.py +++ /dev/null @@ -1,143 +0,0 @@ -from typing import Dict, List - -from dbt.tests.util import relation_from_name -from dbt.tests.fixtures.project import TestProjInfo - - -def get_records( - project: TestProjInfo, table: str, select: str = None, where: str = None -) -> List[tuple]: - """ - Gets records from a single table in a dbt project - - Args: - project: the dbt project that contains the table - table: the name of the table without a schema - select: the selection clause; defaults to all columns (*) - where: the where clause to apply, if any; defaults to all records - - Returns: - A list of records with each record as a tuple - """ - table_name = relation_from_name(project.adapter, table) - select_clause = select or "*" - where_clause = where or "1 = 1" - sql = f""" - select {select_clause} - from {table_name} - where {where_clause} - """ - return [tuple(record) for record in project.run_sql(sql, fetch="all")] - - -def update_records(project: TestProjInfo, table: str, updates: Dict[str, str], where: str = None): - """ - Applies updates to a table in a dbt project - - Args: - project: the dbt project that contains the table - table: the name of the table without a schema - updates: the updates to be applied in the form {'field_name': 'expression to be applied'} - where: the where clause to apply, if any; defaults to all records - """ - table_name = relation_from_name(project.adapter, table) - set_clause = ", ".join( - [" = ".join([field, expression]) for field, expression in updates.items()] - ) - where_clause = where or "1 = 1" - sql = f""" - update {table_name} - set {set_clause} - where 
{where_clause} - """ - project.run_sql(sql) - - -def insert_records( - project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None -): - """ - Inserts records from one table into another table in a dbt project - - Args: - project: the dbt project that contains the table - to_table: the name of the table, without a schema, in which the records will be inserted - from_table: the name of the table, without a schema, which contains the records to be inserted - select: the selection clause to apply on `from_table`; defaults to all columns (*) - where: the where clause to apply on `from_table`, if any; defaults to all records - """ - to_table_name = relation_from_name(project.adapter, to_table) - from_table_name = relation_from_name(project.adapter, from_table) - select_clause = select or "*" - where_clause = where or "1 = 1" - sql = f""" - insert into {to_table_name} - select {select_clause} - from {from_table_name} - where {where_clause} - """ - project.run_sql(sql) - - -def delete_records(project: TestProjInfo, table: str, where: str = None): - """ - Deletes records from a table in a dbt project - - Args: - project: the dbt project that contains the table - table: the name of the table without a schema - where: the where clause to apply, if any; defaults to all records - """ - table_name = relation_from_name(project.adapter, table) - where_clause = where or "1 = 1" - sql = f""" - delete from {table_name} - where {where_clause} - """ - project.run_sql(sql) - - -def clone_table( - project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None -): - """ - Creates a new table based on another table in a dbt project - - Args: - project: the dbt project that contains the table - to_table: the name of the table, without a schema, to be created - from_table: the name of the table, without a schema, to be cloned - select: the selection clause to apply on `from_table`; defaults to all columns (*) - where: the where clause to 
apply on `from_table`, if any; defaults to all records - """ - to_table_name = relation_from_name(project.adapter, to_table) - from_table_name = relation_from_name(project.adapter, from_table) - select_clause = select or "*" - where_clause = where or "1 = 1" - sql = f"drop table if exists {to_table_name}" - project.run_sql(sql) - sql = f""" - create table {to_table_name} as - select {select_clause} - from {from_table_name} - where {where_clause} - """ - project.run_sql(sql) - - -def add_column(project: TestProjInfo, table: str, column: str, definition: str): - """ - Applies updates to a table in a dbt project - - Args: - project: the dbt project that contains the table - table: the name of the table without a schema - column: the name of the new column - definition: the definition of the new column, e.g. 'varchar(20) default null' - """ - table_name = relation_from_name(project.adapter, table) - sql = f""" - alter table {table_name} - add column {column} {definition} - """ - project.run_sql(sql) diff --git a/tests/functional/adapter/snapshot_tests/seeds.py b/tests/functional/adapter/snapshot_tests/seeds.py deleted file mode 100644 index 262ddd2f3..000000000 --- a/tests/functional/adapter/snapshot_tests/seeds.py +++ /dev/null @@ -1,33 +0,0 @@ -SEED_CSV = """ -id,first_name,last_name,email,gender,ip_address,updated_at -1,Judith,Kennedy,jkennedy0@phpbb.com,Female,54.60.24.128,2015-12-24 -2,Arthur,Kelly,akelly1@eepurl.com,Male,62.56.24.215,2015-10-28 -3,Rachel,Moreno,rmoreno2@msu.edu,Female,31.222.249.23,2016-04-05 -4,Ralph,Turner,rturner3@hp.com,Male,157.83.76.114,2016-08-08 -5,Laura,Gonzales,lgonzales4@howstuffworks.com,Female,30.54.105.168,2016-09-01 -6,Katherine,Lopez,klopez5@yahoo.co.jp,Female,169.138.46.89,2016-08-30 -7,Jeremy,Hamilton,jhamilton6@mozilla.org,Male,231.189.13.133,2016-07-17 -8,Heather,Rose,hrose7@goodreads.com,Female,87.165.201.65,2015-12-29 -9,Gregory,Kelly,gkelly8@trellian.com,Male,154.209.99.7,2016-03-24 
-10,Rachel,Lopez,rlopez9@themeforest.net,Female,237.165.82.71,2016-08-20 -11,Donna,Welch,dwelcha@shutterfly.com,Female,103.33.110.138,2016-02-27 -12,Russell,Lawrence,rlawrenceb@qq.com,Male,189.115.73.4,2016-06-11 -13,Michelle,Montgomery,mmontgomeryc@scientificamerican.com,Female,243.220.95.82,2016-06-18 -14,Walter,Castillo,wcastillod@pagesperso-orange.fr,Male,71.159.238.196,2016-10-06 -15,Robin,Mills,rmillse@vkontakte.ru,Female,172.190.5.50,2016-10-31 -16,Raymond,Holmes,rholmesf@usgs.gov,Male,148.153.166.95,2016-10-03 -17,Gary,Bishop,gbishopg@plala.or.jp,Male,161.108.182.13,2016-08-29 -18,Anna,Riley,arileyh@nasa.gov,Female,253.31.108.22,2015-12-11 -19,Sarah,Knight,sknighti@foxnews.com,Female,222.220.123.177,2016-09-26 -20,Phyllis,Fox,null,Female,163.191.232.95,2016-08-21 -21,Judy,Robinson,jrobinsonk@blogs.com,Female,208.21.192.232,2016-09-18 -22,Kevin,Alvarez,kalvarezl@buzzfeed.com,Male,228.106.146.9,2016-07-29 -23,Barbara,Carr,bcarrm@pen.io,Female,106.165.140.17,2015-09-24 -24,William,Watkins,wwatkinsn@guardian.co.uk,Male,78.155.84.6,2016-03-08 -25,Judy,Cooper,jcoopero@google.com.au,Female,24.149.123.184,2016-10-05 -26,Shirley,Castillo,scastillop@samsung.com,Female,129.252.181.12,2016-06-20 -27,Justin,Harper,jharperq@opera.com,Male,131.172.103.218,2016-05-21 -28,Marie,Medina,mmedinar@nhs.uk,Female,188.119.125.67,2015-10-08 -29,Kelly,Edwards,kedwardss@phoca.cz,Female,47.121.157.66,2015-09-15 -30,Carl,Coleman,ccolemant@wikipedia.org,Male,82.227.154.83,2016-05-26 -""".strip() diff --git a/tests/functional/adapter/snapshot_tests/snapshots.py b/tests/functional/adapter/snapshot_tests/snapshots.py deleted file mode 100644 index e8ea5a7e2..000000000 --- a/tests/functional/adapter/snapshot_tests/snapshots.py +++ /dev/null @@ -1,27 +0,0 @@ -SNAPSHOT_TIMESTAMP_SQL = """ -{% snapshot snapshot %} - {{ config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at', - invalidate_hard_deletes=True, - ) }} - select 
* from {{ ref('fact') }} -{% endsnapshot %} -""" - - -SNAPSHOT_CHECK_SQL = """ -{% snapshot snapshot %} - {{ config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['email'], - ) }} - select * from {{ ref('fact') }} -{% endsnapshot %} -""" diff --git a/tests/functional/adapter/snapshot_tests/test_snapshot.py b/tests/functional/adapter/snapshot_tests/test_snapshot.py deleted file mode 100644 index 9a4ef7694..000000000 --- a/tests/functional/adapter/snapshot_tests/test_snapshot.py +++ /dev/null @@ -1,206 +0,0 @@ -from typing import Dict, List, Iterable - -import pytest - -from dbt.tests.util import run_dbt - -from tests.functional.adapter import common -from tests.functional.adapter.snapshot_tests import seeds, snapshots - - -MODEL_FACT_SQL = """ -{{ config(materialized="table") }} -select * from {{ ref('seed') }} -where id between 1 and 20 -""" - - -class SnapshotBase: - @pytest.fixture(scope="class") - def seeds(self): - """ - This seed file contains all records needed for tests, including records which will be inserted after the - initial snapshot. This makes it so that Redshift creates the correct size varchar columns. This table - will only need to be loaded once at the class level. It will never be altered, hence requires no further - setup or teardown. - """ - return {"seed.csv": seeds.SEED_CSV} - - @pytest.fixture(scope="class") - def models(self): - """ - This will be the working base table. It will be altered by each test, hence will require setup and - teardown at the test case level. See `self._setup_method(self, project)`. - """ - return {"fact.sql": MODEL_FACT_SQL} - - @pytest.fixture(scope="class", autouse=True) - def _setup_class(self, project): - """ - Load `seed` once for the whole class - """ - run_dbt(["seed"]) - - @pytest.fixture(scope="function", autouse=True) - def _setup_method(self, project): - """ - Initialize `fact` and `snapshot` for every test case. 
- Only load the first 20 `seed` records into `fact`; withhold 10 records as "new" (e.g. to test inserts). - - Make the project a class variable to simplify function calls and make the code more readable. - For some reason this doesn't work in the class-scoped fixture, but does in the function-scoped fixture. - """ - self.project = project - self.create_fact_from_seed("id between 1 and 20") - run_dbt(["snapshot"]) - yield - self.delete_snapshot_records() - self.delete_fact_records() - - def update_fact_records(self, updates: Dict[str, str], where: str = None): - common.update_records(self.project, "fact", updates, where) - - def insert_fact_records(self, where: str = None): - common.insert_records(self.project, "fact", "seed", "*", where) - - def delete_fact_records(self, where: str = None): - common.delete_records(self.project, "fact", where) - - def add_fact_column(self, column: str = None, definition: str = None): - common.add_column(self.project, "fact", column, definition) - - def create_fact_from_seed(self, where: str = None): - common.clone_table(self.project, "fact", "seed", "*", where) - - def get_snapshot_records(self, select: str = None, where: str = None) -> List[tuple]: - return common.get_records(self.project, "snapshot", select, where) - - def delete_snapshot_records(self): - common.delete_records(self.project, "snapshot") - - def _assert_results( - self, - ids_with_current_snapshot_records: Iterable, - ids_with_closed_out_snapshot_records: Iterable, - ): - """ - All test cases are checked by considering whether a source record's id has a value in `dbt_valid_to` - in `snapshot`. 
Each id can fall into one of the following cases: - - - The id has only one record in `snapshot`; it has a value in `dbt_valid_to` - - the record was hard deleted in the source - - The id has only one record in `snapshot`; it does not have a value in `dbt_valid_to` - - the record was not updated in the source - - the record was updated in the source, but not in a way that is tracked (e.g. via `strategy='check'`) - - The id has two records in `snapshot`; one has a value in `dbt_valid_to`, the other does not - - the record was altered in the source in a way that is tracked - - the record was hard deleted and revived - - Note: Because of the third scenario, ids may show up in both arguments of this method. - - Args: - ids_with_current_snapshot_records: a list/set/etc. of ids which are not end-dated - ids_with_closed_out_snapshot_records: a list/set/etc. of ids which are end-dated - """ - records = set(self.get_snapshot_records("id, dbt_valid_to is null as is_current")) - expected_records = set().union( - {(i, True) for i in ids_with_current_snapshot_records}, - {(i, False) for i in ids_with_closed_out_snapshot_records}, - ) - assert records == expected_records - - -class TestSnapshot(SnapshotBase): - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots.SNAPSHOT_TIMESTAMP_SQL} - - def test_updates_are_captured_by_snapshot(self, project): - """ - Update the last 5 records. Show that all ids are current, but the last 5 reflect updates. - """ - self.update_fact_records( - {"updated_at": "updated_at + interval '1 day'"}, "id between 16 and 20" - ) - run_dbt(["snapshot"]) - self._assert_results( - ids_with_current_snapshot_records=range(1, 21), - ids_with_closed_out_snapshot_records=range(16, 21), - ) - - def test_inserts_are_captured_by_snapshot(self, project): - """ - Insert 10 records. Show that there are 30 records in `snapshot`, all of which are current. 
- """ - self.insert_fact_records("id between 21 and 30") - run_dbt(["snapshot"]) - self._assert_results( - ids_with_current_snapshot_records=range(1, 31), ids_with_closed_out_snapshot_records=[] - ) - - def test_deletes_are_captured_by_snapshot(self, project): - """ - Hard delete the last five records. Show that there are now only 15 current records and 5 expired records. - """ - self.delete_fact_records("id between 16 and 20") - run_dbt(["snapshot"]) - self._assert_results( - ids_with_current_snapshot_records=range(1, 16), - ids_with_closed_out_snapshot_records=range(16, 21), - ) - - def test_revives_are_captured_by_snapshot(self, project): - """ - Delete the last five records and run snapshot to collect that information, then revive 3 of those records. - Show that there are now 18 current records and 5 expired records. - """ - self.delete_fact_records("id between 16 and 20") - run_dbt(["snapshot"]) - self.insert_fact_records("id between 16 and 18") - run_dbt(["snapshot"]) - self._assert_results( - ids_with_current_snapshot_records=range(1, 19), - ids_with_closed_out_snapshot_records=range(16, 21), - ) - - def test_new_column_captured_by_snapshot(self, project): - """ - Add a column to `fact` and populate the last 10 records with a non-null value. - Show that all ids are current, but the last 10 reflect updates and the first 10 do not. - i.e. 
if the column is added, but not updated, the record does not reflect that it's updated - """ - self.add_fact_column("full_name", "varchar(200) default null") - self.update_fact_records( - { - "full_name": "first_name || ' ' || last_name", - "updated_at": "updated_at + interval '1 day'", - }, - "id between 11 and 20", - ) - run_dbt(["snapshot"]) - self._assert_results( - ids_with_current_snapshot_records=range(1, 21), - ids_with_closed_out_snapshot_records=range(11, 21), - ) - - -class TestSnapshotCheck(SnapshotBase): - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots.SNAPSHOT_CHECK_SQL} - - def test_column_selection_is_reflected_in_snapshot(self, project): - """ - Update the first 10 records on a non-tracked column. - Update the middle 10 records on a tracked column. (hence records 6-10 are updated on both) - Show that all ids are current, and only the tracked column updates are reflected in `snapshot`. - """ - self.update_fact_records( - {"last_name": "left(last_name, 3)"}, "id between 1 and 10" - ) # not tracked - self.update_fact_records({"email": "left(email, 3)"}, "id between 6 and 15") # tracked - run_dbt(["snapshot"]) - self._assert_results( - ids_with_current_snapshot_records=range(1, 21), - ids_with_closed_out_snapshot_records=range(6, 16), - ) diff --git a/tests/functional/adapter/test_simple_snapshot.py b/tests/functional/adapter/test_simple_snapshot.py new file mode 100644 index 000000000..4db5b2330 --- /dev/null +++ b/tests/functional/adapter/test_simple_snapshot.py @@ -0,0 +1,9 @@ +from dbt.tests.adapter.simple_snapshot.test_snapshot import BaseSnapshotCheck, BaseSimpleSnapshot + + +class TestSnapshot(BaseSimpleSnapshot): + pass + + +class TestSnapshotCheck(BaseSnapshotCheck): + pass From d34894520b1514291d6e50d726abbd80655969ab Mon Sep 17 00:00:00 2001 From: Peter Webb Date: Tue, 11 Apr 2023 13:59:27 -0400 Subject: [PATCH 062/113] Model-Level Constraints (#395) * CT-2222: Redshift adapter support for 
model-level constrints * CT-2222: Add changelog entry --- .../unreleased/Features-20230406-104634.yaml | 6 +++ dbt/adapters/redshift/impl.py | 16 +++++++- dbt/include/redshift/macros/adapters.sql | 2 +- .../macros/utils/get_columns_spec_ddl.sql | 41 ------------------- dev-requirements.txt | 6 +-- tests/functional/adapter/test_constraints.py | 34 +++++++++++++-- 6 files changed, 56 insertions(+), 49 deletions(-) create mode 100644 .changes/unreleased/Features-20230406-104634.yaml delete mode 100644 dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql diff --git a/.changes/unreleased/Features-20230406-104634.yaml b/.changes/unreleased/Features-20230406-104634.yaml new file mode 100644 index 000000000..1dd770544 --- /dev/null +++ b/.changes/unreleased/Features-20230406-104634.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add support for model-level constraints +time: 2023-04-06T10:46:34.699722-04:00 +custom: + Author: peterallenwebb + Issue: "343" diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 718f14b08..d7208a0a8 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -11,7 +11,7 @@ import dbt.exceptions from dbt.adapters.redshift import RedshiftConnectionManager, RedshiftRelation, RedshiftColumn - +from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint logger = AdapterLogger("Redshift") @@ -154,3 +154,17 @@ def default_python_submission_method(self) -> str: def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse: return super().generate_python_submission_response(submission_result) + + @classmethod + def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> str: + if constraint.type == ConstraintType.check: + return "" # check not supported by redshift + else: + return super().render_column_constraint(constraint) + + @classmethod + def render_model_constraint(cls, constraint: ModelLevelConstraint) -> 
Optional[str]: + if constraint.type == ConstraintType.check: + return None # check not supported by redshift + else: + return super().render_model_constraint(constraint) diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index c863ee362..7adf3a077 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -48,7 +48,7 @@ create {% if temporary -%}temporary{%- endif %} table {{ relation.include(database=(not temporary), schema=(not temporary)) }} - {{ get_columns_spec_ddl() }} + {{ get_table_columns_and_constraints() }} {{ get_assert_columns_equivalent(sql) }} {%- set sql = get_select_subquery(sql) %} {% if backup == false -%}backup no{%- endif %} diff --git a/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql b/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql deleted file mode 100644 index 5c9ba5dec..000000000 --- a/dbt/include/redshift/macros/utils/get_columns_spec_ddl.sql +++ /dev/null @@ -1,41 +0,0 @@ -{% macro redshift__get_columns_spec_ddl() %} - {#- loop through user_provided_columns to create DDL with data types and constraints -#} - {%- set user_provided_columns = model['columns'] -%} - {%- set primary_keys = [] -%} - {%- set ddl_lines = [] -%} - - {%- for i in user_provided_columns -%} - {%- set col = user_provided_columns[i] -%} - {%- set constraints = col['constraints'] -%} - {%- set ns = namespace(not_null_line='', has_check_constraints=False) -%} - - {%- for constraint in constraints -%} - {%- if constraint.type == 'primary_key' -%} - {%- do primary_keys.append(col['name']) -%} - {%- elif constraint.type == 'not_null' %} - {%- set ns.not_null_line = " not null" -%} - {%- elif constraint.type == 'check' %} - {%- set ns.has_check_constraints = True -%} - {%- endif -%} - {%- endfor -%} - - {%- if ns.has_check_constraints -%} - {{ exceptions.warn("There are check constraints in your configs, which Redshift does not support. They will be ignored. 
See column `" ~ col['name'] ~ "`") }} - {%- endif -%} - - {%- set col_line = col['name'] ~ " " ~ col['data_type'] ~ ns.not_null_line -%} - {%- do ddl_lines.append(col_line) -%} - {%- endfor -%} - - {%- if primary_keys -%} - {%- set primary_key_line = "primary key(" ~ primary_keys | join(", ") ~")" -%} - {%- do ddl_lines.append(primary_key_line) -%} - {%- endif %} - - ( - {%- for line in ddl_lines %} - {{ line }}{{ "," if not loop.last }} - {%- endfor %} - ) - -{% endmacro %} diff --git a/dev-requirements.txt b/dev-requirements.txt index 25f4a7690..c0b2c25ec 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,8 +1,8 @@ # install latest changes in dbt-core + dbt-postgres # TODO: how to switch from HEAD to x.y.latest branches after minor releases? -git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core -git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter -git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-postgres&subdirectory=plugins/postgres +git+https://github.com/dbt-labs/dbt-core.git@paw/ct-1922-model-level-constraints#egg=dbt-core&subdirectory=core +git+https://github.com/dbt-labs/dbt-core.git@paw/ct-1922-model-level-constraints#egg=dbt-tests-adapter&subdirectory=tests/adapter +git+https://github.com/dbt-labs/dbt-core.git@paw/ct-1922-model-level-constraints#egg=dbt-postgres&subdirectory=plugins/postgres # if version 1.x or greater -> pin to major version # if version 0.x -> pin to minor diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 5dd5aad09..0fdd37da7 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -7,14 +7,14 @@ BaseConstraintsRollback, BaseIncrementalConstraintsRuntimeDdlEnforcement, BaseIncrementalConstraintsRollback, + BaseModelConstraintsRuntimeEnforcement, ) _expected_sql_redshift = """ create table ( - id integer not null, + id integer not null 
primary key, color text, - date_day text, - primary key(id) + date_day text ) ; insert into ( @@ -93,3 +93,31 @@ class TestRedshiftIncrementalConstraintsRollback(BaseIncrementalConstraintsRollb @pytest.fixture(scope="class") def expected_error_messages(self): return ["Cannot insert a NULL value into column id"] + + +class TestRedshiftModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement): + @pytest.fixture(scope="class") + def expected_sql(self): + return """ +create table ( + id integer not null, + color text, + date_day text, + primary key (id), + constraint strange_uniqueness_requirement unique (color, date_day) +) ; +insert into +( + select + id, + color, + date_day from + ( + select + 1 as id, + 'blue' as color, + '2019-01-01' as date_day + ) as model_subq +) +; +""" From 5da1e4e4e921e3e8fecaae2bda95f36a003ffb5b Mon Sep 17 00:00:00 2001 From: Peter Webb Date: Tue, 11 Apr 2023 14:59:12 -0400 Subject: [PATCH 063/113] Repair accidental change to dev-requirements (#404) --- .changes/unreleased/Fixes-20230411-143706.yaml | 6 ++++++ dev-requirements.txt | 6 +++--- 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230411-143706.yaml diff --git a/.changes/unreleased/Fixes-20230411-143706.yaml b/.changes/unreleased/Fixes-20230411-143706.yaml new file mode 100644 index 000000000..799e031e5 --- /dev/null +++ b/.changes/unreleased/Fixes-20230411-143706.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Repair accidental change to dev-requirements +time: 2023-04-11T14:37:06.868255-04:00 +custom: + Author: peterallenwebb + Issue: "403" diff --git a/dev-requirements.txt b/dev-requirements.txt index c0b2c25ec..25f4a7690 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,8 +1,8 @@ # install latest changes in dbt-core + dbt-postgres # TODO: how to switch from HEAD to x.y.latest branches after minor releases? 
-git+https://github.com/dbt-labs/dbt-core.git@paw/ct-1922-model-level-constraints#egg=dbt-core&subdirectory=core -git+https://github.com/dbt-labs/dbt-core.git@paw/ct-1922-model-level-constraints#egg=dbt-tests-adapter&subdirectory=tests/adapter -git+https://github.com/dbt-labs/dbt-core.git@paw/ct-1922-model-level-constraints#egg=dbt-postgres&subdirectory=plugins/postgres +git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core +git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter +git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-postgres&subdirectory=plugins/postgres # if version 1.x or greater -> pin to major version # if version 0.x -> pin to minor From 6faee98a9f91b4882177492c48f49d734c0f9c47 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Tue, 11 Apr 2023 12:51:48 -0700 Subject: [PATCH 064/113] fix the json obj in msgs bug for both text and Json logs (#398) * fix the json obj in msgs bug for both text and Json logs * Clean up the code and fix the second bug by using a dummy message * make the changie the log message a mite more explicit --------- Co-authored-by: Mila Page --- .changes/unreleased/Fixes-20230407-213725.yaml | 6 ++++++ dbt/adapters/redshift/connections.py | 12 +++++++++--- 2 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230407-213725.yaml diff --git a/.changes/unreleased/Fixes-20230407-213725.yaml b/.changes/unreleased/Fixes-20230407-213725.yaml new file mode 100644 index 000000000..e83b0e147 --- /dev/null +++ b/.changes/unreleased/Fixes-20230407-213725.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix two adapter logging bugs arising from using the redshift cursor object +time: 2023-04-07T21:37:25.873248-07:00 +custom: + Author: versusfacit + Issue: "373" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index f35a429da..9e830cb53 100644 --- 
a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -178,8 +178,10 @@ def cancel(self, connection: Connection): @classmethod def get_response(cls, cursor: redshift_connector.Cursor) -> AdapterResponse: + # redshift_connector.Cursor doesn't have a status message attribute but + # this function is only used for successful run, so we can just return a dummy rows = cursor.rowcount - message = f"cursor.rowcount = {rows}" + message = "SUCCESS" return AdapterResponse(_message=message, rows_affected=rows) @contextmanager @@ -187,9 +189,13 @@ def exception_handler(self, sql): try: yield except redshift_connector.DatabaseError as e: - logger.debug(f"Redshift error: {str(e)}") + try: + err_msg = e.args[0]["M"] # this is a type redshift sets, so we must use these keys + except Exception: + err_msg = str(e).strip() + logger.debug(f"Redshift error: {err_msg}") self.rollback_if_open() - raise dbt.exceptions.DbtDatabaseError(str(e).strip()) from e + raise dbt.exceptions.DbtDatabaseError(err_msg) from e except Exception as e: logger.debug("Error running SQL: {}", sql) From ee1d08d90b615e50e288558042f045080f5d47c9 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 11 Apr 2023 15:19:21 -0500 Subject: [PATCH 065/113] update to generalize constraints (#393) * update to generalize constraints * fix primary key logic * remove special logic for primary key * fix mypy * update dev requirements --- .changes/1.5.0/Features-20221209-105640.yaml | 7 +++-- dbt/adapters/redshift/impl.py | 27 ++++++++------------ 2 files changed, 14 insertions(+), 20 deletions(-) diff --git a/.changes/1.5.0/Features-20221209-105640.yaml b/.changes/1.5.0/Features-20221209-105640.yaml index da32fb499..ee31dc7b7 100644 --- a/.changes/1.5.0/Features-20221209-105640.yaml +++ b/.changes/1.5.0/Features-20221209-105640.yaml @@ -1,7 +1,6 @@ kind: Features -body: dbt-constraints for redshift +body: 'Support for data types constraints in Redshift' time: 2022-12-09T10:56:40.808781-06:00 
custom: - Author: dave-connors-3 - Issue: "227" - PR: "229" + Author: dave-connors-3 emmyoop + Issue: 227 342 diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index d7208a0a8..54fdd7dcf 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -3,15 +3,16 @@ from collections import namedtuple from dbt.adapters.base import PythonJobHelper -from dbt.adapters.base.impl import AdapterConfig +from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport from dbt.adapters.sql import SQLAdapter from dbt.adapters.base.meta import available from dbt.contracts.connection import AdapterResponse +from dbt.contracts.graph.nodes import ConstraintType from dbt.events import AdapterLogger + import dbt.exceptions from dbt.adapters.redshift import RedshiftConnectionManager, RedshiftRelation, RedshiftColumn -from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint logger = AdapterLogger("Redshift") @@ -36,6 +37,14 @@ class RedshiftAdapter(SQLAdapter): AdapterSpecificConfigs = RedshiftConfig # type: ignore + CONSTRAINT_SUPPORT = { + ConstraintType.check: ConstraintSupport.NOT_SUPPORTED, + ConstraintType.not_null: ConstraintSupport.ENFORCED, + ConstraintType.unique: ConstraintSupport.NOT_ENFORCED, + ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED, + ConstraintType.foreign_key: ConstraintSupport.NOT_ENFORCED, + } + @classmethod def date_function(cls): return "getdate()" @@ -154,17 +163,3 @@ def default_python_submission_method(self) -> str: def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse: return super().generate_python_submission_response(submission_result) - - @classmethod - def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> str: - if constraint.type == ConstraintType.check: - return "" # check not supported by redshift - else: - return super().render_column_constraint(constraint) - - @classmethod - def 
render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]: - if constraint.type == ConstraintType.check: - return None # check not supported by redshift - else: - return super().render_model_constraint(constraint) From 6254b0fecc24017bcaf209be890c499934e19a18 Mon Sep 17 00:00:00 2001 From: FishtownBuildBot <77737458+FishtownBuildBot@users.noreply.github.com> Date: Fri, 14 Apr 2023 15:18:38 -0500 Subject: [PATCH 066/113] Cleanup main after cutting new 1.5.latest branch (#407) * Bumping version to 1.5.0rc1 * Clean up changelog on main * pre-commit fixes * update RELEASE_BRANCH env --------- Co-authored-by: Matthew McKnight --- .bumpversion.cfg | 2 +- .changes/1.5.0-b1.md | 16 ------- .changes/1.5.0-b2.md | 5 -- .changes/1.5.0-b3.md | 9 ---- .changes/1.5.0-b4.md | 10 ---- .changes/1.5.0/Features-20221209-105640.yaml | 6 --- .changes/1.5.0/Features-20230127-155317.yaml | 6 --- .changes/1.5.0/Features-20230301-113553.yaml | 6 --- .changes/1.5.0/Features-20230314-124314.yaml | 6 --- .changes/1.5.0/Features-20230314-154621.yaml | 6 --- .changes/1.5.0/Fixes-20230316-132120.yaml | 7 --- .../1.5.0/Under the Hood-20230118-071542.yaml | 8 ---- .../1.5.0/Under the Hood-20230130-171158.yaml | 6 --- .../1.5.0/Under the Hood-20230130-205628.yaml | 6 --- .../1.5.0/Under the Hood-20230223-110314.yaml | 6 --- .../unreleased/Features-20230406-104634.yaml | 6 --- .../unreleased/Fixes-20230407-213725.yaml | 6 --- .../unreleased/Fixes-20230411-143706.yaml | 6 --- .github/workflows/nightly-release.yml | 2 +- CHANGELOG.md | 46 ------------------- dbt/adapters/redshift/__version__.py | 2 +- 21 files changed, 3 insertions(+), 170 deletions(-) delete mode 100644 .changes/1.5.0-b1.md delete mode 100644 .changes/1.5.0-b2.md delete mode 100644 .changes/1.5.0-b3.md delete mode 100644 .changes/1.5.0-b4.md delete mode 100644 .changes/1.5.0/Features-20221209-105640.yaml delete mode 100644 .changes/1.5.0/Features-20230127-155317.yaml delete mode 100644 
.changes/1.5.0/Features-20230301-113553.yaml delete mode 100644 .changes/1.5.0/Features-20230314-124314.yaml delete mode 100644 .changes/1.5.0/Features-20230314-154621.yaml delete mode 100644 .changes/1.5.0/Fixes-20230316-132120.yaml delete mode 100644 .changes/1.5.0/Under the Hood-20230118-071542.yaml delete mode 100644 .changes/1.5.0/Under the Hood-20230130-171158.yaml delete mode 100644 .changes/1.5.0/Under the Hood-20230130-205628.yaml delete mode 100644 .changes/1.5.0/Under the Hood-20230223-110314.yaml delete mode 100644 .changes/unreleased/Features-20230406-104634.yaml delete mode 100644 .changes/unreleased/Fixes-20230407-213725.yaml delete mode 100644 .changes/unreleased/Fixes-20230411-143706.yaml diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 40a1d919e..54240f292 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.5.0b4 +current_version = 1.5.0rc1 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.5.0-b1.md b/.changes/1.5.0-b1.md deleted file mode 100644 index 0f47774a8..000000000 --- a/.changes/1.5.0-b1.md +++ /dev/null @@ -1,16 +0,0 @@ -## dbt-redshift 1.5.0-b1 - February 22, 2023 - -### Features - -- dbt-constraints for redshift ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) -- Stand-alone Python module for RedshiftColumn ([#290](https://github.com/dbt-labs/dbt-redshift/issues/290)) - -### Under the Hood - -- Replace psycopg2 connector with Redshift python connector when connecting to Redshift ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) -- remove tox call to integration tests ([#257](https://github.com/dbt-labs/dbt-redshift/issues/257)) -- Convert Backup Table tests ([#293](https://github.com/dbt-labs/dbt-redshift/issues/293)) - -### Contributors -- [@dave-connors-3](https://github.com/dave-connors-3) ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) -- 
[@sathiish-kumar](https://github.com/sathiish-kumar) ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) diff --git a/.changes/1.5.0-b2.md b/.changes/1.5.0-b2.md deleted file mode 100644 index c46ca4493..000000000 --- a/.changes/1.5.0-b2.md +++ /dev/null @@ -1,5 +0,0 @@ -## dbt-redshift 1.5.0-b2 - March 02, 2023 - -### Under the Hood - -- Rename constraints_enabled to contract ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330)) diff --git a/.changes/1.5.0-b3.md b/.changes/1.5.0-b3.md deleted file mode 100644 index dda1f66d1..000000000 --- a/.changes/1.5.0-b3.md +++ /dev/null @@ -1,9 +0,0 @@ -## dbt-redshift 1.5.0-b3 - March 16, 2023 - -### Features - -- Enforce contracts on models materialized as tables and views ([#319](https://github.com/dbt-labs/dbt-redshift/issues/319), [#340](https://github.com/dbt-labs/dbt-redshift/issues/340)) - -### Fixes - -- Added methods to `RedshiftAdapter` that were inadvertantly dropped when migrating from `PostgresAdapter` to `SQLAdapter` ([#365](https://github.com/dbt-labs/dbt-redshift/issues/365)) diff --git a/.changes/1.5.0-b4.md b/.changes/1.5.0-b4.md deleted file mode 100644 index 5ab44a175..000000000 --- a/.changes/1.5.0-b4.md +++ /dev/null @@ -1,10 +0,0 @@ -## dbt-redshift 1.5.0-b4 - March 30, 2023 - -### Features - -- Use contracted column order in create_table_as ([#356](https://github.com/dbt-labs/dbt-redshift/issues/356)) -- Modify adapter to work with unified constraint fields ([#341](https://github.com/dbt-labs/dbt-redshift/issues/341)) - -### Under the Hood - -- Treat contract config as a python object ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330), [#382](https://github.com/dbt-labs/dbt-redshift/issues/382)) diff --git a/.changes/1.5.0/Features-20221209-105640.yaml b/.changes/1.5.0/Features-20221209-105640.yaml deleted file mode 100644 index ee31dc7b7..000000000 --- a/.changes/1.5.0/Features-20221209-105640.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: 'Support for data 
types constraints in Redshift' -time: 2022-12-09T10:56:40.808781-06:00 -custom: - Author: dave-connors-3 emmyoop - Issue: 227 342 diff --git a/.changes/1.5.0/Features-20230127-155317.yaml b/.changes/1.5.0/Features-20230127-155317.yaml deleted file mode 100644 index c18c00a32..000000000 --- a/.changes/1.5.0/Features-20230127-155317.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Stand-alone Python module for RedshiftColumn -time: 2023-01-27T15:53:17.999882-08:00 -custom: - Author: nssalian - Issue: "290" diff --git a/.changes/1.5.0/Features-20230301-113553.yaml b/.changes/1.5.0/Features-20230301-113553.yaml deleted file mode 100644 index 40cc40ed9..000000000 --- a/.changes/1.5.0/Features-20230301-113553.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Enforce contracts on models materialized as tables and views -time: 2023-03-01T11:35:53.98885-05:00 -custom: - Author: peterallenwebb emmyoop - Issue: 319 340 diff --git a/.changes/1.5.0/Features-20230314-124314.yaml b/.changes/1.5.0/Features-20230314-124314.yaml deleted file mode 100644 index 5141193ff..000000000 --- a/.changes/1.5.0/Features-20230314-124314.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Use contracted column order in create_table_as -time: 2023-03-14T12:43:14.104667-04:00 -custom: - Author: gshank - Issue: "356" diff --git a/.changes/1.5.0/Features-20230314-154621.yaml b/.changes/1.5.0/Features-20230314-154621.yaml deleted file mode 100644 index ac9b75bc2..000000000 --- a/.changes/1.5.0/Features-20230314-154621.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Modify adapter to work with unified constraint fields -time: 2023-03-14T15:46:21.963751-04:00 -custom: - Author: peterallenwebb - Issue: "341" diff --git a/.changes/1.5.0/Fixes-20230316-132120.yaml b/.changes/1.5.0/Fixes-20230316-132120.yaml deleted file mode 100644 index ed36f8a30..000000000 --- a/.changes/1.5.0/Fixes-20230316-132120.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Added methods to 
`RedshiftAdapter` that were inadvertantly dropped when migrating - from `PostgresAdapter` to `SQLAdapter` -time: 2023-03-16T13:21:20.306393-04:00 -custom: - Author: mikealfare - Issue: "365" diff --git a/.changes/1.5.0/Under the Hood-20230118-071542.yaml b/.changes/1.5.0/Under the Hood-20230118-071542.yaml deleted file mode 100644 index afa2f05f6..000000000 --- a/.changes/1.5.0/Under the Hood-20230118-071542.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: Under the Hood -body: Replace psycopg2 connector with Redshift python connector when connecting to - Redshift -time: 2023-01-18T07:15:42.183304-08:00 -custom: - Author: sathiish-kumar - Issue: "219" - PR: "251" diff --git a/.changes/1.5.0/Under the Hood-20230130-171158.yaml b/.changes/1.5.0/Under the Hood-20230130-171158.yaml deleted file mode 100644 index 7f68bac4e..000000000 --- a/.changes/1.5.0/Under the Hood-20230130-171158.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: 'remove tox call to integration tests' -time: 2023-01-30T17:11:58.554584-08:00 -custom: - Author: colin-rogers-dbt - Issue: "257" diff --git a/.changes/1.5.0/Under the Hood-20230130-205628.yaml b/.changes/1.5.0/Under the Hood-20230130-205628.yaml deleted file mode 100644 index c3ec867f5..000000000 --- a/.changes/1.5.0/Under the Hood-20230130-205628.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Convert Backup Table tests -time: 2023-01-30T20:56:28.642573-05:00 -custom: - Author: mikealfare - Issue: "293" diff --git a/.changes/1.5.0/Under the Hood-20230223-110314.yaml b/.changes/1.5.0/Under the Hood-20230223-110314.yaml deleted file mode 100644 index a4f519569..000000000 --- a/.changes/1.5.0/Under the Hood-20230223-110314.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Treat contract config as a python object -time: 2023-02-23T11:03:14.344028-05:00 -custom: - Author: gshank emmyoop - Issue: 330 382 diff --git a/.changes/unreleased/Features-20230406-104634.yaml 
b/.changes/unreleased/Features-20230406-104634.yaml deleted file mode 100644 index 1dd770544..000000000 --- a/.changes/unreleased/Features-20230406-104634.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add support for model-level constraints -time: 2023-04-06T10:46:34.699722-04:00 -custom: - Author: peterallenwebb - Issue: "343" diff --git a/.changes/unreleased/Fixes-20230407-213725.yaml b/.changes/unreleased/Fixes-20230407-213725.yaml deleted file mode 100644 index e83b0e147..000000000 --- a/.changes/unreleased/Fixes-20230407-213725.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix two adapter logging bugs arising from using the redshift cursor object -time: 2023-04-07T21:37:25.873248-07:00 -custom: - Author: versusfacit - Issue: "373" diff --git a/.changes/unreleased/Fixes-20230411-143706.yaml b/.changes/unreleased/Fixes-20230411-143706.yaml deleted file mode 100644 index 799e031e5..000000000 --- a/.changes/unreleased/Fixes-20230411-143706.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Repair accidental change to dev-requirements -time: 2023-04-11T14:37:06.868255-04:00 -custom: - Author: peterallenwebb - Issue: "403" diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml index 54c5fdc69..4762d1218 100644 --- a/.github/workflows/nightly-release.yml +++ b/.github/workflows/nightly-release.yml @@ -26,7 +26,7 @@ defaults: shell: bash env: - RELEASE_BRANCH: "1.4.latest" + RELEASE_BRANCH: "1.5.latest" jobs: aggregate-release-data: diff --git a/CHANGELOG.md b/CHANGELOG.md index aedf8599c..f186f07b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,52 +5,6 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) -## dbt-redshift 1.5.0-b4 - March 30, 2023 - -### Features - -- Use contracted column order in create_table_as ([#356](https://github.com/dbt-labs/dbt-redshift/issues/356)) -- Modify adapter to work with unified constraint fields ([#341](https://github.com/dbt-labs/dbt-redshift/issues/341)) - -### Under the Hood - -- Treat contract config as a python object ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330), [#382](https://github.com/dbt-labs/dbt-redshift/issues/382)) - - - -## dbt-redshift 1.5.0-b3 - March 16, 2023 - -### Features - -- Enforce contracts on models materialized as tables and views ([#319](https://github.com/dbt-labs/dbt-redshift/issues/319), [#340](https://github.com/dbt-labs/dbt-redshift/issues/340)) - -### Fixes - -- Added methods to `RedshiftAdapter` that were inadvertantly dropped when migrating from `PostgresAdapter` to `SQLAdapter` ([#365](https://github.com/dbt-labs/dbt-redshift/issues/365)) - -## dbt-redshift 1.5.0-b2 - March 02, 2023 - -### Under the Hood - -- Rename constraints_enabled to contract ([#330](https://github.com/dbt-labs/dbt-redshift/issues/330)) - -## dbt-redshift 1.5.0-b1 - February 22, 2023 - -### Features - -- dbt-constraints for redshift ([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) -- Stand-alone Python module for RedshiftColumn ([#290](https://github.com/dbt-labs/dbt-redshift/issues/290)) - -### Under the Hood - -- Replace psycopg2 connector with Redshift python connector when connecting to Redshift ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) -- remove tox call to integration tests ([#257](https://github.com/dbt-labs/dbt-redshift/issues/257)) -- Convert Backup Table tests ([#293](https://github.com/dbt-labs/dbt-redshift/issues/293)) - -### Contributors -- [@dave-connors-3](https://github.com/dave-connors-3) 
([#227](https://github.com/dbt-labs/dbt-redshift/issues/227)) -- [@sathiish-kumar](https://github.com/sathiish-kumar) ([#219](https://github.com/dbt-labs/dbt-redshift/issues/219)) - ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 4a7a8147e..fa6c5a1ac 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.5.0b4" +version = "1.5.0rc1" From 2f2f09549273ed93ebf2241fdf285db7e7b22b95 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 18 Apr 2023 11:35:18 -0400 Subject: [PATCH 067/113] [create-pull-request] automated change (#411) Co-authored-by: Github Build Bot --- .bumpversion.cfg | 2 +- .changes/1.6.0-a1.md | 1 + CHANGELOG.md | 4 ++++ dbt/adapters/redshift/__version__.py | 2 +- 4 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 .changes/1.6.0-a1.md diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 54240f292..6880d23fc 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.5.0rc1 +current_version = 1.6.0a1 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.6.0-a1.md b/.changes/1.6.0-a1.md new file mode 100644 index 000000000..57e14b79c --- /dev/null +++ b/.changes/1.6.0-a1.md @@ -0,0 +1 @@ +## dbt-redshift 1.6.0-a1 - April 17, 2023 diff --git a/CHANGELOG.md b/CHANGELOG.md index f186f07b2..5a8ea48d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. 
This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.6.0-a1 - April 17, 2023 + + + ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index fa6c5a1ac..07fc02eef 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.5.0rc1" +version = "1.6.0a1" From e31b4ba506a90e928815ef5231a3ffa678631e05 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Wed, 19 Apr 2023 10:46:34 -0500 Subject: [PATCH 068/113] revert changes to point to 1.4.latest (#412) until 1.5.final is released --- .github/workflows/nightly-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml index 4762d1218..54c5fdc69 100644 --- a/.github/workflows/nightly-release.yml +++ b/.github/workflows/nightly-release.yml @@ -26,7 +26,7 @@ defaults: shell: bash env: - RELEASE_BRANCH: "1.5.latest" + RELEASE_BRANCH: "1.4.latest" jobs: aggregate-release-data: From abc73687fb1998d021b3a3c2b5aa25fd63e7fe46 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Apr 2023 22:14:57 -0700 Subject: [PATCH 069/113] Update pip-tools requirement from ~=6.12 to ~=6.13 (#400) Updates the requirements on [pip-tools](https://github.com/jazzband/pip-tools) to permit the latest version. 
- [Release notes](https://github.com/jazzband/pip-tools/releases) - [Changelog](https://github.com/jazzband/pip-tools/blob/main/CHANGELOG.md) - [Commits](https://github.com/jazzband/pip-tools/compare/6.12.0...6.13.0) --- updated-dependencies: - dependency-name: pip-tools dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 25f4a7690..8f111a652 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -15,7 +15,7 @@ flaky~=3.7 freezegun~=1.2 ipdb~=0.13.13 mypy==1.1.1 # patch updates have historically introduced breaking changes -pip-tools~=6.12 +pip-tools~=6.13 pre-commit~=2.21;python_version=="3.7" pre-commit~=3.2;python_version>="3.8" pre-commit-hooks~=4.4 From f059c25401b09fffa3935c3084b99351d6469569 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 21 Apr 2023 11:41:07 -0400 Subject: [PATCH 070/113] Update pytest requirement from ~=7.2 to ~=7.3 (#414) * upgrading pytest * Add automated changelog yaml from template for bot PR * linter --------- Co-authored-by: Github Build Bot --- .changes/unreleased/Dependencies-20230421-032407.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230421-032407.yaml diff --git a/.changes/unreleased/Dependencies-20230421-032407.yaml b/.changes/unreleased/Dependencies-20230421-032407.yaml new file mode 100644 index 000000000..5b08ed592 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230421-032407.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: "Update pytest requirement from ~=7.2 to ~=7.3" +time: 2023-04-21T03:24:07.00000Z +custom: + Author: mikealfare + PR: 414 diff --git a/dev-requirements.txt b/dev-requirements.txt index 
8f111a652..839b5fc8a 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -19,7 +19,7 @@ pip-tools~=6.13 pre-commit~=2.21;python_version=="3.7" pre-commit~=3.2;python_version>="3.8" pre-commit-hooks~=4.4 -pytest~=7.2 +pytest~=7.3 pytest-csv~=3.0 pytest-dotenv~=0.5.2 pytest-logbook~=1.2 From ed8983bea0608dac2a027b03756acb0f08f5444d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Apr 2023 12:14:16 -0400 Subject: [PATCH 071/113] Bump mypy from 1.1.1 to 1.2.0 (#396) Bumps [mypy](https://github.com/python/mypy) from 1.1.1 to 1.2.0. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v1.1.1...v1.2.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 839b5fc8a..0dc7d0940 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -14,7 +14,7 @@ flake8~=6.0;python_version>="3.8" flaky~=3.7 freezegun~=1.2 ipdb~=0.13.13 -mypy==1.1.1 # patch updates have historically introduced breaking changes +mypy==1.2.0 # patch updates have historically introduced breaking changes pip-tools~=6.13 pre-commit~=2.21;python_version=="3.7" pre-commit~=3.2;python_version>="3.8" From be47abae66612a3ca0565b320ea27b127e3f195f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Apr 2023 12:42:09 -0400 Subject: [PATCH 072/113] Update types-pytz requirement from ~=2023.2 to ~=2023.3 (#391) Updates the requirements on [types-pytz](https://github.com/python/typeshed) to permit the latest version. 
- [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pytz dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 0dc7d0940..7877e3d54 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -27,7 +27,7 @@ pytest-xdist~=3.2 pytz~=2023.2 tox~=3.0;python_version=="3.7" tox~=4.4;python_version>="3.8" -types-pytz~=2023.2 +types-pytz~=2023.3 types-requests~=2.28 twine~=4.0 wheel~=0.40 From c811a1da3743c9b7d179975baa357f225164a444 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Apr 2023 13:45:33 -0400 Subject: [PATCH 073/113] Update pytz requirement from ~=2023.2 to ~=2023.3 (#390) Updates the requirements on [pytz](https://github.com/stub42/pytz) to permit the latest version. - [Release notes](https://github.com/stub42/pytz/releases) - [Commits](https://github.com/stub42/pytz/compare/release_2023.2...release_2023.3) --- updated-dependencies: - dependency-name: pytz dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 7877e3d54..c7bede5b7 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -24,7 +24,7 @@ pytest-csv~=3.0 pytest-dotenv~=0.5.2 pytest-logbook~=1.2 pytest-xdist~=3.2 -pytz~=2023.2 +pytz~=2023.3 tox~=3.0;python_version=="3.7" tox~=4.4;python_version>="3.8" types-pytz~=2023.3 From e1784596a1fd2a515d3a4f0d32b878e87255ab32 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Apr 2023 14:02:06 -0400 Subject: [PATCH 074/113] Update black requirement from ~=23.1 to ~=23.3 (#389) Updates the requirements on [black](https://github.com/psf/black) to permit the latest version. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.1.0...23.3.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index c7bede5b7..22fc91a9f 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -6,7 +6,7 @@ git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-postgres&subdirectory=plugi # if version 1.x or greater -> pin to major version # if version 0.x -> pin to minor -black~=23.1 +black~=23.3 bumpversion~=0.6.0 click~=8.1 flake8~=5.0;python_version=="3.7" From aa98c958981fe112079e55b5831daf32b45b39f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Apr 2023 20:49:48 -0400 Subject: [PATCH 075/113] Update tox requirement from ~=3.0 to ~=4.5 (#416) Updates the requirements on [tox](https://github.com/tox-dev/tox) to permit the latest version. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/commits/4.5.0) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 22fc91a9f..963e64bbc 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -26,7 +26,7 @@ pytest-logbook~=1.2 pytest-xdist~=3.2 pytz~=2023.3 tox~=3.0;python_version=="3.7" -tox~=4.4;python_version>="3.8" +tox~=4.5;python_version>="3.8" types-pytz~=2023.3 types-requests~=2.28 twine~=4.0 From b644963a6bf7d3e64bd71e6ee97a205900089db1 Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Fri, 28 Apr 2023 16:37:56 -0700 Subject: [PATCH 076/113] Fix for #419: dbt-redshift 1.5 does not work with non-standard redshift hostnames (#420) * Fix for region parsing in host name.WIP * Edge cases and cleanup * Minor nits * clean up aws regions invocation * Log message and doc nits * Changie entry * Skip validation if aws regions are not determined * move the regions var earlier to effectively cache --- .../unreleased/Fixes-20230428-142321.yaml | 6 ++ dbt/adapters/redshift/connections.py | 55 ++++++++++- tests/unit/test_redshift_adapter.py | 95 ++++++++++++++++++- 3 files changed, 149 insertions(+), 7 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230428-142321.yaml diff --git a/.changes/unreleased/Fixes-20230428-142321.yaml b/.changes/unreleased/Fixes-20230428-142321.yaml new file mode 100644 index 000000000..c7fefda43 --- /dev/null +++ b/.changes/unreleased/Fixes-20230428-142321.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Adding region as independent param in profiles +time: 2023-04-28T14:23:21.041865-07:00 +custom: + Author: nssalian + Issue: "419" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 9e830cb53..59058f53b 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -7,6 +7,8 @@ import agate import sqlparse import 
redshift_connector +import urllib.request +import json from redshift_connector.utils.oids import get_datatype_name from dbt.adapters.sql import SQLConnectionManager @@ -17,13 +19,10 @@ from dbt.dataclass_schema import FieldEncoder, dbtClassMixin, StrEnum from dbt.helper_types import Port - logger = AdapterLogger("Redshift") - drop_lock: Lock = dbt.flags.MP_CONTEXT.Lock() # type: ignore - IAMDuration = NewType("IAMDuration", int) @@ -36,6 +35,23 @@ def json_schema(self): dbtClassMixin.register_field_encoders({IAMDuration: IAMDurationEncoder()}) +def _get_aws_regions(): + # Extract the prefixes from the AWS IP ranges JSON to determine the available regions + url = "https://ip-ranges.amazonaws.com/ip-ranges.json" + response = urllib.request.urlopen(url) + data = json.loads(response.read().decode()) + regions = set() + + for prefix in data["prefixes"]: + if prefix["service"] == "AMAZON": + regions.add(prefix["region"]) + + return regions + + +_AVAILABLE_AWS_REGIONS = _get_aws_regions() + + class RedshiftConnectionMethod(StrEnum): DATABASE = "database" IAM = "iam" @@ -60,6 +76,7 @@ class RedshiftCredentials(Credentials): role: Optional[str] = None sslmode: Optional[str] = None retries: int = 1 + region: Optional[str] = None # if not provided, will be determined from host _ALIASES = {"dbname": "database", "pass": "password"} @@ -78,6 +95,7 @@ def _connection_keys(self): "cluster_id", "iam_profile", "sslmode", + "region", ) @property @@ -85,6 +103,13 @@ def unique_field(self) -> str: return self.host +def _is_valid_region(region): + if region is None or len(region) == 0: + logger.warning("Couldn't determine AWS regions. 
Skipping validation to avoid blocking.") + return True + return region in _AVAILABLE_AWS_REGIONS + + class RedshiftConnectMethodFactory: credentials: RedshiftCredentials @@ -99,9 +124,27 @@ def get_connect_method(self): "port": self.credentials.port if self.credentials.port else 5439, "auto_create": self.credentials.autocreate, "db_groups": self.credentials.db_groups, - "region": self.credentials.host.split(".")[2], + "region": self.credentials.region, "timeout": self.credentials.connect_timeout, } + if kwargs["region"] is None: + logger.debug("No region provided, attempting to determine from host.") + try: + region_value = self.credentials.host.split(".")[2] + except IndexError: + raise dbt.exceptions.FailedToConnectError( + "No region provided and unable to determine region from host: " + "{}".format(self.credentials.host) + ) + + kwargs["region"] = region_value + + # Validate the set region + if not _is_valid_region(kwargs["region"]): + raise dbt.exceptions.FailedToConnectError( + "Invalid region provided: {}".format(kwargs["region"]) + ) + if self.credentials.sslmode: kwargs["sslmode"] = self.credentials.sslmode @@ -117,7 +160,9 @@ def get_connect_method(self): def connect(): logger.debug("Connecting to redshift with username/password based auth...") c = redshift_connector.connect( - user=self.credentials.user, password=self.credentials.password, **kwargs + user=self.credentials.user, + password=self.credentials.password, + **kwargs, ) if self.credentials.role: c.cursor().execute("set role {}".format(self.credentials.role)) diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index 27bcd98f8..25f3b3d73 100644 --- a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -97,7 +97,9 @@ def test_explicit_database_conn(self): @mock.patch("redshift_connector.connect", Mock()) def test_explicit_iam_conn_without_profile(self): self.config.credentials = self.config.credentials.replace( - method="iam", 
cluster_id="my_redshift", host="thishostshouldnotexist.test.us-east-1" + method="iam", + cluster_id="my_redshift", + host="thishostshouldnotexist.test.us-east-1", ) connection = self.adapter.acquire_connection("dummy") connection.handle @@ -171,6 +173,92 @@ def test_explicit_iam_serverless_with_profile(self): port=5439, ) + @mock.patch("redshift_connector.connect", Mock()) + @mock.patch("boto3.Session", Mock()) + def test_explicit_region(self): + # Successful test + self.config.credentials = self.config.credentials.replace( + method="iam", + iam_profile="test", + host="doesnotexist.1233.redshift-serverless.amazonaws.com", + region="us-east-2", + ) + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + iam=True, + host="doesnotexist.1233.redshift-serverless.amazonaws.com", + database="redshift", + cluster_identifier=None, + region="us-east-2", + auto_create=False, + db_groups=[], + db_user="root", + password="", + user="", + profile="test", + timeout=30, + port=5439, + ) + + @mock.patch("redshift_connector.connect", Mock()) + @mock.patch("boto3.Session", Mock()) + def test_explicit_region_failure(self): + # Failure test with no region + self.config.credentials = self.config.credentials.replace( + method="iam", + iam_profile="test", + host="doesnotexist.1233_no_region", + region=None, + ) + + with self.assertRaises(dbt.exceptions.FailedToConnectError): + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + iam=True, + host="doesnotexist.1233_no_region", + database="redshift", + cluster_identifier=None, + auto_create=False, + db_groups=[], + db_user="root", + password="", + user="", + profile="test", + timeout=30, + port=5439, + ) + + @mock.patch("redshift_connector.connect", Mock()) + @mock.patch("boto3.Session", Mock()) + def test_explicit_invalid_region(self): + # Invalid region test + self.config.credentials = 
self.config.credentials.replace( + method="iam", + iam_profile="test", + host="doesnotexist.1233_no_region.us-not-a-region-1", + region=None, + ) + + with self.assertRaises(dbt.exceptions.FailedToConnectError): + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + iam=True, + host="doesnotexist.1233_no_region", + database="redshift", + cluster_identifier=None, + auto_create=False, + db_groups=[], + db_user="root", + password="", + user="", + profile="test", + timeout=30, + port=5439, + ) + @mock.patch("redshift_connector.connect", Mock()) @mock.patch("boto3.Session", Mock()) def test_serverless_iam_failure(self): @@ -263,7 +351,10 @@ def test_cancel_open_connections_single(self): self.assertEqual(len(list(self.adapter.cancel_open_connections())), 1) add_query.assert_has_calls( - [call("select pg_backend_pid()"), call("select pg_terminate_backend(42)")] + [ + call("select pg_backend_pid()"), + call("select pg_terminate_backend(42)"), + ] ) master.handle.get_backend_pid.assert_not_called() From 2c52b3071362eb198c8bfe25ffe062387cbbb229 Mon Sep 17 00:00:00 2001 From: David Bloss Date: Fri, 5 May 2023 14:04:35 -0500 Subject: [PATCH 077/113] update used gh actions ahead of set-output, node12 deprecation (#418) --- .github/workflows/integration.yml | 16 ++++++++-------- .github/workflows/main.yml | 12 ++++++------ 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 3cf5ff711..88a568ea4 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -71,13 +71,13 @@ jobs: steps: - name: Check out the repository (non-PR) if: github.event_name != 'pull_request_target' - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: Check out the repository (PR) if: github.event_name == 'pull_request_target' - uses: actions/checkout@v2 + uses: 
actions/checkout@v3 with: persist-credentials: false ref: ${{ github.event.pull_request.head.sha }} @@ -102,7 +102,7 @@ jobs: - name: Generate integration test matrix id: generate-matrix - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CHANGES: ${{ steps.get-changes.outputs.changes }} with: @@ -141,7 +141,7 @@ jobs: steps: - name: Check out the repository if: github.event_name != 'pull_request_target' - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false @@ -149,7 +149,7 @@ jobs: # this is necessary for the `pull_request_target` event - name: Check out the repository (PR) if: github.event_name == 'pull_request_target' - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false ref: ${{ github.event.pull_request.head.sha }} @@ -185,7 +185,7 @@ jobs: DBT_TEST_USER_3: dbt_test_user_3 run: tox - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 if: always() with: name: logs @@ -196,7 +196,7 @@ jobs: id: date run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 if: always() with: name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }}.csv @@ -232,7 +232,7 @@ jobs: steps: - name: Posting scheduled run failures - uses: ravsamhq/notify-slack-action@v1 + uses: ravsamhq/notify-slack-action@v2 if: ${{ github.event_name == 'schedule' }} with: notification_title: 'Redshift nightly integration test failed' diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 909766cc1..c70434111 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -43,7 +43,7 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false @@ -80,7 +80,7 @@ jobs: steps: - name: Check out the repository - 
uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false @@ -104,7 +104,7 @@ jobs: id: date run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 if: always() with: name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv @@ -120,7 +120,7 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false @@ -156,7 +156,7 @@ jobs: if [[ "$(ls -lh dist/)" == *"a1"* ]]; then export is_alpha=1; fi echo "::set-output name=is_alpha::$is_alpha" - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: name: dist path: dist/ @@ -188,7 +188,7 @@ jobs: python -m pip install --upgrade wheel python -m pip --version - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v3 with: name: dist path: dist/ From 52a666eaf1592e6bf87a01501a1fa444c1cd2a52 Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Tue, 9 May 2023 16:59:47 -0700 Subject: [PATCH 078/113] Fix execute signature based on core (#435) --- .changes/unreleased/Fixes-20230509-143721.yaml | 6 ++++++ dbt/adapters/redshift/connections.py | 8 ++++++-- tests/unit/test_redshift_adapter.py | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230509-143721.yaml diff --git a/.changes/unreleased/Fixes-20230509-143721.yaml b/.changes/unreleased/Fixes-20230509-143721.yaml new file mode 100644 index 000000000..6d80abf7c --- /dev/null +++ b/.changes/unreleased/Fixes-20230509-143721.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Update signature for execute method +time: 2023-05-09T14:37:21.163869-07:00 +custom: + Author: nssalian + Issue: ''' ''' diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 59058f53b..c1e55d2fa 100644 --- a/dbt/adapters/redshift/connections.py 
+++ b/dbt/adapters/redshift/connections.py @@ -299,12 +299,16 @@ def exponential_backoff(attempt: int): ) def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False + self, + sql: str, + auto_begin: bool = False, + fetch: bool = False, + limit: Optional[int] = None, ) -> Tuple[AdapterResponse, agate.Table]: _, cursor = self.add_query(sql, auto_begin) response = self.get_response(cursor) if fetch: - table = self.get_result_from_cursor(cursor) + table = self.get_result_from_cursor(cursor, limit) else: table = dbt.clients.agate_helper.empty_table() return response, table diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index 25f3b3d73..c90a53dfd 100644 --- a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -408,7 +408,7 @@ def test_execute_with_fetch(self): mock_get_result_from_cursor.return_value = table self.adapter.connections.execute(sql="select * from test", fetch=True) mock_add_query.assert_called_once_with("select * from test", False) - mock_get_result_from_cursor.assert_called_once_with(cursor) + mock_get_result_from_cursor.assert_called_once_with(cursor, None) mock_get_response.assert_called_once_with(cursor) def test_execute_without_fetch(self): From 2e97dcbe9fcb392923d0bbc32b5022b78797b67f Mon Sep 17 00:00:00 2001 From: Github Build Bot Date: Fri, 12 May 2023 19:02:40 +0000 Subject: [PATCH 079/113] Bumping version to 1.6.0b1 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.6.0-b1.md | 10 ++++++++++ .../Dependencies-20230421-032407.yaml | 0 .../Fixes-20230428-142321.yaml | 0 .../Fixes-20230509-143721.yaml | 0 CHANGELOG.md | 13 ++++++++++++- dbt/adapters/redshift/__version__.py | 2 +- 7 files changed, 24 insertions(+), 3 deletions(-) create mode 100644 .changes/1.6.0-b1.md rename .changes/{unreleased => 1.6.0}/Dependencies-20230421-032407.yaml (100%) rename .changes/{unreleased => 1.6.0}/Fixes-20230428-142321.yaml (100%) rename .changes/{unreleased => 
1.6.0}/Fixes-20230509-143721.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 6880d23fc..f24dacddf 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.6.0a1 +current_version = 1.6.0b1 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.6.0-b1.md b/.changes/1.6.0-b1.md new file mode 100644 index 000000000..07e7c693a --- /dev/null +++ b/.changes/1.6.0-b1.md @@ -0,0 +1,10 @@ +## dbt-redshift 1.6.0-b1 - May 12, 2023 + +### Fixes + +- Adding region as independent param in profiles ([#419](https://github.com/dbt-labs/dbt-redshift/issues/419)) +- Update signature for execute method ([#'](https://github.com/dbt-labs/dbt-redshift/issues/'), [#'](https://github.com/dbt-labs/dbt-redshift/issues/')) + +### Dependencies + +- Update pytest requirement from ~=7.2 to ~=7.3 ([#414](https://github.com/dbt-labs/dbt-redshift/pull/414)) diff --git a/.changes/unreleased/Dependencies-20230421-032407.yaml b/.changes/1.6.0/Dependencies-20230421-032407.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20230421-032407.yaml rename to .changes/1.6.0/Dependencies-20230421-032407.yaml diff --git a/.changes/unreleased/Fixes-20230428-142321.yaml b/.changes/1.6.0/Fixes-20230428-142321.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230428-142321.yaml rename to .changes/1.6.0/Fixes-20230428-142321.yaml diff --git a/.changes/unreleased/Fixes-20230509-143721.yaml b/.changes/1.6.0/Fixes-20230509-143721.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230509-143721.yaml rename to .changes/1.6.0/Fixes-20230509-143721.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 5a8ea48d0..563b9a61c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,10 +5,21 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. 
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) -## dbt-redshift 1.6.0-a1 - April 17, 2023 +## dbt-redshift 1.6.0-b1 - May 12, 2023 + +### Fixes + +- Adding region as independent param in profiles ([#419](https://github.com/dbt-labs/dbt-redshift/issues/419)) +- Update signature for execute method ([#'](https://github.com/dbt-labs/dbt-redshift/issues/'), [#'](https://github.com/dbt-labs/dbt-redshift/issues/')) + +### Dependencies +- Update pytest requirement from ~=7.2 to ~=7.3 ([#414](https://github.com/dbt-labs/dbt-redshift/pull/414)) + +## dbt-redshift 1.6.0-a1 - April 17, 2023 + ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 07fc02eef..cafa91966 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.6.0a1" +version = "1.6.0b1" From b6ea1d699e4d9ec638d3883c315825f5b166541e Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Mon, 15 May 2023 09:28:59 -0500 Subject: [PATCH 080/113] more github deprecations fixes (#440) * more github deprecations fixes * swap out abandonded action --- .github/workflows/bot-changelog.yml | 2 +- .github/workflows/triage-labels.yml | 13 +++++-------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/.github/workflows/bot-changelog.yml b/.github/workflows/bot-changelog.yml index 8122ab8b4..9938c51e5 100644 --- a/.github/workflows/bot-changelog.yml +++ b/.github/workflows/bot-changelog.yml @@ -49,7 +49,7 @@ jobs: - name: Create and commit changelog on bot PR if: "contains(github.event.pull_request.labels.*.name, ${{ 
matrix.label }})" id: bot_changelog - uses: emmyoop/changie_bot@v1.0 + uses: emmyoop/changie_bot@v1.1.0 with: GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }} commit_author_name: "Github Build Bot" diff --git a/.github/workflows/triage-labels.yml b/.github/workflows/triage-labels.yml index a71dc5e1f..97649a522 100644 --- a/.github/workflows/triage-labels.yml +++ b/.github/workflows/triage-labels.yml @@ -23,11 +23,8 @@ permissions: jobs: triage_label: - if: contains(github.event.issue.labels.*.name, 'awaiting_response') - runs-on: ubuntu-latest - steps: - - name: initial labeling - uses: andymckay/labeler@master - with: - add-labels: "triage" - remove-labels: "awaiting_response" + uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main + with: + add_label: "triage" + remove_label: "awaiting_response" + secrets: inherit # this is only acceptable because we own the action we're calling From 272a19e59d0567d2306efb18d99dd867335d692c Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 16 May 2023 10:07:37 -0500 Subject: [PATCH 081/113] updating set-output (#450) * updating set-output * add back run, move comment --- .github/workflows/integration.yml | 3 ++- .github/workflows/main.yml | 7 ++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 88a568ea4..07631e4e2 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -194,7 +194,8 @@ jobs: - name: Get current date if: always() id: date - run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts + run: | + echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts - uses: actions/upload-artifact@v3 if: always() diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c70434111..9137fe91d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -102,7 +102,9 @@ jobs: - name: Get 
current date if: always() id: date - run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts + #no colons allowed for artifacts + run: | + echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT - uses: actions/upload-artifact@v3 if: always() @@ -154,8 +156,7 @@ jobs: run: | export is_alpha=0 if [[ "$(ls -lh dist/)" == *"a1"* ]]; then export is_alpha=1; fi - echo "::set-output name=is_alpha::$is_alpha" - + echo "is_alpha=$is_alpha" >> $GITHUB_OUTPUT - uses: actions/upload-artifact@v3 with: name: dist From a406b3a1b0f54db68088ec53f5369362b32b4605 Mon Sep 17 00:00:00 2001 From: David Bloss Date: Wed, 17 May 2023 15:46:40 -0500 Subject: [PATCH 082/113] update used gh actions ahead of node12 deprecation (#453) Co-authored-by: Emily Rockman --- .github/workflows/backport.yml | 2 +- .github/workflows/bot-changelog.yml | 2 +- .github/workflows/integration.yml | 2 +- .github/workflows/main.yml | 9 +++++---- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index d5c7fffed..46f240347 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -35,6 +35,6 @@ jobs: github.event.pull_request.merged && contains(github.event.label.name, 'backport') steps: - - uses: tibdex/backport@v2.0.2 + - uses: tibdex/backport@v2 with: github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/bot-changelog.yml b/.github/workflows/bot-changelog.yml index 9938c51e5..94498d25f 100644 --- a/.github/workflows/bot-changelog.yml +++ b/.github/workflows/bot-changelog.yml @@ -49,7 +49,7 @@ jobs: - name: Create and commit changelog on bot PR if: "contains(github.event.pull_request.labels.*.name, ${{ matrix.label }})" id: bot_changelog - uses: emmyoop/changie_bot@v1.1.0 + uses: emmyoop/changie_bot@v1 with: GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }} commit_author_name: "Github Build Bot" diff --git a/.github/workflows/integration.yml 
b/.github/workflows/integration.yml index 07631e4e2..e68fe4966 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -155,7 +155,7 @@ jobs: ref: ${{ github.event.pull_request.head.sha }} - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9137fe91d..a668a50d8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -48,7 +48,7 @@ jobs: persist-credentials: false - name: Set up Python - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: '3.8' @@ -85,7 +85,7 @@ jobs: persist-credentials: false - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -127,7 +127,7 @@ jobs: persist-credentials: false - name: Set up Python - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: '3.8' @@ -157,6 +157,7 @@ jobs: export is_alpha=0 if [[ "$(ls -lh dist/)" == *"a1"* ]]; then export is_alpha=1; fi echo "is_alpha=$is_alpha" >> $GITHUB_OUTPUT + - uses: actions/upload-artifact@v3 with: name: dist @@ -179,7 +180,7 @@ jobs: steps: - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} From d13a376ac6ea538eaed77a5ce22a440a690157b3 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Tue, 23 May 2023 16:26:51 -0700 Subject: [PATCH 083/113] Add autocommit feature to let dbt users run certain db commands in macros (#458) * Add a fix. Add a test. 
* Add changelog * Fix test profile * Fix spelling of Redshift * Update changelog --------- Co-authored-by: Mila Page Co-authored-by: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> --- .../unreleased/Fixes-20230520-043039.yaml | 6 ++ dbt/adapters/redshift/connections.py | 5 ++ tests/functional/adapter/test_autocommit.py | 60 +++++++++++++++++++ 3 files changed, 71 insertions(+) create mode 100644 .changes/unreleased/Fixes-20230520-043039.yaml create mode 100644 tests/functional/adapter/test_autocommit.py diff --git a/.changes/unreleased/Fixes-20230520-043039.yaml b/.changes/unreleased/Fixes-20230520-043039.yaml new file mode 100644 index 000000000..1cb97526e --- /dev/null +++ b/.changes/unreleased/Fixes-20230520-043039.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Add a new connection param to reenable certain Redshift commands in macros. +time: 2023-05-20T04:30:39.358755-07:00 +custom: + Author: versusfacit + Issue: "463" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index c1e55d2fa..5cea2c736 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -77,6 +77,7 @@ class RedshiftCredentials(Credentials): sslmode: Optional[str] = None retries: int = 1 region: Optional[str] = None # if not provided, will be determined from host + autocommit: Optional[bool] = False _ALIASES = {"dbname": "database", "pass": "password"} @@ -164,6 +165,8 @@ def connect(): password=self.credentials.password, **kwargs, ) + if self.credentials.autocommit: + c.autocommit = True if self.credentials.role: c.cursor().execute("set role {}".format(self.credentials.role)) return c @@ -186,6 +189,8 @@ def connect(): profile=self.credentials.iam_profile, **kwargs, ) + if self.credentials.autocommit: + c.autocommit = True if self.credentials.role: c.cursor().execute("set role {}".format(self.credentials.role)) return c diff --git a/tests/functional/adapter/test_autocommit.py 
b/tests/functional/adapter/test_autocommit.py new file mode 100644 index 000000000..78f33e0f7 --- /dev/null +++ b/tests/functional/adapter/test_autocommit.py @@ -0,0 +1,60 @@ +import os +import pytest + +from dbt.tests.util import run_dbt_and_capture + +_MACROS__CREATE_DB = """ +{% macro create_db_fake() %} + +{% set database = "db_for_test__do_delete_if_you_see_this" %} + +{# IF NOT EXISTS not avaiable but Redshift merely returns an error for trying to overwrite #} +{% set create_command %} + CREATE DATABASE {{ database }} +{% endset %} + +{{ log(create_command, info=True) }} + +{% do run_query(create_command) %} + +{{ log("Created redshift database " ~ database, info=True) }} + +{% endmacro %} +""" + + +class TestAutocommitWorksWithTransactionBlocks: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__CREATE_DB} + + @pytest.fixture(scope="class") + def dbt_profile_target(self): + return { + "type": "redshift", + "threads": 1, + "retries": 6, + "host": os.getenv("REDSHIFT_TEST_HOST"), + "port": int(os.getenv("REDSHIFT_TEST_PORT")), + "user": os.getenv("REDSHIFT_TEST_USER"), + "pass": os.getenv("REDSHIFT_TEST_PASS"), + "dbname": os.getenv("REDSHIFT_TEST_DBNAME"), + "autocommit": True, + } + + def test_autocommit_allows_for_more_commands(self, project): + """Scenario: user has autocommit=True in their target to run macros with normally + forbidden commands like CREATE DATABASE and VACUUM""" + result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) + assert "CREATE DATABASE cannot run inside a transaction block" not in out + + +class TestTransactionBlocksPreventCertainCommands: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__CREATE_DB} + + def test_normally_create_db_disallowed(self, project): + """Monitor if status quo in Redshift connector changes""" + result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) + assert "CREATE 
DATABASE cannot run inside a transaction block" in out From c12b625cbb952a6b63fda5223ca08128700efbf8 Mon Sep 17 00:00:00 2001 From: Jessie Chen <121250701+jiezhen-chen@users.noreply.github.com> Date: Wed, 24 May 2023 12:02:23 -0700 Subject: [PATCH 084/113] Default connect_timeout to None (#433) * change connect_timeout to defaulted None * update test_redshift_adapter to pass new changes * test that connect_timeout parameter is called with the right value * add new changelog --------- Co-authored-by: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> --- .../unreleased/Fixes-20230508-094834.yaml | 6 ++++ dbt/adapters/redshift/connections.py | 2 +- tests/unit/test_redshift_adapter.py | 35 ++++++++++++++----- 3 files changed, 33 insertions(+), 10 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230508-094834.yaml diff --git a/.changes/unreleased/Fixes-20230508-094834.yaml b/.changes/unreleased/Fixes-20230508-094834.yaml new file mode 100644 index 000000000..378c428e3 --- /dev/null +++ b/.changes/unreleased/Fixes-20230508-094834.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix redshift_connector issue of timing out after 30s +time: 2023-05-08T09:48:34.019843-07:00 +custom: + Author: jiezhen-chen + Issue: "427" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 5cea2c736..1a1e7353b 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -72,7 +72,7 @@ class RedshiftCredentials(Credentials): autocreate: bool = False db_groups: List[str] = field(default_factory=list) ra3_node: Optional[bool] = False - connect_timeout: int = 30 + connect_timeout: Optional[int] = None role: Optional[str] = None sslmode: Optional[str] = None retries: int = 1 diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index c90a53dfd..9a43c1ce3 100644 --- a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -72,7 +72,7 @@ def 
test_implicit_database_conn(self): port=5439, auto_create=False, db_groups=[], - timeout=30, + timeout=None, region="us-east-1", ) @@ -91,7 +91,7 @@ def test_explicit_database_conn(self): auto_create=False, db_groups=[], region="us-east-1", - timeout=30, + timeout=None, ) @mock.patch("redshift_connector.connect", Mock()) @@ -115,10 +115,27 @@ def test_explicit_iam_conn_without_profile(self): auto_create=False, db_groups=[], profile=None, - timeout=30, + timeout=None, port=5439, ) + @mock.patch("redshift_connector.connect", Mock()) + def test_conn_timeout_30(self): + self.config.credentials = self.config.credentials.replace(connect_timeout=30) + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", + port=5439, + auto_create=False, + db_groups=[], + region="us-east-1", + timeout=30, + ) + @mock.patch("redshift_connector.connect", Mock()) @mock.patch("boto3.Session", Mock()) def test_explicit_iam_conn_with_profile(self): @@ -143,7 +160,7 @@ def test_explicit_iam_conn_with_profile(self): password="", user="", profile="test", - timeout=30, + timeout=None, port=5439, ) @@ -169,7 +186,7 @@ def test_explicit_iam_serverless_with_profile(self): password="", user="", profile="test", - timeout=30, + timeout=None, port=5439, ) @@ -197,7 +214,7 @@ def test_explicit_region(self): password="", user="", profile="test", - timeout=30, + timeout=None, port=5439, ) @@ -226,7 +243,7 @@ def test_explicit_region_failure(self): password="", user="", profile="test", - timeout=30, + timeout=None, port=5439, ) @@ -255,7 +272,7 @@ def test_explicit_invalid_region(self): password="", user="", profile="test", - timeout=30, + timeout=None, port=5439, ) @@ -283,7 +300,7 @@ def test_serverless_iam_failure(self): user="", profile="test", port=5439, - timeout=30, + timeout=None, ) self.assertTrue("'host' must be 
provided" in context.exception.msg) From adb44828fefff91dd6e97ee20ee8fb7e395a2ece Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Wed, 24 May 2023 23:52:43 -0600 Subject: [PATCH 085/113] Escape `%` symbols in table/view/column comments (#466) * Use tests with table/view/column-level comments * Changelog entry * escape % until the underlying issue is fixed in redshift_connector * Revert "Use tests with table/view/column-level comments" This reverts commit 1b41d8c479004194da7eb29ced98577c80f24d0a. --- .changes/unreleased/Fixes-20230524-151825.yaml | 6 ++++++ dbt/include/redshift/macros/adapters.sql | 18 ++++++++++++++++++ 2 files changed, 24 insertions(+) create mode 100644 .changes/unreleased/Fixes-20230524-151825.yaml diff --git a/.changes/unreleased/Fixes-20230524-151825.yaml b/.changes/unreleased/Fixes-20230524-151825.yaml new file mode 100644 index 000000000..bed2f78e4 --- /dev/null +++ b/.changes/unreleased/Fixes-20230524-151825.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Escape `%` symbols in table/view/column comments +time: 2023-05-24T15:18:25.834088-06:00 +custom: + Author: dbeatty10 + Issue: "441" diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index 7adf3a077..aae87489f 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -292,3 +292,21 @@ {% endif %} {% endmacro %} + +{# + By using dollar-quoting like this, users can embed anything they want into their comments + (including nested dollar-quoting), as long as they do not use this exact dollar-quoting + label. It would be nice to just pick a new one but eventually you do have to give up. 
+#} +{% macro postgres_escape_comment(comment) -%} + {% if comment is not string %} + {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %} + {% endif %} + {%- set magic = '$dbt_comment_literal_block$' -%} + {%- if magic in comment -%} + {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%} + {%- endif -%} + {#- -- escape % until the underlying issue is fixed in redshift_connector -#} + {%- set comment = comment|replace("%", "%%") -%} + {{ magic }}{{ comment }}{{ magic }} +{%- endmacro %} From 1b4cf0c77c631472830ec0dc4dca940316f84b8b Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Thu, 25 May 2023 07:56:36 -0600 Subject: [PATCH 086/113] Use smaller batch size for seeds (#468) Co-authored-by: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> --- .changes/unreleased/Fixes-20230524-165236.yaml | 6 ++++++ .../redshift/macros/materializations/seeds/helpers.sql | 3 +++ 2 files changed, 9 insertions(+) create mode 100644 .changes/unreleased/Fixes-20230524-165236.yaml create mode 100644 dbt/include/redshift/macros/materializations/seeds/helpers.sql diff --git a/.changes/unreleased/Fixes-20230524-165236.yaml b/.changes/unreleased/Fixes-20230524-165236.yaml new file mode 100644 index 000000000..083191405 --- /dev/null +++ b/.changes/unreleased/Fixes-20230524-165236.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Use smaller default batch size for seeds +time: 2023-05-24T16:52:36.915348-06:00 +custom: + Author: dbeatty10 + Issue: "347" diff --git a/dbt/include/redshift/macros/materializations/seeds/helpers.sql b/dbt/include/redshift/macros/materializations/seeds/helpers.sql new file mode 100644 index 000000000..34f83548a --- /dev/null +++ b/dbt/include/redshift/macros/materializations/seeds/helpers.sql @@ -0,0 +1,3 @@ +{% macro redshift__get_batch_size() %} + {{ return(500) }} +{% endmacro %} From fdbf78a65323ac7108f8caa1f23ec3bf81c21c3e Mon Sep 17 
00:00:00 2001 From: Github Build Bot Date: Thu, 25 May 2023 16:23:05 +0000 Subject: [PATCH 087/113] Bumping version to 1.6.0b2 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.6.0-b2.md | 11 +++++++++++ .../Fixes-20230508-094834.yaml | 0 .../Fixes-20230520-043039.yaml | 0 .../Fixes-20230524-151825.yaml | 0 .../Fixes-20230524-165236.yaml | 0 CHANGELOG.md | 15 +++++++++++++-- dbt/adapters/redshift/__version__.py | 2 +- 8 files changed, 26 insertions(+), 4 deletions(-) create mode 100644 .changes/1.6.0-b2.md rename .changes/{unreleased => 1.6.0}/Fixes-20230508-094834.yaml (100%) rename .changes/{unreleased => 1.6.0}/Fixes-20230520-043039.yaml (100%) rename .changes/{unreleased => 1.6.0}/Fixes-20230524-151825.yaml (100%) rename .changes/{unreleased => 1.6.0}/Fixes-20230524-165236.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f24dacddf..581aad340 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.6.0b1 +current_version = 1.6.0b2 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.6.0-b2.md b/.changes/1.6.0-b2.md new file mode 100644 index 000000000..d0f1d2871 --- /dev/null +++ b/.changes/1.6.0-b2.md @@ -0,0 +1,11 @@ +## dbt-redshift 1.6.0-b2 - May 25, 2023 + +### Fixes + +- Fix redshift_connector issue of timing out after 30s ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) +- Add a new connection param to reenable certain Redshift commands in macros. 
([#463](https://github.com/dbt-labs/dbt-redshift/issues/463)) +- Escape `%` symbols in table/view/column comments ([#441](https://github.com/dbt-labs/dbt-redshift/issues/441)) +- Use smaller default batch size for seeds ([#347](https://github.com/dbt-labs/dbt-redshift/issues/347)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) diff --git a/.changes/unreleased/Fixes-20230508-094834.yaml b/.changes/1.6.0/Fixes-20230508-094834.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230508-094834.yaml rename to .changes/1.6.0/Fixes-20230508-094834.yaml diff --git a/.changes/unreleased/Fixes-20230520-043039.yaml b/.changes/1.6.0/Fixes-20230520-043039.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230520-043039.yaml rename to .changes/1.6.0/Fixes-20230520-043039.yaml diff --git a/.changes/unreleased/Fixes-20230524-151825.yaml b/.changes/1.6.0/Fixes-20230524-151825.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230524-151825.yaml rename to .changes/1.6.0/Fixes-20230524-151825.yaml diff --git a/.changes/unreleased/Fixes-20230524-165236.yaml b/.changes/1.6.0/Fixes-20230524-165236.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230524-165236.yaml rename to .changes/1.6.0/Fixes-20230524-165236.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 563b9a61c..bb1774237 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,19 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.6.0-b2 - May 25, 2023 + +### Fixes + +- Fix redshift_connector issue of timing out after 30s ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) +- Add a new connection param to reenable certain Redshift commands in macros. ([#463](https://github.com/dbt-labs/dbt-redshift/issues/463)) +- Escape `%` symbols in table/view/column comments ([#441](https://github.com/dbt-labs/dbt-redshift/issues/441)) +- Use smaller default batch size for seeds ([#347](https://github.com/dbt-labs/dbt-redshift/issues/347)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) + + ## dbt-redshift 1.6.0-b1 - May 12, 2023 ### Fixes @@ -16,8 +29,6 @@ - Update pytest requirement from ~=7.2 to ~=7.3 ([#414](https://github.com/dbt-labs/dbt-redshift/pull/414)) - - ## dbt-redshift 1.6.0-a1 - April 17, 2023 ## Previous Releases diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index cafa91966..21c2b2836 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.6.0b1" +version = "1.6.0b2" From 5af296c898c3640bbd7a91a14ac64ce50db89042 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Fri, 26 May 2023 15:13:43 -0500 Subject: [PATCH 088/113] convert to reusable nightly tests (#459) * convert to reusable nightly tests * fix triage label workflow * point to main --- .github/workflows/integration.yml | 3 --- .github/workflows/release-branch-tests.yml | 31 ++++++++++++++++++++++ .github/workflows/triage-labels.yml | 1 + 3 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/release-branch-tests.yml diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index e68fe4966..0d6d91855 
100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -38,9 +38,6 @@ on: description: "branch of dbt-core to use in dev-requirements.txt" required: false type: string - # run this once per night to ensure no regressions from latest dbt-core changes - schedule: - - cron: '0 5 * * *' # 5 UTC # explicitly turn off permissions for `GITHUB_TOKEN` permissions: read-all diff --git a/.github/workflows/release-branch-tests.yml b/.github/workflows/release-branch-tests.yml new file mode 100644 index 000000000..004c6fb29 --- /dev/null +++ b/.github/workflows/release-branch-tests.yml @@ -0,0 +1,31 @@ +# **what?** +# The purpose of this workflow is to trigger CI to run for each release +# branch on a regular cadence. If the CI workflow fails for a branch, it +# will post to dev-core-alerts to raise awareness. The + +# **why?** +# Ensures release branches are always shippable and not broken. +# Also, can catch any dependencies shifting beneath us that might +# introduce breaking changes (could also impact Cloud). + +# **when?** +# Once each morning. 
Manual trigger can also test on demand + +name: Release branch scheduled testing + +on: + # run this once per night to ensure no regressions from latest dbt-core changes + schedule: + - cron: '0 5 * * *' # 5 UTC + + workflow_dispatch: # for manual triggering + +# no special access is needed +permissions: read-all + +jobs: + run_tests: + uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main + with: + workflows_to_run: '["main.yml", "integration.yml"]' + secrets: inherit diff --git a/.github/workflows/triage-labels.yml b/.github/workflows/triage-labels.yml index 97649a522..91f529e3e 100644 --- a/.github/workflows/triage-labels.yml +++ b/.github/workflows/triage-labels.yml @@ -23,6 +23,7 @@ permissions: jobs: triage_label: + if: contains(github.event.issue.labels.*.name, 'awaiting_response') uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main with: add_label: "triage" From 85c1d199e8dd088b672e2157ae1f023c1a4b825c Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 30 May 2023 17:38:42 -0400 Subject: [PATCH 089/113] drop support for py37 (#474) * drop support for py37 * drop support for py37 --- .../unreleased/Breaking Changes-20230530-165542.yaml | 6 ++++++ .github/scripts/integration-test-matrix.js | 2 +- .github/workflows/main.yml | 4 ++-- .github/workflows/nightly-release.yml | 2 +- CONTRIBUTING.md | 2 +- dev-requirements.txt | 9 +++------ setup.py | 7 +++---- tox.ini | 6 +++--- 8 files changed, 20 insertions(+), 18 deletions(-) create mode 100644 .changes/unreleased/Breaking Changes-20230530-165542.yaml diff --git a/.changes/unreleased/Breaking Changes-20230530-165542.yaml b/.changes/unreleased/Breaking Changes-20230530-165542.yaml new file mode 100644 index 000000000..68b4bfdda --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20230530-165542.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Drop support for python 3.7 +time: 2023-05-30T16:55:42.393416-04:00 +custom: + Author: 
mikealfare + Issue: dbt-core/7082 diff --git a/.github/scripts/integration-test-matrix.js b/.github/scripts/integration-test-matrix.js index 9e7698ef5..7db445d9e 100644 --- a/.github/scripts/integration-test-matrix.js +++ b/.github/scripts/integration-test-matrix.js @@ -1,6 +1,6 @@ module.exports = ({ context }) => { const defaultPythonVersion = "3.8"; - const supportedPythonVersions = ["3.7", "3.8", "3.9", "3.10", "3.11"]; + const supportedPythonVersions = ["3.8", "3.9", "3.10", "3.11"]; const supportedAdapters = ["redshift"]; // if PR, generate matrix based on files changed and PR labels diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a668a50d8..a76df7e9e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -72,7 +72,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11'] env: TOXENV: "unit" @@ -176,7 +176,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - name: Set up Python ${{ matrix.python-version }} diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml index 54c5fdc69..f552a04eb 100644 --- a/.github/workflows/nightly-release.yml +++ b/.github/workflows/nightly-release.yml @@ -26,7 +26,7 @@ defaults: shell: bash env: - RELEASE_BRANCH: "1.4.latest" + RELEASE_BRANCH: "main" jobs: aggregate-release-data: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7e4bc28f0..ff08b6190 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -70,7 +70,7 @@ $EDITOR test.env There are a few methods for running tests locally. #### `tox` -`tox` takes care of managing Python virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel. 
For example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, Python 3.10, and `flake8` checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration of these tests are located in `tox.ini`. +`tox` takes care of managing Python virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel. For example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10, and `flake8` checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration of these tests are located in `tox.ini`. #### `pytest` Finally, you can also run a specific test or group of tests using `pytest` directly. With a Python virtualenv active and dev dependencies installed you can do things like: diff --git a/dev-requirements.txt b/dev-requirements.txt index 963e64bbc..e9e4083c5 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -9,15 +9,13 @@ git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-postgres&subdirectory=plugi black~=23.3 bumpversion~=0.6.0 click~=8.1 -flake8~=5.0;python_version=="3.7" -flake8~=6.0;python_version>="3.8" +flake8~=6.0 flaky~=3.7 freezegun~=1.2 ipdb~=0.13.13 mypy==1.2.0 # patch updates have historically introduced breaking changes pip-tools~=6.13 -pre-commit~=2.21;python_version=="3.7" -pre-commit~=3.2;python_version>="3.8" +pre-commit~=3.2 pre-commit-hooks~=4.4 pytest~=7.3 pytest-csv~=3.0 @@ -25,8 +23,7 @@ pytest-dotenv~=0.5.2 pytest-logbook~=1.2 pytest-xdist~=3.2 pytz~=2023.3 -tox~=3.0;python_version=="3.7" -tox~=4.5;python_version>="3.8" +tox~=4.5 types-pytz~=2023.3 types-requests~=2.28 twine~=4.0 diff --git a/setup.py b/setup.py index f938f8e31..edfe926f4 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,9 @@ #!/usr/bin/env python import sys -if sys.version_info < (3, 7): +if sys.version_info < (3, 8): print("Error: dbt does not support this version of Python.") - 
print("Please upgrade to Python 3.7 or higher.") + print("Please upgrade to Python 3.8 or higher.") sys.exit(1) @@ -94,11 +94,10 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ], - python_requires=">=3.7", + python_requires=">=3.8", ) diff --git a/tox.ini b/tox.ini index 81625647a..b42bd89b7 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,8 @@ [tox] skipsdist = True -envlist = py37,py38,py39,py310,py311 +envlist = py38,py39,py310,py311 -[testenv:{unit,py37,py38,py39,py310,py311,py}] +[testenv:{unit,py38,py39,py310,py311,py}] description = unit testing skip_install = true passenv = @@ -13,7 +13,7 @@ deps = -rdev-requirements.txt -e. -[testenv:{integration,py37,py38,py39,py310,py311,py}-{redshift}] +[testenv:{integration,py38,py39,py310,py311,py}-{redshift}] description = adapter plugin integration testing skip_install = true passenv = From a37f8f64cbb46cef560216be0dfd8b7eb2ebf601 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 31 May 2023 11:07:57 -0400 Subject: [PATCH 090/113] ADAP-512: sslmode translation (#473) * translate sslmode from psycopg2 to redshift_connector * add _translate_sslmode method * add unit tests * update logs * enhance sslmode translation logic * update unit tests * change log warnings to log level of DEBUG * add changie * rework translation logic to use metadata classes * readability * updated default to match dbt-redshift 1.4 for backwards compatibility * removed unnecessary cruft and unreachable scenarios * updated tests to reflect new defaults * aligning patterns with core * translate sslmode from psycopg2 to redshift_connector * add 
_translate_sslmode method * add unit tests * update logs * enhance sslmode translation logic * update unit tests * change log warnings to log level of DEBUG * add changie * rework translation logic to use metadata classes * readability * updated default to match dbt-redshift 1.4 for backwards compatibility * removed unnecessary cruft and unreachable scenarios * updated tests to reflect new defaults * aligning patterns with core --------- Co-authored-by: Jessie Chen Co-authored-by: Jessie Chen <121250701+jiezhen-chen@users.noreply.github.com> --- .../Breaking Changes-20230522-111945.yaml | 7 ++ .../unreleased/Fixes-20230512-082027.yaml | 6 + dbt/adapters/redshift/connections.py | 85 ++++++++++++- tests/unit/test_redshift_adapter.py | 112 +++++++++++++++++- 4 files changed, 202 insertions(+), 8 deletions(-) create mode 100644 .changes/unreleased/Breaking Changes-20230522-111945.yaml create mode 100644 .changes/unreleased/Fixes-20230512-082027.yaml diff --git a/.changes/unreleased/Breaking Changes-20230522-111945.yaml b/.changes/unreleased/Breaking Changes-20230522-111945.yaml new file mode 100644 index 000000000..63a03b3ba --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20230522-111945.yaml @@ -0,0 +1,7 @@ +kind: Breaking Changes +body: sslmode behavior has changed. To connect without ssl, set sslmode = disable. + To connect using ssl, set sslmode to verify-ca, or verify-full. 
+time: 2023-05-22T11:19:45.927903-07:00 +custom: + Author: jiezhen-chen + Issue: "429" diff --git a/.changes/unreleased/Fixes-20230512-082027.yaml b/.changes/unreleased/Fixes-20230512-082027.yaml new file mode 100644 index 000000000..7116257d8 --- /dev/null +++ b/.changes/unreleased/Fixes-20230512-082027.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: translate psycopg2 sslmode to ssl and sslmode in redshift_connector +time: 2023-05-12T08:20:27.486301-07:00 +custom: + Author: jiezhen-chen + Issue: "429" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 1a1e7353b..509fd491e 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -13,12 +13,25 @@ from dbt.adapters.sql import SQLConnectionManager from dbt.contracts.connection import AdapterResponse, Connection, Credentials +from dbt.contracts.util import Replaceable +from dbt.dataclass_schema import FieldEncoder, dbtClassMixin, StrEnum, ValidationError from dbt.events import AdapterLogger -import dbt.exceptions +from dbt.exceptions import DbtRuntimeError, CompilationError import dbt.flags -from dbt.dataclass_schema import FieldEncoder, dbtClassMixin, StrEnum from dbt.helper_types import Port + +class SSLConfigError(CompilationError): + def __init__(self, exc: ValidationError): + self.exc = exc + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + validator_msg = self.validator_error_message(self.exc) + msg = f"Could not parse SSL config: {validator_msg}" + return msg + + logger = AdapterLogger("Redshift") drop_lock: Lock = dbt.flags.MP_CONTEXT.Lock() # type: ignore @@ -57,6 +70,66 @@ class RedshiftConnectionMethod(StrEnum): IAM = "iam" +class UserSSLMode(StrEnum): + disable = "disable" + allow = "allow" + prefer = "prefer" + require = "require" + verify_ca = "verify-ca" + verify_full = "verify-full" + + @classmethod + def default(cls) -> "UserSSLMode": + # default for `psycopg2`, which aligns with dbt-redshift 1.4 
and provides backwards compatibility + return cls.prefer + + +class RedshiftSSLMode(StrEnum): + verify_ca = "verify-ca" + verify_full = "verify-full" + + +SSL_MODE_TRANSLATION = { + UserSSLMode.disable: None, + UserSSLMode.allow: RedshiftSSLMode.verify_ca, + UserSSLMode.prefer: RedshiftSSLMode.verify_ca, + UserSSLMode.require: RedshiftSSLMode.verify_ca, + UserSSLMode.verify_ca: RedshiftSSLMode.verify_ca, + UserSSLMode.verify_full: RedshiftSSLMode.verify_full, +} + + +@dataclass +class RedshiftSSLConfig(dbtClassMixin, Replaceable): # type: ignore + ssl: bool = True + sslmode: Optional[RedshiftSSLMode] = SSL_MODE_TRANSLATION[UserSSLMode.default()] + + @classmethod + def parse(cls, user_sslmode: UserSSLMode) -> "RedshiftSSLConfig": + try: + raw_redshift_ssl = { + "ssl": user_sslmode != UserSSLMode.disable, + "sslmode": SSL_MODE_TRANSLATION[user_sslmode], + } + cls.validate(raw_redshift_ssl) + except ValidationError as exc: + raise SSLConfigError(exc) + + redshift_ssl = cls.from_dict(raw_redshift_ssl) + + if redshift_ssl.ssl: + message = ( + f"Establishing connection using ssl with `sslmode` set to '{user_sslmode}'." + f"To connect without ssl, set `sslmode` to 'disable'." + ) + else: + message = "Establishing connection without ssl." 
+ + logger.debug(message) + + return redshift_ssl + + @dataclass class RedshiftCredentials(Credentials): host: str @@ -74,7 +147,7 @@ class RedshiftCredentials(Credentials): ra3_node: Optional[bool] = False connect_timeout: Optional[int] = None role: Optional[str] = None - sslmode: Optional[str] = None + sslmode: Optional[UserSSLMode] = field(default_factory=UserSSLMode.default) retries: int = 1 region: Optional[str] = None # if not provided, will be determined from host autocommit: Optional[bool] = False @@ -146,8 +219,8 @@ def get_connect_method(self): "Invalid region provided: {}".format(kwargs["region"]) ) - if self.credentials.sslmode: - kwargs["sslmode"] = self.credentials.sslmode + redshift_ssl_config = RedshiftSSLConfig.parse(self.credentials.sslmode) + kwargs.update(redshift_ssl_config.to_dict()) # Support missing 'method' for backwards compatibility if method == RedshiftConnectionMethod.DATABASE or method is None: @@ -342,7 +415,7 @@ def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False): if cursor is None: conn = self.get_thread_connection() conn_name = conn.name if conn and conn.name else "" - raise dbt.exceptions.DbtRuntimeError(f"Tried to run invalid SQL: {sql} on {conn_name}") + raise DbtRuntimeError(f"Tried to run invalid SQL: {sql} on {conn_name}") return connection, cursor diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index 9a43c1ce3..5e4e00b94 100644 --- a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -12,7 +12,7 @@ ) from dbt.clients import agate_helper from dbt.exceptions import FailedToConnectError -from dbt.adapters.redshift.connections import RedshiftConnectMethodFactory +from dbt.adapters.redshift.connections import RedshiftConnectMethodFactory, RedshiftSSLConfig from .utils import ( config_from_parts_or_dicts, mock_connection, @@ -21,6 +21,9 @@ ) +DEFAULT_SSL_CONFIG = RedshiftSSLConfig().to_dict() + + class 
TestRedshiftAdapter(unittest.TestCase): def setUp(self): profile_cfg = { @@ -74,6 +77,7 @@ def test_implicit_database_conn(self): db_groups=[], timeout=None, region="us-east-1", + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -92,6 +96,7 @@ def test_explicit_database_conn(self): db_groups=[], region="us-east-1", timeout=None, + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -112,11 +117,12 @@ def test_explicit_iam_conn_without_profile(self): user="", cluster_identifier="my_redshift", region="us-east-1", + timeout=None, auto_create=False, db_groups=[], profile=None, - timeout=None, port=5439, + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -134,6 +140,7 @@ def test_conn_timeout_30(self): db_groups=[], region="us-east-1", timeout=30, + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -162,6 +169,7 @@ def test_explicit_iam_conn_with_profile(self): profile="test", timeout=None, port=5439, + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -188,6 +196,7 @@ def test_explicit_iam_serverless_with_profile(self): profile="test", timeout=None, port=5439, + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -216,6 +225,7 @@ def test_explicit_region(self): profile="test", timeout=None, port=5439, + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -245,6 +255,7 @@ def test_explicit_region_failure(self): profile="test", timeout=None, port=5439, + **DEFAULT_SSL_CONFIG, ) @mock.patch("redshift_connector.connect", Mock()) @@ -274,8 +285,104 @@ def test_explicit_invalid_region(self): profile="test", timeout=None, port=5439, + **DEFAULT_SSL_CONFIG, ) + @mock.patch("redshift_connector.connect", Mock()) + def test_sslmode_disable(self): + self.config.credentials.sslmode = "disable" + connection = self.adapter.acquire_connection("dummy") + connection.handle + 
redshift_connector.connect.assert_called_once_with( + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", + port=5439, + auto_create=False, + db_groups=[], + region="us-east-1", + timeout=None, + ssl=False, + sslmode=None, + ) + + @mock.patch("redshift_connector.connect", Mock()) + def test_sslmode_allow(self): + self.config.credentials.sslmode = "allow" + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", + port=5439, + auto_create=False, + db_groups=[], + region="us-east-1", + timeout=None, + ssl=True, + sslmode="verify-ca", + ) + + @mock.patch("redshift_connector.connect", Mock()) + def test_sslmode_verify_full(self): + self.config.credentials.sslmode = "verify-full" + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", + port=5439, + auto_create=False, + db_groups=[], + region="us-east-1", + timeout=None, + ssl=True, + sslmode="verify-full", + ) + + @mock.patch("redshift_connector.connect", Mock()) + def test_sslmode_verify_ca(self): + self.config.credentials.sslmode = "verify-ca" + connection = self.adapter.acquire_connection("dummy") + connection.handle + redshift_connector.connect.assert_called_once_with( + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", + port=5439, + auto_create=False, + db_groups=[], + region="us-east-1", + timeout=None, + ssl=True, + sslmode="verify-ca", + ) + + @mock.patch("redshift_connector.connect", Mock()) + def test_sslmode_prefer(self): + self.config.credentials.sslmode = "prefer" + connection = self.adapter.acquire_connection("dummy") + connection.handle 
+ redshift_connector.connect.assert_called_once_with( + host="thishostshouldnotexist.test.us-east-1", + database="redshift", + user="root", + password="password", + port=5439, + auto_create=False, + db_groups=[], + region="us-east-1", + timeout=None, + ssl=True, + sslmode="verify-ca", + ) + @mock.patch("redshift_connector.connect", Mock()) @mock.patch("boto3.Session", Mock()) def test_serverless_iam_failure(self): @@ -301,6 +408,7 @@ def test_serverless_iam_failure(self): profile="test", port=5439, timeout=None, + **DEFAULT_SSL_CONFIG, ) self.assertTrue("'host' must be provided" in context.exception.msg) From c665456c2a837ebe53f30474a62d21f227597aa4 Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Wed, 31 May 2023 13:28:49 -0500 Subject: [PATCH 091/113] adding link to 1.5 release notes (#472) * adding link to 1.5 release notes * run changie merge --- .changes/0.0.0.md | 1 + CHANGELOG.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.changes/0.0.0.md b/.changes/0.0.0.md index ed4a3e9be..4c3cb6b65 100644 --- a/.changes/0.0.0.md +++ b/.changes/0.0.0.md @@ -1,5 +1,6 @@ ## Previous Releases For information on prior major and minor releases, see their changelogs: +- [1.5](https://github.com/dbt-labs/dbt-redshift/blob/1.5.latest/CHANGELOG.md) - [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-redshift/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-redshift/blob/1.2.latest/CHANGELOG.md) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb1774237..2d676aa66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,7 +17,6 @@ ### Contributors - [@jiezhen-chen](https://github.com/jiezhen-chen) ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) - ## dbt-redshift 1.6.0-b1 - May 12, 2023 ### Fixes @@ -33,6 +32,7 @@ ## Previous Releases For information on prior major and minor releases, see their changelogs: +- 
[1.5](https://github.com/dbt-labs/dbt-redshift/blob/1.5.latest/CHANGELOG.md) - [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-redshift/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-redshift/blob/1.2.latest/CHANGELOG.md) From 876076b9702d45942e40f96d9b1f2b8417689bbc Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Thu, 1 Jun 2023 16:34:29 -0700 Subject: [PATCH 092/113] Restore transaction semantics used by dbt-redshift prior to 1.5 (#475) * Rename flag and get autocommit on * Add changie * Add test for issue 451 * Add test for issue #454 * Fix spelling. * Revert names * update tests * update the comment to use pep for rationale --------- Co-authored-by: Mila Page --- .../unreleased/Fixes-20230531-153347.yaml | 7 + dbt/adapters/redshift/connections.py | 3 +- tests/functional/adapter/test_autocommit.py | 60 ------ tests/functional/test_autocommit.py | 171 ++++++++++++++++++ 4 files changed, 180 insertions(+), 61 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230531-153347.yaml delete mode 100644 tests/functional/adapter/test_autocommit.py create mode 100644 tests/functional/test_autocommit.py diff --git a/.changes/unreleased/Fixes-20230531-153347.yaml b/.changes/unreleased/Fixes-20230531-153347.yaml new file mode 100644 index 000000000..4c4d324ad --- /dev/null +++ b/.changes/unreleased/Fixes-20230531-153347.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Get autocommit on by default to restore old semantics users had relied on prior + to 1.5. Add tests. 
+time: 2023-05-31T15:33:47.180508-07:00 +custom: + Author: versusfacit + Issue: "425" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 509fd491e..0c25ec08d 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -150,7 +150,8 @@ class RedshiftCredentials(Credentials): sslmode: Optional[UserSSLMode] = field(default_factory=UserSSLMode.default) retries: int = 1 region: Optional[str] = None # if not provided, will be determined from host - autocommit: Optional[bool] = False + # opt-in by default per team deliberation on https://peps.python.org/pep-0249/#autocommit + autocommit: Optional[bool] = True _ALIASES = {"dbname": "database", "pass": "password"} diff --git a/tests/functional/adapter/test_autocommit.py b/tests/functional/adapter/test_autocommit.py deleted file mode 100644 index 78f33e0f7..000000000 --- a/tests/functional/adapter/test_autocommit.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -import pytest - -from dbt.tests.util import run_dbt_and_capture - -_MACROS__CREATE_DB = """ -{% macro create_db_fake() %} - -{% set database = "db_for_test__do_delete_if_you_see_this" %} - -{# IF NOT EXISTS not avaiable but Redshift merely returns an error for trying to overwrite #} -{% set create_command %} - CREATE DATABASE {{ database }} -{% endset %} - -{{ log(create_command, info=True) }} - -{% do run_query(create_command) %} - -{{ log("Created redshift database " ~ database, info=True) }} - -{% endmacro %} -""" - - -class TestAutocommitWorksWithTransactionBlocks: - @pytest.fixture(scope="class") - def macros(self): - return {"macro.sql": _MACROS__CREATE_DB} - - @pytest.fixture(scope="class") - def dbt_profile_target(self): - return { - "type": "redshift", - "threads": 1, - "retries": 6, - "host": os.getenv("REDSHIFT_TEST_HOST"), - "port": int(os.getenv("REDSHIFT_TEST_PORT")), - "user": os.getenv("REDSHIFT_TEST_USER"), - "pass": os.getenv("REDSHIFT_TEST_PASS"), - "dbname": 
os.getenv("REDSHIFT_TEST_DBNAME"), - "autocommit": True, - } - - def test_autocommit_allows_for_more_commands(self, project): - """Scenario: user has autocommit=True in their target to run macros with normally - forbidden commands like CREATE DATABASE and VACUUM""" - result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) - assert "CREATE DATABASE cannot run inside a transaction block" not in out - - -class TestTransactionBlocksPreventCertainCommands: - @pytest.fixture(scope="class") - def macros(self): - return {"macro.sql": _MACROS__CREATE_DB} - - def test_normally_create_db_disallowed(self, project): - """Monitor if status quo in Redshift connector changes""" - result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) - assert "CREATE DATABASE cannot run inside a transaction block" in out diff --git a/tests/functional/test_autocommit.py b/tests/functional/test_autocommit.py new file mode 100644 index 000000000..e5e54a34f --- /dev/null +++ b/tests/functional/test_autocommit.py @@ -0,0 +1,171 @@ +import os +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture + +_MACROS__CREATE_DB = """ +{% macro create_db_fake() %} + +{% set database = "db_for_test__do_delete_if_you_see_this" %} + +{# IF NOT EXISTS not avaiable but Redshift merely returns an error for trying to overwrite #} +{% set create_command %} + CREATE DATABASE {{ database }} +{% endset %} + +{{ log(create_command, info=True) }} + +{% do run_query(create_command) %} + +{{ log("Created redshift database " ~ database, info=True) }} + +{% endmacro %} +""" + +_MACROS__UPDATE_MY_MODEL = """ +{% macro update_some_model(alert_ids, sent_at, table_name) %} + {% set update_query %} + UPDATE {{ ref('my_model') }} set status = 'sent' + {% endset %} + {% do run_query(update_query) %} +{% endmacro %} +""" + +_MACROS__UPDATE_MY_SEED = """ +{% macro update_my_seed() %} +update {{ ref("my_seed") }} set status = 'done' +{% endmacro %} +""" 
+ +_MODELS__MY_MODEL = """ +{{ config(materialized="table") }} + +select 1 as id, 'pending' as status +""" + +_MODELS__AFTER_COMMIT = """ +{{ + config( + post_hook=after_commit("{{ update_my_seed() }}") + ) +}} + +select 1 as id +""" + +_SEEDS_MY_SEED = """ +id,status +1,pending +""".lstrip() + + +class TestTransactionBlocksPreventCertainCommands: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__CREATE_DB} + + def test_autocommit_deactivated_prevents_DDL(self, project): + """Scenario: user has autocommit=True in their target to run macros with normally + forbidden commands like CREATE DATABASE and VACUUM""" + result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) + assert "CREATE DATABASE cannot run inside a transaction block" not in out + + +class TestAutocommitUnblocksDDLInTransactions: + @pytest.fixture(scope="class") + def dbt_profile_target(self): + return { + "type": "redshift", + "threads": 1, + "retries": 6, + "host": os.getenv("REDSHIFT_TEST_HOST"), + "port": int(os.getenv("REDSHIFT_TEST_PORT")), + "user": os.getenv("REDSHIFT_TEST_USER"), + "pass": os.getenv("REDSHIFT_TEST_PASS"), + "dbname": os.getenv("REDSHIFT_TEST_DBNAME"), + "autocommit": False, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__CREATE_DB} + + def test_default_setting_allows_DDL(self, project): + """Monitor if status quo in Redshift connector changes""" + result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) + assert "CREATE DATABASE cannot run inside a transaction block" in out + + +class TestUpdateDDLCommits: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__UPDATE_MY_MODEL} + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": _MODELS__MY_MODEL} + + def test_update_will_go_through(self, project): + run_dbt() + run_dbt(["run-operation", "update_some_model"]) + _, out = 
run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_model".format(project.test_schema)] + ) + assert "1 | sent" in out + + +class TestUpdateDDLDoesNotCommitWithoutAutocommit: + @pytest.fixture(scope="class") + def dbt_profile_target(self): + return { + "type": "redshift", + "host": os.getenv("REDSHIFT_TEST_HOST"), + "port": int(os.getenv("REDSHIFT_TEST_PORT")), + "user": os.getenv("REDSHIFT_TEST_USER"), + "pass": os.getenv("REDSHIFT_TEST_PASS"), + "dbname": os.getenv("REDSHIFT_TEST_DBNAME"), + "autocommit": False, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__UPDATE_MY_MODEL} + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": _MODELS__MY_MODEL} + + def test_update_will_not_go_through(self, project): + run_dbt() + run_dbt(["run-operation", "update_some_model"]) + _, out = run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_model".format(project.test_schema)] + ) + assert "1 | pending" in out + + +class TestAfterCommitMacroTakesEffect: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__UPDATE_MY_SEED} + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": _MODELS__AFTER_COMMIT} + + @pytest.fixture(scope="class") + def seeds(self): + return {"my_seed.csv": _SEEDS_MY_SEED} + + def test_update_happens_via_macro_in_config(self, project): + run_dbt(["seed"]) + _, out = run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_seed".format(project.test_schema)] + ) + assert "1 | pending" in out + + run_dbt() + _, out = run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_seed".format(project.test_schema)] + ) + assert "1 | done" in out From 491611bd2e5eb22ecf0e6417a6b632a1da2ad2c7 Mon Sep 17 00:00:00 2001 From: Michelle Ark Date: Fri, 2 Jun 2023 12:36:13 -0700 Subject: [PATCH 093/113] testing constraint rendering fixes (#430) --- .changes/unreleased/Under the Hood-20230511-162623.yaml | 6 ++++++ 
tests/functional/adapter/test_constraints.py | 9 ++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20230511-162623.yaml diff --git a/.changes/unreleased/Under the Hood-20230511-162623.yaml b/.changes/unreleased/Under the Hood-20230511-162623.yaml new file mode 100644 index 000000000..9166641af --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230511-162623.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: test constraint rendering of foreign key and unique constraints +time: 2023-05-11T16:26:23.49096-04:00 +custom: + Author: michelleark + Issue: "7417" diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 0fdd37da7..04095434f 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -12,7 +12,7 @@ _expected_sql_redshift = """ create table ( - id integer not null primary key, + id integer not null primary key references (id) unique, color text, date_day text ) ; @@ -23,6 +23,7 @@ color, date_day from ( + -- depends_on: select 'blue' as color, 1 as id, @@ -104,7 +105,8 @@ def expected_sql(self): color text, date_day text, primary key (id), - constraint strange_uniqueness_requirement unique (color, date_day) + constraint strange_uniqueness_requirement unique (color, date_day), + foreign key (id) references (id) ) ; insert into ( @@ -113,9 +115,10 @@ def expected_sql(self): color, date_day from ( + -- depends_on: select - 1 as id, 'blue' as color, + 1 as id, '2019-01-01' as date_day ) as model_subq ) From 8f71a525e7acb46a4040ae1fc2b22a3c80ff0bf0 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Wed, 7 Jun 2023 09:57:13 -0700 Subject: [PATCH 094/113] Standardize the adapter for core debug changes. 
(#478) Co-authored-by: Mila Page --- .changes/unreleased/Features-20230604-041410.yaml | 6 ++++++ dbt/adapters/redshift/connections.py | 14 ++++++++++++-- dbt/adapters/redshift/impl.py | 4 ++++ 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 .changes/unreleased/Features-20230604-041410.yaml diff --git a/.changes/unreleased/Features-20230604-041410.yaml b/.changes/unreleased/Features-20230604-041410.yaml new file mode 100644 index 000000000..e617e3561 --- /dev/null +++ b/.changes/unreleased/Features-20230604-041410.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Standardize the _connection_keys and debug_query for `dbt debug`. +time: 2023-06-04T04:14:10.191263-07:00 +custom: + Author: versusfacit + Issue: PR754 diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 0c25ec08d..2c43b46a7 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -162,15 +162,25 @@ def type(self): def _connection_keys(self): return ( "host", - "port", "user", + "port", "database", - "schema", "method", "cluster_id", "iam_profile", + "schema", "sslmode", "region", + "sslmode", + "region", + "iam_profile", + "autocreate", + "db_groups", + "ra3_node", + "connect_timeout", + "role", + "retries", + "autocommit", ) @property diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 54fdd7dcf..2d7f3a854 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -163,3 +163,7 @@ def default_python_submission_method(self) -> str: def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse: return super().generate_python_submission_response(submission_result) + + def debug_query(self): + """Override for DebugTask method""" + self.execute("select 1 as id") From 722abd60f06b05f4487f4d4de0e4c6a0a467fe76 Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Wed, 7 Jun 2023 13:06:00 
-0500 Subject: [PATCH 095/113] re-pointing reelease branch env for nightly releases to a .latest branch (#482) * re-pointing reelease branch env for nightly releases to a .latest branch * adding comment to explain why variable doesn't test against main --------- Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .github/workflows/nightly-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml index f552a04eb..46db5b749 100644 --- a/.github/workflows/nightly-release.yml +++ b/.github/workflows/nightly-release.yml @@ -26,7 +26,7 @@ defaults: shell: bash env: - RELEASE_BRANCH: "main" + RELEASE_BRANCH: "1.5.latest" # must test against most recent .latest branch to have parity for dependency with core jobs: aggregate-release-data: From 36daba2f9d1c551b5265a4bbbeeaa89be376943d Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Fri, 9 Jun 2023 09:31:44 -0700 Subject: [PATCH 096/113] migrate redshift__list_relations_without_caching off postgres dependency (#487) * convert test_store_test_failures to functional test * migrate redshift__list_relations_without_caching off postgres dependency * add changie --- .../unreleased/Fixes-20230608-180130.yaml | 6 ++++++ dbt/include/redshift/macros/adapters.sql | 19 ++++++++++++++++++- 2 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Fixes-20230608-180130.yaml diff --git a/.changes/unreleased/Fixes-20230608-180130.yaml b/.changes/unreleased/Fixes-20230608-180130.yaml new file mode 100644 index 000000000..47dfda72c --- /dev/null +++ b/.changes/unreleased/Fixes-20230608-180130.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: remove depdency on postgres__list_relations_without_caching macro +time: 2023-06-08T18:01:30.954976-07:00 +custom: + Author: colin-rogers-dbt + Issue: "488" diff --git 
a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index aae87489f..34c61a1a2 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -225,7 +225,24 @@ {% macro redshift__list_relations_without_caching(schema_relation) %} - {{ return(postgres__list_relations_without_caching(schema_relation)) }} + {% call statement('list_relations_without_caching', fetch_result=True) -%} + select + '{{ schema_relation.database }}' as database, + tablename as name, + schemaname as schema, + 'table' as type + from pg_tables + where schemaname ilike '{{ schema_relation.schema }}' + union all + select + '{{ schema_relation.database }}' as database, + viewname as name, + schemaname as schema, + 'view' as type + from pg_views + where schemaname ilike '{{ schema_relation.schema }}' + {% endcall %} + {{ return(load_result('list_relations_without_caching').table) }} {% endmacro %} From 5a1f69837ef1c034452246dc5f497d2413e55749 Mon Sep 17 00:00:00 2001 From: Github Build Bot Date: Fri, 9 Jun 2023 16:36:13 +0000 Subject: [PATCH 097/113] Bumping version to 1.6.0b3 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.6.0-b3.md | 23 +++++++++++++++++ .../Breaking Changes-20230522-111945.yaml | 0 .../Breaking Changes-20230530-165542.yaml | 0 .../Features-20230604-041410.yaml | 0 .../Fixes-20230512-082027.yaml | 0 .../Fixes-20230531-153347.yaml | 0 .../Fixes-20230608-180130.yaml | 0 .../Under the Hood-20230511-162623.yaml | 0 CHANGELOG.md | 25 +++++++++++++++++++ dbt/adapters/redshift/__version__.py | 2 +- 11 files changed, 50 insertions(+), 2 deletions(-) create mode 100644 .changes/1.6.0-b3.md rename .changes/{unreleased => 1.6.0}/Breaking Changes-20230522-111945.yaml (100%) rename .changes/{unreleased => 1.6.0}/Breaking Changes-20230530-165542.yaml (100%) rename .changes/{unreleased => 1.6.0}/Features-20230604-041410.yaml (100%) rename .changes/{unreleased => 1.6.0}/Fixes-20230512-082027.yaml (100%) rename 
.changes/{unreleased => 1.6.0}/Fixes-20230531-153347.yaml (100%) rename .changes/{unreleased => 1.6.0}/Fixes-20230608-180130.yaml (100%) rename .changes/{unreleased => 1.6.0}/Under the Hood-20230511-162623.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 581aad340..86668d7f2 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.6.0b2 +current_version = 1.6.0b3 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.6.0-b3.md b/.changes/1.6.0-b3.md new file mode 100644 index 000000000..31cc589a9 --- /dev/null +++ b/.changes/1.6.0-b3.md @@ -0,0 +1,23 @@ +## dbt-redshift 1.6.0-b3 - June 09, 2023 + +### Breaking Changes + +- sslmode behavior has changed. To connect without ssl, set sslmode = disable. To connect using ssl, set sslmode to verify-ca, or verify-full. ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Drop support for python 3.7 ([#dbt-core/7082](https://github.com/dbt-labs/dbt-redshift/issues/dbt-core/7082)) + +### Features + +- Standardize the _connection_keys and debug_query for `dbt debug`. ([#PR754](https://github.com/dbt-labs/dbt-redshift/issues/PR754)) + +### Fixes + +- translate psycopg2 sslmode to ssl and sslmode in redshift_connector ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Get autocommit on by default to restore old semantics users had relied on prior to 1.5. Add tests. 
([#425](https://github.com/dbt-labs/dbt-redshift/issues/425)) +- remove depdency on postgres__list_relations_without_caching macro ([#488](https://github.com/dbt-labs/dbt-redshift/issues/488)) + +### Under the Hood + +- test constraint rendering of foreign key and unique constraints ([#7417](https://github.com/dbt-labs/dbt-redshift/issues/7417)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429), [#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) diff --git a/.changes/unreleased/Breaking Changes-20230522-111945.yaml b/.changes/1.6.0/Breaking Changes-20230522-111945.yaml similarity index 100% rename from .changes/unreleased/Breaking Changes-20230522-111945.yaml rename to .changes/1.6.0/Breaking Changes-20230522-111945.yaml diff --git a/.changes/unreleased/Breaking Changes-20230530-165542.yaml b/.changes/1.6.0/Breaking Changes-20230530-165542.yaml similarity index 100% rename from .changes/unreleased/Breaking Changes-20230530-165542.yaml rename to .changes/1.6.0/Breaking Changes-20230530-165542.yaml diff --git a/.changes/unreleased/Features-20230604-041410.yaml b/.changes/1.6.0/Features-20230604-041410.yaml similarity index 100% rename from .changes/unreleased/Features-20230604-041410.yaml rename to .changes/1.6.0/Features-20230604-041410.yaml diff --git a/.changes/unreleased/Fixes-20230512-082027.yaml b/.changes/1.6.0/Fixes-20230512-082027.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230512-082027.yaml rename to .changes/1.6.0/Fixes-20230512-082027.yaml diff --git a/.changes/unreleased/Fixes-20230531-153347.yaml b/.changes/1.6.0/Fixes-20230531-153347.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230531-153347.yaml rename to .changes/1.6.0/Fixes-20230531-153347.yaml diff --git a/.changes/unreleased/Fixes-20230608-180130.yaml b/.changes/1.6.0/Fixes-20230608-180130.yaml similarity index 100% rename from 
.changes/unreleased/Fixes-20230608-180130.yaml rename to .changes/1.6.0/Fixes-20230608-180130.yaml diff --git a/.changes/unreleased/Under the Hood-20230511-162623.yaml b/.changes/1.6.0/Under the Hood-20230511-162623.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20230511-162623.yaml rename to .changes/1.6.0/Under the Hood-20230511-162623.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d676aa66..6a93ee08b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,31 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.6.0-b3 - June 09, 2023 + +### Breaking Changes + +- sslmode behavior has changed. To connect without ssl, set sslmode = disable. To connect using ssl, set sslmode to verify-ca, or verify-full. ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Drop support for python 3.7 ([#dbt-core/7082](https://github.com/dbt-labs/dbt-redshift/issues/dbt-core/7082)) + +### Features + +- Standardize the _connection_keys and debug_query for `dbt debug`. ([#PR754](https://github.com/dbt-labs/dbt-redshift/issues/PR754)) + +### Fixes + +- translate psycopg2 sslmode to ssl and sslmode in redshift_connector ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Get autocommit on by default to restore old semantics users had relied on prior to 1.5. Add tests. 
([#425](https://github.com/dbt-labs/dbt-redshift/issues/425)) +- remove depdency on postgres__list_relations_without_caching macro ([#488](https://github.com/dbt-labs/dbt-redshift/issues/488)) + +### Under the Hood + +- test constraint rendering of foreign key and unique constraints ([#7417](https://github.com/dbt-labs/dbt-redshift/issues/7417)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429), [#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) + + ## dbt-redshift 1.6.0-b2 - May 25, 2023 ### Fixes diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 21c2b2836..0c2870f87 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.6.0b2" +version = "1.6.0b3" From df8a9ab0cdac70ee4752d0e4ced3def343a019c5 Mon Sep 17 00:00:00 2001 From: Jeremy Cohen Date: Tue, 13 Jun 2023 23:48:16 -0400 Subject: [PATCH 098/113] Test dbt-core#7537 (#437) --- tests/functional/adapter/test_constraints.py | 23 ++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 04095434f..a97c66bbd 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -8,6 +8,7 @@ BaseIncrementalConstraintsRuntimeDdlEnforcement, BaseIncrementalConstraintsRollback, BaseModelConstraintsRuntimeEnforcement, + BaseConstraintQuotedColumn, ) _expected_sql_redshift = """ @@ -124,3 +125,25 @@ def expected_sql(self): ) ; """ + + +class TestRedshiftConstraintQuotedColumn(BaseConstraintQuotedColumn): + @pytest.fixture(scope="class") + def expected_sql(self): + return """ +create table ( + id integer not null, + "from" text not null, + date_day text +) ; +insert into +( + select id, "from", date_day + from ( + select + 'blue' as "from", + 1 as id, + '2019-01-01' as date_day + ) 
as model_subq +); +""" From cbb45ca585a61b1bb250b48aac69c410f8ef4efb Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Wed, 14 Jun 2023 12:34:06 -0700 Subject: [PATCH 099/113] Pass `region` param on to redshift conn as is (#485) * convert test_store_test_failures to functional test * test not requiring a region in hostname * remove dev-requirements.txt change * add unit test and clean up logic * add changie * simplify region inference logic * remove region logic entirely --- .../unreleased/Fixes-20230612-114853.yaml | 6 ++ dbt/adapters/redshift/connections.py | 45 +------- tests/unit/test_redshift_adapter.py | 101 +++--------------- 3 files changed, 19 insertions(+), 133 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230612-114853.yaml diff --git a/.changes/unreleased/Fixes-20230612-114853.yaml b/.changes/unreleased/Fixes-20230612-114853.yaml new file mode 100644 index 000000000..1650c5704 --- /dev/null +++ b/.changes/unreleased/Fixes-20230612-114853.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: remove requirement for region param +time: 2023-06-12T11:48:53.980327-07:00 +custom: + Author: colin-rogers-dbt + Issue: "484" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index 2c43b46a7..cd653d781 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -7,8 +7,6 @@ import agate import sqlparse import redshift_connector -import urllib.request -import json from redshift_connector.utils.oids import get_datatype_name from dbt.adapters.sql import SQLConnectionManager @@ -48,23 +46,6 @@ def json_schema(self): dbtClassMixin.register_field_encoders({IAMDuration: IAMDurationEncoder()}) -def _get_aws_regions(): - # Extract the prefixes from the AWS IP ranges JSON to determine the available regions - url = "https://ip-ranges.amazonaws.com/ip-ranges.json" - response = urllib.request.urlopen(url) - data = 
json.loads(response.read().decode()) - regions = set() - - for prefix in data["prefixes"]: - if prefix["service"] == "AMAZON": - regions.add(prefix["region"]) - - return regions - - -_AVAILABLE_AWS_REGIONS = _get_aws_regions() - - class RedshiftConnectionMethod(StrEnum): DATABASE = "database" IAM = "iam" @@ -149,7 +130,7 @@ class RedshiftCredentials(Credentials): role: Optional[str] = None sslmode: Optional[UserSSLMode] = field(default_factory=UserSSLMode.default) retries: int = 1 - region: Optional[str] = None # if not provided, will be determined from host + region: Optional[str] = None # opt-in by default per team deliberation on https://peps.python.org/pep-0249/#autocommit autocommit: Optional[bool] = True @@ -188,13 +169,6 @@ def unique_field(self) -> str: return self.host -def _is_valid_region(region): - if region is None or len(region) == 0: - logger.warning("Couldn't determine AWS regions. Skipping validation to avoid blocking.") - return True - return region in _AVAILABLE_AWS_REGIONS - - class RedshiftConnectMethodFactory: credentials: RedshiftCredentials @@ -212,23 +186,6 @@ def get_connect_method(self): "region": self.credentials.region, "timeout": self.credentials.connect_timeout, } - if kwargs["region"] is None: - logger.debug("No region provided, attempting to determine from host.") - try: - region_value = self.credentials.host.split(".")[2] - except IndexError: - raise dbt.exceptions.FailedToConnectError( - "No region provided and unable to determine region from host: " - "{}".format(self.credentials.host) - ) - - kwargs["region"] = region_value - - # Validate the set region - if not _is_valid_region(kwargs["region"]): - raise dbt.exceptions.FailedToConnectError( - "Invalid region provided: {}".format(kwargs["region"]) - ) redshift_ssl_config = RedshiftSSLConfig.parse(self.credentials.sslmode) kwargs.update(redshift_ssl_config.to_dict()) diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py index 5e4e00b94..a91eee59c 
100644 --- a/tests/unit/test_redshift_adapter.py +++ b/tests/unit/test_redshift_adapter.py @@ -64,24 +64,7 @@ def adapter(self): return self._adapter @mock.patch("redshift_connector.connect", Mock()) - def test_implicit_database_conn(self): - connection = self.adapter.acquire_connection("dummy") - connection.handle - redshift_connector.connect.assert_called_once_with( - host="thishostshouldnotexist.test.us-east-1", - database="redshift", - user="root", - password="password", - port=5439, - auto_create=False, - db_groups=[], - timeout=None, - region="us-east-1", - **DEFAULT_SSL_CONFIG, - ) - - @mock.patch("redshift_connector.connect", Mock()) - def test_explicit_database_conn(self): + def test_explicit_region_with_database_conn(self): self.config.method = "database" connection = self.adapter.acquire_connection("dummy") @@ -94,7 +77,7 @@ def test_explicit_database_conn(self): port=5439, auto_create=False, db_groups=[], - region="us-east-1", + region=None, timeout=None, **DEFAULT_SSL_CONFIG, ) @@ -116,7 +99,7 @@ def test_explicit_iam_conn_without_profile(self): password="", user="", cluster_identifier="my_redshift", - region="us-east-1", + region=None, timeout=None, auto_create=False, db_groups=[], @@ -138,7 +121,7 @@ def test_conn_timeout_30(self): port=5439, auto_create=False, db_groups=[], - region="us-east-1", + region=None, timeout=30, **DEFAULT_SSL_CONFIG, ) @@ -160,7 +143,7 @@ def test_explicit_iam_conn_with_profile(self): host="thishostshouldnotexist.test.us-east-1", database="redshift", cluster_identifier="my_redshift", - region="us-east-1", + region=None, auto_create=False, db_groups=[], db_user="root", @@ -187,7 +170,7 @@ def test_explicit_iam_serverless_with_profile(self): host="doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com", database="redshift", cluster_identifier=None, - region="us-east-2", + region=None, auto_create=False, db_groups=[], db_user="root", @@ -228,66 +211,6 @@ def test_explicit_region(self): **DEFAULT_SSL_CONFIG, ) - 
@mock.patch("redshift_connector.connect", Mock()) - @mock.patch("boto3.Session", Mock()) - def test_explicit_region_failure(self): - # Failure test with no region - self.config.credentials = self.config.credentials.replace( - method="iam", - iam_profile="test", - host="doesnotexist.1233_no_region", - region=None, - ) - - with self.assertRaises(dbt.exceptions.FailedToConnectError): - connection = self.adapter.acquire_connection("dummy") - connection.handle - redshift_connector.connect.assert_called_once_with( - iam=True, - host="doesnotexist.1233_no_region", - database="redshift", - cluster_identifier=None, - auto_create=False, - db_groups=[], - db_user="root", - password="", - user="", - profile="test", - timeout=None, - port=5439, - **DEFAULT_SSL_CONFIG, - ) - - @mock.patch("redshift_connector.connect", Mock()) - @mock.patch("boto3.Session", Mock()) - def test_explicit_invalid_region(self): - # Invalid region test - self.config.credentials = self.config.credentials.replace( - method="iam", - iam_profile="test", - host="doesnotexist.1233_no_region.us-not-a-region-1", - region=None, - ) - - with self.assertRaises(dbt.exceptions.FailedToConnectError): - connection = self.adapter.acquire_connection("dummy") - connection.handle - redshift_connector.connect.assert_called_once_with( - iam=True, - host="doesnotexist.1233_no_region", - database="redshift", - cluster_identifier=None, - auto_create=False, - db_groups=[], - db_user="root", - password="", - user="", - profile="test", - timeout=None, - port=5439, - **DEFAULT_SSL_CONFIG, - ) - @mock.patch("redshift_connector.connect", Mock()) def test_sslmode_disable(self): self.config.credentials.sslmode = "disable" @@ -301,7 +224,7 @@ def test_sslmode_disable(self): port=5439, auto_create=False, db_groups=[], - region="us-east-1", + region=None, timeout=None, ssl=False, sslmode=None, @@ -320,7 +243,7 @@ def test_sslmode_allow(self): port=5439, auto_create=False, db_groups=[], - region="us-east-1", + region=None, timeout=None, 
ssl=True, sslmode="verify-ca", @@ -339,7 +262,7 @@ def test_sslmode_verify_full(self): port=5439, auto_create=False, db_groups=[], - region="us-east-1", + region=None, timeout=None, ssl=True, sslmode="verify-full", @@ -358,7 +281,7 @@ def test_sslmode_verify_ca(self): port=5439, auto_create=False, db_groups=[], - region="us-east-1", + region=None, timeout=None, ssl=True, sslmode="verify-ca", @@ -377,7 +300,7 @@ def test_sslmode_prefer(self): port=5439, auto_create=False, db_groups=[], - region="us-east-1", + region=None, timeout=None, ssl=True, sslmode="verify-ca", @@ -399,7 +322,7 @@ def test_serverless_iam_failure(self): host="doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com", database="redshift", cluster_identifier=None, - region="us-east-2", + region=None, auto_create=False, db_groups=[], db_user="root", From b244c3ab2654f4ac226245f8bb882679e87f9118 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Thu, 15 Jun 2023 11:41:47 -0600 Subject: [PATCH 100/113] Revert "Escape `%` symbols in table/view/column comments (#466)" (#495) This reverts commit adb44828fefff91dd6e97ee20ee8fb7e395a2ece. --- dbt/include/redshift/macros/adapters.sql | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index 34c61a1a2..e38700aa6 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -309,21 +309,3 @@ {% endif %} {% endmacro %} - -{# - By using dollar-quoting like this, users can embed anything they want into their comments - (including nested dollar-quoting), as long as they do not use this exact dollar-quoting - label. It would be nice to just pick a new one but eventually you do have to give up. 
-#} -{% macro postgres_escape_comment(comment) -%} - {% if comment is not string %} - {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %} - {% endif %} - {%- set magic = '$dbt_comment_literal_block$' -%} - {%- if magic in comment -%} - {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%} - {%- endif -%} - {#- -- escape % until the underlying issue is fixed in redshift_connector -#} - {%- set comment = comment|replace("%", "%%") -%} - {{ magic }}{{ comment }}{{ magic }} -{%- endmacro %} From 592905e0d3a555c11a7bb5eec9e3a3aa19031e4c Mon Sep 17 00:00:00 2001 From: Anders Date: Thu, 15 Jun 2023 14:20:45 -0400 Subject: [PATCH 101/113] Update setup.py (#493) Co-authored-by: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> --- .changes/unreleased/Breaking Changes-20230614-144743.yaml | 6 ++++++ setup.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Breaking Changes-20230614-144743.yaml diff --git a/.changes/unreleased/Breaking Changes-20230614-144743.yaml b/.changes/unreleased/Breaking Changes-20230614-144743.yaml new file mode 100644 index 000000000..c83a7db5e --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20230614-144743.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: require latest version of redshift_connector driver +time: 2023-06-14T14:47:43.90505-04:00 +custom: + Author: dataders + Issue: "492" diff --git a/setup.py b/setup.py index edfe926f4..b5c6f9189 100644 --- a/setup.py +++ b/setup.py @@ -85,7 +85,7 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: f"dbt-core~={_core_version()}", f"dbt-postgres~={_core_version()}", "boto3~=1.26.26", - "redshift-connector~=2.0.910", + "redshift-connector~=2.0.911", ], zip_safe=False, classifiers=[ From 97e265a7564c1cac50e127b1878fe1f72a553e93 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: 
Fri, 16 Jun 2023 09:50:43 -0700 Subject: [PATCH 102/113] add dependent_projects_dict to utils.py (#500) * fix utils.py --- tests/unit/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/utils.py b/tests/unit/utils.py index f2ca418e3..d872d50d2 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -75,6 +75,7 @@ def project_from_dict(project, profile, packages=None, selectors=None, cli_vars= project_root=project_root, project_dict=project, packages_dict=packages, + dependent_projects_dict={}, selectors_dict=selectors, ) return partial.render(renderer) From ed833b1dd35922023d703bf138dc57e1d7c10666 Mon Sep 17 00:00:00 2001 From: Github Build Bot Date: Fri, 16 Jun 2023 17:13:23 +0000 Subject: [PATCH 103/113] Bumping version to 1.6.0b4 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.6.0-b4.md | 12 ++++++++++++ .../Breaking Changes-20230614-144743.yaml | 0 .../Fixes-20230612-114853.yaml | 0 CHANGELOG.md | 15 ++++++++++++++- dbt/adapters/redshift/__version__.py | 2 +- 6 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 .changes/1.6.0-b4.md rename .changes/{unreleased => 1.6.0}/Breaking Changes-20230614-144743.yaml (100%) rename .changes/{unreleased => 1.6.0}/Fixes-20230612-114853.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 86668d7f2..669ede06e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.6.0b3 +current_version = 1.6.0b4 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.6.0-b4.md b/.changes/1.6.0-b4.md new file mode 100644 index 000000000..d633633ea --- /dev/null +++ b/.changes/1.6.0-b4.md @@ -0,0 +1,12 @@ +## dbt-redshift 1.6.0-b4 - June 16, 2023 + +### Breaking Changes + +- require latest version of redshift_connector driver ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) + +### Fixes + +- remove requirement for region param 
([#484](https://github.com/dbt-labs/dbt-redshift/issues/484)) + +### Contributors +- [@dataders](https://github.com/dataders) ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) diff --git a/.changes/unreleased/Breaking Changes-20230614-144743.yaml b/.changes/1.6.0/Breaking Changes-20230614-144743.yaml similarity index 100% rename from .changes/unreleased/Breaking Changes-20230614-144743.yaml rename to .changes/1.6.0/Breaking Changes-20230614-144743.yaml diff --git a/.changes/unreleased/Fixes-20230612-114853.yaml b/.changes/1.6.0/Fixes-20230612-114853.yaml similarity index 100% rename from .changes/unreleased/Fixes-20230612-114853.yaml rename to .changes/1.6.0/Fixes-20230612-114853.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a93ee08b..8884aa10c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,20 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.6.0-b4 - June 16, 2023 + +### Breaking Changes + +- require latest version of redshift_connector driver ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) + +### Fixes + +- remove requirement for region param ([#484](https://github.com/dbt-labs/dbt-redshift/issues/484)) + +### Contributors +- [@dataders](https://github.com/dataders) ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) + + ## dbt-redshift 1.6.0-b3 - June 09, 2023 ### Breaking Changes @@ -29,7 +43,6 @@ ### Contributors - [@jiezhen-chen](https://github.com/jiezhen-chen) ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429), [#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) - ## dbt-redshift 1.6.0-b2 - May 25, 2023 ### Fixes diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 0c2870f87..091852496 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.6.0b3" +version = "1.6.0b4" From cf37fa2ec51b2215aa505ffd34b2d6fab782da9d Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 23 Jun 2023 16:51:44 -0400 Subject: [PATCH 104/113] ADAP-2: Materialized Views (#386) * changie * stub materialized view * added test case to verify a materialized view can be created * added materialized view stub, updated test case to use materialized view instead of view * update redshift__get_relations macro to include materialized views * fixed broken unit test setup from recent dbt-core change * broke up the integration tests because we're maxing out the windows free box * ADAP-393: Add configuration change option (#394) * setup most of the ddl, cannot figure out sort and dist keys yet * add autorefresh to the adapter specific config * final draft of change 
capture * config_change_collection > change_changeset * typo; clarified an error message * updated dist parsing for readability (DRY and consistency) * replace rerun with explicit refresh in test * update tox to run integration tests in series within a test class --------- Co-authored-by: Matthew McKnight Co-authored-by: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> --- .../unreleased/Features-20230330-165842.yaml | 6 + .flake8 | 2 + dbt/adapters/redshift/__init__.py | 13 +- dbt/adapters/redshift/column.py | 5 - dbt/adapters/redshift/impl.py | 8 +- dbt/adapters/redshift/relation.py | 108 ++++++- .../redshift/relation_configs/__init__.py | 19 ++ .../redshift/relation_configs/base.py | 70 +++++ .../redshift/relation_configs/dist.py | 164 +++++++++++ .../relation_configs/materialized_view.py | 268 ++++++++++++++++++ .../redshift/relation_configs/policies.py | 19 ++ .../redshift/relation_configs/sort.py | 179 ++++++++++++ dbt/include/redshift/macros/adapters.sql | 15 +- .../materializations/materialized_view.sql | 106 +++++++ dbt/include/redshift/macros/relations.sql | 46 ++- dbt/include/redshift/macros/utils/dateadd.sql | 1 - .../redshift/macros/utils/datediff.sql | 1 - .../redshift/macros/utils/last_day.sql | 1 - setup.py | 2 + .../materialized_view_tests/fixtures.py | 85 ++++++ .../test_materialized_views.py | 239 ++++++++++++++++ tox.ini | 3 +- 22 files changed, 1330 insertions(+), 30 deletions(-) create mode 100644 .changes/unreleased/Features-20230330-165842.yaml delete mode 100644 dbt/adapters/redshift/column.py create mode 100644 dbt/adapters/redshift/relation_configs/__init__.py create mode 100644 dbt/adapters/redshift/relation_configs/base.py create mode 100644 dbt/adapters/redshift/relation_configs/dist.py create mode 100644 dbt/adapters/redshift/relation_configs/materialized_view.py create mode 100644 dbt/adapters/redshift/relation_configs/policies.py create mode 100644 dbt/adapters/redshift/relation_configs/sort.py create mode 
100644 dbt/include/redshift/macros/materializations/materialized_view.sql create mode 100644 tests/functional/adapter/materialized_view_tests/fixtures.py create mode 100644 tests/functional/adapter/materialized_view_tests/test_materialized_views.py diff --git a/.changes/unreleased/Features-20230330-165842.yaml b/.changes/unreleased/Features-20230330-165842.yaml new file mode 100644 index 000000000..4f03197aa --- /dev/null +++ b/.changes/unreleased/Features-20230330-165842.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add support for materialized views +time: 2023-03-30T16:58:42.413699-04:00 +custom: + Author: mikealfare McKnight-42 + Issue: dbt-labs/dbt-core#6911 diff --git a/.flake8 b/.flake8 index bbc3202a0..b08ffcd53 100644 --- a/.flake8 +++ b/.flake8 @@ -12,3 +12,5 @@ ignore = E741, E501, exclude = test +per-file-ignores = + */__init__.py: F401 diff --git a/dbt/adapters/redshift/__init__.py b/dbt/adapters/redshift/__init__.py index 64ac384fe..92ae383e4 100644 --- a/dbt/adapters/redshift/__init__.py +++ b/dbt/adapters/redshift/__init__.py @@ -1,13 +1,14 @@ -from dbt.adapters.redshift.connections import RedshiftConnectionManager # noqa -from dbt.adapters.redshift.connections import RedshiftCredentials -from dbt.adapters.redshift.column import RedshiftColumn # noqa +from dbt.adapters.base import AdapterPlugin + +from dbt.adapters.redshift.connections import ( # noqa: F401 + RedshiftConnectionManager, + RedshiftCredentials, +) from dbt.adapters.redshift.relation import RedshiftRelation # noqa: F401 from dbt.adapters.redshift.impl import RedshiftAdapter +from dbt.include import redshift -from dbt.adapters.base import AdapterPlugin # type: ignore -from dbt.include import redshift # type: ignore - Plugin: AdapterPlugin = AdapterPlugin( adapter=RedshiftAdapter, # type: ignore credentials=RedshiftCredentials, diff --git a/dbt/adapters/redshift/column.py b/dbt/adapters/redshift/column.py deleted file mode 100644 index 4d48746a6..000000000 --- 
a/dbt/adapters/redshift/column.py +++ /dev/null @@ -1,5 +0,0 @@ -from dbt.adapters.base import Column - - -class RedshiftColumn(Column): - pass # redshift does not inherit from postgres here diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 2d7f3a854..0ceb931d0 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -4,15 +4,15 @@ from dbt.adapters.base import PythonJobHelper from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport -from dbt.adapters.sql import SQLAdapter from dbt.adapters.base.meta import available +from dbt.adapters.sql import SQLAdapter from dbt.contracts.connection import AdapterResponse from dbt.contracts.graph.nodes import ConstraintType from dbt.events import AdapterLogger - import dbt.exceptions -from dbt.adapters.redshift import RedshiftConnectionManager, RedshiftRelation, RedshiftColumn +from dbt.adapters.redshift import RedshiftConnectionManager, RedshiftRelation + logger = AdapterLogger("Redshift") @@ -27,13 +27,13 @@ class RedshiftConfig(AdapterConfig): sort: Optional[str] = None bind: Optional[bool] = None backup: Optional[bool] = True + autorefresh: Optional[bool] = False class RedshiftAdapter(SQLAdapter): Relation = RedshiftRelation ConnectionManager = RedshiftConnectionManager connections: RedshiftConnectionManager - Column = RedshiftColumn # type: ignore AdapterSpecificConfigs = RedshiftConfig # type: ignore diff --git a/dbt/adapters/redshift/relation.py b/dbt/adapters/redshift/relation.py index fa9b8e92f..0ef4fe276 100644 --- a/dbt/adapters/redshift/relation.py +++ b/dbt/adapters/redshift/relation.py @@ -1,13 +1,105 @@ from dataclasses import dataclass -from dbt.adapters.postgres.relation import PostgresRelation +from typing import Optional + +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.relation_configs import ( + RelationConfigBase, + RelationConfigChangeAction, + RelationResults, +) +from dbt.context.providers import 
RuntimeConfigObject +from dbt.contracts.graph.nodes import ModelNode +from dbt.contracts.relation import RelationType +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs import ( + RedshiftMaterializedViewConfig, + RedshiftMaterializedViewConfigChangeset, + RedshiftAutoRefreshConfigChange, + RedshiftBackupConfigChange, + RedshiftDistConfigChange, + RedshiftSortConfigChange, + RedshiftIncludePolicy, + RedshiftQuotePolicy, + MAX_CHARACTERS_IN_IDENTIFIER, +) @dataclass(frozen=True, eq=False, repr=False) -class RedshiftRelation(PostgresRelation): - # Override the method in the Postgres Relation because Redshift allows - # longer names: "Be between 1 and 127 bytes in length, not including - # quotation marks for delimited identifiers." - # - # see: https://docs.aws.amazon.com/redshift/latest/dg/r_names.html +class RedshiftRelation(BaseRelation): + include_policy = RedshiftIncludePolicy # type: ignore + quote_policy = RedshiftQuotePolicy # type: ignore + relation_configs = { + RelationType.MaterializedView.value: RedshiftMaterializedViewConfig, + } + + def __post_init__(self): + # Check for length of Redshift table/view names. 
+ # Check self.type to exclude test relation identifiers + if ( + self.identifier is not None + and self.type is not None + and len(self.identifier) > MAX_CHARACTERS_IN_IDENTIFIER + ): + raise DbtRuntimeError( + f"Relation name '{self.identifier}' " + f"is longer than {MAX_CHARACTERS_IN_IDENTIFIER} characters" + ) + def relation_max_name_length(self): - return 127 + return MAX_CHARACTERS_IN_IDENTIFIER + + @classmethod + def from_runtime_config(cls, runtime_config: RuntimeConfigObject) -> RelationConfigBase: + model_node: ModelNode = runtime_config.model + relation_type: str = model_node.config.materialized + + if relation_config := cls.relation_configs.get(relation_type): + return relation_config.from_model_node(model_node) + + raise DbtRuntimeError( + f"from_runtime_config() is not supported for the provided relation type: {relation_type}" + ) + + @classmethod + def materialized_view_config_changeset( + cls, relation_results: RelationResults, runtime_config: RuntimeConfigObject + ) -> Optional[RedshiftMaterializedViewConfigChangeset]: + config_change_collection = RedshiftMaterializedViewConfigChangeset() + + existing_materialized_view = RedshiftMaterializedViewConfig.from_relation_results( + relation_results + ) + new_materialized_view = RedshiftMaterializedViewConfig.from_model_node( + runtime_config.model + ) + assert isinstance(existing_materialized_view, RedshiftMaterializedViewConfig) + assert isinstance(new_materialized_view, RedshiftMaterializedViewConfig) + + if new_materialized_view.autorefresh != existing_materialized_view.autorefresh: + config_change_collection.autorefresh = RedshiftAutoRefreshConfigChange( + action=RelationConfigChangeAction.alter, + context=new_materialized_view.autorefresh, + ) + + if new_materialized_view.backup != existing_materialized_view.backup: + config_change_collection.backup = RedshiftBackupConfigChange( + action=RelationConfigChangeAction.alter, + context=new_materialized_view.backup, + ) + + if new_materialized_view.dist 
!= existing_materialized_view.dist: + config_change_collection.dist = RedshiftDistConfigChange( + action=RelationConfigChangeAction.alter, + context=new_materialized_view.dist, + ) + + if new_materialized_view.sort != existing_materialized_view.sort: + config_change_collection.sort = RedshiftSortConfigChange( + action=RelationConfigChangeAction.alter, + context=new_materialized_view.sort, + ) + + if config_change_collection.has_changes: + return config_change_collection + return None diff --git a/dbt/adapters/redshift/relation_configs/__init__.py b/dbt/adapters/redshift/relation_configs/__init__.py new file mode 100644 index 000000000..26e36c86c --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/__init__.py @@ -0,0 +1,19 @@ +from dbt.adapters.redshift.relation_configs.sort import ( + RedshiftSortConfig, + RedshiftSortConfigChange, +) +from dbt.adapters.redshift.relation_configs.dist import ( + RedshiftDistConfig, + RedshiftDistConfigChange, +) +from dbt.adapters.redshift.relation_configs.materialized_view import ( + RedshiftMaterializedViewConfig, + RedshiftAutoRefreshConfigChange, + RedshiftBackupConfigChange, + RedshiftMaterializedViewConfigChangeset, +) +from dbt.adapters.redshift.relation_configs.policies import ( + RedshiftIncludePolicy, + RedshiftQuotePolicy, + MAX_CHARACTERS_IN_IDENTIFIER, +) diff --git a/dbt/adapters/redshift/relation_configs/base.py b/dbt/adapters/redshift/relation_configs/base.py new file mode 100644 index 000000000..ebbd46b1b --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/base.py @@ -0,0 +1,70 @@ +from dataclasses import dataclass +from typing import Optional + +import agate +from dbt.adapters.base.relation import Policy +from dbt.adapters.relation_configs import ( + RelationConfigBase, + RelationResults, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.contracts.relation import ComponentName + +from dbt.adapters.redshift.relation_configs.policies import ( + RedshiftIncludePolicy, + RedshiftQuotePolicy, 
+) + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftRelationConfigBase(RelationConfigBase): + """ + This base class implements a few boilerplate methods and provides some light structure for Redshift relations. + """ + + @classmethod + def include_policy(cls) -> Policy: + return RedshiftIncludePolicy() + + @classmethod + def quote_policy(cls) -> Policy: + return RedshiftQuotePolicy() + + @classmethod + def from_model_node(cls, model_node: ModelNode) -> "RelationConfigBase": + relation_config = cls.parse_model_node(model_node) + relation = cls.from_dict(relation_config) + return relation + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + raise NotImplementedError( + "`parse_model_node()` needs to be implemented on this RelationConfigBase instance" + ) + + @classmethod + def from_relation_results(cls, relation_results: RelationResults) -> "RelationConfigBase": + relation_config = cls.parse_relation_results(relation_results) + relation = cls.from_dict(relation_config) + return relation + + @classmethod + def parse_relation_results(cls, relation_results: RelationResults) -> dict: + raise NotImplementedError( + "`parse_relation_results()` needs to be implemented on this RelationConfigBase instance" + ) + + @classmethod + def _render_part(cls, component: ComponentName, value: Optional[str]) -> Optional[str]: + if cls.include_policy().get_part(component) and value: + if cls.quote_policy().get_part(component): + return f'"{value}"' + return value.lower() + return None + + @classmethod + def _get_first_row(cls, results: agate.Table) -> agate.Row: + try: + return results.rows[0] + except IndexError: + return agate.Row(values=set()) diff --git a/dbt/adapters/redshift/relation_configs/dist.py b/dbt/adapters/redshift/relation_configs/dist.py new file mode 100644 index 000000000..668f3f65a --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/dist.py @@ -0,0 +1,164 @@ +from dataclasses import dataclass +from typing import 
Optional, Set + +import agate +from dbt.adapters.relation_configs import ( + RelationConfigChange, + RelationConfigChangeAction, + RelationConfigValidationMixin, + RelationConfigValidationRule, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.dataclass_schema import StrEnum +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs.base import RedshiftRelationConfigBase + + +class RedshiftDistStyle(StrEnum): + auto = "auto" + even = "even" + all = "all" + key = "key" + + @classmethod + def default(cls) -> "RedshiftDistStyle": + return cls.auto + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftDistConfig(RedshiftRelationConfigBase, RelationConfigValidationMixin): + """ + This config fallows the specs found here: + https://docs.aws.amazon.com/redshift/latest/dg/r_CREATE_TABLE_NEW.html + + The following parameters are configurable by dbt: + - diststyle: the type of data distribution style to use on the table/materialized view + - distkey: the column to use for the dist key if `dist_style` is `key` + """ + + diststyle: Optional[RedshiftDistStyle] = RedshiftDistStyle.default() + distkey: Optional[str] = None + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + # index rules get run by default with the mixin + return { + RelationConfigValidationRule( + validation_check=not ( + self.diststyle == RedshiftDistStyle.key and self.distkey is None + ), + validation_error=DbtRuntimeError( + "A `RedshiftDistConfig` that specifies a `diststyle` of `key` must provide a value for `distkey`." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.diststyle + in (RedshiftDistStyle.auto, RedshiftDistStyle.even, RedshiftDistStyle.all) + and self.distkey is not None + ), + validation_error=DbtRuntimeError( + "A `RedshiftDistConfig` that specifies a `distkey` must be of `diststyle` `key`." 
+ ), + ), + } + + @classmethod + def from_dict(cls, config_dict) -> "RedshiftDistConfig": + kwargs_dict = { + "diststyle": config_dict.get("diststyle"), + "distkey": config_dict.get("distkey"), + } + dist: "RedshiftDistConfig" = super().from_dict(kwargs_dict) # type: ignore + return dist + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + """ + Translate ModelNode objects from the user-provided config into a standard dictionary. + + Args: + model_node: the description of the distkey and diststyle from the user in this format: + + { + "dist": any("auto", "even", "all") or "" + } + + Returns: a standard dictionary describing this `RedshiftDistConfig` instance + """ + dist = model_node.config.extra.get("dist", "") + + diststyle = dist.lower() + + if diststyle == "": + config = {} + + elif diststyle in ( + RedshiftDistStyle.auto, + RedshiftDistStyle.even, + RedshiftDistStyle.all, + ): + config = {"diststyle": diststyle} + + else: + config = {"diststyle": RedshiftDistStyle.key.value, "distkey": dist} + + return config + + @classmethod + def parse_relation_results(cls, relation_results_entry: agate.Row) -> dict: + """ + Translate agate objects from the database into a standard dictionary. + + Args: + relation_results_entry: the description of the distkey and diststyle from the database in this format: + + agate.Row({ + "diststyle": "", # e.g. EVEN | KEY(column1) | AUTO(ALL) | AUTO(KEY(id)) + }) + + Returns: a standard dictionary describing this `RedshiftDistConfig` instance + """ + dist: str = relation_results_entry.get("diststyle") + + try: + # covers `AUTO`, `ALL`, `EVEN`, `KEY`, '', + diststyle = dist.split("(")[0].lower() + except AttributeError: + # covers None + diststyle = "" + + if dist == "": + config = {} + + elif diststyle == RedshiftDistStyle.key: + open_paren = len("KEY(") + close_paren = -len(")") + distkey = dist[open_paren:close_paren] # e.g. 
KEY(column1) + config = {"diststyle": diststyle, "distkey": distkey} + + else: + config = {"diststyle": diststyle} + + return config + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftDistConfigChange(RelationConfigChange, RelationConfigValidationMixin): + context: RedshiftDistConfig + + @property + def requires_full_refresh(self) -> bool: + return True + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + return { + RelationConfigValidationRule( + validation_check=(self.action == RelationConfigChangeAction.alter), + validation_error=DbtRuntimeError( + "Invalid operation, only `alter` changes are supported for `distkey` / `diststyle`." + ), + ), + } diff --git a/dbt/adapters/redshift/relation_configs/materialized_view.py b/dbt/adapters/redshift/relation_configs/materialized_view.py new file mode 100644 index 000000000..82bc0d084 --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/materialized_view.py @@ -0,0 +1,268 @@ +from dataclasses import dataclass +from typing import Optional, Set + +import agate +from dbt.adapters.relation_configs import ( + RelationResults, + RelationConfigChange, + RelationConfigValidationMixin, + RelationConfigValidationRule, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.contracts.relation import ComponentName +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs.base import RedshiftRelationConfigBase +from dbt.adapters.redshift.relation_configs.dist import ( + RedshiftDistConfig, + RedshiftDistStyle, + RedshiftDistConfigChange, +) +from dbt.adapters.redshift.relation_configs.policies import MAX_CHARACTERS_IN_IDENTIFIER +from dbt.adapters.redshift.relation_configs.sort import ( + RedshiftSortConfig, + RedshiftSortConfigChange, +) + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftMaterializedViewConfig(RedshiftRelationConfigBase, RelationConfigValidationMixin): + """ + This config follow the specs found here: + 
https://docs.aws.amazon.com/redshift/latest/dg/materialized-view-create-sql-command.html + + The following parameters are configurable by dbt: + - mv_name: name of the materialized view + - query: the query that defines the view + - backup: determines if the materialized view is included in automated and manual cluster snapshots + - Note: we cannot currently query this from Redshift, which creates two issues + - a model deployed with this set to False will rebuild every run because the database version will always + look like True + - to deploy this as a change from False to True, a full refresh must be issued since the database version + will always look like True (unless there is another full refresh-triggering change) + - dist: the distribution configuration for the data behind the materialized view, a combination of + a `diststyle` and an optional `distkey` + - Note: the default `diststyle` for materialized views is EVEN, despite the default in general being AUTO + - sort: the sort configuration for the data behind the materialized view, a combination of + a `sortstyle` and an optional `sortkey` + - auto_refresh: specifies whether the materialized view should be automatically refreshed + with latest changes from its base tables + + There are currently no non-configurable parameters. 
+ """ + + mv_name: str + schema_name: str + database_name: str + query: str + backup: bool = True + dist: RedshiftDistConfig = RedshiftDistConfig(diststyle=RedshiftDistStyle.even) + sort: RedshiftSortConfig = RedshiftSortConfig() + autorefresh: bool = False + + @property + def path(self) -> str: + return ".".join( + part + for part in [self.database_name, self.schema_name, self.mv_name] + if part is not None + ) + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + # sort and dist rules get run by default with the mixin + return { + RelationConfigValidationRule( + validation_check=len(self.mv_name or "") <= MAX_CHARACTERS_IN_IDENTIFIER, + validation_error=DbtRuntimeError( + f"The materialized view name is more than {MAX_CHARACTERS_IN_IDENTIFIER} " + f"characters: {self.mv_name}" + ), + ), + RelationConfigValidationRule( + validation_check=self.dist.diststyle != RedshiftDistStyle.auto, + validation_error=DbtRuntimeError( + "Redshift materialized views do not support a `diststyle` of `auto`." + ), + ), + RelationConfigValidationRule( + validation_check=len(self.mv_name if self.mv_name else "") <= 127, + validation_error=DbtRuntimeError( + "Redshift does not support object names longer than 127 characters." 
+ ), + ), + } + + @classmethod + def from_dict(cls, config_dict) -> "RedshiftMaterializedViewConfig": + kwargs_dict = { + "mv_name": cls._render_part(ComponentName.Identifier, config_dict.get("mv_name")), + "schema_name": cls._render_part(ComponentName.Schema, config_dict.get("schema_name")), + "database_name": cls._render_part( + ComponentName.Database, config_dict.get("database_name") + ), + "query": config_dict.get("query"), + "backup": config_dict.get("backup"), + "autorefresh": config_dict.get("autorefresh"), + } + + # this preserves the materialized view-specific default of `even` over the general default of `auto` + if dist := config_dict.get("dist"): + kwargs_dict.update({"dist": RedshiftDistConfig.from_dict(dist)}) + + if sort := config_dict.get("sort"): + kwargs_dict.update({"sort": RedshiftSortConfig.from_dict(sort)}) + + materialized_view: "RedshiftMaterializedViewConfig" = super().from_dict(kwargs_dict) # type: ignore + return materialized_view + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + config_dict = { + "mv_name": model_node.identifier, + "schema_name": model_node.schema, + "database_name": model_node.database, + "backup": model_node.config.get("backup"), + "autorefresh": model_node.config.get("auto_refresh"), + } + + if query := model_node.compiled_code: + config_dict.update({"query": query.strip()}) + + if model_node.config.get("dist"): + config_dict.update({"dist": RedshiftDistConfig.parse_model_node(model_node)}) + + if model_node.config.get("sort"): + config_dict.update({"sort": RedshiftSortConfig.parse_model_node(model_node)}) + + return config_dict + + @classmethod + def parse_relation_results(cls, relation_results: RelationResults) -> dict: + """ + Translate agate objects from the database into a standard dictionary. 
+ + Args: + relation_results: the description of the materialized view from the database in this format: + + { + "materialized_view": agate.Table( + agate.Row({ + "database": "", + "schema": "", + "table": "", + "diststyle": "", # e.g. EVEN | KEY(column1) | AUTO(ALL) | AUTO(KEY(id)), + "sortkey1": "", + "autorefresh: any("t", "f"), + }) + ), + "query": agate.Table( + agate.Row({"definition": "")} + ), + } + + Additional columns in either value is fine, as long as `sortkey` and `sortstyle` are available. + + Returns: a standard dictionary describing this `RedshiftMaterializedViewConfig` instance + """ + materialized_view: agate.Row = cls._get_first_row( + relation_results.get("materialized_view") + ) + query: agate.Row = cls._get_first_row(relation_results.get("query")) + + config_dict = { + "mv_name": materialized_view.get("table"), + "schema_name": materialized_view.get("schema"), + "database_name": materialized_view.get("database"), + "autorefresh": {"t": True, "f": False}.get(materialized_view.get("autorefresh")), + "query": cls._parse_query(query.get("definition")), + } + + # the default for materialized views differs from the default for diststyle in general + # only set it if we got a value + if materialized_view.get("diststyle"): + config_dict.update( + {"dist": RedshiftDistConfig.parse_relation_results(materialized_view)} + ) + + # TODO: this only shows the first column in the sort key + if materialized_view.get("sortkey1"): + config_dict.update( + {"sort": RedshiftSortConfig.parse_relation_results(materialized_view)} + ) + + return config_dict + + @classmethod + def _parse_query(cls, query: str) -> str: + """ + Get the select statement from the materialized view definition in Redshift. 
+ + Args: + query: the `create materialized view` statement from `pg_views`, for example: + + create materialized view my_materialized_view + backup yes + diststyle even + sortkey (id) + auto refresh no + as ( + select * from my_base_table + ); + + Returns: the `select ...` statement, for example: + + select * from my_base_table + + """ + open_paren = query.find("as (") + len("as (") + close_paren = query.find(");") + return query[open_paren:close_paren].strip() + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftAutoRefreshConfigChange(RelationConfigChange): + context: Optional[bool] = None + + @property + def requires_full_refresh(self) -> bool: + return False + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftBackupConfigChange(RelationConfigChange): + context: Optional[bool] = None + + @property + def requires_full_refresh(self) -> bool: + return True + + +@dataclass +class RedshiftMaterializedViewConfigChangeset: + backup: Optional[RedshiftBackupConfigChange] = None + dist: Optional[RedshiftDistConfigChange] = None + sort: Optional[RedshiftSortConfigChange] = None + autorefresh: Optional[RedshiftAutoRefreshConfigChange] = None + + @property + def requires_full_refresh(self) -> bool: + return any( + { + self.autorefresh.requires_full_refresh if self.autorefresh else False, + self.backup.requires_full_refresh if self.backup else False, + self.dist.requires_full_refresh if self.dist else False, + self.sort.requires_full_refresh if self.sort else False, + } + ) + + @property + def has_changes(self) -> bool: + return any( + { + self.backup if self.backup else False, + self.dist if self.dist else False, + self.sort if self.sort else False, + self.autorefresh if self.autorefresh else False, + } + ) diff --git a/dbt/adapters/redshift/relation_configs/policies.py b/dbt/adapters/redshift/relation_configs/policies.py new file mode 100644 index 000000000..7ec8e8acb --- /dev/null +++ 
b/dbt/adapters/redshift/relation_configs/policies.py @@ -0,0 +1,19 @@ +from dataclasses import dataclass + +from dbt.adapters.base.relation import Policy + + +MAX_CHARACTERS_IN_IDENTIFIER = 127 + + +class RedshiftIncludePolicy(Policy): + database: bool = True + schema: bool = True + identifier: bool = True + + +@dataclass +class RedshiftQuotePolicy(Policy): + database: bool = True + schema: bool = True + identifier: bool = True diff --git a/dbt/adapters/redshift/relation_configs/sort.py b/dbt/adapters/redshift/relation_configs/sort.py new file mode 100644 index 000000000..58104b65f --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/sort.py @@ -0,0 +1,179 @@ +from dataclasses import dataclass +from typing import Optional, FrozenSet, Set + +import agate +from dbt.adapters.relation_configs import ( + RelationConfigChange, + RelationConfigChangeAction, + RelationConfigValidationMixin, + RelationConfigValidationRule, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.dataclass_schema import StrEnum +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs.base import RedshiftRelationConfigBase + + +class RedshiftSortStyle(StrEnum): + auto = "auto" + compound = "compound" + interleaved = "interleaved" + + @classmethod + def default(cls) -> "RedshiftSortStyle": + return cls.auto + + @classmethod + def default_with_columns(cls) -> "RedshiftSortStyle": + return cls.compound + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftSortConfig(RedshiftRelationConfigBase, RelationConfigValidationMixin): + """ + This config fallows the specs found here: + https://docs.aws.amazon.com/redshift/latest/dg/r_CREATE_TABLE_NEW.html + + The following parameters are configurable by dbt: + - sort_type: the type of sort key on the table/materialized view + - defaults to `auto` if no sort config information is provided + - defaults to `compound` if columns are provided, but type is omitted + - sort_key: the column(s) to use 
for the sort key; cannot be combined with `sort_type=auto` + """ + + sortstyle: Optional[RedshiftSortStyle] = None + sortkey: Optional[FrozenSet[str]] = None + + def __post_init__(self): + # maintains `frozen=True` while allowing for a variable default on `sort_type` + if self.sortstyle is None and self.sortkey is None: + object.__setattr__(self, "sortstyle", RedshiftSortStyle.default()) + elif self.sortstyle is None: + object.__setattr__(self, "sortstyle", RedshiftSortStyle.default_with_columns()) + super().__post_init__() + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + # index rules get run by default with the mixin + return { + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle == RedshiftSortStyle.auto and self.sortkey is not None + ), + validation_error=DbtRuntimeError( + "A `RedshiftSortConfig` that specifies a `sortkey` does not support the `sortstyle` of `auto`." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle in (RedshiftSortStyle.compound, RedshiftSortStyle.interleaved) + and self.sortkey is None + ), + validation_error=DbtRuntimeError( + "A `sortstyle` of `compound` or `interleaved` requires a `sortkey` to be provided." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle == RedshiftSortStyle.compound + and self.sortkey is not None + and len(self.sortkey) > 400 + ), + validation_error=DbtRuntimeError( + "A compound `sortkey` only supports 400 columns." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle == RedshiftSortStyle.interleaved + and self.sortkey is not None + and len(self.sortkey) > 8 + ), + validation_error=DbtRuntimeError( + "An interleaved `sortkey` only supports 8 columns." 
+ ), + ), + } + + @classmethod + def from_dict(cls, config_dict) -> "RedshiftSortConfig": + kwargs_dict = { + "sortstyle": config_dict.get("sortstyle"), + "sortkey": frozenset(column for column in config_dict.get("sortkey", {})), + } + sort: "RedshiftSortConfig" = super().from_dict(kwargs_dict) # type: ignore + return sort + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + """ + Translate ModelNode objects from the user-provided config into a standard dictionary. + + Args: + model_node: the description of the sortkey and sortstyle from the user in this format: + + { + "sort_key": "" or [""] or ["",...] + "sort_type": any("compound", "interleaved", "auto") + } + + Returns: a standard dictionary describing this `RedshiftSortConfig` instance + """ + config_dict = {} + + if sortstyle := model_node.config.extra.get("sort_type"): + config_dict.update({"sortstyle": sortstyle.lower()}) + + if sortkey := model_node.config.extra.get("sort"): + # we allow users to specify the `sort_key` as a string if it's a single column + if isinstance(sortkey, str): + sortkey = [sortkey] + + config_dict.update({"sortkey": set(sortkey)}) + + return config_dict + + @classmethod + def parse_relation_results(cls, relation_results_entry: agate.Row) -> dict: + """ + Translate agate objects from the database into a standard dictionary. + + Note: + This was only built for materialized views, which does not specify a sortstyle. + Processing of `sortstyle` has been omitted here, which means it's the default (compound). + + Args: + relation_results_entry: the description of the sortkey and sortstyle from the database in this format: + + agate.Row({ + ..., + "sortkey1": "", + ... 
+ }) + + Returns: a standard dictionary describing this `RedshiftSortConfig` instance + """ + if sortkey := relation_results_entry.get("sortkey1"): + return {"sortkey": {sortkey}} + return {} + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftSortConfigChange(RelationConfigChange, RelationConfigValidationMixin): + context: RedshiftSortConfig + + @property + def requires_full_refresh(self) -> bool: + return True + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + return { + RelationConfigValidationRule( + validation_check=(self.action == RelationConfigChangeAction.alter), + validation_error=DbtRuntimeError( + "Invalid operation, only `alter` changes are supported for `sortkey` / `sortstyle`." + ), + ), + } diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index e38700aa6..62813852b 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -238,7 +238,11 @@ '{{ schema_relation.database }}' as database, viewname as name, schemaname as schema, - 'view' as type + case + when definition ilike '%create materialized view%' + then 'materialized_view' + else 'view' + end as type from pg_views where schemaname ilike '{{ schema_relation.schema }}' {% endcall %} @@ -309,3 +313,12 @@ {% endif %} {% endmacro %} + + +{% macro redshift__get_drop_relation_sql(relation) %} + {%- if relation.is_materialized_view -%} + {{ redshift__drop_materialized_view(relation) }} + {%- else -%} + drop {{ relation.type }} if exists {{ relation }} cascade + {%- endif -%} +{% endmacro %} diff --git a/dbt/include/redshift/macros/materializations/materialized_view.sql b/dbt/include/redshift/macros/materializations/materialized_view.sql new file mode 100644 index 000000000..6343c1a7b --- /dev/null +++ b/dbt/include/redshift/macros/materializations/materialized_view.sql @@ -0,0 +1,106 @@ +{% macro redshift__get_alter_materialized_view_as_sql( + relation, + 
configuration_changes, + sql, + existing_relation, + backup_relation, + intermediate_relation +) %} + + -- apply a full refresh immediately if needed + {% if configuration_changes.requires_full_refresh %} + + {{ get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) }} + + -- otherwise apply individual changes as needed + {% else %} + + {%- set autorefresh = configuration_changes.autorefresh -%} + {%- if autorefresh -%}{{- log('Applying UPDATE AUTOREFRESH to: ' ~ relation) -}}{%- endif -%} + + alter materialized view {{ relation }} + auto refresh {% if autorefresh.context %}yes{% else %}no{% endif %} + + {%- endif -%} + +{% endmacro %} + + +{% macro redshift__get_create_materialized_view_as_sql(relation, sql) %} + + {%- set materialized_view = relation.from_runtime_config(config) -%} + + create materialized view {{ materialized_view.path }} + backup {% if materialized_view.backup %}yes{% else %}no{% endif %} + diststyle {{ materialized_view.dist.diststyle }} + {% if materialized_view.dist.distkey %}distkey ({{ materialized_view.dist.distkey }}){% endif %} + {% if materialized_view.sort.sortkey %}sortkey ({{ ','.join(materialized_view.sort.sortkey) }}){% endif %} + auto refresh {% if materialized_view.auto_refresh %}yes{% else %}no{% endif %} + as ( + {{ materialized_view.query }} + ); + +{% endmacro %} + + +{% macro redshift__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} + {{ redshift__get_drop_relation_sql(existing_relation) }}; + {{ get_create_materialized_view_as_sql(relation, sql) }} +{% endmacro %} + + +{% macro redshift__get_materialized_view_configuration_changes(existing_relation, new_config) %} + {% set _existing_materialized_view = redshift__describe_materialized_view(existing_relation) %} + {% set _configuration_changes = existing_relation.materialized_view_config_changeset(_existing_materialized_view, new_config) %} + {% do 
return(_configuration_changes) %} +{% endmacro %} + + +{% macro redshift__refresh_materialized_view(relation) -%} + refresh materialized view {{ relation }} +{% endmacro %} + + +{% macro redshift__describe_materialized_view(relation) %} + {#- + These need to be separate queries because redshift will not let you run queries + against svv_table_info and pg_views in the same query. The same is true of svv_redshift_columns. + -#} + + {%- set _materialized_view_sql -%} + select + tb.database, + tb.schema, + tb.table, + tb.diststyle, + tb.sortkey1, + mv.autorefresh + from svv_table_info tb + left join stv_mv_info mv + on mv.db_name = tb.database + and mv.schema = tb.schema + and mv.name = tb.table + where tb.table ilike '{{ relation.identifier }}' + and tb.schema ilike '{{ relation.schema }}' + and tb.database ilike '{{ relation.database }}' + {%- endset %} + {% set _materialized_view = run_query(_materialized_view_sql) %} + + {%- set _query_sql -%} + select + vw.definition + from pg_views vw + where vw.viewname = '{{ relation.identifier }}' + and vw.schemaname = '{{ relation.schema }}' + and vw.definition ilike '%create materialized view%' + {%- endset %} + {% set _query = run_query(_query_sql) %} + + {% do return({'materialized_view': _materialized_view, 'query': _query}) %} + +{% endmacro %} + + +{% macro redshift__drop_materialized_view(relation) -%} + drop materialized view if exists {{ relation }} +{%- endmacro %} diff --git a/dbt/include/redshift/macros/relations.sql b/dbt/include/redshift/macros/relations.sql index 1a5cd34c4..28c6bc377 100644 --- a/dbt/include/redshift/macros/relations.sql +++ b/dbt/include/redshift/macros/relations.sql @@ -1,3 +1,45 @@ -{% macro redshift__get_relations () -%} - {{ return(dbt.postgres_get_relations()) }} +{% macro redshift__get_relations() -%} + +{%- call statement('relations', fetch_result=True) -%} + +with + relation as ( + select + pg_class.oid as relation_id, + pg_class.relname as relation_name, + pg_class.relnamespace as 
schema_id, + pg_namespace.nspname as schema_name, + pg_class.relkind as relation_type + from pg_class + join pg_namespace + on pg_class.relnamespace = pg_namespace.oid + where pg_namespace.nspname != 'information_schema' + and pg_namespace.nspname not like 'pg\_%' + ), + dependency as ( + select distinct + coalesce(pg_rewrite.ev_class, pg_depend.objid) as dep_relation_id, + pg_depend.refobjid as ref_relation_id, + pg_depend.refclassid as ref_class_id + from pg_depend + left join pg_rewrite + on pg_depend.objid = pg_rewrite.oid + ) + +select distinct + dep.schema_name as dependent_schema, + dep.relation_name as dependent_name, + ref.schema_name as referenced_schema, + ref.relation_name as referenced_name +from dependency +join relation ref + on dependency.ref_relation_id = ref.relation_id +join relation dep + on dependency.dep_relation_id = dep.relation_id +where ref.relation_name != dep.relation_name + +{%- endcall -%} + +{{ return(load_result('relations').table) }} + {% endmacro %} diff --git a/dbt/include/redshift/macros/utils/dateadd.sql b/dbt/include/redshift/macros/utils/dateadd.sql index dc90f9231..ba3e666a3 100644 --- a/dbt/include/redshift/macros/utils/dateadd.sql +++ b/dbt/include/redshift/macros/utils/dateadd.sql @@ -1,4 +1,3 @@ -{#-- redshift should use default instead of postgres --#} {% macro redshift__dateadd(datepart, interval, from_date_or_timestamp) %} dateadd( diff --git a/dbt/include/redshift/macros/utils/datediff.sql b/dbt/include/redshift/macros/utils/datediff.sql index c20513961..1d540b908 100644 --- a/dbt/include/redshift/macros/utils/datediff.sql +++ b/dbt/include/redshift/macros/utils/datediff.sql @@ -1,4 +1,3 @@ -{#-- redshift should use default instead of postgres --#} {% macro redshift__datediff(first_date, second_date, datepart) -%} datediff( diff --git a/dbt/include/redshift/macros/utils/last_day.sql b/dbt/include/redshift/macros/utils/last_day.sql index be0e2253a..8c643644b 100644 --- a/dbt/include/redshift/macros/utils/last_day.sql 
+++ b/dbt/include/redshift/macros/utils/last_day.sql @@ -1,4 +1,3 @@ -{# redshift should use default instead of postgres #} {% macro redshift__last_day(date, datepart) %} cast( {{dbt.dateadd('day', '-1', diff --git a/setup.py b/setup.py index b5c6f9189..56d4fa101 100644 --- a/setup.py +++ b/setup.py @@ -86,6 +86,8 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: f"dbt-postgres~={_core_version()}", "boto3~=1.26.26", "redshift-connector~=2.0.911", + # installed via dbt-core but referenced directly; don't pin to avoid version conflicts with dbt-core + "agate", ], zip_safe=False, classifiers=[ diff --git a/tests/functional/adapter/materialized_view_tests/fixtures.py b/tests/functional/adapter/materialized_view_tests/fixtures.py new file mode 100644 index 000000000..785931c1b --- /dev/null +++ b/tests/functional/adapter/materialized_view_tests/fixtures.py @@ -0,0 +1,85 @@ +import pytest + +from dbt.tests.adapter.materialized_view.base import Base +from dbt.tests.adapter.materialized_view.on_configuration_change import ( + OnConfigurationChangeBase, + get_model_file, + set_model_file, +) +from dbt.tests.util import relation_from_name, run_sql_with_adapter + + +def refresh_materialized_view(project, name: str): + sql = f"refresh materialized view {relation_from_name(project.adapter, name)}" + run_sql_with_adapter(project.adapter, sql) + + +class RedshiftBasicBase(Base): + @pytest.fixture(scope="class") + def models(self): + base_table = """ + {{ config(materialized='table') }} + select 1 as base_column + """ + base_materialized_view = """ + {{ config(materialized='materialized_view') }} + select * from {{ ref('base_table') }} + """ + return {"base_table.sql": base_table, "base_materialized_view.sql": base_materialized_view} + + +class RedshiftOnConfigurationChangeBase(OnConfigurationChangeBase): + @pytest.fixture(scope="class") + def models(self): + base_table = """ + {{ config( + materialized='table', + ) }} + select + 1 as id, + 100 as value + """ + 
base_materialized_view = """ + {{ config( + materialized='materialized_view', + sort='id' + ) }} + select * from {{ ref('base_table') }} + """ + return {"base_table.sql": base_table, "base_materialized_view.sql": base_materialized_view} + + @pytest.fixture(scope="function") + def configuration_changes_apply(self, project): + initial_model = get_model_file(project, "base_materialized_view") + + # turn on auto_refresh + new_model = initial_model.replace( + "materialized='materialized_view',", + "materialized='materialized_view', auto_refresh='yes',", + ) + set_model_file(project, "base_materialized_view", new_model) + + yield + + # set this back for the next test + set_model_file(project, "base_materialized_view", initial_model) + + @pytest.fixture(scope="function") + def configuration_changes_refresh(self, project): + initial_model = get_model_file(project, "base_materialized_view") + + # add a sort_key + new_model = initial_model.replace( + "sort='id'", + "sort='value'", + ) + set_model_file(project, "base_materialized_view", new_model) + + yield + + # set this back for the next test + set_model_file(project, "base_materialized_view", initial_model) + + @pytest.fixture(scope="function") + def update_auto_refresh_message(self, project): + return f"Applying UPDATE AUTOREFRESH to: {relation_from_name(project.adapter, 'base_materialized_view')}" diff --git a/tests/functional/adapter/materialized_view_tests/test_materialized_views.py b/tests/functional/adapter/materialized_view_tests/test_materialized_views.py new file mode 100644 index 000000000..ff63f1e01 --- /dev/null +++ b/tests/functional/adapter/materialized_view_tests/test_materialized_views.py @@ -0,0 +1,239 @@ +import pytest + +from dbt.contracts.graph.model_config import OnConfigurationChangeOption +from dbt.contracts.relation import RelationType +from dbt.contracts.results import RunStatus +from dbt.tests.adapter.materialized_view.base import ( + run_model, + assert_model_exists_and_is_correct_type, + 
insert_record, + get_row_count, +) +from dbt.tests.adapter.materialized_view.on_configuration_change import ( + assert_proper_scenario, +) + +from tests.functional.adapter.materialized_view_tests.fixtures import ( + RedshiftBasicBase, + RedshiftOnConfigurationChangeBase, + refresh_materialized_view, +) + + +class TestBasic(RedshiftBasicBase): + def test_relation_is_materialized_view_on_initial_creation(self, project): + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + assert_model_exists_and_is_correct_type(project, "base_table", RelationType.Table) + + def test_relation_is_materialized_view_when_rerun(self, project): + run_model("base_materialized_view") + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + + def test_relation_is_materialized_view_on_full_refresh(self, project): + run_model("base_materialized_view", full_refresh=True) + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + + def test_relation_is_materialized_view_on_update(self, project): + run_model("base_materialized_view", run_args=["--vars", "quoting: {identifier: True}"]) + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + + def test_updated_base_table_data_only_shows_in_materialized_view_after_refresh(self, project): + # poll database + table_start = get_row_count(project, "base_table") + view_start = get_row_count(project, "base_materialized_view") + assert view_start == table_start + + # insert new record in table + new_record = (2,) + insert_record(project, new_record, "base_table", ["base_column"]) + + # poll database + table_mid = get_row_count(project, "base_table") + view_mid = get_row_count(project, "base_materialized_view") + + # refresh the materialized view + refresh_materialized_view(project, "base_materialized_view") + + # poll 
database + table_end = get_row_count(project, "base_table") + view_end = get_row_count(project, "base_materialized_view") + assert view_end == table_end + + # new records were inserted in the table but didn't show up in the view until it was refreshed + assert table_start < table_mid == table_end + assert view_start == view_mid < view_end + + +class TestOnConfigurationChangeApply(RedshiftOnConfigurationChangeBase): + def test_full_refresh_takes_precedence_over_any_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + replace_message, + configuration_change_message, + ): + results, logs = run_model("base_materialized_view", full_refresh=True) + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[replace_message], + messages_not_in_logs=[configuration_change_message], + ) + + def test_model_is_refreshed_with_no_configuration_changes( + self, refresh_message, configuration_change_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[refresh_message, configuration_change_message], + ) + + def test_model_applies_changes_with_small_configuration_changes( + self, configuration_changes_apply, alter_message, update_auto_refresh_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[alter_message, update_auto_refresh_message], + ) + + def test_model_rebuilds_with_large_configuration_changes( + self, configuration_changes_refresh, alter_message, replace_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[alter_message, replace_message], + ) + + def 
test_model_only_rebuilds_with_large_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + alter_message, + replace_message, + update_auto_refresh_message, + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[alter_message, replace_message], + messages_not_in_logs=[update_auto_refresh_message], + ) + + +class TestOnConfigurationChangeContinue(RedshiftOnConfigurationChangeBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"on_configuration_change": OnConfigurationChangeOption.Continue.value}} + + def test_full_refresh_takes_precedence_over_any_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + replace_message, + configuration_change_message, + ): + results, logs = run_model("base_materialized_view", full_refresh=True) + assert_proper_scenario( + OnConfigurationChangeOption.Continue, + results, + logs, + RunStatus.Success, + messages_in_logs=[replace_message], + messages_not_in_logs=[configuration_change_message], + ) + + def test_model_is_refreshed_with_no_configuration_changes( + self, refresh_message, configuration_change_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Continue, + results, + logs, + RunStatus.Success, + messages_in_logs=[refresh_message, configuration_change_message], + ) + + def test_model_is_skipped_with_configuration_changes( + self, configuration_changes_apply, configuration_change_continue_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Continue, + results, + logs, + RunStatus.Success, + messages_in_logs=[configuration_change_continue_message], + ) + + +class TestOnConfigurationChangeFail(RedshiftOnConfigurationChangeBase): + 
@pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"on_configuration_change": OnConfigurationChangeOption.Fail.value}} + + def test_full_refresh_takes_precedence_over_any_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + replace_message, + configuration_change_message, + ): + results, logs = run_model("base_materialized_view", full_refresh=True) + assert_proper_scenario( + OnConfigurationChangeOption.Fail, + results, + logs, + RunStatus.Success, + messages_in_logs=[replace_message], + messages_not_in_logs=[configuration_change_message], + ) + + def test_model_is_refreshed_with_no_configuration_changes( + self, refresh_message, configuration_change_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Fail, + results, + logs, + RunStatus.Success, + messages_in_logs=[refresh_message, configuration_change_message], + ) + + def test_run_fails_with_configuration_changes( + self, configuration_changes_apply, configuration_change_fail_message + ): + results, logs = run_model("base_materialized_view", expect_pass=False) + assert_proper_scenario( + OnConfigurationChangeOption.Fail, + results, + logs, + RunStatus.Error, + messages_in_logs=[configuration_change_fail_message], + ) diff --git a/tox.ini b/tox.ini index b42bd89b7..285c1e738 100644 --- a/tox.ini +++ b/tox.ini @@ -21,7 +21,8 @@ passenv = REDSHIFT_TEST_* PYTEST_ADDOPTS commands = - redshift: {envpython} -m pytest {posargs} tests/functional + {envpython} -m pytest --dist=loadscope {posargs} tests/functional -k "not tests/functional/adapter/utils" + {envpython} -m pytest --dist=loadscope {posargs} tests/functional/adapter/utils deps = -rdev-requirements.txt -e. 
From befecc187a1f6a7477d9c8f32ad9fa0ba8dc1655 Mon Sep 17 00:00:00 2001 From: Github Build Bot Date: Fri, 23 Jun 2023 20:58:44 +0000 Subject: [PATCH 105/113] Bumping version to 1.6.0b5 and generate changelog --- .bumpversion.cfg | 2 +- .changes/1.6.0-b5.md | 5 +++++ .../{unreleased => 1.6.0}/Features-20230330-165842.yaml | 0 CHANGELOG.md | 9 ++++++++- dbt/adapters/redshift/__version__.py | 2 +- 5 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 .changes/1.6.0-b5.md rename .changes/{unreleased => 1.6.0}/Features-20230330-165842.yaml (100%) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 669ede06e..46022b789 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.6.0b4 +current_version = 1.6.0b5 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.6.0-b5.md b/.changes/1.6.0-b5.md new file mode 100644 index 000000000..3263cb06e --- /dev/null +++ b/.changes/1.6.0-b5.md @@ -0,0 +1,5 @@ +## dbt-redshift 1.6.0-b5 - June 23, 2023 + +### Features + +- Add support for materialized views ([#dbt-labs/dbt-core#6911](https://github.com/dbt-labs/dbt-redshift/issues/dbt-labs/dbt-core#6911)) diff --git a/.changes/unreleased/Features-20230330-165842.yaml b/.changes/1.6.0/Features-20230330-165842.yaml similarity index 100% rename from .changes/unreleased/Features-20230330-165842.yaml rename to .changes/1.6.0/Features-20230330-165842.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 8884aa10c..dd2d5cafc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,14 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-redshift 1.6.0-b5 - June 23, 2023 + +### Features + +- Add support for materialized views ([#dbt-labs/dbt-core#6911](https://github.com/dbt-labs/dbt-redshift/issues/dbt-labs/dbt-core#6911)) + + + ## dbt-redshift 1.6.0-b4 - June 16, 2023 ### Breaking Changes @@ -18,7 +26,6 @@ ### Contributors - [@dataders](https://github.com/dataders) ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) - ## dbt-redshift 1.6.0-b3 - June 09, 2023 ### Breaking Changes diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 091852496..da9f11e34 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.6.0b4" +version = "1.6.0b5" From 36bed56310645669a9bf20b882e6aed51bf1b98a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 24 Jun 2023 13:57:18 -0400 Subject: [PATCH 106/113] Bump mypy from 1.2.0 to 1.4.0 (#503) * Bump mypy from 1.2.0 to 1.4.0 Bumps [mypy](https://github.com/python/mypy) from 1.2.0 to 1.4.0. - [Commits](https://github.com/python/mypy/compare/v1.2.0...v1.4.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Add automated changelog yaml from template for bot PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Github Build Bot Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Mike Alfare --- .changes/unreleased/Dependencies-20230620-195844.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230620-195844.yaml diff --git a/.changes/unreleased/Dependencies-20230620-195844.yaml b/.changes/unreleased/Dependencies-20230620-195844.yaml new file mode 100644 index 000000000..86b482554 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230620-195844.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump mypy from 1.2.0 to 1.4.0" +time: 2023-06-20T19:58:44.00000Z +custom: + Author: dependabot[bot] + PR: 503 diff --git a/dev-requirements.txt b/dev-requirements.txt index e9e4083c5..73b44be2e 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -13,7 +13,7 @@ flake8~=6.0 flaky~=3.7 freezegun~=1.2 ipdb~=0.13.13 -mypy==1.2.0 # patch updates have historically introduced breaking changes +mypy==1.4.0 # patch updates have historically introduced breaking changes pip-tools~=6.13 pre-commit~=3.2 pre-commit-hooks~=4.4 From 99d8d9a7791750bd71ff902d42de618a66e1d20c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 24 Jun 2023 14:27:39 -0400 Subject: [PATCH 107/113] Update types-requests requirement from ~=2.28 to ~=2.31 (#461) * Update types-requests requirement from ~=2.28 to ~=2.31 Updates the requirements on [types-requests](https://github.com/python/typeshed) to permit the latest version. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] * Add automated changelog yaml from template for bot PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Github Build Bot Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Mike Alfare --- .changes/unreleased/Dependencies-20230523-195945.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230523-195945.yaml diff --git a/.changes/unreleased/Dependencies-20230523-195945.yaml b/.changes/unreleased/Dependencies-20230523-195945.yaml new file mode 100644 index 000000000..c2a8a363d --- /dev/null +++ b/.changes/unreleased/Dependencies-20230523-195945.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update types-requests requirement from ~=2.28 to ~=2.31" +time: 2023-05-23T19:59:45.00000Z +custom: + Author: dependabot[bot] + PR: 461 diff --git a/dev-requirements.txt b/dev-requirements.txt index 73b44be2e..910eeb7e0 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -25,6 +25,6 @@ pytest-xdist~=3.2 pytz~=2023.3 tox~=4.5 types-pytz~=2023.3 -types-requests~=2.28 +types-requests~=2.31 twine~=4.0 wheel~=0.40 From f99a9f3a5589117c9226b60f2c35082c0144995b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 10:34:34 -0400 Subject: [PATCH 108/113] Update pre-commit requirement from ~=2.21 to ~=3.3 (#426) * Update pre-commit requirement from ~=2.21 to ~=3.3 Updates the requirements on [pre-commit](https://github.com/pre-commit/pre-commit) to permit the latest version. 
- [Release notes](https://github.com/pre-commit/pre-commit/releases) - [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) - [Commits](https://github.com/pre-commit/pre-commit/commits/v3.3.1) --- updated-dependencies: - dependency-name: pre-commit dependency-type: direct:development ... Signed-off-by: dependabot[bot] * Add automated changelog yaml from template for bot PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Github Build Bot Co-authored-by: Mike Alfare --- .changes/unreleased/Dependencies-20230624-182902.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230624-182902.yaml diff --git a/.changes/unreleased/Dependencies-20230624-182902.yaml b/.changes/unreleased/Dependencies-20230624-182902.yaml new file mode 100644 index 000000000..763f74040 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230624-182902.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update pre-commit requirement from ~=2.21 to ~=3.3" +time: 2023-06-24T18:29:02.00000Z +custom: + Author: dependabot[bot] + PR: 426 diff --git a/dev-requirements.txt b/dev-requirements.txt index 910eeb7e0..e2fb08e0f 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -15,7 +15,7 @@ freezegun~=1.2 ipdb~=0.13.13 mypy==1.4.0 # patch updates have historically introduced breaking changes pip-tools~=6.13 -pre-commit~=3.2 +pre-commit~=3.3 pre-commit-hooks~=4.4 pytest~=7.3 pytest-csv~=3.0 From 8d8c1b8cbe74d1f6a47d739c11a5179d791ab219 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 11:15:55 -0400 Subject: [PATCH 109/113] Update tox requirement from ~=4.5 to ~=4.6 (#481) * Update tox requirement from ~=4.5 to ~=4.6 Updates the requirements on 
[tox](https://github.com/tox-dev/tox) to permit the latest version. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.5.0...4.6.0) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development ... Signed-off-by: dependabot[bot] * Add automated changelog yaml from template for bot PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Github Build Bot Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Mike Alfare --- .changes/unreleased/Dependencies-20230605-195944.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230605-195944.yaml diff --git a/.changes/unreleased/Dependencies-20230605-195944.yaml b/.changes/unreleased/Dependencies-20230605-195944.yaml new file mode 100644 index 000000000..52e3ab254 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230605-195944.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update tox requirement from ~=4.5 to ~=4.6" +time: 2023-06-05T19:59:44.00000Z +custom: + Author: dependabot[bot] + PR: 481 diff --git a/dev-requirements.txt b/dev-requirements.txt index e2fb08e0f..068fb255b 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -23,7 +23,7 @@ pytest-dotenv~=0.5.2 pytest-logbook~=1.2 pytest-xdist~=3.2 pytz~=2023.3 -tox~=4.5 +tox~=4.6 types-pytz~=2023.3 types-requests~=2.31 twine~=4.0 From b8034ba696e9c97def730d01e804daa8164a7426 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 13:28:02 -0400 Subject: [PATCH 110/113] Update pytest-xdist requirement from ~=3.2 to ~=3.3 (#449) * Update pytest-xdist requirement from ~=3.2 to ~=3.3 Updates the requirements on 
[pytest-xdist](https://github.com/pytest-dev/pytest-xdist) to permit the latest version. - [Changelog](https://github.com/pytest-dev/pytest-xdist/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-xdist/compare/v3.2.0...v3.3.0) --- updated-dependencies: - dependency-name: pytest-xdist dependency-type: direct:development ... Signed-off-by: dependabot[bot] * Add automated changelog yaml from template for bot PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Github Build Bot Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Mike Alfare --- .changes/unreleased/Dependencies-20230515-195952.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230515-195952.yaml diff --git a/.changes/unreleased/Dependencies-20230515-195952.yaml b/.changes/unreleased/Dependencies-20230515-195952.yaml new file mode 100644 index 000000000..6cfb3a14c --- /dev/null +++ b/.changes/unreleased/Dependencies-20230515-195952.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update pytest-xdist requirement from ~=3.2 to ~=3.3" +time: 2023-05-15T19:59:52.00000Z +custom: + Author: dependabot[bot] + PR: 449 diff --git a/dev-requirements.txt b/dev-requirements.txt index 068fb255b..2a575d6c8 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -21,7 +21,7 @@ pytest~=7.3 pytest-csv~=3.0 pytest-dotenv~=0.5.2 pytest-logbook~=1.2 -pytest-xdist~=3.2 +pytest-xdist~=3.3 pytz~=2023.3 tox~=4.6 types-pytz~=2023.3 From 405917da3a88e5a4ae37f792aeb1cae0e36727e4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 00:31:47 -0400 Subject: [PATCH 111/113] Bump mypy from 1.4.0 to 1.4.1 (#508) * Bump mypy from 1.4.0 to 1.4.1 Bumps [mypy](https://github.com/python/mypy) from 1.4.0 to 1.4.1. 
- [Commits](https://github.com/python/mypy/compare/v1.4.0...v1.4.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Add automated changelog yaml from template for bot PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Github Build Bot Co-authored-by: Mike Alfare --- .changes/unreleased/Dependencies-20230626-195934.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230626-195934.yaml diff --git a/.changes/unreleased/Dependencies-20230626-195934.yaml b/.changes/unreleased/Dependencies-20230626-195934.yaml new file mode 100644 index 000000000..4bb681e34 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230626-195934.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump mypy from 1.4.0 to 1.4.1" +time: 2023-06-26T19:59:34.00000Z +custom: + Author: dependabot[bot] + PR: 508 diff --git a/dev-requirements.txt b/dev-requirements.txt index 2a575d6c8..3340ccf5c 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -13,7 +13,7 @@ flake8~=6.0 flaky~=3.7 freezegun~=1.2 ipdb~=0.13.13 -mypy==1.4.0 # patch updates have historically introduced breaking changes +mypy==1.4.1 # patch updates have historically introduced breaking changes pip-tools~=6.13 pre-commit~=3.3 pre-commit-hooks~=4.4 From f46e545162046848041d2907890bc937a96dddd8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 11:28:48 -0400 Subject: [PATCH 112/113] Update pytest requirement from ~=7.3 to ~=7.4 (#507) * Update pytest requirement from ~=7.3 to ~=7.4 Updates the requirements on [pytest](https://github.com/pytest-dev/pytest) to permit the latest version. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.3.0...7.4.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development ... Signed-off-by: dependabot[bot] * Add automated changelog yaml from template for bot PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Github Build Bot Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Mike Alfare --- .changes/unreleased/Dependencies-20230626-195917.yaml | 6 ++++++ dev-requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20230626-195917.yaml diff --git a/.changes/unreleased/Dependencies-20230626-195917.yaml b/.changes/unreleased/Dependencies-20230626-195917.yaml new file mode 100644 index 000000000..c1dcdc504 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230626-195917.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update pytest requirement from ~=7.3 to ~=7.4" +time: 2023-06-26T19:59:17.00000Z +custom: + Author: dependabot[bot] + PR: 507 diff --git a/dev-requirements.txt b/dev-requirements.txt index 3340ccf5c..bed71ec05 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -17,7 +17,7 @@ mypy==1.4.1 # patch updates have historically introduced breaking changes pip-tools~=6.13 pre-commit~=3.3 pre-commit-hooks~=4.4 -pytest~=7.3 +pytest~=7.4 pytest-csv~=3.0 pytest-dotenv~=0.5.2 pytest-logbook~=1.2 From 607c30922a3c14ecce9ff5e6388812664850bd53 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 28 Jun 2023 13:25:51 -0400 Subject: [PATCH 113/113] remove dependent_projects argument from PartialProject call in unit tests (#509) --- .changes/unreleased/Fixes-20230628-123227.yaml | 6 
++++++ tests/unit/utils.py | 1 - 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Fixes-20230628-123227.yaml diff --git a/.changes/unreleased/Fixes-20230628-123227.yaml b/.changes/unreleased/Fixes-20230628-123227.yaml new file mode 100644 index 000000000..4ece8f404 --- /dev/null +++ b/.changes/unreleased/Fixes-20230628-123227.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Remove dependent_projects argument from PartialProject call in unit tests +time: 2023-06-28T12:32:27.637669-04:00 +custom: + Author: mikealfare + Issue: "7955" diff --git a/tests/unit/utils.py b/tests/unit/utils.py index d872d50d2..f2ca418e3 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -75,7 +75,6 @@ def project_from_dict(project, profile, packages=None, selectors=None, cli_vars= project_root=project_root, project_dict=project, packages_dict=packages, - dependent_projects_dict={}, selectors_dict=selectors, ) return partial.render(renderer)