diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 1d6478936..46022b789 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,16 +1,24 @@ [bumpversion] -current_version = 1.4.0b1 -parse = (?P<major>\d+) - \.(?P<minor>\d+) - \.(?P<patch>\d+) - ((?P<prerelease>a|b|rc)(?P<num>\d+))? +current_version = 1.6.0b5 +parse = (?P<major>[\d]+) # major version number + \.(?P<minor>[\d]+) # minor version number + \.(?P<patch>[\d]+) # patch version number + (?P<prerelease> # optional pre-release - ex: a1, b2, rc25 + (?P<prekind>a|b|rc) # pre-release type + (?P<num>[\d]+) # pre-release version number + )? + ( # optional nightly release indicator + \.(?P<nightly>dev[0-9]+) # ex: .dev02142023 + )? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0` serialize = - {major}.{minor}.{patch}{prerelease}{num} + {major}.{minor}.{patch}{prekind}{num}.{nightly} + {major}.{minor}.{patch}.{nightly} + {major}.{minor}.{patch}{prekind}{num} {major}.{minor}.{patch} commit = False tag = False -[bumpversion:part:prerelease] +[bumpversion:part:prekind] first_value = a optional_value = final values = @@ -22,6 +30,6 @@ values = [bumpversion:part:num] first_value = 1 -[bumpversion:file:setup.py] +[bumpversion:part:nightly] [bumpversion:file:dbt/adapters/redshift/__version__.py] diff --git a/.changes/0.0.0.md b/.changes/0.0.0.md index 25dc29408..4c3cb6b65 100644 --- a/.changes/0.0.0.md +++ b/.changes/0.0.0.md @@ -1,5 +1,7 @@ ## Previous Releases For information on prior major and minor releases, see their changelogs: +- [1.5](https://github.com/dbt-labs/dbt-redshift/blob/1.5.latest/CHANGELOG.md) +- [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-redshift/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-redshift/blob/1.2.latest/CHANGELOG.md) - [1.1](https://github.com/dbt-labs/dbt-redshift/blob/1.1.latest/CHANGELOG.md) diff --git a/.changes/1.4.0-b1.md b/.changes/1.4.0-b1.md deleted file mode 100644 index 2376b7759..000000000 --- a/.changes/1.4.0-b1.md +++ /dev/null @@ -1,3 +0,0 @@ -## dbt-redshift 1.4.0-b1 - December 15, 2022 -### Features -- Migrate dbt-utils current_timestamp macros into core + adapters ([#194](https://github.com/dbt-labs/dbt-redshift/issues/194), [#191](https://github.com/dbt-labs/dbt-redshift/pull/191)) diff --git a/.changes/1.4.0/Under the Hood-20220926-101606.yaml b/.changes/1.4.0/Under the Hood-20220926-101606.yaml deleted file mode 100644 index 3b48d2315..000000000 --- a/.changes/1.4.0/Under the Hood-20220926-101606.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Migrate dbt-utils current_timestamp macros into core + adapters -time: 2022-09-26T10:16:06.676737-07:00 -custom: - Author: colin-rogers-dbt - Issue: "194" - PR: "191" diff --git a/.changes/1.6.0-a1.md b/.changes/1.6.0-a1.md new file mode 100644 index 000000000..57e14b79c --- /dev/null +++ b/.changes/1.6.0-a1.md @@ -0,0 +1 @@ +## dbt-redshift 1.6.0-a1 - April 17, 2023 diff --git a/.changes/1.6.0-b1.md b/.changes/1.6.0-b1.md new file mode 100644 index 000000000..07e7c693a --- /dev/null +++ b/.changes/1.6.0-b1.md @@ -0,0 +1,10 @@ +## dbt-redshift 1.6.0-b1 - May 12, 2023 + +### Fixes + +- Adding region as independent param in profiles ([#419](https://github.com/dbt-labs/dbt-redshift/issues/419)) +- Update signature for execute method ([#'](https://github.com/dbt-labs/dbt-redshift/issues/'), [#'](https://github.com/dbt-labs/dbt-redshift/issues/')) + +### Dependencies + +- Update pytest requirement from ~=7.2 to ~=7.3 
([#414](https://github.com/dbt-labs/dbt-redshift/pull/414)) diff --git a/.changes/1.6.0-b2.md b/.changes/1.6.0-b2.md new file mode 100644 index 000000000..d0f1d2871 --- /dev/null +++ b/.changes/1.6.0-b2.md @@ -0,0 +1,11 @@ +## dbt-redshift 1.6.0-b2 - May 25, 2023 + +### Fixes + +- Fix redshift_connector issue of timing out after 30s ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) +- Add a new connection param to reenable certain Redshift commands in macros. ([#463](https://github.com/dbt-labs/dbt-redshift/issues/463)) +- Escape `%` symbols in table/view/column comments ([#441](https://github.com/dbt-labs/dbt-redshift/issues/441)) +- Use smaller default batch size for seeds ([#347](https://github.com/dbt-labs/dbt-redshift/issues/347)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) diff --git a/.changes/1.6.0-b3.md b/.changes/1.6.0-b3.md new file mode 100644 index 000000000..31cc589a9 --- /dev/null +++ b/.changes/1.6.0-b3.md @@ -0,0 +1,23 @@ +## dbt-redshift 1.6.0-b3 - June 09, 2023 + +### Breaking Changes + +- sslmode behavior has changed. To connect without ssl, set sslmode = disable. To connect using ssl, set sslmode to verify-ca, or verify-full. ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Drop support for python 3.7 ([#dbt-core/7082](https://github.com/dbt-labs/dbt-redshift/issues/dbt-core/7082)) + +### Features + +- Standardize the _connection_keys and debug_query for `dbt debug`. ([#PR754](https://github.com/dbt-labs/dbt-redshift/issues/PR754)) + +### Fixes + +- translate psycopg2 sslmode to ssl and sslmode in redshift_connector ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Get autocommit on by default to restore old semantics users had relied on prior to 1.5. Add tests. 
([#425](https://github.com/dbt-labs/dbt-redshift/issues/425)) +- remove dependency on postgres__list_relations_without_caching macro ([#488](https://github.com/dbt-labs/dbt-redshift/issues/488)) + +### Under the Hood + +- test constraint rendering of foreign key and unique constraints ([#7417](https://github.com/dbt-labs/dbt-redshift/issues/7417)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429), [#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) diff --git a/.changes/1.6.0-b4.md b/.changes/1.6.0-b4.md new file mode 100644 index 000000000..d633633ea --- /dev/null +++ b/.changes/1.6.0-b4.md @@ -0,0 +1,12 @@ +## dbt-redshift 1.6.0-b4 - June 16, 2023 + +### Breaking Changes + +- require latest version of redshift_connector driver ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) + +### Fixes + +- remove requirement for region param ([#484](https://github.com/dbt-labs/dbt-redshift/issues/484)) + +### Contributors +- [@dataders](https://github.com/dataders) ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) diff --git a/.changes/1.6.0-b5.md b/.changes/1.6.0-b5.md new file mode 100644 index 000000000..3263cb06e --- /dev/null +++ b/.changes/1.6.0-b5.md @@ -0,0 +1,5 @@ +## dbt-redshift 1.6.0-b5 - June 23, 2023 + +### Features + +- Add support for materialized views ([#dbt-labs/dbt-core#6911](https://github.com/dbt-labs/dbt-redshift/issues/dbt-labs/dbt-core#6911)) diff --git a/.changes/1.6.0/Breaking Changes-20230522-111945.yaml b/.changes/1.6.0/Breaking Changes-20230522-111945.yaml new file mode 100644 index 000000000..63a03b3ba --- /dev/null +++ b/.changes/1.6.0/Breaking Changes-20230522-111945.yaml @@ -0,0 +1,7 @@ +kind: Breaking Changes +body: sslmode behavior has changed. To connect without ssl, set sslmode = disable. + To connect using ssl, set sslmode to verify-ca, or verify-full. 
+time: 2023-05-22T11:19:45.927903-07:00 +custom: + Author: jiezhen-chen + Issue: "429" diff --git a/.changes/1.6.0/Breaking Changes-20230530-165542.yaml b/.changes/1.6.0/Breaking Changes-20230530-165542.yaml new file mode 100644 index 000000000..68b4bfdda --- /dev/null +++ b/.changes/1.6.0/Breaking Changes-20230530-165542.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Drop support for python 3.7 +time: 2023-05-30T16:55:42.393416-04:00 +custom: + Author: mikealfare + Issue: dbt-core/7082 diff --git a/.changes/1.6.0/Breaking Changes-20230614-144743.yaml b/.changes/1.6.0/Breaking Changes-20230614-144743.yaml new file mode 100644 index 000000000..c83a7db5e --- /dev/null +++ b/.changes/1.6.0/Breaking Changes-20230614-144743.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: require latest version of redshift_connector driver +time: 2023-06-14T14:47:43.90505-04:00 +custom: + Author: dataders + Issue: "492" diff --git a/.changes/1.6.0/Dependencies-20230421-032407.yaml b/.changes/1.6.0/Dependencies-20230421-032407.yaml new file mode 100644 index 000000000..5b08ed592 --- /dev/null +++ b/.changes/1.6.0/Dependencies-20230421-032407.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: "Update pytest requirement from ~=7.2 to ~=7.3" +time: 2023-04-21T03:24:07.00000Z +custom: + Author: mikealfare + PR: 414 diff --git a/.changes/1.6.0/Features-20230330-165842.yaml b/.changes/1.6.0/Features-20230330-165842.yaml new file mode 100644 index 000000000..4f03197aa --- /dev/null +++ b/.changes/1.6.0/Features-20230330-165842.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add support for materialized views +time: 2023-03-30T16:58:42.413699-04:00 +custom: + Author: mikealfare McKnight-42 + Issue: dbt-labs/dbt-core#6911 diff --git a/.changes/1.6.0/Features-20230604-041410.yaml b/.changes/1.6.0/Features-20230604-041410.yaml new file mode 100644 index 000000000..e617e3561 --- /dev/null +++ b/.changes/1.6.0/Features-20230604-041410.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Standardize the _connection_keys and debug_query for `dbt debug`. 
+time: 2023-06-04T04:14:10.191263-07:00 +custom: + Author: versusfacit + Issue: PR754 diff --git a/.changes/1.6.0/Fixes-20230428-142321.yaml b/.changes/1.6.0/Fixes-20230428-142321.yaml new file mode 100644 index 000000000..c7fefda43 --- /dev/null +++ b/.changes/1.6.0/Fixes-20230428-142321.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Adding region as independent param in profiles +time: 2023-04-28T14:23:21.041865-07:00 +custom: + Author: nssalian + Issue: "419" diff --git a/.changes/1.6.0/Fixes-20230508-094834.yaml b/.changes/1.6.0/Fixes-20230508-094834.yaml new file mode 100644 index 000000000..378c428e3 --- /dev/null +++ b/.changes/1.6.0/Fixes-20230508-094834.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix redshift_connector issue of timing out after 30s +time: 2023-05-08T09:48:34.019843-07:00 +custom: + Author: jiezhen-chen + Issue: "427" diff --git a/.changes/1.6.0/Fixes-20230509-143721.yaml b/.changes/1.6.0/Fixes-20230509-143721.yaml new file mode 100644 index 000000000..6d80abf7c --- /dev/null +++ b/.changes/1.6.0/Fixes-20230509-143721.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Update signature for execute method +time: 2023-05-09T14:37:21.163869-07:00 +custom: + Author: nssalian + Issue: ''' ''' diff --git a/.changes/1.6.0/Fixes-20230512-082027.yaml b/.changes/1.6.0/Fixes-20230512-082027.yaml new file mode 100644 index 000000000..7116257d8 --- /dev/null +++ b/.changes/1.6.0/Fixes-20230512-082027.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: translate psycopg2 sslmode to ssl and sslmode in redshift_connector +time: 2023-05-12T08:20:27.486301-07:00 +custom: + Author: jiezhen-chen + Issue: "429" diff --git a/.changes/1.6.0/Fixes-20230520-043039.yaml b/.changes/1.6.0/Fixes-20230520-043039.yaml new file mode 100644 index 000000000..1cb97526e --- /dev/null +++ b/.changes/1.6.0/Fixes-20230520-043039.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Add a new connection param to reenable certain Redshift commands in macros. +time: 2023-05-20T04:30:39.358755-07:00 +custom: + Author: versusfacit + Issue: "463" diff --git a/.changes/1.6.0/Fixes-20230524-151825.yaml b/.changes/1.6.0/Fixes-20230524-151825.yaml new file mode 100644 index 000000000..bed2f78e4 --- /dev/null +++ b/.changes/1.6.0/Fixes-20230524-151825.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Escape `%` symbols in table/view/column comments +time: 2023-05-24T15:18:25.834088-06:00 +custom: + Author: dbeatty10 + Issue: "441" diff --git a/.changes/1.6.0/Fixes-20230524-165236.yaml b/.changes/1.6.0/Fixes-20230524-165236.yaml new file mode 100644 index 000000000..083191405 --- /dev/null +++ b/.changes/1.6.0/Fixes-20230524-165236.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Use smaller default batch size for seeds +time: 2023-05-24T16:52:36.915348-06:00 +custom: + Author: dbeatty10 + Issue: "347" diff --git a/.changes/1.6.0/Fixes-20230531-153347.yaml b/.changes/1.6.0/Fixes-20230531-153347.yaml new file mode 100644 index 000000000..4c4d324ad --- /dev/null +++ b/.changes/1.6.0/Fixes-20230531-153347.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Get autocommit on by default to restore old semantics users had relied on prior + to 1.5. Add tests. 
+time: 2023-05-31T15:33:47.180508-07:00 +custom: + Author: versusfacit + Issue: "425" diff --git a/.changes/1.6.0/Fixes-20230608-180130.yaml b/.changes/1.6.0/Fixes-20230608-180130.yaml new file mode 100644 index 000000000..47dfda72c --- /dev/null +++ b/.changes/1.6.0/Fixes-20230608-180130.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: remove dependency on postgres__list_relations_without_caching macro +time: 2023-06-08T18:01:30.954976-07:00 +custom: + Author: colin-rogers-dbt + Issue: "488" diff --git a/.changes/1.6.0/Fixes-20230612-114853.yaml b/.changes/1.6.0/Fixes-20230612-114853.yaml new file mode 100644 index 000000000..1650c5704 --- /dev/null +++ b/.changes/1.6.0/Fixes-20230612-114853.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: remove requirement for region param +time: 2023-06-12T11:48:53.980327-07:00 +custom: + Author: colin-rogers-dbt + Issue: "484" diff --git a/.changes/1.6.0/Under the Hood-20230511-162623.yaml b/.changes/1.6.0/Under the Hood-20230511-162623.yaml new file mode 100644 index 000000000..9166641af --- /dev/null +++ b/.changes/1.6.0/Under the Hood-20230511-162623.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: test constraint rendering of foreign key and unique constraints +time: 2023-05-11T16:26:23.49096-04:00 +custom: + Author: michelleark + Issue: "7417" diff --git a/.changes/unreleased/Dependencies-20221209-233905.yaml b/.changes/unreleased/Dependencies-20221209-233905.yaml deleted file mode 100644 index 31b06b6ac..000000000 --- a/.changes/unreleased/Dependencies-20221209-233905.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Dependencies -body: Add support for python 3.11 -time: 2022-12-09T23:39:05.296196-05:00 -custom: - Author: mikealfare - Issue: "225" - PR: "236" diff --git a/.changes/unreleased/Dependencies-20230515-195952.yaml b/.changes/unreleased/Dependencies-20230515-195952.yaml new file mode 100644 index 000000000..6cfb3a14c --- /dev/null +++ b/.changes/unreleased/Dependencies-20230515-195952.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update pytest-xdist requirement from ~=3.2 to ~=3.3" +time: 2023-05-15T19:59:52.00000Z +custom: + Author: dependabot[bot] + PR: 449 diff --git a/.changes/unreleased/Dependencies-20230523-195945.yaml b/.changes/unreleased/Dependencies-20230523-195945.yaml new file mode 100644 index 000000000..c2a8a363d --- /dev/null +++ b/.changes/unreleased/Dependencies-20230523-195945.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update types-requests requirement from ~=2.28 to ~=2.31" +time: 2023-05-23T19:59:45.00000Z +custom: + Author: dependabot[bot] + PR: 461 diff --git a/.changes/unreleased/Dependencies-20230605-195944.yaml b/.changes/unreleased/Dependencies-20230605-195944.yaml new file mode 100644 index 000000000..52e3ab254 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230605-195944.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update tox requirement from ~=4.5 to ~=4.6" +time: 2023-06-05T19:59:44.00000Z +custom: + Author: dependabot[bot] + PR: 481 diff --git a/.changes/unreleased/Dependencies-20230620-195844.yaml b/.changes/unreleased/Dependencies-20230620-195844.yaml new file mode 100644 index 000000000..86b482554 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230620-195844.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump mypy from 1.2.0 to 1.4.0" +time: 2023-06-20T19:58:44.00000Z +custom: + Author: dependabot[bot] + PR: 503 diff --git a/.changes/unreleased/Dependencies-20230624-182902.yaml b/.changes/unreleased/Dependencies-20230624-182902.yaml new file mode 100644 index 000000000..763f74040 --- 
/dev/null +++ b/.changes/unreleased/Dependencies-20230624-182902.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update pre-commit requirement from ~=2.21 to ~=3.3" +time: 2023-06-24T18:29:02.00000Z +custom: + Author: dependabot[bot] + PR: 426 diff --git a/.changes/unreleased/Dependencies-20230626-195917.yaml b/.changes/unreleased/Dependencies-20230626-195917.yaml new file mode 100644 index 000000000..c1dcdc504 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230626-195917.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update pytest requirement from ~=7.3 to ~=7.4" +time: 2023-06-26T19:59:17.00000Z +custom: + Author: dependabot[bot] + PR: 507 diff --git a/.changes/unreleased/Dependencies-20230626-195934.yaml b/.changes/unreleased/Dependencies-20230626-195934.yaml new file mode 100644 index 000000000..4bb681e34 --- /dev/null +++ b/.changes/unreleased/Dependencies-20230626-195934.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump mypy from 1.4.0 to 1.4.1" +time: 2023-06-26T19:59:34.00000Z +custom: + Author: dependabot[bot] + PR: 508 diff --git a/.changes/unreleased/Fixes-20230628-123227.yaml b/.changes/unreleased/Fixes-20230628-123227.yaml new file mode 100644 index 000000000..4ece8f404 --- /dev/null +++ b/.changes/unreleased/Fixes-20230628-123227.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Remove dependent_projects argument from PartialProject call in unit tests +time: 2023-06-28T12:32:27.637669-04:00 +custom: + Author: mikealfare + Issue: "7955" diff --git a/.changes/unreleased/Under the Hood-20221219-163610.yaml b/.changes/unreleased/Under the Hood-20221219-163610.yaml deleted file mode 100644 index 131d55986..000000000 --- a/.changes/unreleased/Under the Hood-20221219-163610.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Consistent capitalization for `CONTRIBUTING.md` -time: 2022-12-19T16:36:10.416838-07:00 -custom: - Author: dbeatty10 - Issue: "252" - PR: "252" diff --git a/.changie.yaml b/.changie.yaml index 77d5422e4..620305ac4 100644 --- a/.changie.yaml +++ b/.changie.yaml @@ -4,59 +4,127 @@ headerPath: header.tpl.md versionHeaderPath: "" changelogPath: CHANGELOG.md versionExt: md +envPrefix: "CHANGIE_" versionFormat: '## dbt-redshift {{.Version}} - {{.Time.Format "January 02, 2006"}}' kindFormat: '### {{.Kind}}' -changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-redshift/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-redshift/pull/{{.Custom.PR}}))' +changeFormat: |- + {{- $IssueList := list }} + {{- $changes := splitList " " $.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}}) kinds: - label: Breaking Changes - label: Features - label: Fixes - label: Under the Hood - label: Dependencies - changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-redshift/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-redshift/pull/{{.Custom.PR}}))' + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " $.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + 
- {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 - label: Security - changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-redshift/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-redshift/pull/{{.Custom.PR}}))' + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " $.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 + +newlines: + afterChangelogHeader: 1 + afterKind: 1 + afterChangelogVersion: 1 + beforeKind: 1 + endOfVersion: 1 + custom: - key: Author label: GitHub Username(s) (separated by a single space if multiple) type: string minLength: 3 - key: Issue - label: GitHub Issue Number - type: int - minLength: 4 -- key: PR - label: GitHub Pull Request Number - type: int - minLength: 4 + label: GitHub Issue Number (separated by a single space if multiple) + type: string + minLength: 1 + footerFormat: | {{- $contributorDict := dict }} - {{- /* any names added to this list should be all lowercase for later matching purposes */}} - {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "mikealfare" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }} + {{- /* ensure all names in this list are all lowercase for later matching purposes */}} + {{- $core_team := splitList " " .Env.CORE_TEAM }} + {{- /* ensure we always skip snyk and dependabot in addition to the core team */}} + {{- $maintainers := list "dependabot[bot]" "snyk-bot"}} + {{- range $team_member := $core_team }} + {{- $team_member_lower := lower $team_member }} + {{- $maintainers = append $maintainers $team_member_lower }} + {{- end }} {{- range $change := .Changes }} {{- $authorList := splitList " " $change.Custom.Author }} - {{- /* loop through all authors for a PR */}} + {{- /* loop through all authors for a single changelog */}} {{- range $author := $authorList }} {{- $authorLower := lower $author }} {{- /* we only want to include non-core team contributors */}} - {{- if not (has $authorLower $core_team)}} - {{- $pr := $change.Custom.PR }} - {{- /* check if this contributor has other PRs associated with them already */}} - {{- if hasKey $contributorDict $author }} - {{- $prList := get $contributorDict $author }} - {{- $prList = append $prList $pr }} - {{- $contributorDict := set $contributorDict $author $prList }} - {{- else }} - {{- $prList := list $change.Custom.PR }} - {{- $contributorDict := set $contributorDict $author $prList }} - {{- end }} - {{- end}} + {{- if not (has $authorLower $maintainers)}} + {{- $changeList := 
splitList " " $change.Custom.Author }} + {{- $IssueList := list }} + {{- $changeLink := $change.Kind }} + {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }} + {{- $changes := splitList " " $change.Custom.PR }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/pull/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + {{- else }} + {{- $changes := splitList " " $change.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-redshift/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + {{- end }} + {{- /* check if this contributor has other changes associated with them already */}} + {{- if hasKey $contributorDict $author }} + {{- $contributionList := get $contributorDict $author }} + {{- $contributionList = concat $contributionList $IssueList }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- else }} + {{- $contributionList := $IssueList }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- end }} + {{- end}} {{- end}} {{- end }} {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}} {{- if $contributorDict}} ### Contributors {{- range $k,$v := $contributorDict }} - - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-redshift/pull/{{$element}}){{end}}) + - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}}) {{- end }} {{- end }} diff --git a/.flake8 b/.flake8 index f39d154c0..b08ffcd53 100644 --- a/.flake8 +++ b/.flake8 @@ -4,9 +4,13 @@ select = W F ignore = - W503 # makes Flake8 work like black - W504 - E203 # makes Flake8 work like black - E741 - E501 + # makes Flake8 work like black + W503, + W504, + # makes Flake8 work like black + E203, + E741, + E501, exclude = test +per-file-ignores = + */__init__.py: F401 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..f6283d123 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +# This codeowners file is used to ensure all PRs require reviews from the adapters team + +* @dbt-labs/core-adapters diff --git a/.github/scripts/integration-test-matrix.js b/.github/scripts/integration-test-matrix.js index 9e7698ef5..7db445d9e 100644 --- a/.github/scripts/integration-test-matrix.js +++ b/.github/scripts/integration-test-matrix.js @@ -1,6 +1,6 @@ module.exports = ({ context }) => { const defaultPythonVersion = "3.8"; - const supportedPythonVersions = ["3.7", "3.8", "3.9", "3.10", "3.11"]; + const supportedPythonVersions = ["3.8", "3.9", "3.10", "3.11"]; const supportedAdapters = ["redshift"]; // if PR, generate matrix based on files changed and PR labels diff --git a/.github/scripts/update_dbt_core_branch.sh b/.github/scripts/update_dbt_core_branch.sh new file mode 100755 index 000000000..d28a40c35 --- /dev/null +++ b/.github/scripts/update_dbt_core_branch.sh @@ -0,0 +1,20 @@ +#!/bin/bash -e +set -e + +git_branch=$1 +target_req_file="dev-requirements.txt" +core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${git_branch}#egg=dbt-core|g" +postgres_req_sed_pattern="s|dbt-core.git.*#egg=dbt-postgres|dbt-core.git@${git_branch}#egg=dbt-postgres|g" 
+tests_req_sed_pattern="s|dbt-core.git.*#egg=dbt-tests|dbt-core.git@${git_branch}#egg=dbt-tests|g" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$core_req_sed_pattern" $target_req_file + sed -i "" "$postgres_req_sed_pattern" $target_req_file + sed -i "" "$tests_req_sed_pattern" $target_req_file +else + sed -i "$core_req_sed_pattern" $target_req_file + sed -i "$postgres_req_sed_pattern" $target_req_file + sed -i "$tests_req_sed_pattern" $target_req_file +fi +core_version=$(curl "https://raw.githubusercontent.com/dbt-labs/dbt-core/${git_branch}/core/dbt/version.py" | grep "__version__ = *"|cut -d'=' -f2) +bumpversion --allow-dirty --new-version "$core_version" major diff --git a/.github/scripts/update_dependencies.sh b/.github/scripts/update_dependencies.sh new file mode 100644 index 000000000..6000b5006 --- /dev/null +++ b/.github/scripts/update_dependencies.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e +set -e + +git_branch=$1 +target_req_file="dev-requirements.txt" +core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${git_branch}#egg=dbt-core|g" +postgres_req_sed_pattern="s|dbt-core.git.*#egg=dbt-postgres|dbt-core.git@${git_branch}#egg=dbt-postgres|g" +tests_req_sed_pattern="s|dbt-core.git.*#egg=dbt-tests|dbt-core.git@${git_branch}#egg=dbt-tests|g" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$core_req_sed_pattern" $target_req_file + sed -i "" "$postgres_req_sed_pattern" $target_req_file + sed -i "" "$tests_req_sed_pattern" $target_req_file +else + sed -i "$core_req_sed_pattern" $target_req_file + sed -i "$postgres_req_sed_pattern" $target_req_file + sed -i "$tests_req_sed_pattern" $target_req_file +fi diff --git a/.github/scripts/update_release_branch.sh b/.github/scripts/update_release_branch.sh new file mode 100644 index 000000000..75b9ccef6 --- /dev/null +++ b/.github/scripts/update_release_branch.sh @@ -0,0 +1,11 @@ +#!/bin/bash -e +set -e + +release_branch=$1 +target_req_file=".github/workflows/nightly-release.yml" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "s|[0-9].[0-9].latest|$release_branch|" $target_req_file +else + sed -i "s|[0-9].[0-9].latest|$release_branch|" $target_req_file +fi diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index d5c7fffed..46f240347 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -35,6 +35,6 @@ jobs: github.event.pull_request.merged && contains(github.event.label.name, 'backport') steps: - - uses: tibdex/backport@v2.0.2 + - uses: tibdex/backport@v2 with: github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/bot-changelog.yml b/.github/workflows/bot-changelog.yml index 1df5573bb..94498d25f 100644 --- a/.github/workflows/bot-changelog.yml +++ b/.github/workflows/bot-changelog.yml @@ -9,7 +9,6 @@ # time: # custom: # Author: -# Issue: 4904 # PR: # # **why?** @@ -40,7 +39,7 @@ jobs: matrix: include: - label: "dependencies" - changie_kind: "Dependency" + changie_kind: "Dependencies" - label: "snyk" changie_kind: "Security" runs-on: ubuntu-latest @@ -50,7 +49,7 @@ jobs: - name: Create and commit changelog on bot PR if: "contains(github.event.pull_request.labels.*.name, ${{ matrix.label }})" id: bot_changelog - uses: emmyoop/changie_bot@v1.0 + uses: emmyoop/changie_bot@v1 with: GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }} 
commit_author_name: "Github Build Bot" @@ -58,4 +57,4 @@ commit_message: "Add automated changelog yaml from template for bot PR" changie_kind: ${{ matrix.changie_kind }} label: ${{ matrix.label }} - custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: 150\n PR: ${{ github.event.pull_request.number }}\n" + custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}\n" diff --git a/.github/workflows/cut-release-branch.yml b/.github/workflows/cut-release-branch.yml new file mode 100644 index 000000000..1c487a8a8 --- /dev/null +++ b/.github/workflows/cut-release-branch.yml @@ -0,0 +1,42 @@ +# **what?** +# Calls a centralized workflow that will: +# 1. Cut a new branch (generally `*.latest`) +# 2. Also cleans up all files in `.changes/unreleased` and `.changes/previous version` on +# `main` and bumps `main` to the input version. + +# **why?** +# Generally reduces the workload of engineers and reduces error. Allows automation. + +# **when?** +# This will run when called manually. + +name: Cut new release branch + +on: + workflow_dispatch: + inputs: + version_to_bump_main: + description: 'The alpha version main should bump to (ex. 1.6.0a1)' + required: true + new_branch_name: + description: 'The full name of the new branch (ex. 1.5.latest)' + required: true + +defaults: + run: + shell: bash + +permissions: + contents: write + +jobs: + cut_branch: + name: "Cut branch and clean up main for dbt-redshift" + uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main + with: + version_to_bump_main: ${{ inputs.version_to_bump_main }} + new_branch_name: ${{ inputs.new_branch_name }} + PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch" + PR_body: "This PR will fail CI until the dbt-core PR has been merged due to release version conflicts." 
+ secrets: + FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }} diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 1fe33e148..0d6d91855 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -33,9 +33,11 @@ on: pull_request_target: # manual trigger workflow_dispatch: - # run this once per night to ensure no regressions from latest dbt-core changes - schedule: - - cron: '0 5 * * *' # 5 UTC + inputs: + dbt-core-branch: + description: "branch of dbt-core to use in dev-requirements.txt" + required: false + type: string # explicitly turn off permissions for `GITHUB_TOKEN` permissions: read-all @@ -66,13 +68,13 @@ jobs: steps: - name: Check out the repository (non-PR) if: github.event_name != 'pull_request_target' - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: Check out the repository (PR) if: github.event_name == 'pull_request_target' - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false ref: ${{ github.event.pull_request.head.sha }} @@ -93,10 +95,11 @@ jobs: redshift: - 'dbt/**' - 'tests/**' + - 'dev-requirements.txt' - name: Generate integration test matrix id: generate-matrix - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CHANGES: ${{ steps.get-changes.outputs.changes }} with: @@ -135,7 +138,7 @@ jobs: steps: - name: Check out the repository if: github.event_name != 'pull_request_target' - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false @@ -143,13 +146,13 @@ jobs: # this is necessary for the `pull_request_target` event - name: Check out the repository (PR) if: github.event_name == 'pull_request_target' - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false ref: ${{ github.event.pull_request.head.sha }} - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -160,6 +163,12 @@ jobs: python -m pip --version tox --version + - name: Update dev_requirements.txt + if: inputs.dbt-core-branch != '' + run: | + pip install bumpversion + ./.github/scripts/update_dbt_core_branch.sh ${{ inputs.dbt-core-branch }} + - name: Run tox (redshift) if: matrix.adapter == 'redshift' env: @@ -173,7 +182,7 @@ jobs: DBT_TEST_USER_3: dbt_test_user_3 run: tox - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 if: always() with: name: logs @@ -182,9 +191,10 @@ jobs: - name: Get current date if: always() id: date - run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts + run: | + echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 if: always() with: name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }}.csv @@ -220,7 +230,7 @@ jobs: steps: - name: Posting scheduled run failures - uses: ravsamhq/notify-slack-action@v1 + uses: ravsamhq/notify-slack-action@v2 if: ${{ github.event_name == 'schedule' }} with: notification_title: 'Redshift nightly integration test failed' diff --git a/.github/workflows/jira-creation.yml b/.github/workflows/jira-creation.yml index b4016befc..2611a8bdd 100644 --- a/.github/workflows/jira-creation.yml +++ b/.github/workflows/jira-creation.yml @@ -19,7 +19,9 @@ permissions: jobs: 
call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main + uses: dbt-labs/actions/.github/workflows/jira-creation.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-label.yml b/.github/workflows/jira-label.yml index 3da2e3a38..1637cbe38 100644 --- a/.github/workflows/jira-label.yml +++ b/.github/workflows/jira-label.yml @@ -19,7 +19,9 @@ permissions: jobs: call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main + uses: dbt-labs/actions/.github/workflows/jira-label.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/jira-transition.yml b/.github/workflows/jira-transition.yml index ed9f9cd4f..99158a15f 100644 --- a/.github/workflows/jira-transition.yml +++ b/.github/workflows/jira-transition.yml @@ -15,9 +15,14 @@ on: issues: types: [closed, deleted, reopened] +# no special access is needed +permissions: read-all + jobs: call-label-action: - uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main + uses: dbt-labs/actions/.github/workflows/jira-transition.yml@main + with: + project_key: ADAP secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 909766cc1..a76df7e9e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -43,12 +43,12 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: Set up Python - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: '3.8' @@ -72,7 +72,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11'] env: TOXENV: "unit" @@ -80,12 +80,12 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -102,9 +102,11 @@ jobs: - name: Get current date if: always() id: date - run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts + #no colons allowed for artifacts + run: | + echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 if: always() with: name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv @@ -120,12 +122,12 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: persist-credentials: false - name: Set up Python - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: '3.8' @@ -154,9 +156,9 @@ jobs: run: | export is_alpha=0 if [[ "$(ls -lh dist/)" == *"a1"* ]]; then export is_alpha=1; fi - echo "::set-output name=is_alpha::$is_alpha" + echo "is_alpha=$is_alpha" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: name: dist path: dist/ @@ -174,11 +176,11 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ['3.7', '3.8', 
'3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -188,7 +190,7 @@ jobs: python -m pip install --upgrade wheel python -m pip --version - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v3 with: name: dist path: dist/ diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml new file mode 100644 index 000000000..46db5b749 --- /dev/null +++ b/.github/workflows/nightly-release.yml @@ -0,0 +1,116 @@ +# **what?** +# Nightly releases to GitHub and PyPI. This workflow produces the following outcome: +# - generate and validate data for nightly release (commit SHA, version number, release branch); +# - pass data to release workflow; +# - nightly release will be pushed to GitHub as a draft release; +# - nightly build will be pushed to test PyPI; +# +# **why?** +# Ensure an automated and tested release process for nightly builds +# +# **when?** +# This workflow runs on schedule or can be run manually on demand. + +name: Nightly Test Release to GitHub and PyPI + +on: + workflow_dispatch: # for manual triggering + schedule: + - cron: 0 9 * * * + +permissions: + contents: write # this is the permission that allows creating a new release + +defaults: + run: + shell: bash + +env: + RELEASE_BRANCH: "1.5.latest" # must test against most recent .latest branch to have parity for dependency with core + +jobs: + aggregate-release-data: + runs-on: ubuntu-latest + + outputs: + commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }} + version_number: ${{ steps.nightly-release-version.outputs.number }} + release_branch: ${{ steps.release-branch.outputs.name }} + + steps: + - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}" + uses: actions/checkout@v3 + with: + ref: ${{ env.RELEASE_BRANCH }} + + - name: "Resolve Commit To Release" + id: resolve-commit-sha + run: | + commit_sha=$(git rev-parse HEAD) + echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT + + - name: "Get Current Version Number" + id: version-number-sources + run: | + current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '` + echo "current_version=$current_version" >> $GITHUB_OUTPUT + + - name: "Audit Version And Parse Into Parts" + id: semver + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ steps.version-number-sources.outputs.current_version }} + + - name: "Get Current Date" + id: current-date + run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT + + # Bump to the next patch because when this is a previously released patch, the changelog + # markdown will already exist and cause a failure in another step + - name: "Bump Patch Number" + id: bump_patch + run: | + echo "patch=$((${{ steps.semver.outputs.patch }}+1))" >> $GITHUB_OUTPUT + + - name: "Generate Nightly Release Version Number" + id: nightly-release-version + run: | + number="${{ steps.semver.outputs.major }}.${{ steps.semver.outputs.minor }}.${{ steps.bump_patch.outputs.patch }}.dev${{ steps.current-date.outputs.date }}" + echo "number=$number" >> $GITHUB_OUTPUT + + - name: "Audit Nightly Release Version And Parse Into Parts" + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ steps.nightly-release-version.outputs.number }} + + - name: "Set Release Branch" + id: release-branch + run: | + echo "name=${{ env.RELEASE_BRANCH }}" >> 
$GITHUB_OUTPUT + + log-outputs-aggregate-release-data: + runs-on: ubuntu-latest + needs: [aggregate-release-data] + + steps: + - name: "[DEBUG] Log Outputs" + run: | + echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }} + echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }} + echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} + + release-github-pypi: + needs: [aggregate-release-data] + + uses: ./.github/workflows/release.yml + with: + sha: ${{ needs.aggregate-release-data.outputs.commit_sha }} + target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} + version_number: ${{ needs.aggregate-release-data.outputs.version_number }} + build_script_path: "scripts/build-dist.sh" + env_setup_script_path: "scripts/env-setup.sh" + s3_bucket_name: "core-team-artifacts" + package_test_command: "dbt -h" + test_run: true + nightly_release: true + secrets: inherit diff --git a/.github/workflows/release-branch-tests.yml b/.github/workflows/release-branch-tests.yml new file mode 100644 index 000000000..004c6fb29 --- /dev/null +++ b/.github/workflows/release-branch-tests.yml @@ -0,0 +1,31 @@ +# **what?** +# The purpose of this workflow is to trigger CI to run for each release +# branch on a regular cadence. If the CI workflow fails for a branch, it +# will post to dev-core-alerts to raise awareness. + +# **why?** +# Ensures release branches are always shippable and not broken. +# Also, it can catch any dependencies shifting beneath us that might +# introduce breaking changes (could also impact Cloud). + +# **when?** +# Once each morning. A manual trigger can also test on demand + +name: Release branch scheduled testing + +on: + # run this once per night to ensure no regressions from latest dbt-core changes + schedule: + - cron: '0 5 * * *' # 5 UTC + + workflow_dispatch: # for manual triggering + +# no special access is needed +permissions: read-all + +jobs: + run_tests: + uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main + with: + workflows_to_run: '["main.yml", "integration.yml"]' + secrets: inherit diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 91c3114e4..1c0885001 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,14 +1,19 @@ # **what?** -# Take the given commit, run unit tests specifically on that sha, build and -# package it, and then release to GitHub with that specific build (PyPi to follow later) - +# The release workflow provides the following steps: +# - checkout the given commit; +# - validate version in sources and changelog file for given version; +# - bump the version and generate a changelog if needed; +# - merge all changes to the target branch if needed; +# - run unit and integration tests against given commit; +# - build and package that SHA; +# - release it to GitHub and PyPI with that specific build; +# # **why?** # Ensure an automated and tested release process - +# # **when?** -# This will only run manually with a given sha and version - -name: Build, Test, and Package +# This workflow can be run manually on demand or can be called by other workflows +name: Release to GitHub and PyPI on: workflow_dispatch: @@ -17,17 +22,85 @@ on: description: "The last commit sha in the release" type: string required: true - changelog_path: - description: "Path to changes log" + target_branch: + description: "The branch to release from" + type: string + required: true + version_number: + description: "The release version number 
(i.e. 1.0.0b1)" + type: string + required: true + build_script_path: + description: "Build script path" + type: string + default: "scripts/build-dist.sh" + required: true + env_setup_script_path: + description: "Environment setup script path" type: string - default: "./CHANGELOG.md" + default: "scripts/env-setup.sh" + required: false + s3_bucket_name: + description: "AWS S3 bucket name" + type: string + default: "core-team-artifacts" + required: true + package_test_command: + description: "Package test command" + type: string + default: "dbt --version" + required: true + test_run: + description: "Test run (Publish release as draft)" + type: boolean + default: true + required: false + nightly_release: + description: "Nightly release to dev environment" + type: boolean + default: false required: false + workflow_call: + inputs: + sha: + description: "The last commit sha in the release" + type: string + required: true + target_branch: + description: "The branch to release from" + type: string + required: true version_number: description: "The release version number (i.e. 1.0.0b1)" type: string required: true + build_script_path: + description: "Build script path" + type: string + default: "scripts/build-dist.sh" + required: true + env_setup_script_path: + description: "Environment setup script path" + type: string + default: "scripts/env-setup.sh" + required: false + s3_bucket_name: + description: "AWS S3 bucket name" + type: string + default: "core-team-artifacts" + required: true + package_test_command: + description: "Package test command" + type: string + default: "dbt --version" + required: true test_run: - description: "Test run (Publish release as draft to GitHub)" + description: "Test run (Publish release as draft)" + type: boolean + default: true + required: false + nightly_release: + description: "Nightly release to dev environment" type: boolean default: false required: false @@ -35,10 +108,6 @@ on: permissions: contents: write # this is the permission that allows creating a new release -env: - PYTHON_TARGET_VERSION: 3.8 - ARTIFACT_RETENTION_DAYS: 2 - defaults: run: shell: bash @@ -50,163 +119,110 @@ jobs: steps: - name: "[DEBUG] Print Variables" run: | - echo The last commit sha in the release: ${{ inputs.sha }} - echo The release version number: ${{ inputs.version_number }} - echo The path to the changelog markdpown: ${{ inputs.changelog_path }} - echo This is a test run: ${{ inputs.test_run }} - echo Python target version: ${{ env.PYTHON_TARGET_VERSION }} - echo Artifact retention days: ${{ env.ARTIFACT_RETENTION_DAYS }} - - unit: - name: Unit Test - runs-on: ubuntu-latest + echo The last commit sha in the release: ${{ inputs.sha }} + echo The branch to release from: ${{ inputs.target_branch }} + echo The release version number: ${{ inputs.version_number }} + echo Build script path: ${{ inputs.build_script_path }} + echo Environment setup script path: ${{ inputs.env_setup_script_path }} + echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }} + echo Package test command: ${{ inputs.package_test_command }} + echo Test run: ${{ inputs.test_run }} + echo Nightly release: ${{ inputs.nightly_release }} - env: - TOXENV: "unit" + bump-version-generate-changelog: + name: Bump package version, Generate changelog - steps: - - name: "Checkout Commit - ${{ inputs.sha }}" - uses: actions/checkout@v3 - with: - persist-credentials: false - ref: ${{ github.event.inputs.sha }} - - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: 
${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" - run: | - python -m pip install --user --upgrade pip - python -m pip install tox - python -m pip --version - python -m tox --version + uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main - - name: "Run Tox" - run: tox - - build: - name: Build Packages - - runs-on: ubuntu-latest - - steps: - - name: "Checkout Commit - ${{ inputs.sha }}" - uses: actions/checkout@v3 - with: - persist-credentials: false - ref: ${{ inputs.sha }} - - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" - run: | - python -m pip install --user --upgrade pip - python -m pip install --upgrade setuptools wheel twine check-wheel-contents - python -m pip --version - - - name: "Build Distributions" - run: ./scripts/build-dist.sh - - - name: "[DEBUG] Show Distributions" - run: ls -lh dist/ - - - name: "Check Distribution Descriptions" - run: | - twine check dist/* - - - name: "[DEBUG] Check Wheel Contents" - run: | - check-wheel-contents dist/*.whl --ignore W007,W008 + with: + sha: ${{ inputs.sha }} + version_number: ${{ inputs.version_number }} + target_branch: ${{ inputs.target_branch }} + env_setup_script_path: ${{ inputs.env_setup_script_path }} + test_run: ${{ inputs.test_run }} + nightly_release: ${{ inputs.nightly_release }} - - name: "Upload Build Artifact - ${{ inputs.version_number }}" - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: | - dist/ - !dist/dbt-${{ inputs.version_number }}.tar.gz - retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }} + secrets: inherit - test-build: - name: Verify Packages + log-outputs-bump-version-generate-changelog: + name: "[Log output] Bump package version, Generate changelog" + if: ${{ !failure() && !cancelled() }} - needs: [unit, build] + needs: [bump-version-generate-changelog] runs-on: ubuntu-latest steps: - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" + - name: Print variables run: | - python -m pip install --user --upgrade pip - python -m pip install --upgrade wheel - python -m pip --version - - - name: "Download Build Artifact - ${{ inputs.version_number }}" - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: dist/ + echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }} + echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} - - name: "[DEBUG] Show Distributions" - run: ls -lh dist/ - - - name: "Install Wheel Distributions" - run: | - find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + build-test-package: + name: Build, Test, Package + if: ${{ !failure() && !cancelled() }} + needs: [bump-version-generate-changelog] - - name: "[DEBUG] Check Wheel Distributions" - run: | - dbt --version + uses: dbt-labs/dbt-release/.github/workflows/build.yml@main - - name: "Install Source Distributions" - run: | - find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + with: + sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} + version_number: ${{ inputs.version_number }} + changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} + 
build_script_path: ${{ inputs.build_script_path }} + s3_bucket_name: ${{ inputs.s3_bucket_name }} + package_test_command: ${{ inputs.package_test_command }} + test_run: ${{ inputs.test_run }} + nightly_release: ${{ inputs.nightly_release }} + + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} github-release: name: GitHub Release if: ${{ !failure() && !cancelled() }} - needs: test-build - # pin to commit since this is workflow is WIP but this commit has been tested as working - uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@7b6e01d73d2c8454e06302cc66ef4c2dbd4dbe4e + needs: [bump-version-generate-changelog, build-test-package] + + uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main with: - sha: ${{ inputs.sha }} + sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} version_number: ${{ inputs.version_number }} - changelog_path: ${{ inputs.changelog_path }} + changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} test_run: ${{ inputs.test_run }} pypi-release: - name: Pypi release - # only release to PyPi if we're not testing - will release to PyPi test when workflow gets rewritten - if: inputs.test_run == 'false' + name: PyPI Release - runs-on: ubuntu-latest + needs: [github-release] - needs: github-release + uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main - environment: PypiProd - steps: - - uses: actions/download-artifact@v2 - with: - name: dist - path: 'dist' - - - name: Publish distribution to PyPI - uses: pypa/gh-action-pypi-publish@v1.4.2 - with: - password: ${{ secrets.PYPI_API_TOKEN }} + with: + version_number: ${{ inputs.version_number }} + test_run: ${{ inputs.test_run }} + + secrets: + PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + + slack-notification: + name: Slack Notification + if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }} + + needs: + [ + bump-version-generate-changelog, + build-test-package, + github-release, + pypi-release, + ] + + uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main + with: + status: "failure" + + secrets: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }} diff --git a/.github/workflows/triage-labels.yml b/.github/workflows/triage-labels.yml new file mode 100644 index 000000000..91f529e3e --- /dev/null +++ b/.github/workflows/triage-labels.yml @@ -0,0 +1,31 @@ +# **what?** +# When the core team triages, we sometimes need more information from the issue creator. In +# those cases we remove the `triage` label and add the `awaiting_response` label. Once we +# receive a response in the form of a comment, we want the `awaiting_response` label removed +# in favor of the `triage` label so we are aware that the issue needs action. + +# **why?** +# To help with our team triage issue tracking + +# **when?** +# This will run when a comment is added to an issue and that issue has the `awaiting_response` label. 
+ +name: Update Triage Label + +on: issue_comment + +defaults: + run: + shell: bash + +permissions: + issues: write + +jobs: + triage_label: + if: contains(github.event.issue.labels.*.name, 'awaiting_response') + uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main + with: + add_label: "triage" + remove_label: "awaiting_response" + secrets: inherit # this is only acceptable because we own the action we're calling diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a4e34d870..3d80b955c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,15 +1,12 @@ # For more on configuring pre-commit hooks (see https://pre-commit.com/) -# TODO: remove global exclusion of tests when testing overhaul is complete -exclude: '^tests/.*' - # Force all unspecified python hooks to run python 3.8 default_language_version: - python: python3.8 + python: python3 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.4.0 hooks: - id: check-yaml args: [--unsafe] @@ -18,31 +15,31 @@ repos: - id: trailing-whitespace - id: check-case-conflict - repo: https://github.com/psf/black - rev: 21.12b0 + rev: 23.1.0 hooks: - id: black - additional_dependencies: ['click==8.0.4'] + additional_dependencies: ['click~=8.1'] args: - "--line-length=99" - "--target-version=py38" - id: black alias: black-check stages: [manual] - additional_dependencies: ['click==8.0.4'] + additional_dependencies: ['click~=8.1'] args: - "--line-length=99" - "--target-version=py38" - "--check" - "--diff" - repo: https://github.com/pycqa/flake8 - rev: 4.0.1 + rev: 6.0.0 hooks: - id: flake8 - id: flake8 alias: flake8-check stages: [manual] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.942 + rev: v1.1.1 hooks: - id: mypy # N.B.: Mypy is... a bit fragile. @@ -55,12 +52,12 @@ repos: # of our control to the mix. Unfortunately, there's nothing we can # do about per pre-commit's author. # See https://github.com/pre-commit/pre-commit/issues/730 for details. - args: [--show-error-codes, --ignore-missing-imports] + args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases] files: ^dbt/adapters/.* language: system - id: mypy alias: mypy-check stages: [manual] - args: [--show-error-codes, --pretty, --ignore-missing-imports] + args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases] files: ^dbt/adapters language: system diff --git a/CHANGELOG.md b/CHANGELOG.md index 83c2e942f..dd2d5cafc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,81 @@ - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-redshift/blob/main/CONTRIBUTING.md#adding-changelog-entry) -## dbt-redshift 1.4.0-b1 - December 15, 2022 + +## dbt-redshift 1.6.0-b5 - June 23, 2023 + +### Features + +- Add support for materialized views ([#dbt-labs/dbt-core#6911](https://github.com/dbt-labs/dbt-redshift/issues/dbt-labs/dbt-core#6911)) + + + +## dbt-redshift 1.6.0-b4 - June 16, 2023 + +### Breaking Changes + +- require latest version of redshift_connector driver ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) + +### Fixes + +- remove requirement for region param ([#484](https://github.com/dbt-labs/dbt-redshift/issues/484)) + +### Contributors +- [@dataders](https://github.com/dataders) ([#492](https://github.com/dbt-labs/dbt-redshift/issues/492)) + +## dbt-redshift 1.6.0-b3 - June 09, 2023 + +### Breaking Changes + +- sslmode behavior has changed. To connect without ssl, set sslmode = disable. To connect using ssl, set sslmode to verify-ca, or verify-full. ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Drop support for python 3.7 ([#dbt-core/7082](https://github.com/dbt-labs/dbt-redshift/issues/dbt-core/7082)) + ### Features -- Migrate dbt-utils current_timestamp macros into core + adapters ([#194](https://github.com/dbt-labs/dbt-redshift/issues/194), [#191](https://github.com/dbt-labs/dbt-redshift/pull/191)) + +- Standardize the _connection_keys and debug_query for `dbt debug`. ([#PR754](https://github.com/dbt-labs/dbt-redshift/issues/PR754)) + +### Fixes + +- translate psycopg2 sslmode to ssl and sslmode in redshift_connector ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) +- Get autocommit on by default to restore old semantics users had relied on prior to 1.5. Add tests. ([#425](https://github.com/dbt-labs/dbt-redshift/issues/425)) +- remove dependency on postgres__list_relations_without_caching macro ([#488](https://github.com/dbt-labs/dbt-redshift/issues/488)) + +### Under the Hood + +- test constraint rendering of foreign key and unique constraints ([#7417](https://github.com/dbt-labs/dbt-redshift/issues/7417)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#429](https://github.com/dbt-labs/dbt-redshift/issues/429), [#429](https://github.com/dbt-labs/dbt-redshift/issues/429)) + +## dbt-redshift 1.6.0-b2 - May 25, 2023 + +### Fixes + +- Fix redshift_connector issue of timing out after 30s ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) +- Add a new connection param to reenable certain Redshift commands in macros.
([#463](https://github.com/dbt-labs/dbt-redshift/issues/463)) +- Escape `%` symbols in table/view/column comments ([#441](https://github.com/dbt-labs/dbt-redshift/issues/441)) +- Use smaller default batch size for seeds ([#347](https://github.com/dbt-labs/dbt-redshift/issues/347)) + +### Contributors +- [@jiezhen-chen](https://github.com/jiezhen-chen) ([#427](https://github.com/dbt-labs/dbt-redshift/issues/427)) + +## dbt-redshift 1.6.0-b1 - May 12, 2023 + +### Fixes + +- Adding region as independent param in profiles ([#419](https://github.com/dbt-labs/dbt-redshift/issues/419)) +- Update signature for execute method ([#'](https://github.com/dbt-labs/dbt-redshift/issues/'), [#'](https://github.com/dbt-labs/dbt-redshift/issues/')) + +### Dependencies + +- Update pytest requirement from ~=7.2 to ~=7.3 ([#414](https://github.com/dbt-labs/dbt-redshift/pull/414)) + +## dbt-redshift 1.6.0-a1 - April 17, 2023 ## Previous Releases For information on prior major and minor releases, see their changelogs: +- [1.5](https://github.com/dbt-labs/dbt-redshift/blob/1.5.latest/CHANGELOG.md) +- [1.4](https://github.com/dbt-labs/dbt-redshift/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-redshift/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-redshift/blob/1.2.latest/CHANGELOG.md) - [1.1](https://github.com/dbt-labs/dbt-redshift/blob/1.1.latest/CHANGELOG.md) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b33972697..ff08b6190 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -55,9 +55,9 @@ To confirm you have correct `dbt-core` and adapter versions installed please run ### Initial Setup -`dbt-redshift` contains [unit](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/unit) and [integration](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/integration) tests. Integration tests require testing against an actual Redshift warehouse. We have CI set up to test against a Redshift warehouse during PR checks. +`dbt-redshift` contains [unit](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/unit) and [functional](https://github.com/dbt-labs/dbt-redshift/tree/main/tests/functional) tests. Functional tests require testing against an actual Redshift warehouse. We have CI set up to test against a Redshift warehouse during PR checks. -In order to run integration tests locally, you will need a `test.env` file in the root of the repository that contains credentials for your Redshift warehouse. +In order to run functional tests locally, you will need a `test.env` file in the root of the repository that contains credentials for your Redshift warehouse. Note: This `test.env` file is git-ignored, but please be extra careful to never check in credentials or other sensitive information when developing. To create your `test.env` file, copy the provided example file, then supply your relevant credentials. @@ -70,14 +70,14 @@ $EDITOR test.env There are a few methods for running tests locally. #### `tox` -`tox` takes care of managing Python virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel. For example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, Python 3.10, and `flake8` checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration of these tests are located in `tox.ini`. +`tox` takes care of managing Python virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel. 
For example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10, and `flake8` checks in parallel with `tox -p`. Also, you can run unit tests for specific Python versions with `tox -e py38`. The configuration of these tests is located in `tox.ini`. #### `pytest` Finally, you can also run a specific test or group of tests using `pytest` directly. With a Python virtualenv active and dev dependencies installed you can do things like: ```sh -# run specific redshift integration tests -python -m pytest -m profile_redshift tests/integration/simple_seed_test +# run specific redshift functional tests +python -m pytest tests/functional/adapter/concurrent_transactions # run specific redshift functional tests in a file python -m pytest tests/functional/adapter/test_basic.py # run all unit tests in a file @@ -110,6 +110,6 @@ You don't need to worry about which `dbt-redshift` version your change will go i dbt Labs provides a CI environment to test changes to the `dbt-redshift` adapter and periodic checks against the development version of `dbt-core` through Github Actions. -A `dbt-redshift` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. +A `dbt-redshift` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or functional test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. Once all tests are passing and your PR has been approved, a `dbt-redshift` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada: diff --git a/Makefile b/Makefile index 924c6a26f..0cc3a43d6 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,11 @@ dev: ## Installs adapter in develop mode along with development dependencies @\ pip install -e . -r dev-requirements.txt && pre-commit install +.PHONY: dev-uninstall +dev-uninstall: ## Uninstalls all packages while maintaining the virtual environment + ## Useful when updating versions, or if you accidentally installed into the system interpreter + pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y + .PHONY: mypy mypy: ## Runs mypy against staged changes for static type checking.
@\ diff --git a/dbt/adapters/redshift/__init__.py b/dbt/adapters/redshift/__init__.py index be348f6d5..92ae383e4 100644 --- a/dbt/adapters/redshift/__init__.py +++ b/dbt/adapters/redshift/__init__.py @@ -1,13 +1,14 @@ -from dbt.adapters.redshift.connections import RedshiftConnectionManager # noqa -from dbt.adapters.redshift.connections import RedshiftCredentials -from dbt.adapters.redshift.relation import RedshiftColumn # noqa +from dbt.adapters.base import AdapterPlugin + +from dbt.adapters.redshift.connections import ( # noqa: F401 + RedshiftConnectionManager, + RedshiftCredentials, +) from dbt.adapters.redshift.relation import RedshiftRelation # noqa: F401 from dbt.adapters.redshift.impl import RedshiftAdapter +from dbt.include import redshift -from dbt.adapters.base import AdapterPlugin # type: ignore -from dbt.include import redshift # type: ignore - Plugin: AdapterPlugin = AdapterPlugin( adapter=RedshiftAdapter, # type: ignore credentials=RedshiftCredentials, diff --git a/dbt/adapters/redshift/__version__.py b/dbt/adapters/redshift/__version__.py index 27cfeecd9..da9f11e34 100644 --- a/dbt/adapters/redshift/__version__.py +++ b/dbt/adapters/redshift/__version__.py @@ -1 +1 @@ -version = "1.4.0b1" +version = "1.6.0b5" diff --git a/dbt/adapters/redshift/connections.py b/dbt/adapters/redshift/connections.py index be4d626d3..cd653d781 100644 --- a/dbt/adapters/redshift/connections.py +++ b/dbt/adapters/redshift/connections.py @@ -1,25 +1,39 @@ +import re from multiprocessing import Lock from contextlib import contextmanager -from typing import NewType +from typing import NewType, Tuple, Union, Optional, List +from dataclasses import dataclass, field + +import agate +import sqlparse +import redshift_connector +from redshift_connector.utils.oids import get_datatype_name -from dbt.adapters.postgres import PostgresConnectionManager -from dbt.adapters.postgres import PostgresCredentials +from dbt.adapters.sql import SQLConnectionManager +from dbt.contracts.connection import AdapterResponse, Connection, Credentials +from dbt.contracts.util import Replaceable +from dbt.dataclass_schema import FieldEncoder, dbtClassMixin, StrEnum, ValidationError from dbt.events import AdapterLogger -import dbt.exceptions +from dbt.exceptions import DbtRuntimeError, CompilationError import dbt.flags +from dbt.helper_types import Port -import boto3 -from dbt.dataclass_schema import FieldEncoder, dbtClassMixin, StrEnum +class SSLConfigError(CompilationError): + def __init__(self, exc: ValidationError): + self.exc = exc + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + validator_msg = self.validator_error_message(self.exc) + msg = f"Could not parse SSL config: {validator_msg}" + return msg -from dataclasses import dataclass, field -from typing import Optional, List logger = AdapterLogger("Redshift") drop_lock: Lock = dbt.flags.MP_CONTEXT.Lock() # type: ignore - IAMDuration = NewType("IAMDuration", int) @@ -37,8 +51,71 @@ class RedshiftConnectionMethod(StrEnum): IAM = "iam" +class UserSSLMode(StrEnum): + disable = "disable" + allow = "allow" + prefer = "prefer" + require = "require" + verify_ca = "verify-ca" + verify_full = "verify-full" + + @classmethod + def default(cls) -> "UserSSLMode": + # default for `psycopg2`, which aligns with dbt-redshift 1.4 and provides backwards compatibility + return cls.prefer + + +class RedshiftSSLMode(StrEnum): + verify_ca = "verify-ca" + verify_full = "verify-full" + + +SSL_MODE_TRANSLATION = { + UserSSLMode.disable: None, + UserSSLMode.allow: 
RedshiftSSLMode.verify_ca, + UserSSLMode.prefer: RedshiftSSLMode.verify_ca, + UserSSLMode.require: RedshiftSSLMode.verify_ca, + UserSSLMode.verify_ca: RedshiftSSLMode.verify_ca, + UserSSLMode.verify_full: RedshiftSSLMode.verify_full, +} + + +@dataclass +class RedshiftSSLConfig(dbtClassMixin, Replaceable): # type: ignore + ssl: bool = True + sslmode: Optional[RedshiftSSLMode] = SSL_MODE_TRANSLATION[UserSSLMode.default()] + + @classmethod + def parse(cls, user_sslmode: UserSSLMode) -> "RedshiftSSLConfig": + try: + raw_redshift_ssl = { + "ssl": user_sslmode != UserSSLMode.disable, + "sslmode": SSL_MODE_TRANSLATION[user_sslmode], + } + cls.validate(raw_redshift_ssl) + except ValidationError as exc: + raise SSLConfigError(exc) + + redshift_ssl = cls.from_dict(raw_redshift_ssl) + + if redshift_ssl.ssl: + message = ( + f"Establishing connection using ssl with `sslmode` set to '{user_sslmode}'. " + f"To connect without ssl, set `sslmode` to 'disable'." + ) + else: + message = "Establishing connection without ssl." + + logger.debug(message) + + return redshift_ssl + + @dataclass -class RedshiftCredentials(PostgresCredentials): +class RedshiftCredentials(Credentials): + host: str + user: str + port: Port method: str = RedshiftConnectionMethod.DATABASE # type: ignore password: Optional[str] = None # type: ignore cluster_id: Optional[str] = field( @@ -46,35 +123,187 @@ class RedshiftCredentials(PostgresCredentials): metadata={"description": "If using IAM auth, the name of the cluster"}, ) iam_profile: Optional[str] = None - iam_duration_seconds: int = 900 - search_path: Optional[str] = None - keepalives_idle: int = 4 autocreate: bool = False db_groups: List[str] = field(default_factory=list) ra3_node: Optional[bool] = False + connect_timeout: Optional[int] = None + role: Optional[str] = None + sslmode: Optional[UserSSLMode] = field(default_factory=UserSSLMode.default) + retries: int = 1 + region: Optional[str] = None + # opt-in by default per team deliberation on https://peps.python.org/pep-0249/#autocommit + autocommit: Optional[bool] = True + + _ALIASES = {"dbname": "database", "pass": "password"} @property def type(self): return "redshift" def _connection_keys(self): - keys = super()._connection_keys() - return keys + ("method", "cluster_id", "iam_profile", "iam_duration_seconds") + return ( + "host", + "user", + "port", + "database", + "method", + "cluster_id", + "iam_profile", + "schema", + "sslmode", + "region", + "autocreate", + "db_groups", + "ra3_node", + "connect_timeout", + "role", + "retries", + "autocommit", + ) + + @property + def unique_field(self) -> str: + return self.host + + +class RedshiftConnectMethodFactory: + credentials: RedshiftCredentials + + def __init__(self, credentials): + self.credentials = credentials + def get_connect_method(self): + method = self.credentials.method + kwargs = { + "host": self.credentials.host, + "database": self.credentials.database, + "port": self.credentials.port if self.credentials.port else 5439, + "auto_create": self.credentials.autocreate, + "db_groups": self.credentials.db_groups, + "region": self.credentials.region, + "timeout": self.credentials.connect_timeout, + } + + redshift_ssl_config = RedshiftSSLConfig.parse(self.credentials.sslmode) + kwargs.update(redshift_ssl_config.to_dict()) + + # Support missing 'method' for backwards compatibility + if method == RedshiftConnectionMethod.DATABASE or method is None: + # this requirement is really annoying to encode into json schema, + # so validate it here +
if self.credentials.password is None: + raise dbt.exceptions.FailedToConnectError( + "'password' field is required for 'database' credentials" + ) + + def connect(): + logger.debug("Connecting to redshift with username/password based auth...") + c = redshift_connector.connect( + user=self.credentials.user, + password=self.credentials.password, + **kwargs, + ) + if self.credentials.autocommit: + c.autocommit = True + if self.credentials.role: + c.cursor().execute("set role {}".format(self.credentials.role)) + return c + + elif method == RedshiftConnectionMethod.IAM: + if not self.credentials.cluster_id and "serverless" not in self.credentials.host: + raise dbt.exceptions.FailedToConnectError( + "Failed to use IAM method. 'cluster_id' must be provided for provisioned cluster. " + "'host' must be provided for serverless endpoint." + ) -class RedshiftConnectionManager(PostgresConnectionManager): + def connect(): + logger.debug("Connecting to redshift with IAM based auth...") + c = redshift_connector.connect( + iam=True, + db_user=self.credentials.user, + password="", + user="", + cluster_identifier=self.credentials.cluster_id, + profile=self.credentials.iam_profile, + **kwargs, + ) + if self.credentials.autocommit: + c.autocommit = True + if self.credentials.role: + c.cursor().execute("set role {}".format(self.credentials.role)) + return c + + else: + raise dbt.exceptions.FailedToConnectError( + "Invalid 'method' in profile: '{}'".format(method) + ) + + return connect + + +class RedshiftConnectionManager(SQLConnectionManager): TYPE = "redshift" + def _get_backend_pid(self): + sql = "select pg_backend_pid()" + _, cursor = self.add_query(sql) + res = cursor.fetchone() + return res + + def cancel(self, connection: Connection): + try: + pid = self._get_backend_pid() + except redshift_connector.InterfaceError as e: + if "is closed" in str(e): + logger.debug(f"Connection {connection.name} was already closed") + return + raise + + sql = f"select pg_terminate_backend({pid})" + _, cursor = self.add_query(sql) + res = cursor.fetchone() + logger.debug(f"Cancel query '{connection.name}': {res}") + + @classmethod + def get_response(cls, cursor: redshift_connector.Cursor) -> AdapterResponse: + # redshift_connector.Cursor doesn't have a status message attribute but + # this function is only used for successful runs, so we can just return a dummy + rows = cursor.rowcount + message = "SUCCESS" + return AdapterResponse(_message=message, rows_affected=rows) + + @contextmanager + def exception_handler(self, sql): + try: + yield + except redshift_connector.DatabaseError as e: + try: + err_msg = e.args[0]["M"] # this is a type redshift sets, so we must use these keys + except Exception: + err_msg = str(e).strip() + logger.debug(f"Redshift error: {err_msg}") + self.rollback_if_open() + raise dbt.exceptions.DbtDatabaseError(err_msg) from e + + except Exception as e: + logger.debug("Error running SQL: {}", sql) + logger.debug("Rolling back transaction.") + self.rollback_if_open() + # Raise DBT native exceptions as is. + if isinstance(e, dbt.exceptions.DbtRuntimeError): + raise + raise dbt.exceptions.DbtRuntimeError(str(e)) from e + @contextmanager - def fresh_transaction(self, name=None): + def fresh_transaction(self): """On entrance to this context manager, hold an exclusive lock and create a fresh transaction for redshift, then commit and begin a new one before releasing the lock on exit. See drop_relation in RedshiftAdapter for more information.
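(Editorial aside: a quick usage sketch of the sslmode translation defined above, assuming `UserSSLMode` and `RedshiftSSLConfig` are importable from `dbt.adapters.redshift.connections` as introduced in this diff; the expected outputs follow the `SSL_MODE_TRANSLATION` table.)

```python
# Sketch: how a profiles.yml sslmode becomes the ssl/sslmode kwargs that are
# merged into the redshift_connector.connect() call above.
from dbt.adapters.redshift.connections import RedshiftSSLConfig, UserSSLMode

# psycopg2-style "require" is coerced to redshift_connector's "verify-ca"
print(RedshiftSSLConfig.parse(UserSSLMode("require")).to_dict())
# expected: {"ssl": True, "sslmode": "verify-ca"}

# "disable" turns ssl off entirely
print(RedshiftSSLConfig.parse(UserSSLMode("disable")).to_dict())
# expected: {"ssl": False, "sslmode": None}
```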
- - :param Optional[str] name: The name of the connection to use, or None - to use the default. """ with drop_lock: connection = self.get_thread_connection() @@ -84,88 +313,84 @@ def fresh_transaction(self, name=None): self.begin() yield - self.commit() + self.begin() @classmethod - def fetch_cluster_credentials( - cls, db_user, db_name, cluster_id, iam_profile, duration_s, autocreate, db_groups - ): - """Fetches temporary login credentials from AWS. The specified user - must already exist in the database, or else an error will occur""" - - if iam_profile is None: - session = boto3.Session() - boto_client = session.client("redshift") + def open(cls, connection): + if connection.state == "open": + logger.debug("Connection is already open, skipping open.") + return connection + + credentials = connection.credentials + connect_method_factory = RedshiftConnectMethodFactory(credentials) + + def exponential_backoff(attempt: int): + return attempt * attempt + + retryable_exceptions = [ + redshift_connector.OperationalError, + redshift_connector.DatabaseError, + redshift_connector.DataError, + ] + + return cls.retry_connection( + connection, + connect=connect_method_factory.get_connect_method(), + logger=logger, + retry_limit=credentials.retries, + retry_timeout=exponential_backoff, + retryable_exceptions=retryable_exceptions, + ) + + def execute( + self, + sql: str, + auto_begin: bool = False, + fetch: bool = False, + limit: Optional[int] = None, + ) -> Tuple[AdapterResponse, agate.Table]: + _, cursor = self.add_query(sql, auto_begin) + response = self.get_response(cursor) + if fetch: + table = self.get_result_from_cursor(cursor, limit) else: - logger.debug("Connecting to Redshift using 'IAM'" + f"with profile {iam_profile}") - boto_session = boto3.Session(profile_name=iam_profile) - boto_client = boto_session.client("redshift") + table = dbt.clients.agate_helper.empty_table() + return response, table - try: - return boto_client.get_cluster_credentials( - DbUser=db_user, - DbName=db_name, - ClusterIdentifier=cluster_id, - DurationSeconds=duration_s, - AutoCreate=autocreate, - DbGroups=db_groups, - ) + def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False): + connection = None + cursor = None - except boto_client.exceptions.ClientError as e: - raise dbt.exceptions.FailedToConnectException( - "Unable to get temporary Redshift cluster credentials: {}".format(e) - ) + queries = sqlparse.split(sql) - @classmethod - def get_tmp_iam_cluster_credentials(cls, credentials): - cluster_id = credentials.cluster_id + for query in queries: + # Strip off comments from the current query + without_comments = re.sub( + re.compile(r"(\".*?\"|\'.*?\')|(/\*.*?\*/|--[^\r\n]*$)", re.MULTILINE), + "", + query, + ).strip() - # default via: - # boto3.readthedocs.io/en/latest/reference/services/redshift.html - iam_duration_s = credentials.iam_duration_seconds + if without_comments == "": + continue - if not cluster_id: - raise dbt.exceptions.FailedToConnectException( - "'cluster_id' must be provided in profile if IAM " "authentication method selected" + connection, cursor = super().add_query( + query, auto_begin, bindings=bindings, abridge_sql_log=abridge_sql_log ) - cluster_creds = cls.fetch_cluster_credentials( - credentials.user, - credentials.database, - credentials.cluster_id, - credentials.iam_profile, - iam_duration_s, - credentials.autocreate, - credentials.db_groups, - ) + if cursor is None: + conn = self.get_thread_connection() + conn_name = conn.name if conn and conn.name else "" + raise 
DbtRuntimeError(f"Tried to run invalid SQL: {sql} on {conn_name}") - # replace username and password with temporary redshift credentials - return credentials.replace( - user=cluster_creds.get("DbUser"), password=cluster_creds.get("DbPassword") - ) + return connection, cursor @classmethod def get_credentials(cls, credentials): - method = credentials.method + return credentials - # Support missing 'method' for backwards compatibility - if method == "database" or method is None: - logger.debug("Connecting to Redshift using 'database' credentials") - # this requirement is really annoying to encode into json schema, - # so validate it here - if credentials.password is None: - raise dbt.exceptions.FailedToConnectException( - "'password' field is required for 'database' credentials" - ) - return credentials - - elif method == "iam": - logger.debug("Connecting to Redshift using 'IAM' credentials") - return cls.get_tmp_iam_cluster_credentials(credentials) - - else: - raise dbt.exceptions.FailedToConnectException( - "Invalid 'method' in profile: '{}'".format(method) - ) + @classmethod + def data_type_code_to_name(cls, type_code: Union[int, str]) -> str: + return get_datatype_name(type_code) diff --git a/dbt/adapters/redshift/impl.py b/dbt/adapters/redshift/impl.py index 206185f57..0ceb931d0 100644 --- a/dbt/adapters/redshift/impl.py +++ b/dbt/adapters/redshift/impl.py @@ -1,18 +1,25 @@ from dataclasses import dataclass -from typing import Optional -from dbt.adapters.base.impl import AdapterConfig -from dbt.adapters.sql import SQLAdapter +from typing import Optional, Set, Any, Dict, Type +from collections import namedtuple + +from dbt.adapters.base import PythonJobHelper +from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport from dbt.adapters.base.meta import available -from dbt.adapters.postgres import PostgresAdapter -from dbt.adapters.redshift import RedshiftConnectionManager -from dbt.adapters.redshift import RedshiftColumn -from dbt.adapters.redshift import RedshiftRelation +from dbt.adapters.sql import SQLAdapter +from dbt.contracts.connection import AdapterResponse +from dbt.contracts.graph.nodes import ConstraintType from dbt.events import AdapterLogger import dbt.exceptions +from dbt.adapters.redshift import RedshiftConnectionManager, RedshiftRelation + + logger = AdapterLogger("Redshift") +GET_RELATIONS_MACRO_NAME = "redshift__get_relations" + + @dataclass class RedshiftConfig(AdapterConfig): sort_type: Optional[str] = None @@ -20,15 +27,24 @@ class RedshiftConfig(AdapterConfig): sort: Optional[str] = None bind: Optional[bool] = None backup: Optional[bool] = True + autorefresh: Optional[bool] = False -class RedshiftAdapter(PostgresAdapter, SQLAdapter): +class RedshiftAdapter(SQLAdapter): Relation = RedshiftRelation ConnectionManager = RedshiftConnectionManager - Column = RedshiftColumn # type: ignore + connections: RedshiftConnectionManager AdapterSpecificConfigs = RedshiftConfig # type: ignore + CONSTRAINT_SUPPORT = { + ConstraintType.check: ConstraintSupport.NOT_SUPPORTED, + ConstraintType.not_null: ConstraintSupport.ENFORCED, + ConstraintType.unique: ConstraintSupport.NOT_ENFORCED, + ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED, + ConstraintType.foreign_key: ConstraintSupport.NOT_ENFORCED, + } + @classmethod def date_function(cls): return "getdate()" @@ -72,7 +88,7 @@ def verify_database(self, database): ra3_node = self.config.credentials.ra3_node if database.lower() != expected.lower() and not ra3_node: - raise dbt.exceptions.NotImplementedException( + raise 
dbt.exceptions.NotImplementedError( "Cross-db references allowed only in RA3.* node. ({} vs {})".format( database, expected ) @@ -85,9 +101,69 @@ def _get_catalog_schemas(self, manifest): schemas = super(SQLAdapter, self)._get_catalog_schemas(manifest) try: return schemas.flatten(allow_multiple_databases=self.config.credentials.ra3_node) - except dbt.exceptions.RuntimeException as exc: - dbt.exceptions.raise_compiler_error( - "Cross-db references allowed only in {} RA3.* node. Got {}".format( - self.type(), exc.msg - ) + except dbt.exceptions.DbtRuntimeError as exc: + msg = f"Cross-db references allowed only in {self.type()} RA3.* node. Got {exc.msg}" + raise dbt.exceptions.CompilationError(msg) + + def valid_incremental_strategies(self): + """The set of standard builtin strategies which this adapter supports out-of-the-box. + Not used to validate custom strategies defined by end users. + """ + return ["append", "delete+insert"] + + def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: + return f"{add_to} + interval '{number} {interval}'" + + def _link_cached_database_relations(self, schemas: Set[str]): + """ + :param schemas: The set of schemas that should have links added. + """ + database = self.config.credentials.database + _Relation = namedtuple("_Relation", "database schema identifier") + links = [ + ( + _Relation(database, dep_schema, dep_identifier), + _Relation(database, ref_schema, ref_identifier), + ) + for dep_schema, dep_identifier, ref_schema, ref_identifier in self.execute_macro( + GET_RELATIONS_MACRO_NAME ) + # don't record in cache if this relation isn't in a relevant schema + if ref_schema in schemas + ] + + for dependent, referenced in links: + self.cache.add_link( + referenced=self.Relation.create(**referenced._asdict()), + dependent=self.Relation.create(**dependent._asdict()), + ) + + def _link_cached_relations(self, manifest): + schemas = set( + relation.schema.lower() + for relation in self._get_cache_schemas(manifest) + if self.verify_database(relation.database) == "" + ) + self._link_cached_database_relations(schemas) + + def _relations_cache_for_schemas(self, manifest, cache_schemas=None): + super()._relations_cache_for_schemas(manifest, cache_schemas) + self._link_cached_relations(manifest) + + # avoid non-implemented abstract methods warning + # make it clear what needs to be implemented while still raising the error in super() + # we can update these with Redshift-specific messages if needed + @property + def python_submission_helpers(self) -> Dict[str, Type[PythonJobHelper]]: + return super().python_submission_helpers + + @property + def default_python_submission_method(self) -> str: + return super().default_python_submission_method + + def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse: + return super().generate_python_submission_response(submission_result) + + def debug_query(self): + """Override for DebugTask method""" + self.execute("select 1 as id") diff --git a/dbt/adapters/redshift/relation.py b/dbt/adapters/redshift/relation.py index 33e7c6897..0ef4fe276 100644 --- a/dbt/adapters/redshift/relation.py +++ b/dbt/adapters/redshift/relation.py @@ -1,18 +1,105 @@ -from dbt.adapters.base import Column from dataclasses import dataclass -from dbt.adapters.postgres.relation import PostgresRelation +from typing import Optional + +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.relation_configs import ( + RelationConfigBase, + RelationConfigChangeAction, + 
RelationResults, +) +from dbt.context.providers import RuntimeConfigObject +from dbt.contracts.graph.nodes import ModelNode +from dbt.contracts.relation import RelationType +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs import ( + RedshiftMaterializedViewConfig, + RedshiftMaterializedViewConfigChangeset, + RedshiftAutoRefreshConfigChange, + RedshiftBackupConfigChange, + RedshiftDistConfigChange, + RedshiftSortConfigChange, + RedshiftIncludePolicy, + RedshiftQuotePolicy, + MAX_CHARACTERS_IN_IDENTIFIER, +) @dataclass(frozen=True, eq=False, repr=False) -class RedshiftRelation(PostgresRelation): - # Override the method in the Postgres Relation because Redshift allows - # longer names: "Be between 1 and 127 bytes in length, not including - # quotation marks for delimited identifiers." - # - # see: https://docs.aws.amazon.com/redshift/latest/dg/r_names.html +class RedshiftRelation(BaseRelation): + include_policy = RedshiftIncludePolicy # type: ignore + quote_policy = RedshiftQuotePolicy # type: ignore + relation_configs = { + RelationType.MaterializedView.value: RedshiftMaterializedViewConfig, + } + + def __post_init__(self): + # Check for length of Redshift table/view names. + # Check self.type to exclude test relation identifiers + if ( + self.identifier is not None + and self.type is not None + and len(self.identifier) > MAX_CHARACTERS_IN_IDENTIFIER + ): + raise DbtRuntimeError( + f"Relation name '{self.identifier}' " + f"is longer than {MAX_CHARACTERS_IN_IDENTIFIER} characters" + ) + def relation_max_name_length(self): - return 127 + return MAX_CHARACTERS_IN_IDENTIFIER + + @classmethod + def from_runtime_config(cls, runtime_config: RuntimeConfigObject) -> RelationConfigBase: + model_node: ModelNode = runtime_config.model + relation_type: str = model_node.config.materialized + + if relation_config := cls.relation_configs.get(relation_type): + return relation_config.from_model_node(model_node) + + raise DbtRuntimeError( + f"from_runtime_config() is not supported for the provided relation type: {relation_type}" + ) + + @classmethod + def materialized_view_config_changeset( + cls, relation_results: RelationResults, runtime_config: RuntimeConfigObject + ) -> Optional[RedshiftMaterializedViewConfigChangeset]: + config_change_collection = RedshiftMaterializedViewConfigChangeset() + + existing_materialized_view = RedshiftMaterializedViewConfig.from_relation_results( + relation_results + ) + new_materialized_view = RedshiftMaterializedViewConfig.from_model_node( + runtime_config.model + ) + assert isinstance(existing_materialized_view, RedshiftMaterializedViewConfig) + assert isinstance(new_materialized_view, RedshiftMaterializedViewConfig) + + if new_materialized_view.autorefresh != existing_materialized_view.autorefresh: + config_change_collection.autorefresh = RedshiftAutoRefreshConfigChange( + action=RelationConfigChangeAction.alter, + context=new_materialized_view.autorefresh, + ) + + if new_materialized_view.backup != existing_materialized_view.backup: + config_change_collection.backup = RedshiftBackupConfigChange( + action=RelationConfigChangeAction.alter, + context=new_materialized_view.backup, + ) + + if new_materialized_view.dist != existing_materialized_view.dist: + config_change_collection.dist = RedshiftDistConfigChange( + action=RelationConfigChangeAction.alter, + context=new_materialized_view.dist, + ) + if new_materialized_view.sort != existing_materialized_view.sort: + config_change_collection.sort = RedshiftSortConfigChange( + 
action=RelationConfigChangeAction.alter, + context=new_materialized_view.sort, + ) -class RedshiftColumn(Column): - pass # redshift does not inherit from postgres here + if config_change_collection.has_changes: + return config_change_collection + return None diff --git a/dbt/adapters/redshift/relation_configs/__init__.py b/dbt/adapters/redshift/relation_configs/__init__.py new file mode 100644 index 000000000..26e36c86c --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/__init__.py @@ -0,0 +1,19 @@ +from dbt.adapters.redshift.relation_configs.sort import ( + RedshiftSortConfig, + RedshiftSortConfigChange, +) +from dbt.adapters.redshift.relation_configs.dist import ( + RedshiftDistConfig, + RedshiftDistConfigChange, +) +from dbt.adapters.redshift.relation_configs.materialized_view import ( + RedshiftMaterializedViewConfig, + RedshiftAutoRefreshConfigChange, + RedshiftBackupConfigChange, + RedshiftMaterializedViewConfigChangeset, +) +from dbt.adapters.redshift.relation_configs.policies import ( + RedshiftIncludePolicy, + RedshiftQuotePolicy, + MAX_CHARACTERS_IN_IDENTIFIER, +) diff --git a/dbt/adapters/redshift/relation_configs/base.py b/dbt/adapters/redshift/relation_configs/base.py new file mode 100644 index 000000000..ebbd46b1b --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/base.py @@ -0,0 +1,70 @@ +from dataclasses import dataclass +from typing import Optional + +import agate +from dbt.adapters.base.relation import Policy +from dbt.adapters.relation_configs import ( + RelationConfigBase, + RelationResults, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.contracts.relation import ComponentName + +from dbt.adapters.redshift.relation_configs.policies import ( + RedshiftIncludePolicy, + RedshiftQuotePolicy, +) + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftRelationConfigBase(RelationConfigBase): + """ + This base class implements a few boilerplate methods and provides some light structure for Redshift relations. 
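(Editorial aside: the intended pattern here is that concrete configs override `parse_model_node` / `parse_relation_results` and inherit the `from_model_node` / `from_relation_results` plumbing. Below is a minimal, hypothetical subclass to make that concrete; the class name and its single `backup` field are invented for illustration, while the real subclasses follow in `dist.py`, `sort.py`, and `materialized_view.py`.)

```python
# Hypothetical sketch (not part of this diff): a concrete config only parses
# the keys it owns; the base class supplies the from_* entry points.
from dataclasses import dataclass
from typing import Optional

from dbt.contracts.graph.nodes import ModelNode


@dataclass(frozen=True, eq=True, unsafe_hash=True)
class SketchBackupConfig(RedshiftRelationConfigBase):
    backup: Optional[bool] = True

    @classmethod
    def parse_model_node(cls, model_node: ModelNode) -> dict:
        # read only the keys this config owns from the model's config block
        return {"backup": model_node.config.extra.get("backup", True)}


# SketchBackupConfig.from_model_node(model_node) then returns a frozen
# instance via the inherited parse_model_node -> from_dict flow.
```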
+ """ + + @classmethod + def include_policy(cls) -> Policy: + return RedshiftIncludePolicy() + + @classmethod + def quote_policy(cls) -> Policy: + return RedshiftQuotePolicy() + + @classmethod + def from_model_node(cls, model_node: ModelNode) -> "RelationConfigBase": + relation_config = cls.parse_model_node(model_node) + relation = cls.from_dict(relation_config) + return relation + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + raise NotImplementedError( + "`parse_model_node()` needs to be implemented on this RelationConfigBase instance" + ) + + @classmethod + def from_relation_results(cls, relation_results: RelationResults) -> "RelationConfigBase": + relation_config = cls.parse_relation_results(relation_results) + relation = cls.from_dict(relation_config) + return relation + + @classmethod + def parse_relation_results(cls, relation_results: RelationResults) -> dict: + raise NotImplementedError( + "`parse_relation_results()` needs to be implemented on this RelationConfigBase instance" + ) + + @classmethod + def _render_part(cls, component: ComponentName, value: Optional[str]) -> Optional[str]: + if cls.include_policy().get_part(component) and value: + if cls.quote_policy().get_part(component): + return f'"{value}"' + return value.lower() + return None + + @classmethod + def _get_first_row(cls, results: agate.Table) -> agate.Row: + try: + return results.rows[0] + except IndexError: + return agate.Row(values=set()) diff --git a/dbt/adapters/redshift/relation_configs/dist.py b/dbt/adapters/redshift/relation_configs/dist.py new file mode 100644 index 000000000..668f3f65a --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/dist.py @@ -0,0 +1,164 @@ +from dataclasses import dataclass +from typing import Optional, Set + +import agate +from dbt.adapters.relation_configs import ( + RelationConfigChange, + RelationConfigChangeAction, + RelationConfigValidationMixin, + RelationConfigValidationRule, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.dataclass_schema import StrEnum +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs.base import RedshiftRelationConfigBase + + +class RedshiftDistStyle(StrEnum): + auto = "auto" + even = "even" + all = "all" + key = "key" + + @classmethod + def default(cls) -> "RedshiftDistStyle": + return cls.auto + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftDistConfig(RedshiftRelationConfigBase, RelationConfigValidationMixin): + """ + This config fallows the specs found here: + https://docs.aws.amazon.com/redshift/latest/dg/r_CREATE_TABLE_NEW.html + + The following parameters are configurable by dbt: + - diststyle: the type of data distribution style to use on the table/materialized view + - distkey: the column to use for the dist key if `dist_style` is `key` + """ + + diststyle: Optional[RedshiftDistStyle] = RedshiftDistStyle.default() + distkey: Optional[str] = None + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + # index rules get run by default with the mixin + return { + RelationConfigValidationRule( + validation_check=not ( + self.diststyle == RedshiftDistStyle.key and self.distkey is None + ), + validation_error=DbtRuntimeError( + "A `RedshiftDistConfig` that specifies a `diststyle` of `key` must provide a value for `distkey`." 
+ ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.diststyle + in (RedshiftDistStyle.auto, RedshiftDistStyle.even, RedshiftDistStyle.all) + and self.distkey is not None + ), + validation_error=DbtRuntimeError( + "A `RedshiftDistConfig` that specifies a `distkey` must be of `diststyle` `key`." + ), + ), + } + + @classmethod + def from_dict(cls, config_dict) -> "RedshiftDistConfig": + kwargs_dict = { + "diststyle": config_dict.get("diststyle"), + "distkey": config_dict.get("distkey"), + } + dist: "RedshiftDistConfig" = super().from_dict(kwargs_dict) # type: ignore + return dist + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + """ + Translate ModelNode objects from the user-provided config into a standard dictionary. + + Args: + model_node: the description of the distkey and diststyle from the user in this format: + + { + "dist": any("auto", "even", "all") or "" + } + + Returns: a standard dictionary describing this `RedshiftDistConfig` instance + """ + dist = model_node.config.extra.get("dist", "") + + diststyle = dist.lower() + + if diststyle == "": + config = {} + + elif diststyle in ( + RedshiftDistStyle.auto, + RedshiftDistStyle.even, + RedshiftDistStyle.all, + ): + config = {"diststyle": diststyle} + + else: + config = {"diststyle": RedshiftDistStyle.key.value, "distkey": dist} + + return config + + @classmethod + def parse_relation_results(cls, relation_results_entry: agate.Row) -> dict: + """ + Translate agate objects from the database into a standard dictionary. + + Args: + relation_results_entry: the description of the distkey and diststyle from the database in this format: + + agate.Row({ + "diststyle": "", # e.g. EVEN | KEY(column1) | AUTO(ALL) | AUTO(KEY(id)) + }) + + Returns: a standard dictionary describing this `RedshiftDistConfig` instance + """ + dist: str = relation_results_entry.get("diststyle") + + try: + # covers `AUTO`, `ALL`, `EVEN`, `KEY`, '', + diststyle = dist.split("(")[0].lower() + except AttributeError: + # covers None + diststyle = "" + + if dist == "": + config = {} + + elif diststyle == RedshiftDistStyle.key: + open_paren = len("KEY(") + close_paren = -len(")") + distkey = dist[open_paren:close_paren] # e.g. KEY(column1) + config = {"diststyle": diststyle, "distkey": distkey} + + else: + config = {"diststyle": diststyle} + + return config + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftDistConfigChange(RelationConfigChange, RelationConfigValidationMixin): + context: RedshiftDistConfig + + @property + def requires_full_refresh(self) -> bool: + return True + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + return { + RelationConfigValidationRule( + validation_check=(self.action == RelationConfigChangeAction.alter), + validation_error=DbtRuntimeError( + "Invalid operation, only `alter` changes are supported for `distkey` / `diststyle`." 
+ ), + ), + } diff --git a/dbt/adapters/redshift/relation_configs/materialized_view.py b/dbt/adapters/redshift/relation_configs/materialized_view.py new file mode 100644 index 000000000..82bc0d084 --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/materialized_view.py @@ -0,0 +1,268 @@ +from dataclasses import dataclass +from typing import Optional, Set + +import agate +from dbt.adapters.relation_configs import ( + RelationResults, + RelationConfigChange, + RelationConfigValidationMixin, + RelationConfigValidationRule, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.contracts.relation import ComponentName +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs.base import RedshiftRelationConfigBase +from dbt.adapters.redshift.relation_configs.dist import ( + RedshiftDistConfig, + RedshiftDistStyle, + RedshiftDistConfigChange, +) +from dbt.adapters.redshift.relation_configs.policies import MAX_CHARACTERS_IN_IDENTIFIER +from dbt.adapters.redshift.relation_configs.sort import ( + RedshiftSortConfig, + RedshiftSortConfigChange, +) + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftMaterializedViewConfig(RedshiftRelationConfigBase, RelationConfigValidationMixin): + """ + This config follows the specs found here: + https://docs.aws.amazon.com/redshift/latest/dg/materialized-view-create-sql-command.html + + The following parameters are configurable by dbt: + - mv_name: name of the materialized view + - query: the query that defines the view + - backup: determines if the materialized view is included in automated and manual cluster snapshots + - Note: we cannot currently query this from Redshift, which creates two issues + - a model deployed with this set to False will rebuild every run because the database version will always + look like True + - to deploy this as a change from False to True, a full refresh must be issued since the database version + will always look like True (unless there is another full refresh-triggering change) + - dist: the distribution configuration for the data behind the materialized view, a combination of + a `diststyle` and an optional `distkey` + - Note: the default `diststyle` for materialized views is EVEN, despite the default in general being AUTO + - sort: the sort configuration for the data behind the materialized view, a combination of + a `sortstyle` and an optional `sortkey` + - auto_refresh: specifies whether the materialized view should be automatically refreshed + with the latest changes from its base tables + + There are currently no non-configurable parameters.
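(Editorial aside: for orientation, this is the dictionary shape that `from_dict` below consumes; every value here is an invented example, and the nested `dist` / `sort` entries are delegated to `RedshiftDistConfig` and `RedshiftSortConfig`.)

```python
# Invented example values; shows the flattened shape consumed by
# RedshiftMaterializedViewConfig.from_dict (defined below).
config_dict = {
    "mv_name": "my_materialized_view",
    "schema_name": "analytics",
    "database_name": "prod",
    "query": "select * from my_base_table",
    "backup": True,
    "autorefresh": False,
    "dist": {"diststyle": "key", "distkey": "id"},  # -> RedshiftDistConfig
    "sort": {"sortkey": ["id"]},  # -> RedshiftSortConfig (compound by default)
}

mv_config = RedshiftMaterializedViewConfig.from_dict(config_dict)
assert mv_config.autorefresh is False
```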
+ """ + + mv_name: str + schema_name: str + database_name: str + query: str + backup: bool = True + dist: RedshiftDistConfig = RedshiftDistConfig(diststyle=RedshiftDistStyle.even) + sort: RedshiftSortConfig = RedshiftSortConfig() + autorefresh: bool = False + + @property + def path(self) -> str: + return ".".join( + part + for part in [self.database_name, self.schema_name, self.mv_name] + if part is not None + ) + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + # sort and dist rules get run by default with the mixin + return { + RelationConfigValidationRule( + validation_check=len(self.mv_name or "") <= MAX_CHARACTERS_IN_IDENTIFIER, + validation_error=DbtRuntimeError( + f"The materialized view name is more than {MAX_CHARACTERS_IN_IDENTIFIER} " + f"characters: {self.mv_name}" + ), + ), + RelationConfigValidationRule( + validation_check=self.dist.diststyle != RedshiftDistStyle.auto, + validation_error=DbtRuntimeError( + "Redshift materialized views do not support a `diststyle` of `auto`." + ), + ), + RelationConfigValidationRule( + validation_check=len(self.mv_name if self.mv_name else "") <= 127, + validation_error=DbtRuntimeError( + "Redshift does not support object names longer than 127 characters." + ), + ), + } + + @classmethod + def from_dict(cls, config_dict) -> "RedshiftMaterializedViewConfig": + kwargs_dict = { + "mv_name": cls._render_part(ComponentName.Identifier, config_dict.get("mv_name")), + "schema_name": cls._render_part(ComponentName.Schema, config_dict.get("schema_name")), + "database_name": cls._render_part( + ComponentName.Database, config_dict.get("database_name") + ), + "query": config_dict.get("query"), + "backup": config_dict.get("backup"), + "autorefresh": config_dict.get("autorefresh"), + } + + # this preserves the materialized view-specific default of `even` over the general default of `auto` + if dist := config_dict.get("dist"): + kwargs_dict.update({"dist": RedshiftDistConfig.from_dict(dist)}) + + if sort := config_dict.get("sort"): + kwargs_dict.update({"sort": RedshiftSortConfig.from_dict(sort)}) + + materialized_view: "RedshiftMaterializedViewConfig" = super().from_dict(kwargs_dict) # type: ignore + return materialized_view + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + config_dict = { + "mv_name": model_node.identifier, + "schema_name": model_node.schema, + "database_name": model_node.database, + "backup": model_node.config.get("backup"), + "autorefresh": model_node.config.get("auto_refresh"), + } + + if query := model_node.compiled_code: + config_dict.update({"query": query.strip()}) + + if model_node.config.get("dist"): + config_dict.update({"dist": RedshiftDistConfig.parse_model_node(model_node)}) + + if model_node.config.get("sort"): + config_dict.update({"sort": RedshiftSortConfig.parse_model_node(model_node)}) + + return config_dict + + @classmethod + def parse_relation_results(cls, relation_results: RelationResults) -> dict: + """ + Translate agate objects from the database into a standard dictionary. + + Args: + relation_results: the description of the materialized view from the database in this format: + + { + "materialized_view": agate.Table( + agate.Row({ + "database": "", + "schema": "", + "table": "", + "diststyle": "", # e.g. 
EVEN | KEY(column1) | AUTO(ALL) | AUTO(KEY(id)), + "sortkey1": "", + "autorefresh": any("t", "f"), + }) + ), + "query": agate.Table( + agate.Row({"definition": ""}) + ), + } + + Additional columns in either value are fine, as long as `sortkey` and `sortstyle` are available. + + Returns: a standard dictionary describing this `RedshiftMaterializedViewConfig` instance + """ + materialized_view: agate.Row = cls._get_first_row( + relation_results.get("materialized_view") + ) + query: agate.Row = cls._get_first_row(relation_results.get("query")) + + config_dict = { + "mv_name": materialized_view.get("table"), + "schema_name": materialized_view.get("schema"), + "database_name": materialized_view.get("database"), + "autorefresh": {"t": True, "f": False}.get(materialized_view.get("autorefresh")), + "query": cls._parse_query(query.get("definition")), + } + + # the default for materialized views differs from the default for diststyle in general + # only set it if we got a value + if materialized_view.get("diststyle"): + config_dict.update( + {"dist": RedshiftDistConfig.parse_relation_results(materialized_view)} + ) + + # TODO: this only shows the first column in the sort key + if materialized_view.get("sortkey1"): + config_dict.update( + {"sort": RedshiftSortConfig.parse_relation_results(materialized_view)} + ) + + return config_dict + + @classmethod + def _parse_query(cls, query: str) -> str: + """ + Get the select statement from the materialized view definition in Redshift. + + Args: + query: the `create materialized view` statement from `pg_views`, for example: + + create materialized view my_materialized_view + backup yes + diststyle even + sortkey (id) + auto refresh no + as ( + select * from my_base_table + ); + + Returns: the `select ...` statement, for example: + + select * from my_base_table + + """ + open_paren = query.find("as (") + len("as (") + close_paren = query.find(");") + return query[open_paren:close_paren].strip() + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftAutoRefreshConfigChange(RelationConfigChange): + context: Optional[bool] = None + + @property + def requires_full_refresh(self) -> bool: + return False + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftBackupConfigChange(RelationConfigChange): + context: Optional[bool] = None + + @property + def requires_full_refresh(self) -> bool: + return True + + +@dataclass +class RedshiftMaterializedViewConfigChangeset: + backup: Optional[RedshiftBackupConfigChange] = None + dist: Optional[RedshiftDistConfigChange] = None + sort: Optional[RedshiftSortConfigChange] = None + autorefresh: Optional[RedshiftAutoRefreshConfigChange] = None + + @property + def requires_full_refresh(self) -> bool: + return any( + { + self.autorefresh.requires_full_refresh if self.autorefresh else False, + self.backup.requires_full_refresh if self.backup else False, + self.dist.requires_full_refresh if self.dist else False, + self.sort.requires_full_refresh if self.sort else False, + } + ) + + @property + def has_changes(self) -> bool: + return any( + { + self.backup if self.backup else False, + self.dist if self.dist else False, + self.sort if self.sort else False, + self.autorefresh if self.autorefresh else False, + } + ) diff --git a/dbt/adapters/redshift/relation_configs/policies.py b/dbt/adapters/redshift/relation_configs/policies.py new file mode 100644 index 000000000..7ec8e8acb --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/policies.py @@ -0,0 +1,19 @@ +from dataclasses import dataclass + +from
dbt.adapters.base.relation import Policy + + +MAX_CHARACTERS_IN_IDENTIFIER = 127 + + +class RedshiftIncludePolicy(Policy): + database: bool = True + schema: bool = True + identifier: bool = True + + +@dataclass +class RedshiftQuotePolicy(Policy): + database: bool = True + schema: bool = True + identifier: bool = True diff --git a/dbt/adapters/redshift/relation_configs/sort.py b/dbt/adapters/redshift/relation_configs/sort.py new file mode 100644 index 000000000..58104b65f --- /dev/null +++ b/dbt/adapters/redshift/relation_configs/sort.py @@ -0,0 +1,179 @@ +from dataclasses import dataclass +from typing import Optional, FrozenSet, Set + +import agate +from dbt.adapters.relation_configs import ( + RelationConfigChange, + RelationConfigChangeAction, + RelationConfigValidationMixin, + RelationConfigValidationRule, +) +from dbt.contracts.graph.nodes import ModelNode +from dbt.dataclass_schema import StrEnum +from dbt.exceptions import DbtRuntimeError + +from dbt.adapters.redshift.relation_configs.base import RedshiftRelationConfigBase + + +class RedshiftSortStyle(StrEnum): + auto = "auto" + compound = "compound" + interleaved = "interleaved" + + @classmethod + def default(cls) -> "RedshiftSortStyle": + return cls.auto + + @classmethod + def default_with_columns(cls) -> "RedshiftSortStyle": + return cls.compound + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftSortConfig(RedshiftRelationConfigBase, RelationConfigValidationMixin): + """ + This config follows the specs found here: + https://docs.aws.amazon.com/redshift/latest/dg/r_CREATE_TABLE_NEW.html + + The following parameters are configurable by dbt: + - sort_type: the type of sort key on the table/materialized view + - defaults to `auto` if no sort config information is provided + - defaults to `compound` if columns are provided, but type is omitted + - sort_key: the column(s) to use for the sort key; cannot be combined with `sort_type=auto` + """ + + sortstyle: Optional[RedshiftSortStyle] = None + sortkey: Optional[FrozenSet[str]] = None + + def __post_init__(self): + # maintains `frozen=True` while allowing for a variable default on `sort_type` + if self.sortstyle is None and self.sortkey is None: + object.__setattr__(self, "sortstyle", RedshiftSortStyle.default()) + elif self.sortstyle is None: + object.__setattr__(self, "sortstyle", RedshiftSortStyle.default_with_columns()) + super().__post_init__() + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + # index rules get run by default with the mixin + return { + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle == RedshiftSortStyle.auto and self.sortkey is not None + ), + validation_error=DbtRuntimeError( + "A `RedshiftSortConfig` that specifies a `sortkey` does not support the `sortstyle` of `auto`." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle in (RedshiftSortStyle.compound, RedshiftSortStyle.interleaved) + and self.sortkey is None + ), + validation_error=DbtRuntimeError( + "A `sortstyle` of `compound` or `interleaved` requires a `sortkey` to be provided." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle == RedshiftSortStyle.compound + and self.sortkey is not None + and len(self.sortkey) > 400 + ), + validation_error=DbtRuntimeError( + "A compound `sortkey` only supports 400 columns."
+ ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.sortstyle == RedshiftSortStyle.interleaved + and self.sortkey is not None + and len(self.sortkey) > 8 + ), + validation_error=DbtRuntimeError( + "An interleaved `sortkey` only supports 8 columns." + ), + ), + } + + @classmethod + def from_dict(cls, config_dict) -> "RedshiftSortConfig": + kwargs_dict = { + "sortstyle": config_dict.get("sortstyle"), + "sortkey": frozenset(column for column in config_dict.get("sortkey", {})), + } + sort: "RedshiftSortConfig" = super().from_dict(kwargs_dict) # type: ignore + return sort + + @classmethod + def parse_model_node(cls, model_node: ModelNode) -> dict: + """ + Translate ModelNode objects from the user-provided config into a standard dictionary. + + Args: + model_node: the description of the sortkey and sortstyle from the user in this format: + + { + "sort_key": "" or [""] or ["",...] + "sort_type": any("compound", "interleaved", "auto") + } + + Returns: a standard dictionary describing this `RedshiftSortConfig` instance + """ + config_dict = {} + + if sortstyle := model_node.config.extra.get("sort_type"): + config_dict.update({"sortstyle": sortstyle.lower()}) + + if sortkey := model_node.config.extra.get("sort"): + # we allow users to specify the `sort_key` as a string if it's a single column + if isinstance(sortkey, str): + sortkey = [sortkey] + + config_dict.update({"sortkey": set(sortkey)}) + + return config_dict + + @classmethod + def parse_relation_results(cls, relation_results_entry: agate.Row) -> dict: + """ + Translate agate objects from the database into a standard dictionary. + + Note: + This was only built for materialized views, which does not specify a sortstyle. + Processing of `sortstyle` has been omitted here, which means it's the default (compound). + + Args: + relation_results_entry: the description of the sortkey and sortstyle from the database in this format: + + agate.Row({ + ..., + "sortkey1": "", + ... + }) + + Returns: a standard dictionary describing this `RedshiftSortConfig` instance + """ + if sortkey := relation_results_entry.get("sortkey1"): + return {"sortkey": {sortkey}} + return {} + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class RedshiftSortConfigChange(RelationConfigChange, RelationConfigValidationMixin): + context: RedshiftSortConfig + + @property + def requires_full_refresh(self) -> bool: + return True + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + return { + RelationConfigValidationRule( + validation_check=(self.action == RelationConfigChangeAction.alter), + validation_error=DbtRuntimeError( + "Invalid operation, only `alter` changes are supported for `sortkey` / `sortstyle`." 
+ ), + ), + } diff --git a/dbt/include/redshift/macros/adapters.sql b/dbt/include/redshift/macros/adapters.sql index ca888d566..62813852b 100644 --- a/dbt/include/redshift/macros/adapters.sql +++ b/dbt/include/redshift/macros/adapters.sql @@ -43,6 +43,27 @@ {{ sql_header if sql_header is not none }} + {%- set contract_config = config.get('contract') -%} + {%- if contract_config.enforced -%} + + create {% if temporary -%}temporary{%- endif %} table + {{ relation.include(database=(not temporary), schema=(not temporary)) }} + {{ get_table_columns_and_constraints() }} + {{ get_assert_columns_equivalent(sql) }} + {%- set sql = get_select_subquery(sql) %} + {% if backup == false -%}backup no{%- endif %} + {{ dist(_dist) }} + {{ sort(_sort_type, _sort) }} + ; + + insert into {{ relation.include(database=(not temporary), schema=(not temporary)) }} + ( + {{ sql }} + ) + ; + + {%- else %} + create {% if temporary -%}temporary{%- endif %} table {{ relation.include(database=(not temporary), schema=(not temporary)) }} {% if backup == false -%}backup no{%- endif %} @@ -51,6 +72,8 @@ as ( {{ sql }} ); + + {%- endif %} {%- endmacro %} @@ -62,7 +85,11 @@ {{ sql_header if sql_header is not none }} - create view {{ relation }} as ( + create view {{ relation }} + {%- set contract_config = config.get('contract') -%} + {%- if contract_config.enforced -%} + {{ get_assert_columns_equivalent(sql) }} + {%- endif %} as ( {{ sql }} ) {{ bind_qualifier }}; {% endmacro %} @@ -198,7 +225,28 @@ {% macro redshift__list_relations_without_caching(schema_relation) %} - {{ return(postgres__list_relations_without_caching(schema_relation)) }} + {% call statement('list_relations_without_caching', fetch_result=True) -%} + select + '{{ schema_relation.database }}' as database, + tablename as name, + schemaname as schema, + 'table' as type + from pg_tables + where schemaname ilike '{{ schema_relation.schema }}' + union all + select + '{{ schema_relation.database }}' as database, + viewname as name, + schemaname as schema, + case + when definition ilike '%create materialized view%' + then 'materialized_view' + else 'view' + end as type + from pg_views + where schemaname ilike '{{ schema_relation.schema }}' + {% endcall %} + {{ return(load_result('list_relations_without_caching').table) }} {% endmacro %} @@ -265,3 +313,12 @@ {% endif %} {% endmacro %} + + +{% macro redshift__get_drop_relation_sql(relation) %} + {%- if relation.is_materialized_view -%} + {{ redshift__drop_materialized_view(relation) }} + {%- else -%} + drop {{ relation.type }} if exists {{ relation }} cascade + {%- endif -%} +{% endmacro %} diff --git a/dbt/include/redshift/macros/materializations/materialized_view.sql b/dbt/include/redshift/macros/materializations/materialized_view.sql new file mode 100644 index 000000000..6343c1a7b --- /dev/null +++ b/dbt/include/redshift/macros/materializations/materialized_view.sql @@ -0,0 +1,106 @@ +{% macro redshift__get_alter_materialized_view_as_sql( + relation, + configuration_changes, + sql, + existing_relation, + backup_relation, + intermediate_relation +) %} + + -- apply a full refresh immediately if needed + {% if configuration_changes.requires_full_refresh %} + + {{ get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) }} + + -- otherwise apply individual changes as needed + {% else %} + + {%- set autorefresh = configuration_changes.autorefresh -%} + {%- if autorefresh -%}{{- log('Applying UPDATE AUTOREFRESH to: ' ~ relation) -}}{%- endif -%} + + alter materialized 
view {{ relation }} + auto refresh {% if autorefresh.context %}yes{% else %}no{% endif %} + + {%- endif -%} + +{% endmacro %} + + +{% macro redshift__get_create_materialized_view_as_sql(relation, sql) %} + + {%- set materialized_view = relation.from_runtime_config(config) -%} + + create materialized view {{ materialized_view.path }} + backup {% if materialized_view.backup %}yes{% else %}no{% endif %} + diststyle {{ materialized_view.dist.diststyle }} + {% if materialized_view.dist.distkey %}distkey ({{ materialized_view.dist.distkey }}){% endif %} + {% if materialized_view.sort.sortkey %}sortkey ({{ ','.join(materialized_view.sort.sortkey) }}){% endif %} + auto refresh {% if materialized_view.auto_refresh %}yes{% else %}no{% endif %} + as ( + {{ materialized_view.query }} + ); + +{% endmacro %} + + +{% macro redshift__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} + {{ redshift__get_drop_relation_sql(existing_relation) }}; + {{ get_create_materialized_view_as_sql(relation, sql) }} +{% endmacro %} + + +{% macro redshift__get_materialized_view_configuration_changes(existing_relation, new_config) %} + {% set _existing_materialized_view = redshift__describe_materialized_view(existing_relation) %} + {% set _configuration_changes = existing_relation.materialized_view_config_changeset(_existing_materialized_view, new_config) %} + {% do return(_configuration_changes) %} +{% endmacro %} + + +{% macro redshift__refresh_materialized_view(relation) -%} + refresh materialized view {{ relation }} +{% endmacro %} + + +{% macro redshift__describe_materialized_view(relation) %} + {#- + These need to be separate queries because redshift will not let you run queries + against svv_table_info and pg_views in the same query. The same is true of svv_redshift_columns. 
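+ The first query joins svv_table_info to stv_mv_info to recover diststyle, sortkey1, and autorefresh; the second pulls the view definition from pg_views.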
+ -#} + + {%- set _materialized_view_sql -%} + select + tb.database, + tb.schema, + tb.table, + tb.diststyle, + tb.sortkey1, + mv.autorefresh + from svv_table_info tb + left join stv_mv_info mv + on mv.db_name = tb.database + and mv.schema = tb.schema + and mv.name = tb.table + where tb.table ilike '{{ relation.identifier }}' + and tb.schema ilike '{{ relation.schema }}' + and tb.database ilike '{{ relation.database }}' + {%- endset %} + {% set _materialized_view = run_query(_materialized_view_sql) %} + + {%- set _query_sql -%} + select + vw.definition + from pg_views vw + where vw.viewname = '{{ relation.identifier }}' + and vw.schemaname = '{{ relation.schema }}' + and vw.definition ilike '%create materialized view%' + {%- endset %} + {% set _query = run_query(_query_sql) %} + + {% do return({'materialized_view': _materialized_view, 'query': _query}) %} + +{% endmacro %} + + +{% macro redshift__drop_materialized_view(relation) -%} + drop materialized view if exists {{ relation }} +{%- endmacro %} diff --git a/dbt/include/redshift/macros/materializations/seeds/helpers.sql b/dbt/include/redshift/macros/materializations/seeds/helpers.sql index b04ad7c5b..32afdae81 100644 --- a/dbt/include/redshift/macros/materializations/seeds/helpers.sql +++ b/dbt/include/redshift/macros/materializations/seeds/helpers.sql @@ -21,3 +21,7 @@ {{ return(sql) }} {% endmacro %} + +{% macro redshift__get_batch_size() %} + {{ return(500) }} +{% endmacro %} diff --git a/dbt/include/redshift/macros/relations.sql b/dbt/include/redshift/macros/relations.sql index ed682ae7d..28c6bc377 100644 --- a/dbt/include/redshift/macros/relations.sql +++ b/dbt/include/redshift/macros/relations.sql @@ -1,3 +1,45 @@ -{% macro redshift__get_relations () -%} - {{ return(dbt.postgres__get_relations()) }} +{% macro redshift__get_relations() -%} + +{%- call statement('relations', fetch_result=True) -%} + +with + relation as ( + select + pg_class.oid as relation_id, + pg_class.relname as relation_name, + pg_class.relnamespace as schema_id, + pg_namespace.nspname as schema_name, + pg_class.relkind as relation_type + from pg_class + join pg_namespace + on pg_class.relnamespace = pg_namespace.oid + where pg_namespace.nspname != 'information_schema' + and pg_namespace.nspname not like 'pg\_%' + ), + dependency as ( + select distinct + coalesce(pg_rewrite.ev_class, pg_depend.objid) as dep_relation_id, + pg_depend.refobjid as ref_relation_id, + pg_depend.refclassid as ref_class_id + from pg_depend + left join pg_rewrite + on pg_depend.objid = pg_rewrite.oid + ) + +select distinct + dep.schema_name as dependent_schema, + dep.relation_name as dependent_name, + ref.schema_name as referenced_schema, + ref.relation_name as referenced_name +from dependency +join relation ref + on dependency.ref_relation_id = ref.relation_id +join relation dep + on dependency.dep_relation_id = dep.relation_id +where ref.relation_name != dep.relation_name + +{%- endcall -%} + +{{ return(load_result('relations').table) }} + {% endmacro %} diff --git a/dbt/include/redshift/macros/utils/dateadd.sql b/dbt/include/redshift/macros/utils/dateadd.sql index dc90f9231..ba3e666a3 100644 --- a/dbt/include/redshift/macros/utils/dateadd.sql +++ b/dbt/include/redshift/macros/utils/dateadd.sql @@ -1,4 +1,3 @@ -{#-- redshift should use default instead of postgres --#} {% macro redshift__dateadd(datepart, interval, from_date_or_timestamp) %} dateadd( diff --git a/dbt/include/redshift/macros/utils/datediff.sql b/dbt/include/redshift/macros/utils/datediff.sql index c20513961..1d540b908 
100644 --- a/dbt/include/redshift/macros/utils/datediff.sql +++ b/dbt/include/redshift/macros/utils/datediff.sql @@ -1,4 +1,3 @@ -{#-- redshift should use default instead of postgres --#} {% macro redshift__datediff(first_date, second_date, datepart) -%} datediff( diff --git a/dbt/include/redshift/macros/utils/last_day.sql b/dbt/include/redshift/macros/utils/last_day.sql index be0e2253a..8c643644b 100644 --- a/dbt/include/redshift/macros/utils/last_day.sql +++ b/dbt/include/redshift/macros/utils/last_day.sql @@ -1,4 +1,3 @@ -{# redshift should use default instead of postgres #} {% macro redshift__last_day(date, datepart) %} cast( {{dbt.dateadd('day', '-1', diff --git a/dev-requirements.txt b/dev-requirements.txt index 6eac3d2b4..bed71ec05 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -4,22 +4,27 @@ git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-postgres&subdirectory=plugins/postgres -black~=22.8.0 -click~=8.1.3 +# if version 1.x or greater -> pin to major version +# if version 0.x -> pin to minor +black~=23.3 bumpversion~=0.6.0 -flake8 -flaky~=3.7.0 -freezegun~=0.3.12 -ipdb~=0.13.9 -mypy~=0.971.0 -pip-tools~=6.11.0 -pre-commit~=2.20.0 -pytest~=7.2.0 +click~=8.1 +flake8~=6.0 +flaky~=3.7 +freezegun~=1.2 +ipdb~=0.13.13 +mypy==1.4.1 # patch updates have historically introduced breaking changes +pip-tools~=6.13 +pre-commit~=3.3 +pre-commit-hooks~=4.4 +pytest~=7.4 +pytest-csv~=3.0 pytest-dotenv~=0.5.2 -pytest-logbook~=1.2.0 -pytest-csv~=3.0.0 -pytest-xdist~=3.1.0 -pytz~=2022.6.0 -tox~=4.0.0 -twine~=4.0.2 -wheel~=0.37.1 +pytest-logbook~=1.2 +pytest-xdist~=3.3 +pytz~=2023.3 +tox~=4.6 +types-pytz~=2023.3 +types-requests~=2.31 +twine~=4.0 +wheel~=0.40 diff --git a/pytest.ini b/pytest.ini index b04a6ccf3..b3d74bc14 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,5 +6,4 @@ env_files = test.env testpaths = tests/unit - tests/integration tests/functional diff --git a/scripts/env-setup.sh b/scripts/env-setup.sh new file mode 100644 index 000000000..866d8f749 --- /dev/null +++ b/scripts/env-setup.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# Set TOXENV environment variable for subsequent steps +echo "TOXENV=integration-redshift" >> $GITHUB_ENV +# Set INTEGRATION_TESTS_SECRETS_PREFIX environment variable for subsequent steps +# All GH secrets that have this prefix will be set as environment variables +echo "INTEGRATION_TESTS_SECRETS_PREFIX=REDSHIFT_TEST" >> $GITHUB_ENV +# Set environment variables required for integration tests +echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV +echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV +echo "DBT_TEST_USER_3=dbt_test_user_3" >> $GITHUB_ENV diff --git a/setup.py b/setup.py index 6f82944de..56d4fa101 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,9 @@ #!/usr/bin/env python import sys -if sys.version_info < (3, 7): +if sys.version_info < (3, 8): print("Error: dbt does not support this version of Python.") - print("Please upgrade to Python 3.7 or higher.") + print("Please upgrade to Python 3.8 or higher.") sys.exit(1) @@ -62,7 +62,8 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: plugin_version: the version of this plugin, this is an argument in case we ever want to unit test this """ try: - major, minor, plugin_patch = plugin_version.split(".") + # *_ may indicate a dev release which won't affect the core version needed + major, minor, plugin_patch, *_ = 
plugin_version.split(".", maxsplit=3) except ValueError: raise ValueError(f"Invalid version: {plugin_version}") @@ -84,6 +85,9 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: f"dbt-core~={_core_version()}", f"dbt-postgres~={_core_version()}", "boto3~=1.26.26", + "redshift-connector~=2.0.911", + # installed via dbt-core but referenced directly; don't pin to avoid version conflicts with dbt-core + "agate", ], zip_safe=False, classifiers=[ @@ -92,11 +96,10 @@ def _core_version(plugin_version: str = _plugin_version()) -> str: "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ], - python_requires=">=3.7", + python_requires=">=3.8", ) diff --git a/tests/conftest.py b/tests/conftest.py index 18fcbb714..96f0d43e4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,12 +11,12 @@ @pytest.fixture(scope="class") def dbt_profile_target(): return { - 'type': 'redshift', - 'threads': 1, - 'retries': 6, - 'host': os.getenv('REDSHIFT_TEST_HOST'), - 'port': int(os.getenv('REDSHIFT_TEST_PORT')), - 'user': os.getenv('REDSHIFT_TEST_USER'), - 'pass': os.getenv('REDSHIFT_TEST_PASS'), - 'dbname': os.getenv('REDSHIFT_TEST_DBNAME'), + "type": "redshift", + "threads": 1, + "retries": 6, + "host": os.getenv("REDSHIFT_TEST_HOST"), + "port": int(os.getenv("REDSHIFT_TEST_PORT")), + "user": os.getenv("REDSHIFT_TEST_USER"), + "pass": os.getenv("REDSHIFT_TEST_PASS"), + "dbname": os.getenv("REDSHIFT_TEST_DBNAME"), } diff --git a/tests/functional/adapter/backup_tests/models.py b/tests/functional/adapter/backup_tests/models.py new file mode 100644 index 000000000..6432e7319 --- /dev/null +++ b/tests/functional/adapter/backup_tests/models.py @@ -0,0 +1,61 @@ +BACKUP_IS_FALSE = """ +{{ config( + materialized='table', + backup=False +) }} +select 1 as my_col +""" + + +BACKUP_IS_TRUE = """ +{{ config( + materialized='table', + backup=True +) }} +select 1 as my_col +""" + + +BACKUP_IS_UNDEFINED = """ +{{ config( + materialized='table' +) }} +select 1 as my_col +""" + + +BACKUP_IS_TRUE_VIEW = """ +{{ config( + materialized='view', + backup=True +) }} +select 1 as my_col +""" + + +SYNTAX_WITH_DISTKEY = """ +{{ config( + materialized='table', + backup=False, + dist='my_col' +) }} +select 1 as my_col +""" + + +SYNTAX_WITH_SORTKEY = """ +{{ config( + materialized='table', + backup=False, + sort='my_col' +) }} +select 1 as my_col +""" + + +BACKUP_IS_UNDEFINED_DEPENDENT_VIEW = """ +{{ config( + materialized='view', +) }} +select * from {{ ref('backup_is_undefined') }} +""" diff --git a/tests/functional/adapter/backup_tests/test_backup_table.py b/tests/functional/adapter/backup_tests/test_backup_table.py new file mode 100644 index 000000000..6871b70a7 --- /dev/null +++ b/tests/functional/adapter/backup_tests/test_backup_table.py @@ -0,0 +1,108 @@ +import pytest + +from dbt.tests.util import run_dbt + +from tests.functional.adapter.backup_tests import models + + +class BackupTableBase: + @pytest.fixture(scope="class", autouse=True) + def _run_dbt(self, project): + run_dbt(["run"]) + + +class TestBackupTableOption(BackupTableBase): + @pytest.fixture(scope="class") + def models(self): + return { + "backup_is_false.sql": models.BACKUP_IS_FALSE, + "backup_is_true.sql": models.BACKUP_IS_TRUE, + "backup_is_undefined.sql": 
models.BACKUP_IS_UNDEFINED, + "backup_is_true_view.sql": models.BACKUP_IS_TRUE_VIEW, + } + + @pytest.mark.parametrize( + "model_ddl,backup_expected", + [ + ("backup_is_false", False), + ("backup_is_true", True), + ("backup_is_undefined", True), + ("backup_is_true_view", True), + ], + indirect=["model_ddl"], + ) + def test_setting_reflects_config_option(self, model_ddl: str, backup_expected: bool): + """ + Test different scenarios of configuration at the MODEL level and verify the expected setting for backup + + This test looks for whether `backup no` appears in the DDL file. If it does, then the table will not be backed + up. If it does not appear, the table will be backed up. + + Args: + model_ddl: the DDL for each model as a string + backup_expected: whether backup is expected for this model + """ + backup_will_occur = "backup no" not in model_ddl.lower() + assert backup_will_occur == backup_expected + + +class TestBackupTableSyntax(BackupTableBase): + @pytest.fixture(scope="class") + def models(self): + return { + "syntax_with_distkey.sql": models.SYNTAX_WITH_DISTKEY, + "syntax_with_sortkey.sql": models.SYNTAX_WITH_SORTKEY, + } + + @pytest.mark.parametrize( + "model_ddl,search_phrase", + [ + ("syntax_with_distkey", "diststyle key distkey"), + ("syntax_with_sortkey", "compound sortkey"), + ], + indirect=["model_ddl"], + ) + def test_backup_predicate_precedes_secondary_predicates(self, model_ddl, search_phrase): + """ + Test whether `backup no` appears roughly in the correct spot in the DDL + + This test verifies that the backup predicate comes before the secondary predicates. + This test does not guarantee that the resulting DDL is properly formed. + + Args: + model_ddl: the DDL for each model as a string + search_phrase: the string within the DDL that indicates the distkey or sortkey + """ + assert model_ddl.find("backup no") < model_ddl.find(search_phrase) + + +class TestBackupTableProjectDefault(BackupTableBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"backup": False}} + + @pytest.fixture(scope="class") + def models(self): + return { + "backup_is_true.sql": models.BACKUP_IS_TRUE, + "backup_is_undefined.sql": models.BACKUP_IS_UNDEFINED, + } + + @pytest.mark.parametrize( + "model_ddl,backup_expected", + [("backup_is_true", True), ("backup_is_undefined", False)], + indirect=["model_ddl"], + ) + def test_setting_defaults_to_project_option(self, model_ddl: str, backup_expected: bool): + """ + Test different scenarios of configuration at the PROJECT level and verify the expected setting for backup + + This test looks for whether `backup no` appears in the DDL file. If it does, then the table will not be backed + up. If it does not appear, the table will be backed up. + + Args: + model_ddl: the DDL for each model as a string + backup_expected: whether backup is expected for this model + """ + backup_will_occur = "backup no" not in model_ddl.lower() + assert backup_will_occur == backup_expected diff --git a/tests/functional/adapter/conftest.py b/tests/functional/adapter/conftest.py new file mode 100644 index 000000000..c5c980154 --- /dev/null +++ b/tests/functional/adapter/conftest.py @@ -0,0 +1,25 @@ +import pytest + + +@pytest.fixture +def model_ddl(request) -> str: + """ + Returns the contents of the DDL file for the model provided. Use with pytest parameterization. 
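+ The fixture reads the compiled DDL from the target/run directory, so dbt must have built the model before the fixture is used.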
+ + Example: + === + @pytest.mark.parametrize( + "model_ddl,backup_expected", + [("backup_is_false", False)], + indirect=["model_ddl"] + ) + def test_setting_reflects_config_option(self, model_ddl: str, backup_expected: bool): + backup_will_occur = "backup no" not in model_ddl.lower() + assert backup_will_occur == backup_expected + === + + In this example, the fixture returns the contents of the backup_is_false DDL file as a string. + This string is then referenced in the test as model_ddl. + """ + with open(f"target/run/test/models/{request.param}.sql", "r") as ddl_file: + yield "\n".join(ddl_file.readlines()) diff --git a/tests/functional/adapter/incremental/test_incremental_on_schema_change.py b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py new file mode 100644 index 000000000..7b73d212b --- /dev/null +++ b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py @@ -0,0 +1,7 @@ +from dbt.tests.adapter.incremental.test_incremental_on_schema_change import ( + BaseIncrementalOnSchemaChange, +) + + +class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange): + pass diff --git a/tests/functional/adapter/test_incremental_unique_id.py b/tests/functional/adapter/incremental/test_incremental_unique_id.py similarity index 94% rename from tests/functional/adapter/test_incremental_unique_id.py rename to tests/functional/adapter/incremental/test_incremental_unique_id.py index 14e9b7ea8..5fcdfbe16 100644 --- a/tests/functional/adapter/test_incremental_unique_id.py +++ b/tests/functional/adapter/incremental/test_incremental_unique_id.py @@ -2,4 +2,4 @@ class TestUniqueKeyRedshift(BaseIncrementalUniqueKey): - pass \ No newline at end of file + pass diff --git a/tests/functional/adapter/materialized_view_tests/fixtures.py b/tests/functional/adapter/materialized_view_tests/fixtures.py new file mode 100644 index 000000000..785931c1b --- /dev/null +++ b/tests/functional/adapter/materialized_view_tests/fixtures.py @@ -0,0 +1,85 @@ +import pytest + +from dbt.tests.adapter.materialized_view.base import Base +from dbt.tests.adapter.materialized_view.on_configuration_change import ( + OnConfigurationChangeBase, + get_model_file, + set_model_file, +) +from dbt.tests.util import relation_from_name, run_sql_with_adapter + + +def refresh_materialized_view(project, name: str): + sql = f"refresh materialized view {relation_from_name(project.adapter, name)}" + run_sql_with_adapter(project.adapter, sql) + + +class RedshiftBasicBase(Base): + @pytest.fixture(scope="class") + def models(self): + base_table = """ + {{ config(materialized='table') }} + select 1 as base_column + """ + base_materialized_view = """ + {{ config(materialized='materialized_view') }} + select * from {{ ref('base_table') }} + """ + return {"base_table.sql": base_table, "base_materialized_view.sql": base_materialized_view} + + +class RedshiftOnConfigurationChangeBase(OnConfigurationChangeBase): + @pytest.fixture(scope="class") + def models(self): + base_table = """ + {{ config( + materialized='table', + ) }} + select + 1 as id, + 100 as value + """ + base_materialized_view = """ + {{ config( + materialized='materialized_view', + sort='id' + ) }} + select * from {{ ref('base_table') }} + """ + return {"base_table.sql": base_table, "base_materialized_view.sql": base_materialized_view} + + @pytest.fixture(scope="function") + def configuration_changes_apply(self, project): + initial_model = get_model_file(project, "base_materialized_view") + + # turn on auto_refresh + new_model = 
initial_model.replace( + "materialized='materialized_view',", + "materialized='materialized_view', auto_refresh='yes',", + ) + set_model_file(project, "base_materialized_view", new_model) + + yield + + # set this back for the next test + set_model_file(project, "base_materialized_view", initial_model) + + @pytest.fixture(scope="function") + def configuration_changes_refresh(self, project): + initial_model = get_model_file(project, "base_materialized_view") + + # add a sort_key + new_model = initial_model.replace( + "sort='id'", + "sort='value'", + ) + set_model_file(project, "base_materialized_view", new_model) + + yield + + # set this back for the next test + set_model_file(project, "base_materialized_view", initial_model) + + @pytest.fixture(scope="function") + def update_auto_refresh_message(self, project): + return f"Applying UPDATE AUTOREFRESH to: {relation_from_name(project.adapter, 'base_materialized_view')}" diff --git a/tests/functional/adapter/materialized_view_tests/test_materialized_views.py b/tests/functional/adapter/materialized_view_tests/test_materialized_views.py new file mode 100644 index 000000000..ff63f1e01 --- /dev/null +++ b/tests/functional/adapter/materialized_view_tests/test_materialized_views.py @@ -0,0 +1,239 @@ +import pytest + +from dbt.contracts.graph.model_config import OnConfigurationChangeOption +from dbt.contracts.relation import RelationType +from dbt.contracts.results import RunStatus +from dbt.tests.adapter.materialized_view.base import ( + run_model, + assert_model_exists_and_is_correct_type, + insert_record, + get_row_count, +) +from dbt.tests.adapter.materialized_view.on_configuration_change import ( + assert_proper_scenario, +) + +from tests.functional.adapter.materialized_view_tests.fixtures import ( + RedshiftBasicBase, + RedshiftOnConfigurationChangeBase, + refresh_materialized_view, +) + + +class TestBasic(RedshiftBasicBase): + def test_relation_is_materialized_view_on_initial_creation(self, project): + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + assert_model_exists_and_is_correct_type(project, "base_table", RelationType.Table) + + def test_relation_is_materialized_view_when_rerun(self, project): + run_model("base_materialized_view") + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + + def test_relation_is_materialized_view_on_full_refresh(self, project): + run_model("base_materialized_view", full_refresh=True) + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + + def test_relation_is_materialized_view_on_update(self, project): + run_model("base_materialized_view", run_args=["--vars", "quoting: {identifier: True}"]) + assert_model_exists_and_is_correct_type( + project, "base_materialized_view", RelationType.MaterializedView + ) + + def test_updated_base_table_data_only_shows_in_materialized_view_after_refresh(self, project): + # poll database + table_start = get_row_count(project, "base_table") + view_start = get_row_count(project, "base_materialized_view") + assert view_start == table_start + + # insert new record in table + new_record = (2,) + insert_record(project, new_record, "base_table", ["base_column"]) + + # poll database + table_mid = get_row_count(project, "base_table") + view_mid = get_row_count(project, "base_materialized_view") + + # refresh the materialized view + refresh_materialized_view(project, "base_materialized_view") + + # poll 
database + table_end = get_row_count(project, "base_table") + view_end = get_row_count(project, "base_materialized_view") + assert view_end == table_end + + # new records were inserted in the table but didn't show up in the view until it was refreshed + assert table_start < table_mid == table_end + assert view_start == view_mid < view_end + + +class TestOnConfigurationChangeApply(RedshiftOnConfigurationChangeBase): + def test_full_refresh_takes_precedence_over_any_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + replace_message, + configuration_change_message, + ): + results, logs = run_model("base_materialized_view", full_refresh=True) + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[replace_message], + messages_not_in_logs=[configuration_change_message], + ) + + def test_model_is_refreshed_with_no_configuration_changes( + self, refresh_message, configuration_change_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[refresh_message, configuration_change_message], + ) + + def test_model_applies_changes_with_small_configuration_changes( + self, configuration_changes_apply, alter_message, update_auto_refresh_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[alter_message, update_auto_refresh_message], + ) + + def test_model_rebuilds_with_large_configuration_changes( + self, configuration_changes_refresh, alter_message, replace_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[alter_message, replace_message], + ) + + def test_model_only_rebuilds_with_large_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + alter_message, + replace_message, + update_auto_refresh_message, + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Apply, + results, + logs, + RunStatus.Success, + messages_in_logs=[alter_message, replace_message], + messages_not_in_logs=[update_auto_refresh_message], + ) + + +class TestOnConfigurationChangeContinue(RedshiftOnConfigurationChangeBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"on_configuration_change": OnConfigurationChangeOption.Continue.value}} + + def test_full_refresh_takes_precedence_over_any_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + replace_message, + configuration_change_message, + ): + results, logs = run_model("base_materialized_view", full_refresh=True) + assert_proper_scenario( + OnConfigurationChangeOption.Continue, + results, + logs, + RunStatus.Success, + messages_in_logs=[replace_message], + messages_not_in_logs=[configuration_change_message], + ) + + def test_model_is_refreshed_with_no_configuration_changes( + self, refresh_message, configuration_change_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Continue, + results, + logs, + RunStatus.Success, + messages_in_logs=[refresh_message, configuration_change_message], + ) + + def 
test_model_is_skipped_with_configuration_changes( + self, configuration_changes_apply, configuration_change_continue_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Continue, + results, + logs, + RunStatus.Success, + messages_in_logs=[configuration_change_continue_message], + ) + + +class TestOnConfigurationChangeFail(RedshiftOnConfigurationChangeBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"on_configuration_change": OnConfigurationChangeOption.Fail.value}} + + def test_full_refresh_takes_precedence_over_any_configuration_changes( + self, + configuration_changes_apply, + configuration_changes_refresh, + replace_message, + configuration_change_message, + ): + results, logs = run_model("base_materialized_view", full_refresh=True) + assert_proper_scenario( + OnConfigurationChangeOption.Fail, + results, + logs, + RunStatus.Success, + messages_in_logs=[replace_message], + messages_not_in_logs=[configuration_change_message], + ) + + def test_model_is_refreshed_with_no_configuration_changes( + self, refresh_message, configuration_change_message + ): + results, logs = run_model("base_materialized_view") + assert_proper_scenario( + OnConfigurationChangeOption.Fail, + results, + logs, + RunStatus.Success, + messages_in_logs=[refresh_message, configuration_change_message], + ) + + def test_run_fails_with_configuration_changes( + self, configuration_changes_apply, configuration_change_fail_message + ): + results, logs = run_model("base_materialized_view", expect_pass=False) + assert_proper_scenario( + OnConfigurationChangeOption.Fail, + results, + logs, + RunStatus.Error, + messages_in_logs=[configuration_change_fail_message], + ) diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py index 06cf9948f..8f8198a27 100644 --- a/tests/functional/adapter/test_basic.py +++ b/tests/functional/adapter/test_basic.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import AnyStringWith +from dbt.tests.util import AnyStringWith, run_dbt from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests from dbt.tests.adapter.basic.test_singular_tests_ephemeral import BaseSingularTestsEphemeral @@ -12,10 +12,17 @@ from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod from dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate, BaseDocsGenReferences -from dbt.tests.adapter.basic.expected_catalog import base_expected_catalog, no_stats, expected_references_catalog +from dbt.tests.adapter.basic.expected_catalog import ( + base_expected_catalog, + no_stats, + expected_references_catalog, +) from dbt.tests.adapter.basic.files import seeds_base_csv, seeds_added_csv, seeds_newcolumns_csv -from tests.functional.adapter.expected_stats import redshift_stats, redshift_ephemeral_summary_stats +from tests.functional.adapter.expected_stats import ( + redshift_stats, + redshift_ephemeral_summary_stats, +) # set the datatype of the name column in the 'added' seed so that it can hold the '_update' that's added @@ -86,19 +93,19 @@ class TestBaseAdapterMethod(BaseAdapterMethod): class TestDocsGenerateRedshift(BaseDocsGenerate): - @pytest.fixture(scope="class") + @pytest.fixture(scope="class") def expected_catalog(self, project, profile_user): return base_expected_catalog( - 
project, - role=profile_user, - id_type="integer", + project, + role=profile_user, + id_type="integer", text_type=AnyStringWith("character varying"), time_type="timestamp without time zone", - view_type="VIEW", - table_type="BASE TABLE", + view_type="VIEW", + table_type="BASE TABLE", model_stats=no_stats(), seed_stats=redshift_stats(), - ) + ) # TODO: update this or delete it @@ -120,3 +127,41 @@ def expected_catalog(self, project, profile_user): view_summary_stats=no_stats(), ephemeral_summary_stats=redshift_ephemeral_summary_stats(), ) + + +class TestViewRerun: + """ + This test addresses: https://github.com/dbt-labs/dbt-redshift/issues/365 + """ + + @pytest.fixture(scope="class") + def models(self): + return { + "base_table.sql": "{{ config(materialized='table') }} select 1 as id", + "base_view.sql": "{{ config(bind=True) }} select * from {{ ref('base_table') }}", + } + + def test_rerunning_dependent_view_refreshes(self, project): + """ + Assert that subsequent runs of `dbt run` will correctly recreate a view. + """ + + def db_objects(): + check_objects_exist_sql = f""" + select tablename + from pg_tables + where schemaname ilike '{project.test_schema}' + union all + select viewname + from pg_views + where schemaname ilike '{project.test_schema}' + order by 1 + """ + return project.run_sql(check_objects_exist_sql, fetch="all") + + results = run_dbt(["run"]) + assert len(results) == 2 + assert db_objects() == (["base_table"], ["base_view"]) + results = run_dbt(["run"]) + assert len(results) == 2 + assert db_objects() == (["base_table"], ["base_view"]) diff --git a/tests/functional/adapter/test_changing_relation_type.py b/tests/functional/adapter/test_changing_relation_type.py index 1f0ba15ad..81ba99918 100644 --- a/tests/functional/adapter/test_changing_relation_type.py +++ b/tests/functional/adapter/test_changing_relation_type.py @@ -1,4 +1,5 @@ from dbt.tests.adapter.relations.test_changing_relation_type import BaseChangeRelationTypeValidator + class TestRedshiftChangeRelationTypes(BaseChangeRelationTypeValidator): - pass \ No newline at end of file + pass diff --git a/tests/functional/adapter/test_column_types.py b/tests/functional/adapter/test_column_types.py new file mode 100644 index 000000000..e24167456 --- /dev/null +++ b/tests/functional/adapter/test_column_types.py @@ -0,0 +1,56 @@ +import pytest +from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes + +_MODEL_SQL = """ +select + 1::smallint as smallint_col, + 2::int as int_col, + 3::bigint as bigint_col, + 4::int2 as int2_col, + 5::int4 as int4_col, + 6::int8 as int8_col, + 7::integer as integer_col, + 8.0::real as real_col, + 9.0::float4 as float4_col, + 10.0::float8 as float8_col, + 11.0::float as float_col, + 12.0::double precision as double_col, + 13.0::numeric as numeric_col, + 14.0::decimal as decimal_col, + '15'::varchar(20) as varchar_col, + '16'::text as text_col +""" + +_SCHEMA_YML = """ +version: 2 +models: + - name: model + tests: + - is_type: + column_map: + smallint_col: ['integer', 'number'] + int_col: ['integer', 'number'] + bigint_col: ['integer', 'number'] + int2_col: ['integer', 'number'] + int4_col: ['integer', 'number'] + int8_col: ['integer', 'number'] + integer_col: ['integer', 'number'] + real_col: ['float', 'number'] + double_col: ['float', 'number'] + float4_col: ['float', 'number'] + float8_col: ['float', 'number'] + float_col: ['float', 'number'] + numeric_col: ['numeric', 'number'] + decimal_col: ['numeric', 'number'] + varchar_col: ['string', 'not number'] + text_col: 
['string', 'not number'] +""" + + +class TestRedshiftColumnTypes(BaseColumnTypes): + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": _MODEL_SQL, "schema.yml": _SCHEMA_YML} + + def test_run_and_test(self, project): + self.run_and_test() diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py new file mode 100644 index 000000000..a97c66bbd --- /dev/null +++ b/tests/functional/adapter/test_constraints.py @@ -0,0 +1,149 @@ +import pytest +from dbt.tests.adapter.constraints.test_constraints import ( + BaseTableConstraintsColumnsEqual, + BaseViewConstraintsColumnsEqual, + BaseIncrementalConstraintsColumnsEqual, + BaseConstraintsRuntimeDdlEnforcement, + BaseConstraintsRollback, + BaseIncrementalConstraintsRuntimeDdlEnforcement, + BaseIncrementalConstraintsRollback, + BaseModelConstraintsRuntimeEnforcement, + BaseConstraintQuotedColumn, +) + +_expected_sql_redshift = """ +create table <model_identifier> ( + id integer not null primary key references <foreign_key_model_identifier> (id) unique, + color text, + date_day text +) ; +insert into <model_identifier> +( + select + id, + color, + date_day from + ( + -- depends_on: <foreign_key_model_identifier> + select + 'blue' as color, + 1 as id, + '2019-01-01' as date_day + ) as model_subq +) +; +""" + + +class RedshiftColumnEqualSetup: + @pytest.fixture + def data_types(self, schema_int_type, int_type, string_type): + # NOTE: Unlike some other adapters, we don't test array or JSON types here, because + # Redshift does not support them as materialized table column types. + + # sql_column_value, schema_data_type, error_data_type + return [ + ["1", schema_int_type, int_type], + ["'1'", string_type, string_type], + ["cast('2019-01-01' as date)", "date", "DATE"], + ["true", "bool", "BOOL"], + ["'2013-11-03 00:00:00-07'::timestamptz", "timestamptz", "TIMESTAMPTZ"], + ["'2013-11-03 00:00:00-07'::timestamp", "timestamp", "TIMESTAMP"], + ["'1'::numeric", "numeric", "NUMERIC"], + ] + + +class TestRedshiftTableConstraintsColumnsEqual( + RedshiftColumnEqualSetup, BaseTableConstraintsColumnsEqual +): + pass + + +class TestRedshiftViewConstraintsColumnsEqual( + RedshiftColumnEqualSetup, BaseViewConstraintsColumnsEqual +): + pass + + +class TestRedshiftIncrementalConstraintsColumnsEqual( + RedshiftColumnEqualSetup, BaseIncrementalConstraintsColumnsEqual +): + pass + + +class TestRedshiftTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement): + @pytest.fixture(scope="class") + def expected_sql(self): + return _expected_sql_redshift + + +class TestRedshiftTableConstraintsRollback(BaseConstraintsRollback): + @pytest.fixture(scope="class") + def expected_error_messages(self): + return ["Cannot insert a NULL value into column id"] + + +class TestRedshiftIncrementalConstraintsRuntimeDdlEnforcement( + BaseIncrementalConstraintsRuntimeDdlEnforcement +): + @pytest.fixture(scope="class") + def expected_sql(self): + return _expected_sql_redshift + + +class TestRedshiftIncrementalConstraintsRollback(BaseIncrementalConstraintsRollback): + @pytest.fixture(scope="class") + def expected_error_messages(self): + return ["Cannot insert a NULL value into column id"] + + +class TestRedshiftModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement): + @pytest.fixture(scope="class") + def expected_sql(self): + return """ +create table <model_identifier> ( + id integer not null, + color text, + date_day text, + primary key (id), + constraint strange_uniqueness_requirement unique (color, date_day), + foreign key (id) references <foreign_key_model_identifier> (id) +) ; +insert into <model_identifier> +( + select + id, + color, + date_day from + ( + -- depends_on: <foreign_key_model_identifier> + select + 'blue' as color, + 1 as id, + '2019-01-01' as date_day + ) as model_subq +) +; +""" + + +class TestRedshiftConstraintQuotedColumn(BaseConstraintQuotedColumn): + @pytest.fixture(scope="class") + def expected_sql(self): + return """ +create table <model_identifier> ( + id integer not null, + "from" text not null, + date_day text +) ; +insert into <model_identifier> +( + select id, "from", date_day + from ( + select + 'blue' as "from", + 1 as id, + '2019-01-01' as date_day + ) as model_subq +); +""" diff --git a/tests/functional/adapter/test_grants.py b/tests/functional/adapter/test_grants.py index bbad59f96..b627e450a 100644 --- a/tests/functional/adapter/test_grants.py +++ b/tests/functional/adapter/test_grants.py @@ -1,7 +1,5 @@ -import pytest from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants from dbt.tests.adapter.grants.test_incremental_grants import BaseIncrementalGrants -from dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants from dbt.tests.adapter.grants.test_seed_grants import BaseSeedGrants from dbt.tests.adapter.grants.test_snapshot_grants import BaseSnapshotGrants diff --git a/tests/functional/adapter/test_late_binding_view.py b/tests/functional/adapter/test_late_binding_view.py new file mode 100644 index 000000000..013bf06be --- /dev/null +++ b/tests/functional/adapter/test_late_binding_view.py @@ -0,0 +1,49 @@ +import pytest + +from dbt.tests.util import run_dbt, run_sql_with_adapter + +_MODEL_SQL = """{{ + config( + materialized='view', + bind=False + ) +}} +select * from {{ ref('seed') }} +""" + +_SEED_CSV = """ +id,first_name,email,ip_address,updated_at +1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 +""".lstrip() + + +class TestLateBindingView: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": _MODEL_SQL, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": _SEED_CSV} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + } + } + + def test_late_binding_view_query(self, project): + seed_run_result = run_dbt(["seed"]) + assert len(seed_run_result) == 1 + run_result = run_dbt() + assert len(run_result) == 1 + # drop the table. Use 'cascade' here so that if late-binding views + # didn't work as advertised, the following dbt run will fail.
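+ # a late-binding view can be (re)created even when the table it references no longer exists, so the next run should still succeed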
+ drop_query = """drop table if exists {}.seed cascade""".format(project.test_schema) + run_sql_with_adapter(project.adapter, drop_query) + run_result = run_dbt() + assert len(run_result) == 1 diff --git a/tests/functional/adapter/test_macros.py b/tests/functional/adapter/test_macros.py new file mode 100644 index 000000000..0596ab549 --- /dev/null +++ b/tests/functional/adapter/test_macros.py @@ -0,0 +1,50 @@ +import pytest +from dbt.tests.util import run_dbt + +_MODEL_SQL = """ +{{ dispatch_to_parent() }} +select 1 as id +""" + +_MACRO_SQL = """ +{% macro do_something2(foo2, bar2) %} + + select + '{{ foo2 }}' as foo2, + '{{ bar2 }}' as bar2 + +{% endmacro %} + +{% macro with_ref() %} + + {{ ref('table_model') }} + +{% endmacro %} + +{% macro dispatch_to_parent() %} + {% set macro = adapter.dispatch('dispatch_to_parent') %} + {{ macro() }} +{% endmacro %} + +{% macro default__dispatch_to_parent() %} + {% set msg = 'No default implementation of dispatch_to_parent' %} + {{ exceptions.raise_compiler_error(msg) }} +{% endmacro %} + +{% macro postgres__dispatch_to_parent() %} + {{ return('') }} +{% endmacro %} +""" + + +class TestRedshift: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACRO_SQL} + + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": _MODEL_SQL} + + def test_inherited_macro(self, project): + run_dbt() diff --git a/tests/functional/adapter/test_persist_docs.py b/tests/functional/adapter/test_persist_docs.py new file mode 100644 index 000000000..61b8bd5a6 --- /dev/null +++ b/tests/functional/adapter/test_persist_docs.py @@ -0,0 +1,57 @@ +import json +import pytest + +from dbt.tests.util import run_dbt + +from dbt.tests.adapter.persist_docs.test_persist_docs import ( + BasePersistDocsBase, + BasePersistDocs, + BasePersistDocsColumnMissing, + BasePersistDocsCommentOnQuotedColumn, +) + + +class TestPersistDocs(BasePersistDocs): + pass + + +class TestPersistDocsColumnMissing(BasePersistDocsColumnMissing): + pass + + +class TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn): + pass + + +class TestPersistDocsLateBinding(BasePersistDocsBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "+persist_docs": { + "relation": True, + "columns": True, + }, + "view_model": { + "bind": False, + }, + } + } + } + + def test_comment_on_late_binding_view(self, project): + run_dbt() + run_dbt(["docs", "generate"]) + with open("target/catalog.json") as fp: + catalog_data = json.load(fp) + assert "nodes" in catalog_data + assert len(catalog_data["nodes"]) == 4 + table_node = catalog_data["nodes"]["model.test.table_model"] + view_node = self._assert_has_table_comments(table_node) + + view_node = catalog_data["nodes"]["model.test.view_model"] + self._assert_has_view_comments(view_node, False, False) + + no_docs_node = catalog_data["nodes"]["model.test.no_docs_model"] + self._assert_has_view_comments(no_docs_node, False, False) diff --git a/tests/functional/adapter/query_comment_tests/test_query_comment.py b/tests/functional/adapter/test_query_comment.py similarity index 96% rename from tests/functional/adapter/query_comment_tests/test_query_comment.py rename to tests/functional/adapter/test_query_comment.py index 281a90867..db6a440d7 100644 --- a/tests/functional/adapter/query_comment_tests/test_query_comment.py +++ b/tests/functional/adapter/test_query_comment.py @@ -1,4 +1,3 @@ -import pytest from dbt.tests.adapter.query_comment.test_query_comment import ( 
BaseQueryComments, BaseMacroQueryComments, @@ -12,17 +11,22 @@ class TestQueryCommentsRedshift(BaseQueryComments): pass + class TestMacroQueryCommentsRedshift(BaseMacroQueryComments): pass + class TestMacroArgsQueryCommentsRedshift(BaseMacroArgsQueryComments): pass + class TestMacroInvalidQueryCommentsRedshift(BaseMacroInvalidQueryComments): pass + class TestNullQueryCommentsRedshift(BaseNullQueryComments): pass + class TestEmptyQueryCommentsRedshift(BaseEmptyQueryComments): - pass \ No newline at end of file + pass diff --git a/tests/functional/adapter/relation_name_tests/test_relation_name.py b/tests/functional/adapter/test_relation_name.py similarity index 88% rename from tests/functional/adapter/relation_name_tests/test_relation_name.py rename to tests/functional/adapter/test_relation_name.py index 733ef7f49..f17bbda63 100644 --- a/tests/functional/adapter/relation_name_tests/test_relation_name.py +++ b/tests/functional/adapter/test_relation_name.py @@ -68,9 +68,7 @@ def setUp(self, project): @pytest.fixture(scope="class") def seeds(self): - return { - "seed.csv": seeds__seed - } + return {"seed.csv": seeds__seed} @pytest.fixture(scope="class") def project_config_update(self): @@ -84,12 +82,8 @@ def project_config_update(self): class TestAdapterDDL(TestAdapterDDLBase): @pytest.fixture(scope="class") def models(self): - relname_51_chars_long = ( - "incremental_table_whose_name_is_51_characters_abcde.sql" - ) - relname_52_chars_long = ( - "relation_whose_name_is_52_chars_long_abcdefghijklmno.sql" - ) + relname_51_chars_long = "incremental_table_whose_name_is_51_characters_abcde.sql" + relname_52_chars_long = "relation_whose_name_is_52_chars_long_abcdefghijklmno.sql" relname_63_chars_long = ( "relation_whose_name_is_63_chars_long_abcdefghijklmnopqrstuvwxyz.sql" ) @@ -110,7 +104,7 @@ def models(self): relname_63_chars_long: models__relationname_63_chars_long, relname_63_chars_long_b: models__relationname_63_chars_long, relname_64_chars_long: models__relationname_64_chars_long, - relname_127_chars_long: models__relationname_127_chars_long + relname_127_chars_long: models__relationname_127_chars_long, } def test_long_name_succeeds(self, project): @@ -127,9 +121,7 @@ def models(self): "relation_whose_name_is_127_characters89012345678901234567890123456" "78901234567890123456789012345678901234567890123456789012345678.sql" ) - return { - relname_128_chars_long: models__relationname_127_chars_long - } + return {relname_128_chars_long: models__relationname_127_chars_long} def test_too_long_of_name_fails(self, project): results = run_dbt(["run"], expect_pass=False) diff --git a/tests/functional/adapter/test_simple_seed.py b/tests/functional/adapter/test_simple_seed.py new file mode 100644 index 000000000..1e8cc1cd5 --- /dev/null +++ b/tests/functional/adapter/test_simple_seed.py @@ -0,0 +1,84 @@ +import pytest +from dbt.tests.adapter.simple_seed.test_seed_type_override import BaseSimpleSeedColumnOverride +from dbt.tests.adapter.utils.base_utils import run_dbt + +_SCHEMA_YML = """ +version: 2 +seeds: +- name: seed_enabled + columns: + - name: birthday + tests: + - column_type: + type: date + - name: seed_id + tests: + - column_type: + type: character varying(256) + +- name: seed_tricky + columns: + - name: seed_id + tests: + - column_type: + type: integer + - name: seed_id_str + tests: + - column_type: + type: character varying(256) + - name: a_bool + tests: + - column_type: + type: boolean + - name: looks_like_a_bool + tests: + - column_type: + type: character varying(256) + - name: a_date + tests: 
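+ # each column_type test asserts the database type that the seeded column should resolve to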
+ - column_type: + type: timestamp without time zone + - name: looks_like_a_date + tests: + - column_type: + type: character varying(256) + - name: relative + tests: + - column_type: + type: character varying(9) + - name: weekday + tests: + - column_type: + type: character varying(8) +""".lstrip() + + +class TestSimpleSeedColumnOverride(BaseSimpleSeedColumnOverride): + @pytest.fixture(scope="class") + def schema(self): + return "simple_seed" + + @pytest.fixture(scope="class") + def models(self): + return {"models-rs.yml": _SCHEMA_YML} + + @staticmethod + def seed_enabled_types(): + return { + "seed_id": "text", + "birthday": "date", + } + + @staticmethod + def seed_tricky_types(): + return { + "seed_id_str": "text", + "looks_like_a_bool": "text", + "looks_like_a_date": "text", + } + + def test_redshift_simple_seed_with_column_override_redshift(self, project): + seed_results = run_dbt(["seed"]) + assert len(seed_results) == 2 + test_results = run_dbt(["test"]) + assert len(test_results) == 10 diff --git a/tests/functional/adapter/test_simple_snapshot.py b/tests/functional/adapter/test_simple_snapshot.py new file mode 100644 index 000000000..4db5b2330 --- /dev/null +++ b/tests/functional/adapter/test_simple_snapshot.py @@ -0,0 +1,9 @@ +from dbt.tests.adapter.simple_snapshot.test_snapshot import BaseSnapshotCheck, BaseSimpleSnapshot + + +class TestSnapshot(BaseSimpleSnapshot): + pass + + +class TestSnapshotCheck(BaseSnapshotCheck): + pass diff --git a/tests/functional/adapter/test_store_test_failures.py b/tests/functional/adapter/test_store_test_failures.py new file mode 100644 index 000000000..5d6b70fbb --- /dev/null +++ b/tests/functional/adapter/test_store_test_failures.py @@ -0,0 +1,7 @@ +from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import ( + TestStoreTestFailures, +) + + +class RedshiftTestStoreTestFailures(TestStoreTestFailures): + pass diff --git a/tests/functional/adapter/utils/test_data_types.py b/tests/functional/adapter/utils/test_data_types.py index 147a962b5..3201afcfb 100644 --- a/tests/functional/adapter/utils/test_data_types.py +++ b/tests/functional/adapter/utils/test_data_types.py @@ -1,4 +1,3 @@ -import pytest from dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt from dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat from dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt @@ -11,23 +10,23 @@ class TestTypeBigInt(BaseTypeBigInt): pass - + class TestTypeFloat(BaseTypeFloat): pass - + class TestTypeInt(BaseTypeInt): pass - + class TestTypeNumeric(BaseTypeNumeric): pass - + class TestTypeString(BaseTypeString): pass - + class TestTypeTimestamp(BaseTypeTimestamp): pass diff --git a/tests/functional/adapter/utils/test_timestamps.py b/tests/functional/adapter/utils/test_timestamps.py index 417bfab2b..6c525be44 100644 --- a/tests/functional/adapter/utils/test_timestamps.py +++ b/tests/functional/adapter/utils/test_timestamps.py @@ -17,4 +17,4 @@ def expected_sql(self): select getdate() as current_timestamp, getdate() as current_timestamp_in_utc_backcompat, getdate() as current_timestamp_backcompat - """ \ No newline at end of file + """ diff --git a/tests/functional/adapter/utils/test_utils.py b/tests/functional/adapter/utils/test_utils.py index 03b9cc916..266103fbc 100644 --- a/tests/functional/adapter/utils/test_utils.py +++ b/tests/functional/adapter/utils/test_utils.py @@ -1,5 +1,3 @@ -import pytest - from dbt.tests.adapter.utils.test_array_append import BaseArrayAppend from 
dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct @@ -12,7 +10,6 @@ from dbt.tests.adapter.utils.test_datediff import BaseDateDiff from dbt.tests.adapter.utils.test_date_trunc import BaseDateTrunc from dbt.tests.adapter.utils.test_escape_single_quotes import BaseEscapeSingleQuotesQuote -from dbt.tests.adapter.utils.test_escape_single_quotes import BaseEscapeSingleQuotesBackslash from dbt.tests.adapter.utils.test_except import BaseExcept from dbt.tests.adapter.utils.test_hash import BaseHash from dbt.tests.adapter.utils.test_intersect import BaseIntersect diff --git a/tests/functional/test_autocommit.py b/tests/functional/test_autocommit.py new file mode 100644 index 000000000..e5e54a34f --- /dev/null +++ b/tests/functional/test_autocommit.py @@ -0,0 +1,171 @@ +import os +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture + +_MACROS__CREATE_DB = """ +{% macro create_db_fake() %} + +{% set database = "db_for_test__do_delete_if_you_see_this" %} + +{# IF NOT EXISTS is not available, but Redshift merely returns an error when trying to overwrite #} +{% set create_command %} + CREATE DATABASE {{ database }} +{% endset %} + +{{ log(create_command, info=True) }} + +{% do run_query(create_command) %} + +{{ log("Created redshift database " ~ database, info=True) }} + +{% endmacro %} +""" + +_MACROS__UPDATE_MY_MODEL = """ +{% macro update_some_model(alert_ids, sent_at, table_name) %} + {% set update_query %} + UPDATE {{ ref('my_model') }} set status = 'sent' + {% endset %} + {% do run_query(update_query) %} +{% endmacro %} +""" + +_MACROS__UPDATE_MY_SEED = """ +{% macro update_my_seed() %} +update {{ ref("my_seed") }} set status = 'done' +{% endmacro %} +""" + +_MODELS__MY_MODEL = """ +{{ config(materialized="table") }} + +select 1 as id, 'pending' as status +""" + +_MODELS__AFTER_COMMIT = """ +{{ + config( + post_hook=after_commit("{{ update_my_seed() }}") + ) +}} + +select 1 as id +""" + +_SEEDS_MY_SEED = """ +id,status +1,pending +""".lstrip() + + +class TestTransactionBlocksPreventCertainCommands: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__CREATE_DB} + + def test_autocommit_deactivated_prevents_DDL(self, project): + """Scenario: user has autocommit=True in their target to run macros with normally + forbidden commands like CREATE DATABASE and VACUUM""" + result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) + assert "CREATE DATABASE cannot run inside a transaction block" not in out + + +class TestAutocommitUnblocksDDLInTransactions: + @pytest.fixture(scope="class") + def dbt_profile_target(self): + return { + "type": "redshift", + "threads": 1, + "retries": 6, + "host": os.getenv("REDSHIFT_TEST_HOST"), + "port": int(os.getenv("REDSHIFT_TEST_PORT")), + "user": os.getenv("REDSHIFT_TEST_USER"), + "pass": os.getenv("REDSHIFT_TEST_PASS"), + "dbname": os.getenv("REDSHIFT_TEST_DBNAME"), + "autocommit": False, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__CREATE_DB} + + def test_default_setting_allows_DDL(self, project): + """Monitor whether the status quo in the Redshift connector changes""" + result, out = run_dbt_and_capture(["run-operation", "create_db_fake"], expect_pass=False) + assert "CREATE DATABASE cannot run inside a transaction block" in out + + +class TestUpdateDDLCommits: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__UPDATE_MY_MODEL} + +
@pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": _MODELS__MY_MODEL} + + def test_update_will_go_through(self, project): + run_dbt() + run_dbt(["run-operation", "update_some_model"]) + _, out = run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_model".format(project.test_schema)] + ) + assert "1 | sent" in out + + +class TestUpdateDDLDoesNotCommitWithoutAutocommit: + @pytest.fixture(scope="class") + def dbt_profile_target(self): + return { + "type": "redshift", + "host": os.getenv("REDSHIFT_TEST_HOST"), + "port": int(os.getenv("REDSHIFT_TEST_PORT")), + "user": os.getenv("REDSHIFT_TEST_USER"), + "pass": os.getenv("REDSHIFT_TEST_PASS"), + "dbname": os.getenv("REDSHIFT_TEST_DBNAME"), + "autocommit": False, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__UPDATE_MY_MODEL} + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": _MODELS__MY_MODEL} + + def test_update_will_not_go_through(self, project): + run_dbt() + run_dbt(["run-operation", "update_some_model"]) + _, out = run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_model".format(project.test_schema)] + ) + assert "1 | pending" in out + + +class TestAfterCommitMacroTakesEffect: + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": _MACROS__UPDATE_MY_SEED} + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": _MODELS__AFTER_COMMIT} + + @pytest.fixture(scope="class") + def seeds(self): + return {"my_seed.csv": _SEEDS_MY_SEED} + + def test_update_happens_via_macro_in_config(self, project): + run_dbt(["seed"]) + _, out = run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_seed".format(project.test_schema)] + ) + assert "1 | pending" in out + + run_dbt() + _, out = run_dbt_and_capture( + ["show", "--inline", "select * from {}.my_seed".format(project.test_schema)] + ) + assert "1 | done" in out diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/integration/backup_table_tests/models/model_backup_false.sql b/tests/integration/backup_table_tests/models/model_backup_false.sql deleted file mode 100644 index 67900ac06..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_false.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=False - ) -}} - -select 1 diff --git a/tests/integration/backup_table_tests/models/model_backup_param_before_distkey.sql b/tests/integration/backup_table_tests/models/model_backup_param_before_distkey.sql deleted file mode 100644 index 87c586265..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_param_before_distkey.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=False, dist='distkey' - ) -}} - -select 1 as distkey \ No newline at end of file diff --git a/tests/integration/backup_table_tests/models/model_backup_param_before_sortkey.sql b/tests/integration/backup_table_tests/models/model_backup_param_before_sortkey.sql deleted file mode 100644 index 380aacb5c..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_param_before_sortkey.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=False, sort='sortkey' - ) -}} - -select 1 as sortkey \ No newline at end of file diff --git a/tests/integration/backup_table_tests/models/model_backup_true.sql b/tests/integration/backup_table_tests/models/model_backup_true.sql 
deleted file mode 100644 index 882522da9..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_true.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table', backup=True - ) -}} - -select 2 diff --git a/tests/integration/backup_table_tests/models/model_backup_true_view.sql b/tests/integration/backup_table_tests/models/model_backup_true_view.sql deleted file mode 100644 index 841070a0a..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_true_view.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='view', backup=True - ) -}} - -select 3 diff --git a/tests/integration/backup_table_tests/models/model_backup_undefined.sql b/tests/integration/backup_table_tests/models/model_backup_undefined.sql deleted file mode 100644 index 54468d510..000000000 --- a/tests/integration/backup_table_tests/models/model_backup_undefined.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='table' - ) -}} - -select 4 diff --git a/tests/integration/backup_table_tests/test_backup_table_option.py b/tests/integration/backup_table_tests/test_backup_table_option.py deleted file mode 100644 index e32bca803..000000000 --- a/tests/integration/backup_table_tests/test_backup_table_option.py +++ /dev/null @@ -1,133 +0,0 @@ -import os - -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestBackupTableOption(DBTIntegrationTest): - @property - def schema(self): - return 'backup_table_tests' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - return { - 'config-version': 2 - } - - def check_backup_param_template(self, test_table_name, backup_is_expected): - # Use raw DDL statement to confirm backup is set correctly on new table - with open('target/run/test/models/{}.sql'.format(test_table_name), 'r') as ddl_file: - ddl_statement = ddl_file.readlines() - lowercase_statement = ' '.join(ddl_statement).lower() - self.assertEqual('backup no' not in lowercase_statement, backup_is_expected) - - @use_profile('redshift') - def test__redshift_backup_table_option(self): - self.assertEqual(len(self.run_dbt()), 6) - - # model_backup_undefined should not contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_undefined', True) - - # model_backup_true should not contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_true', True) - - # model_backup_false should contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_false', False) - - # Any view should not contain a BACKUP NO parameter, regardless of the specified config (create will fail) - self.check_backup_param_template('model_backup_true_view', True) - -class TestBackupTableOptionProjectFalse(DBTIntegrationTest): - @property - def schema(self): - return 'backup_table_tests' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - # Update project config to set backup to False. 
- # This should make the 'model_backup_undefined' switch to BACKUP NO - return { - 'config-version': 2, - 'models': {'backup': False} - } - - def check_backup_param_template(self, test_table_name, backup_is_expected): - # Use raw DDL statement to confirm backup is set correctly on new table - with open('target/run/test/models/{}.sql'.format(test_table_name), 'r') as ddl_file: - ddl_statement = ddl_file.readlines() - lowercase_statement = ' '.join(ddl_statement).lower() - self.assertEqual('backup no' not in lowercase_statement, backup_is_expected) - - @use_profile('redshift') - def test__redshift_backup_table_option_project_config_false(self): - self.assertEqual(len(self.run_dbt()), 6) - - # model_backup_undefined should contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_undefined', False) - - # model_backup_true should not contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_true', True) - - # model_backup_false should contain a BACKUP NO parameter in the table DDL - self.check_backup_param_template('model_backup_false', False) - - # Any view should not contain a BACKUP NO parameter, regardless of the specified config (create will fail) - self.check_backup_param_template('model_backup_true_view', True) - -class TestBackupTableOptionOrder(DBTIntegrationTest): - @property - def schema(self): - return 'backup_table_tests' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - return { - 'config-version': 2 - } - - def check_backup_param_template(self, test_table_name, backup_flag_is_expected): - # Use raw DDL statement to confirm backup is set correctly on new table - with open('target/run/test/models/{}.sql'.format(test_table_name), 'r') as ddl_file: - ddl_statement = ddl_file.readlines() - lowercase_statement = ' '.join(ddl_statement).lower() - self.assertEqual('backup no' not in lowercase_statement, backup_flag_is_expected) - if backup_flag_is_expected: - distkey_index = lowercase_statement.find('distkey') - sortkey_index = lowercase_statement.find('sortkey') - backup_index = lowercase_statement.find('backup no') - self.assertEqual((backup_index < distkey_index) or distkey_index == -1, backup_flag_is_expected) - self.assertEqual((backup_index < sortkey_index) or sortkey_index == -1, backup_flag_is_expected) - - @use_profile('redshift') - def test__redshift_backup_table_option_project_config_false(self): - self.assertEqual(len(self.run_dbt()), 6) - - # model_backup_param_before_distkey should contain a BACKUP NO parameter which precedes a DISTKEY in the table ddl - self.check_backup_param_template('model_backup_param_before_distkey', False) - - # model_backup_param_before_sortkey should contain a BACKUP NO parameter which precedes a SORTKEY in the table ddl - self.check_backup_param_template('model_backup_param_before_sortkey', False) \ No newline at end of file diff --git a/tests/integration/base.py b/tests/integration/base.py deleted file mode 100644 index 9222bade9..000000000 --- a/tests/integration/base.py +++ /dev/null @@ -1,955 +0,0 @@ -import os -import io -import random -import shutil -import sys -import tempfile -import traceback -import unittest -import warnings -from contextlib import contextmanager -from datetime import datetime -from functools import wraps - -import pytest -import yaml -from unittest.mock import patch - -import dbt.main as dbt -from dbt import flags -from 
dbt.deprecations import reset_deprecations -from dbt.adapters.factory import get_adapter, reset_adapters, register_adapter -from dbt.clients.jinja import template_cache -from dbt.config import RuntimeConfig -from dbt.context import providers -from dbt.logger import log_manager -from dbt.events.functions import ( - capture_stdout_logs, fire_event, setup_event_logger, stop_capture_stdout_logs -) -from dbt.events import AdapterLogger -from dbt.contracts.graph.manifest import Manifest - - -logger = AdapterLogger("Redshift") -INITIAL_ROOT = os.getcwd() - - -def normalize(path): - """On windows, neither is enough on its own: - - >>> normcase('C:\\documents/ALL CAPS/subdir\\..') - 'c:\\documents\\all caps\\subdir\\..' - >>> normpath('C:\\documents/ALL CAPS/subdir\\..') - 'C:\\documents\\ALL CAPS' - >>> normpath(normcase('C:\\documents/ALL CAPS/subdir\\..')) - 'c:\\documents\\all caps' - """ - return os.path.normcase(os.path.normpath(path)) - - -class Normalized: - def __init__(self, value): - self.value = value - - def __repr__(self): - return f'Normalized({self.value!r})' - - def __str__(self): - return f'Normalized({self.value!s})' - - def __eq__(self, other): - return normalize(self.value) == normalize(other) - - -class FakeArgs: - def __init__(self): - self.threads = 1 - self.defer = False - self.full_refresh = False - self.models = None - self.select = None - self.exclude = None - self.single_threaded = False - self.selector_name = None - self.state = None - self.defer = None - - -class TestArgs: - def __init__(self, kwargs): - self.which = 'run' - self.single_threaded = False - self.profiles_dir = None - self.project_dir = None - self.__dict__.update(kwargs) - - -def _profile_from_test_name(test_name): - adapter_names = ('redshift',) - adapters_in_name = sum(x in test_name for x in adapter_names) - if adapters_in_name != 1: - raise ValueError( - 'test names must have exactly 1 profile choice embedded, {} has {}' - .format(test_name, adapters_in_name) - ) - - for adapter_name in adapter_names: - if adapter_name in test_name: - return adapter_name - - raise ValueError( - 'could not find adapter name in test name {}'.format(test_name) - ) - - -def _pytest_test_name(): - return os.environ['PYTEST_CURRENT_TEST'].split()[0] - - -def _pytest_get_test_root(): - test_path = _pytest_test_name().split('::')[0] - relative_to = INITIAL_ROOT - head = os.path.relpath(test_path, relative_to) - - path_parts = [] - while head: - head, tail = os.path.split(head) - path_parts.append(tail) - path_parts.reverse() - # dbt tests are all of the form 'tests/integration/suite_name' - target = os.path.join(*path_parts[:3]) # TODO: try to not hard code this - return os.path.join(relative_to, target) - - -def _really_makedirs(path): - while not os.path.exists(path): - try: - os.makedirs(path) - except EnvironmentError: - raise - - -class DBTIntegrationTest(unittest.TestCase): - CREATE_SCHEMA_STATEMENT = 'CREATE SCHEMA {}' - DROP_SCHEMA_STATEMENT = 'DROP SCHEMA IF EXISTS {} CASCADE' - - _randint = random.randint(0, 9999) - _runtime_timedelta = (datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0)) - _runtime = ( - (int(_runtime_timedelta.total_seconds() * 1e6)) + - _runtime_timedelta.microseconds - ) - - prefix = f'test{_runtime}{_randint:04}' - setup_alternate_db = False - - def redshift_profile(self): - return { - 'config': { - 'send_anonymous_usage_stats': False - }, - 'test': { - 'outputs': { - 'default2': { - 'type': 'redshift', - 'threads': 1, - 'retries': 6, - 'host': os.getenv('REDSHIFT_TEST_HOST'), - 'port': 
int(os.getenv('REDSHIFT_TEST_PORT')), - 'user': os.getenv('REDSHIFT_TEST_USER'), - 'pass': os.getenv('REDSHIFT_TEST_PASS'), - 'dbname': os.getenv('REDSHIFT_TEST_DBNAME'), - 'schema': self.unique_schema() - } - }, - 'target': 'default2' - } - } - - @property - def packages_config(self): - return None - - @property - def selectors_config(self): - return None - - def unique_schema(self): - schema = self.schema - - to_return = "{}_{}".format(self.prefix, schema) - - return to_return.lower() - - @property - def default_database(self): - database = self.config.credentials.database - return database - - @property - def alternative_database(self): - return None - - def get_profile(self, adapter_type): - if adapter_type == 'redshift': - return self.redshift_profile() - else: - raise ValueError('invalid adapter type {}'.format(adapter_type)) - - def _pick_profile(self): - test_name = self.id().split('.')[-1] - return _profile_from_test_name(test_name) - - def _symlink_test_folders(self): - for entry in os.listdir(self.test_original_source_path): - src = os.path.join(self.test_original_source_path, entry) - tst = os.path.join(self.test_root_dir, entry) - if os.path.isdir(src) or src.endswith('.sql'): - # symlink all sql files and all directories. - os.symlink(src, tst) - os.symlink(self._logs_dir, os.path.join(self.test_root_dir, 'logs')) - - @property - def test_root_realpath(self): - if sys.platform == 'darwin': - return os.path.realpath(self.test_root_dir) - else: - return self.test_root_dir - - def _generate_test_root_dir(self): - return normalize(tempfile.mkdtemp(prefix='dbt-int-test-')) - - def setUp(self): - # Logbook warnings are ignored so we don't have to fork logbook to support python 3.10. - # This _only_ works for tests in `test/integration`. - warnings.filterwarnings( - "ignore", - category=DeprecationWarning, - module="logbook" - ) - self.dbt_core_install_root = os.path.dirname(dbt.__file__) - log_manager.reset_handlers() - self.initial_dir = INITIAL_ROOT - os.chdir(self.initial_dir) - # before we go anywhere, collect the initial path info - self._logs_dir = os.path.join(self.initial_dir, 'logs', self.prefix) - setup_event_logger(self._logs_dir) - _really_makedirs(self._logs_dir) - self.test_original_source_path = _pytest_get_test_root() - self.test_root_dir = self._generate_test_root_dir() - - os.chdir(self.test_root_dir) - try: - self._symlink_test_folders() - except Exception as exc: - msg = '\n\t'.join(( - 'Failed to symlink test folders!', - 'initial_dir={0.initial_dir}', - 'test_original_source_path={0.test_original_source_path}', - 'test_root_dir={0.test_root_dir}' - )).format(self) - logger.exception(msg) - - # if logging isn't set up, I still really want this message. 
- print(msg) - traceback.print_exc() - - raise - - self._created_schemas = set() - reset_deprecations() - template_cache.clear() - - self.use_profile(self._pick_profile()) - self.use_default_project() - self.set_packages() - self.set_selectors() - self.load_config() - - def use_default_project(self, overrides=None): - # create a dbt_project.yml - base_project_config = { - 'name': 'test', - 'version': '1.0', - 'config-version': 2, - 'test-paths': [], - 'model-paths': [self.models], - 'profile': 'test', - } - - project_config = {} - project_config.update(base_project_config) - project_config.update(self.project_config) - project_config.update(overrides or {}) - - with open("dbt_project.yml", 'w') as f: - yaml.safe_dump(project_config, f, default_flow_style=True) - - def use_profile(self, adapter_type): - self.adapter_type = adapter_type - - profile_config = {} - default_profile_config = self.get_profile(adapter_type) - - profile_config.update(default_profile_config) - profile_config.update(self.profile_config) - - if not os.path.exists(self.test_root_dir): - os.makedirs(self.test_root_dir) - - flags.PROFILES_DIR = self.test_root_dir - profiles_path = os.path.join(self.test_root_dir, 'profiles.yml') - with open(profiles_path, 'w') as f: - yaml.safe_dump(profile_config, f, default_flow_style=True) - self._profile_config = profile_config - - def set_packages(self): - if self.packages_config is not None: - with open('packages.yml', 'w') as f: - yaml.safe_dump(self.packages_config, f, default_flow_style=True) - - def set_selectors(self): - if self.selectors_config is not None: - with open('selectors.yml', 'w') as f: - yaml.safe_dump(self.selectors_config, f, default_flow_style=True) - - def load_config(self): - # we've written our profile and project. Now we want to instantiate a - # fresh adapter for the tests. - # it's important to use a different connection handle here so - # we don't look into an incomplete transaction - kwargs = { - 'profile': None, - 'profiles_dir': self.test_root_dir, - 'target': None, - } - - config = RuntimeConfig.from_args(TestArgs(kwargs)) - - register_adapter(config) - adapter = get_adapter(config) - adapter.cleanup_connections() - self.adapter_type = adapter.type() - self.adapter = adapter - self.config = config - - self._drop_schemas() - self._create_schemas() - - def quote_as_configured(self, value, quote_key): - return self.adapter.quote_as_configured(value, quote_key) - - def tearDown(self): - # get any current run adapter and clean up its connections before we - # reset them. It'll probably be different from ours because - # handle_and_check() calls reset_adapters(). 
- register_adapter(self.config) - adapter = get_adapter(self.config) - if adapter is not self.adapter: - adapter.cleanup_connections() - if not hasattr(self, 'adapter'): - self.adapter = adapter - - self._drop_schemas() - - self.adapter.cleanup_connections() - reset_adapters() - os.chdir(INITIAL_ROOT) - try: - shutil.rmtree(self.test_root_dir) - except EnvironmentError: - logger.exception('Could not clean up after test - {} not removable' - .format(self.test_root_dir)) - - def _get_schema_fqn(self, database, schema): - schema_fqn = self.quote_as_configured(schema, 'schema') - return schema_fqn - - def _create_schema_named(self, database, schema): - schema_fqn = self._get_schema_fqn(database, schema) - self.run_sql(self.CREATE_SCHEMA_STATEMENT.format(schema_fqn)) - self._created_schemas.add(schema_fqn) - - def _drop_schema_named(self, database, schema): - schema_fqn = self._get_schema_fqn(database, schema) - self.run_sql(self.DROP_SCHEMA_STATEMENT.format(schema_fqn)) - - def _create_schemas(self): - schema = self.unique_schema() - with self.adapter.connection_named('__test'): - self._create_schema_named(self.default_database, schema) - - def _drop_schemas_sql(self): - schema = self.unique_schema() - # we always want to drop these if necessary, we'll clear it soon. - self._created_schemas.add( - self._get_schema_fqn(self.default_database, schema) - ) - drop_alternative = ( - self.setup_alternate_db and - self.adapter_type not in {'redshift'} and - self.alternative_database - ) - if drop_alternative: - self._created_schemas.add( - self._get_schema_fqn(self.alternative_database, schema) - ) - - for schema_fqn in self._created_schemas: - self.run_sql(self.DROP_SCHEMA_STATEMENT.format(schema_fqn)) - - self._created_schemas.clear() - - def _drop_schemas(self): - with self.adapter.connection_named('__test'): - self._drop_schemas_sql() - - @property - def project_config(self): - return { - 'config-version': 2, - } - - @property - def profile_config(self): - return {} - - def run_dbt(self, args=None, expect_pass=True, profiles_dir=True): - res, success = self.run_dbt_and_check(args=args, profiles_dir=profiles_dir) - self.assertEqual( - success, expect_pass, - "dbt exit state did not match expected") - - return res - - - def run_dbt_and_capture(self, *args, **kwargs): - try: - stringbuf = capture_stdout_logs() - res = self.run_dbt(*args, **kwargs) - stdout = stringbuf.getvalue() - - finally: - stop_capture_stdout_logs() - - return res, stdout - - def run_dbt_and_check(self, args=None, profiles_dir=True): - log_manager.reset_handlers() - if args is None: - args = ["run"] - - final_args = [] - - if os.getenv('DBT_TEST_SINGLE_THREADED') in ('y', 'Y', '1'): - final_args.append('--single-threaded') - - final_args.extend(args) - - if profiles_dir: - final_args.extend(['--profiles-dir', self.test_root_dir]) - final_args.append('--log-cache-events') - - logger.info("Invoking dbt with {}".format(final_args)) - return dbt.handle_and_check(final_args) - - def run_sql_file(self, path, kwargs=None): - with open(path, 'r') as f: - statements = f.read().split(";") - for statement in statements: - self.run_sql(statement, kwargs=kwargs) - - def transform_sql(self, query, kwargs=None): - to_return = query - - base_kwargs = { - 'schema': self.unique_schema(), - 'database': self.adapter.quote(self.default_database), - } - if kwargs is None: - kwargs = {} - base_kwargs.update(kwargs) - - to_return = to_return.format(**base_kwargs) - - return to_return - - def run_sql_common(self, sql, fetch, conn): - with 
conn.handle.cursor() as cursor: - try: - cursor.execute(sql) - conn.handle.commit() - if fetch == 'one': - return cursor.fetchone() - elif fetch == 'all': - return cursor.fetchall() - else: - return - except BaseException as e: - if conn.handle and not getattr(conn.handle, 'closed', True): - conn.handle.rollback() - print(sql) - print(e) - raise - finally: - conn.transaction_open = False - - def run_sql(self, query, fetch='None', kwargs=None, connection_name=None): - if connection_name is None: - connection_name = '__test' - - if query.strip() == "": - return - - sql = self.transform_sql(query, kwargs=kwargs) - - with self.get_connection(connection_name) as conn: - logger.debug('test connection "{}" executing: {}'.format(conn.name, sql)) - return self.run_sql_common(sql, fetch, conn) - - def _ilike(self, target, value): - return "{} ilike '{}'".format(target, value) - - def get_many_table_columns_information_schema(self, tables, schema, database=None): - columns = 'table_name, column_name, data_type, character_maximum_length' - - sql = """ - select {columns} - from {db_string}information_schema.columns - where {schema_filter} - and ({table_filter}) - order by column_name asc""" - - db_string = '' - if database: - db_string = self.quote_as_configured(database, 'database') + '.' - - table_filters_s = " OR ".join( - self._ilike('table_name', table.replace('"', '')) - for table in tables - ) - schema_filter = self._ilike('table_schema', schema) - - sql = sql.format( - columns=columns, - schema_filter=schema_filter, - table_filter=table_filters_s, - db_string=db_string) - - columns = self.run_sql(sql, fetch='all') - return list(map(self.filter_many_columns, columns)) - - def get_many_table_columns(self, tables, schema, database=None): - result = self.get_many_table_columns_information_schema(tables, schema, database) - result.sort(key=lambda x: '{}.{}'.format(x[0], x[1])) - return result - - def filter_many_columns(self, column): - if len(column) == 3: - table_name, column_name, data_type = column - char_size = None - else: - table_name, column_name, data_type, char_size = column - return (table_name, column_name, data_type, char_size) - - @contextmanager - def get_connection(self, name=None): - """Create a test connection context where all executed macros, etc will - get self.adapter as the adapter. 
- - This allows tests to run normal adapter macros as if reset_adapters() - were not called by handle_and_check (for asserts, etc) - """ - if name is None: - name = '__test' - with patch.object(providers, 'get_adapter', return_value=self.adapter): - with self.adapter.connection_named(name): - conn = self.adapter.connections.get_thread_connection() - yield conn - - def get_relation_columns(self, relation): - with self.get_connection(): - columns = self.adapter.get_columns_in_relation(relation) - - return sorted(((c.name, c.dtype, c.char_size) for c in columns), - key=lambda x: x[0]) - - def get_table_columns(self, table, schema=None, database=None): - schema = self.unique_schema() if schema is None else schema - database = self.default_database if database is None else database - relation = self.adapter.Relation.create( - database=database, - schema=schema, - identifier=table, - type='table', - quote_policy=self.config.quoting - ) - return self.get_relation_columns(relation) - - def get_table_columns_as_dict(self, tables, schema=None): - col_matrix = self.get_many_table_columns(tables, schema) - res = {} - for row in col_matrix: - table_name = row[0] - col_def = row[1:] - if table_name not in res: - res[table_name] = [] - res[table_name].append(col_def) - return res - - def get_models_in_schema(self, schema=None): - schema = self.unique_schema() if schema is None else schema - sql = """ - select table_name, - case when table_type = 'BASE TABLE' then 'table' - when table_type = 'VIEW' then 'view' - else table_type - end as materialization - from information_schema.tables - where {} - order by table_name - """ - - sql = sql.format(self._ilike('table_schema', schema)) - result = self.run_sql(sql, fetch='all') - - return {model_name: materialization for (model_name, materialization) in result} - - def _assertTablesEqualSql(self, relation_a, relation_b, columns=None): - if columns is None: - columns = self.get_relation_columns(relation_a) - column_names = [c[0] for c in columns] - - sql = self.adapter.get_rows_different_sql( - relation_a, relation_b, column_names - ) - - return sql - - def assertTablesEqual(self, table_a, table_b, - table_a_schema=None, table_b_schema=None, - table_a_db=None, table_b_db=None): - if table_a_schema is None: - table_a_schema = self.unique_schema() - - if table_b_schema is None: - table_b_schema = self.unique_schema() - - if table_a_db is None: - table_a_db = self.default_database - - if table_b_db is None: - table_b_db = self.default_database - - relation_a = self._make_relation(table_a, table_a_schema, table_a_db) - relation_b = self._make_relation(table_b, table_b_schema, table_b_db) - - self._assertTableColumnsEqual(relation_a, relation_b) - - sql = self._assertTablesEqualSql(relation_a, relation_b) - result = self.run_sql(sql, fetch='one') - - self.assertEqual( - result[0], - 0, - 'row_count_difference nonzero: ' + sql - ) - self.assertEqual( - result[1], - 0, - 'num_mismatched nonzero: ' + sql - ) - - def _make_relation(self, identifier, schema=None, database=None): - if schema is None: - schema = self.unique_schema() - if database is None: - database = self.default_database - return self.adapter.Relation.create( - database=database, - schema=schema, - identifier=identifier, - quote_policy=self.config.quoting - ) - - def get_many_relation_columns(self, relations): - """Returns a dict of (datbase, schema) -> (dict of (table_name -> list of columns)) - """ - schema_fqns = {} - for rel in relations: - this_schema = schema_fqns.setdefault((rel.database, 
rel.schema), []) - this_schema.append(rel.identifier) - - column_specs = {} - for key, tables in schema_fqns.items(): - database, schema = key - columns = self.get_many_table_columns(tables, schema, database=database) - table_columns = {} - for col in columns: - table_columns.setdefault(col[0], []).append(col[1:]) - for rel_name, columns in table_columns.items(): - key = (database, schema, rel_name) - column_specs[key] = columns - - return column_specs - - def assertManyRelationsEqual(self, relations, default_schema=None, default_database=None): - if default_schema is None: - default_schema = self.unique_schema() - if default_database is None: - default_database = self.default_database - - specs = [] - for relation in relations: - if not isinstance(relation, (tuple, list)): - relation = [relation] - - assert len(relation) <= 3 - - if len(relation) == 3: - relation = self._make_relation(*relation) - elif len(relation) == 2: - relation = self._make_relation(relation[0], relation[1], default_database) - elif len(relation) == 1: - relation = self._make_relation(relation[0], default_schema, default_database) - else: - raise ValueError('relation must be a sequence of 1, 2, or 3 values') - - specs.append(relation) - - with self.get_connection(): - column_specs = self.get_many_relation_columns(specs) - - # make sure everyone has equal column definitions - first_columns = None - for relation in specs: - key = (relation.database, relation.schema, relation.identifier) - # get a good error here instead of a hard-to-diagnose KeyError - self.assertIn(key, column_specs, f'No columns found for {key}') - columns = column_specs[key] - if first_columns is None: - first_columns = columns - else: - self.assertEqual( - first_columns, columns, - '{} did not match {}'.format(str(specs[0]), str(relation)) - ) - - # make sure everyone has the same data. if we got here, everyone had - # the same column specs! 
- first_relation = None - for relation in specs: - if first_relation is None: - first_relation = relation - else: - sql = self._assertTablesEqualSql(first_relation, relation, - columns=first_columns) - result = self.run_sql(sql, fetch='one') - - self.assertEqual( - result[0], - 0, - 'row_count_difference nonzero: ' + sql - ) - self.assertEqual( - result[1], - 0, - 'num_mismatched nonzero: ' + sql - ) - - def assertManyTablesEqual(self, *args): - schema = self.unique_schema() - - all_tables = [] - for table_equivalencies in args: - all_tables += list(table_equivalencies) - - all_cols = self.get_table_columns_as_dict(all_tables, schema) - - for table_equivalencies in args: - first_table = table_equivalencies[0] - first_relation = self._make_relation(first_table) - - # assert that all tables have the same columns - base_result = all_cols[first_table] - self.assertTrue(len(base_result) > 0) - - for other_table in table_equivalencies[1:]: - other_result = all_cols[other_table] - self.assertTrue(len(other_result) > 0) - self.assertEqual(base_result, other_result) - - other_relation = self._make_relation(other_table) - sql = self._assertTablesEqualSql(first_relation, - other_relation, - columns=base_result) - result = self.run_sql(sql, fetch='one') - - self.assertEqual( - result[0], - 0, - 'row_count_difference nonzero: ' + sql - ) - self.assertEqual( - result[1], - 0, - 'num_mismatched nonzero: ' + sql - ) - - - def _assertTableRowCountsEqual(self, relation_a, relation_b): - cmp_query = """ - with table_a as ( - - select count(*) as num_rows from {} - - ), table_b as ( - - select count(*) as num_rows from {} - - ) - - select table_a.num_rows - table_b.num_rows as difference - from table_a, table_b - - """.format(str(relation_a), str(relation_b)) - - res = self.run_sql(cmp_query, fetch='one') - - self.assertEqual(int(res[0]), 0, "Row count of table {} doesn't match row count of table {}. ({} rows different)".format( - relation_a.identifier, - relation_b.identifier, - res[0] - ) - ) - - def assertTableDoesNotExist(self, table, schema=None, database=None): - columns = self.get_table_columns(table, schema, database) - - self.assertEqual( - len(columns), - 0 - ) - - def assertTableDoesExist(self, table, schema=None, database=None): - columns = self.get_table_columns(table, schema, database) - - self.assertGreater( - len(columns), - 0 - ) - - def _assertTableColumnsEqual(self, relation_a, relation_b): - table_a_result = self.get_relation_columns(relation_a) - table_b_result = self.get_relation_columns(relation_b) - - text_types = {'text', 'character varying', 'character', 'varchar'} - - self.assertEqual(len(table_a_result), len(table_b_result)) - for a_column, b_column in zip(table_a_result, table_b_result): - a_name, a_type, a_size = a_column - b_name, b_type, b_size = b_column - self.assertEqual(a_name, b_name, - '{} vs {}: column "{}" != "{}"'.format( - relation_a, relation_b, a_name, b_name - )) - - self.assertEqual(a_type, b_type, - '{} vs {}: column "{}" has type "{}" != "{}"'.format( - relation_a, relation_b, a_name, a_type, b_type - )) - - self.assertEqual(a_size, b_size, - '{} vs {}: column "{}" has size "{}" != "{}"'.format( - relation_a, relation_b, a_name, a_size, b_size - )) - - def assertEquals(self, *args, **kwargs): - # assertEquals is deprecated. 
This makes the warnings less chatty - self.assertEqual(*args, **kwargs) - - def assertBetween(self, timestr, start, end=None): - datefmt = '%Y-%m-%dT%H:%M:%S.%fZ' - if end is None: - end = datetime.utcnow() - - parsed = datetime.strptime(timestr, datefmt) - - self.assertLessEqual(start, parsed, - 'parsed date {} happened before {}'.format( - parsed, - start.strftime(datefmt)) - ) - self.assertGreaterEqual(end, parsed, - 'parsed date {} happened after {}'.format( - parsed, - end.strftime(datefmt)) - ) - - -def use_profile(profile_name): - """A decorator to declare a test method as using a particular profile. - Handles both setting the nose attr and calling self.use_profile. - - Use like this: - - class TestSomething(DBIntegrationTest): - @use_profile('postgres') - def test_postgres_thing(self): - self.assertEqual(self.adapter_type, 'postgres') - - @use_profile('snowflake') - def test_snowflake_thing(self): - self.assertEqual(self.adapter_type, 'snowflake') - """ - def outer(wrapped): - @getattr(pytest.mark, 'profile_'+profile_name) - @wraps(wrapped) - def func(self, *args, **kwargs): - return wrapped(self, *args, **kwargs) - # sanity check at import time - assert _profile_from_test_name(wrapped.__name__) == profile_name - return func - return outer - - -class AnyFloat: - """Any float. Use this in assertEqual() calls to assert that it is a float. - """ - def __eq__(self, other): - return isinstance(other, float) - - -class AnyString: - """Any string. Use this in assertEqual() calls to assert that it is a string. - """ - def __eq__(self, other): - return isinstance(other, str) - - -class AnyStringWith: - def __init__(self, contains=None): - self.contains = contains - - def __eq__(self, other): - if not isinstance(other, str): - return False - - if self.contains is None: - return True - - return self.contains in other - - def __repr__(self): - return 'AnyStringWith<{!r}>'.format(self.contains) - - -def get_manifest(): - path = './target/partial_parse.msgpack' - if os.path.exists(path): - with open(path, 'rb') as fp: - manifest_mp = fp.read() - manifest: Manifest = Manifest.from_msgpack(manifest_mp) - return manifest - else: - return None diff --git a/tests/integration/column_comments_tests/models/quote_model.sql b/tests/integration/column_comments_tests/models/quote_model.sql deleted file mode 100644 index 2255b4bd7..000000000 --- a/tests/integration/column_comments_tests/models/quote_model.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as {{ adapter.quote("2id") }} diff --git a/tests/integration/column_comments_tests/models/schema.yml b/tests/integration/column_comments_tests/models/schema.yml deleted file mode 100644 index 1e82165fa..000000000 --- a/tests/integration/column_comments_tests/models/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: quote_model - description: "model to test column quotes and comments" - columns: - - name: 2id - description: "XXX My description" - quote: true - diff --git a/tests/integration/column_comments_tests/test_column_comments.py b/tests/integration/column_comments_tests/test_column_comments.py deleted file mode 100644 index 0cd8c2940..000000000 --- a/tests/integration/column_comments_tests/test_column_comments.py +++ /dev/null @@ -1,43 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - -import json - - -class TestColumnComment(DBTIntegrationTest): - @property - def schema(self): - return "column_comment" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 
'config-version': 2, - 'models': { - 'test': { - 'materialized': 'table', - '+persist_docs': { - "relation": True, - "columns": True, - }, - } - } - } - - def run_has_comments(self): - self.run_dbt() - self.run_dbt(['docs', 'generate']) - with open('target/catalog.json') as fp: - catalog_data = json.load(fp) - assert 'nodes' in catalog_data - assert len(catalog_data['nodes']) == 1 - column_node = catalog_data['nodes']['model.test.quote_model'] - column_comment = column_node['columns']['2id']['comment'] - assert column_comment.startswith('XXX') - - @use_profile('redshift') - def test_redshift_comments(self): - self.run_has_comments() diff --git a/tests/integration/column_quoting/models-unquoted/model.sql b/tests/integration/column_quoting/models-unquoted/model.sql deleted file mode 100644 index e6862aa2e..000000000 --- a/tests/integration/column_quoting/models-unquoted/model.sql +++ /dev/null @@ -1,19 +0,0 @@ -{% set col_a = '"col_a"' %} -{% set col_b = '"col_b"' %} -{% if adapter.type() == 'bigquery' %} - {% set col_a = '`col_a`' %} - {% set col_b = '`col_b`' %} -{% elif adapter.type() == 'snowflake' %} - {% set col_a = '"COL_A"' %} - {% set col_b = '"COL_B"' %} -{% endif %} - -{{config( - materialized = 'incremental', - unique_key = col_a, - incremental_strategy = var('strategy') - )}} - -select -{{ col_a }}, {{ col_b }} -from {{ref('seed')}} diff --git a/tests/integration/column_quoting/models/model.sql b/tests/integration/column_quoting/models/model.sql deleted file mode 100644 index 8c19c6546..000000000 --- a/tests/integration/column_quoting/models/model.sql +++ /dev/null @@ -1,16 +0,0 @@ -{% set col_a = '"col_A"' %} -{% set col_b = '"col_B"' %} -{% if adapter.type() == 'bigquery' %} - {% set col_a = '`col_A`' %} - {% set col_b = '`col_B`' %} -{% endif %} - -{{config( - materialized = 'incremental', - unique_key = col_a, - incremental_strategy = var('strategy') - )}} - -select -{{ col_a }}, {{ col_b }} -from {{ref('seed')}} diff --git a/tests/integration/column_quoting/seeds/seed.csv b/tests/integration/column_quoting/seeds/seed.csv deleted file mode 100644 index d4a1e26ee..000000000 --- a/tests/integration/column_quoting/seeds/seed.csv +++ /dev/null @@ -1,4 +0,0 @@ -col_A,col_B -1,2 -3,4 -5,6 diff --git a/tests/integration/column_quoting/test_column_quotes.py b/tests/integration/column_quoting/test_column_quotes.py deleted file mode 100644 index 2d15ee7de..000000000 --- a/tests/integration/column_quoting/test_column_quotes.py +++ /dev/null @@ -1,78 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import os - - -class BaseColumnQuotingTest(DBTIntegrationTest): - def column_quoting(self): - raise NotImplementedError('column_quoting not implemented') - - @property - def schema(self): - return 'dbt_column_quoting' - - @staticmethod - def dir(value): - return os.path.normpath(value) - - def _run_columnn_quotes(self, strategy='delete+insert'): - strategy_vars = '{{"strategy": "{}"}}'.format(strategy) - self.run_dbt(['seed', '--vars', strategy_vars]) - self.run_dbt(['run', '--vars', strategy_vars]) - self.run_dbt(['run', '--vars', strategy_vars]) - - -class TestColumnQuotingDefault(BaseColumnQuotingTest): - @property - def project_config(self): - return { - 'config-version': 2 - } - - @property - def models(self): - return self.dir('models-unquoted') - - def run_dbt(self, *args, **kwargs): - return super().run_dbt(*args, **kwargs) - - @use_profile('redshift') - def test_redshift_column_quotes(self): - self._run_columnn_quotes() - - -class 
TestColumnQuotingDisabled(BaseColumnQuotingTest): - @property - def models(self): - return self.dir('models-unquoted') - - @property - def project_config(self): - return { - 'config-version': 2, - 'seeds': { - 'quote_columns': False, - }, - } - - @use_profile('redshift') - def test_redshift_column_quotes(self): - self._run_columnn_quotes() - - -class TestColumnQuotingEnabled(BaseColumnQuotingTest): - @property - def models(self): - return self.dir('models') - - @property - def project_config(self): - return { - 'config-version': 2, - 'seeds': { - 'quote_columns': True, - }, - } - - @use_profile('redshift') - def test_redshift_column_quotes(self): - self._run_columnn_quotes() diff --git a/tests/integration/column_type_tests/macros/test_alter_column_type.sql b/tests/integration/column_type_tests/macros/test_alter_column_type.sql deleted file mode 100644 index 133d59fad..000000000 --- a/tests/integration/column_type_tests/macros/test_alter_column_type.sql +++ /dev/null @@ -1,5 +0,0 @@ --- Macro to alter a column type -{% macro test_alter_column_type(model_name, column_name, new_column_type) %} - {% set relation = ref(model_name) %} - {{ alter_column_type(relation, column_name, new_column_type) }} -{% endmacro %} diff --git a/tests/integration/column_type_tests/macros/test_is_type.sql b/tests/integration/column_type_tests/macros/test_is_type.sql deleted file mode 100644 index 2f1ffde2b..000000000 --- a/tests/integration/column_type_tests/macros/test_is_type.sql +++ /dev/null @@ -1,72 +0,0 @@ - -{% macro simple_type_check_column(column, check) %} - {% if check == 'string' %} - {{ return(column.is_string()) }} - {% elif check == 'float' %} - {{ return(column.is_float()) }} - {% elif check == 'number' %} - {{ return(column.is_number()) }} - {% elif check == 'numeric' %} - {{ return(column.is_numeric()) }} - {% elif check == 'integer' %} - {{ return(column.is_integer()) }} - {% else %} - {% do exceptions.raise_compiler_error('invalid type check value: ' ~ check) %} - {% endif %} -{% endmacro %} - -{% macro type_check_column(column, type_checks) %} - {% set failures = [] %} - {% for type_check in type_checks %} - {% if type_check.startswith('not ') %} - {% if simple_type_check_column(column, type_check[4:]) %} - {% do log('simple_type_check_column got ', True) %} - {% do failures.append(type_check) %} - {% endif %} - {% else %} - {% if not simple_type_check_column(column, type_check) %} - {% do failures.append(type_check) %} - {% endif %} - {% endif %} - {% endfor %} - {% if (failures | length) > 0 %} - {% do log('column ' ~ column.name ~ ' had failures: ' ~ failures, info=True) %} - {% endif %} - {% do return((failures | length) == 0) %} -{% endmacro %} - -{% test is_type(model, column_map) %} - {% if not execute %} - {{ return(None) }} - {% endif %} - {% if not column_map %} - {% do exceptions.raise_compiler_error('test_is_type must have a column name') %} - {% endif %} - {% set columns = adapter.get_columns_in_relation(model) %} - {% if (column_map | length) != (columns | length) %} - {% set column_map_keys = (column_map | list | string) %} - {% set column_names = (columns | map(attribute='name') | list | string) %} - {% do exceptions.raise_compiler_error('did not get all the columns/all columns not specified:\n' ~ column_map_keys ~ '\nvs\n' ~ column_names) %} - {% endif %} - {% set bad_columns = [] %} - {% for column in columns %} - {% set column_key = (column.name | lower) %} - {% if column_key in column_map %} - {% set type_checks = column_map[column_key] %} - {% if not type_checks %} - {% do 
exceptions.raise_compiler_error('no type checks?') %} - {% endif %} - {% if not type_check_column(column, type_checks) %} - {% do bad_columns.append(column.name) %} - {% endif %} - {% else %} - {% do exceptions.raise_compiler_error('column key ' ~ column_key ~ ' not found in ' ~ (column_map | list | string)) %} - {% endif %} - {% endfor %} - {% do log('bad columns: ' ~ bad_columns, info=True) %} - {% for bad_column in bad_columns %} - select '{{ bad_column }}' as bad_column - {{ 'union all' if not loop.last }} - {% endfor %} - select * from (select 1 limit 0) as nothing -{% endtest %} diff --git a/tests/integration/column_type_tests/rs_models/model.sql b/tests/integration/column_type_tests/rs_models/model.sql deleted file mode 100644 index f8e972103..000000000 --- a/tests/integration/column_type_tests/rs_models/model.sql +++ /dev/null @@ -1,17 +0,0 @@ -select - 1::smallint as smallint_col, - 2::int as int_col, - 3::bigint as bigint_col, - 4::int2 as int2_col, - 5::int4 as int4_col, - 6::int8 as int8_col, - 7::integer as integer_col, - 8.0::real as real_col, - 9.0::float4 as float4_col, - 10.0::float8 as float8_col, - 11.0::float as float_col, - 12.0::double precision as double_col, - 13.0::numeric as numeric_col, - 14.0::decimal as decimal_col, - '15'::varchar(20) as varchar_col, - '16'::text as text_col diff --git a/tests/integration/column_type_tests/rs_models/schema.yml b/tests/integration/column_type_tests/rs_models/schema.yml deleted file mode 100644 index 5b35ce025..000000000 --- a/tests/integration/column_type_tests/rs_models/schema.yml +++ /dev/null @@ -1,22 +0,0 @@ -version: 2 -models: - - name: model - tests: - - is_type: - column_map: - smallint_col: ['integer', 'number'] - int_col: ['integer', 'number'] - bigint_col: ['integer', 'number'] - int2_col: ['integer', 'number'] - int4_col: ['integer', 'number'] - int8_col: ['integer', 'number'] - integer_col: ['integer', 'number'] - real_col: ['float', 'number'] - double_col: ['float', 'number'] - float4_col: ['float', 'number'] - float8_col: ['float', 'number'] - float_col: ['float', 'number'] - numeric_col: ['numeric', 'number'] - decimal_col: ['numeric', 'number'] - varchar_col: ['string', 'not number'] - text_col: ['string', 'not number'] diff --git a/tests/integration/column_type_tests/test_column_types.py b/tests/integration/column_type_tests/test_column_types.py deleted file mode 100644 index 52e4c2f5b..000000000 --- a/tests/integration/column_type_tests/test_column_types.py +++ /dev/null @@ -1,21 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestColumnTypes(DBTIntegrationTest): - @property - def schema(self): - return 'column_types' - - def run_and_test(self): - self.assertEqual(len(self.run_dbt(['run'])), 1) - self.assertEqual(len(self.run_dbt(['test'])), 1) - - -class TestRedshiftColumnTypes(TestColumnTypes): - @property - def models(self): - return 'rs_models' - - @use_profile('redshift') - def test_redshift_column_types(self): - self.run_and_test() diff --git a/tests/integration/concurrent_transaction_test/README.md b/tests/integration/concurrent_transaction_test/README.md deleted file mode 100644 index 48ece8615..000000000 --- a/tests/integration/concurrent_transaction_test/README.md +++ /dev/null @@ -1,33 +0,0 @@ - -This test warrants some explanation. In dbt <=0.10.1, Redshift table and view materializations suffered from issues around concurrent transactions. In order to reliably reproduce this error, a query needs to select from a dbt model as the table is being rebuilt. 
Critically, this concurrent select needs to query the table during the drop/swap portion of the materialization. This looks like: - -```sql -begin; -create table as (...); -drop table old_table cascade; -// <---- The concurrent query needs to be running here! -alter table new_table rename to old_table; -commit; -``` - -In order to reliably reproduce this failure, the model shown above needs to block for a long time between the `drop` and `alter` statements. We can't just stick a sleep() call in there, as this code is defined in the materialization. Instead, we can reliably reproduce the failure by: - -1) creating a view that depends on this model -2) issuing a long-running query on the view before `dbt run` is invoked -3) issuing _another_ long-running query against the original model - -Since the long-running query (step 2) is selecting from the view, Redshift blocks on the `drop ... cascade` of the materialization, which causes the query from step 3 to overlap with the critical section of the materialization between the `drop` and `alter` statements. - -In dbt v0.10.1, this integration test results in: - -``` -====================================================================== -FAIL: test__redshift__concurrent_transaction (test_concurrent_transaction.TestConcurrentTransaction) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "/usr/src/app/test/integration/032_concurrent_transaction_test/test_concurrent_transaction.py", line 84, in test__redshift__concurrent_transaction - self.assertEqual(self.query_state['model_1'], 'good') -AssertionError: 'error: table 3379442 dropped by concurrent transaction\n' != 'good' -- error: table 3379442 dropped by concurrent transaction -+ good -``` diff --git a/tests/integration/concurrent_transaction_test/macros/udfs.sql b/tests/integration/concurrent_transaction_test/macros/udfs.sql deleted file mode 100644 index 8fc46d110..000000000 --- a/tests/integration/concurrent_transaction_test/macros/udfs.sql +++ /dev/null @@ -1,13 +0,0 @@ - -{% macro create_udfs() %} - -CREATE OR REPLACE FUNCTION {{ target.schema }}.f_sleep (x float) -RETURNS bool IMMUTABLE -AS -$$ - from time import sleep - sleep(x) - return True -$$ LANGUAGE plpythonu; - -{% endmacro %} diff --git a/tests/integration/concurrent_transaction_test/models-incremental/model_1.sql b/tests/integration/concurrent_transaction_test/models-incremental/model_1.sql deleted file mode 100644 index 3d8ac43ba..000000000 --- a/tests/integration/concurrent_transaction_test/models-incremental/model_1.sql +++ /dev/null @@ -1,9 +0,0 @@ - -{{ config(materialized='incremental', unique_key='id') }} - --- incremental model -select 1 as id - -{% if is_incremental() %} - where TRUE -{% endif %} diff --git a/tests/integration/concurrent_transaction_test/models-incremental/view_model.sql b/tests/integration/concurrent_transaction_test/models-incremental/view_model.sql deleted file mode 100644 index 40b85c8fc..000000000 --- a/tests/integration/concurrent_transaction_test/models-incremental/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ - - -select * from {{ ref('model_1') }} diff --git a/tests/integration/concurrent_transaction_test/models-table/model_1.sql b/tests/integration/concurrent_transaction_test/models-table/model_1.sql deleted file mode 100644 index 344a2e8bd..000000000 --- a/tests/integration/concurrent_transaction_test/models-table/model_1.sql +++ /dev/null @@ -1,5 +0,0 @@ - -{{ config(materialized='table') }} - --- table model -select 1 as id 
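The removed README above is the clearest record of why these fixtures existed, so its mechanism is worth restating compactly. Below is a minimal, hypothetical sketch of the same orchestration in plain Python — assuming the `f_sleep` UDF from `udfs.sql` above, the `REDSHIFT_TEST_*` environment variables used throughout this suite, and the `redshift_connector` driver the adapter now depends on; `reproduce` and `run_query` are illustrative names, not part of the deleted harness:

```python
import os
import subprocess
import threading

import redshift_connector


def run_query(sql: str) -> None:
    # Each select gets its own connection so it genuinely overlaps the rebuild.
    conn = redshift_connector.connect(
        host=os.environ["REDSHIFT_TEST_HOST"],
        database=os.environ["REDSHIFT_TEST_DBNAME"],
        user=os.environ["REDSHIFT_TEST_USER"],
        password=os.environ["REDSHIFT_TEST_PASS"],
    )
    try:
        cursor = conn.cursor()
        cursor.execute(sql)
    finally:
        conn.close()


def reproduce(schema: str) -> None:
    # Step 2 of the README: a long-running select on the view, which makes
    # Redshift block the materialization's `drop ... cascade`.
    step_2 = threading.Thread(
        target=run_query,
        args=(f"select {schema}.f_sleep(10) from {schema}.view_model",),
    )
    # Step 3: another long-running select, this time against the model itself,
    # which then lands inside the drop/alter window held open by step 2.
    step_3 = threading.Thread(
        target=run_query,
        args=(f"select {schema}.f_sleep(5) from {schema}.model_1",),
    )
    step_2.start()
    step_3.start()
    # Rebuild the models while both selects are in flight; on dbt <= 0.10.1
    # this is where "dropped by concurrent transaction" surfaced.
    subprocess.run(["dbt", "run", "--threads", "8"], check=True)
    step_2.join()
    step_3.join()
```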
diff --git a/tests/integration/concurrent_transaction_test/models-table/view_model.sql b/tests/integration/concurrent_transaction_test/models-table/view_model.sql deleted file mode 100644 index 40b85c8fc..000000000 --- a/tests/integration/concurrent_transaction_test/models-table/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ - - -select * from {{ ref('model_1') }} diff --git a/tests/integration/concurrent_transaction_test/models-view/model_1.sql b/tests/integration/concurrent_transaction_test/models-view/model_1.sql deleted file mode 100644 index 21a96e98c..000000000 --- a/tests/integration/concurrent_transaction_test/models-view/model_1.sql +++ /dev/null @@ -1,5 +0,0 @@ - -{{ config(materialized='view') }} - --- view model -select 1 as id diff --git a/tests/integration/concurrent_transaction_test/models-view/view_model.sql b/tests/integration/concurrent_transaction_test/models-view/view_model.sql deleted file mode 100644 index 40b85c8fc..000000000 --- a/tests/integration/concurrent_transaction_test/models-view/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ - - -select * from {{ ref('model_1') }} diff --git a/tests/integration/concurrent_transaction_test/test_concurrent_transaction.py b/tests/integration/concurrent_transaction_test/test_concurrent_transaction.py deleted file mode 100644 index 8da9f7fb9..000000000 --- a/tests/integration/concurrent_transaction_test/test_concurrent_transaction.py +++ /dev/null @@ -1,136 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import threading -from dbt.adapters.factory import FACTORY - - -def get_adapter_standalone(config): - plugin = FACTORY.plugins[config.credentials.type] - cls = plugin.adapter - return cls(config) - - -class BaseTestConcurrentTransaction(DBTIntegrationTest): - - def reset(self): - self.query_state = { - 'view_model': 'wait', - 'model_1': 'wait', - } - - def setUp(self): - super().setUp() - self._secret_adapter = get_adapter_standalone(self.config) - self.reset() - - def tearDown(self): - self._secret_adapter.cleanup_connections() - super().tearDown() - - @property - def schema(self): - return "concurrent_transaction" - - @property - def project_config(self): - return { - 'config-version': 2, - "macro-paths": ["macros"], - "on-run-start": [ - "{{ create_udfs() }}", - ], - } - - def run_select_and_check(self, rel, sql): - connection_name = '__test_{}'.format(id(threading.current_thread())) - try: - with self._secret_adapter.connection_named(connection_name): - conn = self._secret_adapter.connections.get_thread_connection() - res = self.run_sql_common(self.transform_sql(sql), 'one', conn) - - # The result is the output of f_sleep(), which is True - if res[0]: - self.query_state[rel] = 'good' - else: - self.query_state[rel] = 'bad' - - except Exception as e: - if 'concurrent transaction' in str(e): - self.query_state[rel] = 'error: {}'.format(e) - else: - self.query_state[rel] = 'error: {}'.format(e) - - def async_select(self, rel, sleep=10): - # Run the select statement in a thread. When the query returns, the global - # query_state will be updated with a state of good/bad/error, and the associated - # error will be reported if one was raised. 
- - schema = self.unique_schema() - query = ''' - -- async_select: {rel} - select {schema}.f_sleep({sleep}) from {schema}.{rel} - '''.format( - schema=schema, - sleep=sleep, - rel=rel) - - thread = threading.Thread(target=self.run_select_and_check, args=(rel, query)) - thread.start() - return thread - - def run_test(self): - self.use_profile("redshift") - - # First run the project to make sure the models exist - results = self.run_dbt(args=['run']) - self.assertEqual(len(results), 2) - - # Execute long-running queries in threads - t1 = self.async_select('view_model', 10) - t2 = self.async_select('model_1', 5) - - # While the queries are executing, re-run the project - res = self.run_dbt(args=['run', '--threads', '8']) - self.assertEqual(len(res), 2) - - # Finally, wait for these threads to finish - t1.join() - t2.join() - - self.assertTrue(len(res) > 0) - - # If the query succeeded, the global query_state should be 'good' - self.assertEqual(self.query_state['view_model'], 'good') - self.assertEqual(self.query_state['model_1'], 'good') - - -class TableTestConcurrentTransaction(BaseTestConcurrentTransaction): - @property - def models(self): - return "models-table" - -# @use_profile("redshift") -# def test__redshift__concurrent_transaction_table(self): -# self.reset() -# self.run_test() - - -class ViewTestConcurrentTransaction(BaseTestConcurrentTransaction): - @property - def models(self): - return "models-view" - -# @use_profile("redshift") -# def test__redshift__concurrent_transaction_view(self): -# self.reset() -# self.run_test() - - -class IncrementalTestConcurrentTransaction(BaseTestConcurrentTransaction): - @property - def models(self): - return "models-incremental" - -# @use_profile("redshift") -# def test__redshift__concurrent_transaction_incremental(self): -# self.reset() -# self.run_test() diff --git a/tests/integration/defer_state_test/changed_models/ephemeral_model.sql b/tests/integration/defer_state_test/changed_models/ephemeral_model.sql deleted file mode 100644 index 2f976e3a9..000000000 --- a/tests/integration/defer_state_test/changed_models/ephemeral_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='ephemeral') }} -select * from {{ ref('view_model') }} diff --git a/tests/integration/defer_state_test/changed_models/schema.yml b/tests/integration/defer_state_test/changed_models/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/changed_models/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/changed_models/table_model.sql b/tests/integration/defer_state_test/changed_models/table_model.sql deleted file mode 100644 index 65909318b..000000000 --- a/tests/integration/defer_state_test/changed_models/table_model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='table') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} \ No newline at end of file diff --git a/tests/integration/defer_state_test/changed_models/view_model.sql b/tests/integration/defer_state_test/changed_models/view_model.sql deleted file mode 100644 index bddbbb23c..000000000 --- a/tests/integration/defer_state_test/changed_models/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from no.such.table diff --git a/tests/integration/defer_state_test/changed_models_bad/ephemeral_model.sql 
b/tests/integration/defer_state_test/changed_models_bad/ephemeral_model.sql deleted file mode 100644 index 5155dfa47..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/ephemeral_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='ephemeral') }} -select * from no.such.table diff --git a/tests/integration/defer_state_test/changed_models_bad/schema.yml b/tests/integration/defer_state_test/changed_models_bad/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/changed_models_bad/table_model.sql b/tests/integration/defer_state_test/changed_models_bad/table_model.sql deleted file mode 100644 index 65909318b..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/table_model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='table') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} \ No newline at end of file diff --git a/tests/integration/defer_state_test/changed_models_bad/view_model.sql b/tests/integration/defer_state_test/changed_models_bad/view_model.sql deleted file mode 100644 index bddbbb23c..000000000 --- a/tests/integration/defer_state_test/changed_models_bad/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from no.such.table diff --git a/tests/integration/defer_state_test/changed_models_missing/schema.yml b/tests/integration/defer_state_test/changed_models_missing/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/changed_models_missing/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/changed_models_missing/table_model.sql b/tests/integration/defer_state_test/changed_models_missing/table_model.sql deleted file mode 100644 index 22b040d2c..000000000 --- a/tests/integration/defer_state_test/changed_models_missing/table_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='table') }} -select 1 as fun diff --git a/tests/integration/defer_state_test/changed_models_missing/view_model.sql b/tests/integration/defer_state_test/changed_models_missing/view_model.sql deleted file mode 100644 index 4b91aa0f2..000000000 --- a/tests/integration/defer_state_test/changed_models_missing/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('seed') }} diff --git a/tests/integration/defer_state_test/macros/macros.sql b/tests/integration/defer_state_test/macros/macros.sql deleted file mode 100644 index 79519c1b6..000000000 --- a/tests/integration/defer_state_test/macros/macros.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro my_macro() %} - {% do log('in a macro' ) %} -{% endmacro %} diff --git a/tests/integration/defer_state_test/models/ephemeral_model.sql b/tests/integration/defer_state_test/models/ephemeral_model.sql deleted file mode 100644 index 2f976e3a9..000000000 --- a/tests/integration/defer_state_test/models/ephemeral_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='ephemeral') }} -select * from {{ ref('view_model') }} diff --git a/tests/integration/defer_state_test/models/exposures.yml 
b/tests/integration/defer_state_test/models/exposures.yml deleted file mode 100644 index 489dec3c3..000000000 --- a/tests/integration/defer_state_test/models/exposures.yml +++ /dev/null @@ -1,8 +0,0 @@ -version: 2 -exposures: - - name: my_exposure - type: application - depends_on: - - ref('view_model') - owner: - email: test@example.com diff --git a/tests/integration/defer_state_test/models/schema.yml b/tests/integration/defer_state_test/models/schema.yml deleted file mode 100644 index 1ec506d3d..000000000 --- a/tests/integration/defer_state_test/models/schema.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: 2 -models: - - name: view_model - columns: - - name: id - tests: - - unique - - not_null - - name: name diff --git a/tests/integration/defer_state_test/models/table_model.sql b/tests/integration/defer_state_test/models/table_model.sql deleted file mode 100644 index 65909318b..000000000 --- a/tests/integration/defer_state_test/models/table_model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='table') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} \ No newline at end of file diff --git a/tests/integration/defer_state_test/models/view_model.sql b/tests/integration/defer_state_test/models/view_model.sql deleted file mode 100644 index 4b91aa0f2..000000000 --- a/tests/integration/defer_state_test/models/view_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('seed') }} diff --git a/tests/integration/defer_state_test/seeds/seed.csv b/tests/integration/defer_state_test/seeds/seed.csv deleted file mode 100644 index 1a728c8ab..000000000 --- a/tests/integration/defer_state_test/seeds/seed.csv +++ /dev/null @@ -1,3 +0,0 @@ -id,name -1,Alice -2,Bob diff --git a/tests/integration/defer_state_test/snapshots/my_snapshot.sql b/tests/integration/defer_state_test/snapshots/my_snapshot.sql deleted file mode 100644 index 6a7d2b31b..000000000 --- a/tests/integration/defer_state_test/snapshots/my_snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot my_cool_snapshot %} - - {{ - config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['id'], - ) - }} - select * from {{ ref('view_model') }} - -{% endsnapshot %} diff --git a/tests/integration/defer_state_test/test_defer_state.py b/tests/integration/defer_state_test/test_defer_state.py deleted file mode 100644 index fc942abc0..000000000 --- a/tests/integration/defer_state_test/test_defer_state.py +++ /dev/null @@ -1,153 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import copy -import json -import os -import shutil - - -class TestDeferState(DBTIntegrationTest): - @property - def schema(self): - return "defer_state" - - @property - def models(self): - return "models" - - def setUp(self): - self.other_schema = None - super().setUp() - self._created_schemas.add(self.other_schema) - - @property - def project_config(self): - return { - 'config-version': 2, - 'seeds': { - 'test': { - 'quote_columns': False, - } - } - } - - def get_profile(self, adapter_type): - if self.other_schema is None: - self.other_schema = self.unique_schema() + '_other' - if self.adapter_type == 'snowflake': - self.other_schema = self.other_schema.upper() - profile = super().get_profile(adapter_type) - default_name = profile['test']['target'] - profile['test']['outputs']['otherschema'] = copy.deepcopy(profile['test']['outputs'][default_name]) - 
profile['test']['outputs']['otherschema']['schema'] = self.other_schema - return profile - - def copy_state(self): - assert not os.path.exists('state') - os.makedirs('state') - shutil.copyfile('target/manifest.json', 'state/manifest.json') - - def run_and_defer(self): - results = self.run_dbt(['seed']) - assert len(results) == 1 - assert not any(r.node.deferred for r in results) - results = self.run_dbt(['run']) - assert len(results) == 2 - assert not any(r.node.deferred for r in results) - results = self.run_dbt(['test']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - - # test tests first, because run will change things - # no state, wrong schema, failure. - self.run_dbt(['test', '--target', 'otherschema'], expect_pass=False) - - # no state, run also fails - self.run_dbt(['run', '--target', 'otherschema'], expect_pass=False) - - # defer test, it succeeds - results = self.run_dbt(['test', '-m', 'view_model+', '--state', 'state', '--defer', '--target', 'otherschema']) - - # with state it should work though - results = self.run_dbt(['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema']) - assert self.other_schema not in results[0].node.compiled_code - assert self.unique_schema() in results[0].node.compiled_code - - with open('target/manifest.json') as fp: - data = json.load(fp) - assert data['nodes']['seed.test.seed']['deferred'] - - assert len(results) == 1 - - def run_switchdirs_defer(self): - results = self.run_dbt(['seed']) - assert len(results) == 1 - results = self.run_dbt(['run']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - - self.use_default_project({'model-paths': ['changed_models']}) - # the sql here is just wrong, so it should fail - self.run_dbt( - ['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=False, - ) - # but this should work since we just use the old happy model - self.run_dbt( - ['run', '-m', 'table_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=True, - ) - - self.use_default_project({'model-paths': ['changed_models_bad']}) - # this should fail because the table model refs a broken ephemeral - # model, which it should see - self.run_dbt( - ['run', '-m', 'table_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=False, - ) - - def run_defer_iff_not_exists(self): - results = self.run_dbt(['seed', '--target', 'otherschema']) - assert len(results) == 1 - results = self.run_dbt(['run', '--target', 'otherschema']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - results = self.run_dbt(['seed']) - assert len(results) == 1 - results = self.run_dbt(['run', '--state', 'state', '--defer']) - assert len(results) == 2 - - # because the seed now exists in our schema, we shouldn't defer it - assert self.other_schema not in results[0].node.compiled_code - assert self.unique_schema() in results[0].node.compiled_code - - def run_defer_deleted_upstream(self): - results = self.run_dbt(['seed']) - assert len(results) == 1 - results = self.run_dbt(['run']) - assert len(results) == 2 - - # copy files over from the happy times when we had a good target - self.copy_state() - - self.use_default_project({'model-paths': ['changed_models_missing']}) - # ephemeral_model is now gone. 
previously this caused a - # keyerror (dbt#2875), now it should pass - self.run_dbt( - ['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema'], - expect_pass=True, - ) - - # despite deferral, test should use models just created in our schema - results = self.run_dbt(['test', '--state', 'state', '--defer']) - assert self.other_schema not in results[0].node.compiled_code - assert self.unique_schema() in results[0].node.compiled_code - - @use_profile('redshift') - def test_redshift_state_changetarget(self): - self.run_and_defer() diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql deleted file mode 100644 index 18d0d5d88..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql +++ /dev/null @@ -1,29 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='append_new_columns' - ) -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2, - cast(field3 as {{string_type}}) as field3, - cast(field4 as {{string_type}}) as field4 -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 -FROM source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql deleted file mode 100644 index 19c8ea616..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql +++ /dev/null @@ -1,28 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='append_new_columns' - ) -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field3 as {{string_type}}) as field3, - cast(field4 as {{string_type}}) as field4 -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 -FROM source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql deleted file mode 100644 index 419fdf96b..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config(materialized='table') -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id, - cast(field1 as {{string_type}}) as field1, - cast(CASE WHEN id > 3 THEN NULL ELSE field2 END as {{string_type}}) AS field2, - cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS 
field3, - cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql deleted file mode 100644 index 55ed7b2c5..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config(materialized='table') -}} - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id - ,cast(field1 as {{string_type}}) as field1 - ,cast(field2 as {{string_type}}) as field2 - ,cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3 - ,cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_fail.sql b/tests/integration/incremental_schema_tests/models/incremental_fail.sql deleted file mode 100644 index 590f5b56d..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_fail.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='fail' - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, field1, field2 FROM source_data - -{% else %} - -SELECT id, field1, field3 FROm source_data - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_ignore.sql b/tests/integration/incremental_schema_tests/models/incremental_ignore.sql deleted file mode 100644 index 51dee6022..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_ignore.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='ignore' - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, field1, field2, field3, field4 FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, field1, field2 FROM source_data LIMIT 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql b/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql deleted file mode 100644 index 92d4564e0..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql +++ /dev/null @@ -1,15 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id - ,field1 - ,field2 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql b/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql deleted file mode 100644 index 56a3e3c0f..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql +++ /dev/null @@ -1,31 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='sync_all_columns' - - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -{% 
if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field3 as {{string_type}}) as field3, -- to validate new fields - cast(field4 as {{string_type}}) AS field4 -- to validate new fields - -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -select id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 - -from source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql b/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql deleted file mode 100644 index abffbf746..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql +++ /dev/null @@ -1,20 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -{% set string_type = 'string' if target.type == 'bigquery' else 'varchar(10)' %} - -select id - ,cast(field1 as {{string_type}}) as field1 - --,field2 - ,cast(case when id <= 3 then null else field3 end as {{string_type}}) as field3 - ,cast(case when id <= 3 then null else field4 end as {{string_type}}) as field4 - -from source_data -order by id \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/model_a.sql b/tests/integration/incremental_schema_tests/models/model_a.sql deleted file mode 100644 index 2a0b2ddaf..000000000 --- a/tests/integration/incremental_schema_tests/models/model_a.sql +++ /dev/null @@ -1,22 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4 - union all select 2 as id, 'ccc' as field1, 'ddd' as field2, 222 as field3, 'UUU' as field4 - union all select 3 as id, 'eee' as field1, 'fff' as field2, 333 as field3, 'VVV' as field4 - union all select 4 as id, 'ggg' as field1, 'hhh' as field2, 444 as field3, 'WWW' as field4 - union all select 5 as id, 'iii' as field1, 'jjj' as field2, 555 as field3, 'XXX' as field4 - union all select 6 as id, 'kkk' as field1, 'lll' as field2, 666 as field3, 'YYY' as field4 - -) - -select id - ,field1 - ,field2 - ,field3 - ,field4 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/schema.yml b/tests/integration/incremental_schema_tests/models/schema.yml deleted file mode 100644 index 6d2a85bea..000000000 --- a/tests/integration/incremental_schema_tests/models/schema.yml +++ /dev/null @@ -1,68 +0,0 @@ -version: 2 - -models: - - name: model_a - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_ignore - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_ignore_target - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_append_new_columns - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_append_new_columns_target - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_append_new_columns_remove_one - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_append_new_columns_remove_one_target - columns: - - name: id - tags: [column_level_tag] - tests: - - unique - - - name: incremental_sync_all_columns - columns: - - name: id - tags: [column_level_tag] - 
tests: - - unique - - - name: incremental_sync_all_columns_target - columns: - - name: id - tags: [column_leveL_tag] - tests: - - unique - - - \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/test_incremental_schema.py b/tests/integration/incremental_schema_tests/test_incremental_schema.py deleted file mode 100644 index 7dff20c1e..000000000 --- a/tests/integration/incremental_schema_tests/test_incremental_schema.py +++ /dev/null @@ -1,148 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, FakeArgs, use_profile - - -class TestSelectionExpansion(DBTIntegrationTest): - @property - def schema(self): - return "test_incremental_schema" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - "config-version": 2, - "test-paths": ["tests"] - } - - def list_tests_and_assert(self, include, exclude, expected_tests): - list_args = ['ls', '--resource-type', 'test'] - if include: - list_args.extend(('--select', include)) - if exclude: - list_args.extend(('--exclude', exclude)) - - listed = self.run_dbt(list_args) - print(listed) - assert len(listed) == len(expected_tests) - - test_names = [name.split('.')[-1] for name in listed] - assert sorted(test_names) == sorted(expected_tests) - - def run_tests_and_assert( - self, include, exclude, expected_tests, compare_source, compare_target - ): - - run_args = ['run'] - if include: - run_args.extend(('--models', include)) - - results_one = self.run_dbt(run_args) - results_two = self.run_dbt(run_args) - - self.assertEqual(len(results_one), 3) - self.assertEqual(len(results_two), 3) - - test_args = ['test'] - if include: - test_args.extend(('--models', include)) - if exclude: - test_args.extend(('--exclude', exclude)) - - results = self.run_dbt(test_args) - tests_run = [r.node.name for r in results] - assert len(tests_run) == len(expected_tests) - assert sorted(tests_run) == sorted(expected_tests) - self.assertTablesEqual(compare_source, compare_target) - - def run_incremental_ignore(self): - select = 'model_a incremental_ignore incremental_ignore_target' - compare_source = 'incremental_ignore' - compare_target = 'incremental_ignore_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_ignore', - 'select_from_incremental_ignore_target', - 'unique_model_a_id', - 'unique_incremental_ignore_id', - 'unique_incremental_ignore_target_id' - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_append_new_columns(self): - select = 'model_a incremental_append_new_columns incremental_append_new_columns_target' - compare_source = 'incremental_append_new_columns' - compare_target = 'incremental_append_new_columns_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_append_new_columns', - 'select_from_incremental_append_new_columns_target', - 'unique_model_a_id', - 'unique_incremental_append_new_columns_id', - 'unique_incremental_append_new_columns_target_id' - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_append_new_columns_remove_one(self): - select = 'model_a incremental_append_new_columns_remove_one incremental_append_new_columns_remove_one_target' - compare_source = 'incremental_append_new_columns_remove_one' - compare_target = 
'incremental_append_new_columns_remove_one_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_append_new_columns_remove_one', - 'select_from_incremental_append_new_columns_remove_one_target', - 'unique_model_a_id', - 'unique_incremental_append_new_columns_remove_one_id', - 'unique_incremental_append_new_columns_remove_one_target_id' - ] - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_sync_all_columns(self): - select = 'model_a incremental_sync_all_columns incremental_sync_all_columns_target' - compare_source = 'incremental_sync_all_columns' - compare_target = 'incremental_sync_all_columns_target' - exclude = None - expected = [ - 'select_from_a', - 'select_from_incremental_sync_all_columns', - 'select_from_incremental_sync_all_columns_target', - 'unique_model_a_id', - 'unique_incremental_sync_all_columns_id', - 'unique_incremental_sync_all_columns_target_id' - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target) - - def run_incremental_fail_on_schema_change(self): - select = 'model_a incremental_fail' - self.run_dbt(['run', '--models', select, '--full-refresh']) - results = self.run_dbt(['run', '--models', select], expect_pass=False) - self.assertIn('Compilation Error', results[1].message) - - @use_profile('redshift') - def test__redshift__run_incremental_ignore(self): - self.run_incremental_ignore() - - @use_profile('redshift') - def test__redshift__run_incremental_append_new_columns(self): - self.run_incremental_append_new_columns() - self.run_incremental_append_new_columns_remove_one() - - @use_profile('redshift') - def test__redshift__run_incremental_sync_all_columns(self): - self.run_incremental_sync_all_columns() - - @use_profile('redshift') - def test__redshift__run_incremental_fail_on_schema_change(self): - self.run_incremental_fail_on_schema_change() - diff --git a/tests/integration/incremental_schema_tests/tests/select_from_a.sql b/tests/integration/incremental_schema_tests/tests/select_from_a.sql deleted file mode 100644 index 3dc8f2857..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_a.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('model_a') }} where false diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql deleted file mode 100644 index 947e84588..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql deleted file mode 100644 index 06d52c6d6..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns_remove_one') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql 
b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql deleted file mode 100644 index 07d2412b0..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns_remove_one_target') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql deleted file mode 100644 index 8b86eddd7..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_append_new_columns_target') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql deleted file mode 100644 index d565c8464..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_ignore') }} where false diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql deleted file mode 100644 index 35d535c5c..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_ignore_target') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql deleted file mode 100644 index aedc9f803..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_sync_all_columns') }} where false \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql deleted file mode 100644 index 4b703c988..000000000 --- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('incremental_sync_all_columns_target') }} where false \ No newline at end of file diff --git a/tests/integration/macro_tests/dispatch-inheritance-models/model.sql b/tests/integration/macro_tests/dispatch-inheritance-models/model.sql deleted file mode 100644 index 7b8c49be3..000000000 --- a/tests/integration/macro_tests/dispatch-inheritance-models/model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ dispatch_to_parent() }} -select 1 as id diff --git a/tests/integration/macro_tests/macros/my_macros.sql b/tests/integration/macro_tests/macros/my_macros.sql deleted file mode 100644 index 827f3f09f..000000000 --- a/tests/integration/macro_tests/macros/my_macros.sql +++ /dev/null @@ -1,30 +0,0 @@ - -{% macro do_something2(foo2, bar2) %} - - select - '{{ foo2 }}' as foo2, - '{{ bar2 }}' as bar2 - -{% endmacro %} - - -{% 
macro with_ref() %} - - {{ ref('table_model') }} - -{% endmacro %} - - -{% macro dispatch_to_parent() %} - {% set macro = adapter.dispatch('dispatch_to_parent') %} - {{ macro() }} -{% endmacro %} - -{% macro default__dispatch_to_parent() %} - {% set msg = 'No default implementation of dispatch_to_parent' %} - {{ exceptions.raise_compiler_error(msg) }} -{% endmacro %} - -{% macro postgres__dispatch_to_parent() %} - {{ return('') }} -{% endmacro %} diff --git a/tests/integration/macro_tests/models/.gitkeep b/tests/integration/macro_tests/models/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/integration/macro_tests/models/dep_macro.sql b/tests/integration/macro_tests/models/dep_macro.sql deleted file mode 100644 index 45cfa0040..000000000 --- a/tests/integration/macro_tests/models/dep_macro.sql +++ /dev/null @@ -1,4 +0,0 @@ - -{{ - dbt_integration_project.do_something("arg1", "arg2") -}} diff --git a/tests/integration/macro_tests/models/local_macro.sql b/tests/integration/macro_tests/models/local_macro.sql deleted file mode 100644 index edb66b7a3..000000000 --- a/tests/integration/macro_tests/models/local_macro.sql +++ /dev/null @@ -1,12 +0,0 @@ - -{{ - do_something2("arg1", "arg2") -}} - -union all - -{{ - test.do_something2("arg3", "arg4") -}} - - diff --git a/tests/integration/macro_tests/models/ref_macro.sql b/tests/integration/macro_tests/models/ref_macro.sql deleted file mode 100644 index 0085f6954..000000000 --- a/tests/integration/macro_tests/models/ref_macro.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ with_ref() }} diff --git a/tests/integration/macro_tests/no-default-macros/my_macros.sql b/tests/integration/macro_tests/no-default-macros/my_macros.sql deleted file mode 100644 index 8980c6c56..000000000 --- a/tests/integration/macro_tests/no-default-macros/my_macros.sql +++ /dev/null @@ -1,35 +0,0 @@ - -{% macro do_something2(foo2, bar2) %} - - select - '{{ foo2 }}' as foo2, - '{{ bar2 }}' as bar2 - -{% endmacro %} - - -{% macro with_ref() %} - - {{ ref('table_model') }} - -{% endmacro %} - -{# there is no default__dispatch_to_nowhere! 
#} -{% macro dispatch_to_nowhere() %} - {% set macro = adapter.dispatch('dispatch_to_nowhere') %} - {{ macro() }} -{% endmacro %} - -{% macro dispatch_to_parent() %} - {% set macro = adapter.dispatch('dispatch_to_parent') %} - {{ macro() }} -{% endmacro %} - -{% macro default__dispatch_to_parent() %} - {% set msg = 'No default implementation of dispatch_to_parent' %} - {{ exceptions.raise_compiler_error(msg) }} -{% endmacro %} - -{% macro postgres__dispatch_to_parent() %} - {{ return('') }} -{% endmacro %} diff --git a/tests/integration/macro_tests/override-get-columns-macros/macros.sql b/tests/integration/macro_tests/override-get-columns-macros/macros.sql deleted file mode 100644 index 73fe0ccfb..000000000 --- a/tests/integration/macro_tests/override-get-columns-macros/macros.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro get_columns_in_relation(relation) %} - {{ return('a string') }} -{% endmacro %} diff --git a/tests/integration/macro_tests/override-get-columns-models/model.sql b/tests/integration/macro_tests/override-get-columns-models/model.sql deleted file mode 100644 index 7be007e24..000000000 --- a/tests/integration/macro_tests/override-get-columns-models/model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{% set result = adapter.get_columns_in_relation(this) %} -{% if execute and result != 'a string' %} - {% do exceptions.raise_compiler_error('overriding get_columns_in_relation failed') %} -{% endif %} -select 1 as id diff --git a/tests/integration/macro_tests/package_macro_overrides/dbt_project.yml b/tests/integration/macro_tests/package_macro_overrides/dbt_project.yml deleted file mode 100644 index bcf5c9285..000000000 --- a/tests/integration/macro_tests/package_macro_overrides/dbt_project.yml +++ /dev/null @@ -1,7 +0,0 @@ -name: 'package_macro_overrides' -version: '1.0' -config-version: 2 - -profile: 'default' - -macro-paths: ["macros"] diff --git a/tests/integration/macro_tests/package_macro_overrides/macros/macros.sql b/tests/integration/macro_tests/package_macro_overrides/macros/macros.sql deleted file mode 100644 index 73fe0ccfb..000000000 --- a/tests/integration/macro_tests/package_macro_overrides/macros/macros.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro get_columns_in_relation(relation) %} - {{ return('a string') }} -{% endmacro %} diff --git a/tests/integration/macro_tests/seed.sql b/tests/integration/macro_tests/seed.sql deleted file mode 100644 index 8556efdec..000000000 --- a/tests/integration/macro_tests/seed.sql +++ /dev/null @@ -1,24 +0,0 @@ -create table {schema}.expected_dep_macro ( - foo TEXT, - bar TEXT -); - -create table {schema}.expected_local_macro ( - foo2 TEXT, - bar2 TEXT -); - -create table {schema}.seed ( - id integer, - updated_at timestamp -); - -insert into {schema}.expected_dep_macro (foo, bar) -values ('arg1', 'arg2'); - -insert into {schema}.expected_local_macro (foo2, bar2) -values ('arg1', 'arg2'), ('arg3', 'arg4'); - -insert into {schema}.seed (id, updated_at) -values (1, '2017-01-01'), (2, '2017-01-02'); - diff --git a/tests/integration/macro_tests/test_macros.py b/tests/integration/macro_tests/test_macros.py deleted file mode 100644 index 2817332ac..000000000 --- a/tests/integration/macro_tests/test_macros.py +++ /dev/null @@ -1,15 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestDispatchMacroUseParent(DBTIntegrationTest): - @property - def schema(self): - return "test_macros" - - @property - def models(self): - return "dispatch-inheritance-models" - - @use_profile('redshift') - def 
test_redshift_inherited_macro(self): - self.run_dbt(['run']) diff --git a/tests/integration/persist_docs_tests/models-column-missing/missing_column.sql b/tests/integration/persist_docs_tests/models-column-missing/missing_column.sql deleted file mode 100644 index 642b0f14a..000000000 --- a/tests/integration/persist_docs_tests/models-column-missing/missing_column.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='table') }} -select 1 as id, 'Ed' as name diff --git a/tests/integration/persist_docs_tests/models-column-missing/schema.yml b/tests/integration/persist_docs_tests/models-column-missing/schema.yml deleted file mode 100644 index aa7b4f888..000000000 --- a/tests/integration/persist_docs_tests/models-column-missing/schema.yml +++ /dev/null @@ -1,8 +0,0 @@ -version: 2 -models: - - name: missing_column - columns: - - name: id - description: "test id column description" - - name: column_that_does_not_exist - description: "comment that cannot be created" diff --git a/tests/integration/persist_docs_tests/models/my_fun_docs.md b/tests/integration/persist_docs_tests/models/my_fun_docs.md deleted file mode 100644 index f3c0fbf55..000000000 --- a/tests/integration/persist_docs_tests/models/my_fun_docs.md +++ /dev/null @@ -1,10 +0,0 @@ -{% docs my_fun_doc %} -name Column description "with double quotes" -and with 'single quotes' as welll as other; -'''abc123''' -reserved -- characters --- -/* comment */ -Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - -{% enddocs %} diff --git a/tests/integration/persist_docs_tests/models/no_docs_model.sql b/tests/integration/persist_docs_tests/models/no_docs_model.sql deleted file mode 100644 index e39a7a156..000000000 --- a/tests/integration/persist_docs_tests/models/no_docs_model.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as id, 'Alice' as name diff --git a/tests/integration/persist_docs_tests/models/schema.yml b/tests/integration/persist_docs_tests/models/schema.yml deleted file mode 100644 index 5a9091624..000000000 --- a/tests/integration/persist_docs_tests/models/schema.yml +++ /dev/null @@ -1,70 +0,0 @@ -version: 2 - -models: - - name: table_model - description: | - Table model description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - columns: - - name: id - description: | - id Column description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - - name: name - description: | - Some stuff here and then a call to - {{ doc('my_fun_doc')}} - - name: view_model - description: | - View model description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - columns: - - name: id - description: | - id Column description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - -seeds: - - name: seed - description: | - Seed model description "with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - columns: - - name: id - description: | - id Column description 
"with double quotes" - and with 'single quotes' as welll as other; - '''abc123''' - reserved -- characters - -- - /* comment */ - Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting - - name: name - description: | - Some stuff here and then a call to - {{ doc('my_fun_doc')}} diff --git a/tests/integration/persist_docs_tests/models/table_model.sql b/tests/integration/persist_docs_tests/models/table_model.sql deleted file mode 100644 index c0e93c3f3..000000000 --- a/tests/integration/persist_docs_tests/models/table_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='table') }} -select 1 as id, 'Joe' as name diff --git a/tests/integration/persist_docs_tests/models/view_model.sql b/tests/integration/persist_docs_tests/models/view_model.sql deleted file mode 100644 index a6f96a16d..000000000 --- a/tests/integration/persist_docs_tests/models/view_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(materialized='view') }} -select 2 as id, 'Bob' as name diff --git a/tests/integration/persist_docs_tests/seeds/seed.csv b/tests/integration/persist_docs_tests/seeds/seed.csv deleted file mode 100644 index 1a728c8ab..000000000 --- a/tests/integration/persist_docs_tests/seeds/seed.csv +++ /dev/null @@ -1,3 +0,0 @@ -id,name -1,Alice -2,Bob diff --git a/tests/integration/persist_docs_tests/test_persist_docs.py b/tests/integration/persist_docs_tests/test_persist_docs.py deleted file mode 100644 index f0374fb42..000000000 --- a/tests/integration/persist_docs_tests/test_persist_docs.py +++ /dev/null @@ -1,132 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import os - -import json - - -class BasePersistDocsTest(DBTIntegrationTest): - @property - def schema(self): - return "persist_docs" - - @property - def models(self): - return "models" - - def _assert_common_comments(self, *comments): - for comment in comments: - assert '"with double quotes"' in comment - assert """'''abc123'''""" in comment - assert '\n' in comment - assert 'Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting' in comment - assert '/* comment */' in comment - if os.name == 'nt': - assert '--\r\n' in comment or '--\n' in comment - else: - assert '--\n' in comment - - def _assert_has_table_comments(self, table_node): - table_comment = table_node['metadata']['comment'] - assert table_comment.startswith('Table model description') - - table_id_comment = table_node['columns']['id']['comment'] - assert table_id_comment.startswith('id Column description') - - table_name_comment = table_node['columns']['name']['comment'] - assert table_name_comment.startswith( - 'Some stuff here and then a call to') - - self._assert_common_comments( - table_comment, table_id_comment, table_name_comment - ) - - def _assert_has_view_comments(self, view_node, has_node_comments=True, - has_column_comments=True): - view_comment = view_node['metadata']['comment'] - if has_node_comments: - assert view_comment.startswith('View model description') - self._assert_common_comments(view_comment) - else: - assert view_comment is None - - view_id_comment = view_node['columns']['id']['comment'] - if has_column_comments: - assert view_id_comment.startswith('id Column description') - self._assert_common_comments(view_id_comment) - else: - assert view_id_comment is None - - view_name_comment = view_node['columns']['name']['comment'] - assert view_name_comment is None - - -class TestPersistDocs(BasePersistDocsTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'models': { - 'test': { - 
'+persist_docs': { - "relation": True, - "columns": True, - }, - } - } - } - - def run_has_comments_pglike(self): - self.run_dbt() - self.run_dbt(['docs', 'generate']) - with open('target/catalog.json') as fp: - catalog_data = json.load(fp) - assert 'nodes' in catalog_data - assert len(catalog_data['nodes']) == 3 - table_node = catalog_data['nodes']['model.test.table_model'] - view_node = self._assert_has_table_comments(table_node) - - view_node = catalog_data['nodes']['model.test.view_model'] - self._assert_has_view_comments(view_node) - - no_docs_node = catalog_data['nodes']['model.test.no_docs_model'] - self._assert_has_view_comments(no_docs_node, False, False) - - @use_profile('redshift') - def test_redshift_comments(self): - self.run_has_comments_pglike() - - -class TestPersistDocsLateBinding(BasePersistDocsTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'models': { - 'test': { - '+persist_docs': { - "relation": True, - "columns": True, - }, - 'view_model': { - 'bind': False, - } - } - } - } - - @use_profile('redshift') - def test_redshift_late_binding_view(self): - self.run_dbt() - self.run_dbt(['docs', 'generate']) - with open('target/catalog.json') as fp: - catalog_data = json.load(fp) - assert 'nodes' in catalog_data - assert len(catalog_data['nodes']) == 3 - table_node = catalog_data['nodes']['model.test.table_model'] - view_node = self._assert_has_table_comments(table_node) - - view_node = catalog_data['nodes']['model.test.view_model'] - self._assert_has_view_comments(view_node, False, False) - - no_docs_node = catalog_data['nodes']['model.test.no_docs_model'] - self._assert_has_view_comments(no_docs_node, False, False) diff --git a/tests/integration/redshift_test/models/model.sql b/tests/integration/redshift_test/models/model.sql deleted file mode 100644 index 8ca73c7a0..000000000 --- a/tests/integration/redshift_test/models/model.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ - config( - materialized='view', bind=False - ) -}} - -select * from {{ ref('seed') }} diff --git a/tests/integration/redshift_test/seed/seed.csv b/tests/integration/redshift_test/seed/seed.csv deleted file mode 100644 index ef154f552..000000000 --- a/tests/integration/redshift_test/seed/seed.csv +++ /dev/null @@ -1,2 +0,0 @@ -id,first_name,email,ip_address,updated_at -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 diff --git a/tests/integration/redshift_test/test_late_binding_view.py b/tests/integration/redshift_test/test_late_binding_view.py deleted file mode 100644 index 8098c572b..000000000 --- a/tests/integration/redshift_test/test_late_binding_view.py +++ /dev/null @@ -1,39 +0,0 @@ -import os - -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestLateBindingView(DBTIntegrationTest): - @property - def schema(self): - return 'late_binding_view' - - @staticmethod - def dir(path): - return os.path.normpath(path) - - @property - def models(self): - return self.dir("models") - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': [self.dir('seed')], - 'seeds': { - 'quote_columns': False, - } - } - - @use_profile('redshift') - def test__redshift_late_binding_view_query(self): - self.assertEqual(len(self.run_dbt(["seed"])), 1) - self.assertEqual(len(self.run_dbt()), 1) - # remove the table. Use 'cascade' here so that if late-binding views - # didn't work as advertised, the following dbt run will fail. 
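Late binding is what makes the drop-and-rerun assertion below meaningful: a view created with no schema binding holds no dependency on the seed table, so the table can be dropped with CASCADE without taking the view down. A sketch of that check, assuming a DB-API style cursor (connection wiring is elided and illustrative; dbt-redshift renders the model's bind=False config as WITH NO SCHEMA BINDING):

    import textwrap

    def late_binding_smoke_test(cursor, schema: str) -> None:
        # A late-binding view resolves its referenced relations at query time.
        cursor.execute(textwrap.dedent(f"""
            create view {schema}.model as
            select * from {schema}.seed
            with no schema binding
        """))
        # CASCADE would drop an ordinary (bound) view along with the table;
        # a late-binding view survives, so the next dbt run still succeeds.
        cursor.execute(f"drop table if exists {schema}.seed cascade")
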
- drop = 'drop table if exists {}.seed cascade'.format( - self.unique_schema() - ) - self.run_sql(drop) - self.assertEqual(len(self.run_dbt()), 1) diff --git a/tests/integration/simple_seed_test/macros/schema_test.sql b/tests/integration/simple_seed_test/macros/schema_test.sql deleted file mode 100644 index 5c7f25964..000000000 --- a/tests/integration/simple_seed_test/macros/schema_test.sql +++ /dev/null @@ -1,22 +0,0 @@ - -{% test column_type(model, column_name, type) %} - - {% set cols = adapter.get_columns_in_relation(model) %} - - {% set col_types = {} %} - {% for col in cols %} - {% do col_types.update({col.name: col.data_type}) %} - {% endfor %} - - {% set validation_message = 'Got a column type of ' ~ col_types.get(column_name) ~ ', expected ' ~ type %} - - {% set val = 0 if col_types.get(column_name) == type else 1 %} - {% if val == 1 and execute %} - {{ log(validation_message, info=True) }} - {% endif %} - - select '{{ validation_message }}' as validation_error - from (select true) as nothing - where {{ val }} = 1 - -{% endtest %} diff --git a/tests/integration/simple_seed_test/models-downstream-seed/model.sql b/tests/integration/simple_seed_test/models-downstream-seed/model.sql deleted file mode 100644 index 6d56d7de5..000000000 --- a/tests/integration/simple_seed_test/models-downstream-seed/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('seed_actual') }} diff --git a/tests/integration/simple_seed_test/models-exist/model.sql b/tests/integration/simple_seed_test/models-exist/model.sql deleted file mode 100644 index 809a05ba8..000000000 --- a/tests/integration/simple_seed_test/models-exist/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ this.schema }}.seed_expected diff --git a/tests/integration/simple_seed_test/models-rs/schema.yml b/tests/integration/simple_seed_test/models-rs/schema.yml deleted file mode 100644 index 00a79bf93..000000000 --- a/tests/integration/simple_seed_test/models-rs/schema.yml +++ /dev/null @@ -1,47 +0,0 @@ -version: 2 -seeds: -- name: seed_enabled - columns: - - name: birthday - tests: - - column_type: - type: date - - name: id - tests: - - column_type: - type: character varying(256) - -- name: seed_tricky - columns: - - name: id - tests: - - column_type: - type: integer - - name: id_str - tests: - - column_type: - type: character varying(256) - - name: a_bool - tests: - - column_type: - type: boolean - - name: looks_like_a_bool - tests: - - column_type: - type: character varying(256) - - name: a_date - tests: - - column_type: - type: timestamp without time zone - - name: looks_like_a_date - tests: - - column_type: - type: character varying(256) - - name: relative - tests: - - column_type: - type: character varying(9) - - name: weekday - tests: - - column_type: - type: character varying(8) diff --git a/tests/integration/simple_seed_test/seed.sql b/tests/integration/simple_seed_test/seed.sql deleted file mode 100644 index 0f35d90ef..000000000 --- a/tests/integration/simple_seed_test/seed.sql +++ /dev/null @@ -1,512 +0,0 @@ -create table {schema}.seed_expected ( - id INTEGER, - first_name TEXT, - email TEXT, - ip_address TEXT, - birthday TIMESTAMP WITHOUT TIME ZONE -); - - -INSERT INTO {schema}.seed_expected - ("id","first_name","email","ip_address","birthday") -VALUES - (1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194','2008-09-12 19:08:31'), - (2,'Larry','lperkins1@toplist.cz','64.210.133.162','1978-05-09 04:15:14'), - (3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), - 
(4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), - (5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), - (6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), - (7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), - (8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), - (9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), - (10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), - (11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), - (12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), - (13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), - (14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), - (15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), - (16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), - (17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), - (18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), - (19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), - (20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), - (21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), - (22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), - (23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), - (24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), - (25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), - (26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), - (27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), - (28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), - (29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), - (30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), - (31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), - (32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), - (33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), - (34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), - (35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), - (36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), - (37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), - (38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), - (39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), - (40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), - (41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), - (42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), - (43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), - (44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), - (45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), - (46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), - (47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), - (48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), - 
(49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), - (50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), - (51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), - (52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), - (53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), - (54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), - (55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), - (56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), - (57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), - (58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), - (59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), - (60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), - (61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), - (62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), - (63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), - (64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), - (65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), - (66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), - (67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), - (68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), - (69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), - (70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), - (71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), - (72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), - (73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), - (74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), - (75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), - (76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), - (77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), - (78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), - (79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), - (80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), - (81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), - (82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), - (83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), - (84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), - (85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), - (86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), - (87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), - (88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), - (89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), - (90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), - (91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), - (92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), - (93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), - 
(94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), - (95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), - (96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), - (97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), - (98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), - (99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), - (100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'), - (101,'Deborah','dknight2s@reverbnation.com','222.131.211.191','1970-07-08 08:54:23'), - (102,'Sandra','sblack2t@tripadvisor.com','254.183.128.254','2000-04-12 02:39:36'), - (103,'Edward','eburns2u@dailymotion.com','253.89.118.18','1993-10-10 10:54:01'), - (104,'Anthony','ayoung2v@ustream.tv','118.4.193.176','1978-08-26 17:07:29'), - (105,'Donald','dlawrence2w@wp.com','139.200.159.227','2007-07-21 20:56:20'), - (106,'Matthew','mfreeman2x@google.fr','205.26.239.92','2014-12-05 17:05:39'), - (107,'Sean','ssanders2y@trellian.com','143.89.82.108','1993-07-14 21:45:02'), - (108,'Sharon','srobinson2z@soundcloud.com','66.234.247.54','1977-04-06 19:07:03'), - (109,'Jennifer','jwatson30@t-online.de','196.102.127.7','1998-03-07 05:12:23'), - (110,'Clarence','cbrooks31@si.edu','218.93.234.73','2002-11-06 17:22:25'), - (111,'Jose','jflores32@goo.gl','185.105.244.231','1995-01-05 06:32:21'), - (112,'George','glee33@adobe.com','173.82.249.196','2015-01-04 02:47:46'), - (113,'Larry','lhill34@linkedin.com','66.5.206.195','2010-11-02 10:21:17'), - (114,'Marie','mmeyer35@mysql.com','151.152.88.107','1990-05-22 20:52:51'), - (115,'Clarence','cwebb36@skype.com','130.198.55.217','1972-10-27 07:38:54'), - (116,'Sarah','scarter37@answers.com','80.89.18.153','1971-08-24 19:29:30'), - (117,'Henry','hhughes38@webeden.co.uk','152.60.114.174','1973-01-27 09:00:42'), - (118,'Teresa','thenry39@hao123.com','32.187.239.106','2015-11-06 01:48:44'), - (119,'Billy','bgutierrez3a@sun.com','52.37.70.134','2002-03-19 03:20:19'), - (120,'Anthony','agibson3b@github.io','154.251.232.213','1991-04-19 01:08:15'), - (121,'Sandra','sromero3c@wikia.com','44.124.171.2','1998-09-06 20:30:34'), - (122,'Paula','pandrews3d@blogs.com','153.142.118.226','2003-06-24 16:31:24'), - (123,'Terry','tbaker3e@csmonitor.com','99.120.45.219','1970-12-09 23:57:21'), - (124,'Lois','lwilson3f@reuters.com','147.44.171.83','1971-01-09 22:28:51'), - (125,'Sara','smorgan3g@nature.com','197.67.192.230','1992-01-28 20:33:24'), - (126,'Charles','ctorres3h@china.com.cn','156.115.216.2','1993-10-02 19:36:34'), - (127,'Richard','ralexander3i@marriott.com','248.235.180.59','1999-02-03 18:40:55'), - (128,'Christina','charper3j@cocolog-nifty.com','152.114.116.129','1978-09-13 00:37:32'), - (129,'Steve','sadams3k@economist.com','112.248.91.98','2004-03-21 09:07:43'), - (130,'Katherine','krobertson3l@ow.ly','37.220.107.28','1977-03-18 19:28:50'), - (131,'Donna','dgibson3m@state.gov','222.218.76.221','1999-02-01 06:46:16'), - (132,'Christina','cwest3n@mlb.com','152.114.6.160','1979-12-24 15:30:35'), - (133,'Sandra','swillis3o@meetup.com','180.71.49.34','1984-09-27 08:05:54'), - (134,'Clarence','cedwards3p@smugmug.com','10.64.180.186','1979-04-16 16:52:10'), - (135,'Ruby','rjames3q@wp.com','98.61.54.20','2007-01-13 14:25:52'), - (136,'Sarah','smontgomery3r@tripod.com','91.45.164.172','2009-07-25 04:34:30'), - (137,'Sarah','soliver3s@eventbrite.com','30.106.39.146','2012-05-09 22:12:33'), - 
(138,'Deborah','dwheeler3t@biblegateway.com','59.105.213.173','1999-11-09 08:08:44'), - (139,'Deborah','dray3u@i2i.jp','11.108.186.217','2014-02-04 03:15:19'), - (140,'Paul','parmstrong3v@alexa.com','6.250.59.43','2009-12-21 10:08:53'), - (141,'Aaron','abishop3w@opera.com','207.145.249.62','1996-04-25 23:20:23'), - (142,'Henry','hsanders3x@google.ru','140.215.203.171','2012-01-29 11:52:32'), - (143,'Anne','aanderson3y@1688.com','74.150.102.118','1982-04-03 13:46:17'), - (144,'Victor','vmurphy3z@hugedomains.com','222.155.99.152','1987-11-03 19:58:41'), - (145,'Evelyn','ereid40@pbs.org','249.122.33.117','1977-12-14 17:09:57'), - (146,'Brian','bgonzalez41@wikia.com','246.254.235.141','1991-02-24 00:45:58'), - (147,'Sandra','sgray42@squarespace.com','150.73.28.159','1972-07-28 17:26:32'), - (148,'Alice','ajones43@a8.net','78.253.12.177','2002-12-05 16:57:46'), - (149,'Jessica','jhanson44@mapquest.com','87.229.30.160','1994-01-30 11:40:04'), - (150,'Louise','lbailey45@reuters.com','191.219.31.101','2011-09-07 21:11:45'), - (151,'Christopher','cgonzalez46@printfriendly.com','83.137.213.239','1984-10-24 14:58:04'), - (152,'Gregory','gcollins47@yandex.ru','28.176.10.115','1998-07-25 17:17:10'), - (153,'Jane','jperkins48@usnews.com','46.53.164.159','1979-08-19 15:25:00'), - (154,'Phyllis','plong49@yahoo.co.jp','208.140.88.2','1985-07-06 02:16:36'), - (155,'Adam','acarter4a@scribd.com','78.48.148.204','2005-07-20 03:31:09'), - (156,'Frank','fweaver4b@angelfire.com','199.180.255.224','2011-03-04 23:07:54'), - (157,'Ronald','rmurphy4c@cloudflare.com','73.42.97.231','1991-01-11 10:39:41'), - (158,'Richard','rmorris4d@e-recht24.de','91.9.97.223','2009-01-17 21:05:15'), - (159,'Rose','rfoster4e@woothemes.com','203.169.53.16','1991-04-21 02:09:38'), - (160,'George','ggarrett4f@uiuc.edu','186.61.5.167','1989-11-11 11:29:42'), - (161,'Victor','vhamilton4g@biblegateway.com','121.229.138.38','2012-06-22 18:01:23'), - (162,'Mark','mbennett4h@businessinsider.com','209.184.29.203','1980-04-16 15:26:34'), - (163,'Martin','mwells4i@ifeng.com','97.223.55.105','2010-05-26 14:08:18'), - (164,'Diana','dstone4j@google.ru','90.155.52.47','2013-02-11 00:14:54'), - (165,'Walter','wferguson4k@blogger.com','30.63.212.44','1986-02-20 17:46:46'), - (166,'Denise','dcoleman4l@vistaprint.com','10.209.153.77','1992-05-13 20:14:14'), - (167,'Philip','pknight4m@xing.com','15.28.135.167','2000-09-11 18:41:13'), - (168,'Russell','rcarr4n@youtube.com','113.55.165.50','2008-07-10 17:49:27'), - (169,'Donna','dburke4o@dion.ne.jp','70.0.105.111','1992-02-10 17:24:58'), - (170,'Anne','along4p@squidoo.com','36.154.58.107','2012-08-19 23:35:31'), - (171,'Clarence','cbanks4q@webeden.co.uk','94.57.53.114','1972-03-11 21:46:44'), - (172,'Betty','bbowman4r@cyberchimps.com','178.115.209.69','2013-01-13 21:34:51'), - (173,'Andrew','ahudson4s@nytimes.com','84.32.252.144','1998-09-15 14:20:04'), - (174,'Keith','kgordon4t@cam.ac.uk','189.237.211.102','2009-01-22 05:34:38'), - (175,'Patrick','pwheeler4u@mysql.com','47.22.117.226','1984-09-05 22:33:15'), - (176,'Jesse','jfoster4v@mapquest.com','229.95.131.46','1990-01-20 12:19:15'), - (177,'Arthur','afisher4w@jugem.jp','107.255.244.98','1983-10-13 11:08:46'), - (178,'Nicole','nryan4x@wsj.com','243.211.33.221','1974-05-30 23:19:14'), - (179,'Bruce','bjohnson4y@sfgate.com','17.41.200.101','1992-09-23 02:02:19'), - (180,'Terry','tcox4z@reference.com','20.189.120.106','1982-02-13 12:43:14'), - (181,'Ashley','astanley50@kickstarter.com','86.3.56.98','1976-05-09 01:27:16'), - 
(182,'Michael','mrivera51@about.me','72.118.249.0','1971-11-11 17:28:37'), - (183,'Steven','sgonzalez52@mozilla.org','169.112.247.47','2002-08-24 14:59:25'), - (184,'Kathleen','kfuller53@bloglovin.com','80.93.59.30','2002-03-11 13:41:29'), - (185,'Nicole','nhenderson54@usda.gov','39.253.60.30','1995-04-24 05:55:07'), - (186,'Ralph','rharper55@purevolume.com','167.147.142.189','1980-02-10 18:35:45'), - (187,'Heather','hcunningham56@photobucket.com','96.222.196.229','2007-06-15 05:37:50'), - (188,'Nancy','nlittle57@cbc.ca','241.53.255.175','2007-07-12 23:42:48'), - (189,'Juan','jramirez58@pinterest.com','190.128.84.27','1978-11-07 23:37:37'), - (190,'Beverly','bfowler59@chronoengine.com','54.144.230.49','1979-03-31 23:27:28'), - (191,'Shirley','sstevens5a@prlog.org','200.97.231.248','2011-12-06 07:08:50'), - (192,'Annie','areyes5b@squidoo.com','223.32.182.101','2011-05-28 02:42:09'), - (193,'Jack','jkelley5c@tiny.cc','47.34.118.150','1981-12-05 17:31:40'), - (194,'Keith','krobinson5d@1und1.de','170.210.209.31','1999-03-09 11:05:43'), - (195,'Joseph','jmiller5e@google.com.au','136.74.212.139','1984-10-08 13:18:20'), - (196,'Annie','aday5f@blogspot.com','71.99.186.69','1986-02-18 12:27:34'), - (197,'Nancy','nperez5g@liveinternet.ru','28.160.6.107','1983-10-20 17:51:20'), - (198,'Tammy','tward5h@ucoz.ru','141.43.164.70','1980-03-31 04:45:29'), - (199,'Doris','dryan5i@ted.com','239.117.202.188','1985-07-03 03:17:53'), - (200,'Rose','rmendoza5j@photobucket.com','150.200.206.79','1973-04-21 21:36:40'), - (201,'Cynthia','cbutler5k@hubpages.com','80.153.174.161','2001-01-20 01:42:26'), - (202,'Samuel','soliver5l@people.com.cn','86.127.246.140','1970-09-02 02:19:00'), - (203,'Carl','csanchez5m@mysql.com','50.149.237.107','1993-12-01 07:02:09'), - (204,'Kathryn','kowens5n@geocities.jp','145.166.205.201','2004-07-06 18:39:33'), - (205,'Nicholas','nnichols5o@parallels.com','190.240.66.170','2014-11-11 18:52:19'), - (206,'Keith','kwillis5p@youtube.com','181.43.206.100','1998-06-13 06:30:51'), - (207,'Justin','jwebb5q@intel.com','211.54.245.74','2000-11-04 16:58:26'), - (208,'Gary','ghicks5r@wikipedia.org','196.154.213.104','1992-12-01 19:48:28'), - (209,'Martin','mpowell5s@flickr.com','153.67.12.241','1983-06-30 06:24:32'), - (210,'Brenda','bkelley5t@xinhuanet.com','113.100.5.172','2005-01-08 20:50:22'), - (211,'Edward','eray5u@a8.net','205.187.246.65','2011-09-26 08:04:44'), - (212,'Steven','slawson5v@senate.gov','238.150.250.36','1978-11-22 02:48:09'), - (213,'Robert','rthompson5w@furl.net','70.7.89.236','2001-09-12 08:52:07'), - (214,'Jack','jporter5x@diigo.com','220.172.29.99','1976-07-26 14:29:21'), - (215,'Lisa','ljenkins5y@oakley.com','150.151.170.180','2010-03-20 19:21:16'), - (216,'Theresa','tbell5z@mayoclinic.com','247.25.53.173','2001-03-11 05:36:40'), - (217,'Jimmy','jstephens60@weather.com','145.101.93.235','1983-04-12 09:35:30'), - (218,'Louis','lhunt61@amazon.co.jp','78.137.6.253','1997-08-29 19:34:34'), - (219,'Lawrence','lgilbert62@ted.com','243.132.8.78','2015-04-08 22:06:56'), - (220,'David','dgardner63@4shared.com','204.40.46.136','1971-07-09 03:29:11'), - (221,'Charles','ckennedy64@gmpg.org','211.83.233.2','2011-02-26 11:55:04'), - (222,'Lillian','lbanks65@msu.edu','124.233.12.80','2010-05-16 20:29:02'), - (223,'Ernest','enguyen66@baidu.com','82.45.128.148','1996-07-04 10:07:04'), - (224,'Ryan','rrussell67@cloudflare.com','202.53.240.223','1983-08-05 12:36:29'), - (225,'Donald','ddavis68@ustream.tv','47.39.218.137','1989-05-27 02:30:56'), - 
(226,'Joe','jscott69@blogspot.com','140.23.131.75','1973-03-16 12:21:31'), - (227,'Anne','amarshall6a@google.ca','113.162.200.197','1988-12-09 03:38:29'), - (228,'Willie','wturner6b@constantcontact.com','85.83.182.249','1991-10-06 01:51:10'), - (229,'Nicole','nwilson6c@sogou.com','30.223.51.135','1977-05-29 19:54:56'), - (230,'Janet','jwheeler6d@stumbleupon.com','153.194.27.144','2011-03-13 12:48:47'), - (231,'Lois','lcarr6e@statcounter.com','0.41.36.53','1993-02-06 04:52:01'), - (232,'Shirley','scruz6f@tmall.com','37.156.39.223','2007-02-18 17:47:01'), - (233,'Patrick','pford6g@reverbnation.com','36.198.200.89','1977-03-06 15:47:24'), - (234,'Lisa','lhudson6h@usatoday.com','134.213.58.137','2014-10-28 01:56:56'), - (235,'Pamela','pmartinez6i@opensource.org','5.151.127.202','1987-11-30 16:44:47'), - (236,'Larry','lperez6j@infoseek.co.jp','235.122.96.148','1979-01-18 06:33:45'), - (237,'Pamela','pramirez6k@census.gov','138.233.34.163','2012-01-29 10:35:20'), - (238,'Daniel','dcarr6l@php.net','146.21.152.242','1984-11-17 08:22:59'), - (239,'Patrick','psmith6m@indiegogo.com','136.222.199.36','2001-05-30 22:16:44'), - (240,'Raymond','rhenderson6n@hc360.com','116.31.112.38','2000-01-05 20:35:41'), - (241,'Teresa','treynolds6o@miitbeian.gov.cn','198.126.205.220','1996-11-08 01:27:31'), - (242,'Johnny','jmason6p@flickr.com','192.8.232.114','2013-05-14 05:35:50'), - (243,'Angela','akelly6q@guardian.co.uk','234.116.60.197','1977-08-20 02:05:17'), - (244,'Douglas','dcole6r@cmu.edu','128.135.212.69','2016-10-26 17:40:36'), - (245,'Frances','fcampbell6s@twitpic.com','94.22.243.235','1987-04-26 07:07:13'), - (246,'Donna','dgreen6t@chron.com','227.116.46.107','2011-07-25 12:59:54'), - (247,'Benjamin','bfranklin6u@redcross.org','89.141.142.89','1974-05-03 20:28:18'), - (248,'Randy','rpalmer6v@rambler.ru','70.173.63.178','2011-12-20 17:40:18'), - (249,'Melissa','mmurray6w@bbb.org','114.234.118.137','1991-02-26 12:45:44'), - (250,'Jean','jlittle6x@epa.gov','141.21.163.254','1991-08-16 04:57:09'), - (251,'Daniel','dolson6y@nature.com','125.75.104.97','2010-04-23 06:25:54'), - (252,'Kathryn','kwells6z@eventbrite.com','225.104.28.249','2015-01-31 02:21:50'), - (253,'Theresa','tgonzalez70@ox.ac.uk','91.93.156.26','1971-12-11 10:31:31'), - (254,'Beverly','broberts71@bluehost.com','244.40.158.89','2013-09-21 13:02:31'), - (255,'Pamela','pmurray72@netscape.com','218.54.95.216','1985-04-16 00:34:00'), - (256,'Timothy','trichardson73@amazonaws.com','235.49.24.229','2000-11-11 09:48:28'), - (257,'Mildred','mpalmer74@is.gd','234.125.95.132','1992-05-25 02:25:02'), - (258,'Jessica','jcampbell75@google.it','55.98.30.140','2014-08-26 00:26:34'), - (259,'Beverly','bthomas76@cpanel.net','48.78.228.176','1970-08-18 10:40:05'), - (260,'Eugene','eward77@cargocollective.com','139.226.204.2','1996-12-04 23:17:00'), - (261,'Andrea','aallen78@webnode.com','160.31.214.38','2009-07-06 07:22:37'), - (262,'Justin','jruiz79@merriam-webster.com','150.149.246.122','2005-06-06 11:44:19'), - (263,'Kenneth','kedwards7a@networksolutions.com','98.82.193.128','2001-07-03 02:00:10'), - (264,'Rachel','rday7b@miibeian.gov.cn','114.15.247.221','1994-08-18 19:45:40'), - (265,'Russell','rmiller7c@instagram.com','184.130.152.253','1977-11-06 01:58:12'), - (266,'Bonnie','bhudson7d@cornell.edu','235.180.186.206','1990-12-03 22:45:24'), - (267,'Raymond','rknight7e@yandex.ru','161.2.44.252','1995-08-25 04:31:19'), - (268,'Bonnie','brussell7f@elpais.com','199.237.57.207','1991-03-29 08:32:06'), - 
(269,'Marie','mhenderson7g@elpais.com','52.203.131.144','2004-06-04 21:50:28'), - (270,'Alan','acarr7h@trellian.com','147.51.205.72','2005-03-03 10:51:31'), - (271,'Barbara','bturner7i@hugedomains.com','103.160.110.226','2004-08-04 13:42:40'), - (272,'Christina','cdaniels7j@census.gov','0.238.61.251','1972-10-18 12:47:33'), - (273,'Jeremy','jgomez7k@reuters.com','111.26.65.56','2013-01-13 10:41:35'), - (274,'Laura','lwood7l@icio.us','149.153.38.205','2011-06-25 09:33:59'), - (275,'Matthew','mbowman7m@auda.org.au','182.138.206.172','1999-03-05 03:25:36'), - (276,'Denise','dparker7n@icq.com','0.213.88.138','2011-11-04 09:43:06'), - (277,'Phillip','pparker7o@discuz.net','219.242.165.240','1973-10-19 04:22:29'), - (278,'Joan','jpierce7p@salon.com','63.31.213.202','1989-04-09 22:06:24'), - (279,'Irene','ibaker7q@cbc.ca','102.33.235.114','1992-09-04 13:00:57'), - (280,'Betty','bbowman7r@ted.com','170.91.249.242','2015-09-28 08:14:22'), - (281,'Teresa','truiz7s@boston.com','82.108.158.207','1999-07-18 05:17:09'), - (282,'Helen','hbrooks7t@slideshare.net','102.87.162.187','2003-01-06 15:45:29'), - (283,'Karen','kgriffin7u@wunderground.com','43.82.44.184','2010-05-28 01:56:37'), - (284,'Lisa','lfernandez7v@mtv.com','200.238.218.220','1993-04-03 20:33:51'), - (285,'Jesse','jlawrence7w@timesonline.co.uk','95.122.105.78','1990-01-05 17:28:43'), - (286,'Terry','tross7x@macromedia.com','29.112.114.133','2009-08-29 21:32:17'), - (287,'Angela','abradley7y@icq.com','177.44.27.72','1989-10-04 21:46:06'), - (288,'Maria','mhart7z@dailymotion.com','55.27.55.202','1975-01-21 01:22:57'), - (289,'Raymond','randrews80@pinterest.com','88.90.78.67','1992-03-16 21:37:40'), - (290,'Kathy','krice81@bluehost.com','212.63.196.102','2000-12-14 03:06:44'), - (291,'Cynthia','cramos82@nymag.com','107.89.190.6','2005-06-28 02:02:33'), - (292,'Kimberly','kjones83@mysql.com','86.169.101.101','2007-06-13 22:56:49'), - (293,'Timothy','thansen84@microsoft.com','108.100.254.90','2003-04-04 10:31:57'), - (294,'Carol','cspencer85@berkeley.edu','75.118.144.187','1999-03-30 14:53:21'), - (295,'Louis','lmedina86@latimes.com','141.147.163.24','1991-04-11 17:53:13'), - (296,'Margaret','mcole87@google.fr','53.184.26.83','1991-12-19 01:54:10'), - (297,'Mary','mgomez88@yellowpages.com','208.56.57.99','1976-05-21 18:05:08'), - (298,'Amanda','aanderson89@geocities.com','147.73.15.252','1987-08-22 15:05:28'), - (299,'Kathryn','kgarrett8a@nature.com','27.29.177.220','1976-07-15 04:25:04'), - (300,'Dorothy','dmason8b@shareasale.com','106.210.99.193','1990-09-03 21:39:31'), - (301,'Lois','lkennedy8c@amazon.de','194.169.29.187','2007-07-29 14:09:31'), - (302,'Irene','iburton8d@washingtonpost.com','196.143.110.249','2013-09-05 11:32:46'), - (303,'Betty','belliott8e@wired.com','183.105.222.199','1979-09-19 19:29:13'), - (304,'Bobby','bmeyer8f@census.gov','36.13.161.145','2014-05-24 14:34:39'), - (305,'Ann','amorrison8g@sfgate.com','72.154.54.137','1978-10-05 14:22:34'), - (306,'Daniel','djackson8h@wunderground.com','144.95.32.34','1990-07-27 13:23:05'), - (307,'Joe','jboyd8i@alibaba.com','187.105.86.178','2011-09-28 16:46:32'), - (308,'Ralph','rdunn8j@fc2.com','3.19.87.255','1984-10-18 08:00:40'), - (309,'Craig','ccarter8k@gizmodo.com','235.152.76.215','1998-07-04 12:15:21'), - (310,'Paula','pdean8l@hhs.gov','161.100.173.197','1973-02-13 09:38:55'), - (311,'Andrew','agarrett8m@behance.net','199.253.123.218','1991-02-14 13:36:32'), - (312,'Janet','jhowell8n@alexa.com','39.189.139.79','2012-11-24 20:17:33'), - 
(313,'Keith','khansen8o@godaddy.com','116.186.223.196','1987-08-23 21:22:05'), - (314,'Nicholas','nedwards8p@state.gov','142.175.142.11','1977-03-28 18:27:27'), - (315,'Jacqueline','jallen8q@oaic.gov.au','189.66.135.192','1994-10-26 11:44:26'), - (316,'Frank','fgardner8r@mapy.cz','154.77.119.169','1983-01-29 19:19:51'), - (317,'Eric','eharrison8s@google.cn','245.139.65.123','1984-02-04 09:54:36'), - (318,'Gregory','gcooper8t@go.com','171.147.0.221','2004-06-14 05:22:08'), - (319,'Jean','jfreeman8u@rakuten.co.jp','67.243.121.5','1977-01-07 18:23:43'), - (320,'Juan','jlewis8v@shinystat.com','216.181.171.189','2001-08-23 17:32:43'), - (321,'Randy','rwilliams8w@shinystat.com','105.152.146.28','1983-02-17 00:05:50'), - (322,'Stephen','shart8x@sciencedirect.com','196.131.205.148','2004-02-15 10:12:03'), - (323,'Annie','ahunter8y@example.com','63.36.34.103','2003-07-23 21:15:25'), - (324,'Melissa','mflores8z@cbc.ca','151.230.217.90','1983-11-02 14:53:56'), - (325,'Jane','jweaver90@about.me','0.167.235.217','1987-07-29 00:13:44'), - (326,'Anthony','asmith91@oracle.com','97.87.48.41','2001-05-31 18:44:11'), - (327,'Terry','tdavis92@buzzfeed.com','46.20.12.51','2015-09-12 23:13:55'), - (328,'Brandon','bmontgomery93@gravatar.com','252.101.48.186','2010-10-28 08:26:27'), - (329,'Chris','cmurray94@bluehost.com','25.158.167.97','2004-05-05 16:10:31'), - (330,'Denise','dfuller95@hugedomains.com','216.210.149.28','1979-04-20 08:57:24'), - (331,'Arthur','amcdonald96@sakura.ne.jp','206.42.36.213','2009-08-15 03:26:16'), - (332,'Jesse','jhoward97@google.cn','46.181.118.30','1974-04-18 14:08:41'), - (333,'Frank','fsimpson98@domainmarket.com','163.220.211.87','2006-06-30 14:46:52'), - (334,'Janice','jwoods99@pen.io','229.245.237.182','1988-04-06 11:52:58'), - (335,'Rebecca','rroberts9a@huffingtonpost.com','148.96.15.80','1976-10-05 08:44:16'), - (336,'Joshua','jray9b@opensource.org','192.253.12.198','1971-12-25 22:27:07'), - (337,'Joyce','jcarpenter9c@statcounter.com','125.171.46.215','2001-12-31 22:08:13'), - (338,'Andrea','awest9d@privacy.gov.au','79.101.180.201','1983-02-18 20:07:47'), - (339,'Christine','chudson9e@yelp.com','64.198.43.56','1997-09-08 08:03:43'), - (340,'Joe','jparker9f@earthlink.net','251.215.148.153','1973-11-04 05:08:18'), - (341,'Thomas','tkim9g@answers.com','49.187.34.47','1991-08-07 21:13:48'), - (342,'Janice','jdean9h@scientificamerican.com','4.197.117.16','2009-12-08 02:35:49'), - (343,'James','jmitchell9i@umich.edu','43.121.18.147','2011-04-28 17:04:09'), - (344,'Charles','cgardner9j@purevolume.com','197.78.240.240','1998-02-11 06:47:07'), - (345,'Robert','rhenderson9k@friendfeed.com','215.84.180.88','2002-05-10 15:33:14'), - (346,'Chris','cgray9l@4shared.com','249.70.192.240','1998-10-03 16:43:42'), - (347,'Gloria','ghayes9m@hibu.com','81.103.138.26','1999-12-26 11:23:13'), - (348,'Edward','eramirez9n@shareasale.com','38.136.90.136','2010-08-19 08:01:06'), - (349,'Cheryl','cbutler9o@google.ca','172.180.78.172','1995-05-27 20:03:52'), - (350,'Margaret','mwatkins9p@sfgate.com','3.20.198.6','2014-10-21 01:42:58'), - (351,'Rebecca','rwelch9q@examiner.com','45.81.42.208','2001-02-08 12:19:06'), - (352,'Joe','jpalmer9r@phpbb.com','163.202.92.190','1970-01-05 11:29:12'), - (353,'Sandra','slewis9s@dyndns.org','77.215.201.236','1974-01-05 07:04:04'), - (354,'Todd','tfranklin9t@g.co','167.125.181.82','2009-09-28 10:13:58'), - (355,'Joseph','jlewis9u@webmd.com','244.204.6.11','1990-10-21 15:49:57'), - (356,'Alan','aknight9v@nydailynews.com','152.197.95.83','1996-03-08 08:43:17'), - 
(357,'Sharon','sdean9w@123-reg.co.uk','237.46.40.26','1985-11-30 12:09:24'), - (358,'Annie','awright9x@cafepress.com','190.45.231.111','2000-08-24 11:56:06'), - (359,'Diane','dhamilton9y@youtube.com','85.146.171.196','2015-02-24 02:03:57'), - (360,'Antonio','alane9z@auda.org.au','61.63.146.203','2001-05-13 03:43:34'), - (361,'Matthew','mallena0@hhs.gov','29.97.32.19','1973-02-19 23:43:32'), - (362,'Bonnie','bfowlera1@soup.io','251.216.99.53','2013-08-01 15:35:41'), - (363,'Margaret','mgraya2@examiner.com','69.255.151.79','1998-01-23 22:24:59'), - (364,'Joan','jwagnera3@printfriendly.com','192.166.120.61','1973-07-13 00:30:22'), - (365,'Catherine','cperkinsa4@nytimes.com','58.21.24.214','2006-11-19 11:52:26'), - (366,'Mark','mcartera5@cpanel.net','220.33.102.142','2007-09-09 09:43:27'), - (367,'Paula','ppricea6@msn.com','36.182.238.124','2009-11-11 09:13:05'), - (368,'Catherine','cgreena7@army.mil','228.203.58.19','2005-08-09 16:52:15'), - (369,'Helen','hhamiltona8@symantec.com','155.56.194.99','2005-02-01 05:40:36'), - (370,'Jane','jmeyera9@ezinearticles.com','133.244.113.213','2013-11-06 22:10:23'), - (371,'Wanda','wevansaa@bloglovin.com','233.125.192.48','1994-12-26 23:43:42'), - (372,'Mark','mmarshallab@tumblr.com','114.74.60.47','2016-09-29 18:03:01'), - (373,'Andrew','amartinezac@google.cn','182.54.37.130','1976-06-06 17:04:17'), - (374,'Helen','hmoralesad@e-recht24.de','42.45.4.123','1977-03-28 19:06:59'), - (375,'Bonnie','bstoneae@php.net','196.149.79.137','1970-02-05 17:05:58'), - (376,'Douglas','dfreemanaf@nasa.gov','215.65.124.218','2008-11-20 21:51:55'), - (377,'Willie','wwestag@army.mil','35.189.92.118','1992-07-24 05:08:08'), - (378,'Cheryl','cwagnerah@upenn.edu','228.239.222.141','2010-01-25 06:29:01'), - (379,'Sandra','swardai@baidu.com','63.11.113.240','1985-05-23 08:07:37'), - (380,'Julie','jrobinsonaj@jugem.jp','110.58.202.50','2015-03-05 09:42:07'), - (381,'Larry','lwagnerak@shop-pro.jp','98.234.25.24','1975-07-22 22:22:02'), - (382,'Juan','jcastilloal@yelp.com','24.174.74.202','2007-01-17 09:32:43'), - (383,'Donna','dfrazieram@artisteer.com','205.26.147.45','1990-02-11 20:55:46'), - (384,'Rachel','rfloresan@w3.org','109.60.216.162','1983-05-22 22:42:18'), - (385,'Robert','rreynoldsao@theguardian.com','122.65.209.130','2009-05-01 18:02:51'), - (386,'Donald','dbradleyap@etsy.com','42.54.35.126','1997-01-16 16:31:52'), - (387,'Rachel','rfisheraq@nih.gov','160.243.250.45','2006-02-17 22:05:49'), - (388,'Nicholas','nhamiltonar@princeton.edu','156.211.37.111','1976-06-21 03:36:29'), - (389,'Timothy','twhiteas@ca.gov','36.128.23.70','1975-09-24 03:51:18'), - (390,'Diana','dbradleyat@odnoklassniki.ru','44.102.120.184','1983-04-27 09:02:50'), - (391,'Billy','bfowlerau@jimdo.com','91.200.68.196','1995-01-29 06:57:35'), - (392,'Bruce','bandrewsav@ucoz.com','48.12.101.125','1992-10-27 04:31:39'), - (393,'Linda','lromeroaw@usa.gov','100.71.233.19','1992-06-08 15:13:18'), - (394,'Debra','dwatkinsax@ucoz.ru','52.160.233.193','2001-11-11 06:51:01'), - (395,'Katherine','kburkeay@wix.com','151.156.242.141','2010-06-14 19:54:28'), - (396,'Martha','mharrisonaz@youku.com','21.222.10.199','1989-10-16 14:17:55'), - (397,'Dennis','dwellsb0@youtu.be','103.16.29.3','1985-12-21 06:05:51'), - (398,'Gloria','grichardsb1@bloglines.com','90.147.120.234','1982-08-27 01:04:43'), - (399,'Brenda','bfullerb2@t.co','33.253.63.90','2011-04-20 05:00:35'), - (400,'Larry','lhendersonb3@disqus.com','88.95.132.128','1982-08-31 02:15:12'), - (401,'Richard','rlarsonb4@wisc.edu','13.48.231.150','1979-04-15 14:08:09'), -
(402,'Terry','thuntb5@usa.gov','65.91.103.240','1998-05-15 11:50:49'), - (403,'Harry','hburnsb6@nasa.gov','33.38.21.244','1981-04-12 14:02:20'), - (404,'Diana','dellisb7@mlb.com','218.229.81.135','1997-01-29 00:17:25'), - (405,'Jack','jburkeb8@tripadvisor.com','210.227.182.216','1984-03-09 17:24:03'), - (406,'Julia','jlongb9@fotki.com','10.210.12.104','2005-10-26 03:54:13'), - (407,'Lois','lscottba@msu.edu','188.79.136.138','1973-02-02 18:40:39'), - (408,'Sandra','shendersonbb@shareasale.com','114.171.220.108','2012-06-09 18:22:26'), - (409,'Irene','isanchezbc@cdbaby.com','109.255.50.119','1983-09-28 21:11:27'), - (410,'Emily','ebrooksbd@bandcamp.com','227.81.93.79','1970-08-31 21:08:01'), - (411,'Michelle','mdiazbe@businessweek.com','236.249.6.226','1993-05-22 08:07:07'), - (412,'Tammy','tbennettbf@wisc.edu','145.253.239.152','1978-12-31 20:24:51'), - (413,'Christine','cgreenebg@flickr.com','97.25.140.118','1978-07-17 12:55:30'), - (414,'Patricia','pgarzabh@tuttocitta.it','139.246.192.211','1984-02-27 13:40:08'), - (415,'Kimberly','kromerobi@aol.com','73.56.88.247','1976-09-16 14:22:04'), - (416,'George','gjohnstonbj@fda.gov','240.36.245.185','1979-07-24 14:36:02'), - (417,'Eugene','efullerbk@sciencedaily.com','42.38.105.140','2012-09-12 01:56:41'), - (418,'Andrea','astevensbl@goo.gl','31.152.207.204','1979-05-24 11:06:21'), - (419,'Shirley','sreidbm@scientificamerican.com','103.60.31.241','1984-02-23 04:07:41'), - (420,'Terry','tmorenobn@blinklist.com','92.161.34.42','1994-06-25 14:01:35'), - (421,'Christopher','cmorenobo@go.com','158.86.176.82','1973-09-05 09:18:47'), - (422,'Dennis','dhansonbp@ning.com','40.160.81.75','1982-01-20 10:19:41'), - (423,'Beverly','brussellbq@de.vu','138.32.56.204','1997-11-06 07:20:19'), - (424,'Howard','hparkerbr@163.com','103.171.134.171','2015-06-24 15:37:10'), - (425,'Helen','hmccoybs@fema.gov','61.200.4.71','1995-06-20 08:59:10'), - (426,'Ann','ahudsonbt@cafepress.com','239.187.71.125','1977-04-11 07:59:28'), - (427,'Tina','twestbu@nhs.uk','80.213.117.74','1992-08-19 05:54:44'), - (428,'Terry','tnguyenbv@noaa.gov','21.93.118.95','1991-09-19 23:22:55'), - (429,'Ashley','aburtonbw@wix.com','233.176.205.109','2009-11-10 05:01:20'), - (430,'Eric','emyersbx@1und1.de','168.91.212.67','1987-08-10 07:16:20'), - (431,'Barbara','blittleby@lycos.com','242.14.189.239','2008-08-02 12:13:04'), - (432,'Sean','sevansbz@instagram.com','14.39.177.13','2007-04-16 17:28:49'), - (433,'Shirley','sburtonc0@newsvine.com','34.107.138.76','1980-12-10 02:19:29'), - (434,'Patricia','pfreemanc1@so-net.ne.jp','219.213.142.117','1987-03-01 02:25:45'), - (435,'Paula','pfosterc2@vkontakte.ru','227.14.138.141','1972-09-22 12:59:34'), - (436,'Nicole','nstewartc3@1688.com','8.164.23.115','1998-10-27 00:10:17'), - (437,'Earl','ekimc4@ovh.net','100.26.244.177','2013-01-22 10:05:46'), - (438,'Beverly','breedc5@reuters.com','174.12.226.27','1974-09-22 07:29:36'), - (439,'Lawrence','lbutlerc6@a8.net','105.164.42.164','1992-06-05 00:43:40'), - (440,'Charles','cmoorec7@ucoz.com','252.197.131.69','1990-04-09 02:34:05'), - (441,'Alice','alawsonc8@live.com','183.73.220.232','1989-02-28 09:11:04'), - (442,'Dorothy','dcarpenterc9@arstechnica.com','241.47.200.14','2005-05-02 19:57:21'), - (443,'Carolyn','cfowlerca@go.com','213.109.55.202','1978-09-10 20:18:20'), - (444,'Anthony','alongcb@free.fr','169.221.158.204','1984-09-13 01:59:23'), - (445,'Annie','amoorecc@e-recht24.de','50.34.148.61','2009-03-26 03:41:07'), - (446,'Carlos','candrewscd@ihg.com','236.69.59.212','1972-03-29 22:42:48'), -
(447,'Beverly','bramosce@google.ca','164.250.184.49','1982-11-10 04:34:01'), - (448,'Teresa','tlongcf@umich.edu','174.88.53.223','1987-05-17 12:48:00'), - (449,'Roy','rboydcg@uol.com.br','91.58.243.215','1974-06-16 17:59:54'), - (450,'Ashley','afieldsch@tamu.edu','130.138.11.126','1983-09-15 05:52:36'), - (451,'Judith','jhawkinsci@cmu.edu','200.187.103.245','2003-10-22 12:24:03'), - (452,'Rebecca','rwestcj@ocn.ne.jp','72.85.3.103','1980-11-13 11:01:26'), - (453,'Raymond','rporterck@infoseek.co.jp','146.33.216.151','1982-05-17 23:58:03'), - (454,'Janet','jmarshallcl@odnoklassniki.ru','52.46.193.166','1998-10-04 00:02:21'), - (455,'Shirley','speterscm@salon.com','248.126.31.15','1987-01-30 06:04:59'), - (456,'Annie','abowmancn@economist.com','222.213.248.59','2006-03-14 23:52:59'), - (457,'Jean','jlarsonco@blogspot.com','71.41.25.195','2007-09-08 23:49:45'), - (458,'Phillip','pmoralescp@stanford.edu','74.119.87.28','2011-03-14 20:25:40'), - (459,'Norma','nrobinsoncq@economist.com','28.225.21.54','1989-10-21 01:22:43'), - (460,'Kimberly','kclarkcr@dion.ne.jp','149.171.132.153','2008-06-27 02:27:30'), - (461,'Ruby','rmorriscs@ucla.edu','177.85.163.249','2016-01-28 16:43:44'), - (462,'Jonathan','jcastilloct@tripod.com','78.4.28.77','2000-05-24 17:33:06'), - (463,'Edward','ebryantcu@jigsy.com','140.31.98.193','1992-12-17 08:32:47'), - (464,'Chris','chamiltoncv@eepurl.com','195.171.234.206','1970-12-05 03:42:19'), - (465,'Michael','mweavercw@reference.com','7.233.133.213','1987-03-29 02:30:54'), - (466,'Howard','hlawrencecx@businessweek.com','113.225.124.224','1990-07-30 07:20:57'), - (467,'Philip','phowardcy@comsenz.com','159.170.247.249','2010-10-15 10:18:37'), - (468,'Mary','mmarshallcz@xing.com','125.132.189.70','2007-07-19 13:48:47'), - (469,'Scott','salvarezd0@theguardian.com','78.49.103.230','1987-10-31 06:10:44'), - (470,'Wayne','wcarrolld1@blog.com','238.1.120.204','1980-11-19 03:26:10'), - (471,'Jennifer','jwoodsd2@multiply.com','92.20.224.49','2010-05-06 22:17:04'), - (472,'Raymond','rwelchd3@toplist.cz','176.158.35.240','2007-12-12 19:02:51'), - (473,'Steven','sdixond4@wisc.edu','167.55.237.52','1984-05-05 11:44:37'), - (474,'Ralph','rjamesd5@ameblo.jp','241.190.50.133','2000-07-06 08:44:37'), - (475,'Jason','jrobinsond6@hexun.com','138.119.139.56','2006-02-03 05:27:45'), - (476,'Doris','dwoodd7@fema.gov','180.220.156.190','1978-05-11 20:14:20'), - (477,'Elizabeth','eberryd8@youtu.be','74.188.53.229','2006-11-18 08:29:06'), - (478,'Irene','igilbertd9@privacy.gov.au','194.152.218.1','1985-09-17 02:46:52'), - (479,'Jessica','jdeanda@ameblo.jp','178.103.93.118','1974-06-07 19:04:05'), - (480,'Rachel','ralvarezdb@phoca.cz','17.22.223.174','1999-03-08 02:43:25'), - (481,'Kenneth','kthompsondc@shinystat.com','229.119.91.234','2007-05-15 13:17:32'), - (482,'Harold','hmurraydd@parallels.com','133.26.188.80','1993-11-15 03:42:07'), - (483,'Paula','phowellde@samsung.com','34.215.28.216','1993-11-29 15:55:00'), - (484,'Ruth','rpiercedf@tripadvisor.com','111.30.130.123','1986-08-17 10:19:38'), - (485,'Phyllis','paustindg@vk.com','50.84.34.178','1994-04-13 03:05:24'), - (486,'Laura','lfosterdh@usnews.com','37.8.101.33','2001-06-30 08:58:59'), - (487,'Eric','etaylordi@com.com','103.183.253.45','2006-09-15 20:18:46'), - (488,'Doris','driveradj@prweb.com','247.16.2.199','1989-05-08 09:27:09'), - (489,'Ryan','rhughesdk@elegantthemes.com','103.234.153.232','1989-08-01 18:36:06'), - (490,'Steve','smoralesdl@jigsy.com','3.76.84.207','2011-03-13 17:01:05'), -
(491,'Louis','lsullivandm@who.int','78.135.44.208','1975-11-26 16:01:23'), - (492,'Catherine','ctuckerdn@seattletimes.com','93.137.106.21','1990-03-13 16:14:56'), - (493,'Ann','adixondo@gmpg.org','191.136.222.111','2002-06-05 14:22:18'), - (494,'Johnny','jhartdp@amazon.com','103.252.198.39','1988-07-30 23:54:49'), - (495,'Susan','srichardsdq@skype.com','126.247.192.11','2005-01-09 12:08:14'), - (496,'Brenda','bparkerdr@skype.com','63.232.216.86','1974-05-18 05:58:29'), - (497,'Tammy','tmurphyds@constantcontact.com','56.56.37.112','2014-08-05 18:22:25'), - (498,'Larry','lhayesdt@wordpress.com','162.146.13.46','1997-02-26 14:01:53'), - (499,NULL,'ethomasdu@hhs.gov','6.241.88.250','2007-09-14 13:03:34'), - (500,'Paula','pshawdv@networksolutions.com','123.27.47.249','2003-10-30 21:19:20');
diff --git a/tests/integration/simple_seed_test/seeds-config/seed_disabled.csv b/tests/integration/simple_seed_test/seeds-config/seed_disabled.csv
deleted file mode 100644
index 0227dd609..000000000
--- a/tests/integration/simple_seed_test/seeds-config/seed_disabled.csv
+++ /dev/null
@@ -1,21 +0,0 @@
-id,first_name,email,ip_address,birthday -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 -2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 -3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57 -4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43 -5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29 -6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50 -7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59 -8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15 -9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48 -10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49 -11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11 -12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14 -13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36 -14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04 -15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56 -16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47 -17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35 -18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57 -19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15 -20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05
diff --git a/tests/integration/simple_seed_test/seeds-config/seed_enabled.csv b/tests/integration/simple_seed_test/seeds-config/seed_enabled.csv
deleted file mode 100644
index 0227dd609..000000000
--- a/tests/integration/simple_seed_test/seeds-config/seed_enabled.csv
+++ /dev/null
@@ -1,21 +0,0 @@
-id,first_name,email,ip_address,birthday -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 -2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 -3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57 -4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43 -5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29 -6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50 -7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59 -8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15 -9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48 -10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49 -11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11
-12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14 -13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36 -14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04 -15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56 -16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47 -17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35 -18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57 -19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15 -20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05
diff --git a/tests/integration/simple_seed_test/seeds-config/seed_tricky.csv b/tests/integration/simple_seed_test/seeds-config/seed_tricky.csv
deleted file mode 100644
index 3e90a18e3..000000000
--- a/tests/integration/simple_seed_test/seeds-config/seed_tricky.csv
+++ /dev/null
@@ -1,7 +0,0 @@
-id,id_str,a_bool,looks_like_a_bool,a_date,looks_like_a_date,relative,weekday -1,1,true,true,2019-01-01 12:32:30,2019-01-01 12:32:30,tomorrow,Saturday -2,2,True,True,2019-01-01 12:32:31,2019-01-01 12:32:31,today,Sunday -3,3,TRUE,TRUE,2019-01-01 12:32:32,2019-01-01 12:32:32,yesterday,Monday -4,4,false,false,2019-01-01 01:32:32,2019-01-01 01:32:32,tomorrow,Saturday -5,5,False,False,2019-01-01 01:32:32,2019-01-01 01:32:32,today,Sunday -6,6,FALSE,FALSE,2019-01-01 01:32:32,2019-01-01 01:32:32,yesterday,Monday
diff --git a/tests/integration/simple_seed_test/seeds/seed_actual.csv b/tests/integration/simple_seed_test/seeds/seed_actual.csv
deleted file mode 100644
index fc1b80145..000000000
--- a/tests/integration/simple_seed_test/seeds/seed_actual.csv
+++ /dev/null
@@ -1,501 +0,0 @@
-id,first_name,email,ip_address,birthday -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 -2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 -3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57 -4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43 -5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29 -6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50 -7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59 -8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15 -9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48 -10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49 -11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11 -12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14 -13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36 -14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04 -15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56 -16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47 -17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35 -18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57 -19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15 -20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05 -21,Aaron,arodriguezk@nps.gov,205.245.118.221,1985-10-11 23:07:49 -22,Patrick,pparkerl@techcrunch.com,19.8.100.182,2006-03-29 12:53:56 -23,Phillip,pmorenom@intel.com,41.38.254.103,2011-11-07 15:35:43 -24,Henry,hgarcian@newsvine.com,1.191.216.252,2008-08-28 08:30:44 -25,Irene,iturnero@opera.com,50.17.60.190,1994-04-01 07:15:02 -26,Andrew,adunnp@pen.io,123.52.253.176,2000-11-01 06:03:25 -27,David,dgutierrezq@wp.com,238.23.203.42,1988-01-25 07:29:18
-28,Henry,hsanchezr@cyberchimps.com,248.102.2.185,1983-01-01 13:36:37 -29,Evelyn,epetersons@gizmodo.com,32.80.46.119,1979-07-16 17:24:12 -30,Tammy,tmitchellt@purevolume.com,249.246.167.88,2001-04-03 10:00:23 -31,Jacqueline,jlittleu@domainmarket.com,127.181.97.47,1986-02-11 21:35:50 -32,Earl,eortizv@opera.com,166.47.248.240,1996-07-06 08:16:27 -33,Juan,jgordonw@sciencedirect.com,71.77.2.200,1987-01-31 03:46:44 -34,Diane,dhowellx@nyu.edu,140.94.133.12,1994-06-11 02:30:05 -35,Randy,rkennedyy@microsoft.com,73.255.34.196,2005-05-26 20:28:39 -36,Janice,jriveraz@time.com,22.214.227.32,1990-02-09 04:16:52 -37,Laura,lperry10@diigo.com,159.148.145.73,2015-03-17 05:59:25 -38,Gary,gray11@statcounter.com,40.193.124.56,1970-01-27 10:04:51 -39,Jesse,jmcdonald12@typepad.com,31.7.86.103,2009-03-14 08:14:29 -40,Sandra,sgonzalez13@goodreads.com,223.80.168.239,1993-05-21 14:08:54 -41,Scott,smoore14@archive.org,38.238.46.83,1980-08-30 11:16:56 -42,Phillip,pevans15@cisco.com,158.234.59.34,2011-12-15 23:26:31 -43,Steven,sriley16@google.ca,90.247.57.68,2011-10-29 19:03:28 -44,Deborah,dbrown17@hexun.com,179.125.143.240,1995-04-10 14:36:07 -45,Lori,lross18@ow.ly,64.80.162.180,1980-12-27 16:49:15 -46,Sean,sjackson19@tumblr.com,240.116.183.69,1988-06-12 21:24:45 -47,Terry,tbarnes1a@163.com,118.38.213.137,1997-09-22 16:43:19 -48,Dorothy,dross1b@ebay.com,116.81.76.49,2005-02-28 13:33:24 -49,Samuel,swashington1c@house.gov,38.191.253.40,1989-01-19 21:15:48 -50,Ralph,rcarter1d@tinyurl.com,104.84.60.174,2007-08-11 10:21:49 -51,Wayne,whudson1e@princeton.edu,90.61.24.102,1983-07-03 16:58:12 -52,Rose,rjames1f@plala.or.jp,240.83.81.10,1995-06-08 11:46:23 -53,Louise,lcox1g@theglobeandmail.com,105.11.82.145,2016-09-19 14:45:51 -54,Kenneth,kjohnson1h@independent.co.uk,139.5.45.94,1976-08-17 11:26:19 -55,Donna,dbrown1i@amazon.co.uk,19.45.169.45,2006-05-27 16:51:40 -56,Johnny,jvasquez1j@trellian.com,118.202.238.23,1975-11-17 08:42:32 -57,Patrick,pramirez1k@tamu.edu,231.25.153.198,1997-08-06 11:51:09 -58,Helen,hlarson1l@prweb.com,8.40.21.39,1993-08-04 19:53:40 -59,Patricia,pspencer1m@gmpg.org,212.198.40.15,1977-08-03 16:37:27 -60,Joseph,jspencer1n@marriott.com,13.15.63.238,2005-07-23 20:22:06 -61,Phillip,pschmidt1o@blogtalkradio.com,177.98.201.190,1976-05-19 21:47:44 -62,Joan,jwebb1p@google.ru,105.229.170.71,1972-09-07 17:53:47 -63,Phyllis,pkennedy1q@imgur.com,35.145.8.244,2000-01-01 22:33:37 -64,Katherine,khunter1r@smh.com.au,248.168.205.32,1991-01-09 06:40:24 -65,Laura,lvasquez1s@wiley.com,128.129.115.152,1997-10-23 12:04:56 -66,Juan,jdunn1t@state.gov,44.228.124.51,2004-11-10 05:07:35 -67,Judith,jholmes1u@wiley.com,40.227.179.115,1977-08-02 17:01:45 -68,Beverly,bbaker1v@wufoo.com,208.34.84.59,2016-03-06 20:07:23 -69,Lawrence,lcarr1w@flickr.com,59.158.212.223,1988-09-13 06:07:21 -70,Gloria,gwilliams1x@mtv.com,245.231.88.33,1995-03-18 22:32:46 -71,Steven,ssims1y@cbslocal.com,104.50.58.255,2001-08-05 21:26:20 -72,Betty,bmills1z@arstechnica.com,103.177.214.220,1981-12-14 21:26:54 -73,Mildred,mfuller20@prnewswire.com,151.158.8.130,2000-04-19 10:13:55 -74,Donald,dday21@icq.com,9.178.102.255,1972-12-03 00:58:24 -75,Eric,ethomas22@addtoany.com,85.2.241.227,1992-11-01 05:59:30 -76,Joyce,jarmstrong23@sitemeter.com,169.224.20.36,1985-10-24 06:50:01 -77,Maria,mmartinez24@amazonaws.com,143.189.167.135,2005-10-05 05:17:42 -78,Harry,hburton25@youtube.com,156.47.176.237,1978-03-26 05:53:33 -79,Kevin,klawrence26@hao123.com,79.136.183.83,1994-10-12 04:38:52 -80,David,dhall27@prweb.com,133.149.172.153,1976-12-15 16:24:24 
-81,Kathy,kperry28@twitter.com,229.242.72.228,1979-03-04 02:58:56 -82,Adam,aprice29@elegantthemes.com,13.145.21.10,1982-11-07 11:46:59 -83,Brandon,bgriffin2a@va.gov,73.249.128.212,2013-10-30 05:30:36 -84,Henry,hnguyen2b@discovery.com,211.36.214.242,1985-01-09 06:37:27 -85,Eric,esanchez2c@edublogs.org,191.166.188.251,2004-05-01 23:21:42 -86,Jason,jlee2d@jimdo.com,193.92.16.182,1973-01-08 09:05:39 -87,Diana,drichards2e@istockphoto.com,19.130.175.245,1994-10-05 22:50:49 -88,Andrea,awelch2f@abc.net.au,94.155.233.96,2002-04-26 08:41:44 -89,Louis,lwagner2g@miitbeian.gov.cn,26.217.34.111,2003-08-25 07:56:39 -90,Jane,jsims2h@seesaa.net,43.4.220.135,1987-03-20 20:39:04 -91,Larry,lgrant2i@si.edu,97.126.79.34,2000-09-07 20:26:19 -92,Louis,ldean2j@prnewswire.com,37.148.40.127,2011-09-16 20:12:14 -93,Jennifer,jcampbell2k@xing.com,38.106.254.142,1988-07-15 05:06:49 -94,Wayne,wcunningham2l@google.com.hk,223.28.26.187,2009-12-15 06:16:54 -95,Lori,lstevens2m@icq.com,181.250.181.58,1984-10-28 03:29:19 -96,Judy,jsimpson2n@marriott.com,180.121.239.219,1986-02-07 15:18:10 -97,Phillip,phoward2o@usa.gov,255.247.0.175,2002-12-26 08:44:45 -98,Gloria,gwalker2p@usa.gov,156.140.7.128,1997-10-04 07:58:58 -99,Paul,pjohnson2q@umn.edu,183.59.198.197,1991-11-14 12:33:55 -100,Frank,fgreene2r@blogspot.com,150.143.68.121,2010-06-12 23:55:39 -101,Deborah,dknight2s@reverbnation.com,222.131.211.191,1970-07-08 08:54:23 -102,Sandra,sblack2t@tripadvisor.com,254.183.128.254,2000-04-12 02:39:36 -103,Edward,eburns2u@dailymotion.com,253.89.118.18,1993-10-10 10:54:01 -104,Anthony,ayoung2v@ustream.tv,118.4.193.176,1978-08-26 17:07:29 -105,Donald,dlawrence2w@wp.com,139.200.159.227,2007-07-21 20:56:20 -106,Matthew,mfreeman2x@google.fr,205.26.239.92,2014-12-05 17:05:39 -107,Sean,ssanders2y@trellian.com,143.89.82.108,1993-07-14 21:45:02 -108,Sharon,srobinson2z@soundcloud.com,66.234.247.54,1977-04-06 19:07:03 -109,Jennifer,jwatson30@t-online.de,196.102.127.7,1998-03-07 05:12:23 -110,Clarence,cbrooks31@si.edu,218.93.234.73,2002-11-06 17:22:25 -111,Jose,jflores32@goo.gl,185.105.244.231,1995-01-05 06:32:21 -112,George,glee33@adobe.com,173.82.249.196,2015-01-04 02:47:46 -113,Larry,lhill34@linkedin.com,66.5.206.195,2010-11-02 10:21:17 -114,Marie,mmeyer35@mysql.com,151.152.88.107,1990-05-22 20:52:51 -115,Clarence,cwebb36@skype.com,130.198.55.217,1972-10-27 07:38:54 -116,Sarah,scarter37@answers.com,80.89.18.153,1971-08-24 19:29:30 -117,Henry,hhughes38@webeden.co.uk,152.60.114.174,1973-01-27 09:00:42 -118,Teresa,thenry39@hao123.com,32.187.239.106,2015-11-06 01:48:44 -119,Billy,bgutierrez3a@sun.com,52.37.70.134,2002-03-19 03:20:19 -120,Anthony,agibson3b@github.io,154.251.232.213,1991-04-19 01:08:15 -121,Sandra,sromero3c@wikia.com,44.124.171.2,1998-09-06 20:30:34 -122,Paula,pandrews3d@blogs.com,153.142.118.226,2003-06-24 16:31:24 -123,Terry,tbaker3e@csmonitor.com,99.120.45.219,1970-12-09 23:57:21 -124,Lois,lwilson3f@reuters.com,147.44.171.83,1971-01-09 22:28:51 -125,Sara,smorgan3g@nature.com,197.67.192.230,1992-01-28 20:33:24 -126,Charles,ctorres3h@china.com.cn,156.115.216.2,1993-10-02 19:36:34 -127,Richard,ralexander3i@marriott.com,248.235.180.59,1999-02-03 18:40:55 -128,Christina,charper3j@cocolog-nifty.com,152.114.116.129,1978-09-13 00:37:32 -129,Steve,sadams3k@economist.com,112.248.91.98,2004-03-21 09:07:43 -130,Katherine,krobertson3l@ow.ly,37.220.107.28,1977-03-18 19:28:50 -131,Donna,dgibson3m@state.gov,222.218.76.221,1999-02-01 06:46:16 -132,Christina,cwest3n@mlb.com,152.114.6.160,1979-12-24 15:30:35 
-133,Sandra,swillis3o@meetup.com,180.71.49.34,1984-09-27 08:05:54 -134,Clarence,cedwards3p@smugmug.com,10.64.180.186,1979-04-16 16:52:10 -135,Ruby,rjames3q@wp.com,98.61.54.20,2007-01-13 14:25:52 -136,Sarah,smontgomery3r@tripod.com,91.45.164.172,2009-07-25 04:34:30 -137,Sarah,soliver3s@eventbrite.com,30.106.39.146,2012-05-09 22:12:33 -138,Deborah,dwheeler3t@biblegateway.com,59.105.213.173,1999-11-09 08:08:44 -139,Deborah,dray3u@i2i.jp,11.108.186.217,2014-02-04 03:15:19 -140,Paul,parmstrong3v@alexa.com,6.250.59.43,2009-12-21 10:08:53 -141,Aaron,abishop3w@opera.com,207.145.249.62,1996-04-25 23:20:23 -142,Henry,hsanders3x@google.ru,140.215.203.171,2012-01-29 11:52:32 -143,Anne,aanderson3y@1688.com,74.150.102.118,1982-04-03 13:46:17 -144,Victor,vmurphy3z@hugedomains.com,222.155.99.152,1987-11-03 19:58:41 -145,Evelyn,ereid40@pbs.org,249.122.33.117,1977-12-14 17:09:57 -146,Brian,bgonzalez41@wikia.com,246.254.235.141,1991-02-24 00:45:58 -147,Sandra,sgray42@squarespace.com,150.73.28.159,1972-07-28 17:26:32 -148,Alice,ajones43@a8.net,78.253.12.177,2002-12-05 16:57:46 -149,Jessica,jhanson44@mapquest.com,87.229.30.160,1994-01-30 11:40:04 -150,Louise,lbailey45@reuters.com,191.219.31.101,2011-09-07 21:11:45 -151,Christopher,cgonzalez46@printfriendly.com,83.137.213.239,1984-10-24 14:58:04 -152,Gregory,gcollins47@yandex.ru,28.176.10.115,1998-07-25 17:17:10 -153,Jane,jperkins48@usnews.com,46.53.164.159,1979-08-19 15:25:00 -154,Phyllis,plong49@yahoo.co.jp,208.140.88.2,1985-07-06 02:16:36 -155,Adam,acarter4a@scribd.com,78.48.148.204,2005-07-20 03:31:09 -156,Frank,fweaver4b@angelfire.com,199.180.255.224,2011-03-04 23:07:54 -157,Ronald,rmurphy4c@cloudflare.com,73.42.97.231,1991-01-11 10:39:41 -158,Richard,rmorris4d@e-recht24.de,91.9.97.223,2009-01-17 21:05:15 -159,Rose,rfoster4e@woothemes.com,203.169.53.16,1991-04-21 02:09:38 -160,George,ggarrett4f@uiuc.edu,186.61.5.167,1989-11-11 11:29:42 -161,Victor,vhamilton4g@biblegateway.com,121.229.138.38,2012-06-22 18:01:23 -162,Mark,mbennett4h@businessinsider.com,209.184.29.203,1980-04-16 15:26:34 -163,Martin,mwells4i@ifeng.com,97.223.55.105,2010-05-26 14:08:18 -164,Diana,dstone4j@google.ru,90.155.52.47,2013-02-11 00:14:54 -165,Walter,wferguson4k@blogger.com,30.63.212.44,1986-02-20 17:46:46 -166,Denise,dcoleman4l@vistaprint.com,10.209.153.77,1992-05-13 20:14:14 -167,Philip,pknight4m@xing.com,15.28.135.167,2000-09-11 18:41:13 -168,Russell,rcarr4n@youtube.com,113.55.165.50,2008-07-10 17:49:27 -169,Donna,dburke4o@dion.ne.jp,70.0.105.111,1992-02-10 17:24:58 -170,Anne,along4p@squidoo.com,36.154.58.107,2012-08-19 23:35:31 -171,Clarence,cbanks4q@webeden.co.uk,94.57.53.114,1972-03-11 21:46:44 -172,Betty,bbowman4r@cyberchimps.com,178.115.209.69,2013-01-13 21:34:51 -173,Andrew,ahudson4s@nytimes.com,84.32.252.144,1998-09-15 14:20:04 -174,Keith,kgordon4t@cam.ac.uk,189.237.211.102,2009-01-22 05:34:38 -175,Patrick,pwheeler4u@mysql.com,47.22.117.226,1984-09-05 22:33:15 -176,Jesse,jfoster4v@mapquest.com,229.95.131.46,1990-01-20 12:19:15 -177,Arthur,afisher4w@jugem.jp,107.255.244.98,1983-10-13 11:08:46 -178,Nicole,nryan4x@wsj.com,243.211.33.221,1974-05-30 23:19:14 -179,Bruce,bjohnson4y@sfgate.com,17.41.200.101,1992-09-23 02:02:19 -180,Terry,tcox4z@reference.com,20.189.120.106,1982-02-13 12:43:14 -181,Ashley,astanley50@kickstarter.com,86.3.56.98,1976-05-09 01:27:16 -182,Michael,mrivera51@about.me,72.118.249.0,1971-11-11 17:28:37 -183,Steven,sgonzalez52@mozilla.org,169.112.247.47,2002-08-24 14:59:25 -184,Kathleen,kfuller53@bloglovin.com,80.93.59.30,2002-03-11 13:41:29 
-185,Nicole,nhenderson54@usda.gov,39.253.60.30,1995-04-24 05:55:07 -186,Ralph,rharper55@purevolume.com,167.147.142.189,1980-02-10 18:35:45 -187,Heather,hcunningham56@photobucket.com,96.222.196.229,2007-06-15 05:37:50 -188,Nancy,nlittle57@cbc.ca,241.53.255.175,2007-07-12 23:42:48 -189,Juan,jramirez58@pinterest.com,190.128.84.27,1978-11-07 23:37:37 -190,Beverly,bfowler59@chronoengine.com,54.144.230.49,1979-03-31 23:27:28 -191,Shirley,sstevens5a@prlog.org,200.97.231.248,2011-12-06 07:08:50 -192,Annie,areyes5b@squidoo.com,223.32.182.101,2011-05-28 02:42:09 -193,Jack,jkelley5c@tiny.cc,47.34.118.150,1981-12-05 17:31:40 -194,Keith,krobinson5d@1und1.de,170.210.209.31,1999-03-09 11:05:43 -195,Joseph,jmiller5e@google.com.au,136.74.212.139,1984-10-08 13:18:20 -196,Annie,aday5f@blogspot.com,71.99.186.69,1986-02-18 12:27:34 -197,Nancy,nperez5g@liveinternet.ru,28.160.6.107,1983-10-20 17:51:20 -198,Tammy,tward5h@ucoz.ru,141.43.164.70,1980-03-31 04:45:29 -199,Doris,dryan5i@ted.com,239.117.202.188,1985-07-03 03:17:53 -200,Rose,rmendoza5j@photobucket.com,150.200.206.79,1973-04-21 21:36:40 -201,Cynthia,cbutler5k@hubpages.com,80.153.174.161,2001-01-20 01:42:26 -202,Samuel,soliver5l@people.com.cn,86.127.246.140,1970-09-02 02:19:00 -203,Carl,csanchez5m@mysql.com,50.149.237.107,1993-12-01 07:02:09 -204,Kathryn,kowens5n@geocities.jp,145.166.205.201,2004-07-06 18:39:33 -205,Nicholas,nnichols5o@parallels.com,190.240.66.170,2014-11-11 18:52:19 -206,Keith,kwillis5p@youtube.com,181.43.206.100,1998-06-13 06:30:51 -207,Justin,jwebb5q@intel.com,211.54.245.74,2000-11-04 16:58:26 -208,Gary,ghicks5r@wikipedia.org,196.154.213.104,1992-12-01 19:48:28 -209,Martin,mpowell5s@flickr.com,153.67.12.241,1983-06-30 06:24:32 -210,Brenda,bkelley5t@xinhuanet.com,113.100.5.172,2005-01-08 20:50:22 -211,Edward,eray5u@a8.net,205.187.246.65,2011-09-26 08:04:44 -212,Steven,slawson5v@senate.gov,238.150.250.36,1978-11-22 02:48:09 -213,Robert,rthompson5w@furl.net,70.7.89.236,2001-09-12 08:52:07 -214,Jack,jporter5x@diigo.com,220.172.29.99,1976-07-26 14:29:21 -215,Lisa,ljenkins5y@oakley.com,150.151.170.180,2010-03-20 19:21:16 -216,Theresa,tbell5z@mayoclinic.com,247.25.53.173,2001-03-11 05:36:40 -217,Jimmy,jstephens60@weather.com,145.101.93.235,1983-04-12 09:35:30 -218,Louis,lhunt61@amazon.co.jp,78.137.6.253,1997-08-29 19:34:34 -219,Lawrence,lgilbert62@ted.com,243.132.8.78,2015-04-08 22:06:56 -220,David,dgardner63@4shared.com,204.40.46.136,1971-07-09 03:29:11 -221,Charles,ckennedy64@gmpg.org,211.83.233.2,2011-02-26 11:55:04 -222,Lillian,lbanks65@msu.edu,124.233.12.80,2010-05-16 20:29:02 -223,Ernest,enguyen66@baidu.com,82.45.128.148,1996-07-04 10:07:04 -224,Ryan,rrussell67@cloudflare.com,202.53.240.223,1983-08-05 12:36:29 -225,Donald,ddavis68@ustream.tv,47.39.218.137,1989-05-27 02:30:56 -226,Joe,jscott69@blogspot.com,140.23.131.75,1973-03-16 12:21:31 -227,Anne,amarshall6a@google.ca,113.162.200.197,1988-12-09 03:38:29 -228,Willie,wturner6b@constantcontact.com,85.83.182.249,1991-10-06 01:51:10 -229,Nicole,nwilson6c@sogou.com,30.223.51.135,1977-05-29 19:54:56 -230,Janet,jwheeler6d@stumbleupon.com,153.194.27.144,2011-03-13 12:48:47 -231,Lois,lcarr6e@statcounter.com,0.41.36.53,1993-02-06 04:52:01 -232,Shirley,scruz6f@tmall.com,37.156.39.223,2007-02-18 17:47:01 -233,Patrick,pford6g@reverbnation.com,36.198.200.89,1977-03-06 15:47:24 -234,Lisa,lhudson6h@usatoday.com,134.213.58.137,2014-10-28 01:56:56 -235,Pamela,pmartinez6i@opensource.org,5.151.127.202,1987-11-30 16:44:47 -236,Larry,lperez6j@infoseek.co.jp,235.122.96.148,1979-01-18 06:33:45 
-237,Pamela,pramirez6k@census.gov,138.233.34.163,2012-01-29 10:35:20 -238,Daniel,dcarr6l@php.net,146.21.152.242,1984-11-17 08:22:59 -239,Patrick,psmith6m@indiegogo.com,136.222.199.36,2001-05-30 22:16:44 -240,Raymond,rhenderson6n@hc360.com,116.31.112.38,2000-01-05 20:35:41 -241,Teresa,treynolds6o@miitbeian.gov.cn,198.126.205.220,1996-11-08 01:27:31 -242,Johnny,jmason6p@flickr.com,192.8.232.114,2013-05-14 05:35:50 -243,Angela,akelly6q@guardian.co.uk,234.116.60.197,1977-08-20 02:05:17 -244,Douglas,dcole6r@cmu.edu,128.135.212.69,2016-10-26 17:40:36 -245,Frances,fcampbell6s@twitpic.com,94.22.243.235,1987-04-26 07:07:13 -246,Donna,dgreen6t@chron.com,227.116.46.107,2011-07-25 12:59:54 -247,Benjamin,bfranklin6u@redcross.org,89.141.142.89,1974-05-03 20:28:18 -248,Randy,rpalmer6v@rambler.ru,70.173.63.178,2011-12-20 17:40:18 -249,Melissa,mmurray6w@bbb.org,114.234.118.137,1991-02-26 12:45:44 -250,Jean,jlittle6x@epa.gov,141.21.163.254,1991-08-16 04:57:09 -251,Daniel,dolson6y@nature.com,125.75.104.97,2010-04-23 06:25:54 -252,Kathryn,kwells6z@eventbrite.com,225.104.28.249,2015-01-31 02:21:50 -253,Theresa,tgonzalez70@ox.ac.uk,91.93.156.26,1971-12-11 10:31:31 -254,Beverly,broberts71@bluehost.com,244.40.158.89,2013-09-21 13:02:31 -255,Pamela,pmurray72@netscape.com,218.54.95.216,1985-04-16 00:34:00 -256,Timothy,trichardson73@amazonaws.com,235.49.24.229,2000-11-11 09:48:28 -257,Mildred,mpalmer74@is.gd,234.125.95.132,1992-05-25 02:25:02 -258,Jessica,jcampbell75@google.it,55.98.30.140,2014-08-26 00:26:34 -259,Beverly,bthomas76@cpanel.net,48.78.228.176,1970-08-18 10:40:05 -260,Eugene,eward77@cargocollective.com,139.226.204.2,1996-12-04 23:17:00 -261,Andrea,aallen78@webnode.com,160.31.214.38,2009-07-06 07:22:37 -262,Justin,jruiz79@merriam-webster.com,150.149.246.122,2005-06-06 11:44:19 -263,Kenneth,kedwards7a@networksolutions.com,98.82.193.128,2001-07-03 02:00:10 -264,Rachel,rday7b@miibeian.gov.cn,114.15.247.221,1994-08-18 19:45:40 -265,Russell,rmiller7c@instagram.com,184.130.152.253,1977-11-06 01:58:12 -266,Bonnie,bhudson7d@cornell.edu,235.180.186.206,1990-12-03 22:45:24 -267,Raymond,rknight7e@yandex.ru,161.2.44.252,1995-08-25 04:31:19 -268,Bonnie,brussell7f@elpais.com,199.237.57.207,1991-03-29 08:32:06 -269,Marie,mhenderson7g@elpais.com,52.203.131.144,2004-06-04 21:50:28 -270,Alan,acarr7h@trellian.com,147.51.205.72,2005-03-03 10:51:31 -271,Barbara,bturner7i@hugedomains.com,103.160.110.226,2004-08-04 13:42:40 -272,Christina,cdaniels7j@census.gov,0.238.61.251,1972-10-18 12:47:33 -273,Jeremy,jgomez7k@reuters.com,111.26.65.56,2013-01-13 10:41:35 -274,Laura,lwood7l@icio.us,149.153.38.205,2011-06-25 09:33:59 -275,Matthew,mbowman7m@auda.org.au,182.138.206.172,1999-03-05 03:25:36 -276,Denise,dparker7n@icq.com,0.213.88.138,2011-11-04 09:43:06 -277,Phillip,pparker7o@discuz.net,219.242.165.240,1973-10-19 04:22:29 -278,Joan,jpierce7p@salon.com,63.31.213.202,1989-04-09 22:06:24 -279,Irene,ibaker7q@cbc.ca,102.33.235.114,1992-09-04 13:00:57 -280,Betty,bbowman7r@ted.com,170.91.249.242,2015-09-28 08:14:22 -281,Teresa,truiz7s@boston.com,82.108.158.207,1999-07-18 05:17:09 -282,Helen,hbrooks7t@slideshare.net,102.87.162.187,2003-01-06 15:45:29 -283,Karen,kgriffin7u@wunderground.com,43.82.44.184,2010-05-28 01:56:37 -284,Lisa,lfernandez7v@mtv.com,200.238.218.220,1993-04-03 20:33:51 -285,Jesse,jlawrence7w@timesonline.co.uk,95.122.105.78,1990-01-05 17:28:43 -286,Terry,tross7x@macromedia.com,29.112.114.133,2009-08-29 21:32:17 -287,Angela,abradley7y@icq.com,177.44.27.72,1989-10-04 21:46:06 
-288,Maria,mhart7z@dailymotion.com,55.27.55.202,1975-01-21 01:22:57 -289,Raymond,randrews80@pinterest.com,88.90.78.67,1992-03-16 21:37:40 -290,Kathy,krice81@bluehost.com,212.63.196.102,2000-12-14 03:06:44 -291,Cynthia,cramos82@nymag.com,107.89.190.6,2005-06-28 02:02:33 -292,Kimberly,kjones83@mysql.com,86.169.101.101,2007-06-13 22:56:49 -293,Timothy,thansen84@microsoft.com,108.100.254.90,2003-04-04 10:31:57 -294,Carol,cspencer85@berkeley.edu,75.118.144.187,1999-03-30 14:53:21 -295,Louis,lmedina86@latimes.com,141.147.163.24,1991-04-11 17:53:13 -296,Margaret,mcole87@google.fr,53.184.26.83,1991-12-19 01:54:10 -297,Mary,mgomez88@yellowpages.com,208.56.57.99,1976-05-21 18:05:08 -298,Amanda,aanderson89@geocities.com,147.73.15.252,1987-08-22 15:05:28 -299,Kathryn,kgarrett8a@nature.com,27.29.177.220,1976-07-15 04:25:04 -300,Dorothy,dmason8b@shareasale.com,106.210.99.193,1990-09-03 21:39:31 -301,Lois,lkennedy8c@amazon.de,194.169.29.187,2007-07-29 14:09:31 -302,Irene,iburton8d@washingtonpost.com,196.143.110.249,2013-09-05 11:32:46 -303,Betty,belliott8e@wired.com,183.105.222.199,1979-09-19 19:29:13 -304,Bobby,bmeyer8f@census.gov,36.13.161.145,2014-05-24 14:34:39 -305,Ann,amorrison8g@sfgate.com,72.154.54.137,1978-10-05 14:22:34 -306,Daniel,djackson8h@wunderground.com,144.95.32.34,1990-07-27 13:23:05 -307,Joe,jboyd8i@alibaba.com,187.105.86.178,2011-09-28 16:46:32 -308,Ralph,rdunn8j@fc2.com,3.19.87.255,1984-10-18 08:00:40 -309,Craig,ccarter8k@gizmodo.com,235.152.76.215,1998-07-04 12:15:21 -310,Paula,pdean8l@hhs.gov,161.100.173.197,1973-02-13 09:38:55 -311,Andrew,agarrett8m@behance.net,199.253.123.218,1991-02-14 13:36:32 -312,Janet,jhowell8n@alexa.com,39.189.139.79,2012-11-24 20:17:33 -313,Keith,khansen8o@godaddy.com,116.186.223.196,1987-08-23 21:22:05 -314,Nicholas,nedwards8p@state.gov,142.175.142.11,1977-03-28 18:27:27 -315,Jacqueline,jallen8q@oaic.gov.au,189.66.135.192,1994-10-26 11:44:26 -316,Frank,fgardner8r@mapy.cz,154.77.119.169,1983-01-29 19:19:51 -317,Eric,eharrison8s@google.cn,245.139.65.123,1984-02-04 09:54:36 -318,Gregory,gcooper8t@go.com,171.147.0.221,2004-06-14 05:22:08 -319,Jean,jfreeman8u@rakuten.co.jp,67.243.121.5,1977-01-07 18:23:43 -320,Juan,jlewis8v@shinystat.com,216.181.171.189,2001-08-23 17:32:43 -321,Randy,rwilliams8w@shinystat.com,105.152.146.28,1983-02-17 00:05:50 -322,Stephen,shart8x@sciencedirect.com,196.131.205.148,2004-02-15 10:12:03 -323,Annie,ahunter8y@example.com,63.36.34.103,2003-07-23 21:15:25 -324,Melissa,mflores8z@cbc.ca,151.230.217.90,1983-11-02 14:53:56 -325,Jane,jweaver90@about.me,0.167.235.217,1987-07-29 00:13:44 -326,Anthony,asmith91@oracle.com,97.87.48.41,2001-05-31 18:44:11 -327,Terry,tdavis92@buzzfeed.com,46.20.12.51,2015-09-12 23:13:55 -328,Brandon,bmontgomery93@gravatar.com,252.101.48.186,2010-10-28 08:26:27 -329,Chris,cmurray94@bluehost.com,25.158.167.97,2004-05-05 16:10:31 -330,Denise,dfuller95@hugedomains.com,216.210.149.28,1979-04-20 08:57:24 -331,Arthur,amcdonald96@sakura.ne.jp,206.42.36.213,2009-08-15 03:26:16 -332,Jesse,jhoward97@google.cn,46.181.118.30,1974-04-18 14:08:41 -333,Frank,fsimpson98@domainmarket.com,163.220.211.87,2006-06-30 14:46:52 -334,Janice,jwoods99@pen.io,229.245.237.182,1988-04-06 11:52:58 -335,Rebecca,rroberts9a@huffingtonpost.com,148.96.15.80,1976-10-05 08:44:16 -336,Joshua,jray9b@opensource.org,192.253.12.198,1971-12-25 22:27:07 -337,Joyce,jcarpenter9c@statcounter.com,125.171.46.215,2001-12-31 22:08:13 -338,Andrea,awest9d@privacy.gov.au,79.101.180.201,1983-02-18 20:07:47 -339,Christine,chudson9e@yelp.com,64.198.43.56,1997-09-08 08:03:43
-340,Joe,jparker9f@earthlink.net,251.215.148.153,1973-11-04 05:08:18 -341,Thomas,tkim9g@answers.com,49.187.34.47,1991-08-07 21:13:48 -342,Janice,jdean9h@scientificamerican.com,4.197.117.16,2009-12-08 02:35:49 -343,James,jmitchell9i@umich.edu,43.121.18.147,2011-04-28 17:04:09 -344,Charles,cgardner9j@purevolume.com,197.78.240.240,1998-02-11 06:47:07 -345,Robert,rhenderson9k@friendfeed.com,215.84.180.88,2002-05-10 15:33:14 -346,Chris,cgray9l@4shared.com,249.70.192.240,1998-10-03 16:43:42 -347,Gloria,ghayes9m@hibu.com,81.103.138.26,1999-12-26 11:23:13 -348,Edward,eramirez9n@shareasale.com,38.136.90.136,2010-08-19 08:01:06 -349,Cheryl,cbutler9o@google.ca,172.180.78.172,1995-05-27 20:03:52 -350,Margaret,mwatkins9p@sfgate.com,3.20.198.6,2014-10-21 01:42:58 -351,Rebecca,rwelch9q@examiner.com,45.81.42.208,2001-02-08 12:19:06 -352,Joe,jpalmer9r@phpbb.com,163.202.92.190,1970-01-05 11:29:12 -353,Sandra,slewis9s@dyndns.org,77.215.201.236,1974-01-05 07:04:04 -354,Todd,tfranklin9t@g.co,167.125.181.82,2009-09-28 10:13:58 -355,Joseph,jlewis9u@webmd.com,244.204.6.11,1990-10-21 15:49:57 -356,Alan,aknight9v@nydailynews.com,152.197.95.83,1996-03-08 08:43:17 -357,Sharon,sdean9w@123-reg.co.uk,237.46.40.26,1985-11-30 12:09:24 -358,Annie,awright9x@cafepress.com,190.45.231.111,2000-08-24 11:56:06 -359,Diane,dhamilton9y@youtube.com,85.146.171.196,2015-02-24 02:03:57 -360,Antonio,alane9z@auda.org.au,61.63.146.203,2001-05-13 03:43:34 -361,Matthew,mallena0@hhs.gov,29.97.32.19,1973-02-19 23:43:32 -362,Bonnie,bfowlera1@soup.io,251.216.99.53,2013-08-01 15:35:41 -363,Margaret,mgraya2@examiner.com,69.255.151.79,1998-01-23 22:24:59 -364,Joan,jwagnera3@printfriendly.com,192.166.120.61,1973-07-13 00:30:22 -365,Catherine,cperkinsa4@nytimes.com,58.21.24.214,2006-11-19 11:52:26 -366,Mark,mcartera5@cpanel.net,220.33.102.142,2007-09-09 09:43:27 -367,Paula,ppricea6@msn.com,36.182.238.124,2009-11-11 09:13:05 -368,Catherine,cgreena7@army.mil,228.203.58.19,2005-08-09 16:52:15 -369,Helen,hhamiltona8@symantec.com,155.56.194.99,2005-02-01 05:40:36 -370,Jane,jmeyera9@ezinearticles.com,133.244.113.213,2013-11-06 22:10:23 -371,Wanda,wevansaa@bloglovin.com,233.125.192.48,1994-12-26 23:43:42 -372,Mark,mmarshallab@tumblr.com,114.74.60.47,2016-09-29 18:03:01 -373,Andrew,amartinezac@google.cn,182.54.37.130,1976-06-06 17:04:17 -374,Helen,hmoralesad@e-recht24.de,42.45.4.123,1977-03-28 19:06:59 -375,Bonnie,bstoneae@php.net,196.149.79.137,1970-02-05 17:05:58 -376,Douglas,dfreemanaf@nasa.gov,215.65.124.218,2008-11-20 21:51:55 -377,Willie,wwestag@army.mil,35.189.92.118,1992-07-24 05:08:08 -378,Cheryl,cwagnerah@upenn.edu,228.239.222.141,2010-01-25 06:29:01 -379,Sandra,swardai@baidu.com,63.11.113.240,1985-05-23 08:07:37 -380,Julie,jrobinsonaj@jugem.jp,110.58.202.50,2015-03-05 09:42:07 -381,Larry,lwagnerak@shop-pro.jp,98.234.25.24,1975-07-22 22:22:02 -382,Juan,jcastilloal@yelp.com,24.174.74.202,2007-01-17 09:32:43 -383,Donna,dfrazieram@artisteer.com,205.26.147.45,1990-02-11 20:55:46 -384,Rachel,rfloresan@w3.org,109.60.216.162,1983-05-22 22:42:18 -385,Robert,rreynoldsao@theguardian.com,122.65.209.130,2009-05-01 18:02:51 -386,Donald,dbradleyap@etsy.com,42.54.35.126,1997-01-16 16:31:52 -387,Rachel,rfisheraq@nih.gov,160.243.250.45,2006-02-17 22:05:49 -388,Nicholas,nhamiltonar@princeton.edu,156.211.37.111,1976-06-21 03:36:29 -389,Timothy,twhiteas@ca.gov,36.128.23.70,1975-09-24 03:51:18 -390,Diana,dbradleyat@odnoklassniki.ru,44.102.120.184,1983-04-27 09:02:50 -391,Billy,bfowlerau@jimdo.com,91.200.68.196,1995-01-29 06:57:35
-392,Bruce,bandrewsav@ucoz.com,48.12.101.125,1992-10-27 04:31:39 -393,Linda,lromeroaw@usa.gov,100.71.233.19,1992-06-08 15:13:18 -394,Debra,dwatkinsax@ucoz.ru,52.160.233.193,2001-11-11 06:51:01 -395,Katherine,kburkeay@wix.com,151.156.242.141,2010-06-14 19:54:28 -396,Martha,mharrisonaz@youku.com,21.222.10.199,1989-10-16 14:17:55 -397,Dennis,dwellsb0@youtu.be,103.16.29.3,1985-12-21 06:05:51 -398,Gloria,grichardsb1@bloglines.com,90.147.120.234,1982-08-27 01:04:43 -399,Brenda,bfullerb2@t.co,33.253.63.90,2011-04-20 05:00:35 -400,Larry,lhendersonb3@disqus.com,88.95.132.128,1982-08-31 02:15:12 -401,Richard,rlarsonb4@wisc.edu,13.48.231.150,1979-04-15 14:08:09 -402,Terry,thuntb5@usa.gov,65.91.103.240,1998-05-15 11:50:49 -403,Harry,hburnsb6@nasa.gov,33.38.21.244,1981-04-12 14:02:20 -404,Diana,dellisb7@mlb.com,218.229.81.135,1997-01-29 00:17:25 -405,Jack,jburkeb8@tripadvisor.com,210.227.182.216,1984-03-09 17:24:03 -406,Julia,jlongb9@fotki.com,10.210.12.104,2005-10-26 03:54:13 -407,Lois,lscottba@msu.edu,188.79.136.138,1973-02-02 18:40:39 -408,Sandra,shendersonbb@shareasale.com,114.171.220.108,2012-06-09 18:22:26 -409,Irene,isanchezbc@cdbaby.com,109.255.50.119,1983-09-28 21:11:27 -410,Emily,ebrooksbd@bandcamp.com,227.81.93.79,1970-08-31 21:08:01 -411,Michelle,mdiazbe@businessweek.com,236.249.6.226,1993-05-22 08:07:07 -412,Tammy,tbennettbf@wisc.edu,145.253.239.152,1978-12-31 20:24:51 -413,Christine,cgreenebg@flickr.com,97.25.140.118,1978-07-17 12:55:30 -414,Patricia,pgarzabh@tuttocitta.it,139.246.192.211,1984-02-27 13:40:08 -415,Kimberly,kromerobi@aol.com,73.56.88.247,1976-09-16 14:22:04 -416,George,gjohnstonbj@fda.gov,240.36.245.185,1979-07-24 14:36:02 -417,Eugene,efullerbk@sciencedaily.com,42.38.105.140,2012-09-12 01:56:41 -418,Andrea,astevensbl@goo.gl,31.152.207.204,1979-05-24 11:06:21 -419,Shirley,sreidbm@scientificamerican.com,103.60.31.241,1984-02-23 04:07:41 -420,Terry,tmorenobn@blinklist.com,92.161.34.42,1994-06-25 14:01:35 -421,Christopher,cmorenobo@go.com,158.86.176.82,1973-09-05 09:18:47 -422,Dennis,dhansonbp@ning.com,40.160.81.75,1982-01-20 10:19:41 -423,Beverly,brussellbq@de.vu,138.32.56.204,1997-11-06 07:20:19 -424,Howard,hparkerbr@163.com,103.171.134.171,2015-06-24 15:37:10 -425,Helen,hmccoybs@fema.gov,61.200.4.71,1995-06-20 08:59:10 -426,Ann,ahudsonbt@cafepress.com,239.187.71.125,1977-04-11 07:59:28 -427,Tina,twestbu@nhs.uk,80.213.117.74,1992-08-19 05:54:44 -428,Terry,tnguyenbv@noaa.gov,21.93.118.95,1991-09-19 23:22:55 -429,Ashley,aburtonbw@wix.com,233.176.205.109,2009-11-10 05:01:20 -430,Eric,emyersbx@1und1.de,168.91.212.67,1987-08-10 07:16:20 -431,Barbara,blittleby@lycos.com,242.14.189.239,2008-08-02 12:13:04 -432,Sean,sevansbz@instagram.com,14.39.177.13,2007-04-16 17:28:49 -433,Shirley,sburtonc0@newsvine.com,34.107.138.76,1980-12-10 02:19:29 -434,Patricia,pfreemanc1@so-net.ne.jp,219.213.142.117,1987-03-01 02:25:45 -435,Paula,pfosterc2@vkontakte.ru,227.14.138.141,1972-09-22 12:59:34 -436,Nicole,nstewartc3@1688.com,8.164.23.115,1998-10-27 00:10:17 -437,Earl,ekimc4@ovh.net,100.26.244.177,2013-01-22 10:05:46 -438,Beverly,breedc5@reuters.com,174.12.226.27,1974-09-22 07:29:36 -439,Lawrence,lbutlerc6@a8.net,105.164.42.164,1992-06-05 00:43:40 -440,Charles,cmoorec7@ucoz.com,252.197.131.69,1990-04-09 02:34:05 -441,Alice,alawsonc8@live.com,183.73.220.232,1989-02-28 09:11:04 -442,Dorothy,dcarpenterc9@arstechnica.com,241.47.200.14,2005-05-02 19:57:21 -443,Carolyn,cfowlerca@go.com,213.109.55.202,1978-09-10 20:18:20 -444,Anthony,alongcb@free.fr,169.221.158.204,1984-09-13 01:59:23 
-445,Annie,amoorecc@e-recht24.de,50.34.148.61,2009-03-26 03:41:07 -446,Carlos,candrewscd@ihg.com,236.69.59.212,1972-03-29 22:42:48 -447,Beverly,bramosce@google.ca,164.250.184.49,1982-11-10 04:34:01 -448,Teresa,tlongcf@umich.edu,174.88.53.223,1987-05-17 12:48:00 -449,Roy,rboydcg@uol.com.br,91.58.243.215,1974-06-16 17:59:54 -450,Ashley,afieldsch@tamu.edu,130.138.11.126,1983-09-15 05:52:36 -451,Judith,jhawkinsci@cmu.edu,200.187.103.245,2003-10-22 12:24:03 -452,Rebecca,rwestcj@ocn.ne.jp,72.85.3.103,1980-11-13 11:01:26 -453,Raymond,rporterck@infoseek.co.jp,146.33.216.151,1982-05-17 23:58:03 -454,Janet,jmarshallcl@odnoklassniki.ru,52.46.193.166,1998-10-04 00:02:21 -455,Shirley,speterscm@salon.com,248.126.31.15,1987-01-30 06:04:59 -456,Annie,abowmancn@economist.com,222.213.248.59,2006-03-14 23:52:59 -457,Jean,jlarsonco@blogspot.com,71.41.25.195,2007-09-08 23:49:45 -458,Phillip,pmoralescp@stanford.edu,74.119.87.28,2011-03-14 20:25:40 -459,Norma,nrobinsoncq@economist.com,28.225.21.54,1989-10-21 01:22:43 -460,Kimberly,kclarkcr@dion.ne.jp,149.171.132.153,2008-06-27 02:27:30 -461,Ruby,rmorriscs@ucla.edu,177.85.163.249,2016-01-28 16:43:44 -462,Jonathan,jcastilloct@tripod.com,78.4.28.77,2000-05-24 17:33:06 -463,Edward,ebryantcu@jigsy.com,140.31.98.193,1992-12-17 08:32:47 -464,Chris,chamiltoncv@eepurl.com,195.171.234.206,1970-12-05 03:42:19 -465,Michael,mweavercw@reference.com,7.233.133.213,1987-03-29 02:30:54 -466,Howard,hlawrencecx@businessweek.com,113.225.124.224,1990-07-30 07:20:57 -467,Philip,phowardcy@comsenz.com,159.170.247.249,2010-10-15 10:18:37 -468,Mary,mmarshallcz@xing.com,125.132.189.70,2007-07-19 13:48:47 -469,Scott,salvarezd0@theguardian.com,78.49.103.230,1987-10-31 06:10:44 -470,Wayne,wcarrolld1@blog.com,238.1.120.204,1980-11-19 03:26:10 -471,Jennifer,jwoodsd2@multiply.com,92.20.224.49,2010-05-06 22:17:04 -472,Raymond,rwelchd3@toplist.cz,176.158.35.240,2007-12-12 19:02:51 -473,Steven,sdixond4@wisc.edu,167.55.237.52,1984-05-05 11:44:37 -474,Ralph,rjamesd5@ameblo.jp,241.190.50.133,2000-07-06 08:44:37 -475,Jason,jrobinsond6@hexun.com,138.119.139.56,2006-02-03 05:27:45 -476,Doris,dwoodd7@fema.gov,180.220.156.190,1978-05-11 20:14:20 -477,Elizabeth,eberryd8@youtu.be,74.188.53.229,2006-11-18 08:29:06 -478,Irene,igilbertd9@privacy.gov.au,194.152.218.1,1985-09-17 02:46:52 -479,Jessica,jdeanda@ameblo.jp,178.103.93.118,1974-06-07 19:04:05 -480,Rachel,ralvarezdb@phoca.cz,17.22.223.174,1999-03-08 02:43:25 -481,Kenneth,kthompsondc@shinystat.com,229.119.91.234,2007-05-15 13:17:32 -482,Harold,hmurraydd@parallels.com,133.26.188.80,1993-11-15 03:42:07 -483,Paula,phowellde@samsung.com,34.215.28.216,1993-11-29 15:55:00 -484,Ruth,rpiercedf@tripadvisor.com,111.30.130.123,1986-08-17 10:19:38 -485,Phyllis,paustindg@vk.com,50.84.34.178,1994-04-13 03:05:24 -486,Laura,lfosterdh@usnews.com,37.8.101.33,2001-06-30 08:58:59 -487,Eric,etaylordi@com.com,103.183.253.45,2006-09-15 20:18:46 -488,Doris,driveradj@prweb.com,247.16.2.199,1989-05-08 09:27:09 -489,Ryan,rhughesdk@elegantthemes.com,103.234.153.232,1989-08-01 18:36:06 -490,Steve,smoralesdl@jigsy.com,3.76.84.207,2011-03-13 17:01:05 -491,Louis,lsullivandm@who.int,78.135.44.208,1975-11-26 16:01:23 -492,Catherine,ctuckerdn@seattletimes.com,93.137.106.21,1990-03-13 16:14:56 -493,Ann,adixondo@gmpg.org,191.136.222.111,2002-06-05 14:22:18 -494,Johnny,jhartdp@amazon.com,103.252.198.39,1988-07-30 23:54:49 -495,Susan,srichardsdq@skype.com,126.247.192.11,2005-01-09 12:08:14 -496,Brenda,bparkerdr@skype.com,63.232.216.86,1974-05-18 05:58:29 
-497,Tammy,tmurphyds@constantcontact.com,56.56.37.112,2014-08-05 18:22:25 -498,Larry,lhayesdt@wordpress.com,162.146.13.46,1997-02-26 14:01:53 -499,,ethomasdu@hhs.gov,6.241.88.250,2007-09-14 13:03:34 -500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20 diff --git a/tests/integration/simple_seed_test/test_seed_type_override.py b/tests/integration/simple_seed_test/test_seed_type_override.py deleted file mode 100644 index aafb2f3c6..000000000 --- a/tests/integration/simple_seed_test/test_seed_type_override.py +++ /dev/null @@ -1,58 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestSimpleSeedColumnOverride(DBTIntegrationTest): - - @property - def schema(self): - return "simple_seed" - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds-config'], - 'macro-paths': ['macros'], - 'seeds': { - 'test': { - 'enabled': False, - 'quote_columns': True, - 'seed_enabled': { - 'enabled': True, - '+column_types': self.seed_enabled_types() - }, - 'seed_tricky': { - 'enabled': True, - '+column_types': self.seed_tricky_types(), - }, - }, - }, - } - - @property - def models(self): - return "models-rs" - - @property - def profile_config(self): - return self.redshift_profile() - - def seed_enabled_types(self): - return { - "id": "text", - "birthday": "date", - } - - def seed_tricky_types(self): - return { - 'id_str': 'text', - 'looks_like_a_bool': 'text', - 'looks_like_a_date': 'text', - } - - @use_profile('redshift') - def test_redshift_simple_seed_with_column_override_redshift(self): - results = self.run_dbt(["seed", "--show"]) - self.assertEqual(len(results), 2) - results = self.run_dbt(["test"]) - self.assertEqual(len(results), 10) diff --git a/tests/integration/simple_snapshot_test/add_column_to_source_bq.sql b/tests/integration/simple_snapshot_test/add_column_to_source_bq.sql deleted file mode 100644 index e1babb82c..000000000 --- a/tests/integration/simple_snapshot_test/add_column_to_source_bq.sql +++ /dev/null @@ -1,56 +0,0 @@ - -create or replace table {schema}.seed as ( - - select *, - [ - struct( - 1 as field_1, - 2 as field_2 - ), - struct( - 3 as field_1, - 4 as field_2 - ) - ] as repeated_nested_field, - - struct( - 1 as field_1, - 2 as field_2 - ) as nested_field, - - [ - 1, - 2 - ] as repeated_field - - from {schema}.seed - -); - -create or replace table {schema}.snapshot_expected as ( - - select *, - [ - struct( - 1 as field_1, - 2 as field_2 - ), - struct( - 3 as field_1, - 4 as field_2 - ) - ] as repeated_nested_field, - - struct( - 1 as field_1, - 2 as field_2 - ) as nested_field, - - [ - 1, - 2 - ] as repeated_field - - from {schema}.snapshot_expected - -); diff --git a/tests/integration/simple_snapshot_test/check-snapshots-expected/check_snapshots_test_current.sql b/tests/integration/simple_snapshot_test/check-snapshots-expected/check_snapshots_test_current.sql deleted file mode 100644 index 414afb472..000000000 --- a/tests/integration/simple_snapshot_test/check-snapshots-expected/check_snapshots_test_current.sql +++ /dev/null @@ -1,51 +0,0 @@ - - -with query as ( - - -- check that the current value for id=1 is red - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 1 and color = 'red' and dbt_valid_to is null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that the previous 'red' value for id=1 is invalidated - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 1 and color = 'red' and dbt_valid_to 
is not null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that there's only one current record for id=2 - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 2 and color = 'pink' and dbt_valid_to is null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that the previous value for id=2 is represented - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 2 and color = 'green' and dbt_valid_to is not null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that there are 5 records total in the table - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - ) = 5 then 0 else 1 end as failures - -) - -select * -from query -where failures = 1 diff --git a/tests/integration/simple_snapshot_test/check-snapshots/check_cols_cycle.sql b/tests/integration/simple_snapshot_test/check-snapshots/check_cols_cycle.sql deleted file mode 100644 index 8b36f35a1..000000000 --- a/tests/integration/simple_snapshot_test/check-snapshots/check_cols_cycle.sql +++ /dev/null @@ -1,33 +0,0 @@ - -{% snapshot check_cols_cycle %} - - {{ - config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['color'] - ) - }} - - {% if var('version') == 1 %} - - select 1 as id, 'red' as color union all - select 2 as id, 'green' as color - - {% elif var('version') == 2 %} - - select 1 as id, 'blue' as color union all - select 2 as id, 'green' as color - - {% elif var('version') == 3 %} - - select 1 as id, 'red' as color union all - select 2 as id, 'pink' as color - - {% else %} - {% do exceptions.raise_compiler_error("Got bad version: " ~ var('version')) %} - {% endif %} - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/custom-snapshot-macros/custom.sql b/tests/integration/simple_snapshot_test/custom-snapshot-macros/custom.sql deleted file mode 100644 index 4347088e4..000000000 --- a/tests/integration/simple_snapshot_test/custom-snapshot-macros/custom.sql +++ /dev/null @@ -1,18 +0,0 @@ -{# A "custom" strategy that's really just the timestamp one #} -{% macro snapshot_custom_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set primary_key = config['unique_key'] %} - {% set updated_at = config['updated_at'] %} - - {% set row_changed_expr -%} - ({{ snapshotted_rel }}.{{ updated_at }} < {{ current_rel }}.{{ updated_at }}) - {%- endset %} - - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} - - {% do return({ - "unique_key": primary_key, - "updated_at": updated_at, - "row_changed": row_changed_expr, - "scd_id": scd_id_expr - }) %} -{% endmacro %} diff --git a/tests/integration/simple_snapshot_test/invalidate_bigquery.sql b/tests/integration/simple_snapshot_test/invalidate_bigquery.sql deleted file mode 100644 index d4641d451..000000000 --- a/tests/integration/simple_snapshot_test/invalidate_bigquery.sql +++ /dev/null @@ -1,12 +0,0 @@ - --- update records 11 - 21. 
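The check_cols_cycle snapshot above is driven entirely by a version var: each value feeds a different pair of colors through the 'check' strategy, and check_snapshots_test_current.sql then counts the resulting validity windows. A minimal sketch of how such a cycle could be driven from Python, assuming only the standard dbt CLI; the loop itself is illustrative, since the real harness invoked dbt through its own run_dbt helper.

import subprocess

# Illustrative driver for the check_cols_cycle snapshot: each pass changes
# the `version` var, so the 'check' strategy sees a new color for one id
# and opens a fresh validity window for it.
for version in (1, 2, 3):
    subprocess.run(
        ["dbt", "snapshot", "--vars", f"{{version: {version}}}"],
        check=True,
    )

# After the three passes the snapshot should hold exactly five rows
# (id=1: red -> blue -> red, id=2: green -> pink), which is what the
# "5 records total" case in check_snapshots_test_current.sql asserts.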
Change email and updated_at field -update {database}.{schema}.seed set - updated_at = timestamp_add(updated_at, interval 1 hour), - email = case when id = 20 then 'pfoxj@creativecommons.org' else concat('new_', email) end -where id >= 10 and id <= 20; - - --- invalidate records 11 - 21 -update {database}.{schema}.snapshot_expected set - dbt_valid_to = timestamp_add(updated_at, interval 1 hour) -where id >= 10 and id <= 20; diff --git a/tests/integration/simple_snapshot_test/invalidate_postgres.sql b/tests/integration/simple_snapshot_test/invalidate_postgres.sql deleted file mode 100644 index b0bef3c6c..000000000 --- a/tests/integration/simple_snapshot_test/invalidate_postgres.sql +++ /dev/null @@ -1,27 +0,0 @@ - --- update records 11 - 21. Change email and updated_at field -update {schema}.seed set - updated_at = updated_at + interval '1 hour', - email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end -where id >= 10 and id <= 20; - - --- invalidate records 11 - 21 -update {schema}.snapshot_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_castillo_expected set - dbt_valid_to = "1-updated_at" + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_alvarez_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_kelly_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; diff --git a/tests/integration/simple_snapshot_test/invalidate_snowflake.sql b/tests/integration/simple_snapshot_test/invalidate_snowflake.sql deleted file mode 100644 index 57c4b71d6..000000000 --- a/tests/integration/simple_snapshot_test/invalidate_snowflake.sql +++ /dev/null @@ -1,12 +0,0 @@ - --- update records 11 - 21. 
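One small wrinkle in the invalidate_* scripts worth flagging: their comments say "update records 11 - 21", but the predicate id >= 10 and id <= 20 is inclusive on both ends, so it is ids 10 through 20, still eleven rows, that get shifted and invalidated. A two-line check of that arithmetic:

# Inclusive bounds: `id >= 10 and id <= 20` selects ids 10..20 (11 rows),
# matching the row count the comment implies, but not its labels.
ids = [i for i in range(1, 21) if 10 <= i <= 20]
assert ids == list(range(10, 21)) and len(ids) == 11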
Change email and updated_at field -update {database}.{schema}.seed set - updated_at = DATEADD(hour, 1, updated_at), - email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end -where id >= 10 and id <= 20; - - --- invalidate records 11 - 21 -update {database}.{schema}.snapshot_expected set - dbt_valid_to = DATEADD(hour, 1, updated_at) -where id >= 10 and id <= 20; diff --git a/tests/integration/simple_snapshot_test/macros/test_no_overlaps.sql b/tests/integration/simple_snapshot_test/macros/test_no_overlaps.sql deleted file mode 100644 index 6d432193c..000000000 --- a/tests/integration/simple_snapshot_test/macros/test_no_overlaps.sql +++ /dev/null @@ -1,85 +0,0 @@ -{% macro get_snapshot_unique_id() -%} - {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} -{%- endmacro %} - -{% macro default__get_snapshot_unique_id() -%} - {% do return("id || '-' || first_name") %} -{%- endmacro %} - - -{% macro bigquery__get_snapshot_unique_id() -%} - {%- do return('concat(cast(id as string), "-", first_name)') -%} -{%- endmacro %} - -{# - mostly copy+pasted from dbt_utils, but I removed some parameters and added - a query that calls get_snapshot_unique_id -#} -{% test mutually_exclusive_ranges(model) %} - -with base as ( - select {{ get_snapshot_unique_id() }} as dbt_unique_id, - * - from {{ model }} -), -window_functions as ( - - select - dbt_valid_from as lower_bound, - coalesce(dbt_valid_to, '2099-1-1T00:00:01') as upper_bound, - - lead(dbt_valid_from) over ( - partition by dbt_unique_id - order by dbt_valid_from - ) as next_lower_bound, - - row_number() over ( - partition by dbt_unique_id - order by dbt_valid_from desc - ) = 1 as is_last_record - - from base - -), - -calc as ( - -- We want to return records where one of our assumptions fails, so we'll use - -- the `not` function with `and` statements so we can write our assumptions nore cleanly - select - *, - - -- For each record: lower_bound should be < upper_bound. - -- Coalesce it to return an error on the null case (implicit assumption - -- these columns are not_null) - coalesce( - lower_bound < upper_bound, - is_last_record - ) as lower_bound_less_than_upper_bound, - - -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound. - -- Coalesce it to handle null cases for the last record. - coalesce( - upper_bound = next_lower_bound, - is_last_record, - false - ) as upper_bound_equal_to_next_lower_bound - - from window_functions - -), - -validation_errors as ( - - select - * - from calc - - where not( - -- THE FOLLOWING SHOULD BE TRUE -- - lower_bound_less_than_upper_bound - and upper_bound_equal_to_next_lower_bound - ) -) - -select * from validation_errors -{% endtest %} diff --git a/tests/integration/simple_snapshot_test/models-collision/snapshot_actual.sql b/tests/integration/simple_snapshot_test/models-collision/snapshot_actual.sql deleted file mode 100644 index 43258a714..000000000 --- a/tests/integration/simple_snapshot_test/models-collision/snapshot_actual.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as id diff --git a/tests/integration/simple_snapshot_test/models-slow/gen.sql b/tests/integration/simple_snapshot_test/models-slow/gen.sql deleted file mode 100644 index 7e71a2bfd..000000000 --- a/tests/integration/simple_snapshot_test/models-slow/gen.sql +++ /dev/null @@ -1,44 +0,0 @@ - -{{ config(materialized='ephemeral') }} - - -/* - Generates 50 rows that "appear" to update every - second to a query-er. 
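The mutually_exclusive_ranges test in test_no_overlaps.sql above encodes the core SCD-2 invariant: for each unique key, the [dbt_valid_from, dbt_valid_to) windows must tile exactly, with only the current row left open. A compact in-memory restatement of the same invariant, using hypothetical row dicts rather than warehouse tables:

from datetime import datetime

def windows_tile(rows):
    """True when a key's validity windows tile with no gaps or overlaps."""
    rows = sorted(rows, key=lambda r: r["dbt_valid_from"])
    for prev, nxt in zip(rows, rows[1:]):
        # each closed row's upper bound must equal the next lower bound,
        # mirroring upper_bound_equal_to_next_lower_bound in the SQL test
        if prev["dbt_valid_to"] != nxt["dbt_valid_from"]:
            return False
    # only the final (current) row may be open-ended
    return rows[-1]["dbt_valid_to"] is None

history = [
    {"dbt_valid_from": datetime(2016, 1, 1), "dbt_valid_to": datetime(2016, 6, 1)},
    {"dbt_valid_from": datetime(2016, 6, 1), "dbt_valid_to": None},
]
assert windows_tile(history)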
- - 1 2020-04-21 20:44:00-04 0 - 2 2020-04-21 20:43:59-04 59 - 3 2020-04-21 20:43:58-04 58 - 4 2020-04-21 20:43:57-04 57 - - .... 1 second later .... - - 1 2020-04-21 20:44:01-04 1 - 2 2020-04-21 20:44:00-04 0 - 3 2020-04-21 20:43:59-04 59 - 4 2020-04-21 20:43:58-04 58 - - This view uses pg_sleep(2) to make queries against - the view take a non-trivial amount of time - - Use statement_timestamp() as it changes during a transactions. - If we used now() or current_time or similar, then the timestamp - of the start of the transaction would be returned instead. -*/ - -with gen as ( - - select - id, - date_trunc('second', statement_timestamp()) - (interval '1 second' * id) as updated_at - - from generate_series(1, 10) id - -) - -select - id, - updated_at, - extract(seconds from updated_at)::int as seconds - -from gen, pg_sleep(2) diff --git a/tests/integration/simple_snapshot_test/models/.gitkeep b/tests/integration/simple_snapshot_test/models/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/integration/simple_snapshot_test/models/ref_snapshot.sql b/tests/integration/simple_snapshot_test/models/ref_snapshot.sql deleted file mode 100644 index c453929ce..000000000 --- a/tests/integration/simple_snapshot_test/models/ref_snapshot.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('snapshot_actual') }} diff --git a/tests/integration/simple_snapshot_test/models/schema.yml b/tests/integration/simple_snapshot_test/models/schema.yml deleted file mode 100644 index 259e55b95..000000000 --- a/tests/integration/simple_snapshot_test/models/schema.yml +++ /dev/null @@ -1,5 +0,0 @@ -version: 2 -snapshots: - - name: snapshot_actual - tests: - - mutually_exclusive_ranges diff --git a/tests/integration/simple_snapshot_test/seed.sql b/tests/integration/simple_snapshot_test/seed.sql deleted file mode 100644 index 8f3422e36..000000000 --- a/tests/integration/simple_snapshot_test/seed.sql +++ /dev/null @@ -1,220 +0,0 @@ -create table {database}.{schema}.seed ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -create table {database}.{schema}.snapshot_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- seed inserts -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(1, 'Judith', 'Kennedy', 'jkennedy0@phpbb.com', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), -(2, 'Arthur', 'Kelly', 'akelly1@eepurl.com', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), -(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), -(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), -(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), -(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), -(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), -(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), -(9, 'Gregory', 'Kelly', 
'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), -(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), -(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), -(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), -(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), -(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), -(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), -(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), -(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), -(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), -(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), -(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); - - --- populate snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed; - -create table {database}.{schema}.snapshot_castillo_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - "1-updated_at" TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE - -); - --- one entry -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Castillo'; - -create table {database}.{schema}.snapshot_alvarez_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- 0 entries -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - 
updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Alvarez'; - -create table {database}.{schema}.snapshot_kelly_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id VARCHAR(32), - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- 2 entries -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Kelly'; diff --git a/tests/integration/simple_snapshot_test/seed_bq.sql b/tests/integration/simple_snapshot_test/seed_bq.sql deleted file mode 100644 index 5ea93fee4..000000000 --- a/tests/integration/simple_snapshot_test/seed_bq.sql +++ /dev/null @@ -1,81 +0,0 @@ -create table {database}.{schema}.seed ( - id INT64, - first_name STRING, - last_name STRING, - email STRING, - gender STRING, - ip_address STRING, - updated_at TIMESTAMP -); - -create table {database}.{schema}.snapshot_expected ( - id INT64, - first_name STRING, - last_name STRING, - email STRING, - gender STRING, - ip_address STRING, - - -- snapshotting fields - updated_at TIMESTAMP, - dbt_valid_from TIMESTAMP, - dbt_valid_to TIMESTAMP, - dbt_scd_id STRING, - dbt_updated_at TIMESTAMP -); - - --- seed inserts -insert {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(1, 'Judith', 'Kennedy', 'jkennedy0@phpbb.com', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), -(2, 'Arthur', 'Kelly', 'akelly1@eepurl.com', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), -(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), -(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), -(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), -(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), -(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), -(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), -(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), -(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), -(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), -(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), -(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), -(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), -(15, 
'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), -(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), -(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), -(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), -(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), -(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); - - --- populate snapshot table -insert {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - cast(null as timestamp) as dbt_valid_to, - updated_at as dbt_updated_at, - to_hex(md5(concat(cast(id as string), '-', first_name, '|', cast(updated_at as string)))) as dbt_scd_id -from {database}.{schema}.seed; - diff --git a/tests/integration/simple_snapshot_test/seed_longtext.sql b/tests/integration/simple_snapshot_test/seed_longtext.sql deleted file mode 100644 index a27008ea7..000000000 --- a/tests/integration/simple_snapshot_test/seed_longtext.sql +++ /dev/null @@ -1,9 +0,0 @@ -create table {database}.{schema}.super_long ( - id INTEGER, - longstring TEXT, - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -insert into {database}.{schema}.super_long (id, longstring, updated_at) VALUES -(1, 'short', current_timestamp), -(2, repeat('a', 500), current_timestamp); diff --git a/tests/integration/simple_snapshot_test/seed_pg.sql b/tests/integration/simple_snapshot_test/seed_pg.sql deleted file mode 100644 index a22a2359c..000000000 --- a/tests/integration/simple_snapshot_test/seed_pg.sql +++ /dev/null @@ -1,223 +0,0 @@ - create table {database}.{schema}.seed ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -create table {database}.{schema}.snapshot_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- seed inserts --- use the same email for two users to verify that duplicated check_cols values --- are handled appropriately -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), -(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), -(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), -(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), -(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), -(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), -(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), -(8, 
'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), -(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), -(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), -(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), -(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), -(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), -(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), -(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), -(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), -(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), -(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), -(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), -(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); - - --- populate snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed; - - - -create table {database}.{schema}.snapshot_castillo_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - "1-updated_at" TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- one entry -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Castillo'; - -create table {database}.{schema}.snapshot_alvarez_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- 0 entries -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - 
first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Alvarez'; - -create table {database}.{schema}.snapshot_kelly_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- 2 entries -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Kelly'; diff --git a/tests/integration/simple_snapshot_test/seeds/seed.csv b/tests/integration/simple_snapshot_test/seeds/seed.csv deleted file mode 100644 index 9da8d46ff..000000000 --- a/tests/integration/simple_snapshot_test/seeds/seed.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name -1,Judith -2,Arthur -3,Rachel diff --git a/tests/integration/simple_snapshot_test/seeds/seed_newcol.csv b/tests/integration/simple_snapshot_test/seeds/seed_newcol.csv deleted file mode 100644 index 005517bda..000000000 --- a/tests/integration/simple_snapshot_test/seeds/seed_newcol.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name,last_name -1,Judith,Kennedy -2,Arthur,Kelly -3,Rachel,Moreno diff --git a/tests/integration/simple_snapshot_test/test-check-col-snapshots-bq/snapshot.sql b/tests/integration/simple_snapshot_test/test-check-col-snapshots-bq/snapshot.sql deleted file mode 100644 index 9c8459756..000000000 --- a/tests/integration/simple_snapshot_test/test-check-col-snapshots-bq/snapshot.sql +++ /dev/null @@ -1,29 +0,0 @@ -{% snapshot snapshot_actual %} - {# this used to be check_cols=('email',), which ought to be totally valid, - but is not because type systems are hard. 
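The expected-snapshot seeds above derive dbt_scd_id as md5(id || '-' || first_name || '|' || updated_at::text). That hash is easy to reproduce off-warehouse when debugging a mismatched row, provided the timestamp is rendered exactly as the warehouse's ::text cast renders it. A sketch, with that caveat:

import hashlib
from datetime import datetime

# Mirrors md5(id || '-' || first_name || '|' || updated_at::text); Python's
# str(datetime) happens to match Postgres/Redshift text rendering for
# timestamps without fractional seconds, e.g. "2015-12-24 12:19:28".
def scd_id(id_, first_name, updated_at):
    raw = f"{id_}-{first_name}|{updated_at}"
    return hashlib.md5(raw.encode()).hexdigest()

print(scd_id(1, "Judith", datetime(2015, 12, 24, 12, 19, 28)))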
#} - {{ - config( - target_project=var('target_database', database), - target_dataset=var('target_schema', schema), - unique_key='concat(cast(id as string) , "-", first_name)', - strategy='check', - check_cols=['email'], - ) - }} - select * from `{{target.database}}`.`{{schema}}`.seed -{% endsnapshot %} - - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ - config( - target_project=var('target_database', database), - target_dataset=var('target_schema', schema), - unique_key='concat(cast(id as string) , "-", first_name)', - strategy='check', - check_cols='all', - ) - }} - select * from `{{target.database}}`.`{{schema}}`.seed -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-check-col-snapshots-noconfig/snapshot.sql b/tests/integration/simple_snapshot_test/test-check-col-snapshots-noconfig/snapshot.sql deleted file mode 100644 index daf4cf312..000000000 --- a/tests/integration/simple_snapshot_test/test-check-col-snapshots-noconfig/snapshot.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% snapshot snapshot_actual %} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ config(check_cols='all') }} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-check-col-snapshots/snapshot.sql b/tests/integration/simple_snapshot_test/test-check-col-snapshots/snapshot.sql deleted file mode 100644 index dd85ed753..000000000 --- a/tests/integration/simple_snapshot_test/test-check-col-snapshots/snapshot.sql +++ /dev/null @@ -1,28 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='check', - check_cols=['email'], - ) - }} - select * from {{target.database}}.{{schema}}.seed - -{% endsnapshot %} - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='check', - check_cols='all', - ) - }} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-bq/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-bq/snapshot.sql deleted file mode 100644 index 7ffdedbcc..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-bq/snapshot.sql +++ /dev/null @@ -1,19 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_project=var('target_database', database), - target_dataset=var('target_schema', schema), - unique_key='concat(cast(id as string) , "-", first_name)', - strategy='timestamp', - updated_at='updated_at', - ) - }} - - {% if var('invalidate_hard_deletes', 'false') | as_bool %} - {{ config(invalidate_hard_deletes=True) }} - {% endif %} - - select * from `{{target.database}}`.`{{schema}}`.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy-tests/test_snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy-tests/test_snapshot.sql deleted file mode 100644 index e1184c353..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy-tests/test_snapshot.sql +++ /dev/null @@ -1,32 +0,0 @@ - -{# /* - Given the repro case for the snapshot build, we'd - expect to see both 
records have color='pink' - in their most recent rows. -*/ #} - -with expected as ( - - select 1 as id, 'pink' as color union all - select 2 as id, 'pink' as color - -), - -actual as ( - - select id, color - from {{ ref('my_snapshot') }} - where color = 'pink' - and dbt_valid_to is null - -) - -select * from expected -except -select * from actual - -union all - -select * from actual -except -select * from expected diff --git a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy/snapshot.sql deleted file mode 100644 index dd21fa63c..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-changing-strategy/snapshot.sql +++ /dev/null @@ -1,55 +0,0 @@ - -{# - REPRO: - 1. Run with check strategy - 2. Add a new ts column and run with check strategy - 3. Run with timestamp strategy on new ts column - - Expect: new entry is added for changed rows in (3) -#} - - -{% snapshot my_snapshot %} - - {#--------------- Configuration ------------ #} - - {{ config( - target_schema=schema, - unique_key='id' - ) }} - - {% if var('strategy') == 'timestamp' %} - {{ config(strategy='timestamp', updated_at='updated_at') }} - {% else %} - {{ config(strategy='check', check_cols=['color']) }} - {% endif %} - - {#--------------- Test setup ------------ #} - - {% if var('step') == 1 %} - - select 1 as id, 'blue' as color - union all - select 2 as id, 'red' as color - - {% elif var('step') == 2 %} - - -- change id=1 color from blue to green - -- id=2 is unchanged when using the check strategy - select 1 as id, 'green' as color, '2020-01-01'::date as updated_at - union all - select 2 as id, 'red' as color, '2020-01-01'::date as updated_at - - {% elif var('step') == 3 %} - - -- bump timestamp for both records. Expect that after this runs - -- using the timestamp strategy, both ids should have the color - -- 'pink' in the database. This should be in the future b/c we're - -- going to compare to the check timestamp, which will be _now_ - select 1 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at - union all - select 2 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at - - {% endif %} - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-checkall/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-checkall/snapshot.sql deleted file mode 100644 index b9cd002ca..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-checkall/snapshot.sql +++ /dev/null @@ -1,4 +0,0 @@ -{% snapshot my_snapshot %} - {{ config(check_cols='all', unique_key='id', strategy='check', target_database=database, target_schema=schema) }} - select * from {{ ref(var('seed_name', 'seed')) }} -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-invalid/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-invalid/snapshot.sql deleted file mode 100644 index 6d0561f62..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-invalid/snapshot.sql +++ /dev/null @@ -1,13 +0,0 @@ -{# make sure to never name this anything with `target_schema` in the name, or the test will be invalid! 
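The strategy-changing snapshot above is parameterized on two vars, step and strategy, and the repro in its header comment maps onto three invocations. A hypothetical driver for that sequence, again using the plain dbt CLI rather than the harness's run_dbt:

import subprocess

# Steps 1-2 run the 'check' strategy (the updated_at column first appears
# in step 2); step 3 switches to 'timestamp' once that column is bumped.
for step, strategy in [(1, "check"), (2, "check"), (3, "timestamp")]:
    subprocess.run(
        ["dbt", "snapshot", "--vars", f"{{step: {step}, strategy: {strategy}}}"],
        check=True,
    )

# test_snapshot.sql then expects both ids to read color='pink' in their
# current rows (dbt_valid_to is null).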
#} -{% snapshot missing_field_target_underscore_schema %} - {# missing the mandatory target_schema parameter #} - {{ - config( - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-longtext/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-longtext/snapshot.sql deleted file mode 100644 index 35a563e3f..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-longtext/snapshot.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% snapshot snapshot_actual %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.super_long -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-invalid/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-invalid/snapshot.sql deleted file mode 100644 index 2398e9e9a..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-invalid/snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot snapshot_actual %} - {# this custom strategy does not exist in the 'dbt' package #} - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='dbt.custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-namespaced/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-namespaced/snapshot.sql deleted file mode 100644 index 8b14b9d00..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom-namespaced/snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='test.custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg-custom/snapshot.sql deleted file mode 100644 index d59a0b60d..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-pg-custom/snapshot.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-pg/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-pg/snapshot.sql deleted file mode 100644 index ae5aac087..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-pg/snapshot.sql +++ /dev/null @@ -1,19 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || 
first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - - {% if var('invalidate_hard_deletes', 'false') | as_bool %} - {{ config(invalidate_hard_deletes=True) }} - {% endif %} - - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-select-noconfig/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-select-noconfig/snapshot.sql deleted file mode 100644 index a62218b2c..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-select-noconfig/snapshot.sql +++ /dev/null @@ -1,41 +0,0 @@ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} - -{% snapshot snapshot_castillo %} - - {{ - config( - target_database=var('target_database', database), - updated_at='"1-updated_at"', - ) - }} - select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' - -{% endsnapshot %} - -{% snapshot snapshot_alvarez %} - - {{ - config( - target_database=var('target_database', database), - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' - -{% endsnapshot %} - - -{% snapshot snapshot_kelly %} - {# This has no target_database set, which is allowed! #} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-select/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-select/snapshot.sql deleted file mode 100644 index 06245f36f..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-select/snapshot.sql +++ /dev/null @@ -1,44 +0,0 @@ -{% snapshot snapshot_castillo %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='"1-updated_at"', - ) - }} - select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' - -{% endsnapshot %} - -{% snapshot snapshot_alvarez %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' - -{% endsnapshot %} - - -{% snapshot snapshot_kelly %} - {# This has no target_database set, which is allowed! 
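A note on the unique_key expressions that recur through these snapshot configs: the value is assembled with Jinja's ~ string-concatenation operator purely to dodge nested quoting, and the three pieces collapse to one SQL expression. The equivalence, spelled out:

# 'id || ' ~ "'-'" ~ ' || first_name' concatenates three Jinja strings
# into the single SQL expression dbt receives as the unique_key:
unique_key = 'id || ' + "'-'" + ' || first_name'
assert unique_key == "id || '-' || first_name"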
#} - {{ - config( - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test-snapshots-slow-tests/test_timestamps.sql b/tests/integration/simple_snapshot_test/test-snapshots-slow-tests/test_timestamps.sql deleted file mode 100644 index c8687ceaf..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-slow-tests/test_timestamps.sql +++ /dev/null @@ -1,23 +0,0 @@ - -/* - Assert that the dbt_valid_from of the latest record - is equal to the dbt_valid_to of the previous record -*/ - -with snapshot as ( - - select * from {{ ref('my_slow_snapshot') }} - -) - -select - snap1.id, - snap1.dbt_valid_from as new_valid_from, - snap2.dbt_valid_from as old_valid_from, - snap2.dbt_valid_to as old_valid_to - -from snapshot as snap1 -join snapshot as snap2 on snap1.id = snap2.id -where snap1.dbt_valid_to is null - and snap2.dbt_valid_to is not null - and snap1.dbt_valid_from != snap2.dbt_valid_to diff --git a/tests/integration/simple_snapshot_test/test-snapshots-slow/snapshot.sql b/tests/integration/simple_snapshot_test/test-snapshots-slow/snapshot.sql deleted file mode 100644 index 260d0b967..000000000 --- a/tests/integration/simple_snapshot_test/test-snapshots-slow/snapshot.sql +++ /dev/null @@ -1,21 +0,0 @@ - -{% snapshot my_slow_snapshot %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at' - ) - }} - - select - id, - updated_at, - seconds - - from {{ ref('gen') }} - -{% endsnapshot %} diff --git a/tests/integration/simple_snapshot_test/test_simple_snapshot.py b/tests/integration/simple_snapshot_test/test_simple_snapshot.py deleted file mode 100644 index d1423cb0a..000000000 --- a/tests/integration/simple_snapshot_test/test_simple_snapshot.py +++ /dev/null @@ -1,397 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -from datetime import datetime -import pytz - - -class BaseSimpleSnapshotTest(DBTIntegrationTest): - NUM_SNAPSHOT_MODELS = 1 - - @property - def schema(self): - return "simple_snapshot" - - @property - def models(self): - return "models" - - def run_snapshot(self): - return self.run_dbt(['snapshot']) - - def dbt_run_seed_snapshot(self): - self.run_sql_file('seed.sql') - - results = self.run_snapshot() - self.assertEqual(len(results), self.NUM_SNAPSHOT_MODELS) - - def assert_case_tables_equal(self, actual, expected): - self.assertTablesEqual(actual, expected) - - def assert_expected(self): - self.run_dbt(['test']) - self.assert_case_tables_equal('snapshot_actual', 'snapshot_expected') - - -class TestSimpleSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - "seed-paths": ['seeds'], - "snapshot-paths": ['test-snapshots-pg'], - 'macro-paths': ['macros'], - } - - @use_profile('redshift') - def test__redshift__simple_snapshot(self): - self.dbt_run_seed_snapshot() - - self.assert_expected() - - self.run_sql_file("invalidate_postgres.sql") - self.run_sql_file("update.sql") - - results = self.run_snapshot() - self.assertEqual(len(results), self.NUM_SNAPSHOT_MODELS) - - self.assert_expected() - - -class TestSimpleColumnSnapshotFiles(DBTIntegrationTest): - - @property - def schema(self): - return "simple_snapshot" - - @property - def models(self): - return 
"models-checkall" - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-checkall'], - 'seeds': { - 'quote_columns': False, - } - } - - def _run_snapshot_test(self): - self.run_dbt(['seed']) - self.run_dbt(['snapshot']) - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - results = self.run_sql( - 'select * from {}.{}.my_snapshot'.format(database, self.unique_schema()), - fetch='all' - ) - self.assertEqual(len(results), 3) - for result in results: - self.assertEqual(len(result), 6) - - self.run_dbt(['snapshot', '--vars', '{seed_name: seed_newcol}']) - results = self.run_sql( - 'select * from {}.{}.my_snapshot where last_name is not NULL'.format(database, self.unique_schema()), - fetch='all' - ) - self.assertEqual(len(results), 3) - - for result in results: - # new column - self.assertEqual(len(result), 7) - self.assertIsNotNone(result[-1]) - - results = self.run_sql( - 'select * from {}.{}.my_snapshot where last_name is NULL'.format(database, self.unique_schema()), - fetch='all' - ) - self.assertEqual(len(results), 3) - for result in results: - # new column - self.assertEqual(len(result), 7) - - @use_profile('redshift') - def test_redshift_renamed_source(self): - self._run_snapshot_test() - - -class TestCustomSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-pg-custom'], - } - - -class TestNamespacedCustomSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-pg-custom-namespaced'], - } - - -class TestInvalidNamespacedCustomSnapshotFiles(BaseSimpleSnapshotTest): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'macro-paths': ['custom-snapshot-macros', 'macros'], - 'snapshot-paths': ['test-snapshots-pg-custom-invalid'], - } - - def run_snapshot(self): - return self.run_dbt(['snapshot'], expect_pass=False) - - -class TestCheckCols(TestSimpleSnapshotFiles): - NUM_SNAPSHOT_MODELS = 2 - - def _assertTablesEqualSql(self, relation_a, relation_b, columns=None): - # When building the equality tests, only test columns that don't start - # with 'dbt_', because those are time-sensitive - if columns is None: - columns = [c for c in self.get_relation_columns(relation_a) if not c[0].lower().startswith('dbt_')] - return super()._assertTablesEqualSql(relation_a, relation_b, columns=columns) - - def assert_expected(self): - super().assert_expected() - self.assert_case_tables_equal('snapshot_checkall', 'snapshot_expected') - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": ['test-check-col-snapshots'], - 'macro-paths': ['macros'], - } - - -class TestConfiguredCheckCols(TestCheckCols): - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": ['test-check-col-snapshots-noconfig'], - "snapshots": { - "test": { - "target_schema": self.unique_schema(), - "unique_key": "id || '-' || first_name", - "strategy": "check", - "check_cols": ["email"], - }, - }, - 'macro-paths': 
['macros'], - } - - -class TestUpdatedAtCheckCols(TestCheckCols): - - def _assertTablesEqualSql(self, relation_a, relation_b, columns=None): - revived_records = self.run_sql( - ''' - select - id, - updated_at, - dbt_valid_from - from {} - '''.format(relation_b), - fetch='all' - ) - - for result in revived_records: - # result is a tuple, the updated_at is second and dbt_valid_from is latest - self.assertIsInstance(result[1], datetime) - self.assertIsInstance(result[2], datetime) - self.assertEqual(result[1].replace(tzinfo=pytz.UTC), result[2].replace(tzinfo=pytz.UTC)) - - if columns is None: - columns = [c for c in self.get_relation_columns(relation_a) if not c[0].lower().startswith('dbt_')] - return super()._assertTablesEqualSql(relation_a, relation_b, columns=columns) - - def assert_expected(self): - super().assert_expected() - self.assertTablesEqual('snapshot_checkall', 'snapshot_expected') - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": ['test-check-col-snapshots-noconfig'], - "snapshots": { - "test": { - "target_schema": self.unique_schema(), - "unique_key": "id || '-' || first_name", - "strategy": "check", - "check_cols": "all", - "updated_at": "updated_at", - }, - }, - 'macro-paths': ['macros'], - } - - -class TestSnapshotHardDelete(DBTIntegrationTest): - # These tests uses the same seed data, containing 20 records of which we hard delete the last 10. - # These deleted records set the dbt_valid_to to time the snapshot was ran. - NUM_SNAPSHOT_MODELS = 1 - - @property - def schema(self): - return "simple_snapshot_004" - - @property - def models(self): - return "models" - - @property - def project_config(self): - if self.adapter_type == 'bigquery': - paths = ['test-snapshots-bq'] - else: - paths = ['test-snapshots-pg'] - - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - "snapshot-paths": paths, - 'macro-paths': ['macros'], - } - - def _test_snapshot_hard_delete(self): - self._snapshot() - - if self.adapter_type == 'snowflake': - self.assertTablesEqual("SNAPSHOT_EXPECTED", "SNAPSHOT_ACTUAL") - else: - self.assertTablesEqual("snapshot_expected", "snapshot_actual") - - self._invalidated_snapshot_datetime = None - self._revived_snapshot_datetime = None - - self._delete_records() - self._snapshot_and_assert_invalidated() - self._revive_records() - self._snapshot_and_assert_revived() - - def _snapshot(self): - begin_snapshot_datetime = datetime.now(pytz.UTC) - results = self.run_dbt(['snapshot', '--vars', '{invalidate_hard_deletes: true}']) - self.assertEqual(len(results), self.NUM_SNAPSHOT_MODELS) - - return begin_snapshot_datetime - - def _delete_records(self): - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - - self.run_sql( - 'delete from {}.{}.seed where id >= 10;'.format(database, self.unique_schema()) - ) - - def _snapshot_and_assert_invalidated(self): - self._invalidated_snapshot_datetime = self._snapshot() - - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - - snapshotted = self.run_sql( - ''' - select - id, - dbt_valid_to - from {}.{}.snapshot_actual - order by id - '''.format(database, self.unique_schema()), - fetch='all' - ) - - self.assertEqual(len(snapshotted), 20) - for result in snapshotted[10:]: - # result is a tuple, the dbt_valid_to column is the latest - self.assertIsInstance(result[-1], datetime) - 
self.assertGreaterEqual(result[-1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime) - - def _revive_records(self): - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - - revival_timestamp = datetime.now(pytz.UTC).strftime(r'%Y-%m-%d %H:%M:%S') - self.run_sql( - ''' - insert into {}.{}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values - (10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '{}'), - (11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '{}') - '''.format(database, self.unique_schema(), revival_timestamp, revival_timestamp) - ) - - def _snapshot_and_assert_revived(self): - self._revived_snapshot_datetime = self._snapshot() - - database = self.default_database - if self.adapter_type == 'bigquery': - database = self.adapter.quote(database) - - # records which weren't revived (id != 10, 11) - invalidated_records = self.run_sql( - ''' - select - id, - dbt_valid_to - from {}.{}.snapshot_actual - where dbt_valid_to is not null - order by id - '''.format(database, self.unique_schema()), - fetch='all' - ) - - self.assertEqual(len(invalidated_records), 11) - for result in invalidated_records: - # result is a tuple, the dbt_valid_to column is the latest - self.assertIsInstance(result[1], datetime) - self.assertGreaterEqual(result[1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime) - - # records which weren't revived (id != 10, 11) - revived_records = self.run_sql( - ''' - select - id, - dbt_valid_from, - dbt_valid_to - from {}.{}.snapshot_actual - where dbt_valid_to is null - and id IN (10, 11) - '''.format(database, self.unique_schema()), - fetch='all' - ) - - self.assertEqual(len(revived_records), 2) - for result in revived_records: - # result is a tuple, the dbt_valid_from is second and dbt_valid_to is latest - self.assertIsInstance(result[1], datetime) - # there are milliseconds (part of microseconds in datetime objects) in the - # invalidated_snapshot_datetime and not in result datetime so set the microseconds to 0 - self.assertGreaterEqual(result[1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime.replace(microsecond=0)) - self.assertIsNone(result[2]) - - @use_profile('redshift') - def test__redshift__snapshot_hard_delete(self): - self.run_sql_file('seed.sql') - self._test_snapshot_hard_delete() - diff --git a/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py b/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py deleted file mode 100644 index 4bc9262ed..000000000 --- a/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py +++ /dev/null @@ -1,40 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestSimpleSnapshotFiles(DBTIntegrationTest): - NUM_SNAPSHOT_MODELS = 1 - - @property - def schema(self): - return "simple_snapshot" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 'config-version': 2, - "snapshot-paths": ['check-snapshots'], - "test-paths": ['check-snapshots-expected'], - "model-paths": [], - } - - def snapshot_check_cols_cycle(self): - results = self.run_dbt(["snapshot", '--vars', 'version: 1']) - self.assertEqual(len(results), 1) - - results = self.run_dbt(["snapshot", '--vars', 'version: 2']) - self.assertEqual(len(results), 1) - - results = self.run_dbt(["snapshot", '--vars', 'version: 3']) - self.assertEqual(len(results), 1) - - def 
assert_expected(self): - self.run_dbt(['test', '--select', 'test_type:singular', '--vars', 'version: 3']) - - @use_profile('redshift') - def test__redshift__simple_snapshot(self): - self.snapshot_check_cols_cycle() - self.assert_expected() diff --git a/tests/integration/simple_snapshot_test/update.sql b/tests/integration/simple_snapshot_test/update.sql deleted file mode 100644 index 890959f32..000000000 --- a/tests/integration/simple_snapshot_test/update.sql +++ /dev/null @@ -1,261 +0,0 @@ --- insert v2 of the 11 - 21 records - -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20; - - -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Castillo'; - - -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Alvarez'; - - -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Kelly'; - --- insert 10 new records -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(21, 'Judy', 'Robinson', 'jrobinsonk@blogs.com', 'Female', '208.21.192.232', '2016-09-18 08:27:38'), -(22, 'Kevin', 'Alvarez', 'kalvarezl@buzzfeed.com', 'Male', '228.106.146.9', '2016-07-29 03:07:37'), -(23, 'Barbara', 'Carr', 'bcarrm@pen.io', 'Female', '106.165.140.17', '2015-09-24 13:27:23'), -(24, 'William', 'Watkins', 'wwatkinsn@guardian.co.uk', 'Male', '78.155.84.6', '2016-03-08 19:13:08'), -(25, 'Judy', 'Cooper', 'jcoopero@google.com.au', 'Female', '24.149.123.184', '2016-10-05 20:49:33'), -(26, 'Shirley', 'Castillo', 
'scastillop@samsung.com', 'Female', '129.252.181.12', '2016-06-20 21:12:21'), -(27, 'Justin', 'Harper', 'jharperq@opera.com', 'Male', '131.172.103.218', '2016-05-21 22:56:46'), -(28, 'Marie', 'Medina', 'mmedinar@nhs.uk', 'Female', '188.119.125.67', '2015-10-08 13:44:33'), -(29, 'Kelly', 'Edwards', 'kedwardss@phoca.cz', 'Female', '47.121.157.66', '2015-09-15 06:33:37'), -(30, 'Carl', 'Coleman', 'ccolemant@wikipedia.org', 'Male', '82.227.154.83', '2016-05-26 16:46:40'); - - --- add these new records to the snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20; - - --- add these new records to the snapshot table -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Castillo'; - -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Alvarez'; - -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Kelly'; diff --git a/tests/integration/simple_snapshot_test/update_bq.sql b/tests/integration/simple_snapshot_test/update_bq.sql deleted file mode 100644 index 5c972d8af..000000000 --- a/tests/integration/simple_snapshot_test/update_bq.sql +++ /dev/null @@ -1,78 +0,0 @@ --- insert v2 of the 11 - 21 records - -insert {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - cast(null as timestamp) as dbt_valid_to, 
- updated_at as dbt_updated_at, - to_hex(md5(concat(cast(id as string), '-', first_name, '|', cast(updated_at as string)))) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20; - - --- insert 10 new records -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(21, 'Judy', 'Robinson', 'jrobinsonk@blogs.com', 'Female', '208.21.192.232', '2016-09-18 08:27:38'), -(22, 'Kevin', 'Alvarez', 'kalvarezl@buzzfeed.com', 'Male', '228.106.146.9', '2016-07-29 03:07:37'), -(23, 'Barbara', 'Carr', 'bcarrm@pen.io', 'Female', '106.165.140.17', '2015-09-24 13:27:23'), -(24, 'William', 'Watkins', 'wwatkinsn@guardian.co.uk', 'Male', '78.155.84.6', '2016-03-08 19:13:08'), -(25, 'Judy', 'Cooper', 'jcoopero@google.com.au', 'Female', '24.149.123.184', '2016-10-05 20:49:33'), -(26, 'Shirley', 'Castillo', 'scastillop@samsung.com', 'Female', '129.252.181.12', '2016-06-20 21:12:21'), -(27, 'Justin', 'Harper', 'jharperq@opera.com', 'Male', '131.172.103.218', '2016-05-21 22:56:46'), -(28, 'Marie', 'Medina', 'mmedinar@nhs.uk', 'Female', '188.119.125.67', '2015-10-08 13:44:33'), -(29, 'Kelly', 'Edwards', 'kedwardss@phoca.cz', 'Female', '47.121.157.66', '2015-09-15 06:33:37'), -(30, 'Carl', 'Coleman', 'ccolemant@wikipedia.org', 'Male', '82.227.154.83', '2016-05-26 16:46:40'); - - --- add these new records to the snapshot table -insert {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - cast(null as timestamp) as dbt_valid_to, - updated_at as dbt_updated_at, - to_hex(md5(concat(cast(id as string), '-', first_name, '|', cast(updated_at as string)))) as dbt_scd_id -from {database}.{schema}.seed -where id > 20; - diff --git a/tests/integration/sources_test/error_models/model.sql b/tests/integration/sources_test/error_models/model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/error_models/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/error_models/schema.yml b/tests/integration/sources_test/error_models/schema.yml deleted file mode 100644 index 69cf1f304..000000000 --- a/tests/integration/sources_test/error_models/schema.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: 2 -sources: - - name: test_source - loader: custom - freshness: - warn_after: {count: 10, period: hour} - error_after: {count: 1, period: day} - schema: invalid - tables: - - name: test_table - identifier: source - loaded_at_field: updated_at diff --git a/tests/integration/sources_test/filtered_models/schema.yml b/tests/integration/sources_test/filtered_models/schema.yml deleted file mode 100644 index edad7f6ec..000000000 --- a/tests/integration/sources_test/filtered_models/schema.yml +++ /dev/null @@ -1,18 +0,0 @@ -version: 2 -sources: - - name: test_source - loader: custom - freshness: - warn_after: {count: 10, period: hour} - error_after: {count: 1, period: day} - filter: id > 1 - schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}" - quoting: - identifier: True - tables: - - name: test_table - identifier: source - loaded_at_field: updated_at - freshness: - error_after: {count: 18, period: hour} - filter: id > 101 diff --git 
a/tests/integration/sources_test/macros/macro.sql b/tests/integration/sources_test/macros/macro.sql deleted file mode 100644 index a607a6e4c..000000000 --- a/tests/integration/sources_test/macros/macro.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro override_me() -%} - {{ exceptions.raise_compiler_error('this is a bad macro') }} -{%- endmacro %} - -{% macro happy_little_macro() -%} - {{ override_me() }} -{%- endmacro %} - - -{% macro vacuum_source(source_name, table_name) -%} - {% call statement('stmt', auto_begin=false, fetch_result=false) %} - vacuum {{ source(source_name, table_name) }} - {% endcall %} -{%- endmacro %} diff --git a/tests/integration/sources_test/malformed_models/descendant_model.sql b/tests/integration/sources_test/malformed_models/descendant_model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/malformed_models/descendant_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/malformed_models/schema.yml b/tests/integration/sources_test/malformed_models/schema.yml deleted file mode 100644 index 544d18d65..000000000 --- a/tests/integration/sources_test/malformed_models/schema.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: 2 -sources: - - name: test_source - loader: custom - schema: "{{ var('test_run_schema') }}" - tables: - - name: test_table - identifier: source - tests: - - relationships: - # this is invalid (list of 3 1-key dicts instead of a single 3-key dict) - - column_name: favorite_color - - to: ref('descendant_model') - - field: favorite_color diff --git a/tests/integration/sources_test/malformed_schema_tests/model.sql b/tests/integration/sources_test/malformed_schema_tests/model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/malformed_schema_tests/model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/malformed_schema_tests/schema.yml b/tests/integration/sources_test/malformed_schema_tests/schema.yml deleted file mode 100644 index d72ab2eee..000000000 --- a/tests/integration/sources_test/malformed_schema_tests/schema.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: 2 -sources: - - name: test_source - schema: "{{ var('test_run_schema') }}" - tables: - - name: test_table - identifier: source - columns: - - name: favorite_color - tests: - - relationships: - to: ref('model') - # this will get rendered as its literal - field: "{{ 'favorite' ~ 'color' }}" diff --git a/tests/integration/sources_test/models/descendant_model.sql b/tests/integration/sources_test/models/descendant_model.sql deleted file mode 100644 index 55bbcba67..000000000 --- a/tests/integration/sources_test/models/descendant_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ source('test_source', 'test_table') }} diff --git a/tests/integration/sources_test/models/ephemeral_model.sql b/tests/integration/sources_test/models/ephemeral_model.sql deleted file mode 100644 index 8de35cd3e..000000000 --- a/tests/integration/sources_test/models/ephemeral_model.sql +++ /dev/null @@ -1,3 +0,0 @@ -{{ config(materialized='ephemeral') }} - -select 1 as id diff --git a/tests/integration/sources_test/models/multi_source_model.sql b/tests/integration/sources_test/models/multi_source_model.sql deleted file mode 100644 index e310206b0..000000000 --- a/tests/integration/sources_test/models/multi_source_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -select * from {{ 
source('test_source', 'other_test_table')}} - join {{ source('other_source', 'test_table')}} using (id) diff --git a/tests/integration/sources_test/models/nonsource_descendant.sql b/tests/integration/sources_test/models/nonsource_descendant.sql deleted file mode 100644 index 97f2151c7..000000000 --- a/tests/integration/sources_test/models/nonsource_descendant.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ schema }}.source diff --git a/tests/integration/sources_test/models/schema.yml b/tests/integration/sources_test/models/schema.yml deleted file mode 100644 index f02eb1345..000000000 --- a/tests/integration/sources_test/models/schema.yml +++ /dev/null @@ -1,77 +0,0 @@ -version: 2 -models: - - name: descendant_model - columns: - - name: favorite_color - tests: - - relationships: - to: source('test_source', 'test_table') - field: favorite_color - -sources: - - name: test_source - loader: custom - freshness: - warn_after: {count: 10, period: hour} - error_after: {count: 1, period: day} - schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}" - quoting: - identifier: True - tags: - - my_test_source_tag - tables: - - name: test_table - identifier: source - loaded_at_field: "{{ var('test_loaded_at') | as_text }}" - freshness: - error_after: {count: 18, period: hour} - tags: - - my_test_source_table_tag - columns: - - name: favorite_color - description: The favorite color - - name: id - description: The user ID - tests: - - unique - - not_null - tags: - - id_column - - name: first_name - description: The first name of the user - tests: [] - - name: email - description: The email address of the user - - name: ip_address - description: The last IP address the user logged in from - - name: updated_at - description: The last update time for this user - tests: - - relationships: - # do this as a table-level test, just to test out that aspect - column_name: favorite_color - to: ref('descendant_model') - field: favorite_color - - name: other_test_table - identifier: other_table - columns: - - name: id - tests: - - not_null - - unique - tags: - - id_column - - name: disabled_test_table - freshness: null - loaded_at_field: "{{ var('test_loaded_at') | as_text }}" - - name: other_source - schema: "{{ var('test_run_schema') }}" - quoting: - identifier: True - tables: - - name: test_table - identifier: other_source_table - - name: external_source - schema: "{{ var('test_run_alt_schema', var('test_run_schema')) }}" - tables: - - name: table diff --git a/tests/integration/sources_test/models/view_model.sql b/tests/integration/sources_test/models/view_model.sql deleted file mode 100644 index ecb330804..000000000 --- a/tests/integration/sources_test/models/view_model.sql +++ /dev/null @@ -1,3 +0,0 @@ -{# See here: https://github.com/dbt-labs/dbt/pull/1729 #} - -select * from {{ ref('ephemeral_model') }} diff --git a/tests/integration/sources_test/seed.sql b/tests/integration/sources_test/seed.sql deleted file mode 100644 index 40110b990..000000000 --- a/tests/integration/sources_test/seed.sql +++ /dev/null @@ -1,113 +0,0 @@ -create table {schema}.seed_expected ( - favorite_color TEXT, - id INTEGER, - first_name TEXT, - email TEXT, - ip_address TEXT, - updated_at TIMESTAMP WITHOUT TIME ZONE -); - - -INSERT INTO {schema}.seed_expected - ("favorite_color","id","first_name","email","ip_address","updated_at") -VALUES - ('blue',1,'Larry','lking0@miitbeian.gov.cn','''69.135.206.194''','2008-09-12 19:08:31'), - ('blue',2,'Larry','lperkins1@toplist.cz','''64.210.133.162''','1978-05-09 04:15:14'), - 
('blue',3,'Anna','amontgomery2@miitbeian.gov.cn','''168.104.64.114''','2011-10-16 04:07:57'), - ('blue',4,'Sandra','sgeorge3@livejournal.com','''229.235.252.98''','1973-07-19 10:52:43'), - ('blue',5,'Fred','fwoods4@google.cn','''78.229.170.124''','2012-09-30 16:38:29'), - ('blue',6,'Stephen','shanson5@livejournal.com','''182.227.157.105''','1995-11-07 21:40:50'), - ('blue',7,'William','wmartinez6@upenn.edu','''135.139.249.50''','1982-09-05 03:11:59'), - ('blue',8,'Jessica','jlong7@hao123.com','''203.62.178.210''','1991-10-16 11:03:15'), - ('blue',9,'Douglas','dwhite8@tamu.edu','''178.187.247.1''','1979-10-01 09:49:48'), - ('blue',10,'Lisa','lcoleman9@nydailynews.com','''168.234.128.249''','2011-05-26 07:45:49'), - ('blue',11,'Ralph','rfieldsa@home.pl','''55.152.163.149''','1972-11-18 19:06:11'), - ('blue',12,'Louise','lnicholsb@samsung.com','''141.116.153.154''','2014-11-25 20:56:14'), - ('blue',13,'Clarence','cduncanc@sfgate.com','''81.171.31.133''','2011-11-17 07:02:36'), - ('blue',14,'Daniel','dfranklind@omniture.com','''8.204.211.37''','1980-09-13 00:09:04'), - ('blue',15,'Katherine','klanee@auda.org.au','''176.96.134.59''','1997-08-22 19:36:56'), - ('blue',16,'Billy','bwardf@wikia.com','''214.108.78.85''','2003-10-19 02:14:47'), - ('blue',17,'Annie','agarzag@ocn.ne.jp','''190.108.42.70''','1988-10-28 15:12:35'), - ('blue',18,'Shirley','scolemanh@fastcompany.com','''109.251.164.84''','1988-08-24 10:50:57'), - ('blue',19,'Roger','rfrazieri@scribd.com','''38.145.218.108''','1985-12-31 15:17:15'), - ('blue',20,'Lillian','lstanleyj@goodreads.com','''47.57.236.17''','1970-06-08 02:09:05'), - ('blue',21,'Aaron','arodriguezk@nps.gov','''205.245.118.221''','1985-10-11 23:07:49'), - ('blue',22,'Patrick','pparkerl@techcrunch.com','''19.8.100.182''','2006-03-29 12:53:56'), - ('blue',23,'Phillip','pmorenom@intel.com','''41.38.254.103''','2011-11-07 15:35:43'), - ('blue',24,'Henry','hgarcian@newsvine.com','''1.191.216.252''','2008-08-28 08:30:44'), - ('blue',25,'Irene','iturnero@opera.com','''50.17.60.190''','1994-04-01 07:15:02'), - ('blue',26,'Andrew','adunnp@pen.io','''123.52.253.176''','2000-11-01 06:03:25'), - ('blue',27,'David','dgutierrezq@wp.com','''238.23.203.42''','1988-01-25 07:29:18'), - ('blue',28,'Henry','hsanchezr@cyberchimps.com','''248.102.2.185''','1983-01-01 13:36:37'), - ('blue',29,'Evelyn','epetersons@gizmodo.com','''32.80.46.119''','1979-07-16 17:24:12'), - ('blue',30,'Tammy','tmitchellt@purevolume.com','''249.246.167.88''','2001-04-03 10:00:23'), - ('blue',31,'Jacqueline','jlittleu@domainmarket.com','''127.181.97.47''','1986-02-11 21:35:50'), - ('blue',32,'Earl','eortizv@opera.com','''166.47.248.240''','1996-07-06 08:16:27'), - ('blue',33,'Juan','jgordonw@sciencedirect.com','''71.77.2.200''','1987-01-31 03:46:44'), - ('blue',34,'Diane','dhowellx@nyu.edu','''140.94.133.12''','1994-06-11 02:30:05'), - ('blue',35,'Randy','rkennedyy@microsoft.com','''73.255.34.196''','2005-05-26 20:28:39'), - ('blue',36,'Janice','jriveraz@time.com','''22.214.227.32''','1990-02-09 04:16:52'), - ('blue',37,'Laura','lperry10@diigo.com','''159.148.145.73''','2015-03-17 05:59:25'), - ('blue',38,'Gary','gray11@statcounter.com','''40.193.124.56''','1970-01-27 10:04:51'), - ('blue',39,'Jesse','jmcdonald12@typepad.com','''31.7.86.103''','2009-03-14 08:14:29'), - ('blue',40,'Sandra','sgonzalez13@goodreads.com','''223.80.168.239''','1993-05-21 14:08:54'), - ('blue',41,'Scott','smoore14@archive.org','''38.238.46.83''','1980-08-30 11:16:56'), - 
('blue',42,'Phillip','pevans15@cisco.com','''158.234.59.34''','2011-12-15 23:26:31'), - ('blue',43,'Steven','sriley16@google.ca','''90.247.57.68''','2011-10-29 19:03:28'), - ('blue',44,'Deborah','dbrown17@hexun.com','''179.125.143.240''','1995-04-10 14:36:07'), - ('blue',45,'Lori','lross18@ow.ly','''64.80.162.180''','1980-12-27 16:49:15'), - ('blue',46,'Sean','sjackson19@tumblr.com','''240.116.183.69''','1988-06-12 21:24:45'), - ('blue',47,'Terry','tbarnes1a@163.com','''118.38.213.137''','1997-09-22 16:43:19'), - ('blue',48,'Dorothy','dross1b@ebay.com','''116.81.76.49''','2005-02-28 13:33:24'), - ('blue',49,'Samuel','swashington1c@house.gov','''38.191.253.40''','1989-01-19 21:15:48'), - ('blue',50,'Ralph','rcarter1d@tinyurl.com','''104.84.60.174''','2007-08-11 10:21:49'), - ('green',51,'Wayne','whudson1e@princeton.edu','''90.61.24.102''','1983-07-03 16:58:12'), - ('green',52,'Rose','rjames1f@plala.or.jp','''240.83.81.10''','1995-06-08 11:46:23'), - ('green',53,'Louise','lcox1g@theglobeandmail.com','''105.11.82.145''','2016-09-19 14:45:51'), - ('green',54,'Kenneth','kjohnson1h@independent.co.uk','''139.5.45.94''','1976-08-17 11:26:19'), - ('green',55,'Donna','dbrown1i@amazon.co.uk','''19.45.169.45''','2006-05-27 16:51:40'), - ('green',56,'Johnny','jvasquez1j@trellian.com','''118.202.238.23''','1975-11-17 08:42:32'), - ('green',57,'Patrick','pramirez1k@tamu.edu','''231.25.153.198''','1997-08-06 11:51:09'), - ('green',58,'Helen','hlarson1l@prweb.com','''8.40.21.39''','1993-08-04 19:53:40'), - ('green',59,'Patricia','pspencer1m@gmpg.org','''212.198.40.15''','1977-08-03 16:37:27'), - ('green',60,'Joseph','jspencer1n@marriott.com','''13.15.63.238''','2005-07-23 20:22:06'), - ('green',61,'Phillip','pschmidt1o@blogtalkradio.com','''177.98.201.190''','1976-05-19 21:47:44'), - ('green',62,'Joan','jwebb1p@google.ru','''105.229.170.71''','1972-09-07 17:53:47'), - ('green',63,'Phyllis','pkennedy1q@imgur.com','''35.145.8.244''','2000-01-01 22:33:37'), - ('green',64,'Katherine','khunter1r@smh.com.au','''248.168.205.32''','1991-01-09 06:40:24'), - ('green',65,'Laura','lvasquez1s@wiley.com','''128.129.115.152''','1997-10-23 12:04:56'), - ('green',66,'Juan','jdunn1t@state.gov','''44.228.124.51''','2004-11-10 05:07:35'), - ('green',67,'Judith','jholmes1u@wiley.com','''40.227.179.115''','1977-08-02 17:01:45'), - ('green',68,'Beverly','bbaker1v@wufoo.com','''208.34.84.59''','2016-03-06 20:07:23'), - ('green',69,'Lawrence','lcarr1w@flickr.com','''59.158.212.223''','1988-09-13 06:07:21'), - ('green',70,'Gloria','gwilliams1x@mtv.com','''245.231.88.33''','1995-03-18 22:32:46'), - ('green',71,'Steven','ssims1y@cbslocal.com','''104.50.58.255''','2001-08-05 21:26:20'), - ('green',72,'Betty','bmills1z@arstechnica.com','''103.177.214.220''','1981-12-14 21:26:54'), - ('green',73,'Mildred','mfuller20@prnewswire.com','''151.158.8.130''','2000-04-19 10:13:55'), - ('green',74,'Donald','dday21@icq.com','''9.178.102.255''','1972-12-03 00:58:24'), - ('green',75,'Eric','ethomas22@addtoany.com','''85.2.241.227''','1992-11-01 05:59:30'), - ('green',76,'Joyce','jarmstrong23@sitemeter.com','''169.224.20.36''','1985-10-24 06:50:01'), - ('green',77,'Maria','mmartinez24@amazonaws.com','''143.189.167.135''','2005-10-05 05:17:42'), - ('green',78,'Harry','hburton25@youtube.com','''156.47.176.237''','1978-03-26 05:53:33'), - ('green',79,'Kevin','klawrence26@hao123.com','''79.136.183.83''','1994-10-12 04:38:52'), - ('green',80,'David','dhall27@prweb.com','''133.149.172.153''','1976-12-15 16:24:24'), - 
('green',81,'Kathy','kperry28@twitter.com','''229.242.72.228''','1979-03-04 02:58:56'), - ('green',82,'Adam','aprice29@elegantthemes.com','''13.145.21.10''','1982-11-07 11:46:59'), - ('green',83,'Brandon','bgriffin2a@va.gov','''73.249.128.212''','2013-10-30 05:30:36'), - ('green',84,'Henry','hnguyen2b@discovery.com','''211.36.214.242''','1985-01-09 06:37:27'), - ('green',85,'Eric','esanchez2c@edublogs.org','''191.166.188.251''','2004-05-01 23:21:42'), - ('green',86,'Jason','jlee2d@jimdo.com','''193.92.16.182''','1973-01-08 09:05:39'), - ('green',87,'Diana','drichards2e@istockphoto.com','''19.130.175.245''','1994-10-05 22:50:49'), - ('green',88,'Andrea','awelch2f@abc.net.au','''94.155.233.96''','2002-04-26 08:41:44'), - ('green',89,'Louis','lwagner2g@miitbeian.gov.cn','''26.217.34.111''','2003-08-25 07:56:39'), - ('green',90,'Jane','jsims2h@seesaa.net','''43.4.220.135''','1987-03-20 20:39:04'), - ('green',91,'Larry','lgrant2i@si.edu','''97.126.79.34''','2000-09-07 20:26:19'), - ('green',92,'Louis','ldean2j@prnewswire.com','''37.148.40.127''','2011-09-16 20:12:14'), - ('green',93,'Jennifer','jcampbell2k@xing.com','''38.106.254.142''','1988-07-15 05:06:49'), - ('green',94,'Wayne','wcunningham2l@google.com.hk','''223.28.26.187''','2009-12-15 06:16:54'), - ('green',95,'Lori','lstevens2m@icq.com','''181.250.181.58''','1984-10-28 03:29:19'), - ('green',96,'Judy','jsimpson2n@marriott.com','''180.121.239.219''','1986-02-07 15:18:10'), - ('green',97,'Phillip','phoward2o@usa.gov','''255.247.0.175''','2002-12-26 08:44:45'), - ('green',98,'Gloria','gwalker2p@usa.gov','''156.140.7.128''','1997-10-04 07:58:58'), - ('green',99,'Paul','pjohnson2q@umn.edu','''183.59.198.197''','1991-11-14 12:33:55'), - ('green',100,'Frank','fgreene2r@blogspot.com','''150.143.68.121''','2010-06-12 23:55:39'); diff --git a/tests/integration/sources_test/seeds/expected_multi_source.csv b/tests/integration/sources_test/seeds/expected_multi_source.csv deleted file mode 100644 index de9c1c01d..000000000 --- a/tests/integration/sources_test/seeds/expected_multi_source.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name,color -1,Larry,blue -2,Curly,red -3,Moe,green diff --git a/tests/integration/sources_test/seeds/other_source_table.csv b/tests/integration/sources_test/seeds/other_source_table.csv deleted file mode 100644 index a92b2cb8e..000000000 --- a/tests/integration/sources_test/seeds/other_source_table.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,color -1,blue -2,red -3,green diff --git a/tests/integration/sources_test/seeds/other_table.csv b/tests/integration/sources_test/seeds/other_table.csv deleted file mode 100644 index 56bdda92b..000000000 --- a/tests/integration/sources_test/seeds/other_table.csv +++ /dev/null @@ -1,4 +0,0 @@ -id,first_name -1,Larry -2,Curly -3,Moe diff --git a/tests/integration/sources_test/seeds/source.csv b/tests/integration/sources_test/seeds/source.csv deleted file mode 100644 index a8f87412e..000000000 --- a/tests/integration/sources_test/seeds/source.csv +++ /dev/null @@ -1,101 +0,0 @@ -favorite_color,id,first_name,email,ip_address,updated_at -blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31 -blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14 -blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57 -blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43 -blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29 -blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50 
-blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59 -blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15 -blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48 -blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49 -blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11 -blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14 -blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36 -blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04 -blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56 -blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47 -blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35 -blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57 -blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15 -blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05 -blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49 -blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56 -blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43 -blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44 -blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02 -blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25 -blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18 -blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37 -blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12 -blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23 -blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50 -blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27 -blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44 -blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05 -blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39 -blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52 -blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25 -blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51 -blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29 -blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54 -blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56 -blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31 -blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28 -blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07 -blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15 -blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45 -blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19 -blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24 -blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48 -blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49 -green,51,Wayne,whudson1e@princeton.edu,'90.61.24.102',1983-07-03 16:58:12 -green,52,Rose,rjames1f@plala.or.jp,'240.83.81.10',1995-06-08 11:46:23 -green,53,Louise,lcox1g@theglobeandmail.com,'105.11.82.145',2016-09-19 14:45:51 -green,54,Kenneth,kjohnson1h@independent.co.uk,'139.5.45.94',1976-08-17 11:26:19 
-green,55,Donna,dbrown1i@amazon.co.uk,'19.45.169.45',2006-05-27 16:51:40 -green,56,Johnny,jvasquez1j@trellian.com,'118.202.238.23',1975-11-17 08:42:32 -green,57,Patrick,pramirez1k@tamu.edu,'231.25.153.198',1997-08-06 11:51:09 -green,58,Helen,hlarson1l@prweb.com,'8.40.21.39',1993-08-04 19:53:40 -green,59,Patricia,pspencer1m@gmpg.org,'212.198.40.15',1977-08-03 16:37:27 -green,60,Joseph,jspencer1n@marriott.com,'13.15.63.238',2005-07-23 20:22:06 -green,61,Phillip,pschmidt1o@blogtalkradio.com,'177.98.201.190',1976-05-19 21:47:44 -green,62,Joan,jwebb1p@google.ru,'105.229.170.71',1972-09-07 17:53:47 -green,63,Phyllis,pkennedy1q@imgur.com,'35.145.8.244',2000-01-01 22:33:37 -green,64,Katherine,khunter1r@smh.com.au,'248.168.205.32',1991-01-09 06:40:24 -green,65,Laura,lvasquez1s@wiley.com,'128.129.115.152',1997-10-23 12:04:56 -green,66,Juan,jdunn1t@state.gov,'44.228.124.51',2004-11-10 05:07:35 -green,67,Judith,jholmes1u@wiley.com,'40.227.179.115',1977-08-02 17:01:45 -green,68,Beverly,bbaker1v@wufoo.com,'208.34.84.59',2016-03-06 20:07:23 -green,69,Lawrence,lcarr1w@flickr.com,'59.158.212.223',1988-09-13 06:07:21 -green,70,Gloria,gwilliams1x@mtv.com,'245.231.88.33',1995-03-18 22:32:46 -green,71,Steven,ssims1y@cbslocal.com,'104.50.58.255',2001-08-05 21:26:20 -green,72,Betty,bmills1z@arstechnica.com,'103.177.214.220',1981-12-14 21:26:54 -green,73,Mildred,mfuller20@prnewswire.com,'151.158.8.130',2000-04-19 10:13:55 -green,74,Donald,dday21@icq.com,'9.178.102.255',1972-12-03 00:58:24 -green,75,Eric,ethomas22@addtoany.com,'85.2.241.227',1992-11-01 05:59:30 -green,76,Joyce,jarmstrong23@sitemeter.com,'169.224.20.36',1985-10-24 06:50:01 -green,77,Maria,mmartinez24@amazonaws.com,'143.189.167.135',2005-10-05 05:17:42 -green,78,Harry,hburton25@youtube.com,'156.47.176.237',1978-03-26 05:53:33 -green,79,Kevin,klawrence26@hao123.com,'79.136.183.83',1994-10-12 04:38:52 -green,80,David,dhall27@prweb.com,'133.149.172.153',1976-12-15 16:24:24 -green,81,Kathy,kperry28@twitter.com,'229.242.72.228',1979-03-04 02:58:56 -green,82,Adam,aprice29@elegantthemes.com,'13.145.21.10',1982-11-07 11:46:59 -green,83,Brandon,bgriffin2a@va.gov,'73.249.128.212',2013-10-30 05:30:36 -green,84,Henry,hnguyen2b@discovery.com,'211.36.214.242',1985-01-09 06:37:27 -green,85,Eric,esanchez2c@edublogs.org,'191.166.188.251',2004-05-01 23:21:42 -green,86,Jason,jlee2d@jimdo.com,'193.92.16.182',1973-01-08 09:05:39 -green,87,Diana,drichards2e@istockphoto.com,'19.130.175.245',1994-10-05 22:50:49 -green,88,Andrea,awelch2f@abc.net.au,'94.155.233.96',2002-04-26 08:41:44 -green,89,Louis,lwagner2g@miitbeian.gov.cn,'26.217.34.111',2003-08-25 07:56:39 -green,90,Jane,jsims2h@seesaa.net,'43.4.220.135',1987-03-20 20:39:04 -green,91,Larry,lgrant2i@si.edu,'97.126.79.34',2000-09-07 20:26:19 -green,92,Louis,ldean2j@prnewswire.com,'37.148.40.127',2011-09-16 20:12:14 -green,93,Jennifer,jcampbell2k@xing.com,'38.106.254.142',1988-07-15 05:06:49 -green,94,Wayne,wcunningham2l@google.com.hk,'223.28.26.187',2009-12-15 06:16:54 -green,95,Lori,lstevens2m@icq.com,'181.250.181.58',1984-10-28 03:29:19 -green,96,Judy,jsimpson2n@marriott.com,'180.121.239.219',1986-02-07 15:18:10 -green,97,Phillip,phoward2o@usa.gov,'255.247.0.175',2002-12-26 08:44:45 -green,98,Gloria,gwalker2p@usa.gov,'156.140.7.128',1997-10-04 07:58:58 -green,99,Paul,pjohnson2q@umn.edu,'183.59.198.197',1991-11-14 12:33:55 -green,100,Frank,fgreene2r@blogspot.com,'150.143.68.121',2010-06-12 23:55:39 diff --git a/tests/integration/sources_test/test_sources.py b/tests/integration/sources_test/test_sources.py deleted file 
mode 100644 index a8b7017b9..000000000 --- a/tests/integration/sources_test/test_sources.py +++ /dev/null @@ -1,203 +0,0 @@ -import json -import os -from datetime import datetime, timedelta - -import yaml - -import dbt.tracking -import dbt.version -from dbt.events.functions import reset_metadata_vars -from tests.integration.base import DBTIntegrationTest, use_profile, AnyFloat, \ - AnyStringWith - - -class BaseSourcesTest(DBTIntegrationTest): - @property - def schema(self): - return "sources" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 'config-version': 2, - 'seed-paths': ['seeds'], - 'quoting': {'database': True, 'schema': True, 'identifier': True}, - 'seeds': { - 'quote_columns': True, - }, - } - - def setUp(self): - super().setUp() - os.environ['DBT_TEST_SCHEMA_NAME_VARIABLE'] = 'test_run_schema' - - def tearDown(self): - del os.environ['DBT_TEST_SCHEMA_NAME_VARIABLE'] - super().tearDown() - - def run_dbt_with_vars(self, cmd, *args, **kwargs): - vars_dict = { - 'test_run_schema': self.unique_schema(), - 'test_loaded_at': self.adapter.quote('updated_at'), - } - cmd.extend(['--vars', yaml.safe_dump(vars_dict)]) - return self.run_dbt(cmd, *args, **kwargs) - - -class SuccessfulSourcesTest(BaseSourcesTest): - def setUp(self): - super().setUp() - self.run_dbt_with_vars(['seed']) - self.maxDiff = None - self._id = 101 - # this is the db initial value - self.last_inserted_time = "2016-09-19T14:45:51+00:00" - os.environ['DBT_ENV_CUSTOM_ENV_key'] = 'value' - - def tearDown(self): - super().tearDown() - del os.environ['DBT_ENV_CUSTOM_ENV_key'] - - def _set_updated_at_to(self, delta): - insert_time = datetime.utcnow() + delta - timestr = insert_time.strftime("%Y-%m-%d %H:%M:%S") - # favorite_color,id,first_name,email,ip_address,updated_at - insert_id = self._id - self._id += 1 - raw_sql = """INSERT INTO {schema}.{source} - ({quoted_columns}) - VALUES ( - 'blue',{id},'Jake','abc@example.com','192.168.1.1','{time}' - )""" - quoted_columns = ','.join( - self.adapter.quote(c) if self.adapter_type != 'bigquery' else c - for c in - ('favorite_color', 'id', 'first_name', - 'email', 'ip_address', 'updated_at') - ) - self.run_sql( - raw_sql, - kwargs={ - 'schema': self.unique_schema(), - 'time': timestr, - 'id': insert_id, - 'source': self.adapter.quote('source'), - 'quoted_columns': quoted_columns, - } - ) - self.last_inserted_time = insert_time.strftime( - "%Y-%m-%dT%H:%M:%S+00:00") - - -class TestSourceFreshness(SuccessfulSourcesTest): - - def _assert_freshness_results(self, path, state): - self.assertTrue(os.path.exists(path)) - with open(path) as fp: - data = json.load(fp) - - assert set(data) == {'metadata', 'results', 'elapsed_time'} - assert 'generated_at' in data['metadata'] - assert isinstance(data['elapsed_time'], float) - self.assertBetween(data['metadata']['generated_at'], - self.freshness_start_time) - assert data['metadata']['dbt_schema_version'] == 'https://schemas.getdbt.com/dbt/sources/v3.json' - assert data['metadata']['dbt_version'] == dbt.version.__version__ - assert data['metadata']['invocation_id'] == dbt.tracking.active_user.invocation_id - key = 'key' - if os.name == 'nt': - key = key.upper() - assert data['metadata']['env'] == {key: 'value'} - - last_inserted_time = self.last_inserted_time - - self.assertEqual(len(data['results']), 1) - - self.assertEqual(data['results'], [ - { - 'unique_id': 'source.test.test_source.test_table', - 'max_loaded_at': last_inserted_time, - 'snapshotted_at': AnyStringWith(), - 
'max_loaded_at_time_ago_in_s': AnyFloat(), - 'status': state, - 'criteria': { - 'filter': None, - 'warn_after': {'count': 10, 'period': 'hour'}, - 'error_after': {'count': 18, 'period': 'hour'}, - }, - 'adapter_response': {}, - 'thread_id': AnyStringWith('Thread-'), - 'execution_time': AnyFloat(), - 'timing': [ - { - 'name': 'compile', - 'started_at': AnyStringWith(), - 'completed_at': AnyStringWith(), - }, - { - 'name': 'execute', - 'started_at': AnyStringWith(), - 'completed_at': AnyStringWith(), - } - ] - } - ]) - - def _run_source_freshness(self): - # test_source.test_table should have a loaded_at field of `updated_at` - # and a freshness of warn_after: 10 hours, error_after: 18 hours - # by default, our data set is way out of date! - self.freshness_start_time = datetime.utcnow() - results = self.run_dbt_with_vars( - ['source', 'freshness', '-o', 'target/error_source.json'], - expect_pass=False - ) - self.assertEqual(len(results), 1) - self.assertEqual(results[0].status, 'error') - self._assert_freshness_results('target/error_source.json', 'error') - - self._set_updated_at_to(timedelta(hours=-12)) - self.freshness_start_time = datetime.utcnow() - results = self.run_dbt_with_vars( - ['source', 'freshness', '-o', 'target/warn_source.json'], - ) - self.assertEqual(len(results), 1) - self.assertEqual(results[0].status, 'warn') - self._assert_freshness_results('target/warn_source.json', 'warn') - - self._set_updated_at_to(timedelta(hours=-2)) - self.freshness_start_time = datetime.utcnow() - results = self.run_dbt_with_vars( - ['source', 'freshness', '-o', 'target/pass_source.json'], - ) - self.assertEqual(len(results), 1) - self.assertEqual(results[0].status, 'pass') - self._assert_freshness_results('target/pass_source.json', 'pass') - - @use_profile('redshift') - def test_redshift_source_freshness(self): - reset_metadata_vars() - self._run_source_freshness() - - -class TestUnquotedSources(SuccessfulSourcesTest): - @property - def project_config(self): - cfg = super().project_config - cfg['quoting'] = { - 'identifier': False, - 'schema': False, - 'database': False, - } - return cfg - - @use_profile('redshift') - def test_redshift_catalog(self): - reset_metadata_vars() - self.run_dbt_with_vars(['run']) - reset_metadata_vars() - self.run_dbt_with_vars(['docs', 'generate']) diff --git a/tests/integration/store_test_failures_tests/models/fine_model.sql b/tests/integration/store_test_failures_tests/models/fine_model.sql deleted file mode 100644 index 94b923a17..000000000 --- a/tests/integration/store_test_failures_tests/models/fine_model.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('people') }} diff --git a/tests/integration/store_test_failures_tests/models/fine_model_but_with_a_no_good_very_long_name.sql b/tests/integration/store_test_failures_tests/models/fine_model_but_with_a_no_good_very_long_name.sql deleted file mode 100644 index 97536ffaf..000000000 --- a/tests/integration/store_test_failures_tests/models/fine_model_but_with_a_no_good_very_long_name.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as quite_long_column_name diff --git a/tests/integration/store_test_failures_tests/models/problematic_model.sql b/tests/integration/store_test_failures_tests/models/problematic_model.sql deleted file mode 100644 index e780d6b00..000000000 --- a/tests/integration/store_test_failures_tests/models/problematic_model.sql +++ /dev/null @@ -1,11 +0,0 @@ -select * from {{ ref('people') }} - -union all - -select * from {{ ref('people') }} -where id in (1,2) - -union all - -select null as id, 
first_name, last_name, email, gender, ip_address from {{ ref('people') }} -where id in (3,4) diff --git a/tests/integration/store_test_failures_tests/models/schema.yml b/tests/integration/store_test_failures_tests/models/schema.yml deleted file mode 100644 index f01a9e350..000000000 --- a/tests/integration/store_test_failures_tests/models/schema.yml +++ /dev/null @@ -1,40 +0,0 @@ -version: 2 - -models: - - - name: fine_model - columns: - - name: id - tests: - - unique - - not_null - - - name: problematic_model - columns: - - name: id - tests: - - unique: - store_failures: true - - not_null - - name: first_name - tests: - # test truncation of really long test name - - accepted_values: - values: - - Jack - - Kathryn - - Gerald - - Bonnie - - Harold - - Jacqueline - - Wanda - - Craig - # - Gary - # - Rose - - - name: fine_model_but_with_a_no_good_very_long_name - columns: - - name: quite_long_column_name - tests: - # test truncation of really long test name with builtin - - unique diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_accepted_values.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_accepted_values.csv deleted file mode 100644 index 02f28435b..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_accepted_values.csv +++ /dev/null @@ -1,3 +0,0 @@ -value_field,n_records -Gary,1 -Rose,1 diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_failing_test.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_failing_test.csv deleted file mode 100644 index d9e7257f1..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_failing_test.csv +++ /dev/null @@ -1,11 +0,0 @@ -id,first_name,last_name,email,gender,ip_address -1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 -2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 -3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 -4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 -5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 -6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 -7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 -8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 -9,Gary,Day,gday8@nih.gov,Male,35.81.68.186 -10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_not_null_problematic_model_id.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_not_null_problematic_model_id.csv deleted file mode 100644 index 95fef8a25..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_not_null_problematic_model_id.csv +++ /dev/null @@ -1,3 +0,0 @@ -id,first_name,last_name,email,gender,ip_address -,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 -,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 diff --git a/tests/integration/store_test_failures_tests/seeds/expected/expected_unique_problematic_model_id.csv b/tests/integration/store_test_failures_tests/seeds/expected/expected_unique_problematic_model_id.csv deleted file mode 100644 index 431d54ef8..000000000 --- a/tests/integration/store_test_failures_tests/seeds/expected/expected_unique_problematic_model_id.csv +++ /dev/null @@ -1,3 +0,0 @@ -unique_field,n_records -2,2 -1,2 \ No newline at end of file diff --git a/tests/integration/store_test_failures_tests/seeds/people.csv b/tests/integration/store_test_failures_tests/seeds/people.csv deleted file mode 
100644
index d9e7257f1..000000000
--- a/tests/integration/store_test_failures_tests/seeds/people.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-id,first_name,last_name,email,gender,ip_address
-1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
-2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
-3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
-4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
-5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
-6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
-7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
-8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
-9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
-10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
diff --git a/tests/integration/store_test_failures_tests/test_store_test_failures.py b/tests/integration/store_test_failures_tests/test_store_test_failures.py
deleted file mode 100644
index 9cc05c211..000000000
--- a/tests/integration/store_test_failures_tests/test_store_test_failures.py
+++ /dev/null
@@ -1,93 +0,0 @@
-from tests.integration.base import DBTIntegrationTest, FakeArgs, use_profile
-
-
-class TestStoreTestFailures(DBTIntegrationTest):
-    @property
-    def schema(self):
-        return "test_store_test_failures"
-
-    def tearDown(self):
-        test_audit_schema = self.unique_schema() + "_dbt_test__audit"
-        with self.adapter.connection_named('__test'):
-            self._drop_schema_named(self.default_database, test_audit_schema)
-
-        super().tearDown()
-
-    @property
-    def models(self):
-        return "models"
-
-    @property
-    def project_config(self):
-        return {
-            "config-version": 2,
-            "test-paths": ["tests"],
-            "seeds": {
-                "quote_columns": False,
-                "test": {
-                    "expected": self.column_type_overrides()
-                },
-            },
-        }
-
-    def column_type_overrides(self):
-        return {}
-
-    def run_tests_store_one_failure(self):
-        test_audit_schema = self.unique_schema() + "_dbt_test__audit"
-
-        self.run_dbt(["seed"])
-        self.run_dbt(["run"])
-        self.run_dbt(["test"], expect_pass=False)
-
-        # one test is configured with store_failures: true, make sure it worked
-        self.assertTablesEqual("unique_problematic_model_id", "expected_unique_problematic_model_id", test_audit_schema)
-
-    def run_tests_store_failures_and_assert(self):
-        test_audit_schema = self.unique_schema() + "_dbt_test__audit"
-
-        self.run_dbt(["seed"])
-        self.run_dbt(["run"])
-        # make sure this works idempotently for all tests
-        self.run_dbt(["test", "--store-failures"], expect_pass=False)
-        results = self.run_dbt(["test", "--store-failures"], expect_pass=False)
-
-        # compare test results
-        actual = [(r.status, r.failures) for r in results]
-        expected = [('pass', 0), ('pass', 0), ('pass', 0), ('pass', 0),
-                    ('fail', 2), ('fail', 2), ('fail', 2), ('fail', 10),]
-        self.assertEqual(sorted(actual), sorted(expected))
-
-        # compare test results stored in database
-        self.assertTablesEqual("failing_test", "expected_failing_test", test_audit_schema)
-        self.assertTablesEqual("not_null_problematic_model_id", "expected_not_null_problematic_model_id", test_audit_schema)
-        self.assertTablesEqual("unique_problematic_model_id", "expected_unique_problematic_model_id", test_audit_schema)
-        self.assertTablesEqual("accepted_values_problematic_mo_c533ab4ca65c1a9dbf14f79ded49b628", "expected_accepted_values", test_audit_schema)
-
-
-class RedshiftTestStoreTestFailures(TestStoreTestFailures):
-
-    def column_type_overrides(self):
-        return {
-            "expected_not_null_problematic_model_id": {
-                "+column_types": {
-                    "email": "varchar(26)",
-                    "first_name": "varchar(10)",
-                },
-            },
-            "expected_unique_problematic_model_id": {
-                "+column_types": {
-                    "n_records": "bigint",
-                },
-            },
-            "expected_accepted_values": {
-                "+column_types": {
-                    "value_field": "varchar(10)",
-                    "n_records": "bigint",
-                },
-            },
-        }
-
-    @use_profile('redshift')
-    def test__redshift__store_and_assert(self):
-        self.run_tests_store_failures_and_assert()
diff --git a/tests/integration/store_test_failures_tests/tests/failing_test.sql b/tests/integration/store_test_failures_tests/tests/failing_test.sql
deleted file mode 100644
index 1bb5ae5ba..000000000
--- a/tests/integration/store_test_failures_tests/tests/failing_test.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('fine_model') }}
diff --git a/tests/integration/store_test_failures_tests/tests/passing_test.sql b/tests/integration/store_test_failures_tests/tests/passing_test.sql
deleted file mode 100644
index 15c9a7a64..000000000
--- a/tests/integration/store_test_failures_tests/tests/passing_test.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-select * from {{ ref('fine_model') }}
-where false
diff --git a/tests/unit/mock_adapter.py b/tests/unit/mock_adapter.py
index cc2861e4e..8547480d1 100644
--- a/tests/unit/mock_adapter.py
+++ b/tests/unit/mock_adapter.py
@@ -1,16 +1,16 @@
 from unittest import mock
 from contextlib import contextmanager

-from dbt.adapters.base import BaseAdapter, PythonJobHelper
+from dbt.adapters.base import BaseAdapter


 def adapter_factory():
     class MockAdapter(BaseAdapter):
-        ConnectionManager = mock.MagicMock(TYPE='mock')
+        ConnectionManager = mock.MagicMock(TYPE="mock")
         responder = mock.MagicMock()
         # some convenient defaults
         responder.quote.side_effect = lambda identifier: '"{}"'.format(identifier)
-        responder.date_function.side_effect = lambda: 'unitdate()'
+        responder.date_function.side_effect = lambda: "unitdate()"
         responder.is_cancelable.side_effect = lambda: False

         @contextmanager
diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py
index 5b975d029..542387c0d 100644
--- a/tests/unit/test_context.py
+++ b/tests/unit/test_context.py
@@ -15,11 +15,7 @@ from dbt.contracts.graph.model_config import (
     NodeConfig,
 )
-from dbt.contracts.graph.nodes import (
-    ModelNode,
-    DependsOn,
-    Macro
-)
+from dbt.contracts.graph.nodes import ModelNode, DependsOn, Macro
 from dbt.context import providers
 from dbt.node_types import NodeType

@@ -27,73 +23,73 @@ class TestRuntimeWrapper(unittest.TestCase):
     def setUp(self):
         self.mock_config = mock.MagicMock()
-        self.mock_config.quoting = {
-            'database': True, 'schema': True, 'identifier': True}
+        self.mock_config.quoting = {"database": True, "schema": True, "identifier": True}
         adapter_class = adapter_factory()
         self.mock_adapter = adapter_class(self.mock_config)
         self.namespace = mock.MagicMock()
-        self.wrapper = providers.RuntimeDatabaseWrapper(
-            self.mock_adapter, self.namespace)
+        self.wrapper = providers.RuntimeDatabaseWrapper(self.mock_adapter, self.namespace)
         self.responder = self.mock_adapter.responder


 PROFILE_DATA = {
-    'target': 'test',
-    'quoting': {},
-    'outputs': {
-        'test': {
-            'type': 'redshift',
-            'host': 'localhost',
-            'schema': 'analytics',
-            'user': 'test',
-            'pass': 'test',
-            'dbname': 'test',
-            'port': 1,
+    "target": "test",
+    "quoting": {},
+    "outputs": {
+        "test": {
+            "type": "redshift",
+            "host": "localhost",
+            "schema": "analytics",
+            "user": "test",
+            "pass": "test",
+            "dbname": "test",
+            "port": 1,
         }
     },
 }

 PROJECT_DATA = {
-    'name': 'root',
-    'version': '0.1',
-    'profile': 'test',
-    'project-root': os.getcwd(),
-    'config-version': 2,
+    "name": "root",
+    "version": "0.1",
+    "profile": "test",
+    "project-root": os.getcwd(),
+    "config-version": 2,
 }


 def model():
     return ModelNode(
-        alias='model_one',
-        name='model_one',
-        database='dbt',
-        schema='analytics',
+        alias="model_one",
+        name="model_one",
+        database="dbt",
+        schema="analytics",
         resource_type=NodeType.Model,
-        unique_id='model.root.model_one',
-        fqn=['root', 'model_one'],
-        package_name='root',
-        original_file_path='model_one.sql',
-        root_path='/usr/src/app',
+        unique_id="model.root.model_one",
+        fqn=["root", "model_one"],
+        package_name="root",
+        original_file_path="model_one.sql",
+        root_path="/usr/src/app",
         refs=[],
         sources=[],
         depends_on=DependsOn(),
-        config=NodeConfig.from_dict({
-            'enabled': True,
-            'materialized': 'view',
-            'persist_docs': {},
-            'post-hook': [],
-            'pre-hook': [],
-            'vars': {},
-            'quoting': {},
-            'column_types': {},
-            'tags': [],
-        }),
+        config=NodeConfig.from_dict(
+            {
+                "enabled": True,
+                "materialized": "view",
+                "persist_docs": {},
+                "post-hook": [],
+                "pre-hook": [],
+                "vars": {},
+                "quoting": {},
+                "column_types": {},
+                "tags": [],
+            }
+        ),
         tags=[],
-        path='model_one.sql',
-        raw_sql='',
-        description='',
-        columns={}
+        path="model_one.sql",
+        raw_sql="",
+        description="",
+        columns={},
     )

@@ -101,8 +97,8 @@ def mock_macro(name, package_name):
     macro = mock.MagicMock(
         __class__=Macro,
         package_name=package_name,
-        resource_type='macro',
-        unique_id=f'macro.{package_name}.{name}',
+        resource_type="macro",
+        unique_id=f"macro.{package_name}.{name}",
     )
     # Mock(name=...) does not set the `name` attribute, this does.
     macro.name = name
@@ -111,7 +107,7 @@ def mock_macro(name, package_name):

 def mock_manifest(config):
     manifest_macros = {}
-    for name in ['macro_a', 'macro_b']:
+    for name in ["macro_a", "macro_b"]:
         macro = mock_macro(name, config.project_name)
         manifest_macros[macro.unique_id] = macro
     return mock.MagicMock(macros=manifest_macros)
@@ -120,47 +116,49 @@ def mock_manifest(config):

 def mock_model():
     return mock.MagicMock(
         __class__=ModelNode,
-        alias='model_one',
-        name='model_one',
-        database='dbt',
-        schema='analytics',
+        alias="model_one",
+        name="model_one",
+        database="dbt",
+        schema="analytics",
         resource_type=NodeType.Model,
-        unique_id='model.root.model_one',
-        fqn=['root', 'model_one'],
-        package_name='root',
-        original_file_path='model_one.sql',
-        root_path='/usr/src/app',
+        unique_id="model.root.model_one",
+        fqn=["root", "model_one"],
+        package_name="root",
+        original_file_path="model_one.sql",
+        root_path="/usr/src/app",
         refs=[],
         sources=[],
         depends_on=DependsOn(),
-        config=NodeConfig.from_dict({
-            'enabled': True,
-            'materialized': 'view',
-            'persist_docs': {},
-            'post-hook': [],
-            'pre-hook': [],
-            'vars': {},
-            'quoting': {},
-            'column_types': {},
-            'tags': [],
-        }),
+        config=NodeConfig.from_dict(
+            {
+                "enabled": True,
+                "materialized": "view",
+                "persist_docs": {},
+                "post-hook": [],
+                "pre-hook": [],
+                "vars": {},
+                "quoting": {},
+                "column_types": {},
+                "tags": [],
+            }
+        ),
         tags=[],
-        path='model_one.sql',
-        raw_sql='',
-        description='',
+        path="model_one.sql",
+        raw_sql="",
+        description="",
         columns={},
     )


 @pytest.fixture
 def get_adapter():
-    with mock.patch.object(providers, 'get_adapter') as patch:
+    with mock.patch.object(providers, "get_adapter") as patch:
         yield patch


 @pytest.fixture
 def get_include_paths():
-    with mock.patch.object(factory, 'get_include_paths') as patch:
+    with mock.patch.object(factory, "get_include_paths") as patch:
         patch.return_value = []
         yield patch
@@ -177,12 +175,12 @@ def manifest_fx(config):
 @pytest.fixture
 def manifest_extended(manifest_fx):
-    dbt_macro = mock_macro('default__some_macro', 'dbt')
+    dbt_macro = mock_macro("default__some_macro", "dbt")
     # same namespace, same name, different pkg!
-    rs_macro = mock_macro('redshift__some_macro', 'dbt_redshift')
+    rs_macro = mock_macro("redshift__some_macro", "dbt_redshift")
     # same name, different package
-    package_default_macro = mock_macro('default__some_macro', 'root')
-    package_rs_macro = mock_macro('redshift__some_macro', 'root')
+    package_default_macro = mock_macro("default__some_macro", "root")
+    package_rs_macro = mock_macro("redshift__some_macro", "root")
     manifest_fx.macros[dbt_macro.unique_id] = dbt_macro
     manifest_fx.macros[rs_macro.unique_id] = rs_macro
     manifest_fx.macros[package_default_macro.unique_id] = package_default_macro
@@ -200,8 +198,8 @@ def redshift_adapter(config, get_adapter):


 def test_resolve_specific(config, manifest_extended, redshift_adapter, get_include_paths):
-    rs_macro = manifest_extended.macros['macro.dbt_redshift.redshift__some_macro']
-    package_rs_macro = manifest_extended.macros['macro.root.redshift__some_macro']
+    rs_macro = manifest_extended.macros["macro.dbt_redshift.redshift__some_macro"]
+    package_rs_macro = manifest_extended.macros["macro.root.redshift__some_macro"]

     ctx = providers.generate_runtime_model_context(
         model=mock_model(),
@@ -209,24 +207,24 @@ def test_resolve_specific(config, manifest_extended, redshift_adapter, get_inclu
         manifest=manifest_extended,
     )

-    ctx['adapter'].config.dispatch
+    ctx["adapter"].config.dispatch

     # macro_a exists, but default__macro_a and redshift__macro_a do not
-    with pytest.raises(dbt.exceptions.CompilationException):
-        ctx['adapter'].dispatch('macro_a').macro
+    with pytest.raises(dbt.exceptions.CompilationError):
+        ctx["adapter"].dispatch("macro_a").macro

     # root namespace is always preferred, unless search order is explicitly defined in 'dispatch' config
-    assert ctx['adapter'].dispatch('some_macro').macro is package_rs_macro
-    assert ctx['adapter'].dispatch('some_macro', 'dbt').macro is package_rs_macro
-    assert ctx['adapter'].dispatch('some_macro', 'root').macro is package_rs_macro
+    assert ctx["adapter"].dispatch("some_macro").macro is package_rs_macro
+    assert ctx["adapter"].dispatch("some_macro", "dbt").macro is package_rs_macro
+    assert ctx["adapter"].dispatch("some_macro", "root").macro is package_rs_macro

     # override 'dbt' namespace search order, dispatch to 'root' first
-    ctx['adapter'].config.dispatch = [{'macro_namespace': 'dbt', 'search_order': ['root', 'dbt']}]
-    assert ctx['adapter'].dispatch('some_macro', macro_namespace='dbt').macro is package_rs_macro
+    ctx["adapter"].config.dispatch = [{"macro_namespace": "dbt", "search_order": ["root", "dbt"]}]
+    assert ctx["adapter"].dispatch("some_macro", macro_namespace="dbt").macro is package_rs_macro

     # override 'dbt' namespace search order, dispatch to 'dbt' only
-    ctx['adapter'].config.dispatch = [{'macro_namespace': 'dbt', 'search_order': ['dbt']}]
-    assert ctx['adapter'].dispatch('some_macro', macro_namespace='dbt').macro is rs_macro
+    ctx["adapter"].config.dispatch = [{"macro_namespace": "dbt", "search_order": ["dbt"]}]
+    assert ctx["adapter"].dispatch("some_macro", macro_namespace="dbt").macro is rs_macro

     # override 'root' namespace search order, dispatch to 'dbt' first
-    ctx['adapter'].config.dispatch = [{'macro_namespace': 'root', 'search_order': ['dbt', 'root']}]
+    ctx["adapter"].config.dispatch = [{"macro_namespace": "root", "search_order": ["dbt", "root"]}]
diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py
index 33c3dc1aa..a91eee59c 100644
--- a/tests/unit/test_redshift_adapter.py
+++ b/tests/unit/test_redshift_adapter.py
@@ -1,55 +1,56 @@
 import unittest
 from unittest import mock
-from unittest.mock import Mock
+from unittest.mock import Mock, call

 import agate
-import boto3
+import dbt
+import redshift_connector

 from dbt.adapters.redshift import (
     RedshiftAdapter,
     Plugin as RedshiftPlugin,
 )
 from dbt.clients import agate_helper
-from dbt.exceptions import FailedToConnectException
-
-from .utils import config_from_parts_or_dicts, mock_connection, TestAdapterConversions, inject_adapter
+from dbt.exceptions import FailedToConnectError
+from dbt.adapters.redshift.connections import RedshiftConnectMethodFactory, RedshiftSSLConfig
+from .utils import (
+    config_from_parts_or_dicts,
+    mock_connection,
+    TestAdapterConversions,
+    inject_adapter,
+)


-def fetch_cluster_credentials(*args, **kwargs):
-    return {
-        'DbUser': 'root',
-        'DbPassword': 'tmp_password'
-    }
+DEFAULT_SSL_CONFIG = RedshiftSSLConfig().to_dict()


 class TestRedshiftAdapter(unittest.TestCase):
-
     def setUp(self):
         profile_cfg = {
-            'outputs': {
-                'test': {
-                    'type': 'redshift',
-                    'dbname': 'redshift',
-                    'user': 'root',
-                    'host': 'thishostshouldnotexist',
-                    'pass': 'password',
-                    'port': 5439,
-                    'schema': 'public'
+            "outputs": {
+                "test": {
+                    "type": "redshift",
+                    "dbname": "redshift",
+                    "user": "root",
+                    "host": "thishostshouldnotexist.test.us-east-1",
+                    "pass": "password",
+                    "port": 5439,
+                    "schema": "public",
                 }
             },
-            'target': 'test'
+            "target": "test",
         }

         project_cfg = {
-            'name': 'X',
-            'version': '0.1',
-            'profile': 'test',
-            'project-root': '/tmp/dbt/does-not-exist',
-            'quoting': {
-                'identifier': False,
-                'schema': True,
+            "name": "X",
+            "version": "0.1",
+            "profile": "test",
+            "project-root": "/tmp/dbt/does-not-exist",
+            "quoting": {
+                "identifier": False,
+                "schema": True,
             },
-            'config-version': 2,
+            "config-version": 2,
         }

         self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
@@ -62,317 +63,512 @@ def adapter(self):
         inject_adapter(self._adapter, RedshiftPlugin)
         return self._adapter

-    def test_implicit_database_conn(self):
-        creds = RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
-        self.assertEqual(creds, self.config.credentials)
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_explicit_region_with_database_conn(self):
+        self.config.method = "database"
+
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            user="root",
+            password="password",
+            port=5439,
+            auto_create=False,
+            db_groups=[],
+            region=None,
+            timeout=None,
+            **DEFAULT_SSL_CONFIG,
+        )

-    def test_explicit_database_conn(self):
-        self.config.method = 'database'
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_explicit_iam_conn_without_profile(self):
+        self.config.credentials = self.config.credentials.replace(
+            method="iam",
+            cluster_id="my_redshift",
+            host="thishostshouldnotexist.test.us-east-1",
+        )
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            iam=True,
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            db_user="root",
+            password="",
+            user="",
+            cluster_identifier="my_redshift",
+            region=None,
+            timeout=None,
+            auto_create=False,
+            db_groups=[],
+            profile=None,
+            port=5439,
+            **DEFAULT_SSL_CONFIG,
+        )

-        creds = RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
-        self.assertEqual(creds, self.config.credentials)
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_conn_timeout_30(self):
+        self.config.credentials = self.config.credentials.replace(connect_timeout=30)
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            user="root",
+            password="password",
+            port=5439,
+            auto_create=False,
+            db_groups=[],
+            region=None,
+            timeout=30,
+            **DEFAULT_SSL_CONFIG,
+        )

-    def test_explicit_iam_conn(self):
+    @mock.patch("redshift_connector.connect", Mock())
+    @mock.patch("boto3.Session", Mock())
+    def test_explicit_iam_conn_with_profile(self):
         self.config.credentials = self.config.credentials.replace(
-            method='iam',
-            cluster_id='my_redshift',
-            iam_duration_seconds=1200
+            method="iam",
+            cluster_id="my_redshift",
+            iam_profile="test",
+            host="thishostshouldnotexist.test.us-east-1",
+        )
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+
+        redshift_connector.connect.assert_called_once_with(
+            iam=True,
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            cluster_identifier="my_redshift",
+            region=None,
+            auto_create=False,
+            db_groups=[],
+            db_user="root",
+            password="",
+            user="",
+            profile="test",
+            timeout=None,
+            port=5439,
+            **DEFAULT_SSL_CONFIG,
         )

-        with mock.patch.object(
-            RedshiftAdapter.ConnectionManager,
-            'fetch_cluster_credentials',
-            new=fetch_cluster_credentials
-        ):
-            creds = RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
+    @mock.patch("redshift_connector.connect", Mock())
+    @mock.patch("boto3.Session", Mock())
+    def test_explicit_iam_serverless_with_profile(self):
+        self.config.credentials = self.config.credentials.replace(
+            method="iam",
+            iam_profile="test",
+            host="doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com",
+        )
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            iam=True,
+            host="doesnotexist.1233.us-east-2.redshift-serverless.amazonaws.com",
+            database="redshift",
+            cluster_identifier=None,
+            region=None,
+            auto_create=False,
+            db_groups=[],
+            db_user="root",
+            password="",
+            user="",
+            profile="test",
+            timeout=None,
+            port=5439,
+            **DEFAULT_SSL_CONFIG,
+        )

-        expected_creds = self.config.credentials.replace(password='tmp_password')
-        self.assertEqual(creds, expected_creds)
+    @mock.patch("redshift_connector.connect", Mock())
+    @mock.patch("boto3.Session", Mock())
+    def test_explicit_region(self):
+        # Successful test
+        self.config.credentials = self.config.credentials.replace(
+            method="iam",
+            iam_profile="test",
+            host="doesnotexist.1233.redshift-serverless.amazonaws.com",
+            region="us-east-2",
+        )
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            iam=True,
+            host="doesnotexist.1233.redshift-serverless.amazonaws.com",
+            database="redshift",
+            cluster_identifier=None,
+            region="us-east-2",
+            auto_create=False,
+            db_groups=[],
+            db_user="root",
+            password="",
+            user="",
+            profile="test",
+            timeout=None,
+            port=5439,
+            **DEFAULT_SSL_CONFIG,
+        )

-    def test_iam_conn_optionals(self):
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_sslmode_disable(self):
+        self.config.credentials.sslmode = "disable"
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            user="root",
+            password="password",
+            port=5439,
+            auto_create=False,
+            db_groups=[],
+            region=None,
+            timeout=None,
+            ssl=False,
+            sslmode=None,
+        )
+
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_sslmode_allow(self):
+        self.config.credentials.sslmode = "allow"
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            user="root",
+            password="password",
+            port=5439,
+            auto_create=False,
+            db_groups=[],
+            region=None,
+            timeout=None,
+            ssl=True,
+            sslmode="verify-ca",
+        )
+
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_sslmode_verify_full(self):
+        self.config.credentials.sslmode = "verify-full"
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            user="root",
+            password="password",
+            port=5439,
+            auto_create=False,
+            db_groups=[],
+            region=None,
+            timeout=None,
+            ssl=True,
+            sslmode="verify-full",
+        )
+
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_sslmode_verify_ca(self):
+        self.config.credentials.sslmode = "verify-ca"
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            user="root",
+            password="password",
+            port=5439,
+            auto_create=False,
+            db_groups=[],
+            region=None,
+            timeout=None,
+            ssl=True,
+            sslmode="verify-ca",
+        )
+
+    @mock.patch("redshift_connector.connect", Mock())
+    def test_sslmode_prefer(self):
+        self.config.credentials.sslmode = "prefer"
+        connection = self.adapter.acquire_connection("dummy")
+        connection.handle
+        redshift_connector.connect.assert_called_once_with(
+            host="thishostshouldnotexist.test.us-east-1",
+            database="redshift",
+            user="root",
+            password="password",
+            port=5439,
+            auto_create=False,
+            db_groups=[],
+            region=None,
+            timeout=None,
+            ssl=True,
+            sslmode="verify-ca",
+        )
+
+    @mock.patch("redshift_connector.connect", Mock())
+    @mock.patch("boto3.Session", Mock())
+    def test_serverless_iam_failure(self):
+        self.config.credentials = self.config.credentials.replace(
+            method="iam",
+            iam_profile="test",
+            host="doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com",
+        )
+        with self.assertRaises(dbt.exceptions.FailedToConnectError) as context:
+            connection = self.adapter.acquire_connection("dummy")
+            connection.handle
+            redshift_connector.connect.assert_called_once_with(
+                iam=True,
+                host="doesnotexist.1233.us-east-2.redshift-srvrlss.amazonaws.com",
+                database="redshift",
+                cluster_identifier=None,
+                region=None,
+                auto_create=False,
+                db_groups=[],
+                db_user="root",
+                password="",
+                user="",
+                profile="test",
+                port=5439,
+                timeout=None,
+                **DEFAULT_SSL_CONFIG,
+            )
+        self.assertTrue("'host' must be provided" in context.exception.msg)
+
+    def test_iam_conn_optionals(self):
         profile_cfg = {
-            'outputs': {
-                'test': {
-                    'type': 'redshift',
-                    'dbname': 'redshift',
-                    'user': 'root',
-                    'host': 'thishostshouldnotexist',
-                    'port': 5439,
-                    'schema': 'public',
-                    'method': 'iam',
-                    'cluster_id': 'my_redshift',
-                    'db_groups': ["my_dbgroup"],
-                    'autocreate': True,
+            "outputs": {
+                "test": {
+                    "type": "redshift",
+                    "dbname": "redshift",
+                    "user": "root",
+                    "host": "thishostshouldnotexist",
+                    "port": 5439,
+                    "schema": "public",
+                    "method": "iam",
+                    "cluster_id": "my_redshift",
+                    "db_groups": ["my_dbgroup"],
+                    "autocreate": True,
                 }
             },
-            'target': 'test'
+            "target": "test",
         }

         config_from_parts_or_dicts(self.config, profile_cfg)

     def test_invalid_auth_method(self):
         # we have to set method this way, otherwise it won't validate
-        self.config.credentials.method = 'badmethod'
-
-        with self.assertRaises(FailedToConnectException) as context:
-            with mock.patch.object(
-                RedshiftAdapter.ConnectionManager,
-                'fetch_cluster_credentials',
-                new=fetch_cluster_credentials
-            ):
-                RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
-
-        self.assertTrue('badmethod' in context.exception.msg)
+        self.config.credentials.method = "badmethod"
+        with self.assertRaises(FailedToConnectError) as context:
+            connect_method_factory = RedshiftConnectMethodFactory(self.config.credentials)
+            connect_method_factory.get_connect_method()
+        self.assertTrue("badmethod" in context.exception.msg)

     def test_invalid_iam_no_cluster_id(self):
-        self.config.credentials = self.config.credentials.replace(method='iam')
-        with self.assertRaises(FailedToConnectException) as context:
-            with mock.patch.object(
-                RedshiftAdapter.ConnectionManager,
-                'fetch_cluster_credentials',
-                new=fetch_cluster_credentials
-            ):
-                RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
+        self.config.credentials = self.config.credentials.replace(method="iam")
+        with self.assertRaises(FailedToConnectError) as context:
+            connect_method_factory = RedshiftConnectMethodFactory(self.config.credentials)
+            connect_method_factory.get_connect_method()

         self.assertTrue("'cluster_id' must be provided" in context.exception.msg)

-    def test_default_session_is_not_used_when_iam_used(self):
-        boto3.DEFAULT_SESSION = Mock()
-        self.config.credentials = self.config.credentials.replace(method='iam')
-        self.config.credentials.cluster_id = 'clusterid'
-        with mock.patch('dbt.adapters.redshift.connections.boto3.Session'):
-            RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
-            self.assertEqual(
-                boto3.DEFAULT_SESSION.client.call_count,
-                0,
-                "The redshift client should not be created using "
-                "the default session because the session object is not thread-safe"
-            )
-
-    def test_default_session_is_not_used_when_iam_not_used(self):
-        boto3.DEFAULT_SESSION = Mock()
-        self.config.credentials = self.config.credentials.replace(method=None)
-        with mock.patch('dbt.adapters.redshift.connections.boto3.Session'):
-            RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
-            self.assertEqual(
-                boto3.DEFAULT_SESSION.client.call_count, 0,
-                "The redshift client should not be created using "
-                "the default session because the session object is not thread-safe"
-            )
-
     def test_cancel_open_connections_empty(self):
         self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0)

     def test_cancel_open_connections_master(self):
         key = self.adapter.connections.get_thread_identifier()
-        self.adapter.connections.thread_connections[key] = mock_connection('master')
+        self.adapter.connections.thread_connections[key] = mock_connection("master")
         self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0)

     def test_cancel_open_connections_single(self):
-        master = mock_connection('master')
-        model = mock_connection('model')
-        model.handle.get_backend_pid.return_value = 42
+        master = mock_connection("master")
+        model = mock_connection("model")

         key = self.adapter.connections.get_thread_identifier()
-        self.adapter.connections.thread_connections.update({
-            key: master,
-            1: model,
-        })
-        with mock.patch.object(self.adapter.connections, 'add_query') as add_query:
+        self.adapter.connections.thread_connections.update(
+            {
+                key: master,
+                1: model,
+            }
+        )
+        with mock.patch.object(self.adapter.connections, "add_query") as add_query:
             query_result = mock.MagicMock()
-            add_query.return_value = (None, query_result)
+            cursor = mock.Mock()
+            cursor.fetchone.return_value = 42
+            add_query.side_effect = [(None, cursor), (None, query_result)]

             self.assertEqual(len(list(self.adapter.cancel_open_connections())), 1)
-
-        add_query.assert_called_once_with('select pg_terminate_backend(42)')
+            add_query.assert_has_calls(
+                [
+                    call("select pg_backend_pid()"),
+                    call("select pg_terminate_backend(42)"),
+                ]
+            )

         master.handle.get_backend_pid.assert_not_called()

-    @mock.patch('dbt.adapters.postgres.connections.psycopg2')
-    def test_default_keepalive(self, psycopg2):
-        connection = self.adapter.acquire_connection('dummy')
-
-        psycopg2.connect.assert_not_called()
-        connection.handle  # this "property" changes the state of the class
-        psycopg2.connect.assert_called_once_with(
-            dbname='redshift',
-            user='root',
-            host='thishostshouldnotexist',
-            password='password',
-            port=5439,
-            connect_timeout=10,
-            keepalives_idle=4,
-            application_name='dbt'
-        )
-
-    @mock.patch('dbt.adapters.postgres.connections.psycopg2')
-    def test_changed_keepalive(self, psycopg2):
-        self.config.credentials = self.config.credentials.replace(keepalives_idle=5)
-        connection = self.adapter.acquire_connection('dummy')
-
-        psycopg2.connect.assert_not_called()
-        connection.handle  # this "property" changes the state of the class
-        psycopg2.connect.assert_called_once_with(
-            dbname='redshift',
-            user='root',
-            host='thishostshouldnotexist',
-            password='password',
-            port=5439,
-            connect_timeout=10,
-            keepalives_idle=5,
-            application_name='dbt')
-
-    @mock.patch('dbt.adapters.postgres.connections.psycopg2')
-    def test_search_path(self, psycopg2):
-        self.config.credentials = self.config.credentials.replace(search_path="test")
-        connection = self.adapter.acquire_connection('dummy')
-
-        psycopg2.connect.assert_not_called()
-        connection.handle  # this "property" changes the state of the class
-        psycopg2.connect.assert_called_once_with(
-            dbname='redshift',
-            user='root',
-            host='thishostshouldnotexist',
-            password='password',
-            port=5439,
-            connect_timeout=10,
-            options="-c search_path=test",
-            keepalives_idle=4,
-            application_name='dbt')
-
-    @mock.patch('dbt.adapters.postgres.connections.psycopg2')
-    def test_search_path_with_space(self, psycopg2):
-        self.config.credentials = self.config.credentials.replace(search_path="test test")
-        connection = self.adapter.acquire_connection('dummy')
-
-        psycopg2.connect.assert_not_called()
-        connection.handle  # this "property" changes the state of the class
-        psycopg2.connect.assert_called_once_with(
-            dbname='redshift',
-            user='root',
-            host='thishostshouldnotexist',
-            password='password',
-            port=5439,
-            connect_timeout=10,
-            options=r"-c search_path=test\ test",
-            keepalives_idle=4,
-            application_name='dbt')
-
-    @mock.patch('dbt.adapters.postgres.connections.psycopg2')
-    def test_set_zero_keepalive(self, psycopg2):
-        self.config.credentials = self.config.credentials.replace(keepalives_idle=0)
-        connection = self.adapter.acquire_connection('dummy')
-
-        psycopg2.connect.assert_not_called()
-        connection.handle  # this "property" changes the state of the class
-        psycopg2.connect.assert_called_once_with(
-            dbname='redshift',
-            user='root',
-            host='thishostshouldnotexist',
-            password='password',
-            port=5439,
-            connect_timeout=10,
-            application_name='dbt')
-
     def test_dbname_verification_is_case_insensitive(self):
         # Override adapter settings from setUp()
         profile_cfg = {
-            'outputs': {
-                'test': {
-                    'type': 'redshift',
-                    'dbname': 'Redshift',
-                    'user': 'root',
-                    'host': 'thishostshouldnotexist',
-                    'pass': 'password',
-                    'port': 5439,
-                    'schema': 'public'
+            "outputs": {
+                "test": {
+                    "type": "redshift",
+                    "dbname": "Redshift",
+                    "user": "root",
+                    "host": "thishostshouldnotexist",
+                    "pass": "password",
+                    "port": 5439,
+                    "schema": "public",
                 }
             },
-            'target': 'test'
+            "target": "test",
         }

         project_cfg = {
-            'name': 'X',
-            'version': '0.1',
-            'profile': 'test',
-            'project-root': '/tmp/dbt/does-not-exist',
-            'quoting': {
-                'identifier': False,
-                'schema': True,
+            "name": "X",
+            "version": "0.1",
+            "profile": "test",
+            "project-root": "/tmp/dbt/does-not-exist",
+            "quoting": {
+                "identifier": False,
+                "schema": True,
             },
-            'config-version': 2,
+            "config-version": 2,
         }

         self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
         self.adapter.cleanup_connections()
         self._adapter = RedshiftAdapter(self.config)
-        self.adapter.verify_database('redshift')
+        self.adapter.verify_database("redshift")
+
+    def test_execute_with_fetch(self):
+        cursor = mock.Mock()
+        table = dbt.clients.agate_helper.empty_table()
+        with mock.patch.object(self.adapter.connections, "add_query") as mock_add_query:
+            mock_add_query.return_value = (
+                None,
+                cursor,
+            )  # whenever mock_add_query is called, it returns (None, cursor)
+            with mock.patch.object(self.adapter.connections, "get_response") as mock_get_response:
+                mock_get_response.return_value = None
+                with mock.patch.object(
+                    self.adapter.connections, "get_result_from_cursor"
+                ) as mock_get_result_from_cursor:
+                    mock_get_result_from_cursor.return_value = table
+                    self.adapter.connections.execute(sql="select * from test", fetch=True)
+        mock_add_query.assert_called_once_with("select * from test", False)
+        mock_get_result_from_cursor.assert_called_once_with(cursor, None)
+        mock_get_response.assert_called_once_with(cursor)
+
+    def test_execute_without_fetch(self):
+        cursor = mock.Mock()
+        with mock.patch.object(self.adapter.connections, "add_query") as mock_add_query:
+            mock_add_query.return_value = (
+                None,
+                cursor,
+            )  # whenever mock_add_query is called, it returns (None, cursor)
+            with mock.patch.object(self.adapter.connections, "get_response") as mock_get_response:
+                mock_get_response.return_value = None
+                with mock.patch.object(
+                    self.adapter.connections, "get_result_from_cursor"
+                ) as mock_get_result_from_cursor:
+                    self.adapter.connections.execute(sql="select * from test2", fetch=False)
+        mock_add_query.assert_called_once_with("select * from test2", False)
+        mock_get_result_from_cursor.assert_not_called()
+        mock_get_response.assert_called_once_with(cursor)
+
+    def test_add_query_with_no_cursor(self):
+        with mock.patch.object(
+            self.adapter.connections, "get_thread_connection"
+        ) as mock_get_thread_connection:
+            mock_get_thread_connection.return_value = None
+            with self.assertRaisesRegex(
+                dbt.exceptions.DbtRuntimeError, "Tried to run invalid SQL: on "
+            ):
+                self.adapter.connections.add_query(sql="")
+        mock_get_thread_connection.assert_called_once()
+
+    def test_add_query_success(self):
+        cursor = mock.Mock()
+        with mock.patch.object(
+            dbt.adapters.redshift.connections.SQLConnectionManager, "add_query"
+        ) as mock_add_query:
+            mock_add_query.return_value = None, cursor
+            self.adapter.connections.add_query("select * from test3")
+            mock_add_query.assert_called_once_with(
+                "select * from test3", True, bindings=None, abridge_sql_log=False
+            )


 class TestRedshiftAdapterConversions(TestAdapterConversions):
     def test_convert_text_type(self):
         rows = [
-            ['', 'a1', 'stringval1'],
-            ['', 'a2', 'stringvalasdfasdfasdfa'],
-            ['', 'a3', 'stringval3'],
+            ["", "a1", "stringval1"],
+            ["", "a2", "stringvalasdfasdfasdfa"],
+            ["", "a3", "stringval3"],
         ]
         agate_table = self._make_table_of(rows, agate.Text)
-        expected = ['varchar(64)', 'varchar(2)', 'varchar(22)']
+        expected = ["varchar(64)", "varchar(2)", "varchar(22)"]
         for col_idx, expect in enumerate(expected):
             assert RedshiftAdapter.convert_text_type(agate_table, col_idx) == expect

     def test_convert_number_type(self):
         rows = [
-            ['', '23.98', '-1'],
-            ['', '12.78', '-2'],
-            ['', '79.41', '-3'],
+            ["", "23.98", "-1"],
+            ["", "12.78", "-2"],
+            ["", "79.41", "-3"],
         ]
         agate_table = self._make_table_of(rows, agate.Number)
-        expected = ['integer', 'float8', 'integer']
+        expected = ["integer", "float8", "integer"]
         for col_idx, expect in enumerate(expected):
             assert RedshiftAdapter.convert_number_type(agate_table, col_idx) == expect

     def test_convert_boolean_type(self):
         rows = [
-            ['', 'false', 'true'],
-            ['', 'false', 'false'],
-            ['', 'false', 'true'],
+            ["", "false", "true"],
+            ["", "false", "false"],
+            ["", "false", "true"],
         ]
         agate_table = self._make_table_of(rows, agate.Boolean)
-        expected = ['boolean', 'boolean', 'boolean']
+        expected = ["boolean", "boolean", "boolean"]
         for col_idx, expect in enumerate(expected):
             assert RedshiftAdapter.convert_boolean_type(agate_table, col_idx) == expect

     def test_convert_datetime_type(self):
         rows = [
-            ['', '20190101T01:01:01Z', '2019-01-01 01:01:01'],
-            ['', '20190102T01:01:01Z', '2019-01-01 01:01:01'],
-            ['', '20190103T01:01:01Z', '2019-01-01 01:01:01'],
+            ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"],
+            ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"],
+            ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"],
+        ]
+        agate_table = self._make_table_of(
+            rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime]
+        )
+        expected = [
+            "timestamp without time zone",
+            "timestamp without time zone",
+            "timestamp without time zone",
         ]
-        agate_table = self._make_table_of(rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime])
-        expected = ['timestamp without time zone', 'timestamp without time zone', 'timestamp without time zone']
         for col_idx, expect in enumerate(expected):
             assert RedshiftAdapter.convert_datetime_type(agate_table, col_idx) == expect

     def test_convert_date_type(self):
         rows = [
-            ['', '2019-01-01', '2019-01-04'],
-            ['', '2019-01-02', '2019-01-04'],
-            ['', '2019-01-03', '2019-01-04'],
+            ["", "2019-01-01", "2019-01-04"],
+            ["", "2019-01-02", "2019-01-04"],
+            ["", "2019-01-03", "2019-01-04"],
         ]
         agate_table = self._make_table_of(rows, agate.Date)
-        expected = ['date', 'date', 'date']
+        expected = ["date", "date", "date"]
         for col_idx, expect in enumerate(expected):
             assert RedshiftAdapter.convert_date_type(agate_table, col_idx) == expect

     def test_convert_time_type(self):
         # dbt's default type testers actually don't have a TimeDelta at all.
         rows = [
-            ['', '120s', '10s'],
-            ['', '3m', '11s'],
-            ['', '1h', '12s'],
+            ["", "120s", "10s"],
+            ["", "3m", "11s"],
+            ["", "1h", "12s"],
         ]
         agate_table = self._make_table_of(rows, agate.TimeDelta)
-        expected = ['varchar(24)', 'varchar(24)', 'varchar(24)']
+        expected = ["varchar(24)", "varchar(24)", "varchar(24)"]
         for col_idx, expect in enumerate(expected):
             assert RedshiftAdapter.convert_time_type(agate_table, col_idx) == expect
diff --git a/tests/unit/utils.py b/tests/unit/utils.py
index a2a0147ac..f2ca418e3 100644
--- a/tests/unit/utils.py
+++ b/tests/unit/utils.py
@@ -26,25 +26,33 @@ def normalize(path):


 class Obj:
-    which = 'blah'
+    which = "blah"
     single_threaded = False


-def mock_connection(name, state='open'):
+def mock_connection(name, state="open"):
     conn = mock.MagicMock()
     conn.name = name
     conn.state = state
     return conn


-def profile_from_dict(profile, profile_name, cli_vars='{}'):
+def profile_from_dict(profile, profile_name, cli_vars="{}"):
     from dbt.config import Profile
     from dbt.config.renderer import ProfileRenderer
     from dbt.config.utils import parse_cli_vars
+
     if not isinstance(cli_vars, dict):
         cli_vars = parse_cli_vars(cli_vars)

     renderer = ProfileRenderer(cli_vars)
+
+    # in order to call dbt's internal profile rendering, we need to set dbt's
+    # global flags. This is a bit of a hack, but it's the best way to do it.
+    from dbt.flags import set_from_args
+    from argparse import Namespace
+
+    set_from_args(Namespace(), None)
     return Profile.from_raw_profile_info(
         profile,
         profile_name,
@@ -52,15 +60,16 @@ def profile_from_dict(profile, profile_name, cli_vars='{}'):


-def project_from_dict(project, profile, packages=None, selectors=None, cli_vars='{}'):
+def project_from_dict(project, profile, packages=None, selectors=None, cli_vars="{}"):
     from dbt.config.renderer import DbtProjectYamlRenderer
     from dbt.config.utils import parse_cli_vars
+
     if not isinstance(cli_vars, dict):
         cli_vars = parse_cli_vars(cli_vars)

     renderer = DbtProjectYamlRenderer(profile, cli_vars)

-    project_root = project.pop('project-root', os.getcwd())
+    project_root = project.pop("project-root", os.getcwd())

     partial = PartialProject.from_dicts(
         project_root=project_root,
@@ -71,14 +80,18 @@ def project_from_dict(project, profile, packages=None, selectors=None, cli_vars=
     return partial.render(renderer)


-def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars='{}'):
+def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars="{}"):
     from dbt.config import Project, Profile, RuntimeConfig
+    from dbt.config.utils import parse_cli_vars
     from copy import deepcopy

+    if not isinstance(cli_vars, dict):
+        cli_vars = parse_cli_vars(cli_vars)
+
     if isinstance(project, Project):
         profile_name = project.profile_name
     else:
-        profile_name = project.get('profile')
+        profile_name = project.get("profile")

     if not isinstance(profile, Profile):
         profile = profile_from_dict(
@@ -98,16 +111,13 @@ def config_from_parts_or_dicts(project, profile, packages=None, selectors=None,

     args = Obj()
     args.vars = cli_vars
-    args.profile_dir = '/dev/null'
-    return RuntimeConfig.from_parts(
-        project=project,
-        profile=profile,
-        args=args
-    )
+    args.profile_dir = "/dev/null"
+    return RuntimeConfig.from_parts(project=project, profile=profile, args=args)


 def inject_plugin(plugin):
     from dbt.adapters.factory import FACTORY
+
     key = plugin.adapter.type()
     FACTORY.plugins[key] = plugin

@@ -115,8 +125,11 @@ def inject_plugin_for(config):
     # from dbt.adapters.postgres import Plugin, PostgresAdapter
     from dbt.adapters.factory import FACTORY
+
     FACTORY.load_plugin(config.credentials.type)
-    adapter = FACTORY.get_adapter(config)  # TODO: there's a get_adaptor function in factory.py, but no method on AdapterContainer
+    adapter = FACTORY.get_adapter(
+        config
+    )  # TODO: there's a get_adapter function in factory.py, but no method on AdapterContainer
     return adapter


@@ -126,12 +139,14 @@ def inject_adapter(value, plugin):
     """
     inject_plugin(plugin)
     from dbt.adapters.factory import FACTORY
+
     key = value.type()
     FACTORY.adapters[key] = value


 def clear_plugin(plugin):
     from dbt.adapters.factory import FACTORY
+
     key = plugin.adapter.type()
     FACTORY.plugins.pop(key, None)
     FACTORY.adapters.pop(key, None)
@@ -174,7 +189,7 @@ def compare_dicts(dict1, dict2):
     common_keys = set(first_set).intersection(set(second_set))
     found_differences = False
     for key in common_keys:
-        if dict1[key] != dict2[key] :
+        if dict1[key] != dict2[key]:
             print(f"--- --- first dict: {key}: {str(dict1[key])}")
             print(f"--- --- second dict: {key}: {str(dict2[key])}")
             found_differences = True
@@ -189,7 +204,7 @@ def assert_from_dict(obj, dct, cls=None):
         cls = obj.__class__
     cls.validate(dct)
     obj_from_dict = cls.from_dict(dct)
-    if hasattr(obj, 'created_at'):
+    if hasattr(obj, "created_at"):
         obj_from_dict.created_at = 1
         obj.created_at = 1
     assert obj_from_dict == obj
@@ -197,10 +212,10 @@ def assert_to_dict(obj, dct):
     obj_to_dict = obj.to_dict(omit_none=True)
-    if 'created_at' in obj_to_dict:
-        obj_to_dict['created_at'] = 1
-    if 'created_at' in dct:
-        dct['created_at'] = 1
+    if "created_at" in obj_to_dict:
+        obj_to_dict["created_at"] = 1
+    if "created_at" in dct:
+        dct["created_at"] = 1
     assert obj_to_dict == dct
@@ -216,10 +231,10 @@ def assert_fails_validation(dct, cls):


 class TestAdapterConversions(TestCase):
-
     @staticmethod
     def _get_tester_for(column_type):
         from dbt.clients import agate_helper
+
         if column_type is agate.TimeDelta:  # dbt never makes this!
             return agate.TimeDelta()
@@ -227,10 +242,10 @@ def _get_tester_for(column_type):
         if isinstance(instance, column_type):
             return instance

-        raise ValueError(f'no tester for {column_type}')
+        raise ValueError(f"no tester for {column_type}")

     def _make_table_of(self, rows, column_types):
-        column_names = list(string.ascii_letters[:len(rows[0])])
+        column_names = list(string.ascii_letters[: len(rows[0])])
         if isinstance(column_types, type):
             column_types = [self._get_tester_for(column_types) for _ in column_names]
         else:
@@ -241,6 +256,7 @@ def load_internal_manifest_macros(config, macro_hook=lambda m: None):
     from dbt.parser.manifest import ManifestLoader
+
     return ManifestLoader.load_macros(config, macro_hook)
diff --git a/tox.ini b/tox.ini
index 85d20f595..285c1e738 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,8 +1,8 @@
 [tox]
 skipsdist = True
-envlist = py37,py38,py39,py310,py311
+envlist = py38,py39,py310,py311

-[testenv:{unit,py37,py38,py39,py310,py311,py}]
+[testenv:{unit,py38,py39,py310,py311,py}]
 description = unit testing
 skip_install = true
 passenv =
@@ -13,7 +13,7 @@ deps =
   -rdev-requirements.txt
   -e.

-[testenv:{integration,py37,py38,py39,py310,py311,py}-{redshift}]
+[testenv:{integration,py38,py39,py310,py311,py}-{redshift}]
 description = adapter plugin integration testing
 skip_install = true
 passenv =
@@ -21,8 +21,8 @@ passenv =
   REDSHIFT_TEST_*
   PYTEST_ADDOPTS
 commands =
-  redshift: {envpython} -m pytest {posargs} -m profile_redshift tests/integration
-  redshift: {envpython} -m pytest {posargs} tests/functional
+  {envpython} -m pytest --dist=loadscope {posargs} tests/functional -k "not tests/functional/adapter/utils"
+  {envpython} -m pytest --dist=loadscope {posargs} tests/functional/adapter/utils
 deps =
   -rdev-requirements.txt
   -e.
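
The sslmode unit tests above pin down how the psycopg2-style `sslmode` profile values are translated into `redshift_connector` connect arguments: `disable` turns SSL off entirely, while `allow`, `prefer`, and `verify-ca` all connect with `ssl=True, sslmode="verify-ca"`, and `verify-full` keeps full certificate and hostname verification. A minimal Python sketch of that translation, read off the expected `redshift_connector.connect(...)` calls in the tests (the helper name `translate_sslmode` is hypothetical and not part of the adapter's API; only the five values exercised by the tests are handled):

    from typing import Optional, Tuple

    def translate_sslmode(sslmode: str) -> Tuple[bool, Optional[str]]:
        # "disable" turns SSL off entirely, per test_sslmode_disable
        if sslmode == "disable":
            return False, None
        # the opportunistic modes are upgraded to certificate verification
        if sslmode in ("allow", "prefer", "verify-ca"):
            return True, "verify-ca"
        # "verify-full" additionally verifies the server hostname
        if sslmode == "verify-full":
            return True, "verify-full"
        raise ValueError(f"unsupported sslmode: {sslmode!r}")

For example, `translate_sslmode("prefer")` yields `(True, "verify-ca")`, which is exactly the pair that `test_sslmode_prefer` asserts is passed to `redshift_connector.connect`.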