diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/pull_request_template.md
similarity index 100%
rename from .github/PULL_REQUEST_TEMPLATE/pull_request_template.md
rename to .github/pull_request_template.md
diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index bf9d83d97..278e85bf5 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -147,33 +147,12 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
- - name: Set up postgres (linux)
- if: |
- matrix.adapter == 'postgres' &&
- runner.os == 'Linux'
- uses: ./.github/actions/setup-postgres-linux
-
- - name: Set up postgres (macos)
- if: |
- matrix.adapter == 'postgres' &&
- runner.os == 'macOS'
- uses: ./.github/actions/setup-postgres-macos
-
- - name: Set up postgres (windows)
- if: |
- matrix.adapter == 'postgres' &&
- runner.os == 'Windows'
- uses: ./.github/actions/setup-postgres-windows
-
- name: Install python dependencies
run: |
pip install --upgrade pip
pip install tox
pip --version
tox --version
- - name: Run tox (postgres)
- if: matrix.adapter == 'postgres'
- run: tox
- name: Run tox (redshift)
if: matrix.adapter == 'redshift'
@@ -185,30 +164,6 @@ jobs:
REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
run: tox
- - name: Run tox (snowflake)
- if: matrix.adapter == 'snowflake'
- env:
- SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
- SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
- SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
- SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
- SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }}
- SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }}
- SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }}
- SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }}
- SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }}
- SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
- SNOWFLAKE_TEST_QUOTED_DATABASE: ${{ secrets.SNOWFLAKE_TEST_QUOTED_DATABASE }}
- SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
- run: tox
-
- - name: Run tox (bigquery)
- if: matrix.adapter == 'bigquery'
- env:
- BIGQUERY_TEST_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_TEST_SERVICE_ACCOUNT_JSON }}
- BIGQUERY_TEST_ALT_DATABASE: ${{ secrets.BIGQUERY_TEST_ALT_DATABASE }}
- run: tox
-
- uses: actions/upload-artifact@v2
if: always()
with:
diff --git a/README.md b/README.md
index 75b757486..f7dad3317 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,16 @@
+
+
+
+
+
+
+
+
-**[dbt](https://www.getdbt.com/)** (data build tool) enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
+**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis.
@@ -11,21 +19,20 @@ dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-agg
The `dbt-redshift` package contains all of the code enabling dbt to work with Amazon Redshift. For
more information on using dbt with Redshift, consult [the docs](https://docs.getdbt.com/docs/profile-redshift).
+## Getting started
-## Find out more
-
-- Check out the [Introduction to dbt](https://docs.getdbt.com/docs/introduction/).
-- Read the [dbt Viewpoint](https://docs.getdbt.com/docs/about/viewpoint/).
+- [Install dbt](https://docs.getdbt.com/docs/installation)
+- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)
-## Join thousands of analysts in the dbt community
+## Join the dbt Community
-- Join the [chat](http://community.getdbt.com/) on Slack.
-- Find community posts on [dbt Discourse](https://discourse.getdbt.com).
+- Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/)
+- Read more on the [dbt Community Discourse](https://discourse.getdbt.com)
## Reporting bugs and contributing code
-- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt/issues/new).
-- Want to help us build dbt? Check out the [Contributing Getting Started Guide](https://github.com/dbt-labs/dbt/blob/HEAD/CONTRIBUTING.md)
+- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt/issues/new)
+- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt/blob/HEAD/CONTRIBUTING.md)
## Code of Conduct
diff --git a/tests/integration/base.py b/tests/integration/base.py
index 9c6d2d840..13dc8283d 100644
--- a/tests/integration/base.py
+++ b/tests/integration/base.py
@@ -58,9 +58,7 @@ def __eq__(self, other):
class FakeArgs:
def __init__(self):
self.threads = 1
- self.data = False
self.defer = False
- self.schema = True
self.full_refresh = False
self.models = None
self.select = None
diff --git a/tests/integration/deprecation_tests/adapter-macro-macros/macros.sql b/tests/integration/deprecation_tests/adapter-macro-macros/macros.sql
deleted file mode 100644
index b03ca28f1..000000000
--- a/tests/integration/deprecation_tests/adapter-macro-macros/macros.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-{% macro some_macro(arg1, arg2) -%}
- {{ adapter_macro('some_macro', arg1, arg2) }}
-{%- endmacro %}
-
-
-{% macro default__some_macro(arg1, arg2) %}
- {% do exceptions.raise_compiler_error('not allowed') %}
-{% endmacro %}
-
-{% macro postgres__some_macro(arg1, arg2) -%}
- {{ arg1 }}{{ arg2 }}
-{%- endmacro %}
-
-
-{% macro some_other_macro(arg1, arg2) -%}
- {{ adapter_macro('test.some_macro', arg1, arg2) }}
-{%- endmacro %}
diff --git a/tests/integration/deprecation_tests/adapter-macro-models-package/model.sql b/tests/integration/deprecation_tests/adapter-macro-models-package/model.sql
deleted file mode 100644
index 7b86017b4..000000000
--- a/tests/integration/deprecation_tests/adapter-macro-models-package/model.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-{% if some_other_macro('foo', 'bar') != 'foobar' %}
- {% do exceptions.raise_compiler_error('invalid foobar') %}
-{% endif %}
-select 1 as id
diff --git a/tests/integration/deprecation_tests/adapter-macro-models/model.sql b/tests/integration/deprecation_tests/adapter-macro-models/model.sql
deleted file mode 100644
index 37621d3d2..000000000
--- a/tests/integration/deprecation_tests/adapter-macro-models/model.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-{% if some_macro('foo', 'bar') != 'foobar' %}
- {% do exceptions.raise_compiler_error('invalid foobar') %}
-{% endif %}
-select 1 as id
diff --git a/tests/integration/deprecation_tests/boring-models/boring.sql b/tests/integration/deprecation_tests/boring-models/boring.sql
deleted file mode 100644
index 43258a714..000000000
--- a/tests/integration/deprecation_tests/boring-models/boring.sql
+++ /dev/null
@@ -1 +0,0 @@
-select 1 as id
diff --git a/tests/integration/deprecation_tests/custom-materialization-macros/custom.sql b/tests/integration/deprecation_tests/custom-materialization-macros/custom.sql
deleted file mode 100644
index 46ce9b9e4..000000000
--- a/tests/integration/deprecation_tests/custom-materialization-macros/custom.sql
+++ /dev/null
@@ -1,65 +0,0 @@
-{%- materialization custom, default -%}
-
- {%- set identifier = model['alias'] -%}
- {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}
- {%- set backup_identifier = model['name'] + '__dbt_backup' -%}
-
- {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}
- {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,
- type='view') -%}
- {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,
- schema=schema, database=database, type='view') -%}
-
- /*
- This relation (probably) doesn't exist yet. If it does exist, it's a leftover from
- a previous run, and we're going to try to drop it immediately. At the end of this
- materialization, we're going to rename the "old_relation" to this identifier,
- and then we're going to drop it. In order to make sure we run the correct one of:
- - drop view ...
- - drop table ...
-
- We need to set the type of this relation to be the type of the old_relation, if it exists,
- or else "view" as a sane default if it does not. Note that if the old_relation does not
- exist, then there is nothing to move out of the way and subsequentally drop. In that case,
- this relation will be effectively unused.
- */
- {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}
- {%- set backup_relation = api.Relation.create(identifier=backup_identifier,
- schema=schema, database=database,
- type=backup_relation_type) -%}
-
- {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}
-
- {{ run_hooks(pre_hooks, inside_transaction=False) }}
-
- -- drop the temp relations if they exists for some reason
- {{ adapter.drop_relation(intermediate_relation) }}
- {{ adapter.drop_relation(backup_relation) }}
-
- -- `BEGIN` happens here:
- {{ run_hooks(pre_hooks, inside_transaction=True) }}
-
- -- build model
- {% call statement('main') -%}
- {{ create_view_as(intermediate_relation, sql) }}
- {%- endcall %}
-
- -- cleanup
- -- move the existing view out of the way
- {% if old_relation is not none %}
- {{ adapter.rename_relation(target_relation, backup_relation) }}
- {% endif %}
- {{ adapter.rename_relation(intermediate_relation, target_relation) }}
-
- {{ run_hooks(post_hooks, inside_transaction=True) }}
-
- {{ adapter.commit() }}
-
- {{ drop_relation_if_exists(backup_relation) }}
-
- {{ run_hooks(post_hooks, inside_transaction=False) }}
-
- {# do not return anything! #}
- {# {{ return({'relations': [target_relation]}) }} #}
-
-{%- endmaterialization -%}
diff --git a/tests/integration/deprecation_tests/custom-models/boring.sql b/tests/integration/deprecation_tests/custom-models/boring.sql
deleted file mode 100644
index 12c0293b3..000000000
--- a/tests/integration/deprecation_tests/custom-models/boring.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-{{ config(materialized='custom') }}
-select 1 as id
diff --git a/tests/integration/deprecation_tests/data/data_seed.csv b/tests/integration/deprecation_tests/data/data_seed.csv
deleted file mode 100644
index afd0a31ef..000000000
--- a/tests/integration/deprecation_tests/data/data_seed.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-id,dupe
-1,a
-2,a
-3,a
-4,a
diff --git a/tests/integration/deprecation_tests/data/seed.csv b/tests/integration/deprecation_tests/data/seed.csv
deleted file mode 100644
index 69ae389be..000000000
--- a/tests/integration/deprecation_tests/data/seed.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-a,b
-1,hello
-2,goodbye
diff --git a/tests/integration/deprecation_tests/dispatch-macros/cast.sql b/tests/integration/deprecation_tests/dispatch-macros/cast.sql
deleted file mode 100644
index 3dc65d2d7..000000000
--- a/tests/integration/deprecation_tests/dispatch-macros/cast.sql
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-{% macro string_literal(s) -%}
- {{ adapter.dispatch('string_literal', packages=['test'])(s) }}
-{%- endmacro %}
-
-{% macro default__string_literal(s) %}
- '{{ s }}'::text
-{% endmacro %}
-
-{% macro bigquery__string_literal(s) %}
- cast('{{ s }}' as string)
-{% endmacro %}
diff --git a/tests/integration/deprecation_tests/dispatch-macros/expect_value.sql b/tests/integration/deprecation_tests/dispatch-macros/expect_value.sql
deleted file mode 100644
index 0ee66151f..000000000
--- a/tests/integration/deprecation_tests/dispatch-macros/expect_value.sql
+++ /dev/null
@@ -1,10 +0,0 @@
-
--- cross-db compatible test, similar to accepted_values
-
-{% test expect_value(model, field, value) %}
-
-select *
-from {{ model }}
-where {{ field }} != '{{ value }}'
-
-{% endtest %}
diff --git a/tests/integration/deprecation_tests/dispatch-models/alias_in_project.sql b/tests/integration/deprecation_tests/dispatch-models/alias_in_project.sql
deleted file mode 100644
index aa9ecd0bf..000000000
--- a/tests/integration/deprecation_tests/dispatch-models/alias_in_project.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-
-select {{ string_literal(this.name) }} as tablename
diff --git a/tests/integration/deprecation_tests/dispatch-models/alias_in_project_with_override.sql b/tests/integration/deprecation_tests/dispatch-models/alias_in_project_with_override.sql
deleted file mode 100644
index 67cb93d93..000000000
--- a/tests/integration/deprecation_tests/dispatch-models/alias_in_project_with_override.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-
-{{ config(alias='override_alias') }}
-
-select {{ string_literal(this.name) }} as tablename
diff --git a/tests/integration/deprecation_tests/dispatch-models/foo_alias.sql b/tests/integration/deprecation_tests/dispatch-models/foo_alias.sql
deleted file mode 100644
index 3f7d16467..000000000
--- a/tests/integration/deprecation_tests/dispatch-models/foo_alias.sql
+++ /dev/null
@@ -1,9 +0,0 @@
-
-{{
- config(
- alias='foo',
- materialized='table'
- )
-}}
-
-select {{ string_literal(this.name) }} as tablename
diff --git a/tests/integration/deprecation_tests/dispatch-models/ref_foo_alias.sql b/tests/integration/deprecation_tests/dispatch-models/ref_foo_alias.sql
deleted file mode 100644
index e01463bb7..000000000
--- a/tests/integration/deprecation_tests/dispatch-models/ref_foo_alias.sql
+++ /dev/null
@@ -1,16 +0,0 @@
-
-{{
- config(
- materialized='table'
- )
-}}
-
-with trigger_ref as (
-
- -- we should still be able to ref a model by its filepath
- select * from {{ ref('foo_alias') }}
-
-)
-
--- this name should still be the filename
-select {{ string_literal(this.name) }} as tablename
diff --git a/tests/integration/deprecation_tests/dispatch-models/schema.yml b/tests/integration/deprecation_tests/dispatch-models/schema.yml
deleted file mode 100644
index b3a82faad..000000000
--- a/tests/integration/deprecation_tests/dispatch-models/schema.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-version: 2
-models:
-- name: foo_alias
- tests:
- - expect_value:
- field: tablename
- value: foo
-- name: ref_foo_alias
- tests:
- - expect_value:
- field: tablename
- value: ref_foo_alias
-- name: alias_in_project
- tests:
- - expect_value:
- field: tablename
- value: project_alias
-- name: alias_in_project_with_override
- tests:
- - expect_value:
- field: tablename
- value: override_alias
diff --git a/tests/integration/deprecation_tests/models-key-mismatch/schema.yml b/tests/integration/deprecation_tests/models-key-mismatch/schema.yml
deleted file mode 100644
index 0f713412e..000000000
--- a/tests/integration/deprecation_tests/models-key-mismatch/schema.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-version: 2
-models:
- - name: seed
- description: my cool seed
diff --git a/tests/integration/deprecation_tests/models/already_exists.sql b/tests/integration/deprecation_tests/models/already_exists.sql
deleted file mode 100644
index c215a179a..000000000
--- a/tests/integration/deprecation_tests/models/already_exists.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-select 1 as id
-
-{% if adapter.already_exists(this.schema, this.identifier) and not should_full_refresh() %}
- where id > (select max(id) from {{this}})
-{% endif %}
diff --git a/tests/integration/deprecation_tests/test_deprecations.py b/tests/integration/deprecation_tests/test_deprecations.py
deleted file mode 100644
index ce32a0e35..000000000
--- a/tests/integration/deprecation_tests/test_deprecations.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from tests.integration.base import DBTIntegrationTest, use_profile
-
-from dbt import deprecations
-
-
-class BaseTestDeprecations(DBTIntegrationTest):
- def setUp(self):
- super().setUp()
- deprecations.reset_deprecations()
-
- @property
- def schema(self):
- return "deprecation_test"
-
- @staticmethod
- def dir(path):
- return path.lstrip("/")
-
-
-class TestAdapterMacroDeprecation(BaseTestDeprecations):
- @property
- def models(self):
- return self.dir('adapter-macro-models')
-
- @property
- def project_config(self):
- return {
- 'config-version': 2,
- 'macro-paths': [self.dir('adapter-macro-macros')]
- }
-
- @use_profile('redshift')
- def test_redshift_adapter_macro(self):
- self.assertEqual(deprecations.active_deprecations, set())
- # pick up the postgres macro
- self.run_dbt()
- expected = {'adapter-macro'}
- self.assertEqual(expected, deprecations.active_deprecations)
-
-
-class TestAdapterMacroDeprecationPackages(BaseTestDeprecations):
- @property
- def models(self):
- return self.dir('adapter-macro-models-package')
-
- @property
- def project_config(self):
- return {
- 'config-version': 2,
- 'macro-paths': [self.dir('adapter-macro-macros')]
- }
-
- @use_profile('redshift')
- def test_redshift_adapter_macro_pkg(self):
- self.assertEqual(deprecations.active_deprecations, set())
- # pick up the postgres macro
- self.assertEqual(deprecations.active_deprecations, set())
- self.run_dbt()
- expected = {'adapter-macro'}
- self.assertEqual(expected, deprecations.active_deprecations)
diff --git a/tests/integration/docs_generate_tests/test_docs_generate.py b/tests/integration/docs_generate_tests/test_docs_generate.py
index 6b2ca44c3..924dc3576 100644
--- a/tests/integration/docs_generate_tests/test_docs_generate.py
+++ b/tests/integration/docs_generate_tests/test_docs_generate.py
@@ -888,7 +888,7 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
},
'test.test.not_null_model_id.d01cc630e6': {
'alias': 'not_null_model_id',
- 'compiled_path': Normalized('target/compiled/test/models/schema.yml/schema_test/not_null_model_id.sql'),
+ 'compiled_path': Normalized('target/compiled/test/models/schema.yml/not_null_model_id.sql'),
'build_path': None,
'created_at': ANY,
'column_name': 'id',
@@ -901,20 +901,20 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
},
'deferred': False,
'description': '',
- 'fqn': ['test', 'schema_test', 'not_null_model_id'],
+ 'fqn': ['test', 'not_null_model_id'],
'name': 'not_null_model_id',
'original_file_path': model_schema_yml_path,
'package_name': 'test',
'patch_path': None,
- 'path': Normalized('schema_test/not_null_model_id.sql'),
- 'raw_sql': "{{ test_not_null(**_dbt_schema_test_kwargs) }}",
+ 'path': Normalized('not_null_model_id.sql'),
+ 'raw_sql': "{{ test_not_null(**_dbt_generic_test_kwargs) }}",
'refs': [['model']],
'relation_name': None,
'resource_type': 'test',
'root_path': self.test_root_realpath,
'schema': test_audit_schema,
'database': self.default_database,
- 'tags': ['schema'],
+ 'tags': [],
'meta': {},
'unique_id': 'test.test.not_null_model_id.d01cc630e6',
'docs': {'show': True},
@@ -979,7 +979,7 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
},
'test.test.test_nothing_model_.5d38568946': {
'alias': 'test_nothing_model_',
- 'compiled_path': Normalized('target/compiled/test/models/schema.yml/schema_test/test_nothing_model_.sql'),
+ 'compiled_path': Normalized('target/compiled/test/models/schema.yml/test_nothing_model_.sql'),
'build_path': None,
'created_at': ANY,
'column_name': None,
@@ -992,20 +992,20 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
},
'deferred': False,
'description': '',
- 'fqn': ['test', 'schema_test', 'test_nothing_model_'],
+ 'fqn': ['test', 'test_nothing_model_'],
'name': 'test_nothing_model_',
'original_file_path': model_schema_yml_path,
'package_name': 'test',
'patch_path': None,
- 'path': normalize('schema_test/test_nothing_model_.sql'),
- 'raw_sql': "{{ test.test_nothing(**_dbt_schema_test_kwargs) }}",
+ 'path': normalize('test_nothing_model_.sql'),
+ 'raw_sql': "{{ test.test_nothing(**_dbt_generic_test_kwargs) }}",
'refs': [['model']],
'relation_name': None,
'resource_type': 'test',
'root_path': self.test_root_realpath,
'schema': test_audit_schema,
'database': self.default_database,
- 'tags': ['schema'],
+ 'tags': [],
'meta': {},
'unique_id': 'test.test.test_nothing_model_.5d38568946',
'docs': {'show': True},
@@ -1025,7 +1025,7 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
},
'test.test.unique_model_id.67b76558ff': {
'alias': 'unique_model_id',
- 'compiled_path': Normalized('target/compiled/test/models/schema.yml/schema_test/unique_model_id.sql'),
+ 'compiled_path': Normalized('target/compiled/test/models/schema.yml/unique_model_id.sql'),
'build_path': None,
'created_at': ANY,
'column_name': 'id',
@@ -1038,20 +1038,20 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
},
'deferred': False,
'description': '',
- 'fqn': ['test', 'schema_test', 'unique_model_id'],
+ 'fqn': ['test', 'unique_model_id'],
'name': 'unique_model_id',
'original_file_path': model_schema_yml_path,
'package_name': 'test',
'patch_path': None,
- 'path': normalize('schema_test/unique_model_id.sql'),
- 'raw_sql': "{{ test_unique(**_dbt_schema_test_kwargs) }}",
+ 'path': normalize('unique_model_id.sql'),
+ 'raw_sql': "{{ test_unique(**_dbt_generic_test_kwargs) }}",
'refs': [['model']],
'relation_name': None,
'resource_type': 'test',
'root_path': self.test_root_realpath,
'schema': test_audit_schema,
'database': self.default_database,
- 'tags': ['schema'],
+ 'tags': [],
'meta': {},
'unique_id': 'test.test.unique_model_id.67b76558ff',
'docs': {'show': True},
@@ -1218,7 +1218,7 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
'test.macro_info': ANY,
'test.macro_arg_info': ANY,
},
- 'disabled': [],
+ 'disabled': {},
}
def _checksum_file(self, path):
@@ -1487,7 +1487,7 @@ def expected_redshift_incremental_view_manifest(self):
'test.macro_info': ANY,
'test.macro_arg_info': ANY,
},
- 'disabled': [],
+ 'disabled': {},
}
def verify_metadata(self, metadata, dbt_schema_version):
diff --git a/tests/integration/incremental_schema_tests/test_incremental_schema.py b/tests/integration/incremental_schema_tests/test_incremental_schema.py
index 77391d625..5d803c48f 100644
--- a/tests/integration/incremental_schema_tests/test_incremental_schema.py
+++ b/tests/integration/incremental_schema_tests/test_incremental_schema.py
@@ -28,11 +28,11 @@ def list_tests_and_assert(self, include, exclude, expected_tests):
print(listed)
assert len(listed) == len(expected_tests)
- test_names = [name.split('.')[2] for name in listed]
+ test_names = [name.split('.')[-1] for name in listed]
assert sorted(test_names) == sorted(expected_tests)
def run_tests_and_assert(
- self, include, exclude, expected_tests, compare_source, compare_target, schema = False, data = False
+ self, include, exclude, expected_tests, compare_source, compare_target
):
run_args = ['run']
@@ -50,10 +50,6 @@ def run_tests_and_assert(
test_args.extend(('--models', include))
if exclude:
test_args.extend(('--exclude', exclude))
- if schema:
- test_args.append('--schema')
- if data:
- test_args.append('--data')
results = self.run_dbt(test_args)
tests_run = [r.node.name for r in results]
diff --git a/tests/integration/simple_snapshot_test/test_simple_snapshot.py b/tests/integration/simple_snapshot_test/test_simple_snapshot.py
index 8007509ca..9a5023de6 100644
--- a/tests/integration/simple_snapshot_test/test_simple_snapshot.py
+++ b/tests/integration/simple_snapshot_test/test_simple_snapshot.py
@@ -325,7 +325,7 @@ def _snapshot_and_assert_invalidated(self):
for result in snapshotted[10:]:
# result is a tuple, the dbt_valid_to column is the latest
self.assertIsInstance(result[-1], datetime)
- self.assertGreaterEqual(result[-1].astimezone(pytz.UTC), self._invalidated_snapshot_datetime)
+ self.assertGreaterEqual(result[-1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime)
def _revive_records(self):
database = self.default_database
@@ -365,7 +365,7 @@ def _snapshot_and_assert_revived(self):
for result in invalidated_records:
# result is a tuple, the dbt_valid_to column is the latest
self.assertIsInstance(result[1], datetime)
- self.assertGreaterEqual(result[1].astimezone(pytz.UTC), self._invalidated_snapshot_datetime)
+ self.assertGreaterEqual(result[1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime)
# records which weren't revived (id != 10, 11)
revived_records = self.run_sql(
@@ -387,7 +387,7 @@ def _snapshot_and_assert_revived(self):
self.assertIsInstance(result[1], datetime)
# there are milliseconds (part of microseconds in datetime objects) in the
# invalidated_snapshot_datetime and not in result datetime so set the microseconds to 0
- self.assertGreaterEqual(result[1].astimezone(pytz.UTC), self._invalidated_snapshot_datetime.replace(microsecond=0))
+ self.assertGreaterEqual(result[1].replace(tzinfo=pytz.UTC), self._invalidated_snapshot_datetime.replace(microsecond=0))
self.assertIsNone(result[2])
@use_profile('redshift')
diff --git a/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py b/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py
index fec868f7c..c1e9a2882 100644
--- a/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py
+++ b/tests/integration/simple_snapshot_test/test_snapshot_check_cols.py
@@ -32,7 +32,7 @@ def snapshot_check_cols_cycle(self):
self.assertEqual(len(results), 1)
def assert_expected(self):
- self.run_dbt(['test', '--data', '--vars', 'version: 3'])
+ self.run_dbt(['test', '--select', 'test_type:singular', '--vars', 'version: 3'])
@use_profile('redshift')
def test__redshift__simple_snapshot(self):
diff --git a/tox.ini b/tox.ini
index 4db9528b8..44bf99189 100644
--- a/tox.ini
+++ b/tox.ini
@@ -25,7 +25,7 @@ description = adapter plugin integration testing
skip_install = true
passenv = DBT_* REDSHIFT_TEST_* PYTEST_ADDOPTS
commands =
- redshift: {envpython} -m pytest {posargs} -m profile_redshift test/integration
+ redshift: {envpython} -m pytest {posargs} -m profile_redshift tests/integration
deps =
-rdev_requirements.txt
-e.
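
The tox.ini and workflow hunks above describe how the Redshift integration suite is invoked after this change: tox runs pytest against tests/integration with the profile_redshift marker, and the DBT_* / REDSHIFT_TEST_* variables are passed through via passenv. As a rough local sketch (not part of the patch — the tox envlist is not shown in this diff, so the env name below is an assumption), running the suite might look like:

    pip install tox
    # export the credentials the redshift profile expects, e.g. REDSHIFT_TEST_HOST
    # and the other REDSHIFT_TEST_* variables referenced by passenv
    tox -e integration-redshift -- tests/integration/simple_snapshot_test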