Skip to content

Commit

Permalink
Merge pull request #2902 from fishtown-analytics/fix/test-selection
Browse files Browse the repository at this point in the history
set default `materialized` for test node configs
  • Loading branch information
Kyle Wigley authored Nov 24, 2020
2 parents 0951d08 + d246aa8 commit fec0e31
Show file tree
Hide file tree
Showing 7 changed files with 207 additions and 114 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
- Increased the supported relation name length in postgres from 29 to 51 ([#2850](https://github.com/fishtown-analytics/dbt/pull/2850))
- Widen supported Google Cloud libraries dependencies ([#2794](https://github.com/fishtown-analytics/dbt/pull/2794), [#2877](https://github.com/fishtown-analytics/dbt/pull/2877)).
- dbt list command always returns 0 as exit code ([#2886](https://github.com/fishtown-analytics/dbt/issues/2886), [#2892](https://github.com/fishtown-analytics/dbt/issues/2892))
- Set default `materialized` for test node configs to `test` ([#2806](https://github.com/fishtown-analytics/dbt/issues/2806), [#2902](https://github.com/fishtown-analytics/dbt/pull/2902))

### Under the hood
- Bump hologram version to 0.0.11. Add scripts/dtr.py ([#2888](https://github.com/fishtown-analytics/dbt/issues/2888), [#2889](https://github.com/fishtown-analytics/dbt/pull/2889))
Expand Down
1 change: 1 addition & 0 deletions core/dbt/contracts/graph/model_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -450,6 +450,7 @@ class SeedConfig(NodeConfig):

@dataclass
class TestConfig(NodeConfig):
materialized: str = 'test'
severity: Severity = Severity('ERROR')


Expand Down
81 changes: 51 additions & 30 deletions test/integration/029_docs_generate_tests/test_docs_generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,8 @@ def test_postgres_include_schema(self):
self.assertEqual(len(manifest['nodes']), 1)
self.assertIn('model.test.model', manifest['nodes'])
self.assertIn('schema', manifest['nodes']['model.test.model'])
self.assertEqual('pg', manifest['nodes']['model.test.model']['schema'][:2])
self.assertEqual('pg', manifest['nodes']
['model.test.model']['schema'][:2])


class TestDocsGenerate(DBTIntegrationTest):
Expand All @@ -117,7 +118,6 @@ def tearDown(self):
super().tearDown()
del os.environ['DBT_ENV_CUSTOM_ENV_env_key']


@property
def schema(self):
return 'docs_generate_029'
Expand Down Expand Up @@ -209,11 +209,11 @@ def _redshift_stats(self):
"include": True
},
"diststyle": {
"id": "diststyle",
"label": "Dist Style",
"value": AnyStringWith('AUTO'),
"description": "Distribution style or distribution key column, if key distribution is defined.",
"include": True
"id": "diststyle",
"label": "Dist Style",
"value": AnyStringWith('AUTO'),
"description": "Distribution style or distribution key column, if key distribution is defined.",
"include": True
},
"max_varchar": {
"id": "max_varchar",
Expand Down Expand Up @@ -349,7 +349,7 @@ def _expected_catalog(self, id_type, text_type, time_type, view_type,
table_type, model_stats, seed_stats=None, case=None,
case_columns=False, model_database=None):
if case is None:
case = lambda x: x
def case(x): return x
col_case = case if case_columns else lambda x: x

if seed_stats is None:
Expand Down Expand Up @@ -888,7 +888,8 @@ def verify_catalog(self, expected):

assert set(catalog) == {'errors', 'metadata', 'nodes', 'sources'}

self.verify_metadata(catalog['metadata'], 'https://schemas.getdbt.com/dbt/catalog/v1.json')
self.verify_metadata(
catalog['metadata'], 'https://schemas.getdbt.com/dbt/catalog/v1.json')
assert not catalog['errors']

for key in 'nodes', 'sources':
Expand Down Expand Up @@ -988,7 +989,7 @@ def rendered_tst_config(self, **updates):
result = {
'column_types': {},
'enabled': True,
'materialized': 'view',
'materialized': 'test',
'persist_docs': {},
'post-hook': [],
'pre-hook': [],
Expand Down Expand Up @@ -1071,10 +1072,13 @@ def expected_seeded_manifest(self, model_database=None, quote_model=False):
model_database = self.alternative_database

model_config = self.rendered_model_config(database=model_database)
second_config = self.rendered_model_config(schema=self.alternate_schema[-4:])
second_config = self.rendered_model_config(
schema=self.alternate_schema[-4:])

unrendered_model_config = self.unrendered_model_config(database=model_database, materialized='view')
unrendered_second_config = self.unrendered_model_config(schema=self.alternate_schema[-4:], materialized='view')
unrendered_model_config = self.unrendered_model_config(
database=model_database, materialized='view')
unrendered_second_config = self.unrendered_model_config(
schema=self.alternate_schema[-4:], materialized='view')

seed_config = self.rendered_seed_config()
unrendered_seed_config = self.unrendered_seed_config()
Expand Down Expand Up @@ -2143,7 +2147,8 @@ def expected_bigquery_complex_manifest(self):
'config': self.rendered_model_config(
cluster_by=['first_name'],
materialized='table',
partition_by={'field': 'updated_at', 'data_type': 'date'},
partition_by={'field': 'updated_at',
'data_type': 'date'},
),
'sources': [],
'depends_on': {'macros': [], 'nodes': ['seed.test.seed']},
Expand Down Expand Up @@ -2219,7 +2224,8 @@ def expected_bigquery_complex_manifest(self):
'unrendered_config': self.unrendered_model_config(
cluster_by=['first_name'],
materialized='table',
partition_by={'field': 'updated_at', 'data_type': 'date'},
partition_by={'field': 'updated_at',
'data_type': 'date'},
),
},
'model.test.multi_clustered': {
Expand All @@ -2228,7 +2234,8 @@ def expected_bigquery_complex_manifest(self):
'config': self.rendered_model_config(
cluster_by=['first_name', 'email'],
materialized='table',
partition_by={'field': 'updated_at', 'data_type': 'date'}
partition_by={'field': 'updated_at',
'data_type': 'date'}
),
'sources': [],
'depends_on': {'macros': [], 'nodes': ['seed.test.seed']},
Expand Down Expand Up @@ -2303,7 +2310,8 @@ def expected_bigquery_complex_manifest(self):
'unrendered_config': self.unrendered_model_config(
cluster_by=['first_name', 'email'],
materialized='table',
partition_by={'field': 'updated_at', 'data_type': 'date'}
partition_by={'field': 'updated_at',
'data_type': 'date'}
),
},
'model.test.nested_view': {
Expand Down Expand Up @@ -2864,12 +2872,16 @@ def verify_manifest(self, expected_manifest):

for key in manifest_keys:
if key == 'macros':
self.verify_manifest_macros(manifest, expected_manifest.get('macros'))
self.verify_manifest_macros(
manifest, expected_manifest.get('macros'))
elif key == 'metadata':
metadata = manifest['metadata']
self.verify_metadata(metadata, 'https://schemas.getdbt.com/dbt/manifest/v1.json')
assert 'project_id' in metadata and metadata['project_id'] == '098f6bcd4621d373cade4e832627b4f6'
assert 'send_anonymous_usage_stats' in metadata and metadata['send_anonymous_usage_stats'] is False
self.verify_metadata(
metadata, 'https://schemas.getdbt.com/dbt/manifest/v1.json')
assert 'project_id' in metadata and metadata[
'project_id'] == '098f6bcd4621d373cade4e832627b4f6'
assert 'send_anonymous_usage_stats' in metadata and metadata[
'send_anonymous_usage_stats'] is False
assert 'user_id' in metadata and metadata['user_id'] is None
assert 'adapter_type' in metadata and metadata['adapter_type'] == self.adapter_type
else:
Expand All @@ -2895,9 +2907,12 @@ def expected_run_results(self, quote_schema=True, quote_model=False,
model_database = self.alternative_database

model_config = self.rendered_model_config(database=model_database)
second_model_config = self.rendered_model_config(schema=self.alternate_schema[-4:])
unrendered_model_config = self.unrendered_model_config(database=model_database, materialized='view')
unrendered_second_model_config = self.unrendered_model_config(schema=self.alternate_schema[-4:], materialized='view')
second_model_config = self.rendered_model_config(
schema=self.alternate_schema[-4:])
unrendered_model_config = self.unrendered_model_config(
database=model_database, materialized='view')
unrendered_second_model_config = self.unrendered_model_config(
schema=self.alternate_schema[-4:], materialized='view')
schema = self.unique_schema()

# we are selecting from the seed, which is always in the default db
Expand Down Expand Up @@ -3735,7 +3750,8 @@ def verify_run_results(self, expected_run_results):
run_results = _read_json('./target/run_results.json')

assert 'metadata' in run_results
self.verify_metadata(run_results['metadata'], 'https://schemas.getdbt.com/dbt/run-results/v1.json')
self.verify_metadata(
run_results['metadata'], 'https://schemas.getdbt.com/dbt/run-results/v1.json')
self.assertIn('elapsed_time', run_results)
self.assertGreater(run_results['elapsed_time'], 0)
self.assertTrue(
Expand All @@ -3752,7 +3768,8 @@ def verify_run_results(self, expected_run_results):

@use_profile('postgres')
def test__postgres__run_and_generate_no_compile(self):
self.run_and_generate(alternate_db=self.default_database, args=['--no-compile'])
self.run_and_generate(
alternate_db=self.default_database, args=['--no-compile'])
self.verify_catalog(self.expected_postgres_catalog())
self.assertFalse(os.path.exists('./target/manifest.json'))

Expand All @@ -3777,12 +3794,14 @@ def test__postgres_references(self):

self.verify_catalog(self.expected_postgres_references_catalog())
self.verify_manifest(self.expected_postgres_references_manifest())
self.verify_run_results(self.expected_postgres_references_run_results())
self.verify_run_results(
self.expected_postgres_references_run_results())

@use_profile('postgres')
def test_postgres_asset_paths_copied(self):
self.run_and_generate(
{'asset-paths': [self.dir('assets'), self.dir('non-existent-assets')]},
{'asset-paths': [self.dir('assets'),
self.dir('non-existent-assets')]},
)

assert os.path.exists('./target/assets')
Expand Down Expand Up @@ -3823,7 +3842,8 @@ def connect(*args, **kwargs):

self.verify_catalog(self.expected_snowflake_catalog(case_columns=True))
self.verify_manifest(self.expected_seeded_manifest(quote_model=True))
self.verify_run_results(self.expected_run_results(quote_schema=False, quote_model=True))
self.verify_run_results(self.expected_run_results(
quote_schema=False, quote_model=True))

@use_profile('bigquery')
def test__bigquery__run_and_generate(self):
Expand Down Expand Up @@ -3862,7 +3882,8 @@ def test__redshift__incremental_view(self):
model_count=1,
)
self.verify_catalog(self.expected_redshift_incremental_catalog())
self.verify_manifest(self.expected_redshift_incremental_view_manifest())
self.verify_manifest(
self.expected_redshift_incremental_view_manifest())

@use_profile('presto')
def test__presto__run_and_generate(self):
Expand Down
6 changes: 3 additions & 3 deletions test/integration/047_dbt_ls_test/test_ls.py
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,7 @@ def expect_test_output(self):
'tags': ['schema'],
'config': {
'enabled': True,
'materialized': 'view',
'materialized': 'test',
'post-hook': [],
'severity': 'ERROR',
'tags': [],
Expand All @@ -356,7 +356,7 @@ def expect_test_output(self):
'tags': ['data'],
'config': {
'enabled': True,
'materialized': 'view',
'materialized': 'test',
'post-hook': [],
'severity': 'ERROR',
'tags': [],
Expand All @@ -380,7 +380,7 @@ def expect_test_output(self):
'tags': ['schema'],
'config': {
'enabled': True,
'materialized': 'view',
'materialized': 'test',
'post-hook': [],
'severity': 'ERROR',
'tags': [],
Expand Down
Loading

0 comments on commit fec0e31

Please sign in to comment.