forked from dbt-labs/dbt-spark
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Convert Spark persist docs test (dbt-labs#612)
* Begin conversion and get ready for CI testing. * Uncheck models * Change core index. * Pare test down for minimal test * Adjust code with some guessing. * Forgot a version tag * Make test conversion work. Finally * Fix up the code. * Attempt to fix test conversion with profile skips. * Add missing column test and cleanup code. * Remove shas from the requirements now that base conversion is live. * Revert whitespace change. --------- Co-authored-by: Mila Page <versusfacit@users.noreply.github.com>
- Loading branch information
1 parent
5d27961
commit f877d1e
Showing
10 changed files
with
163 additions
and
163 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
119 changes: 119 additions & 0 deletions
119
tests/functional/adapter/persist_docs/test_persist_docs.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,119 @@ | ||
import json | ||
import os | ||
import pytest | ||
|
||
from dbt.tests.util import run_dbt | ||
|
||
from fixtures import ( | ||
_MODELS__MY_FUN_DOCS, | ||
_MODELS__INCREMENTAL_DELTA, | ||
_MODELS__TABLE_DELTA_MODEL, | ||
_MODELS__TABLE_DELTA_MODEL_MISSING_COLUMN, | ||
_PROPERTIES__MODELS, | ||
_PROPERTIES__SEEDS, | ||
_SEEDS__BASIC, | ||
) | ||
|
||
|
||
@pytest.mark.skip_profile("apache_spark", "spark_session")
class TestPersistDocsDeltaTable:
    """Verify persist_docs writes relation and column comments for Delta tables.

    Builds a seed plus table/incremental models with docs enabled, then reads
    the comments back via `describe extended`.
    """

    @pytest.fixture(scope="class")
    def models(self):
        # Models under test, plus the docs markdown and schema they reference.
        return {
            "incremental_delta_model.sql": _MODELS__INCREMENTAL_DELTA,
            "my_fun_docs.md": _MODELS__MY_FUN_DOCS,
            "table_delta_model.sql": _MODELS__TABLE_DELTA_MODEL,
            "schema.yml": _PROPERTIES__MODELS,
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": _SEEDS__BASIC,
            "seed.yml": _PROPERTIES__SEEDS,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # Persist both relation-level and column-level docs for models and seeds.
        return {
            "models": {
                "test": {
                    "+persist_docs": {"relation": True, "columns": True},
                }
            },
            "seeds": {
                "test": {
                    "+persist_docs": {"relation": True, "columns": True},
                    "+file_format": "delta",
                    "+quote_columns": True,
                }
            },
        }

    def test_delta_comments(self, project):
        """Run seed + models, then assert the persisted comments are visible."""
        run_dbt(["seed"])
        run_dbt(["run"])

        expectations = (
            ("table_delta_model", "Table"),
            ("seed", "Seed"),
            ("incremental_delta_model", "Incremental"),
        )
        for table, kind in expectations:
            rows = project.run_sql(
                "describe extended {schema}.{table}".format(
                    schema=project.test_schema, table=table
                ),
                fetch="all",
            )

            # `describe extended` yields (col_name, data_type, comment) rows;
            # the relation comment appears in a row labeled "Comment".
            for row in rows:
                if row[0] == "Comment":
                    assert row[1].startswith(f"{kind} model description")
                if row[0] == "id":
                    assert row[2].startswith("id Column description")
                if row[0] == "name":
                    assert row[2].startswith("Some stuff here and then a call to")
|
||
|
||
@pytest.mark.skip_profile("apache_spark", "spark_session")
class TestPersistDocsMissingColumn:
    """Verify a documented column missing from the model produces a clear error."""

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # Only column-level docs are needed to trigger the validation path.
        return {
            "models": {
                "test": {
                    "+persist_docs": {"columns": True},
                }
            }
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": _SEEDS__BASIC,
            "seed.yml": _PROPERTIES__SEEDS,
        }

    @pytest.fixture(scope="class")
    def models(self):
        # Model variant that omits a column the schema documents.
        return {
            "table_delta_model.sql": _MODELS__TABLE_DELTA_MODEL_MISSING_COLUMN,
            "my_fun_docs.md": _MODELS__MY_FUN_DOCS,
        }

    @pytest.fixture(scope="class")
    def properties(self):
        return {"schema.yml": _PROPERTIES__MODELS}

    def test_missing_column(self, project):
        """Spark checks our schema for all documented columns rather than failing silently."""
        run_dbt(["seed"])
        results = run_dbt(["run"], expect_pass=False)
        assert "Missing field name in table" in results[0].message
2 changes: 0 additions & 2 deletions
2
tests/integration/persist_docs/models/incremental_delta_model.sql
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.