Skip to content

Commit

Permalink
support contracts on models materialized as views (#670)
Browse files Browse the repository at this point in the history
* first pass to add view mat contract check

* changelog, point to branch for testing, fix typo

* fix tests

* fix skips

* remove dbt-core pin

* remove incremental temp logic
  • Loading branch information
emmyoop authored Mar 8, 2023
1 parent 0cb64bc commit 88266c6
Show file tree
Hide file tree
Showing 3 changed files with 30 additions and 8 deletions.
6 changes: 3 additions & 3 deletions .changes/unreleased/Features-20230223-180923.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
kind: Features
body: implement data_type_code_to_name on SparkConnectionManager
body: Enforce contracts on models materialized as tables and views
time: 2023-02-23T18:09:23.787675-05:00
custom:
Author: michelleark
Issue: "639"
Author: michelleark emmyoop
Issue: 639 654
3 changes: 3 additions & 0 deletions dbt/include/spark/macros/adapters.sql
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,9 @@
{% macro spark__create_view_as(relation, sql) -%}
  {#-- Render a `create or replace view` statement for `relation` backed by `sql`.
      Spark override of dbt's built-in create_view_as macro. --#}
  create or replace view {{ relation }}
  {#-- comment_clause(): presumably emits a COMMENT clause from model config; defined elsewhere — verify in adapters.sql --#}
  {{ comment_clause() }}
  {#-- When the model opts into a contract, inject the column-equivalence check
      so the view's columns must match the declared schema (added by this commit). --#}
  {% if config.get('contract', False) -%}
    {{ get_assert_columns_equivalent(sql) }}
  {%- endif %}
  as
    {{ sql }}
{% endmacro %}
Expand Down
29 changes: 24 additions & 5 deletions tests/functional/adapter/test_constraints.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import pytest
from dbt.tests.util import relation_from_name
from dbt.tests.adapter.constraints.test_constraints import (
BaseConstraintsColumnsEqual,
BaseTableConstraintsColumnsEqual,
BaseViewConstraintsColumnsEqual,
BaseConstraintsRuntimeEnforcement
)
from dbt.tests.adapter.constraints.fixtures import (
Expand All @@ -28,8 +29,7 @@
constraints_yml = model_schema_yml.replace("text", "string").replace("primary key", "")


@pytest.mark.skip_profile('spark_session', 'apache_spark', 'databricks_http_cluster')
class TestSparkConstraintsColumnsEqualPyodbc(BaseConstraintsColumnsEqual):
class PyodbcSetup:
@pytest.fixture(scope="class")
def models(self):
return {
Expand Down Expand Up @@ -68,8 +68,7 @@ def data_types(self, int_type, schema_int_type, string_type):
]


@pytest.mark.skip_profile('spark_session', 'apache_spark', 'databricks_sql_endpoint', 'databricks_cluster')
class TestSparkConstraintsColumnsEqualDatabricksHTTP(BaseConstraintsColumnsEqual):
class DatabricksHTTPSetup:
@pytest.fixture(scope="class")
def models(self):
return {
Expand Down Expand Up @@ -107,6 +106,26 @@ def data_types(self, int_type, schema_int_type, string_type):
]


@pytest.mark.skip_profile('spark_session', 'apache_spark', 'databricks_http_cluster')
class TestSparkTableConstraintsColumnsEqualPyodbc(PyodbcSetup, BaseTableConstraintsColumnsEqual):
    """Contract column-equivalence checks for table materializations via the
    pyodbc-based profiles; all behavior comes from the mixed-in base classes."""
    pass


@pytest.mark.skip_profile('spark_session', 'apache_spark', 'databricks_http_cluster')
class TestSparkViewConstraintsColumnsEqualPyodbc(PyodbcSetup, BaseViewConstraintsColumnsEqual):
    """Contract column-equivalence checks for view materializations via the
    pyodbc-based profiles; all behavior comes from the mixed-in base classes."""
    pass


@pytest.mark.skip_profile('spark_session', 'apache_spark', 'databricks_sql_endpoint', 'databricks_cluster')
class TestSparkTableConstraintsColumnsEqualDatabricksHTTP(DatabricksHTTPSetup, BaseTableConstraintsColumnsEqual):
    """Contract column-equivalence checks for table materializations on the
    Databricks HTTP-cluster profile; behavior comes from the mixed-in bases."""
    pass


@pytest.mark.skip_profile('spark_session', 'apache_spark', 'databricks_sql_endpoint', 'databricks_cluster')
class TestSparkViewConstraintsColumnsEqualDatabricksHTTP(DatabricksHTTPSetup, BaseViewConstraintsColumnsEqual):
    """Contract column-equivalence checks for view materializations on the
    Databricks HTTP-cluster profile; behavior comes from the mixed-in bases."""
    pass


@pytest.mark.skip_profile('spark_session', 'apache_spark')
class TestSparkConstraintsRuntimeEnforcement(BaseConstraintsRuntimeEnforcement):
@pytest.fixture(scope="class")
Expand Down

0 comments on commit 88266c6

Please sign in to comment.