Make internal macros use macro dispatch to be overridable in child adapters #320

Merged 5 commits on Apr 6, 2022
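This change converts each internal macro in `dbt/include/spark/macros/adapters.sql` into a thin wrapper that calls `adapter.dispatch`, moving the original body into a `spark__`-prefixed implementation. Because dispatch resolves adapter-prefixed macro names first, a child adapter built on top of dbt-spark can override any of these macros by defining its own `<adapter>__` version instead of forking the whole file. A minimal sketch of such an override — the adapter name and macro body below are illustrative, not part of this PR:

{# Hypothetical child-adapter override: on a Databricks connection,
   adapter.dispatch('location_clause', 'dbt') would resolve
   databricks__location_clause before falling back to
   spark__location_clause. #}
{% macro databricks__location_clause() %}
  {%- set location_root = config.get('location_root') -%}
  {%- if location_root is not none %}
    location '{{ location_root }}/{{ model["alias"] }}'
  {%- endif %}
{%- endmacro -%}

Previously a child adapter had to copy an entire macro to change one clause; with dispatch in place, only the clause being customized needs an override.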
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -5,9 +5,11 @@

### Under the hood
- Use dbt.tests.adapter.basic in test suite ([#298](https://github.com/dbt-labs/dbt-spark/issues/298), [#299](https://github.com/dbt-labs/dbt-spark/pull/299))
- Make internal macros use macro dispatch to be overridable in child adapters ([#319](https://github.com/dbt-labs/dbt-spark/issues/319), [#320](https://github.com/dbt-labs/dbt-spark/pull/320))

### Contributors
- [@JCZuurmond](https://github.com/JCZuurmond) ([#279](https://github.com/dbt-labs/dbt-spark/pull/279))
- [@ueshin](https://github.com/ueshin) ([#320](https://github.com/dbt-labs/dbt-spark/pull/320))

## dbt-spark 1.1.0b1 (March 23, 2022)

36 changes: 35 additions & 1 deletion dbt/include/spark/macros/adapters.sql
@@ -1,19 +1,33 @@
{% macro file_format_clause() %}
{{ return(adapter.dispatch('file_format_clause', 'dbt')()) }}
{%- endmacro -%}

{% macro spark__file_format_clause() %}
{%- set file_format = config.get('file_format', validator=validation.any[basestring]) -%}
{%- if file_format is not none %}
using {{ file_format }}
{%- endif %}
{%- endmacro -%}


{% macro location_clause() %}
{{ return(adapter.dispatch('location_clause', 'dbt')()) }}
{%- endmacro -%}

{% macro spark__location_clause() %}
{%- set location_root = config.get('location_root', validator=validation.any[basestring]) -%}
{%- set identifier = model['alias'] -%}
{%- if location_root is not none %}
location '{{ location_root }}/{{ identifier }}'
{%- endif %}
{%- endmacro -%}


{% macro options_clause() -%}
{{ return(adapter.dispatch('options_clause', 'dbt')()) }}
{%- endmacro -%}

{% macro spark__options_clause() -%}
{%- set options = config.get('options') -%}
{%- if config.get('file_format') == 'hudi' -%}
{%- set unique_key = config.get('unique_key') -%}
@@ -35,7 +49,12 @@
{%- endif %}
{%- endmacro -%}


{% macro comment_clause() %}
{{ return(adapter.dispatch('comment_clause', 'dbt')()) }}
{%- endmacro -%}

{% macro spark__comment_clause() %}
{%- set raw_persist_docs = config.get('persist_docs', {}) -%}

{%- if raw_persist_docs is mapping -%}
@@ -48,7 +67,12 @@
{% endif %}
{%- endmacro -%}


{% macro partition_cols(label, required=false) %}
{{ return(adapter.dispatch('partition_cols', 'dbt')(label, required)) }}
{%- endmacro -%}

{% macro spark__partition_cols(label, required=false) %}
{%- set cols = config.get('partition_by', validator=validation.any[list, basestring]) -%}
{%- if cols is not none %}
{%- if cols is string -%}
@@ -65,6 +89,10 @@


{% macro clustered_cols(label, required=false) %}
{{ return(adapter.dispatch('clustered_cols', 'dbt')(label, required)) }}
{%- endmacro -%}

{% macro spark__clustered_cols(label, required=false) %}
{%- set cols = config.get('clustered_by', validator=validation.any[list, basestring]) -%}
{%- set buckets = config.get('buckets', validator=validation.any[int]) -%}
{%- if (cols is not none) and (buckets is not none) %}
@@ -80,6 +108,7 @@
{%- endif %}
{%- endmacro -%}


{% macro fetch_tbl_properties(relation) -%}
{% call statement('list_properties', fetch_result=True) -%}
SHOW TBLPROPERTIES {{ relation }}
@@ -88,12 +117,17 @@
{%- endmacro %}


{#-- We can't use temporary tables with `create ... as ()` syntax #}
{% macro create_temporary_view(relation, sql) -%}
{{ return(adapter.dispatch('create_temporary_view', 'dbt')(relation, sql)) }}
{%- endmacro -%}

{#-- We can't use temporary tables with `create ... as ()` syntax #}
{% macro spark__create_temporary_view(relation, sql) -%}
create temporary view {{ relation.include(schema=false) }} as
{{ sql }}
{% endmacro %}


{% macro spark__create_table_as(temporary, relation, sql) -%}
{% if temporary -%}
{{ create_temporary_view(relation, sql) }}
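The clause macros above are composed by `spark__create_table_as` (truncated in this view). As a rough illustration — config values assumed, not taken from the diff — a table model configured with file_format='parquet', partition_by='ds', and location_root='/mnt/data' would render DDL along these lines:

-- Illustrative output only; the clause ordering follows the macro
-- composition in create_table_as as of this PR.
create table my_schema.my_model
using parquet
partitioned by (ds)
location '/mnt/data/my_model'
as
select * from upstream_table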
9 changes: 8 additions & 1 deletion tests/unit/test_macros.py
@@ -15,7 +15,9 @@ def setUp(self):
'validation': mock.Mock(),
'model': mock.Mock(),
'exceptions': mock.Mock(),
'config': mock.Mock()
'config': mock.Mock(),
'adapter': mock.Mock(),
'return': lambda r: r,
}
self.default_context['config'].get = lambda key, default=None, **kwargs: self.config.get(key, default)

@@ -24,6 +26,11 @@ def __get_template(self, template_filename):

def __run_macro(self, template, name, temporary, relation, sql):
self.default_context['model'].alias = relation

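# Stub adapter.dispatch so the new dispatch wrappers resolve straight to
# the spark__-prefixed implementation defined in the template under test.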
def dispatch(macro_name, macro_namespace=None, packages=None):
return getattr(template.module, f'spark__{macro_name}')
self.default_context['adapter'].dispatch = dispatch

value = getattr(template.module, name)(temporary, relation, sql)
return re.sub(r'\s\s+', ' ', value)
