add macro argument definitions (#2081, #2083) #2096

Merged · 2 commits · Feb 10, 2020
6 changes: 2 additions & 4 deletions core/dbt/clients/jinja.py
@@ -379,10 +379,8 @@ def render_template(template, ctx, node=None):
return template.render(ctx)


def get_rendered(string, ctx, node=None,
capture_macros=False):
template = get_template(string, ctx, node,
capture_macros=capture_macros)
def get_rendered(string, ctx, node=None, capture_macros=False):
template = get_template(string, ctx, node, capture_macros=capture_macros)

return render_template(template, ctx, node)

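The consolidated get_rendered signature above is what the rest of this PR calls to render description strings. A minimal sketch of a call site, assuming a plain dict is acceptable as ctx (which the template.render(ctx) call in render_template suggests); the description text is invented for illustration:

from dbt.clients.jinja import get_rendered

# Hypothetical description string containing a Jinja expression.
description = "Casts {{ col }} from cents to dollars."

# Render against a plain dict context; node and capture_macros keep their
# defaults (None and False).
rendered = get_rendered(description, {"col": "amount_cents"})
# rendered == "Casts amount_cents from cents to dollars."
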
23 changes: 3 additions & 20 deletions core/dbt/context/docs.py
@@ -1,5 +1,5 @@
from typing import (
Any, Optional, List, Dict, Union
Any, Dict, Union
)

from dbt.exceptions import (
@@ -9,7 +9,7 @@
from dbt.config.runtime import RuntimeConfig
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.parsed import Docref, ParsedMacro
from dbt.contracts.graph.parsed import ParsedMacro

from dbt.context.base import contextmember
from dbt.context.configured import ConfiguredContext
@@ -20,29 +20,15 @@ def __init__(
self,
config: RuntimeConfig,
node: Any,
docrefs: List[Docref],
column_name: Optional[str],
) -> None:
super().__init__(config)
self.node = node
self.docrefs = docrefs
self.column_name = column_name

@contextmember
def doc(self, *args: str) -> str:
# when you call doc(), this is what happens at parse time
if len(args) != 1 and len(args) != 2:
doc_invalid_args(self.node, args)
doc_package_name = ''
doc_name = args[0]
if len(args) == 2:
doc_package_name = args[1]

docref = Docref(documentation_package=doc_package_name,
documentation_name=doc_name,
column_name=self.column_name)
self.docrefs.append(docref)

# At parse time, nothing should care about what doc() returns
return ''

@@ -87,11 +73,8 @@ def doc(self, *args: str) -> str:
def generate_parser_docs(
config: RuntimeConfig,
unparsed: Any,
docrefs: List[Docref],
column_name: Optional[str] = None,
) -> Dict[str, Any]:

ctx = DocsParseContext(config, unparsed, docrefs, column_name)
ctx = DocsParseContext(config, unparsed)
return ctx.to_dict()


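With docrefs gone, the parse-time doc() no longer records anything: it only checks its arguments and returns an empty string, leaving all rendering to the docs pass in manifest.py. A condensed sketch of that behaviour (error handling simplified; dbt actually raises via doc_invalid_args):

def doc(*args: str) -> str:
    # Parse-time stub: accept one or two string arguments (the doc name,
    # optionally qualified by a package), but return '' because nothing at
    # parse time uses the result.
    if len(args) not in (1, 2):
        raise ValueError("doc() takes one or two string arguments")
    return ''
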
25 changes: 5 additions & 20 deletions core/dbt/contracts/graph/parsed.py
@@ -23,7 +23,7 @@
from dbt.contracts.graph.unparsed import (
UnparsedNode, UnparsedMacro, UnparsedDocumentationFile, Quoting,
UnparsedBaseNode, FreshnessThreshold, ExternalTable,
AdditionalPropertiesAllowed, HasYamlMetadata
AdditionalPropertiesAllowed, HasYamlMetadata, MacroArgument
)
from dbt.contracts.util import Replaceable, list_str
from dbt.logger import GLOBAL_LOGGER as logger # noqa
@@ -125,17 +125,6 @@ class ColumnInfo(JsonSchemaMixin, Replaceable):
tags: List[str] = field(default_factory=list)


# Docrefs are not quite like regular references, as they indicate what they
# apply to as well as what they are referring to (so the doc package + doc
# name, but also the column name if relevant). This is because column
# descriptions are rendered separately from their models.
@dataclass
class Docref(JsonSchemaMixin, Replaceable):
documentation_name: str
documentation_package: str
column_name: Optional[str] = None


@dataclass
class HasFqn(JsonSchemaMixin, Replaceable):
fqn: List[str]
@@ -186,7 +175,6 @@ def patch(self, patch: 'ParsedNodePatch'):
self.patch_path: Optional[str] = patch.original_file_path
self.description = patch.description
self.columns = patch.columns
self.docrefs = patch.docrefs
self.meta = patch.meta
if dbt.flags.STRICT_MODE:
assert isinstance(self, JsonSchemaMixin)
@@ -221,7 +209,6 @@ class ParsedNodeDefaults(ParsedNodeMandatory):
refs: List[List[str]] = field(default_factory=list)
sources: List[List[Any]] = field(default_factory=list)
depends_on: DependsOn = field(default_factory=DependsOn)
docrefs: List[Docref] = field(default_factory=list)
description: str = field(default='')
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
meta: Dict[str, Any] = field(default_factory=dict)
@@ -467,7 +454,6 @@ def json_schema(cls, embeddable: bool = False) -> Dict[str, Any]:
class ParsedPatch(HasYamlMetadata, Replaceable):
name: str
description: str
docrefs: List[Docref]
meta: Dict[str, Any]


@@ -481,7 +467,7 @@ class ParsedNodePatch(ParsedPatch):

@dataclass
class ParsedMacroPatch(ParsedPatch):
pass
arguments: List[MacroArgument] = field(default_factory=list)


@dataclass
@@ -497,10 +483,10 @@ class ParsedMacro(UnparsedMacro, HasUniqueID):
tags: List[str] = field(default_factory=list)
# TODO: is this ever populated?
depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)
docrefs: List[Docref] = field(default_factory=list)
description: str = field(default='')
description: str = ''
meta: Dict[str, Any] = field(default_factory=dict)
patch_path: Optional[str] = None
arguments: List[MacroArgument] = field(default_factory=list)

def local_vars(self):
return {}
@@ -515,8 +501,8 @@ def generator(self) -> MacroGenerator:
def patch(self, patch: ParsedMacroPatch):
self.patch_path: Optional[str] = patch.original_file_path
self.description = patch.description
self.docrefs = patch.docrefs
self.meta = patch.meta
self.arguments = patch.arguments
if dbt.flags.STRICT_MODE:
assert isinstance(self, JsonSchemaMixin)
self.to_dict(validate=True)
@@ -548,7 +534,6 @@ class ParsedSourceDefinition(
loaded_at_field: Optional[str] = None
freshness: Optional[FreshnessThreshold] = None
external: Optional[ExternalTable] = None
docrefs: List[Docref] = field(default_factory=list)
description: str = ''
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
meta: Dict[str, Any] = field(default_factory=dict)
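
The net effect on the macro contract: ParsedMacroPatch and ParsedMacro both gain an arguments list, and patching copies it across along with description and meta. A hypothetical patch application (field values invented; `macro` stands for a pre-existing ParsedMacro for this macro):

patch = ParsedMacroPatch(
    name="cents_to_dollars",
    original_file_path="macros/schema.yml",
    yaml_key="macros",
    package_name="my_project",
    arguments=[
        MacroArgument(name="column_name", type="column name",
                      description="The column to convert."),
    ],
    description="Converts a cents column to dollars.",
    meta={},
)

# After patching, the macro's description, meta, and arguments all come
# from the schema.yml entry.
macro.patch(patch)
assert macro.arguments == patch.arguments
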
9 changes: 8 additions & 1 deletion core/dbt/contracts/graph/unparsed.py
@@ -110,9 +110,16 @@ class UnparsedNodeUpdate(HasColumnTests, HasTests, HasYamlMetadata):
pass


@dataclass
class MacroArgument(JsonSchemaMixin):
name: str
type: Optional[str] = None
description: str = ''


@dataclass
class UnparsedMacroUpdate(HasDocs, HasYamlMetadata):
pass
arguments: List[MacroArgument] = field(default_factory=list)


class TimePeriod(StrEnum):
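
MacroArgument is the YAML-facing contract for the new arguments block on macro entries in schema.yml. A hypothetical example of a loaded entry and how each item maps onto the dataclass (field names taken from the definition above; the macro itself is invented):

macro_entry = {
    "name": "cents_to_dollars",
    "description": "Converts a cents column to dollars.",
    "arguments": [
        {"name": "column_name", "type": "column name",
         "description": "The column to convert."},
        {"name": "precision", "type": "integer",
         "description": "Number of decimal places to keep."},
    ],
}

# Each item under `arguments` carries the three MacroArgument fields;
# `type` is optional and `description` defaults to ''.
args = [MacroArgument(**entry) for entry in macro_entry["arguments"]]
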
30 changes: 10 additions & 20 deletions core/dbt/parser/manifest.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

from dbt.logger import GLOBAL_LOGGER as logger, DbtProcessState
from dbt.node_types import NodeType
from dbt.clients.jinja import get_rendered
from dbt.clients.system import make_directory
from dbt.config import Project, RuntimeConfig
from dbt.context.docs import generate_runtime_docs
@@ -445,18 +446,9 @@ def _process_docs_for_node(
context: Dict[str, Any],
node: NonSourceNode,
):
for docref in node.docrefs:
column_name = docref.column_name

if column_name is None:
obj = node
else:
obj = _get_node_column(node, column_name)

raw = obj.description or ''
# At this point, we know that our documentation string has a
# 'docs("...")' pointing at it. We want to render it.
obj.description = dbt.clients.jinja.get_rendered(raw, context)
node.description = get_rendered(node.description, context)
for column_name, column in node.columns.items():
column.description = get_rendered(column.description, context)


def _process_docs_for_source(
@@ -465,25 +457,23 @@
):
table_description = source.description
source_description = source.source_description
table_description = dbt.clients.jinja.get_rendered(table_description,
context)
source_description = dbt.clients.jinja.get_rendered(source_description,
context)
table_description = get_rendered(table_description, context)
source_description = get_rendered(source_description, context)
source.description = table_description
source.source_description = source_description

for column in source.columns.values():
column_desc = column.description
column_desc = dbt.clients.jinja.get_rendered(column_desc, context)
column_desc = get_rendered(column_desc, context)
column.description = column_desc


def _process_docs_for_macro(
context: Dict[str, Any], macro: ParsedMacro
) -> None:
for docref in macro.docrefs:
raw = macro.description or ''
macro.description = dbt.clients.jinja.get_rendered(raw, context)
macro.description = get_rendered(macro.description, context)
for arg in macro.arguments:
arg.description = get_rendered(arg.description, context)


def process_docs(manifest: Manifest, config: RuntimeConfig):
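
_process_docs_for_macro now renders both the macro description and every argument description through the docs context. A self-contained illustration, with plain Jinja2 standing in for dbt's get_rendered and a stub doc() standing in for the runtime docs context (both are assumptions made for brevity):

from jinja2 import Template

def render(text: str, ctx: dict) -> str:
    return Template(text).render(**ctx)

# Stand-in for the runtime docs context, where doc() resolves to the
# contents of a docs block.
ctx = {"doc": lambda name: f"(contents of docs block '{name}')"}

macro_description = "{{ doc('cents_to_dollars') }}"
argument_descriptions = ["The column to convert.", "{{ doc('precision_arg') }}"]

macro_description = render(macro_description, ctx)
argument_descriptions = [render(d, ctx) for d in argument_descriptions]
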
49 changes: 24 additions & 25 deletions core/dbt/parser/schemas.py
@@ -19,7 +19,6 @@
ParsedNodePatch,
ParsedSourceDefinition,
ColumnInfo,
Docref,
ParsedTestNode,
ParsedMacroPatch,
)
@@ -76,7 +75,6 @@ class ParserRef:
"""A helper object to hold parse-time references."""
def __init__(self):
self.column_info: Dict[str, ColumnInfo] = {}
self.docrefs: List[Docref] = []

def add(self, column: UnparsedColumn, description, data_type, meta):
self.column_info[column.name] = ColumnInfo(
@@ -88,14 +86,12 @@ def add(self, column: UnparsedColumn, description, data_type, meta):
)


def collect_docrefs(
def column_info(
config: RuntimeConfig,
target: UnparsedSchemaYaml,
refs: ParserRef,
column_name: Optional[str],
*descriptions: str,
) -> None:
context = generate_parser_docs(config, target, refs.docrefs, column_name)
context = generate_parser_docs(config, target)
for description in descriptions:
get_rendered(description, context)

@@ -360,15 +356,12 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]:
def parse_docs(self, block: TargetBlock) -> ParserRef:
refs = ParserRef()
for column in block.columns:
column_name = column.name
description = column.description
data_type = column.data_type
meta = column.meta
collect_docrefs(
column_info(
self.root_project,
block.target,
refs,
column_name,
description,
)

@@ -446,12 +439,11 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]:
def parse_docs(self, block: TargetBlock) -> ParserRef:
refs = ParserRef()
for column in block.columns:
column_name = column.name
description = column.description
data_type = column.data_type
meta = column.meta
collect_docrefs(
self.root_project, block.target, refs, column_name, description
column_info(
self.root_project, block.target, description
)

refs.add(column, description, data_type, meta)
@@ -533,9 +525,8 @@ def parse_patch(
description = table.description or ''
meta = table.meta or {}
source_description = source.description or ''
collect_docrefs(
self.root_project, source, refs, None, description,
source_description
column_info(
self.root_project, source, description, source_description
)

loaded_at_field = table.loaded_at_field or source.loaded_at_field
@@ -566,7 +557,6 @@ def parse_patch(
source_meta=source_meta,
meta=meta,
loader=source.loader,
docrefs=refs.docrefs,
loaded_at_field=loaded_at_field,
freshness=freshness,
quoting=quoting,
@@ -580,12 +570,12 @@
class NonSourceParser(
YamlDocsReader[NonSourceTarget, Parsed], Generic[NonSourceTarget, Parsed]
):
def collect_docrefs(
self, block: TargetBlock[NonSourceTarget], refs: ParserRef
def collect_column_info(
self, block: TargetBlock[NonSourceTarget]
) -> str:
description = block.target.description
collect_docrefs(
self.root_project, block.target, refs, None, description
column_info(
self.root_project, block.target, description
)
return description

@@ -618,15 +608,14 @@ class NodePatchParser(
def parse_patch(
self, block: TargetBlock[NodeTarget], refs: ParserRef
) -> None:
description = self.collect_docrefs(block, refs)
description = self.collect_column_info(block)
result = ParsedNodePatch(
name=block.target.name,
original_file_path=block.target.original_file_path,
yaml_key=block.target.yaml_key,
package_name=block.target.package_name,
description=description,
columns=refs.column_info,
docrefs=refs.docrefs,
meta=block.target.meta,
)
self.results.add_patch(self.yaml.file, result)
@@ -649,6 +638,16 @@ def _target_type(self) -> Type[UnparsedAnalysisUpdate]:


class MacroPatchParser(NonSourceParser[UnparsedMacroUpdate, ParsedMacroPatch]):
def collect_column_info(
self, block: TargetBlock[UnparsedMacroUpdate]
) -> str:
description = block.target.description
arg_docs = [arg.description for arg in block.target.arguments]
column_info(
self.root_project, block.target, description, *arg_docs
)
return description

def get_block(self, node: UnparsedMacroUpdate) -> TargetBlock:
return TargetBlock.from_yaml_block(self.yaml, node)

@@ -658,15 +657,15 @@ def _target_type(self) -> Type[UnparsedMacroUpdate]:
def parse_patch(
self, block: TargetBlock[UnparsedMacroUpdate], refs: ParserRef
) -> None:
description = self.collect_docrefs(block, refs)
description = self.collect_column_info(block)

result = ParsedMacroPatch(
name=block.target.name,
original_file_path=block.target.original_file_path,
yaml_key=block.target.yaml_key,
package_name=block.target.package_name,
arguments=block.target.arguments,
description=description,
docrefs=refs.docrefs,
meta=block.target.meta,
)
self.results.add_macro_patch(self.yaml.file, result)