Commit 048b96b
Render all description fields, regardless of docrefs
remove the whole idea of docrefs
Jacob Beck committed Feb 6, 2020
1 parent be29156 commit 048b96b
Showing 11 changed files with 32 additions and 270 deletions.
core/dbt/clients/jinja.py (6 changes: 2 additions & 4 deletions)
@@ -379,10 +379,8 @@ def render_template(template, ctx, node=None):
     return template.render(ctx)
 
 
-def get_rendered(string, ctx, node=None,
-                 capture_macros=False):
-    template = get_template(string, ctx, node,
-                            capture_macros=capture_macros)
+def get_rendered(string, ctx, node=None, capture_macros=False):
+    template = get_template(string, ctx, node, capture_macros=capture_macros)
 
     return render_template(template, ctx, node)
 
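The signature collapse above is cosmetic, but get_rendered() is the entry point the rest of this commit leans on to render description text. A minimal usage sketch, assuming this revision of dbt is importable; the empty context and the sample string are illustrative, not from the commit:

    from dbt.clients.jinja import get_rendered

    # A description with no Jinja in it renders to itself, which is why it is
    # safe to render every description field unconditionally.
    description = "The unique identifier for a customer."
    print(get_rendered(description, {}))  # -> The unique identifier for a customer.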
core/dbt/context/docs.py (23 changes: 3 additions & 20 deletions)
@@ -1,5 +1,5 @@
 from typing import (
-    Any, Optional, List, Dict, Union
+    Any, Dict, Union
 )
 
 from dbt.exceptions import (
@@ -9,7 +9,7 @@
 from dbt.config.runtime import RuntimeConfig
 from dbt.contracts.graph.compiled import CompileResultNode
 from dbt.contracts.graph.manifest import Manifest
-from dbt.contracts.graph.parsed import Docref, ParsedMacro
+from dbt.contracts.graph.parsed import ParsedMacro
 
 from dbt.context.base import contextmember
 from dbt.context.configured import ConfiguredContext
@@ -20,29 +20,15 @@ def __init__(
         self,
         config: RuntimeConfig,
         node: Any,
-        docrefs: List[Docref],
-        column_name: Optional[str],
     ) -> None:
         super().__init__(config)
         self.node = node
-        self.docrefs = docrefs
-        self.column_name = column_name
 
     @contextmember
     def doc(self, *args: str) -> str:
         # when you call doc(), this is what happens at parse time
         if len(args) != 1 and len(args) != 2:
             doc_invalid_args(self.node, args)
-        doc_package_name = ''
-        doc_name = args[0]
-        if len(args) == 2:
-            doc_package_name = args[1]
-
-        docref = Docref(documentation_package=doc_package_name,
-                        documentation_name=doc_name,
-                        column_name=self.column_name)
-        self.docrefs.append(docref)
-
         # At parse time, nothing should care about what doc() returns
         return ''
 
@@ -87,11 +73,8 @@ def doc(self, *args: str) -> str:
 def generate_parser_docs(
     config: RuntimeConfig,
     unparsed: Any,
-    docrefs: List[Docref],
-    column_name: Optional[str] = None,
 ) -> Dict[str, Any]:
-
-    ctx = DocsParseContext(config, unparsed, docrefs, column_name)
+    ctx = DocsParseContext(config, unparsed)
     return ctx.to_dict()
 
 
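After this change the parse-time doc() does nothing but validate its arguments. A standalone sketch of that contract (not dbt's class; dbt reports the failure through doc_invalid_args rather than a bare ValueError):

    def parse_time_doc(*args: str) -> str:
        # doc() takes one or two string arguments: a doc name, optionally
        # qualified by a package name.
        if len(args) not in (1, 2):
            raise ValueError(f"doc() takes 1 or 2 arguments, got {len(args)}")
        # Nothing at parse time cares what doc() returns, and there is no
        # Docref bookkeeping left to do.
        return ''

    assert parse_time_doc('customer_id') == ''
    assert parse_time_doc('customer_id', 'my_package') == ''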
core/dbt/contracts/graph/parsed.py (17 changes: 0 additions & 17 deletions)
@@ -125,17 +125,6 @@ class ColumnInfo(JsonSchemaMixin, Replaceable):
     tags: List[str] = field(default_factory=list)
 
 
-# Docrefs are not quite like regular references, as they indicate what they
-# apply to as well as what they are referring to (so the doc package + doc
-# name, but also the column name if relevant). This is because column
-# descriptions are rendered separately from their models.
-@dataclass
-class Docref(JsonSchemaMixin, Replaceable):
-    documentation_name: str
-    documentation_package: str
-    column_name: Optional[str] = None
-
-
 @dataclass
 class HasFqn(JsonSchemaMixin, Replaceable):
     fqn: List[str]
@@ -186,7 +175,6 @@ def patch(self, patch: 'ParsedNodePatch'):
         self.patch_path: Optional[str] = patch.original_file_path
         self.description = patch.description
         self.columns = patch.columns
-        self.docrefs = patch.docrefs
         self.meta = patch.meta
         if dbt.flags.STRICT_MODE:
             assert isinstance(self, JsonSchemaMixin)
@@ -221,7 +209,6 @@ class ParsedNodeDefaults(ParsedNodeMandatory):
     refs: List[List[str]] = field(default_factory=list)
     sources: List[List[Any]] = field(default_factory=list)
     depends_on: DependsOn = field(default_factory=DependsOn)
-    docrefs: List[Docref] = field(default_factory=list)
     description: str = field(default='')
     columns: Dict[str, ColumnInfo] = field(default_factory=dict)
     meta: Dict[str, Any] = field(default_factory=dict)
@@ -467,7 +454,6 @@ def json_schema(cls, embeddable: bool = False) -> Dict[str, Any]:
 class ParsedPatch(HasYamlMetadata, Replaceable):
     name: str
     description: str
-    docrefs: List[Docref]
     meta: Dict[str, Any]
 
 
@@ -497,7 +483,6 @@ class ParsedMacro(UnparsedMacro, HasUniqueID):
     tags: List[str] = field(default_factory=list)
     # TODO: is this ever populated?
     depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)
-    docrefs: List[Docref] = field(default_factory=list)
     description: str = ''
     meta: Dict[str, Any] = field(default_factory=dict)
     patch_path: Optional[str] = None
@@ -516,7 +501,6 @@ def generator(self) -> MacroGenerator:
     def patch(self, patch: ParsedMacroPatch):
         self.patch_path: Optional[str] = patch.original_file_path
         self.description = patch.description
-        self.docrefs = patch.docrefs
         self.meta = patch.meta
         self.arguments = patch.arguments
         if dbt.flags.STRICT_MODE:
@@ -550,7 +534,6 @@ class ParsedSourceDefinition(
     loaded_at_field: Optional[str] = None
     freshness: Optional[FreshnessThreshold] = None
     external: Optional[ExternalTable] = None
-    docrefs: List[Docref] = field(default_factory=list)
     description: str = ''
     columns: Dict[str, ColumnInfo] = field(default_factory=dict)
     meta: Dict[str, Any] = field(default_factory=dict)
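With Docref gone, the parsed contracts keep only the raw description text itself, per node and per column, and rendering happens later in process_docs. A purely illustrative dataclass sketch of that shape (a simplified stand-in, not dbt's real ColumnInfo):

    from dataclasses import dataclass, field
    from typing import Dict, List

    @dataclass
    class ColumnInfoSketch:
        name: str
        description: str = ''  # stored un-rendered at parse time
        meta: Dict[str, str] = field(default_factory=dict)
        tags: List[str] = field(default_factory=list)

    col = ColumnInfoSketch("customer_id", "{{ doc('customer_id') }}")
    print(col.description)  # still the raw doc() call; process_docs renders it later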
core/dbt/parser/manifest.py (21 changes: 6 additions & 15 deletions)
@@ -446,17 +446,9 @@ def _process_docs_for_node(
     context: Dict[str, Any],
     node: NonSourceNode,
 ):
-    for docref in node.docrefs:
-        column_name = docref.column_name
-
-        if column_name is None:
-            obj = node
-        else:
-            obj = _get_node_column(node, column_name)
-
-        # At this point, we know that our documentation string has a
-        # 'docs("...")' pointing at it. We want to render it.
-        obj.description = get_rendered(obj.description, context)
+    node.description = get_rendered(node.description, context)
+    for column_name, column in node.columns.items():
+        column.description = get_rendered(column.description, context)
 
 
 def _process_docs_for_source(
@@ -479,10 +471,9 @@ def _process_docs_for_source(
 def _process_docs_for_macro(
     context: Dict[str, Any], macro: ParsedMacro
 ) -> None:
-    for docref in macro.docrefs:
-        macro.description = get_rendered(macro.description, context)
-        for arg in macro.arguments:
-            arg.description = get_rendered(arg.description, context)
+    macro.description = get_rendered(macro.description, context)
+    for arg in macro.arguments:
+        arg.description = get_rendered(arg.description, context)
 
 
 def process_docs(manifest: Manifest, config: RuntimeConfig):
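This is the heart of the commit: instead of walking node.docrefs to decide which descriptions mention doc(), every node, column, macro, and argument description is rendered unconditionally. That is safe because a description with no Jinja in it renders to itself. A plain-Jinja2 sketch of the idea, with jinja2 standing in for dbt's wrapped environment and a hypothetical doc() stub:

    from jinja2 import Template

    ctx = {"doc": lambda *args: "The customer identifier."}  # illustrative doc() stub

    for description in ("Plain text, no doc() anywhere.", "{{ doc('customer_id') }}"):
        # Rendering is a pass-through for the first string and a doc lookup
        # for the second.
        print(Template(description).render(ctx))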
core/dbt/parser/schemas.py (46 changes: 17 additions & 29 deletions)
@@ -19,7 +19,6 @@
     ParsedNodePatch,
     ParsedSourceDefinition,
     ColumnInfo,
-    Docref,
     ParsedTestNode,
     ParsedMacroPatch,
 )
@@ -76,7 +75,6 @@ class ParserRef:
     """A helper object to hold parse-time references."""
     def __init__(self):
         self.column_info: Dict[str, ColumnInfo] = {}
-        self.docrefs: List[Docref] = []
 
     def add(self, column: UnparsedColumn, description, data_type, meta):
         self.column_info[column.name] = ColumnInfo(
@@ -88,14 +86,12 @@ def add(self, column: UnparsedColumn, description, data_type, meta):
         )
 
 
-def collect_docrefs(
+def column_info(
     config: RuntimeConfig,
     target: UnparsedSchemaYaml,
-    refs: ParserRef,
-    column_name: Optional[str],
     *descriptions: str,
 ) -> None:
-    context = generate_parser_docs(config, target, refs.docrefs, column_name)
+    context = generate_parser_docs(config, target)
     for description in descriptions:
         get_rendered(description, context)
 
@@ -360,15 +356,12 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]:
     def parse_docs(self, block: TargetBlock) -> ParserRef:
         refs = ParserRef()
         for column in block.columns:
-            column_name = column.name
             description = column.description
             data_type = column.data_type
             meta = column.meta
-            collect_docrefs(
+            column_info(
                 self.root_project,
                 block.target,
-                refs,
-                column_name,
                 description,
             )
 
@@ -446,12 +439,11 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]:
     def parse_docs(self, block: TargetBlock) -> ParserRef:
         refs = ParserRef()
         for column in block.columns:
-            column_name = column.name
             description = column.description
             data_type = column.data_type
             meta = column.meta
-            collect_docrefs(
-                self.root_project, block.target, refs, column_name, description
+            column_info(
+                self.root_project, block.target, description
             )
 
             refs.add(column, description, data_type, meta)
@@ -533,9 +525,8 @@ def parse_patch(
         description = table.description or ''
         meta = table.meta or {}
         source_description = source.description or ''
-        collect_docrefs(
-            self.root_project, source, refs, None, description,
-            source_description
+        column_info(
+            self.root_project, source, description, source_description
         )
 
         loaded_at_field = table.loaded_at_field or source.loaded_at_field
@@ -566,7 +557,6 @@ def parse_patch(
             source_meta=source_meta,
             meta=meta,
             loader=source.loader,
-            docrefs=refs.docrefs,
             loaded_at_field=loaded_at_field,
             freshness=freshness,
             quoting=quoting,
@@ -580,12 +570,12 @@
 class NonSourceParser(
     YamlDocsReader[NonSourceTarget, Parsed], Generic[NonSourceTarget, Parsed]
 ):
-    def collect_docrefs(
-        self, block: TargetBlock[NonSourceTarget], refs: ParserRef
+    def collect_column_info(
+        self, block: TargetBlock[NonSourceTarget]
     ) -> str:
         description = block.target.description
-        collect_docrefs(
-            self.root_project, block.target, refs, None, description
+        column_info(
+            self.root_project, block.target, description
         )
         return description
 
@@ -618,15 +608,14 @@ class NodePatchParser(
     def parse_patch(
         self, block: TargetBlock[NodeTarget], refs: ParserRef
     ) -> None:
-        description = self.collect_docrefs(block, refs)
+        description = self.collect_column_info(block)
         result = ParsedNodePatch(
             name=block.target.name,
             original_file_path=block.target.original_file_path,
             yaml_key=block.target.yaml_key,
             package_name=block.target.package_name,
             description=description,
             columns=refs.column_info,
-            docrefs=refs.docrefs,
             meta=block.target.meta,
         )
         self.results.add_patch(self.yaml.file, result)
@@ -649,13 +638,13 @@ def _target_type(self) -> Type[UnparsedAnalysisUpdate]:
 
 
 class MacroPatchParser(NonSourceParser[UnparsedMacroUpdate, ParsedMacroPatch]):
-    def collect_docrefs(
-        self, block: TargetBlock[UnparsedMacroUpdate], refs: ParserRef
+    def collect_column_info(
+        self, block: TargetBlock[UnparsedMacroUpdate]
     ) -> str:
         description = block.target.description
         arg_docs = [arg.description for arg in block.target.arguments]
-        collect_docrefs(
-            self.root_project, block.target, refs, None, description, *arg_docs
+        column_info(
+            self.root_project, block.target, description, *arg_docs
        )
         return description
 
@@ -668,7 +657,7 @@ def _target_type(self) -> Type[UnparsedMacroUpdate]:
     def parse_patch(
         self, block: TargetBlock[UnparsedMacroUpdate], refs: ParserRef
     ) -> None:
-        description = self.collect_docrefs(block, refs)
+        description = self.collect_column_info(block)
 
         result = ParsedMacroPatch(
             name=block.target.name,
@@ -677,7 +666,6 @@ def parse_patch(
             package_name=block.target.package_name,
             arguments=block.target.arguments,
             description=description,
-            docrefs=refs.docrefs,
             meta=block.target.meta,
         )
         self.results.add_macro_patch(self.yaml.file, result)
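ParserRef now carries only per-column ColumnInfo, and the renamed column_info() helper renders each description it is handed and discards the result, so malformed Jinja or a doc() call with the wrong number of arguments still fails during parsing. A standalone sketch of that render-and-discard validation pattern (the names and the plain-Jinja environment are illustrative, not dbt's):

    from jinja2 import Template

    def validate_descriptions(context: dict, *descriptions: str) -> None:
        for description in descriptions:
            # The rendered value is thrown away; all that matters is that
            # rendering does not raise.
            Template(description).render(context)

    ctx = {"doc": lambda *args: ""}  # parse-time doc() just returns ''
    validate_descriptions(ctx, "plain text", "{{ doc('customer_id') }}")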
(The remaining 6 changed files are not shown.)
