From ccc47456bcb99407d60ce7f2b8ad9ac37572e126 Mon Sep 17 00:00:00 2001 From: Corey Oordt Date: Sun, 13 Feb 2022 10:14:25 -0600 Subject: [PATCH] Added code --- cookie_composer/__init__.py | 2 + cookie_composer/_commands/__init__.py | 0 cookie_composer/_commands/_create.py | 37 ++++ cookie_composer/cli.py | 39 +++++ cookie_composer/composition.py | 210 +++++++++++++++++++++++ cookie_composer/data_merge.py | 97 +++++++++++ cookie_composer/exceptions.py | 26 +++ cookie_composer/layers.py | 201 ++++++++++++++++++++++ cookie_composer/matching.py | 27 +++ cookie_composer/merge_files/__init__.py | 39 +++++ cookie_composer/merge_files/json_file.py | 47 +++++ cookie_composer/merge_files/yaml_file.py | 54 ++++++ pyproject.toml | 2 +- 13 files changed, 780 insertions(+), 1 deletion(-) create mode 100644 cookie_composer/__init__.py create mode 100644 cookie_composer/_commands/__init__.py create mode 100644 cookie_composer/_commands/_create.py create mode 100644 cookie_composer/cli.py create mode 100644 cookie_composer/composition.py create mode 100644 cookie_composer/data_merge.py create mode 100644 cookie_composer/exceptions.py create mode 100644 cookie_composer/layers.py create mode 100644 cookie_composer/matching.py create mode 100644 cookie_composer/merge_files/__init__.py create mode 100644 cookie_composer/merge_files/json_file.py create mode 100644 cookie_composer/merge_files/yaml_file.py diff --git a/cookie_composer/__init__.py b/cookie_composer/__init__.py new file mode 100644 index 0000000..9de4d2a --- /dev/null +++ b/cookie_composer/__init__.py @@ -0,0 +1,2 @@ +"""The Cookiecutter Composer.""" +__version__: str = "0.1.0" diff --git a/cookie_composer/_commands/__init__.py b/cookie_composer/_commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cookie_composer/_commands/_create.py b/cookie_composer/_commands/_create.py new file mode 100644 index 0000000..c4e998e --- /dev/null +++ b/cookie_composer/_commands/_create.py @@ -0,0 +1,37 @@ +"""Methods 
for generating projects.""" + +from typing import Optional + +from pathlib import Path + +from cookie_composer.composition import ( + LayerConfig, + ProjectComposition, + is_composition_file, + read_composition, +) +from cookie_composer.layers import process_composition + + +def create( + path_or_url: str, + output_dir: Optional[Path] = None, +) -> Path: + """ + Generate a new project from a composition file, local template or remote template. + + Args: + path_or_url: The path or url to the composition file or template + output_dir: Where to generate the project + + Returns: + The path to the generated project. + """ + output_dir = output_dir or Path(".") + if is_composition_file(path_or_url): + composition = read_composition(path_or_url, output_dir) + else: + tmpl = LayerConfig(template=path_or_url) + composition = ProjectComposition(layers=[tmpl], destination=output_dir) + process_composition(composition) + return output_dir diff --git a/cookie_composer/cli.py b/cookie_composer/cli.py new file mode 100644 index 0000000..f362087 --- /dev/null +++ b/cookie_composer/cli.py @@ -0,0 +1,39 @@ +"""Command line setup.""" +from typing import Optional + +from pathlib import Path + +import typer + +from cookie_composer._commands import _create + +app = typer.Typer() + + +@app.command() +def create(path_or_url: str, output_dir: Optional[Path] = None): + """ + Create a project from a template or configuration. + + Args: + path_or_url: The path or URL to the template or composition file + output_dir: Where to write the output + """ + _create.create(path_or_url, output_dir) + + +@app.command() +def add(path_or_url: str): + """ + Add a template or configuration to an existing project. + + Args: + path_or_url: A URL or string to add the template or configuration + """ + + +@app.command() +def update(): + """ + Update the project to the latest version of each template. 
+ """ diff --git a/cookie_composer/composition.py b/cookie_composer/composition.py new file mode 100644 index 0000000..3ccb03d --- /dev/null +++ b/cookie_composer/composition.py @@ -0,0 +1,210 @@ +"""Project configuration and options.""" +from typing import Any, Dict, List, Optional, Union + +import logging +from enum import Enum +from pathlib import Path + +from pydantic import AnyHttpUrl, BaseModel, DirectoryPath, Field + +from cookie_composer.exceptions import MissingCompositionFileError +from cookie_composer.matching import rel_fnmatch + +logger = logging.getLogger(__name__) + + +class MergeStrategy(str, Enum): + """Strategies of merging files and data.""" + + DO_NOT_MERGE = "do-not-merge" + """Do not merge the data, use the file path to determine what to do.""" + + NESTED_OVERWRITE = "nested-overwrite" + """Merge deeply nested structures and overwrite at the lowest level; A deep ``dict.update()``.""" + + OVERWRITE = "overwrite" + """Overwrite at the top level like ``dict.update()``.""" + + COMPREHENSIVE = "comprehensive" + """Comprehensively merge the two data structures. + + - Scalars are overwritten by the new values + - lists are merged and de-duplicated + - dicts are recursively merged + """ + + +class LayerConfig(BaseModel): + """Configuration for a layer of a composition.""" + + # + # Template specification + # + template: Union[str, AnyHttpUrl] + """The path or URL to the template.""" + + directory: Optional[str] + """Directory within a git repository template that holds the cookiecutter.json file.""" + + checkout: Optional[str] + """The branch, tag or commit to use if template is a git repository. 
+ + Also used for updating projects.""" + + password: Optional[str] + """The password to use if template is a password-protected Zip archive.""" + + commit: Optional[str] + """What git hash was applied if the template is a git repository.""" + + # + # Input specification + # + no_input: bool = False + """Do not prompt for parameters and only use cookiecutter.json file content. + + This is only used for initial generation. After initial generation, the results + are stored in the context.""" + + context: Dict[str, Any] = Field(default_factory=dict) + """Dictionary that will provide values for input. + + Also stores the answers for missing context parameters after initial generation.""" + + # + # File generation + # + skip_hooks: bool = False + """Skip the template hooks.""" + + skip_if_file_exists: bool = True + """Skip the files in the corresponding directories if they already exist.""" + + skip_generation: List[str] = Field(default_factory=list) + """Paths or glob patterns to skip attempting to generate.""" + + overwrite: List[str] = Field(default_factory=list) + """Paths or glob patterns to always overwrite.""" + + overwrite_exclude: List[str] = Field(default_factory=list) + """Paths or glob patterns to exclude from overwriting.""" + + merge_strategies: Dict[str, MergeStrategy] = Field( + default_factory=lambda: {"*": "do-not-merge"} + ) + """The method to merge specific paths or glob patterns.""" + + +class RenderedLayer(BaseModel): + """Information about a rendered layer.""" + + layer: LayerConfig + """The original layer configuration that was rendered.""" + + location: DirectoryPath + """The location to the rendered layer.""" + + new_context: Dict[str, Any] + """The context based on questions asked.""" + + latest_commit: Optional[str] = None + """The latest commit checkout out.""" + + +class ProjectComposition(BaseModel): + """Composition of templates for a project.""" + + layers: List[LayerConfig] + destination: DirectoryPath + + +def 
is_composition_file(path_or_url: Union[str, Path]) -> bool: + """ + Is the filename a composition file? + + Args: + path_or_url: The path or URL to check + + Returns: + ``True`` if the path is a configuration file. + """ + return Path(path_or_url).suffix in {".yaml", ".yml"} + + +def read_composition( + path_or_url: Union[str, Path], destination: Union[str, Path] +) -> ProjectComposition: + """ + Read a JSON or YAML file and return a ProjectComposition. + + Args: + path_or_url: The location of the configuration file + destination: Where the destination of the project should be rendered + + Returns: + A project composition + + Raises: + MissingCompositionFileError: Raised when it can not access the configuration file. + """ + import fsspec + from ruyaml import YAML + + yaml = YAML(typ="safe") + try: + of = fsspec.open(path_or_url, mode="rt") + with of as f: + contents = list(yaml.load_all(f)) + templates = [LayerConfig(**doc) for doc in contents] + return ProjectComposition( + layers=templates, destination=Path(destination).expanduser().resolve() + ) + except (ValueError, FileNotFoundError) as e: + raise MissingCompositionFileError(path_or_url) from e + + +def write_composition(layers: list, destination: Union[str, Path]): + """ + Write a JSON or YAML composition file. + + Args: + layers: The layers of the composition + destination: Where to write the file + """ + import fsspec + from ruyaml import YAML + + yaml = YAML(typ="safe") + of = fsspec.open(destination, mode="wt") + dict_layers = [layer.dict() for layer in layers] + with of as f: + yaml.dump_all(dict_layers, f) + + +def get_merge_strategy(path: Path, merge_strategies: Dict[str, str]) -> MergeStrategy: + """ + Return the merge strategy of the path based on the layer configured rules. + + Files that are not mergable return ``MergeStrategy.DO_NOT_MERGE`` + + Args: + path: The file path to evaluate. + merge_strategies: The glob pattern->strategy mapping + + Returns: + The appropriate merge strategy. 
+ """ + from cookie_composer.merge_files import MERGE_FUNCTIONS + + strategy = MergeStrategy.DO_NOT_MERGE # The default + + if path.suffix not in MERGE_FUNCTIONS: + return MergeStrategy.DO_NOT_MERGE + + for pattern, strat in merge_strategies.items(): + if rel_fnmatch(str(path), pattern): + logger.debug(f"{path} matches merge strategy pattern {pattern} for {strat}") + strategy = strat + break + + return strategy diff --git a/cookie_composer/data_merge.py b/cookie_composer/data_merge.py new file mode 100644 index 0000000..bbf325f --- /dev/null +++ b/cookie_composer/data_merge.py @@ -0,0 +1,97 @@ +"""Tools for merging data.""" +from typing import Any, Iterable + +import copy +from functools import reduce + + +def deep_merge(*dicts) -> dict: + """ + Merges dicts deeply. + + Args: + dicts: List of dicts to merge with the first one the base + + Returns: + dict: The merged dict + """ + + def merge_into(d1, d2): + for key in d2: + if key not in d1 or not isinstance(d1[key], dict): + d1[key] = copy.deepcopy(d2[key]) + else: + d1[key] = merge_into(d1[key], d2[key]) + return d1 + + return reduce(merge_into, dicts, {}) + + +def merge_iterables(iter1: Iterable, iter2: Iterable) -> set: + """ + Merge and de-duplicate a bunch of lists into a single list. + + Order is not guaranteed. + + Args: + iter1: An Iterable + iter2: An Iterable + + Returns: + The merged, de-duplicated sequence as a set + """ + from itertools import chain + + return set(chain(iter1, iter2)) + + +def comprehensive_merge(*args) -> Any: + """ + Merges data comprehensively. + + All arguments must be of the same type. 
+ + - Scalars are overwritten by the new values + - lists are merged and de-duplicated + - dicts are recursively merged + + Args: + args: List of dicts to merge with the first one the base + + Returns: + The merged data + + Raises: + ValueError: If the values are not of the same type + """ + + def merge_into(d1, d2): + if type(d1) != type(d2): + raise ValueError(f"Cannot merge {type(d2)} into {type(d1)}.") + + if isinstance(d1, list): + return list(merge_iterables(d1, d2)) + elif isinstance(d1, set): + return merge_iterables(d1, d2) + elif isinstance(d1, tuple): + return tuple(merge_iterables(d1, d2)) + elif isinstance(d1, dict): + for key in d2: + if key in d1: + d1[key] = merge_into(d1[key], d2[key]) + else: + d1[key] = copy.deepcopy(d2[key]) + return d1 + else: + return copy.deepcopy(d2) + + if isinstance(args[0], list): + return reduce(merge_into, args, []) + elif isinstance(args[0], tuple): + return reduce(merge_into, args, tuple()) + elif isinstance(args[0], set): + return reduce(merge_into, args, set()) + elif isinstance(args[0], dict): + return reduce(merge_into, args, {}) + else: + return reduce(merge_into, args) diff --git a/cookie_composer/exceptions.py b/cookie_composer/exceptions.py new file mode 100644 index 0000000..9e975cb --- /dev/null +++ b/cookie_composer/exceptions.py @@ -0,0 +1,26 @@ +"""Exceptions raised when bad things happen.""" +from typing import Optional + + +class MissingCompositionFileError(Exception): + """The composition is missing or inaccessible.""" + + def __init__(self, path_or_url: str): + msg = f"The composition is missing or inaccessible at {path_or_url}" + super().__init__(msg) + + +class MergeError(Exception): + """There was a problem merging a file.""" + + def __init__( + self, + origin: Optional[str] = None, + destination: Optional[str] = None, + strategy: Optional[str] = None, + error_message: Optional[str] = "", + ): + if origin and destination and strategy: + msg = f"There was a problem merging {origin} and {destination} 
using {strategy}: {error_message}" + super().__init__(msg) + super().__init__(error_message) diff --git a/cookie_composer/layers.py b/cookie_composer/layers.py new file mode 100644 index 0000000..6d18fe3 --- /dev/null +++ b/cookie_composer/layers.py @@ -0,0 +1,201 @@ +"""Layer management.""" +from typing import Mapping + +import logging +import os +import shutil +import tempfile +from enum import Enum +from pathlib import Path + +from cookie_composer.composition import ( + LayerConfig, + MergeStrategy, + ProjectComposition, + RenderedLayer, + get_merge_strategy, + write_composition, +) +from cookie_composer.data_merge import comprehensive_merge +from cookie_composer.matching import matches_any_glob +from cookie_composer.merge_files import MERGE_FUNCTIONS + +from ._vendor.cookiecutter.config import get_user_config +from ._vendor.cookiecutter.generate import generate_context, generate_files +from ._vendor.cookiecutter.prompt import prompt_for_config +from ._vendor.cookiecutter.repository import determine_repo_dir +from ._vendor.cookiecutter.utils import rmtree + +logger = logging.getLogger(__name__) + + +class WriteStrategy(Enum): + """How to deal with a file.""" + + WRITE = 1 + """Write or overwrite the file.""" + + SKIP = 2 + """Skip the file.""" + + MERGE = 3 + """Merge the file with an existing file, or write a new file.""" + + +def render_layer( + layer_config: LayerConfig, render_dir: Path, full_context: Mapping = None +) -> RenderedLayer: + """ + Process one layer of the template composition. 
+ + Renders the template using cookiecutter + + Args: + layer_config: The configuration of the layer to render + render_dir: Where to render the template + full_context: The extra context from all layers in the composition + + Returns: + The rendered layer information + """ + config_dict = get_user_config(config_file=None, default_config=False) + + repo_dir, cleanup = determine_repo_dir( + template=layer_config.template, + abbreviations=config_dict["abbreviations"], + clone_to_dir=config_dict["cookiecutters_dir"], + checkout=layer_config.commit or layer_config.checkout, + no_input=layer_config.no_input, + password=layer_config.password, + directory=layer_config.directory, + ) + context = generate_context( + context_file=Path(repo_dir) / "cookiecutter.json", + default_context=config_dict["default_context"], + extra_context=full_context, + ) + context["cookiecutter"] = prompt_for_config(context, layer_config.no_input) + + # TODO: Get the latest commit, if it is a git repository + latest_commit = None + + rendered_layer = RenderedLayer( + layer=layer_config, + location=render_dir, + new_context=context["cookiecutter"], + latest_commit=latest_commit, + ) + + # call cookiecutter's generate files function + generate_files( + repo_dir=repo_dir, + context=context, + overwrite_if_exists=False, + output_dir=str(render_dir), + ) + + if cleanup: + rmtree(repo_dir) + + return rendered_layer + + +def merge_layers(destination: Path, rendered_layer: RenderedLayer): + """ + Merge a layer into another layer using the rules specified in the layer_config. + + Args: + destination: The root path to merge into. + rendered_layer: The information about the rendered layer. 
+ """ + for root, dirs, files in os.walk(rendered_layer.location): + rel_root = Path(root).relative_to(rendered_layer.location) + + for f in files: + dest_path = destination / rel_root / f + origin_path = Path(f"{root}/{f}") + write_strat = get_write_strategy(origin_path, dest_path, rendered_layer) + if write_strat == WriteStrategy.MERGE: + merge_strategy = get_merge_strategy( + origin_path, rendered_layer.layer.merge_strategies + ) + MERGE_FUNCTIONS[dest_path.suffix]( + origin_path, dest_path, merge_strategy + ) + elif write_strat == WriteStrategy.WRITE: + shutil.copy(origin_path, dest_path) + + for d in dirs: + dest_path = destination / rel_root / d + origin_path = Path(f"{root}/{d}") + write_strat = get_write_strategy(origin_path, dest_path, rendered_layer) + if write_strat in {WriteStrategy.WRITE, WriteStrategy.MERGE}: + dest_path.mkdir(parents=True, exist_ok=True) + + +def get_write_strategy( + origin: Path, destination: Path, rendered_layer: RenderedLayer +) -> WriteStrategy: + """ + Based on the layer_config rules, determine if we should overwrite an existin path. + + Args: + origin: Path within the rendered layer that we are evaluating. + destination: Path to which we would write this file (may not actually exist) + rendered_layer: Rendered layer configuration. + + Returns: + The appropriate way to handle writing this file. + """ + if matches_any_glob(origin, rendered_layer.layer.skip_generation): + logger.debug(f"{origin} matches a skip_generation pattern. Skipping.") + return WriteStrategy.SKIP + + if not destination.exists(): + logger.debug(f"{destination} does not exist. Writing.") + return WriteStrategy.WRITE + + merge_strat = get_merge_strategy(origin, rendered_layer.layer.merge_strategies) + if merge_strat != MergeStrategy.DO_NOT_MERGE: + logger.debug("Strategy is not do-not-merge. Merging.") + return WriteStrategy.MERGE + logger.debug("Strategy is do-not-merge. 
Continuing evaluation.") + + if matches_any_glob(origin, rendered_layer.layer.overwrite_exclude): + logger.debug(f"{origin} matches an overwrite_exclude pattern. Skipping.") + return WriteStrategy.SKIP + + if matches_any_glob(origin, rendered_layer.layer.overwrite): + logger.debug(f"{origin} matches an overwrite pattern. Writing.") + return WriteStrategy.WRITE + + if rendered_layer.layer.skip_if_file_exists: + logger.debug("skip_if_file_exists is True. Skipping.") + return WriteStrategy.SKIP + else: + logger.debug("skip_if_file_exists is False. Writing.") + return WriteStrategy.WRITE + + +def process_composition(composition: ProjectComposition): + """Process the composition.""" + full_context = {} + rendered_layers = [] + + for layer_config in composition.layers: + if layer_config.context: + full_context = comprehensive_merge(full_context, layer_config.context) + + with tempfile.TemporaryDirectory() as render_dir: + rendered_layer = render_layer(layer_config, render_dir, full_context) + merge_layers(composition.destination, rendered_layer) + rendered_layers.append(rendered_layer) + full_context = comprehensive_merge(full_context, rendered_layer.new_context) + + layers = [] + for rendered_layer in rendered_layers: + rendered_layer.layer.commit = rendered_layer.latest_commit + layers.append(rendered_layer.layer) + + composition_file = composition.destination / ".composition.yaml" + write_composition(layers, composition_file) diff --git a/cookie_composer/matching.py b/cookie_composer/matching.py new file mode 100644 index 0000000..c1fb0e9 --- /dev/null +++ b/cookie_composer/matching.py @@ -0,0 +1,27 @@ +"""Matching files and patterns.""" +from typing import List, Union + +from fnmatch import fnmatch +from pathlib import Path + + +def rel_fnmatch(name: str, pat: str) -> bool: + """Force a relative match of the pattern by prefixing a '*'.""" + if pat.startswith("*"): + return fnmatch(name, pat) + else: + return fnmatch(name, f"*{pat}") + + +def matches_any_glob(path: 
Union[str, Path], patterns: List[str]) -> bool: + """ + Does the path match any of the glob patterns. + + Args: + path: Path to test + patterns: A list of glob patterns + + Returns: + ``True`` if it matches any of the patterns + """ + return any(rel_fnmatch(str(path), pat) for pat in patterns) diff --git a/cookie_composer/merge_files/__init__.py b/cookie_composer/merge_files/__init__.py new file mode 100644 index 0000000..4b6df25 --- /dev/null +++ b/cookie_composer/merge_files/__init__.py @@ -0,0 +1,39 @@ +""" +Methods for merging data files. + +The merging functions should look similar to the following: + +:: + + def merge_generic_files(origin: Path, destination: Path, merge_strategy: MergeStrategy): + ''' + Merge two ??? files into one. + + Raises: + MergeError: If something goes wrong + + Args: + origin: The path to the data file to merge + destination: The path to the data file to merge into and write out. + merge_strategy: How to do the merge + ''' + +The function must write the file to destination. + +The function must wrap any errors into a MergeError and raise it. 
+""" +from typing import Callable, Dict + +from pathlib import Path + +from cookie_composer.composition import MergeStrategy +from cookie_composer.merge_files.json_file import merge_json_files +from cookie_composer.merge_files.yaml_file import merge_yaml_files + +merge_function = Callable[[Path, Path, MergeStrategy], None] + +MERGE_FUNCTIONS: Dict[str, merge_function] = { + ".json": merge_json_files, + ".yaml": merge_yaml_files, + ".yml": merge_yaml_files, +} diff --git a/cookie_composer/merge_files/json_file.py b/cookie_composer/merge_files/json_file.py new file mode 100644 index 0000000..28ff70f --- /dev/null +++ b/cookie_composer/merge_files/json_file.py @@ -0,0 +1,47 @@ +"""Merge two json files into one.""" +import json +from pathlib import Path + +from cookie_composer import data_merge +from cookie_composer.composition import MergeStrategy +from cookie_composer.exceptions import MergeError + + +def merge_json_files( + new_file: Path, existing_file: Path, merge_strategy: MergeStrategy +): + """ + Merge two json files into one. + + Args: + new_file: The path to the data file to merge + existing_file: The path to the data file to merge into and write out. 
+ merge_strategy: How to do the merge + + Raises: + MergeError: If something goes wrong + """ + if merge_strategy == MergeStrategy.DO_NOT_MERGE: + raise MergeError( + str(new_file), + str(existing_file), + str(merge_strategy), + "Can not merge with do-not-merge strategy.", + ) + + try: + new_data = json.loads(new_file.read_text()) + existing_data = json.loads(existing_file.read_text()) + except (json.JSONDecodeError, FileNotFoundError) as e: + raise MergeError(str(new_file), str(existing_file), str(merge_strategy), str(e)) + + if merge_strategy == MergeStrategy.OVERWRITE: + existing_data.update(new_data) + elif merge_strategy == MergeStrategy.NESTED_OVERWRITE: + existing_data = data_merge.deep_merge(existing_data, new_data) + elif merge_strategy == MergeStrategy.COMPREHENSIVE: + existing_data = data_merge.comprehensive_merge(existing_data, new_data) + else: + raise MergeError(error_message=f"Unrecognized merge strategy {merge_strategy}") + + existing_file.write_text(json.dumps(existing_data)) diff --git a/cookie_composer/merge_files/yaml_file.py b/cookie_composer/merge_files/yaml_file.py new file mode 100644 index 0000000..27aa5e8 --- /dev/null +++ b/cookie_composer/merge_files/yaml_file.py @@ -0,0 +1,54 @@ +"""Merge two YAML files into one.""" + +from pathlib import Path + +from cookie_composer import data_merge +from cookie_composer.composition import MergeStrategy +from cookie_composer.exceptions import MergeError + + +def merge_yaml_files( + new_file: Path, existing_file: Path, merge_strategy: MergeStrategy +): + """ + Merge two YAML files into one. + + Args: + new_file: The path to the data file to merge + existing_file: The path to the data file to merge into and write out. 
+ merge_strategy: How to do the merge + + Raises: + MergeError: If something goes wrong + """ + from ruyaml import YAML, YAMLError + + yaml = YAML(typ="safe") + + if merge_strategy == MergeStrategy.DO_NOT_MERGE: + raise MergeError( + str(new_file), + str(existing_file), + str(merge_strategy), + "Can not merge with do-not-merge strategy.", + ) + + try: + new_data = yaml.load(new_file) + existing_data = yaml.load(existing_file) + except (YAMLError, FileNotFoundError) as e: + raise MergeError(str(new_file), str(existing_file), str(merge_strategy), str(e)) + + if merge_strategy == MergeStrategy.OVERWRITE: + if isinstance(existing_data, dict) and isinstance(new_data, dict): + existing_data.update(new_data) + else: + existing_data = new_data + elif merge_strategy == MergeStrategy.NESTED_OVERWRITE: + existing_data = data_merge.deep_merge(existing_data, new_data) + elif merge_strategy == MergeStrategy.COMPREHENSIVE: + existing_data = data_merge.comprehensive_merge(existing_data, new_data) + else: + raise MergeError(error_message=f"Unrecognized merge strategy {merge_strategy}") + + yaml.dump(existing_data, existing_file) diff --git a/pyproject.toml b/pyproject.toml index e9af331..d84d002 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,7 +74,7 @@ color_output = true [tool.pydocstyle] convention = "google" -add-ignore = ["D107", "D200", "D212"] +add-ignore = ["D107", "D200", "D212", "D214"] match = "(?!test_).*\\.py" [tool.vendoring]