Skip to content

Commit

Permalink
[wip] Merge validate goal with lint goal
Browse files Browse the repository at this point in the history
[ci skip-rust]

[ci skip-build-wheels]
  • Loading branch information
Eric-Arellano committed Jan 29, 2022
1 parent c09289d commit 206c91b
Show file tree
Hide file tree
Showing 3 changed files with 109 additions and 24 deletions.
1 change: 1 addition & 0 deletions pants.toml
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,7 @@ interpreter_constraints = [">=3.7,<3.10"]

[regex-lint]
config = "@build-support/regexes/config.yaml"
detail_level = "none"

[generate-lockfiles]
custom_command = "build-support/bin/generate_all_lockfiles.sh"
Expand Down
66 changes: 63 additions & 3 deletions src/python/pants/backend/project_info/regex_lint.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,16 @@

from pants.base.deprecated import resolve_conflicting_options
from pants.base.exiter import PANTS_FAILED_EXIT_CODE, PANTS_SUCCEEDED_EXIT_CODE
from pants.core.goals.lint import LintFilesRequest, LintResult, LintResults
from pants.engine.collection import Collection
from pants.engine.console import Console
from pants.engine.fs import Digest, DigestContents, SpecsSnapshot
from pants.engine.fs import Digest, DigestContents, PathGlobs, SpecsSnapshot
from pants.engine.goal import Goal, GoalSubsystem
from pants.engine.rules import Get, collect_rules, goal_rule
from pants.engine.rules import Get, collect_rules, goal_rule, rule
from pants.engine.unions import UnionRule
from pants.option.subsystem import Subsystem
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
from pants.util.memo import memoized_method

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -365,5 +368,62 @@ async def validate(
return Validate(exit_code)


class RegexLintRequest(LintFilesRequest):
    """Union member that registers regex-lint as a file-based (target-less) linter.

    Inherits ``file_paths`` from ``LintFilesRequest``; no extra fields are needed.
    """

    pass


@rule(desc="Lint with regex patterns", level=LogLevel.DEBUG)
async def validate_as_lint(
    request: RegexLintRequest,
    validate_subsystem: ValidateSubsystem,
    regex_lint_subsystem: RegexLintSubsystem,
) -> LintResults:
    """Run the regex-lint checks over ``request.file_paths`` as a `lint` backend.

    Returns a single ``LintResult`` whose exit code is non-zero iff at least one
    file failed to match a required pattern. Verbosity of the report is
    controlled by the resolved detail level (``regex_lint_subsystem.detail_level``,
    which consults the deprecated `validate` options via ``validate_subsystem``).
    """
    multi_matcher = regex_lint_subsystem.get_multi_matcher()
    if multi_matcher is None:
        # No regex config file is set up, so there is nothing to check.
        return LintResults((), linter_name="regex-lint")

    digest_contents = await Get(DigestContents, PathGlobs(request.file_paths))
    # Sort by path so the report (and counters) are deterministic.
    regex_match_results = RegexMatchResults(
        multi_matcher.check_source_file(file_content.path, file_content.content)
        for file_content in sorted(digest_contents, key=lambda fc: fc.path)
    )

    detail_level = regex_lint_subsystem.detail_level(validate_subsystem)
    # Accumulate output chunks and join once, rather than quadratic `str +=`.
    output: list[str] = []
    num_matched_all = 0
    num_nonmatched_some = 0
    for rmr in regex_match_results:
        if not rmr.matching and not rmr.nonmatching:
            # No pattern was relevant to this file; nothing to count or report.
            continue
        # Count pass/fail before any verbosity filtering so the exit code does
        # not depend on `detail_level`. (Previously, `DetailLevel.names` hit a
        # `continue` before these counters, so failures went uncounted and the
        # goal exited 0 even when files failed to match required patterns.)
        if rmr.nonmatching:
            num_nonmatched_some += 1
        else:
            num_matched_all += 1

        if detail_level == DetailLevel.names:
            if rmr.nonmatching:
                output.append(f"{rmr.path}\n")
            continue

        icon = "X" if rmr.nonmatching else "V"
        matched_msg = " Matched: {}".format(",".join(rmr.matching)) if rmr.matching else ""
        nonmatched_msg = (
            " Didn't match: {}".format(",".join(rmr.nonmatching)) if rmr.nonmatching else ""
        )
        if detail_level == DetailLevel.all or (
            detail_level == DetailLevel.nonmatching and nonmatched_msg
        ):
            output.append(f"{icon} {rmr.path}:{matched_msg}{nonmatched_msg}\n")

    if detail_level not in (DetailLevel.none, DetailLevel.names):
        if output:
            output.append("\n")
        output.append(f"{num_matched_all} files matched all required patterns.\n")
        output.append(
            f"{num_nonmatched_some} files failed to match at least one required pattern."
        )

    stdout = "".join(output)
    exit_code = PANTS_FAILED_EXIT_CODE if num_nonmatched_some else PANTS_SUCCEEDED_EXIT_CODE
    return LintResults((LintResult(exit_code, stdout, ""),), linter_name="regex-lint")


def rules():
    """Return this backend's rules plus the union registration for the `lint` goal.

    The ``UnionRule`` registers ``RegexLintRequest`` as a ``LintFilesRequest``
    member so the `lint` goal discovers this file-based linter.
    """
    # NOTE(review): the original had an unreachable second `return` (a stale
    # diff artifact); the union registration is the intended behavior.
    return (*collect_rules(), UnionRule(LintFilesRequest, RegexLintRequest))
66 changes: 45 additions & 21 deletions src/python/pants/core/goals/lint.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from pants.core.util_rules.distdir import DistDir
from pants.engine.console import Console
from pants.engine.engine_aware import EngineAwareReturnType
from pants.engine.fs import EMPTY_DIGEST, Digest, Workspace
from pants.engine.fs import EMPTY_DIGEST, Digest, SpecsSnapshot, Workspace
from pants.engine.goal import Goal, GoalSubsystem
from pants.engine.process import FallibleProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, goal_rule
Expand Down Expand Up @@ -134,6 +134,14 @@ class LintRequest(StyleRequest):
"""


@union
@dataclass(frozen=True)
class LintFilesRequest:
    """The entry point for linters that do not use targets."""

    # The file paths to lint. Populated by the `lint` goal from
    # `specs_snapshot.snapshot.files`, i.e. the files matched by the specs
    # given on the command line.
    file_paths: tuple[str, ...]


# If a user wants linter reports to show up in dist/ they must ensure that the reports
# are written under this directory. E.g.,
# ./pants --flake8-args="--output-file=reports/report.txt" lint <target>
Expand Down Expand Up @@ -199,43 +207,59 @@ async def lint(
console: Console,
workspace: Workspace,
targets: Targets,
specs_snapshot: SpecsSnapshot,
lint_subsystem: LintSubsystem,
union_membership: UnionMembership,
dist_dir: DistDir,
) -> Lint:
request_types = cast("Iterable[type[LintRequest]]", union_membership[LintRequest])
requests = tuple(
target_requests = tuple(
request_type(
request_type.field_set_type.create(target)
for target in targets
if request_type.field_set_type.is_applicable(target)
)
for request_type in request_types
for request_type in union_membership[LintRequest]
)
file_requests = tuple(
request_type(specs_snapshot.snapshot.files)
for request_type in union_membership[LintFilesRequest]
)

if lint_subsystem.per_file_caching:
all_batch_results = await MultiGet(
Get(LintResults, LintRequest, request.__class__([field_set]))
for request in requests
if request.field_sets
for field_set in request.field_sets
)
all_requests = [
*(
Get(LintResults, LintRequest, request.__class__([field_set]))
for request in target_requests
if request.field_sets
for field_set in request.field_sets
),
*(
Get(LintResults, LintFilesRequest, request.__class__((fp,)))
for request in file_requests
for fp in request.file_paths
),
]
else:

def address_str(fs: FieldSet) -> str:
return fs.address.spec

all_batch_results = await MultiGet(
Get(LintResults, LintRequest, request.__class__(field_set_batch))
for request in requests
if request.field_sets
for field_set_batch in partition_sequentially(
request.field_sets,
key=address_str,
size_target=lint_subsystem.batch_size,
size_max=4 * lint_subsystem.batch_size,
)
)
all_requests = [
*(
Get(LintResults, LintRequest, request.__class__(field_set_batch))
for request in target_requests
if request.field_sets
for field_set_batch in partition_sequentially(
request.field_sets,
key=address_str,
size_target=lint_subsystem.batch_size,
size_max=4 * lint_subsystem.batch_size,
)
),
*(Get(LintResults, LintFilesRequest, request) for request in file_requests),
]

all_batch_results = await MultiGet(all_requests)

def key_fn(results: LintResults):
return results.linter_name
Expand Down

0 comments on commit 206c91b

Please sign in to comment.