Add flake8-implicit-str-concat check to Ruff (#36597)
This was enabled initially in #23873, but hasn't been part of the Ruff checks to date. Let's add it!
josh-fell committed Jan 5, 2024
1 parent 16361f1 commit 1cc9fe1
Showing 14 changed files with 21 additions and 26 deletions.
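
For context, Ruff's ISC rules (from the flake8-implicit-str-concat plugin) flag adjacent string literals that Python concatenates implicitly, a pattern that usually survives from an earlier line wrap and is easy to misread. A minimal illustrative sketch of what the rule catches (not taken from this commit; the names are made up):

# Flagged by ISC001: the two adjacent f-string literals on one line are
# silently joined by the parser.
class_name = "Example"
message = f"Trying to create class {class_name}" f" but this class already exists."

# Equivalent single literal, the form this commit converts the code base to.
message = f"Trying to create class {class_name} but this class already exists."

Because the rule is auto-fixable, the changes below are mostly mechanical one-line replacements that join the literals.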
4 changes: 1 addition & 3 deletions airflow/providers/weaviate/hooks/weaviate.py
@@ -280,9 +280,7 @@ def create_or_replace_classes(
         intersection_classes = set__exiting_classes.intersection(set__to_be_added_classes)
         classes_to_create = set()
         if existing == "fail" and intersection_classes:
-            raise ValueError(
-                f"Trying to create class {intersection_classes}" f" but this class already exists."
-            )
+            raise ValueError(f"Trying to create class {intersection_classes} but this class already exists.")
         elif existing == "ignore":
             classes_to_create = set__to_be_added_classes - set__exiting_classes
         elif existing == "replace":
@@ -619,7 +619,7 @@ def verify(
     )
     if (pull or image_name) and run_in_parallel:
         get_console().print(
-            "[error]You cannot use --pull,--image-name and --run-in-parallel at the same time. " "Exiting[/]"
+            "[error]You cannot use --pull,--image-name and --run-in-parallel at the same time. Exiting[/]"
         )
         sys.exit(1)
     if run_in_parallel:
@@ -553,7 +553,7 @@ def verify(
     )
     if (pull or image_name) and run_in_parallel:
         get_console().print(
-            "[error]You cannot use --pull,--image-name and --run-in-parallel at the same time. " "Exiting[/]"
+            "[error]You cannot use --pull,--image-name and --run-in-parallel at the same time. Exiting[/]"
         )
         sys.exit(1)
     if run_in_parallel:
@@ -488,7 +488,7 @@ def basic_provider_checks(provider_package_id: str) -> dict[str, Any]:
             f"since you asked for it, it will be built [/]\n"
         )
     elif provider_metadata.get("state") == "suspended":
-        get_console().print(f"[warning]The package: {provider_package_id} is suspended " f"skipping it [/]\n")
+        get_console().print(f"[warning]The package: {provider_package_id} is suspended skipping it [/]\n")
         raise PackageSuspendedException()
     return provider_metadata

@@ -348,7 +348,7 @@ def _get_all_changes_for_package(
         )
     else:
         get_console().print(
-            f"[info]New version of the '{provider_package_id}' " f"package is ready to be released!\n"
+            f"[info]New version of the '{provider_package_id}' package is ready to be released!\n"
         )
     next_version_tag = f"{HTTPS_REMOTE}/{base_branch}"
     changes_table = ""
@@ -413,7 +413,7 @@ def _ask_the_user_for_the_type_of_changes(non_interactive: bool) -> TypeOfChange
         if given_answer in type_of_changes_array:
             return TypeOfChange(given_answer)
         get_console().print(
-            f"[warning] Wrong answer given: '{given_answer}'. " f"Should be one of {display_answers}"
+            f"[warning] Wrong answer given: '{given_answer}'. Should be one of {display_answers}"
         )


@@ -680,7 +680,7 @@ def update_release_notes(
     answer = user_confirm(f"Provider {provider_package_id} marked for release. Proceed?")
     if answer == Answer.NO:
         get_console().print(
-            f"\n[warning]Skipping provider: {provider_package_id} " f"on user request![/]\n"
+            f"\n[warning]Skipping provider: {provider_package_id} on user request![/]\n"
         )
         raise PrepareReleaseDocsUserSkippedException()
     elif answer == Answer.QUIT:
@@ -86,7 +86,7 @@ def copy_provider_sources_to_target(provider_id: str) -> Path:
     relative_provider_path = source_provider_sources_path.relative_to(AIRFLOW_SOURCES_ROOT)
     target_providers_sub_folder = target_provider_root_path / relative_provider_path
     get_console().print(
-        f"[info]Copying provider sources: " f"{source_provider_sources_path} -> {target_providers_sub_folder}"
+        f"[info]Copying provider sources: {source_provider_sources_path} -> {target_providers_sub_folder}"
     )
     copytree(source_provider_sources_path, target_providers_sub_folder)
     shutil.copy(AIRFLOW_SOURCES_ROOT / "LICENSE", target_providers_sub_folder / "LICENSE")
@@ -214,9 +214,7 @@ def build_provider_package(provider_id: str, target_provider_root_sources_path:
     except subprocess.CalledProcessError as ex:
         get_console().print("[error]The command returned an error %s", ex)
         raise PrepareReleasePackageErrorBuildingPackageException()
-    get_console().print(
-        f"\n[info]Prepared provider package {provider_id} in " f"format {package_format}[/]\n"
-    )
+    get_console().print(f"\n[info]Prepared provider package {provider_id} in format {package_format}[/]\n")


 def move_built_packages_and_cleanup(
4 changes: 2 additions & 2 deletions dev/breeze/src/airflow_breeze/utils/packages.py
@@ -320,7 +320,7 @@ def get_short_package_name(long_form_provider: str) -> str:
     else:
         if not long_form_provider.startswith(LONG_PROVIDERS_PREFIX):
             raise ValueError(
-                f"Invalid provider name: {long_form_provider}. " f"Should start with {LONG_PROVIDERS_PREFIX}"
+                f"Invalid provider name: {long_form_provider}. Should start with {LONG_PROVIDERS_PREFIX}"
             )
         return long_form_provider[len(LONG_PROVIDERS_PREFIX) :].replace("-", ".")

@@ -661,7 +661,7 @@ def make_sure_remote_apache_exists_and_fetch(github_repository: str = "apache/ai
             )
         else:
             get_console().print(
-                f"[error]Error {ex}[/]\n" f"[error]When checking if {HTTPS_REMOTE} is set.[/]\n\n"
+                f"[error]Error {ex}[/]\n[error]When checking if {HTTPS_REMOTE} is set.[/]\n\n"
             )
             sys.exit(1)
     get_console().print("[info]Fetching full history and tags from remote.")
1 change: 1 addition & 0 deletions pyproject.toml
@@ -62,6 +62,7 @@ extend-select = [
     "TCH", # Rules around TYPE_CHECKING blocks
     "TID251", # Specific modules or module members that may not be imported or accessed
     "TID253", # Ban certain modules from being imported at module level
+    "ISC", # Checks for implicit literal string concatenation (auto-fixable)
 ]
 extend-ignore = [
     "D203",
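If an implicit concatenation is ever intentional, it does not have to be rewritten; the check can be suppressed per line instead. A hypothetical sketch, assuming Ruff's usual noqa mechanism and ISC001 as the code for the single-line variant:

# Deliberately kept as two adjacent literals; suppress the check for this line only.
BANNER = "split into two literals on purpose " "to keep each piece short"  # noqa: ISC001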
2 changes: 1 addition & 1 deletion scripts/ci/pre_commit/pre_commit_check_provider_docs.py
@@ -172,7 +172,7 @@ def check_documentation_link_exists(link: str, doc_file_name: str):
         fail_pre_commit = True
         console.print()
         console.print(
-            f"[red]ERROR! The {docs_file} does not contain:\n:[/]" f"{link}\n" f"[bright_blue]Please add it!"
+            f"[red]ERROR! The {docs_file} does not contain:\n:[/]{link}\n[bright_blue]Please add it!"
         )
         console.print()

2 changes: 1 addition & 1 deletion scripts/in_container/run_generate_constraints.py
@@ -358,7 +358,7 @@ def generate_constraints_no_providers(config_params: ConfigParams) -> None:
     core_dependencies = get_core_airflow_dependencies()
     uninstall_all_packages(config_params)
     console.print(
-        f"[bright_blue]Installing airflow with [{core_dependencies}] extras only " f"with eager upgrade."
+        f"[bright_blue]Installing airflow with [{core_dependencies}] extras only with eager upgrade."
     )
     install_local_airflow_with_eager_upgrade(
         config_params, config_params.eager_upgrade_additional_requirements, core_dependencies
2 changes: 1 addition & 1 deletion tests/api_connexion/endpoints/test_xcom_endpoint.py
@@ -478,7 +478,7 @@ def test_should_respond_200_with_xcom_key(self):

         def assert_expected_result(expected_entries, key=None):
             response = self.client.get(
-                "/api/v1/dags/~/dagRuns/~/taskInstances/~/xcomEntries" f"{('?xcom_key='+key )}",
+                f"/api/v1/dags/~/dagRuns/~/taskInstances/~/xcomEntries?xcom_key={key}",
                 environ_overrides={"REMOTE_USER": "test"},
             )

2 changes: 1 addition & 1 deletion tests/providers/amazon/aws/links/test_batch.py
@@ -57,7 +57,7 @@ class TestBatchJobQueueLink(BaseAwsLinksTestCase):
     def test_extra_link(self):
         self.assert_extra_link_url(
             expected_url=(
-                "https://console.aws.amazon.com/batch/home" "?region=us-east-1#queues/detail/arn:fake:jq"
+                "https://console.aws.amazon.com/batch/home?region=us-east-1#queues/detail/arn:fake:jq"
             ),
             region_name="us-east-1",
             aws_partition="aws",
4 changes: 2 additions & 2 deletions tests/providers/amazon/aws/links/test_emr.py
@@ -30,7 +30,7 @@ class TestEmrClusterLink(BaseAwsLinksTestCase):
     def test_extra_link(self):
         self.assert_extra_link_url(
             expected_url=(
-                "https://console.aws.amazon.com/emr/home" "?region=us-west-1#/clusterDetails/j-TEST-FLOW-ID"
+                "https://console.aws.amazon.com/emr/home?region=us-west-1#/clusterDetails/j-TEST-FLOW-ID"
             ),
             region_name="us-west-1",
             aws_partition="aws",
@@ -57,7 +57,7 @@ class TestEmrLogsLink(BaseAwsLinksTestCase):
     def test_extra_link(self):
         self.assert_extra_link_url(
             expected_url=(
-                "https://console.aws.amazon.com/s3/buckets/myLogUri/" "?region=eu-west-2&prefix=j-8989898989/"
+                "https://console.aws.amazon.com/s3/buckets/myLogUri/?region=eu-west-2&prefix=j-8989898989/"
             ),
             region_name="eu-west-2",
             aws_partition="aws",
8 changes: 3 additions & 5 deletions tests/providers/weaviate/hooks/test_weaviate.py
@@ -684,13 +684,13 @@ def test___generate_uuids(generate_uuid5, weaviate_hook):
         {"id": [1, 2], "name": ["ross", "bob"], "age": ["12", "22"], "gender": ["m", "m"]}
     )
     with pytest.raises(
-        ValueError, match=r"Property 'id' already in dataset. Consider renaming or specify" r" 'uuid_column'"
+        ValueError, match=r"Property 'id' already in dataset. Consider renaming or specify 'uuid_column'"
     ):
         weaviate_hook._generate_uuids(df=df, class_name="test", unique_columns=["name", "age", "gender"])

     with pytest.raises(
         ValueError,
-        match=r"Property age already in dataset. Consider renaming or specify" r" a different 'uuid_column'.",
+        match=r"Property age already in dataset. Consider renaming or specify a different 'uuid_column'.",
     ):
         weaviate_hook._generate_uuids(
             df=df, uuid_column="age", class_name="test", unique_columns=["name", "age", "gender"]
@@ -773,9 +773,7 @@ def test_error_option_of_create_or_replace_document_objects(

     _get_segregated_documents.return_value = ({}, {"abc.xml"}, {}, {"zyx.html"})
     _generate_uuids.return_value = (df, "id")
-    with pytest.raises(
-        ValueError, match="Documents abc.xml already exists. You can either" " skip or replace"
-    ):
+    with pytest.raises(ValueError, match="Documents abc.xml already exists. You can either skip or replace"):
         weaviate_hook.create_or_replace_document_objects(
             data=df, document_column="doc", class_name="test", existing="error"
         )
