
Commit 428a439

Clean up in-line f-string concatenation (#23591)
1 parent ec4dcce commit 428a439

17 files changed: 23 additions, 27 deletions
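
Every hunk below follows the same pattern: two adjacent f-string literals, which the Python parser already joins into one string at compile time, are merged into a single literal with identical runtime behavior. The merged form also removes the risk of losing a separator or a prefix at the seam between the two fragments. A minimal sketch of the equivalence (illustrative values only, not taken from the diff):

    access_key = "example-access-key"
    secret_key = "example-secret-key"
    # The parser concatenates adjacent string literals into one...
    before = f"aws_access_key_id={access_key};" f"aws_secret_access_key={secret_key}"
    # ...so a single merged literal produces exactly the same value.
    after = f"aws_access_key_id={access_key};aws_secret_access_key={secret_key}"
    assert before == after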

airflow/providers/amazon/aws/utils/redshift.py

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@ def build_credentials_block(credentials: ReadOnlyCredentials) -> str:
 
     else:
         credentials_line = (
-            f"aws_access_key_id={credentials.access_key};" f"aws_secret_access_key={credentials.secret_key}"
+            f"aws_access_key_id={credentials.access_key};aws_secret_access_key={credentials.secret_key}"
         )
 
     return credentials_line

airflow/providers/apache/drill/hooks/drill.py

Lines changed: 1 addition & 1 deletion
@@ -72,7 +72,7 @@ def get_uri(self) -> str:
         conn_type = 'drill' if not conn_md.conn_type else conn_md.conn_type
         dialect_driver = conn_md.extra_dejson.get('dialect_driver', 'drill+sadrill')
         storage_plugin = conn_md.extra_dejson.get('storage_plugin', 'dfs')
-        return f'{conn_type}://{host}/{storage_plugin}' f'?dialect_driver={dialect_driver}'
+        return f'{conn_type}://{host}/{storage_plugin}?dialect_driver={dialect_driver}'
 
     def set_autocommit(self, conn: Connection, autocommit: bool) -> NotImplementedError:
         raise NotImplementedError("There are no transactions in Drill.")

airflow/providers/databricks/operators/databricks_repos.py

Lines changed: 1 addition & 1 deletion
@@ -90,7 +90,7 @@ def __init__(
             self.git_provider = self.__detect_repo_provider__(git_url)
             if self.git_provider is None:
                 raise AirflowException(
-                    "git_provider isn't specified and couldn't be guessed" f" for URL {git_url}"
+                    f"git_provider isn't specified and couldn't be guessed for URL {git_url}"
                 )
         else:
             self.git_provider = git_provider
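
Note that in the hunk above only the second fragment carried the f prefix, so the merged literal must keep an f prefix of its own for {git_url} to be interpolated; a plain literal would raise the message with the placeholder verbatim. A minimal sketch of the difference (hypothetical URL, not from the diff):

    git_url = "https://example.com/org/repo.git"
    plain = "couldn't be guessed for URL {git_url}"          # placeholder stays literal text
    interpolated = f"couldn't be guessed for URL {git_url}"  # value is substituted
    assert "{git_url}" in plain
    assert git_url in interpolated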

airflow/providers/google/cloud/hooks/datafusion.py

Lines changed: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ def wait_for_pipeline_state(
                 return
             if current_state in failure_states:
                 raise AirflowException(
-                    f"Pipeline {pipeline_name} state {current_state} is not " f"one of {success_states}"
+                    f"Pipeline {pipeline_name} state {current_state} is not one of {success_states}"
                 )
             sleep(30)

airflow/providers/google/cloud/utils/credentials_provider.py

Lines changed: 1 addition & 1 deletion
@@ -369,5 +369,5 @@ def _get_project_id_from_service_account_email(service_account_email: str) -> st
         return service_account_email.split('@')[1].split('.')[0]
     except IndexError:
         raise AirflowException(
-            f"Could not extract project_id from service account's email: " f"{service_account_email}."
+            f"Could not extract project_id from service account's email: {service_account_email}."
         )

airflow/providers/hashicorp/_internal_client/vault_client.py

Lines changed: 1 addition & 1 deletion
@@ -123,7 +123,7 @@ def __init__(
         )
         if auth_type not in VALID_AUTH_TYPES:
             raise VaultError(
-                f"The auth_type is not supported: {auth_type}. " f"It should be one of {VALID_AUTH_TYPES}"
+                f"The auth_type is not supported: {auth_type}. It should be one of {VALID_AUTH_TYPES}"
             )
         if auth_type == "token" and not token and not token_path:
             raise VaultError("The 'token' authentication type requires 'token' or 'token_path'")

airflow/utils/cli.py

Lines changed: 2 additions & 2 deletions
@@ -49,7 +49,7 @@ def _check_cli_args(args):
         raise ValueError("Args should be set")
     if not isinstance(args[0], Namespace):
         raise ValueError(
-            "1st positional argument should be argparse.Namespace instance," f"but is {type(args[0])}"
+            f"1st positional argument should be argparse.Namespace instance, but is {type(args[0])}"
         )
 
 
@@ -148,7 +148,7 @@ def _build_metrics(func_name, namespace):
 
     if not isinstance(namespace, Namespace):
        raise ValueError(
-            "namespace argument should be argparse.Namespace instance," f"but is {type(namespace)}"
+            f"namespace argument should be argparse.Namespace instance, but is {type(namespace)}"
        )
     tmp_dic = vars(namespace)
     metrics['dag_id'] = tmp_dic.get('dag_id')
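
Merging also surfaces missing separators: the original pairs of literals here had no space between "instance," and "but is", so the concatenated message ran the words together; the merged literals add the space. A minimal sketch of the original behavior (illustrative value only):

    value = 42
    joined = "namespace argument should be argparse.Namespace instance," f"but is {type(value)}"
    # Adjacent literals concatenate with no implicit separator:
    assert "instance,but is" in joined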

dev/assign_cherry_picked_prs_with_milestone.py

Lines changed: 1 addition & 1 deletion
@@ -294,7 +294,7 @@ def assign_prs(
             continue
         console.print('-' * 80)
         console.print(
-            f"\n >>>> Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+            f"\n >>>> Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
         )
         pr: PullRequest
         try:

dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py

Lines changed: 3 additions & 3 deletions
@@ -305,10 +305,10 @@ def version(verbose: bool, python: str):
             f"{get_installation_sources_config_metadata_hash()}[/]"
         )
         get_console().print(
-            f"[info]Used sources config hash : " f"{get_used_sources_setup_metadata_hash()}[/]"
+            f"[info]Used sources config hash : {get_used_sources_setup_metadata_hash()}[/]"
         )
         get_console().print(
-            f"[info]Package config hash : " f"{(get_package_setup_metadata_hash())}[/]\n"
+            f"[info]Package config hash : {(get_package_setup_metadata_hash())}[/]\n"
         )
 
 
@@ -497,7 +497,7 @@ def write_to_shell(command_to_execute: str, dry_run: bool, script_path: str, for
     else:
         get_console().print(f"[info]The autocomplete script would be added to {script_path}[/]")
         get_console().print(
-            f"\n[warning]Please exit and re-enter your shell or run:[/]" f"\n\n source {script_path}\n"
+            f"\n[warning]Please exit and re-enter your shell or run:[/]\n\n source {script_path}\n"
         )
         return True

dev/breeze/src/airflow_breeze/utils/custom_param_types.py

Lines changed: 1 addition & 3 deletions
@@ -89,9 +89,7 @@ def convert(self, value, param, ctx):
         if isinstance(value, CacheableDefault):
             is_cached, new_value = read_and_validate_value_from_cache(param_name, value.value)
             if not is_cached:
-                get_console().print(
-                    f"\n[info]Default value of {param.name} " f"parameter {new_value} used.[/]\n"
-                )
+                get_console().print(f"\n[info]Default value of {param.name} parameter {new_value} used.[/]\n")
         else:
             allowed, allowed_values = check_if_values_allowed(param_name, value)
             if allowed:

dev/breeze/src/airflow_breeze/utils/run_utils.py

Lines changed: 1 addition & 1 deletion
@@ -199,7 +199,7 @@ def get_filesystem_type(filepath):
 
 def instruct_build_image(python: str):
     """Print instructions to the user that they should build the image"""
-    get_console().print(f'[warning]\nThe CI image for ' f'python version {python} may be outdated[/]\n')
+    get_console().print(f'[warning]\nThe CI image for Python version {python} may be outdated[/]\n')
     get_console().print(
         f"\n[info]Please run at the earliest convenience:[/]\n\nbreeze build-image --python {python}\n\n"
     )

dev/prepare_release_issue.py

Lines changed: 1 addition & 1 deletion
@@ -274,7 +274,7 @@ def generate_issue_content(
         for i in range(count_prs):
             pr_number = prs[i]
             progress.console.print(
-                f"Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+                f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
             )
 
             pr: PullRequestOrIssue

dev/provider_packages/prepare_provider_packages.py

Lines changed: 2 additions & 2 deletions
@@ -1773,7 +1773,7 @@ def generate_new_changelog(package_id, provider_details, changelog_path, changes
         )
     else:
         console.print(
-            f"[green]Appending the provider {package_id} changelog for" f"`{latest_version}` version.[/]"
+            f"[green]Appending the provider {package_id} changelog for `{latest_version}` version.[/]"
         )
     with open(changelog_path, "wt") as changelog:
         changelog.write("\n".join(new_changelog_lines))
@@ -1914,7 +1914,7 @@ def generate_issue_content(
         for i in range(len(pr_list)):
             pr_number = pr_list[i]
             progress.console.print(
-                f"Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+                f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
             )
             try:
                 pull_requests[pr_number] = repo.get_pull(pr_number)

docs/apache-airflow/security/webserver.rst

Lines changed: 1 addition & 3 deletions
@@ -234,9 +234,7 @@ webserver_config.py itself if you wish.
         team_data = remote_app.get("user/teams")
         teams = team_parser(team_data.json())
         roles = map_roles(teams)
-        log.debug(
-            f"User info from Github: {user_data}\n" f"Team info from Github: {teams}"
-        )
+        log.debug(f"User info from Github: {user_data}\nTeam info from Github: {teams}")
         return {"username": "github_" + user_data.get("login"), "role_keys": roles}
 

scripts/ci/pre_commit/pre_commit_check_pre_commit_hooks.py

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@ def get_errors_and_hooks(content: Any, max_length: int) -> Tuple[List[str], Dict
         name = hook['name']
         if len(name) > max_length:
             errors.append(
-                f"Name is too long for hook {hook_id} in {PRE_COMMIT_YAML_FILE}. " f"Please shorten it!"
+                f"Name is too long for hook {hook_id} in {PRE_COMMIT_YAML_FILE}. Please shorten it!"
             )
             continue
         hooks[hook_id].append(name)

tests/cli/test_cli_parser.py

Lines changed: 3 additions & 3 deletions
@@ -85,9 +85,9 @@ def test_subcommand_arg_name_conflict(self):
         for group, command in subcommand.items():
             for com in command:
                 conflict_arg = [arg for arg, count in Counter(com.args).items() if count > 1]
-                assert [] == conflict_arg, (
-                    f"Command group {group} function {com.name} have " f"conflict args name {conflict_arg}"
-                )
+                assert (
+                    [] == conflict_arg
+                ), f"Command group {group} function {com.name} have conflict args name {conflict_arg}"
 
     def test_subcommand_arg_flag_conflict(self):
         """

tests/system/providers/google/bigquery/example_bigquery_tables.py

Lines changed: 1 addition & 1 deletion
@@ -101,7 +101,7 @@
         dataset_id=DATASET_NAME,
         table_id="test_materialized_view",
         materialized_view={
-            "query": f"SELECT SUM(salary) AS sum_salary " f"FROM `{PROJECT_ID}.{DATASET_NAME}.test_table`",
+            "query": f"SELECT SUM(salary) AS sum_salary FROM `{PROJECT_ID}.{DATASET_NAME}.test_table`",
             "enableRefresh": True,
             "refreshIntervalMs": 2000000,
         },
