Skip to content

Commit

Permalink
Simplify string expressions & Use f-string (#12216)
Browse files Browse the repository at this point in the history
* Simplify string expressions & Use f-string

This is follow-up clean-up work for minor issues introduced in the process of adopting Black

* Fixup
  • Loading branch information
XD-DENG committed Nov 10, 2020
1 parent 0443f04 commit dd2095f
Show file tree
Hide file tree
Showing 17 changed files with 33 additions and 37 deletions.
8 changes: 4 additions & 4 deletions airflow/cli/cli_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,7 @@ def positive_int(value):
# instead.
ARG_INTERACTIVE = Arg(
('-N', '--interactive'),
help='Do not capture standard output and error streams ' '(useful for interactive debugging)',
help='Do not capture standard output and error streams (useful for interactive debugging)',
action='store_true',
)
ARG_FORCE = Arg(
Expand Down Expand Up @@ -573,15 +573,15 @@ def positive_int(value):
ARG_LASTNAME = Arg(('-l', '--lastname'), help='Last name of the user', required=True, type=str)
ARG_ROLE = Arg(
('-r', '--role'),
help='Role of the user. Existing roles include Admin, ' 'User, Op, Viewer, and Public',
help='Role of the user. Existing roles include Admin, User, Op, Viewer, and Public',
required=True,
type=str,
)
ARG_EMAIL = Arg(('-e', '--email'), help='Email of the user', required=True, type=str)
ARG_EMAIL_OPTIONAL = Arg(('-e', '--email'), help='Email of the user', type=str)
ARG_PASSWORD = Arg(
('-p', '--password'),
help='Password of the user, required to create a user ' 'without --use-random-password',
help='Password of the user, required to create a user without --use-random-password',
type=str,
)
ARG_USE_RANDOM_PASSWORD = Arg(
Expand Down Expand Up @@ -628,7 +628,7 @@ def positive_int(value):
# info
ARG_ANONYMIZE = Arg(
('--anonymize',),
help=('Minimize any personal identifiable information. ' 'Use it when sharing output with others.'),
help='Minimize any personal identifiable information. Use it when sharing output with others.',
action='store_true',
)
ARG_FILE_IO = Arg(
Expand Down
2 changes: 1 addition & 1 deletion airflow/cli/commands/user_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ def users_manage_role(args, remove=False):
raise SystemExit('Missing args: must supply one of --username or --email')

if args.username and args.email:
raise SystemExit('Conflicting args: must supply either --username' ' or --email, but not both')
raise SystemExit('Conflicting args: must supply either --username or --email, but not both')

appbuilder = cached_app().appbuilder # pylint: disable=no-member
user = appbuilder.sm.find_user(username=args.username) or appbuilder.sm.find_user(email=args.email)
Expand Down
12 changes: 6 additions & 6 deletions airflow/executors/kubernetes_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def run(self) -> None:
raise
else:
self.log.warning(
'Watch died gracefully, starting back up with: ' 'last resource_version: %s',
'Watch died gracefully, starting back up with: last resource_version: %s',
self.resource_version,
)

Expand Down Expand Up @@ -263,7 +263,7 @@ def _health_check_kube_watcher(self):
self.log.debug("KubeJobWatcher alive, continuing")
else:
self.log.error(
'Error while health checking kube watcher process. ' 'Process died for unknown reasons'
'Error while health checking kube watcher process. Process died for unknown reasons'
)
self.kube_watcher = self._make_kube_watcher()

Expand Down Expand Up @@ -478,7 +478,7 @@ def clear_not_launched_queued_tasks(self, session=None) -> None:
pod_list = self.kube_client.list_namespaced_pod(self.kube_config.kube_namespace, **kwargs)
if not pod_list.items:
self.log.info(
'TaskInstance: %s found in queued state but was not launched, ' 'rescheduling', task
'TaskInstance: %s found in queued state but was not launched, rescheduling', task
)
session.query(TaskInstance).filter(
TaskInstance.dag_id == task.dag_id,
Expand Down Expand Up @@ -509,7 +509,7 @@ def _create_or_update_secret(secret_name, secret_path):
**self.kube_config.kube_client_request_args,
)
self.log.exception(
'Exception while trying to inject secret. ' 'Secret name: %s, error details: %s',
'Exception while trying to inject secret. Secret name: %s, error details: %s',
secret_name,
e,
)
Expand Down Expand Up @@ -605,7 +605,7 @@ def sync(self) -> None:
self.change_state(key, State.FAILED, e)
else:
self.log.warning(
'ApiException when attempting to run task, re-queueing. ' 'Message: %s',
'ApiException when attempting to run task, re-queueing. Message: %s',
json.loads(e.body)['message'],
)
self.task_queue.put(task)
Expand Down Expand Up @@ -729,7 +729,7 @@ def _flush_result_queue(self) -> None:
self._change_state(key, state, pod_id, namespace)
except Exception as e: # pylint: disable=broad-except
self.log.exception(
'Ignoring exception: %s when attempting to change state of %s ' 'to %s.',
'Ignoring exception: %s when attempting to change state of %s to %s.',
e,
results,
state,
Expand Down
2 changes: 1 addition & 1 deletion airflow/models/baseoperator.py
Original file line number Diff line number Diff line change
Expand Up @@ -1194,7 +1194,7 @@ def _set_relatives(

if len(dags) > 1:
raise AirflowException(
'Tried to set relationships between tasks in ' 'more than one DAG: {}'.format(dags.values())
f'Tried to set relationships between tasks in more than one DAG: {dags.values()}'
)
elif len(dags) == 1:
dag = dags.popitem()[1]
Expand Down
2 changes: 1 addition & 1 deletion airflow/models/dagrun.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ def __init__(

def __repr__(self):
return (
'<DagRun {dag_id} @ {execution_date}: {run_id}, ' 'externally triggered: {external_trigger}>'
'<DagRun {dag_id} @ {execution_date}: {run_id}, externally triggered: {external_trigger}>'
).format(
dag_id=self.dag_id,
execution_date=self.execution_date,
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/apache/druid/hooks/druid.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None:
self.log.info("Druid ingestion spec: %s", json_index_spec)
req_index = requests.post(url, data=json_index_spec, headers=self.header, auth=self.get_auth())
if req_index.status_code != 200:
raise AirflowException('Did not get 200 when ' 'submitting the Druid job to {}'.format(url))
raise AirflowException(f'Did not get 200 when submitting the Druid job to {url}')

req_json = req_index.json()
# Wait until the job is completed
Expand Down
6 changes: 2 additions & 4 deletions airflow/providers/google/cloud/hooks/bigquery.py
Original file line number Diff line number Diff line change
Expand Up @@ -2067,7 +2067,7 @@ def run_query(
_validate_value("api_resource_configs['query']", configuration['query'], dict)

if sql is None and not configuration['query'].get('query', None):
raise TypeError('`BigQueryBaseCursor.run_query` ' 'missing 1 required positional argument: `sql`')
raise TypeError('`BigQueryBaseCursor.run_query` missing 1 required positional argument: `sql`')

# BigQuery also allows you to define how you want a table's schema to change
# as a side effect of a query job
Expand Down Expand Up @@ -2801,9 +2801,7 @@ def _split_tablename(
) -> Tuple[str, str, str]:

if '.' not in table_input:
raise ValueError(
'Expected target table name in the format of ' '<dataset>.<table>. Got: {}'.format(table_input)
)
raise ValueError(f'Expected table name in the format of <dataset>.<table>. Got: {table_input}')

if not default_project_id:
raise ValueError("INTERNAL: No default project is specified")
Expand Down
4 changes: 2 additions & 2 deletions airflow/providers/google/cloud/hooks/gcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -630,7 +630,7 @@ def get_crc32c(self, bucket_name: str, object_name: str):
:type object_name: str
"""
self.log.info(
'Retrieving the crc32c checksum of ' 'object_name: %s in bucket_name: %s',
'Retrieving the crc32c checksum of object_name: %s in bucket_name: %s',
object_name,
bucket_name,
)
Expand All @@ -651,7 +651,7 @@ def get_md5hash(self, bucket_name: str, object_name: str) -> str:
storage bucket_name.
:type object_name: str
"""
self.log.info('Retrieving the MD5 hash of ' 'object: %s in bucket: %s', object_name, bucket_name)
self.log.info('Retrieving the MD5 hash of object: %s in bucket: %s', object_name, bucket_name)
client = self.get_conn()
bucket = client.bucket(bucket_name)
blob = bucket.get_blob(blob_name=object_name)
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/hooks/mlengine.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ def create_job(self, job: dict, project_id: str, use_existing_job_fn: Optional[C
existing_job = self._get_job(project_id, job_id)
if not use_existing_job_fn(existing_job):
self.log.error(
'Job with job_id %s already exist, but it does ' 'not match our expectation: %s',
'Job with job_id %s already exist, but it does not match our expectation: %s',
job_id,
existing_job,
)
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/microsoft/azure/hooks/azure_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ def configure_pool( # pylint: disable=too-many-arguments

elif os_family:
self.log.info(
'Using cloud service configuration to create pool, ' 'virtual machine configuration ignored'
'Using cloud service configuration to create pool, virtual machine configuration ignored'
)
pool = batch_models.PoolAddParameter(
id=pool_id,
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/slack/hooks/slack.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def __get_token(self, token: Any, slack_conn_id: Any) -> str:
raise AirflowException('Missing token(password) in Slack connection')
return conn.password

raise AirflowException('Cannot get token: ' 'No valid Slack token nor slack_conn_id supplied.')
raise AirflowException('Cannot get token: No valid Slack token nor slack_conn_id supplied.')

def call(self, api_method: str, *args, **kwargs) -> None:
"""
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/slack/hooks/slack_webhook.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ def _get_token(self, token: str, http_conn_id: Optional[str]) -> str:
extra = conn.extra_dejson
return extra.get('webhook_token', '')
else:
raise AirflowException('Cannot get token: No valid Slack ' 'webhook token nor conn_id supplied')
raise AirflowException('Cannot get token: No valid Slack webhook token nor conn_id supplied')

def _build_slack_message(self) -> str:
"""
Expand Down
4 changes: 1 addition & 3 deletions airflow/providers/ssh/hooks/ssh.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,9 +184,7 @@ def get_conn(self) -> paramiko.SSHClient:
)
client.load_system_host_keys()
if self.no_host_key_check:
self.log.warning(
'No Host Key Verification. This wont protect ' 'against Man-In-The-Middle attacks'
)
self.log.warning('No Host Key Verification. This wont protect against Man-In-The-Middle attacks')
# Default is RejectPolicy
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
connect_kwargs = dict(
Expand Down
10 changes: 5 additions & 5 deletions tests/providers/apache/druid/hooks/test_druid.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def test_submit_gone_wrong(self, m):
text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}',
)
status_check = m.get(
'http://druid-overlord:8081/druid/indexer/v1/task/' '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
'http://druid-overlord:8081/druid/indexer/v1/task/9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
text='{"status":{"status": "FAILED"}}',
)

Expand All @@ -65,7 +65,7 @@ def test_submit_ok(self, m):
text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}',
)
status_check = m.get(
'http://druid-overlord:8081/druid/indexer/v1/task/' '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
'http://druid-overlord:8081/druid/indexer/v1/task/9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
text='{"status":{"status": "SUCCESS"}}',
)

Expand All @@ -82,7 +82,7 @@ def test_submit_correct_json_body(self, m):
text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}',
)
status_check = m.get(
'http://druid-overlord:8081/druid/indexer/v1/task/' '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
'http://druid-overlord:8081/druid/indexer/v1/task/9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
text='{"status":{"status": "SUCCESS"}}',
)

Expand All @@ -106,7 +106,7 @@ def test_submit_unknown_response(self, m):
text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}',
)
status_check = m.get(
'http://druid-overlord:8081/druid/indexer/v1/task/' '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
'http://druid-overlord:8081/druid/indexer/v1/task/9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
text='{"status":{"status": "UNKNOWN"}}',
)

Expand All @@ -126,7 +126,7 @@ def test_submit_timeout(self, m):
text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}',
)
status_check = m.get(
'http://druid-overlord:8081/druid/indexer/v1/task/' '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
'http://druid-overlord:8081/druid/indexer/v1/task/9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
text='{"status":{"status": "RUNNING"}}',
)
shutdown_post = m.post(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -240,7 +240,7 @@ def setUp(self):
self.gke_hook = GKEHook(location=GKE_ZONE)
self.gke_hook._client = mock.Mock()

@mock.patch('airflow.providers.google.cloud.hooks.kubernetes_engine.container_v1.' 'ClusterManagerClient')
@mock.patch('airflow.providers.google.cloud.hooks.kubernetes_engine.container_v1.ClusterManagerClient')
@mock.patch('airflow.providers.google.common.hooks.base_google.ClientInfo')
@mock.patch('airflow.providers.google.cloud.hooks.kubernetes_engine.GKEHook._get_credentials')
def test_get_client(self, mock_get_credentials, mock_client_info, mock_client):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,7 @@ def __create_instance(

self.log.info('... Done creating a test %s instance "%s"!\n', db_version, instance_name)
except Exception as ex:
self.log.error('Exception occurred. ' 'Aborting creating a test instance.\n\n%s', ex)
self.log.error('Exception occurred. Aborting creating a test instance.\n\n%s', ex)
raise ex

def __delete_service_accounts_acls(self):
Expand Down Expand Up @@ -350,7 +350,7 @@ def __setup_instance_and_certs(
self.__create_db(instance_name, db_name)
self.log.info('... Done setting up a test %s instance "%s"!\n', db_version, instance_name)
except Exception as ex:
self.log.error('Exception occurred. ' 'Aborting setting up test instance and certs.\n\n%s', ex)
self.log.error('Exception occurred. Aborting setting up test instance and certs.\n\n%s', ex)
raise ex

def __delete_instance(self, instance_name: str, master_instance_name: Optional[str]) -> None:
Expand Down
4 changes: 2 additions & 2 deletions tests/providers/google/cloud/operators/test_compute.py
Original file line number Diff line number Diff line change
Expand Up @@ -361,10 +361,10 @@ def test_set_machine_type_should_throw_ex_when_missing_machine_type(self, mock_h
)

@mock.patch(
'airflow.providers.google.cloud.operators.compute.ComputeEngineHook' '._check_zone_operation_status'
'airflow.providers.google.cloud.operators.compute.ComputeEngineHook._check_zone_operation_status'
)
@mock.patch(
'airflow.providers.google.cloud.operators.compute.ComputeEngineHook' '._execute_set_machine_type'
'airflow.providers.google.cloud.operators.compute.ComputeEngineHook._execute_set_machine_type'
)
@mock.patch('airflow.providers.google.cloud.operators.compute.ComputeEngineHook.get_conn')
def test_set_machine_type_should_handle_and_trim_gce_error(
Expand Down

0 comments on commit dd2095f

Please sign in to comment.