Skip to content

Commit d305876

Browse files
authored
Remove redundant None provided as default to dict.get() (#11448)
1 parent d8d13fa commit d305876

File tree

26 files changed

+45
-45
lines changed

26 files changed

+45
-45
lines changed

airflow/api/common/experimental/trigger_dag.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ def _trigger_dag(
8989
state=State.RUNNING,
9090
conf=run_conf,
9191
external_trigger=True,
92-
dag_hash=dag_bag.dags_hash.get(dag_id, None),
92+
dag_hash=dag_bag.dags_hash.get(dag_id),
9393
)
9494

9595
triggers.append(trigger)

airflow/api_connexion/endpoints/connection_endpoint.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ def patch_connection(connection_id, session, update_mask=None):
9797
"Connection not found",
9898
detail=f"The Connection with connection_id: `{connection_id}` was not found",
9999
)
100-
if data.get('conn_id', None) and connection.conn_id != data['conn_id']:
100+
if data.get('conn_id') and connection.conn_id != data['conn_id']:
101101
raise BadRequest(detail="The connection_id cannot be updated.")
102102
if update_mask:
103103
update_mask = [i.strip() for i in update_mask]

airflow/api_connexion/endpoints/log_endpoint.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def get_log(session, dag_id, dag_run_id, task_id, task_try_number, full_content=
4242
except BadSignature:
4343
raise BadRequest("Bad Signature. Please use only the tokens provided by the API.")
4444

45-
if metadata.get('download_logs', None) and metadata['download_logs']:
45+
if metadata.get('download_logs') and metadata['download_logs']:
4646
full_content = True
4747

4848
if full_content:

airflow/api_connexion/exceptions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def common_error_handler(exception):
4242
"""
4343
if isinstance(exception, ProblemException):
4444

45-
link = EXCEPTIONS_LINK_MAP.get(exception.status, None)
45+
link = EXCEPTIONS_LINK_MAP.get(exception.status)
4646
if link:
4747
response = problem(
4848
status=exception.status,

airflow/api_connexion/security.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ def requires_access_decorator(func: T):
8181
def decorated(*args, **kwargs):
8282

8383
check_authentication()
84-
check_authorization(permissions, kwargs.get('dag_id', None))
84+
check_authorization(permissions, kwargs.get('dag_id'))
8585

8686
return func(*args, **kwargs)
8787

airflow/jobs/scheduler_job.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1555,7 +1555,7 @@ def _create_dag_runs(self, dag_models: Iterable[DagModel], session: Session) ->
15551555
"""
15561556
for dag_model in dag_models:
15571557
dag = self.dagbag.get_dag(dag_model.dag_id, session=session)
1558-
dag_hash = self.dagbag.dags_hash.get(dag.dag_id, None)
1558+
dag_hash = self.dagbag.dags_hash.get(dag.dag_id)
15591559
dag.create_dagrun(
15601560
run_type=DagRunType.SCHEDULED,
15611561
execution_date=dag_model.next_dagrun,

airflow/operators/python.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ def __init__(
187187
multiple_outputs: bool = False,
188188
**kwargs
189189
) -> None:
190-
kwargs['task_id'] = self._get_unique_task_id(task_id, kwargs.get('dag', None))
190+
kwargs['task_id'] = self._get_unique_task_id(task_id, kwargs.get('dag'))
191191
super().__init__(**kwargs)
192192
self.python_callable = python_callable
193193

airflow/providers/amazon/aws/hooks/step_function.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ def start_execution(
6767
self.log.info('Executing Step Function State Machine: %s', state_machine_arn)
6868

6969
response = self.conn.start_execution(**execution_args)
70-
return response.get('executionArn', None)
70+
return response.get('executionArn')
7171

7272
def describe_execution(self, execution_arn: str) -> dict:
7373
"""

airflow/providers/apache/spark/hooks/spark_submit.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -209,9 +209,9 @@ def _resolve_connection(self) -> Dict[str, Any]:
209209

210210
# Determine optional yarn queue from the extra field
211211
extra = conn.extra_dejson
212-
conn_data['queue'] = extra.get('queue', None)
213-
conn_data['deploy_mode'] = extra.get('deploy-mode', None)
214-
conn_data['spark_home'] = extra.get('spark-home', None)
212+
conn_data['queue'] = extra.get('queue')
213+
conn_data['deploy_mode'] = extra.get('deploy-mode')
214+
conn_data['spark_home'] = extra.get('spark-home')
215215
conn_data['spark_binary'] = self._spark_binary or extra.get('spark-binary', "spark-submit")
216216
conn_data['namespace'] = extra.get('namespace')
217217
except AirflowException:

airflow/providers/google/cloud/operators/functions.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -480,6 +480,6 @@ def execute(self, context: Dict):
480480
location=self.location,
481481
project_id=self.project_id,
482482
)
483-
self.log.info('Function called successfully. Execution id %s', result.get('executionId', None))
484-
self.xcom_push(context=context, key='execution_id', value=result.get('executionId', None))
483+
self.log.info('Function called successfully. Execution id %s', result.get('executionId'))
484+
self.xcom_push(context=context, key='execution_id', value=result.get('executionId'))
485485
return result

airflow/providers/google/cloud/operators/mlengine.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -283,7 +283,7 @@ def execute(self, context):
283283
# Helper method to check if the existing job's prediction input is the
284284
# same as the request we get here.
285285
def check_existing_job(existing_job):
286-
return existing_job.get('predictionInput', None) == prediction_request['predictionInput']
286+
return existing_job.get('predictionInput') == prediction_request['predictionInput']
287287

288288
finished_prediction_job = hook.create_job(
289289
project_id=self._project_id, job=prediction_request, use_existing_job_fn=check_existing_job
@@ -1264,12 +1264,12 @@ def execute(self, context):
12641264
# Helper method to check if the existing job's training input is the
12651265
# same as the request we get here.
12661266
def check_existing_job(existing_job):
1267-
existing_training_input = existing_job.get('trainingInput', None)
1267+
existing_training_input = existing_job.get('trainingInput')
12681268
requested_training_input = training_request['trainingInput']
12691269
if 'scaleTier' not in existing_training_input:
12701270
existing_training_input['scaleTier'] = None
12711271

1272-
existing_training_input['args'] = existing_training_input.get('args', None)
1272+
existing_training_input['args'] = existing_training_input.get('args')
12731273
requested_training_input["args"] = (
12741274
requested_training_input['args'] if requested_training_input["args"] else None
12751275
)

airflow/providers/oracle/hooks/oracle.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -57,11 +57,11 @@ def get_conn(self) -> 'OracleHook':
5757
self.oracle_conn_id # type: ignore[attr-defined] # pylint: disable=no-member
5858
)
5959
conn_config = {'user': conn.login, 'password': conn.password}
60-
dsn = conn.extra_dejson.get('dsn', None)
61-
sid = conn.extra_dejson.get('sid', None)
62-
mod = conn.extra_dejson.get('module', None)
60+
dsn = conn.extra_dejson.get('dsn')
61+
sid = conn.extra_dejson.get('sid')
62+
mod = conn.extra_dejson.get('module')
6363

64-
service_name = conn.extra_dejson.get('service_name', None)
64+
service_name = conn.extra_dejson.get('service_name')
6565
port = conn.port if conn.port else 1521
6666
if dsn and sid and not service_name:
6767
conn_config['dsn'] = cx_Oracle.makedsn(dsn, port, sid)

airflow/providers/postgres/hooks/postgres.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,7 @@ def _generate_insert_sql(
223223
placeholders = [
224224
"%s",
225225
] * len(values)
226-
replace_index = kwargs.get("replace_index", None)
226+
replace_index = kwargs.get("replace_index")
227227

228228
if target_fields:
229229
target_fields_fragment = ", ".join(target_fields)

airflow/providers/redis/hooks/redis.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ def get_conn(self):
5656
self.host = conn.host
5757
self.port = conn.port
5858
self.password = None if str(conn.password).lower() in ['none', 'false', ''] else conn.password
59-
self.db = conn.extra_dejson.get('db', None)
59+
self.db = conn.extra_dejson.get('db')
6060

6161
# check for ssl parameters in conn.extra
6262
ssl_arg_names = [

airflow/providers/salesforce/hooks/salesforce.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ def get_conn(self) -> api.Salesforce:
7272
password=connection.password,
7373
security_token=extras['security_token'],
7474
instance_url=connection.host,
75-
domain=extras.get('domain', None),
75+
domain=extras.get('domain'),
7676
)
7777
return self.conn
7878

airflow/providers/sendgrid/utils/emailer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ def send_email(
8989
personalization.add_bcc(Email(bcc_address))
9090

9191
# Add custom_args to personalization if present
92-
pers_custom_args = kwargs.get('personalization_custom_args', None)
92+
pers_custom_args = kwargs.get('personalization_custom_args')
9393
if isinstance(pers_custom_args, dict):
9494
for key in pers_custom_args.keys():
9595
personalization.add_custom_arg(CustomArg(key, pers_custom_args[key]))

airflow/providers/sftp/hooks/sftp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ def __init__(self, ftp_conn_id: str = 'sftp_default', *args, **kwargs) -> None:
6262
if conn.extra is not None:
6363
extra_options = conn.extra_dejson
6464
if 'private_key_pass' in extra_options:
65-
self.private_key_pass = extra_options.get('private_key_pass', None)
65+
self.private_key_pass = extra_options.get('private_key_pass')
6666

6767
# For backward compatibility
6868
# TODO: remove in Airflow 2.1

airflow/providers/snowflake/hooks/snowflake.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ def _get_conn_params(self) -> Dict[str, Optional[str]]:
8080
# passphrase for the private key. If your private key file is not encrypted (not recommended), then
8181
# leave the password empty.
8282

83-
private_key_file = conn.extra_dejson.get('private_key_file', None)
83+
private_key_file = conn.extra_dejson.get('private_key_file')
8484
if private_key_file:
8585
with open(private_key_file, "rb") as key:
8686
passphrase = None

airflow/sensors/smart_sensor_operator.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -499,18 +499,18 @@ def email_alert(task_instance, error_info):
499499
sensor_work.log.exception(e, exc_info=True)
500500

501501
def handle_failure(sensor_work, ti):
502-
if sensor_work.execution_context.get('retries', None) and \
502+
if sensor_work.execution_context.get('retries') and \
503503
ti.try_number <= ti.max_tries:
504504
# retry
505505
ti.state = State.UP_FOR_RETRY
506-
if sensor_work.execution_context.get('email_on_retry', None) and \
507-
sensor_work.execution_context.get('email', None):
506+
if sensor_work.execution_context.get('email_on_retry') and \
507+
sensor_work.execution_context.get('email'):
508508
sensor_work.log.info("%s sending email alert for retry", sensor_work.ti_key)
509509
email_alert(ti, error)
510510
else:
511511
ti.state = State.FAILED
512-
if sensor_work.execution_context.get('email_on_failure', None) and \
513-
sensor_work.execution_context.get('email', None):
512+
if sensor_work.execution_context.get('email_on_failure') and \
513+
sensor_work.execution_context.get('email'):
514514
sensor_work.log.info("%s sending email alert for failure", sensor_work.ti_key)
515515
email_alert(ti, error)
516516

@@ -566,7 +566,7 @@ def _check_and_handle_ti_timeout(self, sensor_work):
566566
:param sensor_work: SensorWork
567567
"""
568568
task_timeout = sensor_work.execution_context.get('timeout', self.timeout)
569-
task_execution_timeout = sensor_work.execution_context.get('execution_timeout', None)
569+
task_execution_timeout = sensor_work.execution_context.get('execution_timeout')
570570
if task_execution_timeout:
571571
task_timeout = min(task_timeout, task_execution_timeout)
572572

airflow/utils/decorators.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Any:
6060
dag_args: Dict[str, Any] = {}
6161
dag_params: Dict[str, Any] = {}
6262

63-
dag = kwargs.get('dag', None) or DagContext.get_current_dag()
63+
dag = kwargs.get('dag') or DagContext.get_current_dag()
6464
if dag:
6565
dag_args = copy(dag.default_args) or {}
6666
dag_params = copy(dag.params) or {}

airflow/www/views.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -410,9 +410,9 @@ def get_int_arg(value, default=0):
410410
return default
411411

412412
arg_current_page = request.args.get('page', '0')
413-
arg_search_query = request.args.get('search', None)
414-
arg_tags_filter = request.args.getlist('tags', None)
415-
arg_status_filter = request.args.get('status', None)
413+
arg_search_query = request.args.get('search')
414+
arg_tags_filter = request.args.getlist('tags')
415+
arg_status_filter = request.args.get('status')
416416

417417
if request.args.get('reset_tags') is not None:
418418
flask_session[FILTER_TAGS_COOKIE] = None
@@ -1282,7 +1282,7 @@ def trigger(self, session=None):
12821282
state=State.RUNNING,
12831283
conf=run_conf,
12841284
external_trigger=True,
1285-
dag_hash=current_app.dag_bag.dags_hash.get(dag_id, None),
1285+
dag_hash=current_app.dag_bag.dags_hash.get(dag_id),
12861286
)
12871287

12881288
flash(

tests/providers/amazon/aws/hooks/test_s3.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ def test_create_bucket_us_standard_region(self, monkeypatch):
7979
hook.create_bucket(bucket_name='new_bucket', region_name='us-east-1')
8080
bucket = hook.get_bucket('new_bucket')
8181
assert bucket is not None
82-
region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint', None)
82+
region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint')
8383
# https://github.com/spulec/moto/pull/1961
8484
# If location is "us-east-1", LocationConstraint should be None
8585
assert region is None
@@ -90,7 +90,7 @@ def test_create_bucket_other_region(self):
9090
hook.create_bucket(bucket_name='new_bucket', region_name='us-east-2')
9191
bucket = hook.get_bucket('new_bucket')
9292
assert bucket is not None
93-
region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint', None)
93+
region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint')
9494
assert region == 'us-east-2'
9595

9696
def test_check_for_prefix(self, s3_bucket):

tests/providers/amazon/aws/hooks/test_step_function.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ def test_start_execution(self):
4141
name='pseudo-state-machine', definition='{}', roleArn='arn:aws:iam::000000000000:role/Role'
4242
)
4343

44-
state_machine_arn = state_machine.get('stateMachineArn', None)
44+
state_machine_arn = state_machine.get('stateMachineArn')
4545

4646
execution_arn = hook.start_execution(
4747
state_machine_arn=state_machine_arn, name=None, state_machine_input={}
@@ -56,7 +56,7 @@ def test_describe_execution(self):
5656
name='pseudo-state-machine', definition='{}', roleArn='arn:aws:iam::000000000000:role/Role'
5757
)
5858

59-
state_machine_arn = state_machine.get('stateMachineArn', None)
59+
state_machine_arn = state_machine.get('stateMachineArn')
6060

6161
execution_arn = hook.start_execution(
6262
state_machine_arn=state_machine_arn, name=None, state_machine_input={}

tests/providers/google/cloud/hooks/test_mlengine.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -714,7 +714,7 @@ def test_create_mlengine_job_check_existing_job_failed(self, mock_get_conn):
714714

715715
# fmt: on
716716
def check_input(existing_job):
717-
return existing_job.get('someInput', None) == my_job['someInput']
717+
return existing_job.get('someInput') == my_job['someInput']
718718

719719
with self.assertRaises(HttpError):
720720
self.hook.create_job(project_id=project_id, job=my_job, use_existing_job_fn=check_input)
@@ -748,7 +748,7 @@ def test_create_mlengine_job_check_existing_job_success(self, mock_get_conn):
748748

749749
# fmt: on
750750
def check_input(existing_job):
751-
return existing_job.get('someInput', None) == my_job['someInput']
751+
return existing_job.get('someInput') == my_job['someInput']
752752

753753
create_job_response = self.hook.create_job(
754754
project_id=project_id, job=my_job, use_existing_job_fn=check_input

tests/providers/salesforce/hooks/test_salesforce.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def test_get_conn(self, mock_salesforce, mock_get_connection):
5555
password=mock_get_connection.return_value.password,
5656
security_token=mock_get_connection.return_value.extra_dejson["security_token"],
5757
instance_url=mock_get_connection.return_value.host,
58-
domain=mock_get_connection.return_value.extra_dejson.get("domain", None),
58+
domain=mock_get_connection.return_value.extra_dejson.get("domain"),
5959
)
6060

6161
@patch("airflow.providers.salesforce.hooks.salesforce.Salesforce")

tests/test_utils/mock_plugins.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ def mock_plugin_manager(**kwargs):
6262
with ExitStack() as exit_stack:
6363
for attr in PLUGINS_MANAGER_NULLABLE_ATTRIBUTES:
6464
exit_stack.enter_context( # pylint: disable=no-member
65-
mock.patch(f"airflow.plugins_manager.{attr}", kwargs.get(attr, None))
65+
mock.patch(f"airflow.plugins_manager.{attr}", kwargs.get(attr))
6666
)
6767
exit_stack.enter_context( # pylint: disable=no-member
6868
mock.patch("airflow.plugins_manager.import_errors", kwargs.get("import_errors", {}))

0 commit comments

Comments (0)