Commit b115257

Use literal dict instead of calling dict() in providers (#33761)
1 parent 1e81ed1 commit b115257

File tree

23 files changed: +491 −484 lines changed
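The change is mechanical throughout: a dict literal builds the same mapping as the equivalent dict(...) call, just without the builtin lookup and call overhead, and it also admits keys that are not valid identifiers. A minimal standalone sketch of the equivalence (not taken from any file below):

# Illustrative only: both spellings produce equal dictionaries.
kwargs_call = dict(Id="stmt-123")      # old style: keyword arguments to dict()
kwargs_literal = {"Id": "stmt-123"}    # new style: dict literal
assert kwargs_call == kwargs_literal

# The literal form also accepts keys that dict() keyword syntax cannot express,
# such as names containing hyphens or spaces.
headers = {"Content-Type": "application/json"}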


airflow/providers/amazon/aws/hooks/redshift_data.py

Lines changed: 1 addition & 1 deletion
@@ -188,7 +188,7 @@ def get_table_primary_key(
         pk_columns = []
         token = ""
         while True:
-            kwargs = dict(Id=stmt_id)
+            kwargs = {"Id": stmt_id}
             if token:
                 kwargs["NextToken"] = token
             response = self.conn.get_statement_result(**kwargs)

airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py

Lines changed: 1 addition & 1 deletion
@@ -246,7 +246,7 @@ def clear_not_launched_queued_tasks(self, session: Session = NEW_SESSION) -> Non
             if ti.map_index >= 0:
                 # Old tasks _couldn't_ be mapped, so we don't have to worry about compat
                 base_label_selector += f",map_index={ti.map_index}"
-            kwargs = dict(label_selector=base_label_selector)
+            kwargs = {"label_selector": base_label_selector}
             if self.kube_config.kube_client_request_args:
                 kwargs.update(**self.kube_config.kube_client_request_args)

airflow/providers/cncf/kubernetes/operators/pod.py

Lines changed: 4 additions & 4 deletions
@@ -852,10 +852,10 @@ def patch_already_checked(self, pod: k8s.V1Pod, *, reraise=True):
     def on_kill(self) -> None:
         if self.pod:
             pod = self.pod
-            kwargs = dict(
-                name=pod.metadata.name,
-                namespace=pod.metadata.namespace,
-            )
+            kwargs = {
+                "name": pod.metadata.name,
+                "namespace": pod.metadata.namespace,
+            }
             if self.termination_grace_period is not None:
                 kwargs.update(grace_period_seconds=self.termination_grace_period)
             self.client.delete_namespaced_pod(**kwargs)
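The surrounding code shows the usual shape of these call sites: seed a kwargs dict with a literal, add optional keys, then unpack it with **. A minimal generic sketch of that pattern (delete_pod below is a hypothetical stand-in, not the Kubernetes client API):

# Build kwargs conditionally, then unpack them into the call.
def delete_pod(name: str, namespace: str, grace_period_seconds: int | None = None) -> None:
    # Hypothetical helper used only to illustrate the unpacking.
    print(f"deleting {namespace}/{name} (grace={grace_period_seconds})")

termination_grace_period = 30  # may also be None
kwargs = {"name": "my-pod", "namespace": "default"}
if termination_grace_period is not None:
    kwargs["grace_period_seconds"] = termination_grace_period
delete_pod(**kwargs)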

airflow/providers/databricks/hooks/databricks_base.py

Lines changed: 6 additions & 6 deletions
@@ -121,12 +121,12 @@ def my_after_func(retry_state):
             self.retry_args["retry"] = retry_if_exception(self._retryable_error)
             self.retry_args["after"] = my_after_func
         else:
-            self.retry_args = dict(
-                stop=stop_after_attempt(self.retry_limit),
-                wait=wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
-                retry=retry_if_exception(self._retryable_error),
-                after=my_after_func,
-            )
+            self.retry_args = {
+                "stop": stop_after_attempt(self.retry_limit),
+                "wait": wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
+                "retry": retry_if_exception(self._retryable_error),
+                "after": my_after_func,
+            }
 
     @cached_property
     def databricks_conn(self) -> Connection:
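For context, a retry_args mapping like the one above is meant to be unpacked into tenacity's retry machinery; a minimal sketch under that assumption (standalone values, not the hook's actual configuration):

# Sketch only: unpacking a retry_args dict into tenacity.Retrying.
from tenacity import Retrying, retry_if_exception_type, stop_after_attempt, wait_exponential

retry_args = {
    "stop": stop_after_attempt(3),
    "wait": wait_exponential(min=1, max=8),
    "retry": retry_if_exception_type(ConnectionError),
}

for attempt in Retrying(**retry_args):
    with attempt:
        pass  # the request that may need retrying goes here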

airflow/providers/docker/decorators/docker.py

Lines changed: 9 additions & 9 deletions
@@ -112,15 +112,15 @@ def execute(self, context: Context):
             self.pickling_library.dump({"args": self.op_args, "kwargs": self.op_kwargs}, file)
         py_source = self.get_python_source()
         write_python_script(
-            jinja_context=dict(
-                op_args=self.op_args,
-                op_kwargs=self.op_kwargs,
-                pickling_library=self.pickling_library.__name__,
-                python_callable=self.python_callable.__name__,
-                python_callable_source=py_source,
-                expect_airflow=self.expect_airflow,
-                string_args_global=False,
-            ),
+            jinja_context={
+                "op_args": self.op_args,
+                "op_kwargs": self.op_kwargs,
+                "pickling_library": self.pickling_library.__name__,
+                "python_callable": self.python_callable.__name__,
+                "python_callable_source": py_source,
+                "expect_airflow": self.expect_airflow,
+                "string_args_global": False,
+            },
             filename=script_filename,
         )

airflow/providers/elasticsearch/hooks/elasticsearch.py

Lines changed: 7 additions & 7 deletions
@@ -92,13 +92,13 @@ def get_conn(self) -> ESConnection:
         conn_id = getattr(self, self.conn_name_attr)
         conn = self.connection or self.get_connection(conn_id)
 
-        conn_args = dict(
-            host=conn.host,
-            port=conn.port,
-            user=conn.login or None,
-            password=conn.password or None,
-            scheme=conn.schema or "http",
-        )
+        conn_args = {
+            "host": conn.host,
+            "port": conn.port,
+            "user": conn.login or None,
+            "password": conn.password or None,
+            "scheme": conn.schema or "http",
+        }
 
         if conn.extra_dejson.get("http_compress", False):
             conn_args["http_compress"] = bool(["http_compress"])

airflow/providers/exasol/hooks/exasol.py

Lines changed: 6 additions & 6 deletions
@@ -56,12 +56,12 @@ def __init__(self, *args, **kwargs) -> None:
     def get_conn(self) -> ExaConnection:
         conn_id = getattr(self, self.conn_name_attr)
         conn = self.get_connection(conn_id)
-        conn_args = dict(
-            dsn=f"{conn.host}:{conn.port}",
-            user=conn.login,
-            password=conn.password,
-            schema=self.schema or conn.schema,
-        )
+        conn_args = {
+            "dsn": f"{conn.host}:{conn.port}",
+            "user": conn.login,
+            "password": conn.password,
+            "schema": self.schema or conn.schema,
+        }
         # check for parameters in conn.extra
         for arg_name, arg_val in conn.extra_dejson.items():
             if arg_name in ["compression", "encryption", "json_lib", "client_name"]:

airflow/providers/google/ads/hooks/ads.py

Lines changed: 3 additions & 1 deletion
@@ -224,7 +224,9 @@ def _search(
 
         iterators = []
         for client_id in client_ids:
-            iterator = service.search(request=dict(customer_id=client_id, query=query, page_size=page_size))
+            iterator = service.search(
+                request={"customer_id": client_id, "query": query, "page_size": page_size}
+            )
             iterators.append(iterator)
 
         self.log.info("Fetched Google Ads Iterators")

airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py

Lines changed: 26 additions & 26 deletions
@@ -100,19 +100,19 @@ def get_absolute_path(path):
     return os.path.join(HOME_DIR, path)
 
 
-postgres_kwargs = dict(
-    user=quote_plus(GCSQL_POSTGRES_USER),
-    password=quote_plus(GCSQL_POSTGRES_PASSWORD),
-    public_port=GCSQL_POSTGRES_PUBLIC_PORT,
-    public_ip=quote_plus(GCSQL_POSTGRES_PUBLIC_IP),
-    project_id=quote_plus(GCP_PROJECT_ID),
-    location=quote_plus(GCP_REGION),
-    instance=quote_plus(GCSQL_POSTGRES_INSTANCE_NAME_QUERY),
-    database=quote_plus(GCSQL_POSTGRES_DATABASE_NAME),
-    client_cert_file=quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_CERT_FILE)),
-    client_key_file=quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_KEY_FILE)),
-    server_ca_file=quote_plus(get_absolute_path(GCSQL_POSTGRES_SERVER_CA_FILE)),
-)
+postgres_kwargs = {
+    "user": quote_plus(GCSQL_POSTGRES_USER),
+    "password": quote_plus(GCSQL_POSTGRES_PASSWORD),
+    "public_port": GCSQL_POSTGRES_PUBLIC_PORT,
+    "public_ip": quote_plus(GCSQL_POSTGRES_PUBLIC_IP),
+    "project_id": quote_plus(GCP_PROJECT_ID),
+    "location": quote_plus(GCP_REGION),
+    "instance": quote_plus(GCSQL_POSTGRES_INSTANCE_NAME_QUERY),
+    "database": quote_plus(GCSQL_POSTGRES_DATABASE_NAME),
+    "client_cert_file": quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_CERT_FILE)),
+    "client_key_file": quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_KEY_FILE)),
+    "server_ca_file": quote_plus(get_absolute_path(GCSQL_POSTGRES_SERVER_CA_FILE)),
+}
 
 # The connections below are created using one of the standard approaches - via environment
 # variables named AIRFLOW_CONN_* . The connections can also be created in the database

@@ -166,19 +166,19 @@ def get_absolute_path(path):
     "sslrootcert={server_ca_file}".format(**postgres_kwargs)
 )
 
-mysql_kwargs = dict(
-    user=quote_plus(GCSQL_MYSQL_USER),
-    password=quote_plus(GCSQL_MYSQL_PASSWORD),
-    public_port=GCSQL_MYSQL_PUBLIC_PORT,
-    public_ip=quote_plus(GCSQL_MYSQL_PUBLIC_IP),
-    project_id=quote_plus(GCP_PROJECT_ID),
-    location=quote_plus(GCP_REGION),
-    instance=quote_plus(GCSQL_MYSQL_INSTANCE_NAME_QUERY),
-    database=quote_plus(GCSQL_MYSQL_DATABASE_NAME),
-    client_cert_file=quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_CERT_FILE)),
-    client_key_file=quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_KEY_FILE)),
-    server_ca_file=quote_plus(get_absolute_path(GCSQL_MYSQL_SERVER_CA_FILE)),
-)
+mysql_kwargs = {
+    "user": quote_plus(GCSQL_MYSQL_USER),
+    "password": quote_plus(GCSQL_MYSQL_PASSWORD),
+    "public_port": GCSQL_MYSQL_PUBLIC_PORT,
+    "public_ip": quote_plus(GCSQL_MYSQL_PUBLIC_IP),
+    "project_id": quote_plus(GCP_PROJECT_ID),
+    "location": quote_plus(GCP_REGION),
+    "instance": quote_plus(GCSQL_MYSQL_INSTANCE_NAME_QUERY),
+    "database": quote_plus(GCSQL_MYSQL_DATABASE_NAME),
+    "client_cert_file": quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_CERT_FILE)),
+    "client_key_file": quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_KEY_FILE)),
+    "server_ca_file": quote_plus(get_absolute_path(GCSQL_MYSQL_SERVER_CA_FILE)),
+}
 
 # MySQL: connect via proxy over TCP (specific proxy version)
 os.environ["AIRFLOW_CONN_PROXY_MYSQL_TCP"] = (
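These kwargs dicts are only ever consumed by str.format(**...) when the connection URIs are assembled (as in the sslrootcert line above), so switching to string keys changes nothing for the templates. A minimal sketch of that usage (simplified template and placeholder values, not the example's full URI):

# str.format(**mapping) resolves replacement fields by key, so a dict literal
# with string keys behaves exactly like the old dict() call did.
postgres_kwargs = {
    "user": "postgres_user",
    "public_ip": "203.0.113.10",
    "public_port": 5432,
    "database": "testdb",
}
uri = "gcpcloudsql://{user}@{public_ip}:{public_port}/{database}".format(**postgres_kwargs)
print(uri)  # gcpcloudsql://postgres_user@203.0.113.10:5432/testdb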

airflow/providers/google/cloud/hooks/bigtable.py

Lines changed: 5 additions & 5 deletions
@@ -148,11 +148,11 @@ def create_instance(
             instance_labels,
         )
 
-        cluster_kwargs = dict(
-            cluster_id=main_cluster_id,
-            location_id=main_cluster_zone,
-            default_storage_type=cluster_storage_type,
-        )
+        cluster_kwargs = {
+            "cluster_id": main_cluster_id,
+            "location_id": main_cluster_zone,
+            "default_storage_type": cluster_storage_type,
+        }
         if instance_type != enums.Instance.Type.DEVELOPMENT and cluster_nodes:
             cluster_kwargs["serve_nodes"] = cluster_nodes
         clusters = [instance.cluster(**cluster_kwargs)]

airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py

Lines changed: 1 addition & 1 deletion
@@ -517,7 +517,7 @@ async def get_jobs(self, job_names: list[str]) -> ListTransferJobsAsyncPager:
         """
         client = self.get_conn()
         jobs_list_request = ListTransferJobsRequest(
-            filter=json.dumps(dict(project_id=self.project_id, job_names=job_names))
+            filter=json.dumps({"project_id": self.project_id, "job_names": job_names})
         )
         return await client.list_transfer_jobs(request=jobs_list_request)

airflow/providers/google/cloud/hooks/compute_ssh.py

Lines changed: 1 addition & 1 deletion
@@ -314,7 +314,7 @@ def _authorize_compute_engine_instance_metadata(self, pubkey):
                 item["value"] = keys
                 break
         else:
-            new_dict = dict(key="ssh-keys", value=keys)
+            new_dict = {"key": "ssh-keys", "value": keys}
             metadata["items"] = [new_dict]
 
         self._compute_hook.set_instance_metadata(
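The else: in this hunk appears to pair with the enclosing for loop (Python's for/else) rather than an if, so the new dict literal is built only when the loop finds no existing ssh-keys entry and never hits break. A standalone sketch of that construct (illustrative metadata items, not the hook's real structure):

# for/else: the else block runs only if the loop completed without break.
items = [{"key": "other-key", "value": "something"}]
keys = "ssh-rsa AAAA... airflow"
for item in items:
    if item.get("key") == "ssh-keys":
        item["value"] = keys
        break
else:
    # no ssh-keys entry found, so create one
    items = [{"key": "ssh-keys", "value": keys}]
print(items)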

airflow/providers/google/cloud/hooks/dataflow.py

Lines changed: 6 additions & 6 deletions
@@ -1236,12 +1236,12 @@ async def get_job(
         client = await self.initialize_client(JobsV1Beta3AsyncClient)
 
         request = GetJobRequest(
-            dict(
-                project_id=project_id,
-                job_id=job_id,
-                view=job_view,
-                location=location,
-            )
+            {
+                "project_id": project_id,
+                "job_id": job_id,
+                "view": job_view,
+                "location": location,
+            }
         )
 
         job = await client.get_job(
