
Commit 0f73647

D205 Support - Provider: Google (#32356)
* D205 Support - Provider: Google
* PR Fixes
1 parent 92497fa commit 0f73647
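
This commit applies pydocstyle rule D205, which requires exactly one blank line between a docstring's one-line summary and the rest of its description. A minimal sketch of the pattern, using a hypothetical function for illustration:

def create_resource(name: str) -> dict:
    """
    Create the named resource and return its metadata.

    The blank line above is what D205 enforces; without it, pydocstyle
    reports "D205: 1 blank line required between summary line and
    description".
    """
    return {"name": name}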

File tree

98 files changed: +567, -590 lines changed


airflow/providers/google/cloud/hooks/automl.py

Lines changed: 12 additions & 10 deletions
@@ -122,10 +122,10 @@ def create_model(
         retry: Retry | _MethodDefault = DEFAULT,
     ) -> Operation:
         """
-        Creates a model_id. Returns a Model in the `response` field when it
-        completes. When you create a model, several model evaluations are
-        created for it: a global evaluation, and one evaluation for each
-        annotation spec.
+        Creates a model_id and returns a Model in the `response` field when it completes.
+
+        When you create a model, several model evaluations are created for it:
+        a global evaluation, and one evaluation for each annotation spec.
 
         :param model: The model_id to create. If a dict is provided, it must be of the same form
             as the protobuf message `google.cloud.automl_v1beta1.types.Model`

@@ -163,9 +163,10 @@ def batch_predict(
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Operation:
         """
-        Perform a batch prediction. Unlike the online `Predict`, batch
-        prediction result won't be immediately available in the response.
-        Instead, a long running operation object is returned.
+        Perform a batch prediction and returns a long-running operation object.
+
+        Unlike the online `Predict`, batch prediction result won't be immediately
+        available in the response. Instead, a long-running operation object is returned.
 
         :param model_id: Name of the model_id requested to serve the batch prediction.
         :param input_config: Required. The input configuration for batch prediction.

@@ -215,8 +216,7 @@ def predict(
         metadata: Sequence[tuple[str, str]] = (),
     ) -> PredictResponse:
         """
-        Perform an online prediction. The prediction result will be directly
-        returned in the response.
+        Perform an online prediction and returns the prediction result in the response.
 
         :param model_id: Name of the model_id requested to serve the prediction.
         :param payload: Required. Payload to perform a prediction on. The payload must match the problem type

@@ -485,7 +485,9 @@ def deploy_model(
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Operation:
         """
-        Deploys a model. If a model is already deployed, deploying it with the same parameters
+        Deploys a model.
+
+        If a model is already deployed, deploying it with the same parameters
         has no effect. Deploying with different parameters (as e.g. changing node_number) will
         reset the deployment state without pausing the model_id's availability.
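
The batch_predict change above preserves the key contrast between the two prediction paths. A hedged usage sketch of CloudAutoMLHook — resource names, the connection, and the GCS URIs are placeholders, not values from this commit:

from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook

hook = CloudAutoMLHook()  # default gcp_conn_id; an assumption for this sketch

# Online prediction: the result is returned directly in the response.
response = hook.predict(
    model_id="my-model",                  # placeholder
    payload={"row": {"values": ["42"]}},  # must match the model's problem type
    location="us-central1",
    project_id="my-project",
)

# Batch prediction: a long-running google.api_core.operation.Operation is
# returned instead; block on .result() to wait for completion.
operation = hook.batch_predict(
    model_id="my-model",
    input_config={"gcs_source": {"input_uris": ["gs://my-bucket/input.csv"]}},
    output_config={"gcs_destination": {"output_uri_prefix": "gs://my-bucket/out/"}},
    location="us-central1",
    project_id="my-project",
)
operation.result()  # waits until the batch prediction finishes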

airflow/providers/google/cloud/hooks/bigquery_dts.py

Lines changed: 6 additions & 4 deletions
@@ -75,6 +75,8 @@ def __init__(
     @staticmethod
     def _disable_auto_scheduling(config: dict | TransferConfig) -> TransferConfig:
         """
+        Create a transfer config with the automatic scheduling disabled.
+
         In the case of Airflow, the customer needs to create a transfer config
         with the automatic scheduling disabled (UI, CLI or an Airflow operator) and
         then trigger a transfer run using a specialized Airflow operator that will

@@ -195,10 +197,10 @@ def start_manual_transfer_runs(
         metadata: Sequence[tuple[str, str]] = (),
     ) -> StartManualTransferRunsResponse:
         """
-        Start manual transfer runs to be executed now with schedule_time equal
-        to current time. The transfer runs can be created for a time range where
-        the run_time is between start_time (inclusive) and end_time
-        (exclusive), or for a specific run_time.
+        Start manual transfer runs to be executed now with schedule_time equal to current time.
+
+        The transfer runs can be created for a time range where the run_time is between
+        start_time (inclusive) and end_time (exclusive), or for a specific run_time.
 
         :param transfer_config_id: Id of transfer config to be used.
         :param requested_time_range: Time range for the transfer runs that should be started.

airflow/providers/google/cloud/hooks/bigtable.py

Lines changed: 6 additions & 2 deletions
@@ -69,8 +69,7 @@ def _get_client(self, project_id: str) -> Client:
     @GoogleBaseHook.fallback_to_default_project_id
     def get_instance(self, instance_id: str, project_id: str) -> Instance | None:
         """
-        Retrieves and returns the specified Cloud Bigtable instance if it exists.
-        Otherwise, returns None.
+        Retrieves and returns the specified Cloud Bigtable instance if it exists, otherwise returns None.
 
         :param instance_id: The ID of the Cloud Bigtable instance.
         :param project_id: Optional, Google Cloud project ID where the

@@ -86,6 +85,7 @@ def get_instance(self, instance_id: str, project_id: str) -> Instance | None:
     def delete_instance(self, instance_id: str, project_id: str) -> None:
         """
         Deletes the specified Cloud Bigtable instance.
+
         Raises google.api_core.exceptions.NotFound if the Cloud Bigtable instance does
         not exist.

@@ -217,6 +217,7 @@ def create_table(
     ) -> None:
         """
         Creates the specified Cloud Bigtable table.
+
         Raises ``google.api_core.exceptions.AlreadyExists`` if the table exists.
 
         :param instance: The Cloud Bigtable instance that owns the table.

@@ -238,6 +239,7 @@ def create_table(
     def delete_table(self, instance_id: str, table_id: str, project_id: str) -> None:
         """
         Deletes the specified table in Cloud Bigtable.
+
         Raises google.api_core.exceptions.NotFound if the table does not exist.
 
         :param instance_id: The ID of the Cloud Bigtable instance.

@@ -256,6 +258,7 @@ def delete_table(self, instance_id: str, table_id: str, project_id: str) -> None
     def update_cluster(instance: Instance, cluster_id: str, nodes: int) -> None:
         """
         Updates number of nodes in the specified Cloud Bigtable cluster.
+
         Raises google.api_core.exceptions.NotFound if the cluster does not exist.
 
         :param instance: The Cloud Bigtable instance that owns the cluster.

@@ -284,6 +287,7 @@ def get_column_families_for_table(instance: Instance, table_id: str) -> dict[str
     def get_cluster_states_for_table(instance: Instance, table_id: str) -> dict[str, ClusterState]:
         """
         Fetches Cluster States for the specified table in Cloud Bigtable.
+
         Raises google.api_core.exceptions.NotFound if the table does not exist.
 
         :param instance: The Cloud Bigtable instance that owns the table.
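
The Bigtable docstrings spell out a behavioral contrast worth noting: get_instance returns None for a missing instance, while delete_instance raises NotFound. A hedged sketch (project, instance, and connection ids are placeholders):

from google.api_core.exceptions import NotFound

from airflow.providers.google.cloud.hooks.bigtable import BigtableHook

hook = BigtableHook(gcp_conn_id="google_cloud_default")  # connection id is an assumption

# get_instance returns None rather than raising when the instance is missing.
instance = hook.get_instance(instance_id="my-instance", project_id="my-project")
if instance is None:
    print("instance does not exist")

# delete_instance raises google.api_core.exceptions.NotFound instead, so guard it.
try:
    hook.delete_instance(instance_id="my-instance", project_id="my-project")
except NotFound:
    print("nothing to delete")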

airflow/providers/google/cloud/hooks/cloud_build.py

Lines changed: 1 addition & 2 deletions
@@ -502,8 +502,7 @@ def retry_build(
         location: str = "global",
     ) -> Build:
         """
-        Creates a new build based on the specified build. This method creates a new build
-        using the original build request, which may or may not result in an identical build.
+        Create a new build using the original build request; may or may not result in an identical build.
 
         :param id_: Build ID of the original build.
         :param project_id: Optional, Google Cloud Project project_id where the function belongs.

airflow/providers/google/cloud/hooks/cloud_composer.py

Lines changed: 1 addition & 0 deletions
@@ -156,6 +156,7 @@ def get_environment(
     ) -> Environment:
         """
         Get an existing environment.
+
         :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
         :param region: Required. The ID of the Google Cloud region that the service belongs to.
         :param environment_id: Required. The ID of the Google Cloud environment that the service belongs to.

airflow/providers/google/cloud/hooks/cloud_sql.py

Lines changed: 6 additions & 8 deletions
@@ -304,8 +304,7 @@ def delete_database(self, instance: str, database: str, project_id: str) -> None
     @GoogleBaseHook.fallback_to_default_project_id
     def export_instance(self, instance: str, body: dict, project_id: str):
         """
-        Exports data from a Cloud SQL instance to a Cloud Storage bucket as a SQL dump
-        or CSV file.
+        Exports data from a Cloud SQL instance to a Cloud Storage bucket as a SQL dump or CSV file.
 
         :param instance: Database instance ID of the Cloud SQL instance. This does not include the
             project ID.

@@ -327,8 +326,7 @@ def export_instance(self, instance: str, body: dict, project_id: str):
     @GoogleBaseHook.fallback_to_default_project_id
     def import_instance(self, instance: str, body: dict, project_id: str) -> None:
         """
-        Imports data into a Cloud SQL instance from a SQL dump or CSV file in
-        Cloud Storage.
+        Imports data into a Cloud SQL instance from a SQL dump or CSV file in Cloud Storage.
 
         :param instance: Database instance ID. This does not include the
             project ID.

@@ -382,8 +380,7 @@ def _wait_for_operation_to_complete(
         self, project_id: str, operation_name: str, time_to_sleep: int = TIME_TO_SLEEP_IN_SECONDS
     ) -> None:
         """
-        Waits for the named operation to complete - checks status of the
-        asynchronous call.
+        Waits for the named operation to complete - checks status of the asynchronous call.
 
         :param project_id: Project ID of the project that contains the instance.
         :param operation_name: Name of the operation.

@@ -721,10 +718,11 @@ def get_socket_path(self) -> str:
 
 
 class CloudSQLDatabaseHook(BaseHook):
-    """Serves DB connection configuration for Google Cloud SQL (Connections
-    of *gcpcloudsqldb://* type).
+    """
+    Serves DB connection configuration for Google Cloud SQL (Connections of *gcpcloudsqldb://* type).
 
     The hook is a "meta" one. It does not perform an actual connection.
+
     It is there to retrieve all the parameters configured in gcpcloudsql:// connection,
     start/stop Cloud SQL Proxy if needed, dynamically generate Postgres or MySQL
     connection in the database and return an actual Postgres or MySQL hook.
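
The reworked CloudSQLDatabaseHook docstring describes a "meta" hook that never connects itself but hands back a real Postgres or MySQL hook. A hedged sketch of that flow, mirroring how the provider's query operator drives it; the connection id is a placeholder:

from airflow.providers.google.cloud.hooks.cloud_sql import CloudSQLDatabaseHook

# The meta hook itself never talks to the database; it reads the
# gcpcloudsqldb:// connection and builds a Postgres or MySQL hook from it.
hook = CloudSQLDatabaseHook(gcp_cloudsql_conn_id="my_gcpcloudsql_conn")
hook.validate_ssl_certs()
connection = hook.create_connection()
db_hook = hook.get_database_hook(connection=connection)  # PostgresHook or MySqlHook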

airflow/providers/google/cloud/hooks/compute.py

Lines changed: 19 additions & 8 deletions
@@ -107,6 +107,7 @@ def insert_instance_template(
     ) -> None:
         """
         Creates Instance Template using body specified.
+
         Must be called with keyword arguments rather than positional.
 
         :param body: Instance Template representation as an object.

@@ -158,9 +159,10 @@ def delete_instance_template(
     ) -> None:
         """
         Deletes Instance Template.
-        Deleting an Instance Template is permanent and cannot be undone. It
-        is not possible to delete templates that are already in use by a managed instance group.
-        Must be called with keyword arguments rather than positional.
+
+        Deleting an Instance Template is permanent and cannot be undone. It is not
+        possible to delete templates that are already in use by a managed instance
+        group. Must be called with keyword arguments rather than positional.
 
         :param resource_id: Name of the Compute Engine Instance Template resource.
         :param request_id: Unique request_id that you might add to achieve

@@ -210,6 +212,7 @@ def get_instance_template(
     ) -> InstanceTemplate:
         """
         Retrieves Instance Template by project_id and resource_id.
+
         Must be called with keyword arguments rather than positional.
 
         :param resource_id: Name of the Instance Template.

@@ -259,6 +262,7 @@ def insert_instance(
     ) -> None:
         """
         Creates Instance using body specified.
+
         Must be called with keyword arguments rather than positional.
 
         :param body: Instance representation as an object. Should at least include 'name', 'machine_type',

@@ -332,6 +336,7 @@ def get_instance(
     ) -> Instance:
         """
         Retrieves Instance by project_id and resource_id.
+
         Must be called with keyword arguments rather than positional.
 
         :param resource_id: Name of the Instance

@@ -383,8 +388,8 @@ def delete_instance(
         metadata: Sequence[tuple[str, str]] = (),
     ) -> None:
         """
-        Deletes Instance.
-        Deleting an Instance is permanent and cannot be undone.
+        Permanently and irrevocably deletes an Instance.
+
         It is not possible to delete Instances that are already in use by a managed instance group.
         Must be called with keyword arguments rather than positional.

@@ -433,6 +438,7 @@ def delete_instance(
     def start_instance(self, zone: str, resource_id: str, project_id: str) -> None:
         """
         Starts an existing instance defined by project_id, zone and resource_id.
+
         Must be called with keyword arguments rather than positional.
 
         :param zone: Google Cloud zone where the instance exists

@@ -457,7 +463,8 @@ def start_instance(self, zone: str, resource_id: str, project_id: str) -> None:
     @GoogleBaseHook.fallback_to_default_project_id
     def stop_instance(self, zone: str, resource_id: str, project_id: str) -> None:
         """
-        Stops an instance defined by project_id, zone and resource_id
+        Stops an instance defined by project_id, zone and resource_id.
+
         Must be called with keyword arguments rather than positional.
 
         :param zone: Google Cloud zone where the instance exists

@@ -483,6 +490,7 @@ def stop_instance(self, zone: str, resource_id: str, project_id: str) -> None:
     def set_machine_type(self, zone: str, resource_id: str, body: dict, project_id: str) -> None:
         """
         Sets machine type of an instance defined by project_id, zone and resource_id.
+
         Must be called with keyword arguments rather than positional.
 
         :param zone: Google Cloud zone where the instance exists.

@@ -524,6 +532,7 @@ def insert_instance_group_manager(
     ) -> None:
         """
         Creates an Instance Group Managers using the body specified.
+
         After the group is created, instances in the group are created using the specified Instance Template.
         Must be called with keyword arguments rather than positional.

@@ -580,6 +589,7 @@ def get_instance_group_manager(
     ) -> InstanceGroupManager:
         """
         Retrieves Instance Group Manager by project_id, zone and resource_id.
+
         Must be called with keyword arguments rather than positional.
 
         :param resource_id: The name of the Managed Instance Group

@@ -631,8 +641,8 @@ def delete_instance_group_manager(
         metadata: Sequence[tuple[str, str]] = (),
     ) -> None:
         """
-        Deletes Instance Group Managers.
-        Deleting an Instance Group Manager is permanent and cannot be undone.
+        Permanently and irrevocably deletes Instance Group Managers.
+
         Must be called with keyword arguments rather than positional.
 
         :param resource_id: Name of the Compute Engine Instance Group Managers resource.

@@ -687,6 +697,7 @@ def patch_instance_group_manager(
     ) -> None:
         """
         Patches Instance Group Manager with the specified body.
+
         Must be called with keyword arguments rather than positional.
 
         :param zone: Google Cloud zone where the Instance Group Manager exists

airflow/providers/google/cloud/hooks/dataflow.py

Lines changed: 1 addition & 2 deletions
@@ -389,8 +389,7 @@ def _refresh_jobs(self) -> None:
 
     def _check_dataflow_job_state(self, job) -> bool:
         """
-        Helper method to check the state of one job in dataflow for this task
-        if job failed raise exception.
+        Helper method to check the state of one job in dataflow for this task if job failed raise exception.
 
         :return: True if job is done.
         :raise: Exception

airflow/providers/google/cloud/hooks/datafusion.py

Lines changed: 2 additions & 4 deletions
@@ -103,10 +103,7 @@ def wait_for_pipeline_state(
         failure_states: list[str] | None = None,
         timeout: int = 5 * 60,
     ) -> None:
-        """
-        Polls pipeline state and raises an exception if the state is one of
-        `failure_states` or the operation timed_out.
-        """
+        """Polls pipeline state and raises an exception if the state fails or times out."""
         failure_states = failure_states or FAILURE_STATES
         success_states = success_states or SUCCESS_STATES
         start_time = monotonic()

@@ -190,6 +187,7 @@ def get_conn(self) -> Resource:
     def restart_instance(self, instance_name: str, location: str, project_id: str) -> Operation:
         """
         Restart a single Data Fusion instance.
+
         At the end of an operation instance is fully restarted.
 
         :param instance_name: The name of the instance to restart.

airflow/providers/google/cloud/hooks/dataprep.py

Lines changed: 1 addition & 0 deletions
@@ -105,6 +105,7 @@ def get_jobs_for_job_group(self, job_id: int) -> dict[str, Any]:
     def get_job_group(self, job_group_id: int, embed: str, include_deleted: bool) -> dict[str, Any]:
         """
         Get the specified job group.
+
         A job group is a job that is executed from a specific node in a flow.
 
         :param job_group_id: The ID of the job that will be fetched
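
To confirm the reflowed docstrings satisfy the rule, the check can be scoped to D205 alone. A minimal sketch using pydocstyle's Python API — assuming pydocstyle, rather than another checker, is the tool in use:

from pydocstyle import check

# Report only D205 violations for one of the files touched by this commit.
for error in check(
    ["airflow/providers/google/cloud/hooks/dataprep.py"],
    select=("D205",),
):
    print(error)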
