Skip to content

Commit

Permalink
Fix remaining MyPy errors in Google Provider (#20358)
Browse files Browse the repository at this point in the history
  • Loading branch information
potiuk committed Dec 18, 2021
1 parent 05e4cd1 commit ed604b6
Show file tree
Hide file tree
Showing 51 changed files with 755 additions and 661 deletions.
2 changes: 1 addition & 1 deletion airflow/providers/google/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

# HACK:
# Sphinx-autoapi doesn't like imports to excluded packages in the main module.
conf = importlib.import_module('airflow.configuration').conf
conf = importlib.import_module('airflow.configuration').conf # type: ignore[attr-defined]

PROVIDERS_GOOGLE_VERBOSE_LOGGING: bool = conf.getboolean(
'providers_google', 'VERBOSE_LOGGING', fallback=False
Expand Down
72 changes: 36 additions & 36 deletions airflow/providers/google/cloud/hooks/automl.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
)
from google.protobuf.field_mask_pb2 import FieldMask

from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook


class CloudAutoMLHook(GoogleBaseHook):
Expand Down Expand Up @@ -102,9 +102,9 @@ def create_model(
self,
model: Union[dict, Model],
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
retry: Optional[Retry] = None,
) -> Operation:
"""
Expand Down Expand Up @@ -138,7 +138,7 @@ def create_model(
request={'parent': parent, 'model': model},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)

@GoogleBaseHook.fallback_to_default_project_id
Expand All @@ -148,11 +148,11 @@ def batch_predict(
input_config: Union[dict, BatchPredictInputConfig],
output_config: Union[dict, BatchPredictOutputConfig],
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
params: Optional[Dict[str, str]] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Operation:
"""
Perform a batch prediction. Unlike the online `Predict`, batch
Expand Down Expand Up @@ -199,7 +199,7 @@ def batch_predict(
},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -209,11 +209,11 @@ def predict(
model_id: str,
payload: Union[dict, ExamplePayload],
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
params: Optional[Dict[str, str]] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> PredictResponse:
"""
Perform an online prediction. The prediction result will be directly
Expand Down Expand Up @@ -249,7 +249,7 @@ def predict(
request={'name': name, 'payload': payload, 'params': params},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -258,10 +258,10 @@ def create_dataset(
self,
dataset: Union[dict, Dataset],
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Dataset:
"""
Creates a dataset.
Expand Down Expand Up @@ -291,7 +291,7 @@ def create_dataset(
request={'parent': parent, 'dataset': dataset},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -301,10 +301,10 @@ def import_data(
dataset_id: str,
location: str,
input_config: Union[dict, InputConfig],
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Operation:
"""
Imports data into a dataset. For Tables this method can only be called on an empty Dataset.
Expand Down Expand Up @@ -336,7 +336,7 @@ def import_data(
request={'name': name, 'input_config': input_config},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -346,13 +346,13 @@ def list_column_specs(
dataset_id: str,
table_spec_id: str,
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
field_mask: Optional[Union[dict, FieldMask]] = None,
filter_: Optional[str] = None,
page_size: Optional[int] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> ListColumnSpecsPager:
"""
Lists column specs in a table spec.
Expand Down Expand Up @@ -399,7 +399,7 @@ def list_column_specs(
request={'parent': parent, 'field_mask': field_mask, 'filter': filter_, 'page_size': page_size},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -408,10 +408,10 @@ def get_model(
self,
model_id: str,
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Model:
"""
Gets an AutoML model.
Expand Down Expand Up @@ -440,7 +440,7 @@ def get_model(
request={'name': name},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -449,10 +449,10 @@ def delete_model(
self,
model_id: str,
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Model:
"""
Deletes an AutoML model.
Expand Down Expand Up @@ -481,7 +481,7 @@ def delete_model(
request={'name': name},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -491,7 +491,7 @@ def update_dataset(
update_mask: Optional[Union[dict, FieldMask]] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Dataset:
"""
Updates a dataset.
Expand All @@ -518,7 +518,7 @@ def update_dataset(
request={'dataset': dataset, 'update_mask': update_mask},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -527,11 +527,11 @@ def deploy_model(
self,
model_id: str,
location: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
image_detection_metadata: Optional[Union[ImageObjectDetectionModelDeploymentMetadata, dict]] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Operation:
"""
Deploys a model. If a model is already deployed, deploying it with the same parameters
Expand Down Expand Up @@ -572,7 +572,7 @@ def deploy_model(
},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -585,7 +585,7 @@ def list_table_specs(
page_size: Optional[int] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> ListTableSpecsPager:
"""
Lists table specs in the dataset identified by ``dataset_id``.
Expand Down Expand Up @@ -625,7 +625,7 @@ def list_table_specs(
request={'parent': parent, 'filter': filter_, 'page_size': page_size},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -636,7 +636,7 @@ def list_datasets(
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> ListDatasetsPager:
"""
Lists datasets in a project.
Expand Down Expand Up @@ -666,7 +666,7 @@ def list_datasets(
request={'parent': parent},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result

Expand All @@ -678,7 +678,7 @@ def delete_dataset(
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Operation:
"""
Deletes a dataset and all of its contents.
Expand Down Expand Up @@ -707,6 +707,6 @@ def delete_dataset(
request={'name': name},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)
return result
22 changes: 11 additions & 11 deletions airflow/providers/google/cloud/hooks/bigquery_dts.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
)
from googleapiclient.discovery import Resource

from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook


def get_object_id(obj: dict) -> str:
Expand Down Expand Up @@ -104,11 +104,11 @@ def get_conn(self) -> DataTransferServiceClient:
def create_transfer_config(
self,
transfer_config: Union[dict, TransferConfig],
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
authorization_code: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> TransferConfig:
"""
Creates a new data transfer configuration.
Expand Down Expand Up @@ -146,17 +146,17 @@ def create_transfer_config(
},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)

@GoogleBaseHook.fallback_to_default_project_id
def delete_transfer_config(
self,
transfer_config_id: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
"""
Deletes transfer configuration.
Expand Down Expand Up @@ -192,12 +192,12 @@ def delete_transfer_config(
def start_manual_transfer_runs(
self,
transfer_config_id: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
requested_time_range: Optional[dict] = None,
requested_run_time: Optional[dict] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> StartManualTransferRunsResponse:
"""
Start manual transfer runs to be executed now with schedule_time equal
Expand Down Expand Up @@ -245,18 +245,18 @@ def start_manual_transfer_runs(
},
retry=retry,
timeout=timeout,
metadata=metadata or (),
metadata=metadata,
)

@GoogleBaseHook.fallback_to_default_project_id
def get_transfer_run(
self,
run_id: str,
transfer_config_id: str,
project_id: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, str]]] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> TransferRun:
"""
Returns information about the particular transfer run.
Expand Down

0 comments on commit ed604b6

Please sign in to comment.