Commit

Remove args parameter from provider operator constructors (#10097)
chipmyersjr committed Aug 1, 2020
1 parent 402e22c commit aeea712
Showing 97 changed files with 392 additions and 644 deletions.
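
The change is mechanical and repeated across all 97 files: *args is dropped from each operator's __init__ signature, and positional arguments are no longer forwarded to BaseOperator.__init__. BaseOperator must be initialized with keyword arguments (apply_defaults rejects positional ones), so the forwarded *args could never carry anything useful and only obscured the eventual error. A minimal before/after sketch of the pattern; ExampleOperator and its fields are illustrative, not taken from this commit:

from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults


class ExampleOperator(BaseOperator):
    """Illustrative operator showing the pattern applied in this commit."""

    @apply_defaults
    def __init__(self, query, aws_conn_id='aws_default', **kwargs):
        # Before: def __init__(self, query, ..., *args, **kwargs) followed by
        # super().__init__(*args, **kwargs). Positional args were never valid
        # for BaseOperator, so accepting them only delayed the failure.
        super().__init__(**kwargs)
        self.query = query
        self.aws_conn_id = aws_conn_id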
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/operators/athena.py
@@ -59,10 +59,9 @@ def __init__(  # pylint: disable=too-many-arguments
     result_configuration=None,
     sleep_time=30,
     max_tries=None,
-    *args,
     **kwargs
 ):
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.query = query
     self.database = database
     self.output_location = output_location
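
For DAG authors, the practical effect is that operator arguments are keyword-only: a stray positional argument now fails at the call site instead of being silently absorbed by *args. A usage sketch, assuming the AWSAthenaOperator class this file defines; the query, database, and bucket values are made up:

from airflow.providers.amazon.aws.operators.athena import AWSAthenaOperator

run_query = AWSAthenaOperator(
    task_id='run_athena_query',  # BaseOperator argument, forwarded via **kwargs
    query='SELECT 1',            # hypothetical query
    database='default',          # hypothetical database
    output_location='s3://example-bucket/athena-results/',  # hypothetical bucket
    sleep_time=30,
)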
8 changes: 4 additions & 4 deletions airflow/providers/amazon/aws/operators/cloud_formation.py
@@ -49,8 +49,8 @@ def __init__(
     stack_name,
     params,
     aws_conn_id='aws_default',
-    *args, **kwargs):
-    super().__init__(*args, **kwargs)
+    **kwargs):
+    super().__init__(**kwargs)
     self.stack_name = stack_name
     self.params = params
     self.aws_conn_id = aws_conn_id
@@ -87,8 +87,8 @@ def __init__(
     stack_name,
     params=None,
     aws_conn_id='aws_default',
-    *args, **kwargs):
-    super().__init__(*args, **kwargs)
+    **kwargs):
+    super().__init__(**kwargs)
     self.params = params or {}
     self.stack_name = stack_name
     self.params = params
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/operators/datasync.py
@@ -121,10 +121,9 @@ def __init__(
     update_task_kwargs=None,
     task_execution_kwargs=None,
     delete_task_after_execution=False,
-    *args,
     **kwargs
 ):
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)

     # Assignments
     self.aws_conn_id = aws_conn_id
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/operators/ec2_start_instance.py
@@ -49,9 +49,8 @@ def __init__(self,
     aws_conn_id: str = "aws_default",
     region_name: Optional[str] = None,
     check_interval: float = 15,
-    *args,
     **kwargs):
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.instance_id = instance_id
     self.aws_conn_id = aws_conn_id
     self.region_name = region_name
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/operators/ec2_stop_instance.py
@@ -49,9 +49,8 @@ def __init__(self,
     aws_conn_id: str = "aws_default",
     region_name: Optional[str] = None,
     check_interval: float = 15,
-    *args,
     **kwargs):
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.instance_id = instance_id
     self.aws_conn_id = aws_conn_id
     self.region_name = region_name
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/emr_add_steps.py
@@ -56,12 +56,12 @@ def __init__(
     cluster_states=None,
     aws_conn_id='aws_default',
     steps=None,
-    *args, **kwargs):
+    **kwargs):
     if kwargs.get('xcom_push') is not None:
         raise AirflowException("'xcom_push' was deprecated, use 'do_xcom_push' instead")
     if not (job_flow_id is None) ^ (job_flow_name is None):
         raise AirflowException('Exactly one of job_flow_id or job_flow_name must be specified.')
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     steps = steps or []
     self.aws_conn_id = aws_conn_id
     self.job_flow_id = job_flow_id
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/emr_create_job_flow.py
@@ -48,8 +48,8 @@ def __init__(
     emr_conn_id='emr_default',
     job_flow_overrides=None,
     region_name=None,
-    *args, **kwargs):
-    super().__init__(*args, **kwargs)
+    **kwargs):
+    super().__init__(**kwargs)
     self.aws_conn_id = aws_conn_id
     self.emr_conn_id = emr_conn_id
     if job_flow_overrides is None:
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/emr_modify_cluster.py
@@ -43,10 +43,10 @@ def __init__(
     cluster_id: str,
     step_concurrency_level: int,
     aws_conn_id: str = 'aws_default',
-    *args, **kwargs):
+    **kwargs):
     if kwargs.get('xcom_push') is not None:
         raise AirflowException("'xcom_push' was deprecated, use 'do_xcom_push' instead")
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.aws_conn_id = aws_conn_id
     self.cluster_id = cluster_id
     self.step_concurrency_level = step_concurrency_level
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py
@@ -39,8 +39,8 @@ def __init__(
     self,
     job_flow_id,
     aws_conn_id='aws_default',
-    *args, **kwargs):
-    super().__init__(*args, **kwargs)
+    **kwargs):
+    super().__init__(**kwargs)
     self.job_flow_id = job_flow_id
     self.aws_conn_id = aws_conn_id

4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/glue.py
@@ -69,9 +69,9 @@ def __init__(self,
     region_name=None,
     s3_bucket=None,
     iam_role_name=None,
-    *args, **kwargs
+    **kwargs
     ):  # pylint: disable=too-many-arguments
-    super(AwsGlueJobOperator, self).__init__(*args, **kwargs)
+    super(AwsGlueJobOperator, self).__init__(**kwargs)
     self.job_name = job_name
     self.job_desc = job_desc
     self.script_location = script_location
6 changes: 2 additions & 4 deletions airflow/providers/amazon/aws/operators/s3_bucket.py
@@ -43,9 +43,8 @@ def __init__(self,
     bucket_name,
     aws_conn_id: Optional[str] = "aws_default",
     region_name: Optional[str] = None,
-    *args,
     **kwargs) -> None:
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.bucket_name = bucket_name
     self.region_name = region_name
     self.aws_conn_id = aws_conn_id
@@ -79,9 +78,8 @@ def __init__(self,
     bucket_name,
     force_delete: Optional[bool] = False,
     aws_conn_id: Optional[str] = "aws_default",
-    *args,
     **kwargs) -> None:
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.bucket_name = bucket_name
     self.force_delete = force_delete
     self.aws_conn_id = aws_conn_id
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/s3_copy_object.py
@@ -77,8 +77,8 @@ def __init__(
     source_version_id=None,
     aws_conn_id='aws_default',
     verify=None,
-    *args, **kwargs):
-    super().__init__(*args, **kwargs)
+    **kwargs):
+    super().__init__(**kwargs)

     self.source_bucket_key = source_bucket_key
     self.dest_bucket_key = dest_bucket_key
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/s3_delete_objects.py
@@ -69,12 +69,12 @@ def __init__(
     prefix=None,
     aws_conn_id='aws_default',
     verify=None,
-    *args, **kwargs):
+    **kwargs):

     if not bool(keys) ^ bool(prefix):
         raise ValueError("Either keys or prefix should be set.")

-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.bucket = bucket
     self.keys = keys
     self.prefix = prefix
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/s3_file_transform.py
@@ -95,9 +95,9 @@ def __init__(
     dest_aws_conn_id: str = 'aws_default',
     dest_verify: Optional[Union[bool, str]] = None,
     replace: bool = False,
-    *args, **kwargs) -> None:
+    **kwargs) -> None:
     # pylint: disable=too-many-arguments
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.source_s3_key = source_s3_key
     self.source_aws_conn_id = source_aws_conn_id
     self.source_verify = source_verify
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/operators/s3_list.py
@@ -75,9 +75,8 @@ def __init__(self,
     delimiter='',
     aws_conn_id='aws_default',
     verify=None,
-    *args,
     **kwargs):
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.bucket = bucket
     self.prefix = prefix
     self.delimiter = delimiter
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/sagemaker_base.py
@@ -44,8 +44,8 @@ class SageMakerBaseOperator(BaseOperator):
 def __init__(self,
     config,
     aws_conn_id='aws_default',
-    *args, **kwargs):
-    super().__init__(*args, **kwargs)
+    **kwargs):
+    super().__init__(**kwargs)

     self.aws_conn_id = aws_conn_id
     self.config = config
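
The SageMaker operators below chain keyword arguments through this base class: each subclass passes config=config explicitly and everything else via **kwargs. A simplified sketch of that chaining, with bodies trimmed; the real definitions are in the files shown in this diff:

from airflow.models import BaseOperator


class SageMakerBaseOperator(BaseOperator):
    def __init__(self, config, aws_conn_id='aws_default', **kwargs):
        super().__init__(**kwargs)  # task_id and friends continue up to BaseOperator
        self.config = config
        self.aws_conn_id = aws_conn_id


class SageMakerModelOperator(SageMakerBaseOperator):
    def __init__(self, config, **kwargs):
        # With *args removed, an explicit keyword mixes safely with **kwargs;
        # there is no positional argument left to collide with config.
        super().__init__(config=config, **kwargs)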
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/sagemaker_endpoint.py
@@ -77,9 +77,9 @@ def __init__(self,
     check_interval=30,
     max_ingestion_time=None,
     operation='create',
-    *args, **kwargs):
+    **kwargs):
     super().__init__(config=config,
-                     *args, **kwargs)
+                     **kwargs)

     self.config = config
     self.wait_for_completion = wait_for_completion
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py
@@ -42,9 +42,9 @@ class SageMakerEndpointConfigOperator(SageMakerBaseOperator):
 @apply_defaults
 def __init__(self,
     config,
-    *args, **kwargs):
+    **kwargs):
     super().__init__(config=config,
-                     *args, **kwargs)
+                     **kwargs)

     self.config = config

4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/sagemaker_model.py
@@ -39,9 +39,9 @@ class SageMakerModelOperator(SageMakerBaseOperator):
 @apply_defaults
 def __init__(self,
     config,
-    *args, **kwargs):
+    **kwargs):
     super().__init__(config=config,
-                     *args, **kwargs)
+                     **kwargs)

     self.config = config

4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/sagemaker_transform.py
@@ -67,9 +67,9 @@ def __init__(self,
     wait_for_completion=True,
     check_interval=30,
     max_ingestion_time=None,
-    *args, **kwargs):
+    **kwargs):
     super().__init__(config=config,
-                     *args, **kwargs)
+                     **kwargs)
     self.config = config
     self.wait_for_completion = wait_for_completion
     self.check_interval = check_interval
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/sagemaker_tuning.py
@@ -60,9 +60,9 @@ def __init__(self,
     wait_for_completion=True,
     check_interval=30,
     max_ingestion_time=None,
-    *args, **kwargs):
+    **kwargs):
     super().__init__(config=config,
-                     *args, **kwargs)
+                     **kwargs)
     self.config = config
     self.wait_for_completion = wait_for_completion
     self.check_interval = check_interval
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/sns.py
@@ -50,8 +50,8 @@ def __init__(
     aws_conn_id='aws_default',
     subject=None,
     message_attributes=None,
-    *args, **kwargs):
-    super().__init__(*args, **kwargs)
+    **kwargs):
+    super().__init__(**kwargs)
     self.target_arn = target_arn
     self.message = message
     self.subject = subject
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/operators/sqs.py
@@ -48,9 +48,8 @@ def __init__(self,
     message_attributes=None,
     delay_seconds=0,
     aws_conn_id='aws_default',
-    *args,
     **kwargs):
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.sqs_queue = sqs_queue
     self.aws_conn_id = aws_conn_id
     self.message_content = message_content
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/step_function_get_execution_output.py
@@ -41,8 +41,8 @@ class StepFunctionGetExecutionOutputOperator(BaseOperator):
     ui_color = '#f9c915'

     @apply_defaults
-    def __init__(self, execution_arn: str, aws_conn_id='aws_default', region_name=None, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+    def __init__(self, execution_arn: str, aws_conn_id='aws_default', region_name=None, **kwargs):
+        super().__init__(**kwargs)
         self.execution_arn = execution_arn
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/step_function_start_execution.py
@@ -51,8 +51,8 @@ class StepFunctionStartExecutionOperator(BaseOperator):
 def __init__(self, state_machine_arn: str, name: Optional[str] = None,
              state_machine_input: Union[dict, str, None] = None,
              aws_conn_id='aws_default', region_name=None,
-             *args, **kwargs):
-    super().__init__(*args, **kwargs)
+             **kwargs):
+    super().__init__(**kwargs)
     self.state_machine_arn = state_machine_arn
     self.name = name
     self.input = state_machine_input
4 changes: 2 additions & 2 deletions airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
@@ -47,8 +47,8 @@ def __init__(self,
     application_file: str,
     namespace: Optional[str] = None,
     kubernetes_conn_id: str = 'kubernetes_default',
-    *args, **kwargs) -> None:
-    super().__init__(*args, **kwargs)
+    **kwargs) -> None:
+    super().__init__(**kwargs)
     self.application_file = application_file
     self.namespace = namespace
     self.kubernetes_conn_id = kubernetes_conn_id
3 changes: 1 addition & 2 deletions airflow/providers/dingding/operators/dingding.py
@@ -52,9 +52,8 @@ def __init__(self,
     message=None,
     at_mobiles=None,
     at_all=False,
-    *args,
     **kwargs):
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.dingding_conn_id = dingding_conn_id
     self.message_type = message_type
     self.message = message
3 changes: 1 addition & 2 deletions airflow/providers/docker/operators/docker.py
@@ -162,10 +162,9 @@ def __init__(
     shm_size: Optional[int] = None,
     tty: Optional[bool] = False,
     cap_add: Optional[Iterable[str]] = None,
-    *args,
     **kwargs) -> None:

-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.api_version = api_version
     self.auto_remove = auto_remove
     self.command = command
3 changes: 1 addition & 2 deletions airflow/providers/docker/operators/docker_swarm.py
@@ -100,9 +100,8 @@ def __init__(
     self,
     image,
     enable_logging=True,
-    *args,
     **kwargs):
-    super().__init__(image=image, *args, **kwargs)
+    super().__init__(image=image, **kwargs)

     self.enable_logging = enable_logging
     self.service = None
4 changes: 2 additions & 2 deletions airflow/providers/exasol/operators/exasol.py
@@ -53,8 +53,8 @@ def __init__(
     autocommit: bool = False,
     parameters: Optional[Mapping] = None,
     schema: Optional[str] = None,
-    *args, **kwargs):
-    super(ExasolOperator, self).__init__(*args, **kwargs)
+    **kwargs):
+    super(ExasolOperator, self).__init__(**kwargs)
     self.exasol_conn_id = exasol_conn_id
     self.sql = sql
     self.autocommit = autocommit
3 changes: 1 addition & 2 deletions airflow/providers/google/ads/operators/ads.py
@@ -69,10 +69,9 @@ def __init__(
     gcp_conn_id: str = "google_cloud_default",
     google_ads_conn_id: str = "google_ads_default",
     gzip: bool = False,
-    *args,
     **kwargs,
 ) -> None:
-    super().__init__(*args, **kwargs)
+    super().__init__(**kwargs)
     self.bucket = bucket
     self.object_name = object_name
     self.gcp_conn_id = gcp_conn_id
