Fix spellings (#11825)
jbampton committed Oct 26, 2020
1 parent c62a49a commit 8afdb6a
Showing 8 changed files with 10 additions and 10 deletions.
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/operators/datasync.py
@@ -199,7 +199,7 @@ def execute(self, context):
if self.candidate_task_arns:
self.task_arn = self.choose_task(self.candidate_task_arns)

-# If we couldnt find one then try create one
+# If we could not find one then try to create one
if not self.task_arn and self.create_task_kwargs:
self._create_datasync_task()

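The comment fixed above documents the operator's fallback path: if no existing task ARN is chosen, a task is created from create_task_kwargs. A minimal sketch of exercising that path from a DAG, with purely illustrative URIs and kwargs (the class name and arguments reflect this provider version and should be treated as assumptions):

from airflow.providers.amazon.aws.operators.datasync import AWSDataSyncOperator

# Hypothetical task: if no existing DataSync task matches the two location
# URIs, the operator falls through to creating one from create_task_kwargs.
sync_task = AWSDataSyncOperator(
    task_id="datasync_sync",
    source_location_uri="smb://server/share",         # illustrative
    destination_location_uri="s3://example-bucket/",  # illustrative
    create_task_kwargs={"Name": "created-by-airflow"},
)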
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/example_dags/example_dlp.py
@@ -76,7 +76,7 @@

# [START howto_operator_dlp_use_inspect_template]
inspect_content = CloudDLPInspectContentOperator(
task_id="inpsect_content",
task_id="inspect_content",
project_id=GCP_PROJECT,
item=ITEM,
inspect_template_name="{{ task_instance.xcom_pull('create_template', key='return_value')['name'] }}",
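The templated inspect_template_name above pulls the created template's name from a task with id create_template. One plausible shape for that upstream task, sketched purely as an assumption (the template_id and inspect_template contents are invented for illustration):

from airflow.providers.google.cloud.operators.dlp import CloudDLPCreateInspectTemplateOperator

# Hypothetical upstream task whose XCom return value ('name') is read by
# the Jinja expression in inspect_content above.
create_template = CloudDLPCreateInspectTemplateOperator(
    task_id="create_template",
    project_id=GCP_PROJECT,
    template_id="dlp-inspect-template",  # illustrative
    inspect_template={"inspect_config": {"info_types": [{"name": "PHONE_NUMBER"}]}},
)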
4 changes: 2 additions & 2 deletions airflow/providers/google/cloud/example_dags/example_pubsub.py
@@ -126,7 +126,7 @@
# [START howto_operator_gcp_pubsub_pull_message_with_operator]
subscription = "{{ task_instance.xcom_pull('subscribe_task') }}"

-pull_messages_operaator = PubSubPullOperator(
+pull_messages_operator = PubSubPullOperator(
task_id="pull_messages",
ack_messages=True,
project_id=GCP_PROJECT_ID,
@@ -165,7 +165,7 @@
create_topic
>> subscribe_task
>> publish_task
>> pull_messages_operaator
>> pull_messages_operator
>> pull_messages_result
>> unsubscribe_task
>> delete_topic
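The chain above routes the renamed pull_messages_operator into pull_messages_result. With ack_messages=True the pulled messages are acknowledged and their payload is pushed to XCom under the task id pull_messages; one plausible, purely hypothetical body for the result task:

from airflow.operators.bash import BashOperator

# Hypothetical downstream task: read the pulled messages back from XCom.
pull_messages_result = BashOperator(
    task_id="pull_messages_result",
    bash_command="echo {{ task_instance.xcom_pull('pull_messages') }}",
)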
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/hooks/bigquery.py
@@ -697,7 +697,7 @@ def update_table(
:type table_id: str
:param table_resource: Table resource as described in documentation:
https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table
-The table has to contain ``tableReference`` or ``project_id``, ``datset_id`` and ``table_id``
+The table has to contain ``tableReference`` or ``project_id``, ``dataset_id`` and ``table_id``
have to be provided.
:type table_resource: Dict[str, Any]
:param fields: The fields of ``table`` to change, spelled as the Table
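The corrected docstring describes two calling conventions: either table_resource embeds ``tableReference``, or project_id, dataset_id and table_id are passed separately. A minimal sketch of the first form, assuming a BigQueryHook instance and illustrative IDs:

from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")

# tableReference inside the resource identifies the table, so no separate
# project_id/dataset_id/table_id arguments are needed (IDs are illustrative).
hook.update_table(
    table_resource={
        "tableReference": {
            "projectId": "my-project",
            "datasetId": "my_dataset",
            "tableId": "my_table",
        },
        "description": "updated description",
    },
    fields=["description"],
)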
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/hooks/dataproc.py
@@ -526,7 +526,7 @@ def update_cluster( # pylint: disable=too-many-arguments
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.dataproc_v1.types.FieldMask`
:type update_mask: Union[Dict, google.cloud.dataproc_v1.types.FieldMask]
-:param graceful_decommission_timeout: Optional. Timeout for graceful YARN decomissioning. Graceful
+:param graceful_decommission_timeout: Optional. Timeout for graceful YARN decommissioning. Graceful
decommissioning allows removing nodes from the cluster without interrupting jobs in progress.
Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes
(and potentially interrupting jobs). Default timeout is 0 (for forceful decommission), and the
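As the fixed docstring explains, graceful_decommission_timeout bounds how long YARN waits for in-progress work before nodes are forcefully removed. A sketch of passing it to update_cluster, where the keyword names and the dict forms of FieldMask and Duration are assumptions for this provider version:

from airflow.providers.google.cloud.hooks.dataproc import DataprocHook

hook = DataprocHook(gcp_conn_id="google_cloud_default")

# Illustrative call: shrink the secondary-worker pool, allowing up to ten
# minutes of graceful YARN decommissioning before nodes are removed.
hook.update_cluster(
    project_id="my-project",
    location="europe-west1",
    cluster_name="my-cluster",
    cluster={"config": {"secondary_worker_config": {"num_instances": 0}}},
    update_mask={"paths": ["config.secondary_worker_config.num_instances"]},
    graceful_decommission_timeout={"seconds": 600},
)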
4 changes: 2 additions & 2 deletions airflow/providers/google/cloud/operators/dataproc.py
@@ -658,7 +658,7 @@ class DataprocScaleClusterOperator(BaseOperator):
:type num_workers: int
:param num_preemptible_workers: The new number of preemptible workers
:type num_preemptible_workers: int
-:param graceful_decommission_timeout: Timeout for graceful YARN decomissioning.
+:param graceful_decommission_timeout: Timeout for graceful YARN decommissioning.
Maximum value is 1d
:type graceful_decommission_timeout: str
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
@@ -1870,7 +1870,7 @@ class DataprocUpdateClusterOperator(BaseOperator):
new value. If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.dataproc_v1beta2.types.FieldMask`
:type update_mask: Union[Dict, google.cloud.dataproc_v1beta2.types.FieldMask]
-:param graceful_decommission_timeout: Optional. Timeout for graceful YARN decomissioning. Graceful
+:param graceful_decommission_timeout: Optional. Timeout for graceful YARN decommissioning. Graceful
decommissioning allows removing nodes from the cluster without interrupting jobs in progress. Timeout
specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and
potentially interrupting jobs). Default timeout is 0 (for forceful decommission), and the maximum
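For DataprocScaleClusterOperator the timeout is a duration string rather than a Duration message, capped at "1d" per the docstring above. A hedged sketch with illustrative project, region and worker counts:

from airflow.providers.google.cloud.operators.dataproc import DataprocScaleClusterOperator

# Illustrative scaling task: wait up to one hour for running YARN work
# before removing nodes (the maximum permitted value is "1d").
scale_cluster = DataprocScaleClusterOperator(
    task_id="scale_cluster",
    project_id="my-project",
    region="europe-west1",
    cluster_name="my-cluster",
    num_workers=2,
    num_preemptible_workers=2,
    graceful_decommission_timeout="1h",
)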
@@ -119,7 +119,7 @@ def provide_gcp_connection(
connection. It build a new connection that includes path to provided service json,
required scopes and project id.
-:param key_file_path: Path to file with Gooogle Cloud Service Account .json file.
+:param key_file_path: Path to file with Google Cloud Service Account .json file.
:type key_file_path: str
:param scopes: OAuth scopes for the connection
:type scopes: Sequence
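Per the docstring above, provide_gcp_connection is a context manager that builds a temporary Google Cloud connection from the key file, scopes and project id. A usage sketch; the import path and the key-file location are assumptions:

from airflow.providers.google.cloud.utils.credentials_provider import provide_gcp_connection

# Hypothetical usage: inside the block, the default Google Cloud connection
# is rebuilt from the given service-account key, scopes and project id.
with provide_gcp_connection(
    key_file_path="/files/service-account.json",  # illustrative path
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
    project_id="my-project",
):
    ...  # code that should see the temporary connection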
@@ -41,7 +41,7 @@
task_id="trigger_job",
job_name="generate-merlin-config",
parameters={"first_parameter": "a_value", "second_parameter": "18"},
# parameters="resources/paremeter.json", You can also pass a path to a json file containing your param
# parameters="resources/parameter.json", You can also pass a path to a json file containing your param
jenkins_connection_id="your_jenkins_connection", # The connection must be configured first
)

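Per the corrected comment, parameters also accepts a path to a JSON file holding the same key/value pairs. A sketch of that variant, with the file path hypothetical:

from airflow.providers.jenkins.operators.jenkins_job_trigger import JenkinsJobTriggerOperator

# Hypothetical alternative to the inline dict above: load the build
# parameters from a JSON file instead.
trigger_job_from_file = JenkinsJobTriggerOperator(
    task_id="trigger_job_from_file",
    job_name="generate-merlin-config",
    parameters="resources/parameter.json",  # illustrative path
    jenkins_connection_id="your_jenkins_connection",
)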
