Update Docstrings of Modules with Missing Params (#15391)
Added params and descriptions to docstrings to improve documentation of the following modules

 - MySqlHook
 - RedshiftHook
 - CloudSQLHook
 - BigQueryHook
 - OracleHook
 - MongoHook
 - HdfsSensor
 - ZendeskHook
 - ElasticsearchHook
bryce-lewis committed Apr 22, 2021
1 parent 327e79d commit 71c673e
Showing 9 changed files with 80 additions and 4 deletions.
7 changes: 7 additions & 0 deletions airflow/providers/amazon/aws/hooks/redshift.py
@@ -31,6 +31,9 @@ class RedshiftHook(AwsBaseHook):
.. seealso::
:class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
:param aws_conn_id: The Airflow connection used for AWS credentials.
:type aws_conn_id: str
"""

def __init__(self, *args, **kwargs) -> None:
@@ -44,6 +47,10 @@ def cluster_status(self, cluster_identifier: str) -> str:
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
:param skip_final_cluster_snapshot: whether to skip creating a final cluster snapshot before deletion
:type skip_final_cluster_snapshot: bool
:param final_cluster_snapshot_identifier: name of the final cluster snapshot
:type final_cluster_snapshot_identifier: Optional[str]
"""
try:
response = self.get_conn().describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters']
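For context on how the documented parameters surface in practice, a minimal usage sketch (the conn_id and cluster identifier below are placeholders):

```python
from airflow.providers.amazon.aws.hooks.redshift import RedshiftHook

# "aws_default" is Airflow's default AWS conn_id; "my-cluster" is a placeholder.
hook = RedshiftHook(aws_conn_id="aws_default")

# Returns the cluster's status string, e.g. "available".
status = hook.cluster_status(cluster_identifier="my-cluster")
```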
11 changes: 11 additions & 0 deletions airflow/providers/apache/hdfs/sensors/hdfs.py
@@ -32,6 +32,17 @@ class HdfsSensor(BaseSensorOperator):
"""
Waits for a file or folder to land in HDFS
:param filepath: The path to the file or folder to wait for.
:type filepath: str
:param hdfs_conn_id: The Airflow connection used for HDFS credentials.
:type hdfs_conn_id: str
:param ignored_ext: The list of file extensions to ignore.
:type ignored_ext: Optional[List[str]]
:param ignore_copying: Whether to ignore files that are still being copied
    (i.e. whose extensions match ``ignored_ext``).
:type ignore_copying: Optional[bool]
:param file_size: The minimum size, in MB, the file must reach for the sensor to succeed.
:type file_size: Optional[int]
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:HdfsSensor`
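A minimal sketch of the sensor with the newly documented parameters; every value below is a placeholder, not a recommendation:

```python
from airflow.providers.apache.hdfs.sensors.hdfs import HdfsSensor

wait_for_file = HdfsSensor(
    task_id="wait_for_hdfs_file",
    filepath="/data/incoming/part-00000",  # placeholder path
    hdfs_conn_id="hdfs_default",           # Airflow's default HDFS conn_id
    ignored_ext=["_COPYING_"],             # extensions marking in-flight copies
    ignore_copying=True,                   # skip files still being copied
    file_size=1,                           # minimum size in MB
)
```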
9 changes: 8 additions & 1 deletion airflow/providers/elasticsearch/hooks/elasticsearch.py
@@ -25,7 +25,14 @@


class ElasticsearchHook(DbApiHook):
"""Interact with Elasticsearch through the elasticsearch-dbapi."""
"""
Interact with Elasticsearch through the elasticsearch-dbapi.
This hook uses the Elasticsearch conn_id.
:param elasticsearch_conn_id: The Airflow connection used for Elasticsearch credentials.
:type elasticsearch_conn_id: str
"""

conn_name_attr = 'elasticsearch_conn_id'
default_conn_name = 'elasticsearch_default'
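Since the hook is a ``DbApiHook``, it can run SQL against Elasticsearch through elasticsearch-dbapi; a minimal sketch, assuming an index named ``my_index`` exists:

```python
from airflow.providers.elasticsearch.hooks.elasticsearch import ElasticsearchHook

# "elasticsearch_default" is the hook's default conn_id; "my_index" is assumed.
hook = ElasticsearchHook(elasticsearch_conn_id="elasticsearch_default")
rows = hook.get_records("SELECT * FROM my_index LIMIT 10")
```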
22 changes: 21 additions & 1 deletion airflow/providers/google/cloud/hooks/bigquery.py
@@ -65,7 +65,27 @@

# pylint: disable=too-many-public-methods
class BigQueryHook(GoogleBaseHook, DbApiHook):
"""Interact with BigQuery. This hook uses the Google Cloud connection."""
"""
Interact with BigQuery. This hook uses the Google Cloud connection.
:param gcp_conn_id: The Airflow connection used for GCP credentials.
:type gcp_conn_id: Optional[str]
:param delegate_to: The account to impersonate using domain-wide delegation of authority, if any.
:type delegate_to: Optional[str]
:param use_legacy_sql: Whether to use BigQuery's legacy SQL dialect.
:type use_legacy_sql: bool
:param location: The location of the BigQuery resource.
:type location: Optional[str]
:param bigquery_conn_id: (Deprecated) The Airflow connection used for BigQuery credentials;
    use ``gcp_conn_id`` instead.
:type bigquery_conn_id: Optional[str]
:param api_resource_configs: Configuration parameters applied to Google BigQuery jobs.
:type api_resource_configs: Optional[Dict]
:param impersonation_chain: Optional service account to impersonate using short-term
    credentials, or a chained list of accounts required to get the access token of the
    last account in the list, which will be impersonated in the request.
:type impersonation_chain: Optional[Union[str, Sequence[str]]]
:param labels: A dictionary of labels applied to the BigQuery resource.
:type labels: Optional[Dict]
"""

conn_name_attr = 'gcp_conn_id'
default_conn_name = 'google_cloud_default'
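A minimal sketch showing how the documented parameters are passed; the query, location, and labels are illustrative only:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

# "google_cloud_default" is the hook's default conn_id.
hook = BigQueryHook(
    gcp_conn_id="google_cloud_default",
    use_legacy_sql=False,   # standard SQL dialect
    location="US",          # placeholder location
    labels={"env": "dev"},  # placeholder labels
)
df = hook.get_pandas_df("SELECT 1 AS x")
```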
10 changes: 10 additions & 0 deletions airflow/providers/google/cloud/hooks/cloud_sql.py
@@ -74,6 +74,16 @@ class CloudSQLHook(GoogleBaseHook):
All the methods in the hook where project_id is used must be called with
keyword arguments rather than positional.
:param api_version: The version of the Cloud SQL Admin API to use (for example, ``v1beta4``).
:type api_version: str
:param gcp_conn_id: The Airflow connection used for GCP credentials.
:type gcp_conn_id: str
:param delegate_to: The account to impersonate using domain-wide delegation of authority, if any.
:type delegate_to: Optional[str]
:param impersonation_chain: Optional service account to impersonate using short-term
    credentials.
:type impersonation_chain: Optional[str]
"""

conn_name_attr = 'gcp_conn_id'
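A minimal usage sketch; ``v1beta4`` is the Cloud SQL Admin API version, and the project and instance names are placeholders:

```python
from airflow.providers.google.cloud.hooks.cloud_sql import CloudSQLHook

hook = CloudSQLHook(api_version="v1beta4", gcp_conn_id="google_cloud_default")

# Fetch metadata for a (placeholder) Cloud SQL instance.
instance = hook.get_instance(project_id="my-project", instance="my-instance")
```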
4 changes: 4 additions & 0 deletions airflow/providers/mongo/hooks/mongo.py
@@ -28,6 +28,7 @@

class MongoHook(BaseHook):
"""
Interact with Mongo. This hook uses the Mongo conn_id.
A PyMongo wrapper for interacting with a Mongo database.
Mongo Connection Documentation
https://docs.mongodb.com/manual/reference/connection-string/index.html
@@ -38,6 +39,9 @@ class MongoHook(BaseHook):
ex.
{"srv": true, "replicaSet": "test", "ssl": true, "connectTimeoutMS": 30000}
:param conn_id: The Airflow connection used for Mongo credentials.
:type conn_id: str
"""

conn_name_attr = 'conn_id'
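A minimal sketch of the hook with its documented conn_id; the collection name and filter are placeholders:

```python
from airflow.providers.mongo.hooks.mongo import MongoHook

hook = MongoHook(conn_id="mongo_default")  # default Mongo conn_id

# Find documents in a placeholder collection.
docs = hook.find(mongo_collection="my_collection", query={"status": "active"})
```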
5 changes: 5 additions & 0 deletions airflow/providers/mysql/hooks/mysql.py
@@ -43,6 +43,11 @@ class MySqlHook(DbApiHook):
"aws_default" connection to get the temporary token unless you override
in extras.
extras example: ``{"iam":true, "aws_conn_id":"my_aws_conn"}``
:param schema: The MySQL database schema to connect to.
:type schema: Optional[str]
:param connection: The Airflow connection used for MySQL credentials.
:type connection: Optional[Dict]
"""

conn_name_attr = 'mysql_conn_id'
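A minimal sketch using the documented ``schema`` parameter; the schema name is a placeholder:

```python
from airflow.providers.mysql.hooks.mysql import MySqlHook

hook = MySqlHook(mysql_conn_id="mysql_default", schema="my_database")
records = hook.get_records("SELECT 1")
```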
9 changes: 8 additions & 1 deletion airflow/providers/oracle/hooks/oracle.py
@@ -26,7 +26,12 @@


class OracleHook(DbApiHook):
"""Interact with Oracle SQL."""
"""
Interact with Oracle SQL.
:param oracle_conn_id: The Airflow connection used for Oracle credentials.
:type oracle_conn_id: str
"""

conn_name_attr = 'oracle_conn_id'
default_conn_name = 'oracle_default'
@@ -53,6 +58,8 @@ def get_conn(self) -> 'OracleHook':
as in ``{ "dsn":"some.host.address" , "service_name":"some.service.name" }``
see more param detail in
`cx_Oracle.connect <https://cx-oracle.readthedocs.io/en/latest/module.html#cx_Oracle.connect>`_
"""
conn = self.get_connection(
self.oracle_conn_id # type: ignore[attr-defined] # pylint: disable=no-member
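A minimal sketch; ``oracle_default`` is the hook's default conn_id:

```python
from airflow.providers.oracle.hooks.oracle import OracleHook

hook = OracleHook(oracle_conn_id="oracle_default")

# DUAL is Oracle's built-in one-row table, handy for connectivity checks.
rows = hook.get_records("SELECT 1 FROM dual")
```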
7 changes: 6 additions & 1 deletion airflow/providers/zendesk/hooks/zendesk.py
@@ -25,7 +25,12 @@


class ZendeskHook(BaseHook):
"""A hook to talk to Zendesk"""
"""
Interact with Zendesk. This hook uses the Zendesk conn_id.
:param zendesk_conn_id: The Airflow connection used for Zendesk credentials.
:type zendesk_conn_id: str
"""

def __init__(self, zendesk_conn_id: str) -> None:
super().__init__()
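A minimal sketch; the conn_id below is a placeholder rather than a shipped default:

```python
from airflow.providers.zendesk.hooks.zendesk import ZendeskHook

hook = ZendeskHook(zendesk_conn_id="zendesk_default")
conn = hook.get_conn()  # returns an authenticated Zendesk API client
```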
