Add D200 pydocstyle check (#11688)
potix2 committed Oct 20, 2020
1 parent cb7c67d commit 349b081
Showing 421 changed files with 878 additions and 2,574 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -200,7 +200,7 @@ repos:
name: Run pydocstyle
args:
- --convention=pep257
- --add-ignore=D100,D102,D104,D105,D107,D200,D205,D400,D401
- --add-ignore=D100,D102,D104,D105,D107,D205,D400,D401
exclude: ^tests/.*\.py$|^scripts/.*\.py$|^dev|^provider_packages|^kubernetes_tests|.*example_dags/.*
- repo: local
hooks:
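The change above enables pydocstyle's D200 check ("One-line docstring should fit on one line with quotes") by dropping D200 from the --add-ignore list; the file changes that follow simply collapse single-sentence docstrings onto one line to satisfy it. The snippet below is a minimal illustration of that pattern using a hypothetical helper function, not code taken from this commit:

def get_api_base_url() -> str:
    """
    Return the base URL of the Airflow REST API
    """
    # pydocstyle now reports D200 here: the one-line summary is spread
    # across three lines instead of sitting on one line with its quotes.
    return "http://localhost:8080/api/v1"


def get_api_base_url_fixed() -> str:
    """Return the base URL of the Airflow REST API"""
    # Same summary on a single line; D200 passes.
    return "http://localhost:8080/api/v1"

Running pydocstyle --convention=pep257 --add-ignore=D100,D102,D104,D105,D107,D205,D400,D401 against a module containing the first function should flag D200, while the second form passes.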
8 changes: 2 additions & 6 deletions airflow/api/client/__init__.py
@@ -15,9 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
API Client that allows interacting with Airflow API
"""
"""API Client that allows interacting with Airflow API"""
from importlib import import_module
from typing import Any

@@ -27,9 +25,7 @@


def get_current_api_client() -> Client:
"""
Return current API Client based on current Airflow configuration
"""
"""Return current API Client based on current Airflow configuration"""
api_module = import_module(conf.get('cli', 'api_client')) # type: Any
auth_backend = api.load_auth()
session = None
8 changes: 2 additions & 6 deletions airflow/api/common/experimental/get_lineage.py
@@ -15,9 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Lineage apis
"""
"""Lineage apis"""
import datetime
from typing import Any, Dict, List

@@ -29,9 +27,7 @@

@provide_session
def get_lineage(dag_id: str, execution_date: datetime.datetime, session=None) -> Dict[str, Dict[str, Any]]:
"""
Gets the lineage information for dag specified
"""
"""Gets the lineage information for dag specified"""
dag = check_and_get_dag(dag_id)
check_and_get_dagrun(dag, execution_date)

4 changes: 1 addition & 3 deletions airflow/api_connexion/endpoints/config_endpoint.py
@@ -64,9 +64,7 @@ def _config_to_json(config: Config) -> str:

@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG)])
def get_config() -> Response:
"""
Get current configuration.
"""
"""Get current configuration."""
serializer = {
'text/plain': _config_to_text,
'application/json': _config_to_json,
20 changes: 5 additions & 15 deletions airflow/api_connexion/endpoints/connection_endpoint.py
@@ -37,9 +37,7 @@
@security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_CONNECTION)])
@provide_session
def delete_connection(connection_id, session):
"""
Delete a connection entry
"""
"""Delete a connection entry"""
connection = session.query(Connection).filter_by(conn_id=connection_id).one_or_none()
if connection is None:
raise NotFound(
@@ -53,9 +51,7 @@ def delete_connection(connection_id, session):
@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION)])
@provide_session
def get_connection(connection_id, session):
"""
Get a connection entry
"""
"""Get a connection entry"""
connection = session.query(Connection).filter(Connection.conn_id == connection_id).one_or_none()
if connection is None:
raise NotFound(
@@ -69,9 +65,7 @@ def get_connection(connection_id, session):
@format_parameters({'limit': check_limit})
@provide_session
def get_connections(session, limit, offset=0):
"""
Get all connection entries
"""
"""Get all connection entries"""
total_entries = session.query(func.count(Connection.id)).scalar()
query = session.query(Connection)
connections = query.order_by(Connection.id).offset(offset).limit(limit).all()
@@ -83,9 +77,7 @@ def get_connections(session, limit, offset=0):
@security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_CONNECTION)])
@provide_session
def patch_connection(connection_id, session, update_mask=None):
"""
Update a connection entry
"""
"""Update a connection entry"""
try:
data = connection_schema.load(request.json, partial=True)
except ValidationError as err:
@@ -119,9 +111,7 @@ def patch_connection(connection_id, session, update_mask=None):
@security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_CONNECTION)])
@provide_session
def post_connection(session):
"""
Create connection entry
"""
"""Create connection entry"""
body = request.json
try:
data = connection_schema.load(body)
16 changes: 4 additions & 12 deletions airflow/api_connexion/endpoints/dag_endpoint.py
@@ -35,9 +35,7 @@
@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAGS)])
@provide_session
def get_dag(dag_id, session):
"""
Get basic information about a DAG.
"""
"""Get basic information about a DAG."""
dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).one_or_none()

if dag is None:
@@ -48,9 +46,7 @@ def get_dag(dag_id, session):

@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAGS)])
def get_dag_details(dag_id):
"""
Get details of DAG.
"""
"""Get details of DAG."""
dag: DAG = current_app.dag_bag.get_dag(dag_id)
if not dag:
raise NotFound("DAG not found", detail=f"The DAG with dag_id: {dag_id} was not found")
@@ -60,9 +56,7 @@ def get_dag_details(dag_id):
@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAGS)])
@format_parameters({'limit': check_limit})
def get_dags(limit, offset=0):
"""
Get all DAGs.
"""
"""Get all DAGs."""
readable_dags = current_app.appbuilder.sm.get_readable_dags(g.user)
dags = readable_dags.order_by(DagModel.dag_id).offset(offset).limit(limit).all()
total_entries = readable_dags.count()
@@ -73,9 +67,7 @@ def get_dags(limit, offset=0):
@security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAGS)])
@provide_session
def patch_dag(session, dag_id, update_mask=None):
"""
Update the specific DAG
"""
"""Update the specific DAG"""
dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).one_or_none()
if not dag:
raise NotFound(f"Dag with id: '{dag_id}' not found")
20 changes: 5 additions & 15 deletions airflow/api_connexion/endpoints/dag_run_endpoint.py
@@ -41,9 +41,7 @@
)
@provide_session
def delete_dag_run(dag_id, dag_run_id, session):
"""
Delete a DAG Run
"""
"""Delete a DAG Run"""
if session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id).delete() == 0:
raise NotFound(detail=f"DAGRun with DAG ID: '{dag_id}' and DagRun ID: '{dag_run_id}' not found")
return NoContent, 204
@@ -57,9 +55,7 @@ def delete_dag_run(dag_id, dag_run_id, session):
)
@provide_session
def get_dag_run(dag_id, dag_run_id, session):
"""
Get a DAG Run.
"""
"""Get a DAG Run."""
dag_run = session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id).one_or_none()
if dag_run is None:
raise NotFound(
@@ -99,9 +95,7 @@ def get_dag_runs(
offset=None,
limit=None,
):
"""
Get all DAG Runs.
"""
"""Get all DAG Runs."""
query = session.query(DagRun)

# This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
@@ -181,9 +175,7 @@ def _apply_date_filters_to_query(
)
@provide_session
def get_dag_runs_batch(session):
"""
Get list of DAG Runs
"""
"""Get list of DAG Runs"""
body = request.get_json()
try:
data = dagruns_batch_form_schema.load(body)
@@ -222,9 +214,7 @@ def get_dag_runs_batch(session):
)
@provide_session
def post_dag_run(dag_id, session):
"""
Trigger a DAG.
"""
"""Trigger a DAG."""
if not session.query(DagModel).filter(DagModel.dag_id == dag_id).first():
raise NotFound(title="DAG not found", detail=f"DAG with dag_id: '{dag_id}' not found")

4 changes: 1 addition & 3 deletions airflow/api_connexion/endpoints/dag_source_endpoint.py
@@ -31,9 +31,7 @@

@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE)])
def get_dag_source(file_token: str):
"""
Get source code using file token
"""
"""Get source code using file token"""
secret_key = current_app.config["SECRET_KEY"]
auth_s = URLSafeSerializer(secret_key)
try:
8 changes: 2 additions & 6 deletions airflow/api_connexion/endpoints/event_log_endpoint.py
@@ -34,9 +34,7 @@
@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_LOG)])
@provide_session
def get_event_log(event_log_id, session):
"""
Get a log entry
"""
"""Get a log entry"""
event_log = session.query(Log).filter(Log.id == event_log_id).one_or_none()
if event_log is None:
raise NotFound("Event Log not found")
@@ -47,9 +45,7 @@ def get_event_log(event_log_id, session):
@format_parameters({'limit': check_limit})
@provide_session
def get_event_logs(session, limit, offset=None):
"""
Get all log entries from event log
"""
"""Get all log entries from event log"""
total_entries = session.query(func.count(Log.id)).scalar()
event_logs = session.query(Log).order_by(Log.id).offset(offset).limit(limit).all()
return event_log_collection_schema.dump(
4 changes: 1 addition & 3 deletions airflow/api_connexion/endpoints/extra_link_endpoint.py
@@ -37,9 +37,7 @@
)
@provide_session
def get_extra_links(dag_id: str, dag_run_id: str, task_id: str, session):
"""
Get extra links for task instance
"""
"""Get extra links for task instance"""
dagbag: DagBag = current_app.dag_bag
dag: DAG = dagbag.get_dag(dag_id)
if not dag:
4 changes: 1 addition & 3 deletions airflow/api_connexion/endpoints/health_endpoint.py
@@ -22,9 +22,7 @@


def get_health():
"""
Return the health of the airflow scheduler and metadatabase
"""
"""Return the health of the airflow scheduler and metadatabase"""
metadatabase_status = HEALTHY
latest_scheduler_heartbeat = None
scheduler_status = UNHEALTHY
8 changes: 2 additions & 6 deletions airflow/api_connexion/endpoints/import_error_endpoint.py
@@ -33,9 +33,7 @@
@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_IMPORT_ERROR)])
@provide_session
def get_import_error(import_error_id, session):
"""
Get an import error
"""
"""Get an import error"""
error = session.query(ImportError).filter(ImportError.id == import_error_id).one_or_none()

if error is None:
@@ -50,9 +48,7 @@ def get_import_error(import_error_id, session):
@format_parameters({'limit': check_limit})
@provide_session
def get_import_errors(session, limit, offset=None):
"""
Get all import errors
"""
"""Get all import errors"""
total_entries = session.query(func.count(ImportError.id)).scalar()
import_errors = session.query(ImportError).order_by(ImportError.id).offset(offset).limit(limit).all()
return import_error_collection_schema.dump(
4 changes: 1 addition & 3 deletions airflow/api_connexion/endpoints/log_endpoint.py
@@ -37,9 +37,7 @@
)
@provide_session
def get_log(session, dag_id, dag_run_id, task_id, task_try_number, full_content=False, token=None):
"""
Get logs for specific task instance
"""
"""Get logs for specific task instance"""
key = current_app.config["SECRET_KEY"]
if not token:
metadata = {}
20 changes: 5 additions & 15 deletions airflow/api_connexion/endpoints/pool_endpoint.py
@@ -31,9 +31,7 @@
@security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_POOL)])
@provide_session
def delete_pool(pool_name: str, session):
"""
Delete a pool
"""
"""Delete a pool"""
if pool_name == "default_pool":
raise BadRequest(detail="Default Pool can't be deleted")
elif session.query(Pool).filter(Pool.pool == pool_name).delete() == 0:
@@ -45,9 +43,7 @@ def delete_pool(pool_name: str, session):
@security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_POOL)])
@provide_session
def get_pool(pool_name, session):
"""
Get a pool
"""
"""Get a pool"""
obj = session.query(Pool).filter(Pool.pool == pool_name).one_or_none()
if obj is None:
raise NotFound(detail=f"Pool with name:'{pool_name}' not found")
@@ -58,9 +54,7 @@ def get_pool(pool_name, session):
@format_parameters({'limit': check_limit})
@provide_session
def get_pools(session, limit, offset=None):
"""
Get all pools
"""
"""Get all pools"""
total_entries = session.query(func.count(Pool.id)).scalar()
pools = session.query(Pool).order_by(Pool.id).offset(offset).limit(limit).all()
return pool_collection_schema.dump(PoolCollection(pools=pools, total_entries=total_entries))
@@ -69,9 +63,7 @@ def get_pools(session, limit, offset=None):
@security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_POOL)])
@provide_session
def patch_pool(pool_name, session, update_mask=None):
"""
Update a pool
"""
"""Update a pool"""
# Only slots can be modified in 'default_pool'
try:
if pool_name == Pool.DEFAULT_POOL_NAME and request.json["name"] != Pool.DEFAULT_POOL_NAME:
@@ -120,9 +112,7 @@ def patch_pool(pool_name, session, update_mask=None):
@security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_POOL)])
@provide_session
def post_pool(session):
"""
Create a pool
"""
"""Create a pool"""
required_fields = ["name", "slots"] # Pool would require both fields in the post request
for field in required_fields:
if field not in request.json.keys():
8 changes: 2 additions & 6 deletions airflow/api_connexion/endpoints/task_endpoint.py
@@ -31,9 +31,7 @@
]
)
def get_task(dag_id, task_id):
"""
Get simplified representation of a task.
"""
"""Get simplified representation of a task."""
dag: DAG = current_app.dag_bag.get_dag(dag_id)
if not dag:
raise NotFound("DAG not found")
@@ -52,9 +50,7 @@ def get_task(dag_id, task_id):
]
)
def get_tasks(dag_id):
"""
Get tasks for DAG
"""
"""Get tasks for DAG"""
dag: DAG = current_app.dag_bag.get_dag(dag_id)
if not dag:
raise NotFound("DAG not found")
