Skip to content

Commit

Permalink
Avoid using too-broad noqa (#37862)
Browse files Browse the repository at this point in the history
  • Loading branch information
Taragolis committed Mar 4, 2024
1 parent 947a6d3 commit 30f7b2a
Show file tree
Hide file tree
Showing 31 changed files with 56 additions and 59 deletions.
2 changes: 1 addition & 1 deletion airflow/api/common/experimental/delete_dag.py
Expand Up @@ -20,7 +20,7 @@

import warnings

from airflow.api.common.delete_dag import * # noqa
from airflow.api.common.delete_dag import * # noqa: F403

warnings.warn(
"This module is deprecated. Please use `airflow.api.common.delete_dag` instead.",
Expand Down
2 changes: 1 addition & 1 deletion airflow/api/common/experimental/mark_tasks.py
Expand Up @@ -20,7 +20,7 @@

import warnings

from airflow.api.common.mark_tasks import ( # noqa
from airflow.api.common.mark_tasks import ( # noqa: F401
_create_dagruns,
set_dag_run_state_to_failed,
set_dag_run_state_to_running,
Expand Down
2 changes: 1 addition & 1 deletion airflow/api/common/experimental/trigger_dag.py
Expand Up @@ -20,7 +20,7 @@

import warnings

from airflow.api.common.trigger_dag import * # noqa
from airflow.api.common.trigger_dag import * # noqa: F403

warnings.warn(
"This module is deprecated. Please use `airflow.api.common.trigger_dag` instead.",
Expand Down
6 changes: 3 additions & 3 deletions airflow/hooks/dbapi.py
Expand Up @@ -21,9 +21,9 @@
import warnings

from airflow.exceptions import RemovedInAirflow3Warning
from airflow.providers.common.sql.hooks.sql import (
ConnectorProtocol, # noqa
DbApiHook, # noqa
from airflow.providers.common.sql.hooks.sql import ( # noqa: F401
ConnectorProtocol,
DbApiHook,
)

warnings.warn(
Expand Down
12 changes: 6 additions & 6 deletions airflow/macros/__init__.py
Expand Up @@ -17,16 +17,16 @@
# under the License.
from __future__ import annotations

import json # noqa
import time # noqa
import uuid # noqa
import json # noqa: F401
import time # noqa: F401
import uuid # noqa: F401
from datetime import datetime, timedelta
from random import random # noqa
from random import random # noqa: F401
from typing import TYPE_CHECKING, Any

import dateutil # noqa
import dateutil # noqa: F401

import airflow.utils.yaml as yaml # noqa
import airflow.utils.yaml as yaml # noqa: F401
from airflow.utils.deprecation_tools import add_deprecated_classes

if TYPE_CHECKING:
Expand Down
2 changes: 1 addition & 1 deletion airflow/models/dagparam.py
Expand Up @@ -20,7 +20,7 @@
import warnings

from airflow.exceptions import RemovedInAirflow3Warning
from airflow.models.param import DagParam # noqa
from airflow.models.param import DagParam # noqa: F401

warnings.warn(
"This module is deprecated. Please use `airflow.models.param`.",
Expand Down
2 changes: 1 addition & 1 deletion airflow/models/dagrun.py
Expand Up @@ -1421,7 +1421,7 @@ def get_run(session: Session, dag_id: str, execution_date: datetime) -> DagRun |
return session.scalar(
select(DagRun).where(
DagRun.dag_id == dag_id,
DagRun.external_trigger == False, # noqa
DagRun.external_trigger == False, # noqa: E712
DagRun.execution_date == execution_date,
)
)
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/appflow.py
Expand Up @@ -22,7 +22,7 @@
from airflow.providers.amazon.aws.utils.waiter_with_logging import wait

if TYPE_CHECKING:
from mypy_boto3_appflow.client import AppflowClient # noqa
from mypy_boto3_appflow.client import AppflowClient # noqa: F401


class AppflowHook(AwsGenericHook["AppflowClient"]):
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/rds.py
Expand Up @@ -26,7 +26,7 @@
from airflow.providers.amazon.aws.utils.waiter_with_logging import wait

if TYPE_CHECKING:
from mypy_boto3_rds import RDSClient # noqa
from mypy_boto3_rds import RDSClient # noqa: F401


class RdsHook(AwsGenericHook["RDSClient"]):
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/redshift_data.py
Expand Up @@ -25,7 +25,7 @@
from airflow.providers.amazon.aws.utils import trim_none_values

if TYPE_CHECKING:
from mypy_boto3_redshift_data import RedshiftDataAPIServiceClient # noqa
from mypy_boto3_redshift_data import RedshiftDataAPIServiceClient # noqa: F401
from mypy_boto3_redshift_data.type_defs import DescribeStatementResponseTypeDef

FINISHED_STATE = "FINISHED"
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/verified_permissions.py
Expand Up @@ -21,7 +21,7 @@
from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook

if TYPE_CHECKING:
from mypy_boto3_verifiedpermissions.client import VerifiedPermissionsClient # noqa
from mypy_boto3_verifiedpermissions.client import VerifiedPermissionsClient # noqa: F401


class VerifiedPermissionsHook(AwsGenericHook["VerifiedPermissionsClient"]):
Expand Down
Expand Up @@ -21,7 +21,7 @@
import warnings

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.cncf.kubernetes.operators.pod import * # noqa
from airflow.providers.cncf.kubernetes.operators.pod import * # noqa: F403

warnings.warn(
"This module is deprecated. Please use `airflow.providers.cncf.kubernetes.operators.pod` instead.",
Expand Down
Expand Up @@ -21,7 +21,7 @@
import warnings

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.cncf.kubernetes.triggers.pod import * # noqa
from airflow.providers.cncf.kubernetes.triggers.pod import * # noqa: F403

warnings.warn(
"This module is deprecated. Please use `airflow.providers.cncf.kubernetes.triggers.pod` instead.",
Expand Down
Expand Up @@ -1249,8 +1249,8 @@ def clean_perms(self) -> None:
sesh = self.appbuilder.get_session
perms = sesh.query(Permission).filter(
or_(
Permission.action == None, # noqa
Permission.resource == None, # noqa
Permission.action == None, # noqa: E711
Permission.resource == None, # noqa: E711
)
)
# Since FAB doesn't define ON DELETE CASCADE on these tables, we need
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/hooks/bigquery.py
Expand Up @@ -49,7 +49,7 @@
from google.cloud.exceptions import NotFound
from googleapiclient.discovery import Resource, build
from pandas_gbq import read_gbq
from pandas_gbq.gbq import GbqConnector # noqa
from pandas_gbq.gbq import GbqConnector # noqa: F401 Used in ``airflow.contrib.hooks.bigquery``
from requests import Session
from sqlalchemy import create_engine

Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/slack/notifications/slack_notifier.py
Expand Up @@ -20,7 +20,7 @@
import warnings

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.slack.notifications.slack import SlackNotifier # noqa
from airflow.providers.slack.notifications.slack import SlackNotifier # noqa: F401

warnings.warn(
"This module is deprecated. Please use `airflow.providers.slack.notifications.slack`",
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/snowflake/operators/snowflake.py
Expand Up @@ -449,7 +449,7 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
When executing the statement, Snowflake replaces placeholders (? and :name) in
the statement with these specified values.
:param deferrable: Run operator in the deferrable mode.
""" # noqa
""" # noqa: D205, D400

LIFETIME = timedelta(minutes=59) # The tokens will have a 59 minutes lifetime
RENEWAL_DELTA = timedelta(minutes=54) # Tokens will be renewed after 54 minutes
Expand Down
4 changes: 2 additions & 2 deletions airflow/serialization/pydantic/dag.py
Expand Up @@ -125,8 +125,8 @@ class DagModelPydantic(BaseModelPydantic):
default_view: Optional[str]
schedule_interval: Optional[PydanticInterval]
timetable_description: Optional[str]
tags: List[DagTagPydantic] # noqa
dag_owner_links: List[DagOwnerAttributesPydantic] # noqa
tags: List[DagTagPydantic] # noqa: UP006
dag_owner_links: List[DagOwnerAttributesPydantic] # noqa: UP006
parent_dag: Optional[PydanticDag]

max_active_tasks: int
Expand Down
2 changes: 1 addition & 1 deletion airflow/serialization/pydantic/dag_run.py
Expand Up @@ -53,7 +53,7 @@ class DagRunPydantic(BaseModelPydantic):
dag_hash: Optional[str]
updated_at: Optional[datetime]
dag: Optional[PydanticDag]
consumed_dataset_events: List[DatasetEventPydantic] # noqa
consumed_dataset_events: List[DatasetEventPydantic] # noqa: UP006
log_template_id: Optional[int]

model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True)
Expand Down
2 changes: 1 addition & 1 deletion airflow/settings.py
Expand Up @@ -32,7 +32,7 @@
from sqlalchemy.pool import NullPool

from airflow import policies
from airflow.configuration import AIRFLOW_HOME, WEBSERVER_CONFIG, conf # NOQA F401
from airflow.configuration import AIRFLOW_HOME, WEBSERVER_CONFIG, conf # noqa: F401
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.executors import executor_constants
from airflow.logging_config import configure_logging
Expand Down
2 changes: 1 addition & 1 deletion airflow/task/task_runner/base_task_runner.py
Expand Up @@ -27,7 +27,7 @@

if not IS_WINDOWS:
# ignored to avoid flake complaining on Linux
from pwd import getpwnam # noqa
from pwd import getpwnam # noqa: F401

from typing import TYPE_CHECKING

Expand Down
14 changes: 5 additions & 9 deletions airflow/utils/pydantic.py
Expand Up @@ -40,26 +40,22 @@ def is_pydantic_2_installed() -> bool:
from pydantic import BaseModel, ConfigDict, PlainSerializer, PlainValidator, ValidationInfo
else:

class BaseModel: # type: ignore[no-redef] # noqa
class BaseModel: # type: ignore[no-redef] # noqa: D101
def __init__(self, *args, **kwargs):
pass

class ConfigDict: # type: ignore[no-redef] # noqa
class ConfigDict: # type: ignore[no-redef] # noqa: D101
def __init__(self, *args, **kwargs):
pass

class PlainSerializer: # type: ignore[no-redef] # noqa
class PlainSerializer: # type: ignore[no-redef] # noqa: D101
def __init__(self, *args, **kwargs):
pass

class PlainSerializer: # type: ignore[no-redef] # noqa
class PlainValidator: # type: ignore[no-redef] # noqa: D101
def __init__(self, *args, **kwargs):
pass

class PlainValidator: # type: ignore[no-redef] # noqa
def __init__(self, *args, **kwargs):
pass

class ValidationInfo: # type: ignore[no-redef] # noqa
class ValidationInfo: # type: ignore[no-redef] # noqa: D101
def __init__(self, *args, **kwargs):
pass
2 changes: 1 addition & 1 deletion airflow/utils/yaml.py
Expand Up @@ -30,7 +30,7 @@
from typing import TYPE_CHECKING, Any, BinaryIO, TextIO, cast

if TYPE_CHECKING:
from yaml.error import MarkedYAMLError, YAMLError # noqa
from yaml.error import MarkedYAMLError, YAMLError # noqa: F401


def safe_load(stream: bytes | str | BinaryIO | TextIO) -> Any:
Expand Down
26 changes: 13 additions & 13 deletions dev/breeze/src/airflow_breeze/breeze.py
Expand Up @@ -23,24 +23,24 @@
find_airflow_sources_root_to_operate_on,
)

from airflow_breeze.configure_rich_click import click # isort: skip # noqa
from airflow_breeze.configure_rich_click import click # isort: skip # noqa: F401

find_airflow_sources_root_to_operate_on()
create_directories_and_files()


from airflow_breeze.commands import developer_commands # noqa
from airflow_breeze.commands.ci_commands import ci_group # noqa
from airflow_breeze.commands.ci_image_commands import ci_image # noqa
from airflow_breeze.commands.kubernetes_commands import kubernetes_group # noqa
from airflow_breeze.commands.production_image_commands import prod_image # noqa
from airflow_breeze.commands.release_management_commands import release_management # noqa
from airflow_breeze.commands.minor_release_command import create_minor_version_branch # noqa
from airflow_breeze.commands.release_command import airflow_release # noqa
from airflow_breeze.commands.release_candidate_command import release_management # noqa
from airflow_breeze.commands.sbom_commands import sbom # noqa
from airflow_breeze.commands.setup_commands import setup # noqa
from airflow_breeze.commands.testing_commands import group_for_testing # noqa
from airflow_breeze.commands import developer_commands # noqa: I001, E402, F401
from airflow_breeze.commands.ci_commands import ci_group # noqa: E402
from airflow_breeze.commands.ci_image_commands import ci_image # noqa: E402
from airflow_breeze.commands.kubernetes_commands import kubernetes_group # noqa: E402
from airflow_breeze.commands.production_image_commands import prod_image # noqa: E402
from airflow_breeze.commands.release_management_commands import release_management # noqa: E402
from airflow_breeze.commands.minor_release_command import create_minor_version_branch # noqa: E402, F401
from airflow_breeze.commands.release_command import airflow_release # noqa: E402, F401
from airflow_breeze.commands.release_candidate_command import release_management # noqa: E402, F811
from airflow_breeze.commands.sbom_commands import sbom # noqa: E402
from airflow_breeze.commands.setup_commands import setup # noqa: E402
from airflow_breeze.commands.testing_commands import group_for_testing # noqa: E402

main.add_command(group_for_testing)
main.add_command(kubernetes_group)
Expand Down
2 changes: 1 addition & 1 deletion dev/breeze/src/airflow_breeze/configure_rich_click.py
Expand Up @@ -18,7 +18,7 @@

from airflow_breeze.commands.sbom_commands_config import SBOM_COMMANDS, SBOM_PARAMETERS

from airflow_breeze.utils import recording # isort:skip # noqa
from airflow_breeze.utils import recording # isort:skip # noqa: F401

try:
# We handle ImportError so that click autocomplete works
Expand Down
2 changes: 1 addition & 1 deletion dev/breeze/src/airflow_breeze/utils/click_utils.py
Expand Up @@ -19,4 +19,4 @@
try:
from rich_click import RichGroup as BreezeGroup
except ImportError:
from click import Group as BreezeGroup # type: ignore[assignment] # noqa
from click import Group as BreezeGroup # type: ignore[assignment] # noqa: F401
1 change: 1 addition & 0 deletions pyproject.toml
Expand Up @@ -1330,6 +1330,7 @@ extend-select = [
"D419",
"TID251", # Specific modules or module members that may not be imported or accessed
"TID253", # Ban certain modules from being imported at module level
"PGH004", # Use specific rule codes when using noqa
"B006", # Checks for uses of mutable objects as function argument defaults.
]
ignore = [
Expand Down
2 changes: 1 addition & 1 deletion tests/dags/subdir1/test_ignore_this.py
Expand Up @@ -18,6 +18,6 @@
from __future__ import annotations

# needed to work against airflow "safe mode" parsing
from airflow.models import DAG # noqa
from airflow.models import DAG # noqa: F401

raise Exception("This dag file should have been ignored!")
2 changes: 1 addition & 1 deletion tests/models/test_dagbag.py
Expand Up @@ -163,7 +163,7 @@ def create_dag():
def my_flow():
pass

my_dag = my_flow() # noqa
my_dag = my_flow() # noqa: F841

source_lines = [line[12:] for line in inspect.getsource(create_dag).splitlines(keepends=True)[1:]]
path1 = tmp_path / "testfile1"
Expand Down
2 changes: 1 addition & 1 deletion tests/providers/google/cloud/hooks/test_automl.py
Expand Up @@ -71,7 +71,7 @@ def test_get_conn(self, mock_automl_client):

@mock.patch("airflow.providers.google.cloud.hooks.automl.PredictionServiceClient")
def test_prediction_client(self, mock_prediction_client):
client = self.hook.prediction_client # noqa
client = self.hook.prediction_client # noqa: F841
mock_prediction_client.assert_called_once_with(credentials=CREDENTIALS, client_info=CLIENT_INFO)

@mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.create_model")
Expand Down
2 changes: 1 addition & 1 deletion tests/test_utils/perf/perf_kit/memory.py
Expand Up @@ -83,4 +83,4 @@ def trace_memory(human_readable=True, gc_collect=False):
# Example:

with trace_memory():
import airflow # noqa
import airflow # noqa: F401

0 comments on commit 30f7b2a

Please sign in to comment.