Remove all "noinspection" comments native to IntelliJ (#10525)
We have already fixed a lot of the problems that were marked
with those comments, and IntelliJ has also gotten a bit smarter
about not flagging false positives and about understanding more
pylint annotations. Wherever a problem remained, we replaced the
comment with a # noqa comment, which IntelliJ also understands well.
potiuk committed Aug 24, 2020
1 parent f2da6b4 commit 2f2d8db
Showing 64 changed files with 62 additions and 171 deletions.
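
The json_client.py and celery_executor.py hunks below show the typical replacement. As a minimal, self-contained sketch of the pattern (the fetch_json helper and the requests call here are illustrative only, not taken from the diff):

import requests


def fetch_json(url: str) -> dict:
    """Return the response body as JSON, or an empty dict on any failure."""
    resp = requests.get(url)
    # Before this commit an IntelliJ-only marker sat on its own line above the block:
    #     # noinspection PyBroadException
    # Now a trailing "# noqa" on the offending line serves the same purpose; it is a
    # flake8 directive and, per the commit message, IntelliJ understands it as well.
    try:
        data = resp.json()
    except Exception:  # noqa pylint: disable=broad-except
        data = {}
    return data
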
1 change: 0 additions & 1 deletion airflow/api/auth/backend/deny_all.py
@@ -35,7 +35,6 @@ def init_app(_):
def requires_authentication(function: T):
"""Decorator for functions that require authentication"""

# noinspection PyUnusedLocal
@wraps(function)
def decorated(*args, **kwargs): # pylint: disable=unused-argument
return Response("Forbidden", 403)
1 change: 0 additions & 1 deletion airflow/api/auth/backend/kerberos_auth.py
@@ -47,7 +47,6 @@
from typing import Callable, Optional, Tuple, TypeVar, Union, cast

import kerberos
# noinspection PyProtectedMember
from flask import Response, _request_ctx_stack as stack, g, make_response, request # type: ignore
from requests.auth import AuthBase
from requests_kerberos import HTTPKerberosAuth
3 changes: 1 addition & 2 deletions airflow/api/client/json_client.py
@@ -34,10 +34,9 @@ def _request(self, url, method='GET', json=None):
resp = getattr(self._session, method.lower())(**params) # pylint: disable=not-callable
if not resp.ok:
# It is justified here because there might be many resp types.
# noinspection PyBroadException
try:
data = resp.json()
except Exception: # pylint: disable=broad-except
except Exception: # noqa pylint: disable=broad-except
data = {}
raise OSError(data.get('error', 'Server error'))

3 changes: 1 addition & 2 deletions airflow/api/common/experimental/delete_dag.py
@@ -52,8 +52,7 @@ def delete_dag(dag_id: str, keep_records_in_log: bool = True, session=None) -> i

count = 0

# noinspection PyUnresolvedReferences,PyProtectedMember
for model in models.base.Base._decl_class_registry.values(): # pylint: disable=protected-access
for model in models.base.Base._decl_class_registry.values(): # noqa pylint: disable=protected-access
if hasattr(model, "dag_id"):
if keep_records_in_log and model.__name__ == 'Log':
continue
7 changes: 3 additions & 4 deletions airflow/executors/celery_executor.py
@@ -237,7 +237,6 @@ def update_all_task_states(self) -> None:

def update_task_state(self, key: TaskInstanceKey, state: str, info: Any) -> None:
"""Updates state of a single task."""
# noinspection PyBroadException
try:
if self.last_state[key] != state:
if state == celery_states.SUCCESS:
@@ -246,16 +245,16 @@ def update_task_state(self, key: TaskInstanceKey, state: str, info: Any) -> None
del self.last_state[key]
elif state == celery_states.FAILURE:
self.fail(key, info)
del self.tasks[key]
del self.tasks[key] # noqa
del self.last_state[key]
elif state == celery_states.REVOKED:
self.fail(key, info)
del self.tasks[key]
del self.tasks[key] # noqa
del self.last_state[key]
else:
self.log.info("Unexpected state: %s", state)
self.last_state[key] = state
except Exception: # pylint: disable=broad-except
except Exception: # noqa pylint: disable=broad-except
self.log.exception("Error syncing the Celery executor, ignoring it.")

def end(self, synchronous: bool = False) -> None:
Expand Down
3 changes: 1 addition & 2 deletions airflow/executors/kubernetes_executor.py
@@ -739,13 +739,12 @@ def clear_not_launched_queued_tasks(self, session=None) -> None:
)

for task in queued_tasks:
# noinspection PyProtectedMember
# pylint: disable=protected-access
dict_string = (
"dag_id={},task_id={},execution_date={},airflow-worker={}".format(
pod_generator.make_safe_label_value(task.dag_id),
pod_generator.make_safe_label_value(task.task_id),
AirflowKubernetesScheduler._datetime_to_label_safe_datestring(
AirflowKubernetesScheduler._datetime_to_label_safe_datestring( # noqa
task.execution_date
),
self.worker_uuid
2 changes: 0 additions & 2 deletions airflow/executors/local_executor.py
@@ -154,7 +154,6 @@ def start(self) -> None:
self.executor.workers_active = 0

# pylint: disable=unused-argument # pragma: no cover
# noinspection PyUnusedLocal
def execute_async(self,
key: TaskInstanceKey,
command: CommandType,
@@ -224,7 +223,6 @@ def start(self) -> None:
for worker in self.executor.workers:
worker.start()

# noinspection PyUnusedLocal
def execute_async(
self,
key: TaskInstanceKey,
16 changes: 5 additions & 11 deletions airflow/models/baseoperator.py
@@ -41,7 +41,6 @@
from airflow.lineage import apply_lineage, prepare_lineage
from airflow.models.base import Operator
from airflow.models.pool import Pool
# noinspection PyPep8Naming
from airflow.models.taskinstance import Context, TaskInstance, clear_task_instances
from airflow.models.xcom import XCOM_RETURN_KEY
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
@@ -321,7 +320,6 @@ class derived from this one results in the creation of a task object,
# Set to True before calling execute method
_lock_for_execution = False

# noinspection PyUnusedLocal
# pylint: disable=too-many-arguments,too-many-locals, too-many-statements
@apply_defaults
def __init__(
@@ -429,8 +427,7 @@ def __init__(
self.retry_delay = retry_delay
else:
self.log.debug("Retry_delay isn't timedelta object, assuming secs")
# noinspection PyTypeChecker
self.retry_delay = timedelta(seconds=retry_delay)
self.retry_delay = timedelta(seconds=retry_delay) # noqa
self.retry_exponential_backoff = retry_exponential_backoff
self.max_retry_delay = max_retry_delay
self.params = params or {} # Available in templates!
@@ -705,7 +702,7 @@ def set_xcomargs_dependencies(self) -> None:
"""
from airflow.models.xcom_arg import XComArg

def apply_set_upstream(arg: Any):
def apply_set_upstream(arg: Any): # noqa
if isinstance(arg, XComArg):
self.set_upstream(arg.operator)
elif isinstance(arg, (tuple, set, list)):
@@ -823,14 +820,12 @@ def __deepcopy__(self, memo):
result = cls.__new__(cls)
memo[id(self)] = result

# noinspection PyProtectedMember
shallow_copy = cls.shallow_copy_attrs + \
cls._base_operator_shallow_copy_attrs # pylint: disable=protected-access

for k, v in self.__dict__.items():
if k not in shallow_copy:
# noinspection PyArgumentList
setattr(result, k, copy.deepcopy(v, memo))
setattr(result, k, copy.deepcopy(v, memo)) # noqa
else:
setattr(result, k, copy.copy(v))
return result
@@ -909,7 +904,7 @@ def render_template( # pylint: disable=too-many-return-statements
if type(content) is not tuple: # pylint: disable=unidiomatic-typecheck
# Special case for named tuples
return content.__class__(
*(self.render_template(element, context, jinja_env) for element in content)
*(self.render_template(element, context, jinja_env) for element in content) # noqa
)
else:
return tuple(self.render_template(element, context, jinja_env) for element in content)
@@ -944,7 +939,7 @@ def _render_nested_template_fields(

def get_template_env(self) -> jinja2.Environment:
"""Fetch a Jinja template environment from the DAG or instantiate empty environment if no DAG."""
return self.dag.get_template_env() if self.has_dag() else jinja2.Environment(cache_size=0)
return self.dag.get_template_env() if self.has_dag() else jinja2.Environment(cache_size=0) # noqa

def prepare_template(self) -> None:
"""
@@ -1181,7 +1176,6 @@ def _set_relatives(self,

# relationships can only be set if the tasks share a single DAG. Tasks
# without a DAG are assigned to that DAG.
# noinspection PyProtectedMember
dags = {
task._dag.dag_id: task._dag # type: ignore # pylint: disable=protected-access
for task in [self] + task_list if task.has_dag()}
3 changes: 1 addition & 2 deletions airflow/models/serialized_dag.py
@@ -147,8 +147,7 @@ def dag(self):
if isinstance(self.data, dict):
dag = SerializedDAG.from_dict(self.data) # type: Any
else:
# noinspection PyTypeChecker
dag = SerializedDAG.from_json(self.data)
dag = SerializedDAG.from_json(self.data) # noqa
return dag

@classmethod
2 changes: 0 additions & 2 deletions airflow/plugins_manager.py
@@ -16,7 +16,6 @@
# specific language governing permissions and limitations
# under the License.
"""Manages all plugins."""
# noinspection PyDeprecation
import importlib
import importlib.machinery
import importlib.util
@@ -193,7 +192,6 @@ def load_plugins_from_plugin_directory():


# pylint: disable=protected-access
# noinspection Mypy,PyTypeHints
def make_module(name: str, objects: List[Any]):
"""Creates new module."""
if not objects:
1 change: 0 additions & 1 deletion airflow/providers/apache/hdfs/hooks/hdfs.py
@@ -34,7 +34,6 @@ class HDFSHookException(AirflowException):
"""Exception specific for HDFS"""


# noinspection PyAbstractClass
class HDFSHook(BaseHook):
"""
Interact with HDFS. This class is a wrapper around the snakebite library.
5 changes: 2 additions & 3 deletions airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -217,7 +217,7 @@ def __init__(self, # pylint: disable=too-many-arguments,too-many-locals
self.node_selectors = node_selectors or {}
self.annotations = annotations or {}
self.affinity = affinity or {}
self.resources = self._set_resources(resources)
self.resources = self._set_resources(resources) # noqa
self.config_file = config_file
self.image_pull_secrets = image_pull_secrets
self.service_account_name = service_account_name
@@ -300,7 +300,7 @@ def execute(self, context) -> Optional[str]:
def handle_pod_overlap(self, labels, try_numbers_match, launcher, pod_list):
"""
In cases where the Scheduler restarts while a KubernetsPodOperator task is running,
In cases where the Scheduler restarts while a KubernetesPodOperator task is running,
this function will either continue to monitor the existing pod or launch a new pod
based on the `reattach_on_restart` parameter.
@@ -393,7 +393,6 @@ def create_new_pod_for_operator(self, labels, launcher) -> Tuple[State, k8s.V1Po
pod=self.full_pod_spec,
).gen_pod()

# noinspection PyTypeChecker
pod = append_to_pod(
pod,
self.pod_runtime_info_envs +
3 changes: 1 addition & 2 deletions airflow/providers/databricks/hooks/databricks.py
@@ -78,8 +78,7 @@ def __repr__(self):
return str(self.__dict__)


# noinspection PyAbstractClass
class DatabricksHook(BaseHook):
class DatabricksHook(BaseHook): # noqa
"""
Interact with Databricks.
3 changes: 1 addition & 2 deletions airflow/providers/docker/operators/docker.py
@@ -325,12 +325,11 @@ def __get_tls_config(self) -> Optional[tls.TLSConfig]:
if self.tls_ca_cert and self.tls_client_cert and self.tls_client_key:
# Ignore type error on SSL version here - it is deprecated and type annotation is wrong
# it should be string
# noinspection PyTypeChecker
tls_config = tls.TLSConfig(
ca_cert=self.tls_ca_cert,
client_cert=(self.tls_client_cert, self.tls_client_key),
verify=True,
ssl_version=self.tls_ssl_version,
ssl_version=self.tls_ssl_version, # noqa
assert_hostname=self.tls_hostname
)
self.docker_url = self.docker_url.replace('tcp://', 'https://')
@@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.

# noinspection LongLine
"""
Example Airflow DAG that creates and performs following operations on Cloud Bigtable:
- creates an Instance
1 change: 0 additions & 1 deletion airflow/providers/google/cloud/hooks/cloud_build.py
@@ -29,7 +29,6 @@
TIME_TO_SLEEP_IN_SECONDS = 5


# noinspection PyAbstractClass
class CloudBuildHook(GoogleBaseHook):
"""
Hook for the Google Cloud Build APIs.
4 changes: 1 addition & 3 deletions airflow/providers/google/cloud/hooks/cloud_sql.py
@@ -70,7 +70,6 @@ class CloudSqlOperationStatus:
UNKNOWN = "UNKNOWN"


# noinspection PyAbstractClass
class CloudSQLHook(GoogleBaseHook):
"""
Hook for Google Cloud SQL APIs.
@@ -680,8 +679,7 @@ def get_socket_path(self) -> str:
CLOUD_SQL_VALID_DATABASE_TYPES = ['postgres', 'mysql']


# noinspection PyAbstractClass
class CloudSQLDatabaseHook(BaseHook):
class CloudSQLDatabaseHook(BaseHook): # noqa
# pylint: disable=too-many-instance-attributes
"""
Serves DB connection configuration for Google Cloud SQL (Connections
@@ -118,7 +118,6 @@ def gen_job_name(job_name: str) -> str:
return f"{job_name}_{uniq}"


# noinspection PyAbstractClass
class CloudDataTransferServiceHook(GoogleBaseHook):
"""
Hook for Google Storage Transfer Service.
17 changes: 7 additions & 10 deletions airflow/providers/google/cloud/hooks/compute.py
@@ -40,7 +40,6 @@ class GceOperationStatus:
DONE = "DONE"


# noinspection PyAbstractClass
class ComputeEngineHook(GoogleBaseHook):
"""
Hook for Google Compute Engine APIs.
@@ -93,7 +92,7 @@ def start_instance(self, zone: str, resource_id: str, project_id: str) -> None:
:type project_id: str
:return: None
"""
response = self.get_conn().instances().start( # pylint: disable=no-member
response = self.get_conn().instances().start( # noqa pylint: disable=no-member
project=project_id,
zone=zone,
instance=resource_id
@@ -124,7 +123,7 @@ def stop_instance(self, zone: str, resource_id: str, project_id: str) -> None:
:type project_id: str
:return: None
"""
response = self.get_conn().instances().stop( # pylint: disable=no-member
response = self.get_conn().instances().stop( # noqa pylint: disable=no-member
project=project_id,
zone=zone,
instance=resource_id
@@ -183,7 +182,7 @@ def _execute_set_machine_type(
body: Dict,
project_id: str
) -> Dict:
return self.get_conn().instances().setMachineType( # pylint: disable=no-member
return self.get_conn().instances().setMachineType( # noqa pylint: disable=no-member
project=project_id, zone=zone, instance=resource_id, body=body)\
.execute(num_retries=self.num_retries)

@@ -203,7 +202,7 @@ def get_instance_template(self, resource_id: str, project_id: str) -> Dict:
https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates
:rtype: dict
"""
response = self.get_conn().instanceTemplates().get( # pylint: disable=no-member
response = self.get_conn().instanceTemplates().get( # noqa pylint: disable=no-member
project=project_id,
instanceTemplate=resource_id
).execute(num_retries=self.num_retries)
@@ -234,7 +233,7 @@ def insert_instance_template(
:type project_id: str
:return: None
"""
response = self.get_conn().instanceTemplates().insert( # pylint: disable=no-member
response = self.get_conn().instanceTemplates().insert( # noqa pylint: disable=no-member
project=project_id,
body=body,
requestId=request_id
@@ -271,7 +270,7 @@ def get_instance_group_manager(
https://cloud.google.com/compute/docs/reference/rest/beta/instanceGroupManagers
:rtype: dict
"""
response = self.get_conn().instanceGroupManagers().get( # pylint: disable=no-member
response = self.get_conn().instanceGroupManagers().get( # noqa pylint: disable=no-member
project=project_id,
zone=zone,
instanceGroupManager=resource_id
@@ -310,7 +309,7 @@ def patch_instance_group_manager(
:type project_id: str
:return: None
"""
response = self.get_conn().instanceGroupManagers().patch( # pylint: disable=no-member
response = self.get_conn().instanceGroupManagers().patch( # noqa pylint: disable=no-member
project=project_id,
zone=zone,
instanceGroupManager=resource_id,
@@ -345,15 +344,13 @@ def _wait_for_operation_to_complete(
service = self.get_conn()
while True:
if zone is None:
# noinspection PyTypeChecker
operation_response = self._check_global_operation_status(
service=service,
operation_name=operation_name,
project_id=project_id,
num_retries=self.num_retries
)
else:
# noinspection PyTypeChecker
operation_response = self._check_zone_operation_status(
service, operation_name, project_id, zone, self.num_retries)
if operation_response.get("status") == GceOperationStatus.DONE:
1 change: 0 additions & 1 deletion airflow/providers/google/cloud/hooks/functions.py
@@ -31,7 +31,6 @@
TIME_TO_SLEEP_IN_SECONDS = 1


# noinspection PyAbstractClass
class CloudFunctionsHook(GoogleBaseHook):
"""
Hook for the Google Cloud Functions APIs.
