Consolidate importing of os.path.* (#34060)
Co-authored-by: Tzu-ping Chung <[email protected]>
eumiro and uranusjr committed Sep 7, 2023
1 parent a61b5e8 commit 9079093
Showing 20 changed files with 47 additions and 56 deletions.
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/hooks/base_aws.py
@@ -32,7 +32,6 @@
import warnings
from copy import deepcopy
from functools import cached_property, wraps
-from os import PathLike
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, Union

@@ -819,7 +818,7 @@ def test_connection(self):
return False, str(f"{type(e).__name__!r} error occurred while testing connection: {e}")

@cached_property
-def waiter_path(self) -> PathLike[str] | None:
+def waiter_path(self) -> os.PathLike[str] | None:
filename = self.client_type if self.client_type else self.resource_type
path = Path(__file__).parents[1].joinpath(f"waiters/{filename}.json").resolve()
return path if path.exists() else None
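Not part of the commit, just a minimal sketch of why the hunk above needs only a plain import os: os.PathLike is subscriptable for typing, and pathlib.Path objects implement os.PathLike[str], so the annotation works without from os import PathLike. The waiter_config name and the JSON filename are illustrative only.

from __future__ import annotations

import os
from pathlib import Path


def waiter_config(name: str) -> os.PathLike[str] | None:
    # Path implements os.PathLike[str], so returning it satisfies the annotation.
    path = Path(__file__).with_name(f"{name}.json")
    return path if path.exists() else None


print(waiter_config("glue"))  # a Path if glue.json sits next to this script, else None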
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/operators/glue.py
@@ -17,7 +17,7 @@
# under the License.
from __future__ import annotations

-import os.path
+import os
import urllib.parse
from functools import cached_property
from typing import TYPE_CHECKING, Sequence
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -19,11 +19,11 @@
from __future__ import annotations

import json
+import os
from copy import copy
from datetime import datetime
from decimal import Decimal
from functools import cached_property
-from os.path import getsize
from tempfile import NamedTemporaryFile
from typing import IO, TYPE_CHECKING, Any, Callable, Sequence
from uuid import uuid4
@@ -197,7 +197,7 @@ def _scan_dynamodb_and_upload_to_s3(self, temp_file: IO, scan_kwargs: dict, tabl
scan_kwargs["ExclusiveStartKey"] = last_evaluated_key

# Upload the file to S3 if reach file size limit
-if getsize(temp_file.name) >= self.file_size:
+if os.path.getsize(temp_file.name) >= self.file_size:
_upload_file_to_s3(temp_file, self.s3_bucket_name, self.s3_key_prefix, self.dest_aws_conn_id)
temp_file.close()

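An illustrative, self-contained sketch (not from the commit) of the size check above; the 1 KiB threshold stands in for self.file_size.

import os
from tempfile import NamedTemporaryFile

FILE_SIZE_LIMIT = 1024  # hypothetical threshold, standing in for self.file_size

with NamedTemporaryFile(mode="wb", delete=False) as temp_file:
    temp_file.write(b"x" * 2048)
    temp_file.flush()  # make sure the bytes are on disk before stat'ing the file
    if os.path.getsize(temp_file.name) >= FILE_SIZE_LIMIT:
        print("size limit reached; the transfer would upload and rotate the file here")

os.unlink(temp_file.name)  # clean up the temporary file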
2 changes: 1 addition & 1 deletion airflow/providers/ftp/hooks/ftp.py
@@ -19,7 +19,7 @@

import datetime
import ftplib
-import os.path
+import os
from typing import Any, Callable

from airflow.hooks.base import BaseHook
(file name not shown)
@@ -40,7 +40,7 @@
import os
import subprocess
from datetime import datetime
-from os.path import expanduser
+from pathlib import Path
from urllib.parse import quote_plus

from airflow import models
@@ -87,17 +87,14 @@

# [START howto_operator_cloudsql_query_connections]

-HOME_DIR = expanduser("~")
+HOME_DIR = Path.home()


def get_absolute_path(path):
"""
Returns absolute path.
"""
-if path.startswith("/"):
-return path
-else:
-return os.path.join(HOME_DIR, path)
+return os.fspath(HOME_DIR / path)


postgres_kwargs = {
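A small sketch (not part of the commit) of the pathlib behaviour the simplified get_absolute_path relies on: joining with an absolute right-hand operand discards the left-hand side, so the explicit startswith("/") branch is no longer needed. The /home/airflow location is invented for the example.

import os
from pathlib import PurePosixPath

home = PurePosixPath("/home/airflow")

# An absolute right-hand operand makes "/" ignore the left-hand side ...
print(os.fspath(home / "/etc/ssl/cert.pem"))  # /etc/ssl/cert.pem
# ... while a relative one is joined under it, as before.
print(os.fspath(home / "certs/client.pem"))   # /home/airflow/certs/client.pem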
1 change: 0 additions & 1 deletion airflow/providers/google/cloud/hooks/cloud_sql.py
@@ -21,7 +21,6 @@
import errno
import json
import os
-import os.path
import platform
import random
import re
3 changes: 1 addition & 2 deletions airflow/providers/google/cloud/hooks/gcs.py
@@ -28,7 +28,6 @@
from contextlib import contextmanager
from functools import partial
from io import BytesIO
-from os import path
from tempfile import NamedTemporaryFile
from typing import IO, TYPE_CHECKING, Any, Callable, Generator, Sequence, TypeVar, cast, overload
from urllib.parse import urlsplit
@@ -1290,7 +1289,7 @@ def _calculate_sync_destination_path(
self, blob: storage.Blob, destination_object: str | None, source_object_prefix_len: int
) -> str:
return (
-path.join(destination_object, blob.name[source_object_prefix_len:])
+os.path.join(destination_object, blob.name[source_object_prefix_len:])
if destination_object
else blob.name[source_object_prefix_len:]
)
(file name not shown)
@@ -21,7 +21,7 @@

import json
import logging
-import os.path
+import os
import tempfile
from contextlib import ExitStack, contextmanager
from typing import Collection, Generator, Sequence
11 changes: 6 additions & 5 deletions airflow/www/extensions/init_views.py
@@ -17,9 +17,10 @@
from __future__ import annotations

import logging
+import os
import warnings
from functools import cached_property
-from os import path
+from pathlib import Path
from typing import TYPE_CHECKING

from connexion import FlaskApi, ProblemException, Resolver
@@ -39,7 +40,7 @@
log = logging.getLogger(__name__)

# airflow/www/extensions/init_views.py => airflow/
-ROOT_APP_DIR = path.abspath(path.join(path.dirname(__file__), path.pardir, path.pardir))
+ROOT_APP_DIR = Path(__file__).parents[2].resolve()


def init_flash_views(app):
@@ -253,15 +254,15 @@ def _handle_method_not_allowed(ex):
else:
return views.method_not_allowed(ex)

-with open(path.join(ROOT_APP_DIR, "api_connexion", "openapi", "v1.yaml")) as f:
+with ROOT_APP_DIR.joinpath("api_connexion", "openapi", "v1.yaml").open() as f:
specification = safe_load(f)
api_bp = FlaskApi(
specification=specification,
resolver=_LazyResolver(),
base_path=base_path,
options={
"swagger_ui": conf.getboolean("webserver", "enable_swagger_ui", fallback=True),
"swagger_path": path.join(ROOT_APP_DIR, "www", "static", "dist", "swagger-ui"),
"swagger_path": os.fspath(ROOT_APP_DIR.joinpath("www", "static", "dist", "swagger-ui")),
},
strict_validation=True,
validate_responses=True,
@@ -279,7 +280,7 @@ def init_api_internal(app: Flask, standalone_api: bool = False) -> None:
if not standalone_api and not conf.getboolean("webserver", "run_internal_api", fallback=False):
return

-with open(path.join(ROOT_APP_DIR, "api_internal", "openapi", "internal_api_v1.yaml")) as f:
+with ROOT_APP_DIR.joinpath("api_internal", "openapi", "internal_api_v1.yaml").open() as f:
specification = safe_load(f)
api_bp = FlaskApi(
specification=specification,
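A quick sketch (not from the commit) of the parents indexing used for ROOT_APP_DIR above; the /opt/airflow prefix is hypothetical.

from pathlib import PurePosixPath

p = PurePosixPath("/opt/airflow/airflow/www/extensions/init_views.py")

# parents[0] is the containing directory, parents[1] its parent, and so on,
# so parents[2] climbs from .../www/extensions back up to the airflow/ package.
print(p.parents[0])  # /opt/airflow/airflow/www/extensions
print(p.parents[1])  # /opt/airflow/airflow/www
print(p.parents[2])  # /opt/airflow/airflow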
5 changes: 2 additions & 3 deletions dev/provider_packages/prepare_provider_packages.py
@@ -37,7 +37,6 @@
from datetime import datetime, timedelta
from enum import Enum
from functools import lru_cache
-from os.path import dirname, relpath
from pathlib import Path
from random import choice
from shutil import copyfile
@@ -261,7 +260,7 @@ def get_target_folder() -> str:
:return: the folder path
"""
-return os.path.abspath(os.path.join(dirname(__file__), os.pardir, os.pardir, "provider_packages"))
+return os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, "provider_packages"))


def get_target_providers_folder() -> str:
@@ -1154,7 +1153,7 @@ def get_provider_jinja_context(
"PIP_REQUIREMENTS_TABLE": pip_requirements_table,
"PIP_REQUIREMENTS_TABLE_RST": pip_requirements_table_rst,
"PROVIDER_INFO": provider_info,
"CHANGELOG_RELATIVE_PATH": relpath(
"CHANGELOG_RELATIVE_PATH": os.path.relpath(
provider_details.source_provider_package_path,
provider_details.documentation_provider_package_path,
),
6 changes: 3 additions & 3 deletions docs/exts/exampleinclude.py
@@ -20,8 +20,8 @@
from __future__ import annotations

"""Nice formatted include for examples"""
+import os
import traceback
-from os import path

from docutils import nodes

@@ -227,8 +227,8 @@ def doctree_read(app, doctree):

for objnode in doctree.traverse(ExampleHeader):
filepath = objnode.get("filename")
-relative_path = path.relpath(
-filepath, path.commonprefix([app.config.exampleinclude_sourceroot, filepath])
+relative_path = os.path.relpath(
+filepath, os.path.commonprefix([app.config.exampleinclude_sourceroot, filepath])
)
modname = relative_path.replace("/", ".")[:-3]
show_button = register_source(app, env, modname)
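Not part of the change itself, a sketch of how the rewritten lines derive a module name; the source root and file path are made up. os.path.commonprefix compares strings character by character, but for a [sourceroot, filepath] pair where filepath lives under sourceroot it simply returns sourceroot.

import os

sourceroot = "/opt/airflow"  # stand-in for app.config.exampleinclude_sourceroot
filepath = "/opt/airflow/airflow/example_dags/example_bash_operator.py"

relative_path = os.path.relpath(
    filepath, os.path.commonprefix([sourceroot, filepath])
)
modname = relative_path.replace("/", ".")[:-3]  # strip the trailing ".py"
print(modname)  # airflow.example_dags.example_bash_operator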
9 changes: 4 additions & 5 deletions scripts/ci/pre_commit/pre_commit_check_order_setup.py
@@ -26,15 +26,14 @@
import re
import sys
import textwrap
-from os.path import abspath, dirname
+from pathlib import Path

from rich import print

errors: list[str] = []

-MY_DIR_PATH = os.path.dirname(__file__)
-SOURCE_DIR_PATH = os.path.abspath(os.path.join(MY_DIR_PATH, os.pardir, os.pardir, os.pardir))
-sys.path.insert(0, SOURCE_DIR_PATH)
+SOURCE_DIR_PATH = Path(__file__).parents[3].resolve()
+sys.path.insert(0, os.fspath(SOURCE_DIR_PATH))


class ConsoleDiff(difflib.Differ):
@@ -124,7 +123,7 @@ def check_install_and_setup_requires() -> None:

from setuptools.config import read_configuration

-path = abspath(os.path.join(dirname(__file__), os.pardir, os.pardir, os.pardir, "setup.cfg"))
+path = os.fspath(SOURCE_DIR_PATH / "setup.cfg")
config = read_configuration(path)

pattern_dependent_version = re.compile("[~|><=;].*")
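A small sketch (not from the commit) of what os.fspath() does in the rewritten lines above: it unwraps a Path into the plain string that consumers such as sys.path expect, and passes str input through unchanged. The /opt/airflow checkout location is hypothetical.

import os
from pathlib import PurePosixPath

source_dir = PurePosixPath("/opt/airflow")  # hypothetical checkout location

print(os.fspath(source_dir))                # /opt/airflow
print(os.fspath(source_dir / "setup.cfg"))  # /opt/airflow/setup.cfg
print(os.fspath("/opt/airflow/setup.cfg"))  # str input passes through unchanged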
(file name not shown)
@@ -24,18 +24,18 @@
import os
import re
import sys
-from os.path import dirname
+from pathlib import Path

from rich import print
from rich.console import Console
from rich.table import Table

-AIRFLOW_SOURCES_DIR = os.path.join(dirname(__file__), os.pardir, os.pardir, os.pardir)
+AIRFLOW_SOURCES_DIR = Path(__file__).parents[3].resolve()
SETUP_PY_FILE = "setup.py"
DOCS_FILE = os.path.join("docs", "apache-airflow", "extra-packages-ref.rst")
PY_IDENTIFIER = r"[a-zA-Z_][a-zA-Z0-9_\.]*"

-sys.path.insert(0, AIRFLOW_SOURCES_DIR)
+sys.path.insert(0, os.fspath(AIRFLOW_SOURCES_DIR))

os.environ["_SKIP_PYTHON_VERSION_CHECK"] = "true"

@@ -49,9 +49,8 @@


def get_file_content(*path_elements: str) -> str:
-file_path = os.path.join(AIRFLOW_SOURCES_DIR, *path_elements)
-with open(file_path) as file_to_read:
-return file_to_read.read()
+file_path = AIRFLOW_SOURCES_DIR.joinpath(*path_elements)
+return file_path.read_text()


def get_extras_from_setup() -> set[str]:
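An illustrative sketch (not from the commit) of the joinpath()/read_text() pair that replaces the explicit open() above; the temporary directory and file exist only for the example.

from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    sources_dir = Path(tmp)  # stand-in for AIRFLOW_SOURCES_DIR
    docs = sources_dir / "docs" / "apache-airflow"
    docs.mkdir(parents=True)
    (docs / "extra-packages-ref.rst").write_text("Extra packages\n")

    def get_file_content(*path_elements: str) -> str:
        # joinpath() accepts any number of segments; read_text() opens,
        # reads and closes the file in a single call.
        return sources_dir.joinpath(*path_elements).read_text()

    print(get_file_content("docs", "apache-airflow", "extra-packages-ref.rst"))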
9 changes: 4 additions & 5 deletions scripts/ci/pre_commit/pre_commit_inline_scripts_in_docker.py
@@ -17,7 +17,6 @@
# under the License.
from __future__ import annotations

-from os import listdir
from pathlib import Path

AIRFLOW_SOURCES_DIR = Path(__file__).parents[3].resolve()
@@ -45,16 +44,16 @@ def insert_content(file_path: Path, content: list[str], header: str, footer: str
SCRIPTS_DOCKER_DIR = AIRFLOW_SOURCES_DIR / "scripts" / "docker"

for file in [DOCKERFILE_FILE, DOCKERFILE_CI_FILE]:
-for script in listdir(SCRIPTS_DOCKER_DIR):
-script_content = (SCRIPTS_DOCKER_DIR / script).read_text().splitlines(keepends=True)
+for script in SCRIPTS_DOCKER_DIR.iterdir():
+script_content = script.read_text().splitlines(keepends=True)
no_comments_script_content = [
line for line in script_content if not line.startswith("#") or line.startswith("#!")
]
-no_comments_script_content.insert(0, f'COPY <<"EOF" /{script}\n')
+no_comments_script_content.insert(0, f'COPY <<"EOF" /{script.name}\n')
insert_content(
file_path=file,
content=no_comments_script_content,
header="# The content below is automatically copied from scripts/docker/",
footer="EOF",
-file_name=script,
+file_name=script.name,
)
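A self-contained sketch (not from the commit) of the iterdir() switch above: os.listdir() yields bare filename strings, while Path.iterdir() yields Path objects, which is why the loop now reads script.read_text() and script.name. The script names below are invented for the example.

from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    scripts_dir = Path(tmp)  # stand-in for SCRIPTS_DOCKER_DIR
    (scripts_dir / "install_airflow.sh").write_text("#!/bin/bash\necho install\n")
    (scripts_dir / "clean_caches.sh").write_text("#!/bin/bash\necho clean\n")

    for script in sorted(scripts_dir.iterdir()):
        # Each item is a Path: it can be read directly, and .name gives the
        # bare filename that listdir() used to return.
        content = script.read_text()
        print(f'COPY <<"EOF" /{script.name}')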
4 changes: 2 additions & 2 deletions scripts/in_container/update_quarantined_test_status.py
@@ -21,7 +21,7 @@
import re
import sys
from datetime import datetime
-from os.path import dirname, join, realpath
+from pathlib import Path
from typing import NamedTuple
from urllib.parse import urlsplit

@@ -234,7 +234,7 @@ def get_table(history_map: dict[str, TestHistory]) -> str:
print()
print(table)
print()
-with open(join(dirname(realpath(__file__)), "quarantine_issue_header.md")) as f:
+with Path(__file__).resolve().with_name("quarantine_issue_header.md").open() as f:
header = jinja2.Template(f.read(), autoescape=True, undefined=StrictUndefined).render(
DATE_UTC_NOW=datetime.utcnow()
)
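A brief sketch (not from the commit) of the with_name() call that replaces the join(dirname(realpath(__file__)), ...) chain above: it swaps the final path component for a sibling filename. The example path is hypothetical.

from pathlib import PurePosixPath

script = PurePosixPath("/opt/airflow/scripts/in_container/update_quarantined_test_status.py")

# with_name() replaces the last component, yielding a sibling file in the
# same directory, the pathlib equivalent of join(dirname(...), "header.md").
print(script.with_name("quarantine_issue_header.md"))
# /opt/airflow/scripts/in_container/quarantine_issue_header.md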
5 changes: 3 additions & 2 deletions setup.py
@@ -30,7 +30,6 @@
import sys
import unittest
from copy import deepcopy
-from os.path import relpath
from pathlib import Path
from textwrap import wrap
from typing import Iterable
@@ -864,7 +863,9 @@ def parse_config_files(self, *args, **kwargs) -> None:
]
provider_yaml_files = glob.glob("airflow/providers/**/provider.yaml", recursive=True)
for provider_yaml_file in provider_yaml_files:
-provider_relative_path = relpath(provider_yaml_file, str(AIRFLOW_SOURCES_ROOT / "airflow"))
+provider_relative_path = os.path.relpath(
+provider_yaml_file, str(AIRFLOW_SOURCES_ROOT / "airflow")
+)
self.package_data["airflow"].append(provider_relative_path)
else:
self.install_requires.extend(
(file name not shown)
@@ -18,7 +18,7 @@
from __future__ import annotations

import json
-from os import environ
+import os

import pytest

@@ -27,7 +27,7 @@
from tests.test_utils.db import clear_db_connections
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest

-TOKEN = environ.get("DATAPREP_TOKEN")
+TOKEN = os.environ.get("DATAPREP_TOKEN")
EXTRA = {"token": TOKEN}


(file name not shown)
@@ -20,8 +20,8 @@
"""
from __future__ import annotations

+import os
from datetime import datetime
-from os import getenv

from airflow import DAG
from airflow.models.baseoperator import chain
@@ -34,9 +34,9 @@

DAG_ID = "example_google_api_sheets_to_s3"

-GOOGLE_SHEET_ID = getenv("GOOGLE_SHEET_ID", "test-google-sheet-id")
-GOOGLE_SHEET_RANGE = getenv("GOOGLE_SHEET_RANGE", "test-google-sheet-range")
-S3_DESTINATION_KEY = getenv("S3_DESTINATION_KEY", "s3://test-bucket/key.json")
+GOOGLE_SHEET_ID = os.getenv("GOOGLE_SHEET_ID", "test-google-sheet-id")
+GOOGLE_SHEET_RANGE = os.getenv("GOOGLE_SHEET_RANGE", "test-google-sheet-range")
+S3_DESTINATION_KEY = os.getenv("S3_DESTINATION_KEY", "s3://test-bucket/key.json")

with DAG(
dag_id=DAG_ID,
