Update pre-commit checks (#15583)
mik-laj committed Apr 29, 2021
1 parent a587dda commit 814e471
Showing 20 changed files with 125 additions and 95 deletions.
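
Most of the diff is mechanical. Six hook versions are bumped in .pre-commit-config.yaml; the remaining files appear to carry the fixes the updated hooks apply on re-run: trailing whitespace stripped inside docstrings (Black normalizes docstrings as of 21.4b0), str.format calls rewritten as f-strings and redundant open() modes dropped (both pyupgrade rewrites), and long `>>` dependency chains rewrapped in explicit parentheses.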
12 changes: 6 additions & 6 deletions .pre-commit-config.yaml
@@ -26,7 +26,7 @@ repos:
- id: identity
- id: check-hooks-apply
- repo: https://github.com/Lucas-C/pre-commit-hooks
-rev: v1.1.9
+rev: v1.1.10
hooks:
- id: forbid-tabs
exclude: ^docs/Makefile$|^clients/gen/go.sh|\.gitmodules$
@@ -151,7 +151,7 @@ repos:
- "--maxlevel"
- "2"
- repo: https://github.com/psf/black
-rev: 20.8b1
+rev: 21.4b2
hooks:
- id: black
args: [--config=./pyproject.toml]
@@ -171,7 +171,7 @@ repos:
args:
- --remove
- repo: https://github.com/asottile/pyupgrade
-rev: v2.10.0
+rev: v2.13.0
hooks:
- id: pyupgrade
args: ["--py36-plus"]
@@ -181,7 +181,7 @@ repos:
- id: rst-backticks
- id: python-no-log-warn
- repo: https://github.com/adrienverge/yamllint
-rev: v1.26.0
+rev: v1.26.1
hooks:
- id: yamllint
name: Check yaml files with yamllint
@@ -190,15 +190,15 @@ repos:
exclude:
^.*init_git_sync\.template\.yaml$|^.*airflow\.template\.yaml$|^chart/(?:templates|files)/.*\.yaml
- repo: https://github.com/timothycrosley/isort
-rev: 5.6.4
+rev: 5.8.0
hooks:
- id: isort
name: Run isort to sort imports
files: \.py$
# To keep consistent with the global isort skip config defined in setup.cfg
exclude: ^build/.*$|^.tox/.*$|^venv/.*$
- repo: https://github.com/pycqa/pydocstyle
-rev: 5.1.1
+rev: 6.0.0
hooks:
- id: pydocstyle
name: Run pydocstyle
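
A minimal sketch of how bumps like these are typically produced and verified locally. This is an assumption: the commit does not record the commands used, but both shown below are standard pre-commit CLI commands.

    # rewrite every rev: in .pre-commit-config.yaml to its latest tag
    pre-commit autoupdate
    # re-run all hooks across the tree so the new autofixes land in one commit
    pre-commit run --all-files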
12 changes: 6 additions & 6 deletions airflow/example_dags/example_task_group_decorator.py
@@ -27,38 +27,38 @@
# Creating Tasks
@task
def task_start():
"""Dummy Task which is First Task of Dag """
"""Dummy Task which is First Task of Dag"""
return '[Task_start]'


@task
def task_1(value):
""" Dummy Task1"""
"""Dummy Task1"""
return f'[ Task1 {value} ]'


@task
def task_2(value):
""" Dummy Task2"""
"""Dummy Task2"""
return f'[ Task2 {value} ]'


@task
def task_3(value):
""" Dummy Task3"""
"""Dummy Task3"""
print(f'[ Task3 {value} ]')


@task
def task_end():
""" Dummy Task which is Last Task of Dag """
"""Dummy Task which is Last Task of Dag"""
print('[ Task_End ]')


# Creating TaskGroups
@task_group
def task_group_function(value):
""" TaskGroup for grouping related Tasks"""
"""TaskGroup for grouping related Tasks"""
return task_3(task_2(task_1(value)))


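The whitespace-only docstring edits above match Black's docstring normalization, introduced in 21.4b0: leading and trailing spaces inside the quotes are stripped. A tiny before/after sketch (hypothetical function name):

    # before black 21.4b2
    def example():
        """A dummy docstring with a trailing space """

    # after: black strips the stray spaces inside the quotes
    def example():
        """A dummy docstring with a trailing space"""
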
@@ -75,7 +75,11 @@ def _remove_sample_data_from_s3():
teardown__task_remove_sample_data_from_s3 = PythonOperator(
python_callable=_remove_sample_data_from_s3, task_id='teardown__remove_sample_data_from_s3'
)
-[setup__task_add_sample_data_to_s3, setup__task_create_table] >> task_transfer_s3_to_redshift >> [
-    teardown__task_drop_table,
-    teardown__task_remove_sample_data_from_s3,
-]
+(
+    [setup__task_add_sample_data_to_s3, setup__task_create_table]
+    >> task_transfer_s3_to_redshift
+    >> [
+        teardown__task_drop_table,
+        teardown__task_remove_sample_data_from_s3,
+    ]
+)
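
Why the parentheses: Python only continues an expression across lines inside brackets, so wrapping the whole dependency chain in ( ... ) lets each operand sit on its own line with a leading >>. A minimal runnable sketch with hypothetical DAG and task ids, assuming Airflow 2.x's DummyOperator:

    from datetime import datetime

    from airflow.models import DAG
    from airflow.operators.dummy import DummyOperator

    with DAG("deps_sketch", start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
        t1, t2, t3, t4, t5 = [DummyOperator(task_id=f"t{i}") for i in range(1, 6)]
        # without the enclosing parentheses this chain could not span multiple lines
        (
            [t1, t2]  # both upstream tasks must finish first
            >> t3
            >> [t4, t5]  # then both downstream tasks become runnable
        )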
12 changes: 8 additions & 4 deletions airflow/providers/apache/beam/example_dags/example_beam.py
@@ -268,10 +268,14 @@
py_system_site_packages=False,
)

-[
-    start_python_pipeline_local_direct_runner,
-    start_python_pipeline_direct_runner,
-] >> start_python_pipeline_local_flink_runner >> start_python_pipeline_local_spark_runner
+(
+    [
+        start_python_pipeline_local_direct_runner,
+        start_python_pipeline_direct_runner,
+    ]
+    >> start_python_pipeline_local_flink_runner
+    >> start_python_pipeline_local_spark_runner
+)


with models.DAG(
25 changes: 16 additions & 9 deletions airflow/providers/dingding/example_dags/example_dingding.py
@@ -205,12 +205,19 @@ def failure_callback(context):
message="",
)

-[
-    text_msg_remind_none,
-    text_msg_remind_specific,
-    text_msg_remind_include_invalid,
-    text_msg_remind_all,
-] >> link_msg >> markdown_msg >> [
-    single_action_card_msg,
-    multi_action_card_msg,
-] >> feed_card_msg >> msg_failure_callback
+(
+    [
+        text_msg_remind_none,
+        text_msg_remind_specific,
+        text_msg_remind_include_invalid,
+        text_msg_remind_all,
+    ]
+    >> link_msg
+    >> markdown_msg
+    >> [
+        single_action_card_msg,
+        multi_action_card_msg,
+    ]
+    >> feed_card_msg
+    >> msg_failure_callback
+)
@@ -205,10 +205,21 @@

create_dataset >> patch_dataset >> update_dataset >> get_dataset >> get_dataset_result >> delete_dataset

-update_dataset >> create_table >> create_view >> create_materialized_view >> update_table >> [
-    get_dataset_tables,
-    delete_view,
-] >> upsert_table >> delete_materialized_view >> delete_table >> delete_dataset
+(
+    update_dataset
+    >> create_table
+    >> create_view
+    >> create_materialized_view
+    >> update_table
+    >> [
+        get_dataset_tables,
+        delete_view,
+    ]
+    >> upsert_table
+    >> delete_materialized_view
+    >> delete_table
+    >> delete_dataset
+)
update_dataset >> create_external_table >> delete_dataset

with models.DAG(
16 changes: 10 additions & 6 deletions airflow/providers/snowflake/example_dags/example_snowflake.py
@@ -123,9 +123,13 @@

# [END howto_operator_snowflake_to_slack]

-snowflake_op_sql_str >> [
-    snowflake_op_with_params,
-    snowflake_op_sql_list,
-    snowflake_op_template_file,
-    copy_into_table,
-] >> slack_report
+(
+    snowflake_op_sql_str
+    >> [
+        snowflake_op_with_params,
+        snowflake_op_sql_list,
+        snowflake_op_template_file,
+        copy_into_table,
+    ]
+    >> slack_report
+)
2 changes: 1 addition & 1 deletion airflow/utils/file.py
@@ -65,7 +65,7 @@ def mkdirs(path, mode):
Path(path).mkdir(mode=mode, parents=True, exist_ok=True)


-ZIP_REGEX = re.compile(r'((.*\.zip){})?(.*)'.format(re.escape(os.sep)))
+ZIP_REGEX = re.compile(fr'((.*\.zip){re.escape(os.sep)})?(.*)')


def correct_maybe_zipped(fileloc):
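
The str.format to f-string rewrite above is what pyupgrade's --py36-plus flag does automatically when the call is simple enough to translate; the fr prefix keeps the raw-string behavior. A small self-checking sketch (hypothetical variable names):

    import os
    import re

    sep = re.escape(os.sep)
    # before: str.format with a positional placeholder
    pattern_old = r'((.*\.zip){})?(.*)'.format(sep)
    # after pyupgrade --py36-plus: the same pattern as a raw f-string
    pattern_new = fr'((.*\.zip){sep})?(.*)'
    assert pattern_old == pattern_new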
4 changes: 2 additions & 2 deletions dev/airflow-license
@@ -75,7 +75,7 @@ if __name__ == "__main__":
for notice in notices:
notice = notice[0]
license = parse_license_file(notice[1])
print("{:<30}|{:<50}||{:<20}||{:<10}".format(notice[1], notice[2][:50], notice[0], license))
print(f"{notice[1]:<30}|{notice[2][:50]:<50}||{notice[0]:<20}||{license:<10}")

file_count = len([name for name in os.listdir("../licenses")])
print("Defined licenses: {} Files found: {}".format(len(notices), file_count))
print(f"Defined licenses: {len(notices)} Files found: {file_count}")
2 changes: 1 addition & 1 deletion dev/provider_packages/prepare_provider_packages.py
@@ -323,7 +323,7 @@ def get_long_description(provider_package_id: str) -> str:
readme_file = os.path.join(package_folder, "README.md")
if not os.path.exists(readme_file):
return ""
-with open(readme_file, encoding='utf-8', mode="r") as file:
+with open(readme_file, encoding='utf-8') as file:
readme_contents = file.read()
copying = True
long_description = ""
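
Dropping mode="r" changes nothing, since text-read is open()'s default; removing redundant open modes is another pyupgrade rewrite. A before/after sketch (hypothetical file name):

    # before: the mode is spelled out
    with open("notes.txt", encoding="utf-8", mode="r") as f:
        first_line = f.readline()

    # after: identical behavior, 'r' is the default
    with open("notes.txt", encoding="utf-8") as f:
        first_line = f.readline()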
2 changes: 1 addition & 1 deletion tests/cli/commands/test_pool_command.py
@@ -114,7 +114,7 @@ def test_pool_import_export(self):
# Export json
pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json']))

-with open('pools_export.json', mode='r') as file:
+with open('pools_export.json') as file:
pool_config_output = json.load(file)
assert pool_config_input == pool_config_output, "Input and output pool files are not same"
os.remove('pools_import.json')
2 changes: 1 addition & 1 deletion tests/cli/commands/test_user_command.py
@@ -230,7 +230,7 @@ def test_cli_export_users(self):
self._import_users_from_file([user1, user2])

users_filename = self._export_users_to_file()
-with open(users_filename, mode='r') as file:
+with open(users_filename) as file:
retrieved_users = json.loads(file.read())
os.remove(users_filename)

4 changes: 2 additions & 2 deletions tests/dags/test_task_view_type_check.py
@@ -37,11 +37,11 @@ class CallableClass:
"""

def __call__(self):
"""A __call__ method """
"""A __call__ method"""


def a_function(_, __):
"""A function with two args """
"""A function with two args"""


partial_function = functools.partial(a_function, arg_x=1)
4 changes: 2 additions & 2 deletions tests/jobs/test_scheduler_job.py
@@ -3437,7 +3437,7 @@ def test_list_py_file_paths(self):
assert detected_files == expected_files

def test_adopt_or_reset_orphaned_tasks_nothing(self):
"""Try with nothing. """
"""Try with nothing."""
self.scheduler_job = SchedulerJob()
session = settings.Session()
assert 0 == self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session)
@@ -3497,7 +3497,7 @@ def test_adopt_or_reset_orphaned_tasks_backfill_dag(self):
session.rollback()

def test_reset_orphaned_tasks_nonexistent_dagrun(self):
"""Make sure a task in an orphaned state is not reset if it has no dagrun. """
"""Make sure a task in an orphaned state is not reset if it has no dagrun."""
dag_id = 'test_reset_orphaned_tasks_nonexistent_dagrun'
dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
task_id = dag_id + '_task'
22 changes: 11 additions & 11 deletions tests/operators/test_sql.py
@@ -395,7 +395,7 @@ def tearDown(self):
session.query(TI).delete()

def test_unsupported_conn_type(self):
"""Check if BranchSQLOperator throws an exception for unsupported connection type """
"""Check if BranchSQLOperator throws an exception for unsupported connection type"""
op = BranchSQLOperator(
task_id="make_choice",
conn_id="redis_default",
@@ -409,7 +409,7 @@ def test_unsupported_conn_type(self):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

def test_invalid_conn(self):
"""Check if BranchSQLOperator throws an exception for invalid connection """
"""Check if BranchSQLOperator throws an exception for invalid connection"""
op = BranchSQLOperator(
task_id="make_choice",
conn_id="invalid_connection",
@@ -423,7 +423,7 @@ def test_invalid_conn(self):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

def test_invalid_follow_task_true(self):
"""Check if BranchSQLOperator throws an exception for invalid connection """
"""Check if BranchSQLOperator throws an exception for invalid connection"""
op = BranchSQLOperator(
task_id="make_choice",
conn_id="invalid_connection",
@@ -437,7 +437,7 @@ def test_invalid_follow_task_true(self):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

def test_invalid_follow_task_false(self):
"""Check if BranchSQLOperator throws an exception for invalid connection """
"""Check if BranchSQLOperator throws an exception for invalid connection"""
op = BranchSQLOperator(
task_id="make_choice",
conn_id="invalid_connection",
@@ -452,7 +452,7 @@ def test_invalid_follow_task_false(self):

@pytest.mark.backend("mysql")
def test_sql_branch_operator_mysql(self):
"""Check if BranchSQLOperator works with backend """
"""Check if BranchSQLOperator works with backend"""
branch_op = BranchSQLOperator(
task_id="make_choice",
conn_id="mysql_default",
@@ -465,7 +465,7 @@ def test_sql_branch_operator_mysql(self):

@pytest.mark.backend("postgres")
def test_sql_branch_operator_postgres(self):
"""Check if BranchSQLOperator works with backend """
"""Check if BranchSQLOperator works with backend"""
branch_op = BranchSQLOperator(
task_id="make_choice",
conn_id="postgres_default",
@@ -478,7 +478,7 @@ def test_sql_branch_operator_postgres(self):

@mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
def test_branch_single_value_with_dag_run(self, mock_get_db_hook):
"""Check BranchSQLOperator branch operation """
"""Check BranchSQLOperator branch operation"""
branch_op = BranchSQLOperator(
task_id="make_choice",
conn_id="mysql_default",
@@ -518,7 +518,7 @@ def test_branch_single_value_with_dag_run(self, mock_get_db_hook):

@mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
def test_branch_true_with_dag_run(self, mock_get_db_hook):
"""Check BranchSQLOperator branch operation """
"""Check BranchSQLOperator branch operation"""
branch_op = BranchSQLOperator(
task_id="make_choice",
conn_id="mysql_default",
@@ -559,7 +559,7 @@ def test_branch_true_with_dag_run(self, mock_get_db_hook):

@mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
def test_branch_false_with_dag_run(self, mock_get_db_hook):
"""Check BranchSQLOperator branch operation """
"""Check BranchSQLOperator branch operation"""
branch_op = BranchSQLOperator(
task_id="make_choice",
conn_id="mysql_default",
@@ -642,7 +642,7 @@ def test_branch_list_with_dag_run(self, mock_get_db_hook):

@mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
def test_invalid_query_result_with_dag_run(self, mock_get_db_hook):
"""Check BranchSQLOperator branch operation """
"""Check BranchSQLOperator branch operation"""
branch_op = BranchSQLOperator(
task_id="make_choice",
conn_id="mysql_default",
@@ -672,7 +672,7 @@ def test_invalid_query_result_with_dag_run(self, mock_get_db_hook):

@mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
def test_with_skip_in_branch_downstream_dependencies(self, mock_get_db_hook):
"""Test SQL Branch with skipping all downstream dependencies """
"""Test SQL Branch with skipping all downstream dependencies"""
branch_op = BranchSQLOperator(
task_id="make_choice",
conn_id="mysql_default",
2 changes: 1 addition & 1 deletion tests/operators/test_weekday.py
@@ -128,7 +128,7 @@ def test_branch_follow_true(self, _, weekday):

@freeze_time("2021-01-25") # Monday
def test_branch_follow_true_with_execution_date(self):
"""Checks if BranchDayOfWeekOperator follows true branch when set use_task_execution_day """
"""Checks if BranchDayOfWeekOperator follows true branch when set use_task_execution_day"""

branch_op = BranchDayOfWeekOperator(
task_id="make_choice",
