
Commit 57f390e

Merge branch 'main' into wire-in-update-scripts-in-finalize-job
2 parents 7b5a026 + 3347e03 commit 57f390e

10 files changed (+35, -11 lines)


.github/workflows/basic-tests.yml

Lines changed: 1 addition & 1 deletion
@@ -223,7 +223,7 @@ jobs:
       VERBOSE: "false"
       SKIP_BREEZE_PRE_COMMITS: "true"
       SKIP: ${{ inputs.skip-pre-commits }}
-      COLUMNS: "250"
+      COLUMNS: "202"

   test-git-clone-on-windows:
     timeout-minutes: 5
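This same COLUMNS narrowing from 250 to 202 recurs in ci-image-checks.yml, Dockerfile.ci, and entrypoint_ci.sh below. It matters because CI jobs have no real TTY to measure; a minimal sketch, assuming the affected tools (pre-commit, breeze) size their output via the stdlib's shutil.get_terminal_size, which consults the COLUMNS environment variable before querying the OS:

import os
import shutil

# Simulate what the workflow env above exports.
os.environ["COLUMNS"] = "202"

# get_terminal_size() checks COLUMNS first, so in a TTY-less CI job the
# env var fully determines the reported width; the fallback never applies.
print(shutil.get_terminal_size(fallback=(80, 24)).columns)  # -> 202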

.github/workflows/ci-amd.yml

Lines changed: 8 additions & 2 deletions
@@ -248,7 +248,6 @@ jobs:
       contents: read
       packages: write
       id-token: write
-    if: needs.build-info.outputs.canary-run == 'true'
     with:
       runners: ${{ needs.build-info.outputs.amd-runners }}
       platform: "linux/amd64"
@@ -820,12 +819,19 @@ jobs:
           go-version: 1.24
           cache-dependency-path: go-sdk/go.sum

+      # keep this in sync with go.mod in go-sdk/
+      - name: Setup Gotestsum
+        shell: bash
+        run: |
+          go install gotest.tools/gotestsum@ddd0b05a6878e2e8257a2abe6e7df66cebc53d0e # v1.12.3
+          gotestsum --version
+
       - name: "Cleanup dist files"
         run: rm -fv ./dist/*

       - name: Run Go tests
         working-directory: ./go-sdk
-        run: go test -v ./...
+        run: gotestsum --format testname ./...

   tests-airflow-ctl:
     name: "Airflow CTL tests"

.github/workflows/ci-image-checks.yml

Lines changed: 2 additions & 2 deletions
@@ -193,7 +193,7 @@ jobs:
     env:
       VERBOSE: "false"
       SKIP: ${{ inputs.skip-pre-commits }}
-      COLUMNS: "250"
+      COLUMNS: "202"
       SKIP_GROUP_OUTPUT: "true"
       DEFAULT_BRANCH: ${{ inputs.branch }}
       RUFF_FORMAT: "github"
@@ -235,7 +235,7 @@ jobs:
         run: pre-commit run --color always --verbose --hook-stage manual "$MYPY_CHECK" --all-files
         env:
           VERBOSE: "false"
-          COLUMNS: "250"
+          COLUMNS: "202"
           SKIP_GROUP_OUTPUT: "true"
           DEFAULT_BRANCH: ${{ inputs.branch }}
           RUFF_FORMAT: "github"

Dockerfile.ci

Lines changed: 4 additions & 0 deletions
@@ -848,6 +848,10 @@ mkdir "${AIRFLOW_HOME}/sqlite" -p || true

 ASSET_COMPILATION_WAIT_MULTIPLIER=${ASSET_COMPILATION_WAIT_MULTIPLIER:=1}

+if [[ "${CI=}" == "true" ]]; then
+    export COLUMNS="202"
+fi
+
 . "${IN_CONTAINER_DIR}/check_connectivity.sh"

 function wait_for_asset_compilation() {

dev/breeze/src/airflow_breeze/utils/console.py

Lines changed: 1 addition & 1 deletion
@@ -91,7 +91,7 @@ def escaped_title(self) -> str:
         return self.title.replace("[", "\\[")


-CONSOLE_WIDTH: int | None = int(os.environ.get("CI_WIDTH", "400")) if os.environ.get("CI") else None
+CONSOLE_WIDTH: int | None = int(os.environ.get("CI_WIDTH", "202")) if os.environ.get("CI") else None


 @clearable_cache
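Locally CONSOLE_WIDTH stays None so the width is auto-detected; only CI forces 202. A minimal sketch of how such a value is typically consumed, assuming (as in breeze) it is handed to a rich Console:

import os

from rich.console import Console

# None lets rich auto-detect the terminal; an int pins the width so CI
# output wraps identically from run to run.
CONSOLE_WIDTH: int | None = int(os.environ.get("CI_WIDTH", "202")) if os.environ.get("CI") else None
console = Console(width=CONSOLE_WIDTH)
console.print("[bold]wraps at 202 columns when CI is set[/bold]")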

providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_sql.py

Lines changed: 1 addition & 1 deletion
@@ -20,8 +20,8 @@
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any

-from airflow.models import BaseOperator
 from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook
+from airflow.providers.apache.spark.version_compat import BaseOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context

providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py

Lines changed: 1 addition & 1 deletion
@@ -21,8 +21,8 @@
 from typing import TYPE_CHECKING, Any

 from airflow.configuration import conf
-from airflow.models import BaseOperator
 from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook
+from airflow.providers.apache.spark.version_compat import BaseOperator
 from airflow.providers.common.compat.openlineage.utils.spark import (
     inject_parent_job_information_into_spark_properties,
     inject_transport_information_into_spark_properties,

providers/apache/spark/src/airflow/providers/apache/spark/version_compat.py

Lines changed: 10 additions & 0 deletions
@@ -33,3 +33,13 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:


 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseOperator
+else:
+    from airflow.models import BaseOperator
+
+__all__ = [
+    "AIRFLOW_V_3_0_PLUS",
+    "BaseOperator",
+]
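The shim gives provider code a single import site for BaseOperator, which the two spark operator diffs above now use. A sketch of a consumer, with HelloOperator as a made-up name for illustration:

# Hypothetical operator built on the shim: the same import resolves to
# airflow.sdk.BaseOperator on Airflow 3 and airflow.models.BaseOperator on 2.
from airflow.providers.apache.spark.version_compat import BaseOperator


class HelloOperator(BaseOperator):
    def execute(self, context):
        self.log.info("identical code path on Airflow 2 and 3")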

providers/edge3/docs/deployment.rst

Lines changed: 3 additions & 3 deletions
@@ -50,9 +50,9 @@ Minimum Airflow configuration settings for the Edge Worker to make it running is

 - Section ``[core]``

-  - ``execution_api_server_url``: The url of the execution api server. Default is ``{BASE_URL}/execution/``
-    where {BASE_URL} is the base url of the API Server. If ``{BASE_URL}`` is not set, it will use
-    ``http://localhost:8080`` as the default base url.
+  - ``execution_api_server_url``: If not set, the base URL from ``edge.api_url`` will be used. For example,
+    when ``edge.api_url`` is set to ``https://your-hostname-and-port/edge_worker/v1/rpcapi``, it will
+    default to ``https://your-hostname-and-port/execution/``.
   - ``executor``: Executor must be set or added to be ``airflow.providers.edge3.executors.EdgeExecutor``
   - ``internal_api_secret_key``: An encryption key must be set on api-server and Edge Worker component as
     shared secret to authenticate traffic. It should be a random string like the fernet key
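A sketch of the fallback rule the rewritten docs describe, where derive_execution_api_url is a hypothetical helper rather than the provider's actual code: keep the scheme and host of ``edge.api_url`` and replace the path with ``/execution/``.

from urllib.parse import urlsplit, urlunsplit


def derive_execution_api_url(edge_api_url: str) -> str:
    # Keep scheme and host, drop the edge worker path, append /execution/.
    parts = urlsplit(edge_api_url)
    return urlunsplit((parts.scheme, parts.netloc, "/execution/", "", ""))


assert derive_execution_api_url(
    "https://your-hostname-and-port/edge_worker/v1/rpcapi"
) == "https://your-hostname-and-port/execution/"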

scripts/docker/entrypoint_ci.sh

Lines changed: 4 additions & 0 deletions
@@ -49,6 +49,10 @@ mkdir "${AIRFLOW_HOME}/sqlite" -p || true

 ASSET_COMPILATION_WAIT_MULTIPLIER=${ASSET_COMPILATION_WAIT_MULTIPLIER:=1}

+if [[ "${CI=}" == "true" ]]; then
+    export COLUMNS="202"
+fi
+
 # shellcheck disable=SC1091
 . "${IN_CONTAINER_DIR}/check_connectivity.sh"
