
Commit 8f41584

Optimize deferrable mode execution for BigQueryValueCheckOperator (#…
phanikumv committed Jun 21, 2023
1 parent 79eac76 commit 8f41584
Showing 2 changed files with 48 additions and 13 deletions.
airflow/providers/google/cloud/operators/bigquery.py (21 additions, 13 deletions)
@@ -365,19 +365,27 @@ def execute(self, context: Context) -> None:  # type: ignore[override]

             job = self._submit_job(hook, job_id="")
             context["ti"].xcom_push(key="job_id", value=job.job_id)
-            self.defer(
-                timeout=self.execution_timeout,
-                trigger=BigQueryValueCheckTrigger(
-                    conn_id=self.gcp_conn_id,
-                    job_id=job.job_id,
-                    project_id=hook.project_id,
-                    sql=self.sql,
-                    pass_value=self.pass_value,
-                    tolerance=self.tol,
-                    poll_interval=self.poll_interval,
-                ),
-                method_name="execute_complete",
-            )
+            if job.running():
+                self.defer(
+                    timeout=self.execution_timeout,
+                    trigger=BigQueryValueCheckTrigger(
+                        conn_id=self.gcp_conn_id,
+                        job_id=job.job_id,
+                        project_id=hook.project_id,
+                        sql=self.sql,
+                        pass_value=self.pass_value,
+                        tolerance=self.tol,
+                        poll_interval=self.poll_interval,
+                    ),
+                    method_name="execute_complete",
+                )
+            self._handle_job_error(job)
+            self.log.info("Current state of job %s is %s", job.job_id, job.state)
+
+    @staticmethod
+    def _handle_job_error(job: BigQueryJob | UnknownJob) -> None:
+        if job.error_result:
+            raise AirflowException(f"BigQuery job {job.job_id} failed: {job.error_result}")

     def execute_complete(self, context: Context, event: dict[str, Any]) -> None:
         """
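The change above makes the deferral conditional: execute() hands the job off to BigQueryValueCheckTrigger only while the submitted job is still running, and a job that has already finished is resolved in place via the new _handle_job_error() helper instead of round-tripping through the triggerer. Below is a minimal, self-contained sketch of that control flow; FakeJob, JobFailedError, and defer_to_trigger are illustrative stand-ins, not Airflow or BigQuery client APIs.

# Sketch of the "defer only if still running" pattern, using stand-in types
# rather than the real Airflow/BigQuery classes.
from dataclasses import dataclass


class JobFailedError(Exception):
    """Raised when a finished job reports an error result (stand-in for AirflowException)."""


@dataclass
class FakeJob:
    job_id: str
    state: str  # e.g. "RUNNING" or "DONE"
    error_result: dict | None = None

    def running(self) -> bool:
        return self.state == "RUNNING"


def execute(job: FakeJob, defer_to_trigger) -> None:
    # Hand the job off to an async trigger only while it is still running.
    if job.running():
        defer_to_trigger(job.job_id)
        return
    # Already finished: surface errors immediately and log the final state.
    if job.error_result:
        raise JobFailedError(f"job {job.job_id} failed: {job.error_result}")
    print(f"Current state of job {job.job_id} is {job.state}")


if __name__ == "__main__":
    execute(FakeJob("job-1", "DONE"), defer_to_trigger=lambda job_id: None)

With a job that is already DONE, the sketch never touches the deferral callback, which is the behavior the new test below asserts against the operator.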
tests/providers/google/cloud/operators/test_bigquery.py (27 additions, 0 deletions)
@@ -1778,6 +1778,33 @@ def test_bigquery_value_check_async(self, mock_hook, create_task_instance_of_operator
             exc.value.trigger, BigQueryValueCheckTrigger
         ), "Trigger is not a BigQueryValueCheckTrigger"

+    @mock.patch("airflow.providers.google.cloud.operators.bigquery.BigQueryValueCheckOperator.execute")
+    @mock.patch("airflow.providers.google.cloud.operators.bigquery.BigQueryValueCheckOperator.defer")
+    @mock.patch("airflow.providers.google.cloud.operators.bigquery.BigQueryHook")
+    def test_bigquery_value_check_operator_async_finish_before_deferred(
+        self, mock_hook, mock_defer, mock_execute, create_task_instance_of_operator
+    ):
+        job_id = "123456"
+        hash_ = "hash"
+        real_job_id = f"{job_id}_{hash_}"
+
+        mock_hook.return_value.insert_job.return_value = MagicMock(job_id=real_job_id, error_result=False)
+        mock_hook.return_value.insert_job.return_value.running.return_value = False
+
+        ti = create_task_instance_of_operator(
+            BigQueryValueCheckOperator,
+            dag_id="dag_id",
+            task_id="check_value",
+            sql="SELECT COUNT(*) FROM Any",
+            pass_value=2,
+            use_legacy_sql=True,
+            deferrable=True,
+        )
+
+        ti.task.execute(MagicMock())
+        assert not mock_defer.called
+        assert mock_execute.called
+
     @pytest.mark.parametrize(
         "kwargs, expected",
         [
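The added test exercises the non-deferred path: the mocked hook's insert_job() returns a job whose running() is False and whose error_result is falsy, so executing the task with deferrable=True should complete synchronously, and the assertions confirm that defer() is never invoked in that case.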
