fix: avoid unnecessary API call in QueryJob.result() when job is already finished #1900

Merged: 14 commits, Apr 18, 2024
Changes from 1 commit
fix remaining unit tests
tswast committed Apr 13, 2024
commit 511231525fb56579bf20c613270dcee49426c12c
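For orientation, a rough sketch of the behavior the PR title describes. This is an illustration of the idea only, not the PR's actual implementation; `done(reload=False)` is a real QueryJob method that inspects only the locally cached job state.

# Sketch of the idea in the PR title, not the PR's implementation:
# when a job already knows it is finished, result() should not need an
# extra jobs.get / jobs.getQueryResults round trip before fetching rows.
from google.cloud import bigquery

client = bigquery.Client()  # assumes default credentials and project
job = client.query("SELECT 1")

# done(reload=False) inspects only the cached job state; no API request.
if job.done(reload=False):
    rows = job.result()  # already finished, so result() can skip polling
else:
    rows = job.result(timeout=60.0)  # not finished yet: result() polls

print(list(rows))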
42 changes: 25 additions & 17 deletions in tests/unit/job/test_query.py

@@ -1081,7 +1081,7 @@ def test_result_with_done_job_calls_get_query_results(self):
             method="GET",
             path=query_results_path,
             query_params={"maxResults": 0, "location": "EU"},
-            timeout=None,
+            timeout=google.cloud.bigquery.client._MIN_GET_QUERY_RESULTS_TIMEOUT,
         )
         query_results_page_call = mock.call(
             method="GET",
@@ -1347,6 +1347,7 @@ def test_result_w_timeout(self):
         import google.cloud.bigquery.client

         begun_resource = self._make_resource()
+        begun_resource["jobReference"]["location"] = "US"
         query_resource = {
             "jobComplete": True,
             "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID},

(chalmerlowe marked a conversation on the added line as resolved.)
@@ -1357,26 +1358,33 @@ def test_result_w_timeout(self):
         connection = make_connection(begun_resource, query_resource, done_resource)
         client = _make_client(project=self.PROJECT, connection=connection)
         job = self._make_one(self.JOB_ID, self.QUERY, client)
+        job._properties["jobReference"]["location"] = "US"

         with freezegun.freeze_time("1970-01-01 00:00:00", tick=False):
-            job.result(timeout=1.0)
+            job.result(timeout=1.125)

-        self.assertEqual(len(connection.api_request.call_args_list), 3)
-        begin_request = connection.api_request.call_args_list[0]
-        query_request = connection.api_request.call_args_list[1]
-        reload_request = connection.api_request.call_args_list[2]
-        self.assertEqual(begin_request[1]["method"], "POST")
-        self.assertEqual(query_request[1]["method"], "GET")
-        self.assertEqual(
-            query_request[1]["path"],
-            "/projects/{}/queries/{}".format(self.PROJECT, self.JOB_ID),
-        )
-        self.assertEqual(query_request[1]["timeout"], 120)
-        self.assertEqual(
-            query_request[1]["timeout"],
-            google.cloud.bigquery.client._MIN_GET_QUERY_RESULTS_TIMEOUT,
-        )
-        self.assertEqual(reload_request[1]["method"], "GET")
+        reload_call = mock.call(
+            method="GET",
+            path=f"/projects/{self.PROJECT}/jobs/{self.JOB_ID}",
+            query_params={"location": "US"},
+            timeout=1.125,
+        )
+        get_query_results_call = mock.call(
+            method="GET",
+            path=f"/projects/{self.PROJECT}/queries/{self.JOB_ID}",
+            query_params={
+                "maxResults": 0,
+                "location": "US",
+            },
+            timeout=google.cloud.bigquery.client._MIN_GET_QUERY_RESULTS_TIMEOUT,
+        )
+        connection.api_request.assert_has_calls(
+            [
+                reload_call,
+                get_query_results_call,
+                reload_call,
+            ]
+        )

     def test_result_w_page_size(self):
         # Arrange
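The rewritten assertions use `mock.call` plus `assert_has_calls` instead of indexing `call_args_list`. As a reminder of the semantics (standard `unittest.mock` behavior, not specific to this repo): with the default `any_order=False`, the expected calls must appear consecutively and in order somewhere in `mock_calls`, and other calls may surround them.

# Self-contained demonstration of assert_has_calls semantics.
from unittest import mock

api = mock.Mock()
api(method="GET", path="/jobs/123")
api(method="GET", path="/queries/123")
api(method="GET", path="/jobs/123")

# Passes: the two expected calls occur consecutively, in order.
api.assert_has_calls(
    [
        mock.call(method="GET", path="/jobs/123"),
        mock.call(method="GET", path="/queries/123"),
    ]
)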
Contributor (on `job.result(timeout=1.125)`):

Is there a reason we are using such a specific number, 1.125? Can I get a comment here to let future me know why we picked this number?
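The thread as captured here does not record an answer. One plausible reason, offered purely as an assumption, is that 1.125 is exactly representable in binary floating point, so the timeout value asserted in the mocked calls compares equal without rounding error:

# Assumption, not confirmed in the thread: 1.125 is an exact binary float
# (1.001 in binary, i.e. 2**0 + 2**-3), so equality assertions are exact.
assert 1.125 == 9 / 8
assert (1.125).hex() == "0x1.2000000000000p+0"
print((0.1).hex())  # by contrast, 0.1 is inexact: 0x1.999999999999ap-4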


Contributor (on `"maxResults": 0`):

Is maxResults of 0 synonymous with asking for all results? Or is it really asking for zero results?

Contributor Author (tswast):

We actually want 0 rows. If we omit this or ask for a non-zero number of rows, the jobs.getQueryResults API can hang when the query has wide rows (many columns).
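To make the author's point concrete, here is a sketch of a raw jobs.getQueryResults request used purely as a completion check. The endpoint and query parameters are the public BigQuery REST API; the job ID is a placeholder and the auth setup is one common choice, not taken from this PR.

# Sketch: poll jobs.getQueryResults for completion only. maxResults=0 asks
# for job status and schema without any row data, which sidesteps the hang
# the author describes for very wide rows.
import google.auth
from google.auth.transport.requests import AuthorizedSession

credentials, project_id = google.auth.default()
session = AuthorizedSession(credentials)

response = session.get(
    f"https://bigquery.googleapis.com/bigquery/v2/projects/{project_id}"
    "/queries/some-job-id",  # placeholder job ID
    params={"maxResults": 0, "location": "US", "timeoutMs": 10_000},
)
response.raise_for_status()
print(response.json().get("jobComplete"))  # True once the query finished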