import datetime
import re
from unittest import mock

import pytest

import google.api_core.exceptions
import google.api_core.retry
import freezegun
import requests.exceptions

from google.cloud.bigquery import _job_helpers
import google.cloud.bigquery.retry

from .helpers import make_client, make_connection
|
|
_RETRY_NOT_FOUND = {
    "job_retry": google.api_core.retry.Retry(
        predicate=google.api_core.retry.if_exception_type(
            google.api_core.exceptions.NotFound,
        ),
    ),
}
_RETRY_BAD_REQUEST = {
    "job_retry": google.api_core.retry.Retry(
        predicate=google.api_core.retry.if_exception_type(
            google.api_core.exceptions.BadRequest,
        ),
    ),
}
|
|
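# Four combinations of `job_retry` passed to `query` and/or `result`:
# - Neither passed: fall back to the default, which retries jobs failing
#   with rateLimitExceeded.
# - A NotFound retry passed to `query`.
# - A NotFound retry passed to `result`.
# - A BadRequest retry passed to `query`, overridden by the NotFound retry
#   passed to `result`.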
@mock.patch("time.sleep") |
@pytest.mark.parametrize(
    "reason, job_retry, result_retry",
    [
        pytest.param(
            "rateLimitExceeded",
            {},
            {},
            id="no job_retry",
        ),
        pytest.param(
            "notFound",
            _RETRY_NOT_FOUND,
            {},
            id="Query NotFound",
        ),
        pytest.param(
            "notFound",
            _RETRY_NOT_FOUND,
            _RETRY_NOT_FOUND,
            id="Result NotFound",
        ),
        pytest.param(
            "notFound",
            _RETRY_BAD_REQUEST,
            _RETRY_NOT_FOUND,
            id="BadRequest",
        ),
    ],
)
def test_retry_failed_jobs(sleep, reason, job_retry, result_retry): |
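    """Test retry of job failures, as opposed to API-invocation failures."""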
    client = make_client()
    err = dict(reason=reason)
    conn = client._connection = make_connection(
        # The first three attempts fail with a retriable reason...
        dict(
            status=dict(state="DONE", errors=[err], errorResult=err),
            jobReference={"jobId": "id_1"},
        ),
        dict(
            status=dict(state="DONE", errors=[err], errorResult=err),
            jobReference={"jobId": "id_1"},
        ),
        dict(
            status=dict(state="DONE", errors=[err], errorResult=err),
            jobReference={"jobId": "id_1"},
        ),
        # ...the fourth succeeds, and then the rows are fetched.
        dict(status=dict(state="DONE"), jobReference={"jobId": "id_2"}),
        dict(rows=[{"f": [{"v": "1"}]}], totalRows="1"),
    )
|
    job = client.query("select 1", **job_retry)
    result = job.result(**result_retry)
|
assert result.total_rows == 1 |
|
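    # We made all the calls we expected to: three failed attempts, one
    # successful attempt, and one fetch of the query results.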
assert conn.api_request.call_count == 5 |
|
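    # The job reflects the ID of the last (successful) attempt.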
assert job.job_id == "id_2" |
|
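    # We slept three times, once before each retry.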
assert len(sleep.mock_calls) == 3 |
|
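    # Sleep durations are randomized, but they're always greater than 0...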
assert min(c[1][0] for c in sleep.mock_calls) > 0 |
|
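    # ...and at most initial * multiplier**(number of sleeps - 1) * 2,
    # i.e. 1 * 2**2 * 2 == 8.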
assert max(c[1][0] for c in sleep.mock_calls) <= 8 |
|
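    # We can fetch the result again: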
    conn = client._connection = make_connection(
        dict(rows=[{"f": [{"v": "1"}]}], totalRows="1"),
    )
    result = job.result()
|
assert result.total_rows == 1 |
|
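    # Only one API call is needed this time; the job already succeeded.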
assert conn.api_request.call_count == 1 |
|
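    # The job ID is unchanged.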
assert job.job_id == "id_2" |
|
|
def test_retry_connection_error_with_default_retries_and_successful_first_job(
    monkeypatch, client
):
""" |
|
Make sure ConnectionError can be retried at `is_job_done` level, even if |
|
retries are exhaused by API-level retry. |
|
|
|
Note: Because restart_query_job is set to True only in the case of a |
|
confirmed job failure, this should be safe to do even when a job is not |
|
idempotent. |
|
|
|
Regression test for issue |
|
https://github.com/googleapis/python-bigquery/issues/1929 |
|
""" |
|
job_counter = 0 |
|
    def make_job_id(*args, **kwargs):
        nonlocal job_counter
        job_counter += 1
        return f"{job_counter}"
|
    monkeypatch.setattr(_job_helpers, "make_job_id", make_job_id)
    conn = client._connection = make_connection()
    project = client.project
    job_reference_1 = {"projectId": project, "jobId": "1", "location": "test-loc"}
    NUM_API_RETRIES = 2
|
    with freezegun.freeze_time(
        "2024-01-01 00:00:00",
        # Note: Because of exponential backoff and a bit of jitter, ticking
        # this much per API call exhausts the API-level retry deadline after
        # NUM_API_RETRIES calls.
        auto_tick_seconds=(
            google.cloud.bigquery.retry._DEFAULT_RETRY_DEADLINE / NUM_API_RETRIES
        )
        + 1,
    ):
        conn.api_request.side_effect = [
            # jobs.insert
            {"jobReference": job_reference_1, "status": {"state": "PENDING"}},
            # jobs.get
            {"jobReference": job_reference_1, "status": {"state": "RUNNING"}},
            # jobs.getQueryResults x2
            requests.exceptions.ConnectionError(),
            requests.exceptions.ConnectionError(),
            # jobs.get -- the job actually succeeded, so we must not restart
            # it even though we retried at the `is_job_done` level.
            {"jobReference": job_reference_1, "status": {"state": "DONE"}},
            # jobs.getQueryResults
            {"jobReference": job_reference_1, "jobComplete": True},
        ]
|
        job = client.query("select 1")
        rows_iter = job.result()
|
    assert job.done()  # Doesn't make any additional API calls.
    assert rows_iter is not None
|
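    # Should only have created one job, even though we retried at the
    # `is_job_done` level.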
assert job_counter == 1 |
|
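    # Double-check that we made the API calls we expected to make.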
    conn.api_request.assert_has_calls(
        [
            # jobs.insert
            mock.call(
                method="POST",
                path="/projects/PROJECT/jobs",
                data={
                    "jobReference": {"jobId": "1", "projectId": "PROJECT"},
                    "configuration": {
                        "query": {"useLegacySql": False, "query": "select 1"}
                    },
                },
                timeout=None,
            ),
            # jobs.get
            mock.call(
                method="GET",
                path="/projects/PROJECT/jobs/1",
                query_params={"location": "test-loc", "projection": "full"},
                timeout=google.cloud.bigquery.retry.DEFAULT_GET_JOB_TIMEOUT,
            ),
            # jobs.getQueryResults x2
            mock.call(
                method="GET",
                path="/projects/PROJECT/queries/1",
                query_params={"maxResults": 0, "location": "test-loc"},
                timeout=None,
            ),
            mock.call(
                method="GET",
                path="/projects/PROJECT/queries/1",
                query_params={"maxResults": 0, "location": "test-loc"},
                timeout=None,
            ),
            # jobs.get -- checks the job state after the connection errors.
            mock.call(
                method="GET",
                path="/projects/PROJECT/jobs/1",
                query_params={"location": "test-loc", "projection": "full"},
                timeout=google.cloud.bigquery.retry.DEFAULT_GET_JOB_TIMEOUT,
            ),
            # jobs.getQueryResults
            mock.call(
                method="GET",
                path="/projects/PROJECT/queries/1",
                query_params={"maxResults": 0, "location": "test-loc"},
                timeout=None,
            ),
        ],
    )
|
|
def test_query_retry_with_default_retry_and_ambiguous_errors_only_retries_with_failed_job(
    client, monkeypatch
):
""" |
|
Some errors like 'rateLimitExceeded' can be ambiguous. Make sure we only |
|
retry the job when we know for sure that the job has failed for a retriable |
|
reason. We can only be sure after a "successful" call to jobs.get to fetch |
|
the failed job status. |
|
""" |
|
job_counter = 0 |
|
    def make_job_id(*args, **kwargs):
        nonlocal job_counter
        job_counter += 1
        return f"{job_counter}"
|
monkeypatch.setattr(_job_helpers, "make_job_id", make_job_id) |
|
    project = client.project
    job_reference_1 = {"projectId": project, "jobId": "1", "location": "test-loc"}
    job_reference_2 = {"projectId": project, "jobId": "2", "location": "test-loc"}
    NUM_API_RETRIES = 2
|
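    # This error is ambiguous: by itself it doesn't tell us whether the job
    # failed or the failure was transient at the API layer.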
    internal_error = google.api_core.exceptions.InternalServerError(
        "Job failed just because...",
        errors=[
            {"reason": "internalError"},
        ],
    )
    responses = [
        # jobs.insert
        {"jobReference": job_reference_1, "status": {"state": "PENDING"}},
        # jobs.get
        {"jobReference": job_reference_1, "status": {"state": "RUNNING"}},
        # jobs.getQueryResults x2
        #
        # Note: internalError is ambiguous in jobs.getQueryResults. The
        # problem could be at the frontend level, or the job itself could
        # have failed transiently, with the REST API translating the failed
        # job status into a failure HTTP code.
        internal_error,
        internal_error,
        # jobs.get -- now we know for sure that the job failed.
        {
            "jobReference": job_reference_1,
            "status": {"state": "DONE", "errorResult": {"reason": "internalError"}},
        },
        # jobs.insert again, with a fresh job ID
        {"jobReference": job_reference_2, "status": {"state": "PENDING"}},
        # jobs.get
        {"jobReference": job_reference_2, "status": {"state": "RUNNING"}},
        # jobs.getQueryResults
        {"jobReference": job_reference_2, "jobComplete": True},
        # jobs.get
        {"jobReference": job_reference_2, "status": {"state": "DONE"}},
    ]
|
conn = client._connection = make_connection(*responses) |
|
    with freezegun.freeze_time(
        # Note: Because of exponential backoff and a bit of jitter, ticking
        # this much per API call exhausts the API-level retry deadline after
        # NUM_API_RETRIES calls.
        auto_tick_seconds=(
            google.cloud.bigquery.retry._DEFAULT_RETRY_DEADLINE / NUM_API_RETRIES
        )
        + 1,
    ):
        job = client.query("select 1")
        job.result()
|
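    # Double-check that we made the API calls we expected to make.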
    conn.api_request.assert_has_calls(
        [
            # jobs.insert
            mock.call(
                method="POST",
                path="/projects/PROJECT/jobs",
                data={
                    "jobReference": {"jobId": "1", "projectId": "PROJECT"},
                    "configuration": {
                        "query": {"useLegacySql": False, "query": "select 1"}
                    },
                },
                timeout=None,
            ),
            # jobs.get
            mock.call(
                method="GET",
                path="/projects/PROJECT/jobs/1",
                query_params={"location": "test-loc", "projection": "full"},
                timeout=google.cloud.bigquery.retry.DEFAULT_GET_JOB_TIMEOUT,
            ),
            # jobs.getQueryResults x2
            mock.call(
                method="GET",
                path="/projects/PROJECT/queries/1",
                query_params={"maxResults": 0, "location": "test-loc"},
                timeout=None,
            ),
            mock.call(
                method="GET",
                path="/projects/PROJECT/queries/1",
                query_params={"maxResults": 0, "location": "test-loc"},
                timeout=None,
            ),
            # jobs.get -- confirms that the job failed for a retriable reason.
            mock.call(
                method="GET",
                path="/projects/PROJECT/jobs/1",
                query_params={"location": "test-loc", "projection": "full"},
                timeout=google.cloud.bigquery.retry.DEFAULT_GET_JOB_TIMEOUT,
            ),
            # jobs.insert again
            mock.call(
                method="POST",
                path="/projects/PROJECT/jobs",
                data={
                    "jobReference": {
                        # Make sure that we generated a new job ID.
                        "jobId": "2",
                        "projectId": "PROJECT",
                    },
                    "configuration": {
                        "query": {"useLegacySql": False, "query": "select 1"}
                    },
                },
                timeout=None,
            ),
            # jobs.get
            mock.call(
                method="GET",
                path="/projects/PROJECT/jobs/2",
                query_params={"location": "test-loc", "projection": "full"},
                timeout=google.cloud.bigquery.retry.DEFAULT_GET_JOB_TIMEOUT,
            ),
            # jobs.getQueryResults
            mock.call(
                method="GET",
                path="/projects/PROJECT/queries/2",
                query_params={"maxResults": 0, "location": "test-loc"},
                timeout=None,
            ),
            # jobs.get
            mock.call(
                method="GET",
                path="/projects/PROJECT/jobs/2",
                query_params={"location": "test-loc", "projection": "full"},
                timeout=google.cloud.bigquery.retry.DEFAULT_GET_JOB_TIMEOUT,
            ),
        ]
    )
|
|
@pytest.mark.parametrize("job_retry_on_query", ["Query", "Result"])
@mock.patch("time.sleep")
def test_disable_retry_failed_jobs(sleep, client, job_retry_on_query):
    """
    Test that passing ``job_retry=None`` disables retries of job failures,
    whether it is passed to `query` or to `result`.
    """
|
    err = dict(reason="rateLimitExceeded")
    responses = [dict(status=dict(state="DONE", errors=[err], errorResult=err))] * 3
|
    def api_request(method, path, query_params=None, data=None, **kw):
        response = responses.pop(0)
        response["jobReference"] = data["jobReference"]
        return response
|
    conn = client._connection = make_connection()
    conn.api_request.side_effect = api_request
|
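    # Disable retries either when the query is issued...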
    if job_retry_on_query == "Query":
        job_retry = dict(job_retry=None)
    else:
        job_retry = {}
    job = client.query("select 1", **job_retry)
|
orig_job_id = job.job_id |
|
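    # ...or when the result is fetched.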
job_retry = dict(job_retry=None) if job_retry_on_query == "Result" else {} |
    with pytest.raises(google.api_core.exceptions.Forbidden):
job.result(**job_retry) |
|
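    # With retries disabled, the job is not restarted and we never sleep.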
assert job.job_id == orig_job_id |
assert len(sleep.mock_calls) == 0 |
|
|
@mock.patch("time.sleep") |
def test_retry_failed_jobs_after_retry_failed(sleep, client): |
""" |
|
If at first you don't succeed, maybe you will later. :) |
|
""" |
|
conn = client._connection = make_connection() |
|
with freezegun.freeze_time("2024-01-01 00:00:00") as frozen_datetime: |
err = dict(reason="rateLimitExceeded") |
|
        def api_request(method, path, query_params=None, data=None, **kw):
            # Advance the frozen clock by the duration of the last sleep so
            # that the retry deadline is eventually exceeded.
            calls = sleep.mock_calls
            if calls:
                frozen_datetime.tick(delta=datetime.timedelta(seconds=calls[-1][1][0]))
            response = dict(status=dict(state="DONE", errors=[err], errorResult=err))
            response["jobReference"] = data["jobReference"]
            return response
|
conn.api_request.side_effect = api_request |
|
        job = client.query("select 1")
        orig_job_id = job.job_id
|
        with pytest.raises(google.api_core.exceptions.RetryError):
            job.result()
|
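        # The job was restarted on each retry, so its ID changed.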
assert job.job_id != orig_job_id |
|
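        # We can try again: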
        err2 = dict(reason="backendError")  # We retry on this reason, too.
        responses = [
            dict(status=dict(state="DONE", errors=[err2], errorResult=err2)),
            dict(status=dict(state="DONE", errors=[err], errorResult=err)),
            dict(status=dict(state="DONE", errors=[err2], errorResult=err2)),
            dict(status=dict(state="DONE")),
            dict(rows=[{"f": [{"v": "1"}]}], totalRows="1"),
        ]
|
        def api_request(method, path, query_params=None, data=None, **kw):
            calls = sleep.mock_calls
            frozen_datetime.tick(delta=datetime.timedelta(seconds=calls[-1][1][0]))
            response = responses.pop(0)
            if data:
                # jobs.insert echoes back the requested job reference.
                response["jobReference"] = data["jobReference"]
            else:
                # jobs.get: derive the job reference from the request path.
                response["jobReference"] = dict(
                    jobId=path.split("/")[-1], projectId="PROJECT"
                )
            return response
|
        conn.api_request.side_effect = api_request
        result = job.result()
        assert result.total_rows == 1
        assert not responses
        assert job.job_id != orig_job_id
|
|
def test_raises_on_job_retry_on_query_with_non_retryable_jobs(client): |
    with pytest.raises(
        TypeError,
        match=re.escape(
            "`job_retry` was provided, but the returned job is"
            " not retryable, because a custom `job_id` was"
            " provided."
        ),
    ):
        client.query("select 42", job_id=42, job_retry=google.api_core.retry.Retry())
|
|
def test_raises_on_job_retry_on_result_with_non_retryable_jobs(client): |
    client._connection = make_connection({})
    job = client.query("select 42", job_id=42)
    with pytest.raises(
        TypeError,
        match=re.escape(
            "`job_retry` was provided, but this job is"
            " not retryable, because a custom `job_id` was"
            " provided to the query that created this job."
        ),
    ):
        job.result(job_retry=google.api_core.retry.Retry())
|
|
def test_query_and_wait_retries_job_for_DDL_queries(): |
""" |
|
Specific test for retrying DDL queries with "jobRateLimitExceeded" error: |
|
https://github.com/googleapis/python-bigquery/issues/1790 |
|
""" |
    # Use freeze_time as a context manager so the fake clock (auto-ticking
    # one second per time check) actually takes effect for retry deadlines.
    with freezegun.freeze_time(auto_tick_seconds=1):
        client = make_client()
        conn = client._connection = make_connection(
            {
                "jobReference": {
                    "projectId": "response-project",
                    "jobId": "abc",
                    "location": "response-location",
                },
                "jobComplete": False,
            },
            google.api_core.exceptions.InternalServerError(
                "job_retry me", errors=[{"reason": "jobRateLimitExceeded"}]
            ),
            google.api_core.exceptions.BadRequest(
                "retry me", errors=[{"reason": "jobRateLimitExceeded"}]
            ),
            {
                "jobReference": {
                    "projectId": "response-project",
                    "jobId": "abc",
                    "location": "response-location",
                },
                "jobComplete": True,
                "schema": {
                    "fields": [
                        {"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
                        {"name": "age", "type": "INT64", "mode": "NULLABLE"},
                    ],
                },
                "rows": [
                    {"f": [{"v": "Whillma Phlyntstone"}, {"v": "27"}]},
                    {"f": [{"v": "Bhetty Rhubble"}, {"v": "28"}]},
                    {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
                    {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
                ],
            },
        )
        rows = _job_helpers.query_and_wait(
            client,
            query="SELECT 1",
            location="request-location",
            project="request-project",
            job_config=None,
            page_size=None,
            max_results=None,
            retry=google.cloud.bigquery.retry.DEFAULT_RETRY,
            job_retry=google.cloud.bigquery.retry.DEFAULT_JOB_RETRY,
        )
|
assert len(list(rows)) == 4 |
|
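    # jobs.query requests go to the project and location from the request,
    # not the ones echoed back in the response.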
query_request_path = "/projects/request-project/queries" |
|
calls = conn.api_request.call_args_list |
|
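    # The first call is the initial jobs.query request.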
    _, kwargs = calls[0]
    assert kwargs["method"] == "POST"
    assert kwargs["path"] == query_request_path
|
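    # Each ambiguous failure restarts the whole query rather than just
    # polling for results again, so the last call is another jobs.query
    # request.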
    _, kwargs = calls[3]
    assert kwargs["method"] == "POST"
    assert kwargs["path"] == query_request_path