| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | import contextlib |
| | import threading |
| | import time |
| |
|
| | import google.api_core.exceptions |
| | import google.cloud.bigquery |
| | import pytest |
| |
|
| |
|
def thread(func):
    """Start *func* on a daemon thread and return the started thread.

    Intended for use as a decorator: the decorated name is rebound to the
    ``threading.Thread`` object so callers can later ``join()`` it.
    """
    worker = threading.Thread(target=func, daemon=True)
    worker.start()
    return worker
| |
|
| |
|
@pytest.mark.parametrize("job_retry_on_query", [True, False])
def test_query_retry_539(bigquery_client, dataset_id, job_retry_on_query):
    """
    Test job_retry

    See: https://github.com/googleapis/python-bigquery/issues/539

    Runs the retry scenario twice: once with ``job_retry`` passed to
    ``query()`` and once with it passed to ``result()`` instead.
    NOTE(review): this is a live integration test — ``bigquery_client`` and
    ``dataset_id`` are presumably pytest fixtures bound to a real GCP
    project; confirm against the suite's conftest.
    """
    from google.api_core import exceptions
    from google.api_core.retry import if_exception_type, Retry

    table_name = f"{dataset_id}.t539"

    # Sanity check: the table does not exist yet, so a plain query fails
    # with NotFound (no retry policy involved here).
    with pytest.raises(google.api_core.exceptions.NotFound):
        bigquery_client.query(f"select count(*) from {table_name}").result()

    # Retry policy that keeps retrying only while the error is NotFound.
    retry_notfound = Retry(predicate=if_exception_type(exceptions.NotFound))

    # Attach the retry either at query() time or later at result() time,
    # depending on the parametrized flag.
    job_retry = dict(job_retry=retry_notfound) if job_retry_on_query else {}
    job = bigquery_client.query(f"select count(*) from {table_name}", **job_retry)
    job_id = job.job_id

    # The first attempt has already failed, but the retry must not kick in
    # until result() is called — done()/exception() just observe the failure.
    assert job.done()
    assert job.exception() is not None

    # Create the missing table from a daemon thread (see the `thread`
    # decorator helper) after a short delay, so at least one retry attempt
    # fails before the table appears.
    @thread
    def create_table():
        time.sleep(1)
        # Fresh client: this runs on another thread, and closing() ensures
        # its connection pool is released when the query finishes.
        with contextlib.closing(google.cloud.bigquery.Client()) as client:
            client.query(f"create table {table_name} (id int64)").result()

    # If job_retry was given at query() time, result() needs nothing extra;
    # otherwise supply it here. result() should now retry until the table
    # exists and the count of rows in the new empty table is 0.
    job_retry = {} if job_retry_on_query else dict(job_retry=retry_notfound)
    [[count]] = list(job.result(**job_retry))
    assert count == 0

    # A retried query job is resubmitted, so it must have a new job id.
    assert job.job_id != job_id

    # Cleanup: wait for the helper thread, then drop the table.
    create_table.join()
    bigquery_client.query(f"drop table {table_name}").result()
| |
|