diff --git a/google/cloud/bigquery/_tqdm_helpers.py b/google/cloud/bigquery/_tqdm_helpers.py index 456ca2530..cb81bd8f6 100644 --- a/google/cloud/bigquery/_tqdm_helpers.py +++ b/google/cloud/bigquery/_tqdm_helpers.py @@ -67,7 +67,7 @@ def get_progress_bar(progress_bar_type, description, total, unit): ) elif progress_bar_type == "tqdm_gui": return tqdm.tqdm_gui(desc=description, total=total, unit=unit) - except (KeyError, TypeError): + except (KeyError, TypeError): # pragma: NO COVER # Protect ourselves from any tqdm errors. In case of # unexpected tqdm behavior, just fall back to showing # no progress bar. diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py index 5521e2e1e..891a54e5c 100644 --- a/google/cloud/bigquery/client.py +++ b/google/cloud/bigquery/client.py @@ -593,7 +593,7 @@ def _ensure_bqstorage_client( ) return None - if bqstorage_client is None: + if bqstorage_client is None: # pragma: NO COVER bqstorage_client = bigquery_storage.BigQueryReadClient( credentials=self._credentials, client_options=client_options, diff --git a/setup.py b/setup.py index 5a35f4136..ed9a6351b 100644 --- a/setup.py +++ b/setup.py @@ -45,8 +45,9 @@ ] pyarrow_dependency = "pyarrow >= 3.0.0" extras = { - # Keep the no-op bqstorage extra for backward compatibility. - # See: https://github.com/googleapis/python-bigquery/issues/757 + # bqstorage was a required dependency for a time, but it has been + # moved back to optional to reduce dependency bloat. See + # https://github.com/googleapis/python-bigquery/issues/1196 for more background. "bqstorage": [ "google-cloud-bigquery-storage >= 2.6.0, <3.0.0dev", # Due to an issue in pip's dependency resolver, the `grpc` extra is not diff --git a/tests/system/test_client.py b/tests/system/test_client.py index 414239323..862ef3245 100644 --- a/tests/system/test_client.py +++ b/tests/system/test_client.py @@ -54,16 +54,6 @@ from .
import helpers -try: - from google.cloud import bigquery_storage -except ImportError: # pragma: NO COVER - bigquery_storage = None - -try: - import pyarrow - import pyarrow.types -except ImportError: # pragma: NO COVER - pyarrow = None JOB_TIMEOUT = 120 # 2 minutes DATA_PATH = pathlib.Path(__file__).parent.parent / "data" @@ -1772,11 +1762,10 @@ def test_dbapi_fetchall_from_script(self): row_tuples = [r.values() for r in rows] self.assertEqual(row_tuples, [(5, "foo"), (6, "bar"), (7, "baz")]) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_dbapi_fetch_w_bqstorage_client_large_result_set(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pyarrow") + bqstorage_client = bigquery_storage.BigQueryReadClient( credentials=Config.CLIENT._credentials ) @@ -1834,10 +1823,8 @@ def test_dbapi_dry_run_query(self): self.assertEqual(list(rows), []) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_dbapi_connection_does_not_leak_sockets(self): + pytest.importorskip("google.cloud.bigquery_storage") current_process = psutil.Process() conn_count_start = len(current_process.connections()) @@ -2382,11 +2369,10 @@ def test_create_table_rows_fetch_nested_schema(self): self.assertEqual(found[7], e_favtime) self.assertEqual(found[8], decimal.Decimal(expected["FavoriteNumber"])) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_nested_table_to_arrow(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + pyarrow = pytest.importorskip("pyarrow") + pyarrow.types = pytest.importorskip("pyarrow.types") from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import WriteDisposition diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 60dcab85e..e9e74b06b 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -32,15 +32,6 @@ import packaging import pytest -try: - import importlib.metadata as metadata -except ImportError: - import importlib_metadata as metadata - -try: - import pandas -except (ImportError, AttributeError): # pragma: NO COVER - pandas = None try: import opentelemetry @@ -59,11 +50,6 @@ msg = "Error importing from opentelemetry, is the installed version compatible?" 
raise ImportError(msg) from exc -try: - import pyarrow -except (ImportError, AttributeError): # pragma: NO COVER - pyarrow = None - import google.api_core.exceptions from google.api_core import client_info import google.cloud._helpers @@ -75,18 +61,9 @@ from google.cloud.bigquery.retry import DEFAULT_TIMEOUT import google.cloud.bigquery.table -try: - from google.cloud import bigquery_storage -except (ImportError, AttributeError): # pragma: NO COVER - bigquery_storage = None from test_utils.imports import maybe_fail_import from tests.unit.helpers import make_connection -if pandas is not None: - PANDAS_INSTALLED_VERSION = metadata.version("pandas") -else: - PANDAS_INSTALLED_VERSION = "0.0.0" - def _make_credentials(): import google.auth.credentials @@ -800,10 +777,9 @@ def test_get_dataset(self): self.assertEqual(dataset.dataset_id, self.DS_ID) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ensure_bqstorage_client_creating_new_instance(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + mock_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) mock_client_instance = object() mock_client.return_value = mock_client_instance @@ -849,10 +825,8 @@ def fail_bqstorage_import(name, globals, locals, fromlist, level): ] assert matching_warnings, "Missing dependency warning not raised." - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ensure_bqstorage_client_obsolete_dependency(self): + pytest.importorskip("google.cloud.bigquery_storage") creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -869,10 +843,8 @@ def test_ensure_bqstorage_client_obsolete_dependency(self): ] assert matching_warnings, "Obsolete dependency warning not raised." 
- @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ensure_bqstorage_client_existing_client_check_passes(self): + pytest.importorskip("google.cloud.bigquery_storage") creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) mock_storage_client = mock.sentinel.mock_storage_client @@ -883,10 +855,23 @@ def test_ensure_bqstorage_client_existing_client_check_passes(self): self.assertIs(bqstorage_client, mock_storage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) + def test_ensure_bqstorage_client_is_none(self): + pytest.importorskip("google.cloud.bigquery_storage") + creds = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=creds) + bqstorage_client = None + + assert bqstorage_client is None + bqstorage_client = client._ensure_bqstorage_client( + bqstorage_client=bqstorage_client, + ) + + assert isinstance( + bqstorage_client, google.cloud.bigquery_storage_v1.BigQueryReadClient + ) + def test_ensure_bqstorage_client_existing_client_check_fails(self): + pytest.importorskip("google.cloud.bigquery_storage") creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) mock_storage_client = mock.sentinel.mock_storage_client @@ -972,8 +957,8 @@ def test_create_routine_w_conflict(self): timeout=DEFAULT_TIMEOUT, ) - @unittest.skipIf(opentelemetry is None, "Requires `opentelemetry`") def test_span_status_is_set(self): + pytest.importorskip("opentelemetry") from google.cloud.bigquery.routine import Routine tracer_provider = TracerProvider() @@ -6039,8 +6024,8 @@ def test_insert_rows_w_numeric(self): timeout=DEFAULT_TIMEOUT, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -6126,8 +6111,8 @@ def test_insert_rows_from_dataframe(self): ) assert call == expected_call - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe_nan(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -6194,8 +6179,8 @@ def test_insert_rows_from_dataframe_nan(self): ) assert call == expected_call - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe_many_columns(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -6247,8 +6232,8 @@ def test_insert_rows_from_dataframe_many_columns(self): assert len(actual_calls) == 1 assert actual_calls[0] == expected_call - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe_w_explicit_none_insert_ids(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -7569,9 +7554,9 @@ def test_load_table_from_file_w_default_load_config(self): project=self.PROJECT, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from 
google.cloud.bigquery.schema import PolicyTagList, SchemaField @@ -7665,9 +7650,9 @@ def test_load_table_from_dataframe(self): # (not passed in via job_config) assert "description" not in field - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_client_location(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7710,9 +7695,9 @@ def test_load_table_from_dataframe_w_client_location(self): sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_custom_job_config_wihtout_source_format(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7765,9 +7750,9 @@ def test_load_table_from_dataframe_w_custom_job_config_wihtout_source_format(sel # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_custom_job_config_w_source_format(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7821,9 +7806,9 @@ def test_load_table_from_dataframe_w_custom_job_config_w_source_format(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_parquet_options_none(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7873,9 +7858,9 @@ def test_load_table_from_dataframe_w_parquet_options_none(self): sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.parquet_options.enable_list_inference is True - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_list_inference_none(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7933,9 +7918,9 @@ def test_load_table_from_dataframe_w_list_inference_none(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_explicit_job_config_override(self): + pandas = 
pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7994,9 +7979,9 @@ def test_load_table_from_dataframe_w_explicit_job_config_override(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_default_load_config(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8044,9 +8029,9 @@ def test_load_table_from_dataframe_w_default_load_config(self): assert sent_config.write_disposition == job.WriteDisposition.WRITE_TRUNCATE assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_list_inference_false(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8105,9 +8090,9 @@ def test_load_table_from_dataframe_w_list_inference_false(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_custom_job_config_w_wrong_source_format(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery import job client = self._make_client() @@ -8125,9 +8110,9 @@ def test_load_table_from_dataframe_w_custom_job_config_w_wrong_source_format(sel assert "Got unexpected source_format:" in str(exc.value) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_automatic_schema(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8226,9 +8211,9 @@ def test_load_table_from_dataframe_w_automatic_schema(self): SchemaField("time_col", "TIME"), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_automatic_schema_detection_fails(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -8286,9 +8271,9 @@ def test_load_table_from_dataframe_w_automatic_schema_detection_fails(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema is None - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_index_and_auto_schema(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from 
google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8348,9 +8333,9 @@ def test_load_table_from_dataframe_w_index_and_auto_schema(self): ] assert sent_schema == expected_sent_schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_unknown_table(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES client = self._make_client() @@ -8384,9 +8369,9 @@ def test_load_table_from_dataframe_unknown_table(self): timeout=DEFAULT_TIMEOUT, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_nullable_int64_datatype(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8429,9 +8414,8 @@ def test_load_table_from_dataframe_w_nullable_int64_datatype(self): SchemaField("x", "INT64", "NULLABLE", None), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - # @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_nullable_int64_datatype_automatic_schema(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8474,9 +8458,9 @@ def test_load_table_from_dataframe_w_nullable_int64_datatype_automatic_schema(se SchemaField("x", "INT64", "NULLABLE", None), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_struct_fields(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8534,13 +8518,13 @@ def test_load_table_from_dataframe_struct_fields(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema == schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_array_fields(self): """Test that a DataFrame with array columns can be uploaded correctly. See: https://github.com/googleapis/python-bigquery/issues/19 """ + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8599,13 +8583,13 @@ def test_load_table_from_dataframe_array_fields(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema == schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_array_fields_w_auto_schema(self): """Test that a DataFrame with array columns can be uploaded correctly. 
See: https://github.com/googleapis/python-bigquery/issues/19 """ + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8662,9 +8646,9 @@ def test_load_table_from_dataframe_array_fields_w_auto_schema(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema == expected_schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_partial_schema(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8746,9 +8730,9 @@ def test_load_table_from_dataframe_w_partial_schema(self): SchemaField("bytes_col", "BYTES"), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_partial_schema_extra_types(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8783,9 +8767,9 @@ def test_load_table_from_dataframe_w_partial_schema_extra_types(self): assert "bq_schema contains fields not present in dataframe" in message assert "unknown_col" in message - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_schema_arrow_custom_compression(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8816,9 +8800,9 @@ def test_load_table_from_dataframe_w_schema_arrow_custom_compression(self): assert call_args is not None assert call_args.get("parquet_compression") == "LZ4" - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_wo_pyarrow_raises_error(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") client = self._make_client() records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] dataframe = pandas.DataFrame(records) @@ -8846,8 +8830,8 @@ def test_load_table_from_dataframe_wo_pyarrow_raises_error(self): ) def test_load_table_from_dataframe_w_bad_pyarrow_issues_warning(self): - pytest.importorskip("pandas", reason="Requires `pandas`") - pytest.importorskip("pyarrow", reason="Requires `pyarrow`") + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") client = self._make_client() records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] @@ -8874,14 +8858,14 @@ def test_load_table_from_dataframe_w_bad_pyarrow_issues_warning(self): location=self.LOCATION, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_nulls(self): """Test that a DataFrame with null columns can be uploaded if a BigQuery schema is specified. 
See: https://github.com/googleapis/google-cloud-python/issues/7370 """ + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -8919,8 +8903,8 @@ def test_load_table_from_dataframe_w_nulls(self): assert sent_config.schema == schema assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_load_table_from_dataframe_w_invaild_job_config(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery import job client = self._make_client() @@ -8937,8 +8921,8 @@ def test_load_table_from_dataframe_w_invaild_job_config(self): err_msg = str(exc.value) assert "Expected an instance of LoadJobConfig" in err_msg - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_load_table_from_dataframe_with_csv_source_format(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8987,9 +8971,9 @@ def test_load_table_from_dataframe_with_csv_source_format(self): sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.source_format == job.SourceFormat.CSV - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_higher_scale_decimal128_datatype(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField diff --git a/tests/unit/test_dbapi__helpers.py b/tests/unit/test_dbapi__helpers.py index 542f923d2..7e1da0034 100644 --- a/tests/unit/test_dbapi__helpers.py +++ b/tests/unit/test_dbapi__helpers.py @@ -21,16 +21,10 @@ import pytest -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None - import google.cloud._helpers from google.cloud.bigquery import query, table from google.cloud.bigquery.dbapi import _helpers from google.cloud.bigquery.dbapi import exceptions -from tests.unit.helpers import _to_pyarrow class TestQueryParameters(unittest.TestCase): @@ -215,8 +209,10 @@ def test_empty_iterable(self): result = _helpers.to_bq_table_rows(rows_iterable) self.assertEqual(list(result), []) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_non_empty_iterable(self): + pytest.importorskip("pyarrow") + from tests.unit.helpers import _to_pyarrow + rows_iterable = [ dict( one=_to_pyarrow(1.1), diff --git a/tests/unit/test_dbapi_connection.py b/tests/unit/test_dbapi_connection.py index 88378ec98..4071e57e0 100644 --- a/tests/unit/test_dbapi_connection.py +++ b/tests/unit/test_dbapi_connection.py @@ -13,14 +13,10 @@ # limitations under the License. import gc +import pytest import unittest from unittest import mock -try: - from google.cloud import bigquery_storage -except ImportError: # pragma: NO COVER - bigquery_storage = None - class TestConnection(unittest.TestCase): @staticmethod @@ -41,6 +37,8 @@ def _mock_client(self): def _mock_bqstorage_client(self): # Assumption: bigquery_storage exists. It's the test's responsibility to # not use this helper or skip itself if bqstorage is not installed.
+ from google.cloud import bigquery_storage + mock_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) mock_client._transport = mock.Mock(spec=["channel"]) mock_client._transport.grpc_channel = mock.Mock(spec=["close"]) @@ -57,10 +55,8 @@ def test_ctor_wo_bqstorage_client(self): self.assertIs(connection._client, mock_client) self.assertIs(connection._bqstorage_client, None) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ctor_w_bqstorage_client(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.dbapi import Connection mock_client = self._mock_client() @@ -89,10 +85,8 @@ def test_connect_wo_client(self, mock_client): self.assertIsNotNone(connection._client) self.assertIsNotNone(connection._bqstorage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_connect_w_client(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.dbapi import connect from google.cloud.bigquery.dbapi import Connection @@ -107,10 +101,8 @@ def test_connect_w_client(self): self.assertIs(connection._client, mock_client) self.assertIs(connection._bqstorage_client, mock_bqstorage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_connect_w_both_clients(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.dbapi import connect from google.cloud.bigquery.dbapi import Connection @@ -143,10 +135,8 @@ def test_raises_error_if_closed(self): ): getattr(connection, method)() - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_close_closes_all_created_bigquery_clients(self): + pytest.importorskip("google.cloud.bigquery_storage") client = self._mock_client() bqstorage_client = self._mock_bqstorage_client() @@ -168,10 +158,8 @@ def test_close_closes_all_created_bigquery_clients(self): self.assertTrue(client.close.called) self.assertTrue(bqstorage_client._transport.grpc_channel.close.called) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_close_does_not_close_bigquery_clients_passed_to_it(self): + pytest.importorskip("google.cloud.bigquery_storage") client = self._mock_client() bqstorage_client = self._mock_bqstorage_client() connection = self._make_one(client=client, bqstorage_client=bqstorage_client) diff --git a/tests/unit/test_dbapi_cursor.py b/tests/unit/test_dbapi_cursor.py index e9fd2e3dd..6fca4cec0 100644 --- a/tests/unit/test_dbapi_cursor.py +++ b/tests/unit/test_dbapi_cursor.py @@ -21,18 +21,8 @@ import google.cloud.bigquery.table as bq_table -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None - from google.api_core import exceptions -try: - from google.cloud import bigquery_storage -except ImportError: # pragma: NO COVER - bigquery_storage = None - from tests.unit.helpers import _to_pyarrow @@ -97,6 +87,8 @@ def _mock_client( return mock_client def _mock_bqstorage_client(self, rows=None, stream_count=0): + from google.cloud import bigquery_storage + if rows is None: rows = [] @@ -320,11 +312,9 @@ def test_fetchall_w_row(self): self.assertEqual(len(rows), 1) self.assertEqual(rows[0], (1,)) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_fetchall_w_bqstorage_client_fetch_success(self): 
+ pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pyarrow") from google.cloud.bigquery import dbapi # use unordered data to also test any non-determenistic key order in dicts @@ -380,10 +370,8 @@ def test_fetchall_w_bqstorage_client_fetch_success(self): self.assertEqual(sorted_row_data, expected_row_data) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_fetchall_w_bqstorage_client_fetch_no_rows(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import dbapi mock_client = self._mock_client( @@ -410,10 +398,8 @@ def test_fetchall_w_bqstorage_client_fetch_no_rows(self): # check the data returned self.assertEqual(rows, []) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_fetchall_w_bqstorage_client_fetch_error_no_fallback(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import dbapi row_data = [bq_table.Row([1.1, 1.2], {"foo": 0, "bar": 1})] @@ -448,11 +434,10 @@ def fake_ensure_bqstorage_client(bqstorage_client=None, **kwargs): # the default client was not used mock_client.list_rows.assert_not_called() - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_fetchall_w_bqstorage_client_no_arrow_compression(self): + pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pyarrow") + from google.cloud import bigquery_storage from google.cloud.bigquery import dbapi # Use unordered data to also test any non-determenistic key order in dicts. diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py index a8107ee97..dbc5948b8 100644 --- a/tests/unit/test_table.py +++ b/tests/unit/test_table.py @@ -24,11 +24,6 @@ import pytest -try: - import importlib.metadata as metadata -except ImportError: - import importlib_metadata as metadata - import google.api_core.exceptions from test_utils.imports import maybe_fail_import @@ -37,48 +32,6 @@ from google.cloud.bigquery.table import TableReference from google.cloud.bigquery.dataset import DatasetReference -try: - from google.cloud import bigquery_storage - from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( - grpc as big_query_read_grpc_transport, - ) -except ImportError: # pragma: NO COVER - bigquery_storage = None - big_query_read_grpc_transport = None - - -pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import() - -if pyarrow: # pragma: NO COVER - import pyarrow.types - -try: - import pandas -except (ImportError, AttributeError): # pragma: NO COVER - pandas = None - -try: - import db_dtypes # type: ignore -except ImportError: # pragma: NO COVER - db_dtypes = None - -try: - import geopandas -except (ImportError, AttributeError): # pragma: NO COVER - geopandas = None - -try: - import tqdm - from tqdm.std import TqdmDeprecationWarning - -except (ImportError, AttributeError): # pragma: NO COVER - tqdm = None - -if pandas is not None: - PANDAS_INSTALLED_VERSION = metadata.version("pandas") -else: - PANDAS_INSTALLED_VERSION = "0.0.0" - def _mock_client(): from google.cloud.bigquery import client @@ -1948,6 +1901,8 @@ def test_row(self): class Test_EmptyRowIterator(unittest.TestCase): + PYARROW_MINIMUM_VERSION = str(_versions_helpers._MIN_PYARROW_VERSION) + def _make_one(self): from google.cloud.bigquery.table import _EmptyRowIterator @@ -1963,15 +1918,17 @@ def 
test_to_arrow_error_if_pyarrow_is_none(self): with self.assertRaises(ValueError): row_iterator.to_arrow() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow(self): + pyarrow = pytest.importorskip("pyarrow") row_iterator = self._make_one() tbl = row_iterator.to_arrow() self.assertIsInstance(tbl, pyarrow.Table) self.assertEqual(tbl.num_rows, 0) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_iterable(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) row_iterator = self._make_one() arrow_iter = row_iterator.to_arrow_iterable() @@ -1989,8 +1946,8 @@ def test_to_dataframe_error_if_pandas_is_none(self): with self.assertRaises(ValueError): row_iterator.to_dataframe() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): + pandas = pytest.importorskip("pandas") row_iterator = self._make_one() df = row_iterator.to_dataframe(create_bqstorage_client=False) self.assertIsInstance(df, pandas.DataFrame) @@ -2002,8 +1959,8 @@ def test_to_dataframe_iterable_error_if_pandas_is_none(self): with self.assertRaises(ValueError): row_iterator.to_dataframe_iterable() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable(self): + pandas = pytest.importorskip("pandas") row_iterator = self._make_one() df_iter = row_iterator.to_dataframe_iterable() @@ -2027,8 +1984,8 @@ def test_to_geodataframe_if_geopandas_is_none(self): ): row_iterator.to_geodataframe(create_bqstorage_client=False) - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_geodataframe(self): + geopandas = pytest.importorskip("geopandas") row_iterator = self._make_one() df = row_iterator.to_geodataframe(create_bqstorage_client=False) self.assertIsInstance(df, geopandas.GeoDataFrame) @@ -2040,6 +1997,8 @@ def test_to_geodataframe(self): class TestRowIterator(unittest.TestCase): + PYARROW_MINIMUM_VERSION = str(_versions_helpers._MIN_PYARROW_VERSION) + def _class_under_test(self): from google.cloud.bigquery.table import RowIterator @@ -2367,10 +2326,8 @@ def test__should_use_bqstorage_returns_false_when_completely_cached(self): ) ) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test__should_use_bqstorage_returns_true_if_no_cached_results(self): + pytest.importorskip("google.cloud.bigquery_storage") iterator = self._make_one(first_page_response=None) # not cached result = iterator._should_use_bqstorage( bqstorage_client=None, create_bqstorage_client=True @@ -2413,10 +2370,8 @@ def fail_bqstorage_import(name, globals, locals, fromlist, level): self.assertFalse(result) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test__should_use_bqstorage_returns_false_w_warning_if_obsolete_version(self): + pytest.importorskip("google.cloud.bigquery_storage") iterator = self._make_one(first_page_response=None) # not cached patcher = mock.patch( @@ -2435,8 +2390,10 @@ def test__should_use_bqstorage_returns_false_w_warning_if_obsolete_version(self) ] assert matching_warnings, "Obsolete dependency warning not raised." 
- @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_iterable(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2536,14 +2493,17 @@ def test_to_arrow_iterable(self): [[{"name": "Bepples Phlyntstone", "age": 0}, {"name": "Dino", "age": 4}]], ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_iterable_w_bqstorage(self): + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") + from google.cloud import bigquery_storage + from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) + from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1 import reader bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) bqstorage_client._transport = mock.create_autospec( @@ -2615,8 +2575,10 @@ def test_to_arrow_iterable_w_bqstorage(self): # Don't close the client if it was passed in. bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2697,8 +2659,11 @@ def test_to_arrow(self): ], ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_nulls(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) + import pyarrow.types from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")] @@ -2730,8 +2695,10 @@ def test_to_arrow_w_nulls(self): self.assertEqual(names, ["Donkey", "Diddy", "Dixie", None]) self.assertEqual(ages, [32, 29, None, 111]) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_unknown_type(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2773,8 +2740,10 @@ def test_to_arrow_w_unknown_type(self): warning = warned[0] self.assertTrue("sport" in str(warning)) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_empty_table(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2812,11 +2781,9 @@ def test_to_arrow_w_empty_table(self): self.assertEqual(child_field.type.value_type[0].name, "name") self.assertEqual(child_field.type.value_type[1].name, "age") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self): + pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2856,11 +2823,9 @@ def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self): ) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - 
@unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self): + pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2896,14 +2861,16 @@ def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self): self.assertFalse(matches) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_w_bqstorage(self): + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) bqstorage_client._transport = mock.create_autospec( @@ -2977,13 +2944,15 @@ def test_to_arrow_w_bqstorage(self): # Don't close the client if it was passed in. bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_w_bqstorage_creates_client(self): + pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) mock_client = _mock_client() bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -3008,8 +2977,10 @@ def test_to_arrow_w_bqstorage_creates_client(self): mock_client._ensure_bqstorage_client.assert_called_once() bqstorage_client._transport.grpc_channel.close.assert_called_once() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_ensure_bqstorage_client_wo_bqstorage(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3039,13 +3010,12 @@ def mock_verify_version(raise_if_error: bool = False): self.assertIsInstance(tbl, pyarrow.Table) self.assertEqual(tbl.num_rows, 2) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_w_bqstorage_no_streams(self): + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) session = bigquery_storage.types.ReadSession() @@ -3079,12 +3049,10 @@ def test_to_arrow_w_bqstorage_no_streams(self): self.assertEqual(actual_table.schema[1].name, "colC") self.assertEqual(actual_table.schema[2].name, "colB") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - 
@mock.patch("tqdm.tqdm_gui") - @mock.patch("tqdm.notebook.tqdm") - @mock.patch("tqdm.tqdm") - def test_to_arrow_progress_bar(self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock): + def test_to_arrow_progress_bar(self): + pytest.importorskip("pyarrow") + pytest.importorskip("tqdm") + pytest.importorskip("tqdm.notebook") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3101,12 +3069,13 @@ def test_to_arrow_progress_bar(self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_moc api_request = mock.Mock(return_value={"rows": rows}) progress_bars = ( - ("tqdm", tqdm_mock), - ("tqdm_notebook", tqdm_notebook_mock), - ("tqdm_gui", tqdm_gui_mock), + ("tqdm", mock.patch("tqdm.tqdm")), + ("tqdm_notebook", mock.patch("tqdm.notebook.tqdm")), + ("tqdm_gui", mock.patch("tqdm.tqdm_gui")), ) - for progress_bar_type, progress_bar_mock in progress_bars: + for progress_bar_type, bar_patch in progress_bars: + progress_bar_mock = bar_patch.start() row_iterator = self._make_one(_mock_client(), api_request, path, schema) tbl = row_iterator.to_arrow( progress_bar_type=progress_bar_type, @@ -3129,8 +3098,8 @@ def test_to_arrow_w_pyarrow_none(self): with self.assertRaises(ValueError): row_iterator.to_arrow() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3171,8 +3140,8 @@ def test_to_dataframe_iterable(self): self.assertEqual(df_2["name"][0], "Sven") self.assertEqual(df_2["age"][0], 33) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable_with_dtypes(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3213,15 +3182,17 @@ def test_to_dataframe_iterable_with_dtypes(self): self.assertEqual(df_2["name"][0], "Sven") self.assertEqual(df_2["age"][0], 33) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_iterable_w_bqstorage(self): + pandas = pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) arrow_fields = [ pyarrow.field("colA", pyarrow.int64()), @@ -3285,13 +3256,12 @@ def test_to_dataframe_iterable_w_bqstorage(self): # Don't close the client if it was passed in. 
bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable_w_bqstorage_max_results_warning(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -3358,8 +3328,8 @@ def test_to_dataframe_iterable_error_if_pandas_is_none(self): with pytest.raises(ValueError, match="pandas"): row_iterator.to_dataframe_iterable() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3384,9 +3354,9 @@ def test_to_dataframe(self): self.assertEqual(df.name.dtype.name, "object") self.assertEqual(df.age.dtype.name, "Int64") - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_timestamp_out_of_pyarrow_bounds(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("some_timestamp", "TIMESTAMP")] @@ -3412,9 +3382,9 @@ def test_to_dataframe_timestamp_out_of_pyarrow_bounds(self): ], ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_datetime_out_of_pyarrow_bounds(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("some_datetime", "DATETIME")] @@ -3436,14 +3406,10 @@ def test_to_dataframe_datetime_out_of_pyarrow_bounds(self): [datetime.datetime(4567, 1, 1), datetime.datetime(9999, 12, 31)], ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui") - @mock.patch("tqdm.notebook.tqdm") - @mock.patch("tqdm.tqdm") - def test_to_dataframe_progress_bar( - self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock - ): + def test_to_dataframe_progress_bar(self): + pytest.importorskip("pandas") + pytest.importorskip("tqdm") + from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3460,12 +3426,13 @@ def test_to_dataframe_progress_bar( api_request = mock.Mock(return_value={"rows": rows}) progress_bars = ( - ("tqdm", tqdm_mock), - ("tqdm_notebook", tqdm_notebook_mock), - ("tqdm_gui", tqdm_gui_mock), + ("tqdm", mock.patch("tqdm.tqdm")), + ("tqdm_notebook", mock.patch("tqdm.notebook.tqdm")), + ("tqdm_gui", mock.patch("tqdm.tqdm_gui")), ) - for progress_bar_type, progress_bar_mock in progress_bars: + for progress_bar_type, bar_patch in progress_bars: + progress_bar_mock = bar_patch.start() row_iterator = self._make_one(_mock_client(), api_request, path, schema) df = row_iterator.to_dataframe( progress_bar_type=progress_bar_type, @@ -3477,9 +3444,9 @@ def test_to_dataframe_progress_bar( progress_bar_mock().close.assert_called_once() self.assertEqual(len(df), 4) - @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery._tqdm_helpers.tqdm", new=None) def test_to_dataframe_no_tqdm_no_progress_bar(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema 
import SchemaField schema = [ @@ -3505,9 +3472,9 @@ def test_to_dataframe_no_tqdm_no_progress_bar(self): self.assertEqual(len(user_warnings), 0) self.assertEqual(len(df), 4) - @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery._tqdm_helpers.tqdm", new=None) def test_to_dataframe_no_tqdm(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3539,12 +3506,12 @@ # should still work. self.assertEqual(len(df), 4) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui", new=None) # will raise TypeError on call - @mock.patch("tqdm.notebook.tqdm", new=None) # will raise TypeError on call - @mock.patch("tqdm.tqdm", new=None) # will raise TypeError on call def test_to_dataframe_tqdm_error(self): + pytest.importorskip("pandas") + pytest.importorskip("tqdm") + self.addCleanup(mock.patch.stopall) + for target in ("tqdm.tqdm_gui", "tqdm.notebook.tqdm", "tqdm.tqdm"): + mock.patch(target, new=None).start() # each call will raise TypeError from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3573,14 +3540,14 @@ # Warn that a progress bar was requested, but creating the tqdm # progress bar failed. - for warning in warned: + for warning in warned: # pragma: NO COVER self.assertIn( warning.category, - [UserWarning, DeprecationWarning, TqdmDeprecationWarning], + [UserWarning, DeprecationWarning], ) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_empty_results(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3596,8 +3563,8 @@ def test_to_dataframe_w_empty_results(self): self.assertEqual(len(df), 0) # verify the number of rows self.assertEqual(list(df), ["name", "age"]) # verify the column names - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_various_types_nullable(self): + pandas = pytest.importorskip("pandas") import datetime from google.cloud.bigquery.schema import SchemaField @@ -3637,8 +3604,9 @@ def test_to_dataframe_w_various_types_nullable(self): self.assertIsInstance(row.complete, bool) self.assertIsInstance(row.date, datetime.date) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_dtypes_mapper(self): + pandas = pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3832,9 +3800,11 @@ def test_to_dataframe_w_dtypes_mapper(self): ) self.assertEqual(df.timestamp.dtype.name, "object") - @unittest.skipIf(pandas is None, "Requires `pandas`") - @pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="") def test_to_dataframe_w_none_dtypes_mapper(self): + pandas = pytest.importorskip("pandas") + pandas_major_version = pandas.__version__[0:2] + if pandas_major_version not in ["0.", "1."]: + pytest.skip("Requires a version of pandas less than 2.0") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3888,8 +3858,8 @@ self.assertEqual(df.time.dtype.name, "object") self.assertEqual(df.timestamp.dtype.name, "datetime64[ns, UTC]") - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_unsupported_dtypes_mapper(self): + pytest.importorskip("pandas") import numpy from google.cloud.bigquery.schema import SchemaField @@ -3945,9 +3915,11 @@ def
test_to_dataframe_w_unsupported_dtypes_mapper(self): timestamp_dtype=numpy.dtype("datetime64[us]"), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="") def test_to_dataframe_column_dtypes(self): + pandas = pytest.importorskip("pandas") + pandas_major_version = pandas.__version__[0:2] + if pandas_major_version not in ["0.", "1."]: + pytest.skip("Requires a version of pandas less than 2.0") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3960,9 +3932,9 @@ def test_to_dataframe_column_dtypes(self): SchemaField("date", "DATE"), ] row_data = [ - ["1433836800000000", "420", "1.1", "1.77", "Cash", "true", "1999-12-01"], + ["1433836800000", "420", "1.1", "1.77", "Cash", "true", "1999-12-01"], [ - "1387811700000000", + "1387811700000", "2580", "17.7", "28.5", @@ -3970,7 +3942,7 @@ def test_to_dataframe_column_dtypes(self): "false", "1953-06-14", ], - ["1385565300000000", "2280", "4.4", "7.1", "Credit", "true", "1981-11-04"], + ["1385565300000", "2280", "4.4", "7.1", "Credit", "true", "1981-11-04"], ] rows = [{"f": [{"v": field} for field in row]} for row in row_data] path = "/foo" @@ -3995,13 +3967,12 @@ def test_to_dataframe_column_dtypes(self): self.assertEqual(df.complete.dtype.name, "boolean") self.assertEqual(df.date.dtype.name, "dbdate") - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_datetime_objects(self): # When converting date or timestamp values to nanosecond # precision, the result can be out of pyarrow bounds. To avoid # the error when converting to Pandas, we use object type if # necessary. - + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4044,9 +4015,10 @@ def test_to_dataframe_error_if_pandas_is_none(self): with self.assertRaises(ValueError): row_iterator.to_dataframe() - @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery.table.shapely", new=None) def test_to_dataframe_error_if_shapely_is_none(self): + pytest.importorskip("pandas") + with self.assertRaisesRegex( ValueError, re.escape( @@ -4056,8 +4028,9 @@ def test_to_dataframe_error_if_shapely_is_none(self): ): self._make_one_from_data().to_dataframe(geography_as_object=True) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_max_results_w_bqstorage_warning(self): + pytest.importorskip("pandas") + from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4092,8 +4065,8 @@ def test_to_dataframe_max_results_w_bqstorage_warning(self): ] self.assertEqual(len(matches), 1, msg="User warning was not emitted.") - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_max_results_w_explicit_bqstorage_client_warning(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4133,8 +4106,8 @@ def test_to_dataframe_max_results_w_explicit_bqstorage_client_warning(self): ) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4170,13 +4143,15 @@ def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self): self.assertFalse(matches) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pandas is None, "Requires `pandas`") - 
@@ -4170,13 +4143,15 @@ def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self):
         self.assertFalse(matches)
         mock_client._ensure_bqstorage_client.assert_not_called()
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
     def test_to_dataframe_w_bqstorage_creates_client(self):
+        pytest.importorskip("pandas")
+        pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
+        from google.cloud import bigquery_storage
+        from google.cloud.bigquery_storage_v1.services.big_query_read.transports import (
+            grpc as big_query_read_grpc_transport,
+        )
 
         mock_client = _mock_client()
         bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
@@ -4201,13 +4176,12 @@ def test_to_dataframe_w_bqstorage_creates_client(self):
         mock_client._ensure_bqstorage_client.assert_called_once()
         bqstorage_client._transport.grpc_channel.close.assert_called_once()
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
     def test_to_dataframe_w_bqstorage_no_streams(self):
+        pytest.importorskip("pandas")
+        pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
+        from google.cloud import bigquery_storage
 
         bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
         session = bigquery_storage.types.ReadSession()
@@ -4230,13 +4204,12 @@ def test_to_dataframe_w_bqstorage_no_streams(self):
         self.assertEqual(list(got), column_names)
         self.assertTrue(got.empty)
 
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_dataframe_w_bqstorage_logs_session(self):
+        pytest.importorskip("google.cloud.bigquery_storage")
+        pytest.importorskip("pandas")
+        pytest.importorskip("pyarrow")
         from google.cloud.bigquery.table import Table
+        from google.cloud import bigquery_storage
 
         bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
         session = bigquery_storage.types.ReadSession()
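Several of these tests lean on `mock.create_autospec`, which builds a mock constrained to the real `BigQueryReadClient` surface, so a typo like `create_read_sesion` raises instead of silently recording a call. A small sketch of the setup the hunks share (the helper function is invented; assumes `google-cloud-bigquery-storage` is installed):

```python
from unittest import mock

import pytest


def make_mock_read_client():
    bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
    client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
    # An empty ReadSession has no streams, so a download through this
    # client produces an empty result set.
    client.create_read_session.return_value = bigquery_storage.types.ReadSession()
    return client
```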
@@ -4255,12 +4228,11 @@ def test_to_dataframe_w_bqstorage_logs_session(self):
             "with BQ Storage API session 'projects/test-proj/locations/us/sessions/SOMESESSION'."
         )
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
-    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_dataframe_w_bqstorage_empty_streams(self):
+        pytest.importorskip("google.cloud.bigquery_storage")
+        pytest.importorskip("pandas")
+        pyarrow = pytest.importorskip("pyarrow")
+        from google.cloud import bigquery_storage
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
         from google.cloud.bigquery_storage_v1 import reader
@@ -4310,15 +4282,17 @@ def test_to_dataframe_w_bqstorage_empty_streams(self):
         self.assertEqual(list(got), column_names)
         self.assertTrue(got.empty)
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
-    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_dataframe_w_bqstorage_nonempty(self):
+        pytest.importorskip("google.cloud.bigquery_storage")
+        pytest.importorskip("pandas")
+        pyarrow = pytest.importorskip("pyarrow")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
+        from google.cloud import bigquery_storage
         from google.cloud.bigquery_storage_v1 import reader
+        from google.cloud.bigquery_storage_v1.services.big_query_read.transports import (
+            grpc as big_query_read_grpc_transport,
+        )
 
         arrow_fields = [
             pyarrow.field("colA", pyarrow.int64()),
@@ -4390,12 +4364,10 @@ def test_to_dataframe_w_bqstorage_nonempty(self):
         # Don't close the client if it was passed in.
         bqstorage_client._transport.grpc_channel.close.assert_not_called()
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
-    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self):
+        bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
+        pytest.importorskip("pandas")
+        pyarrow = pytest.importorskip("pyarrow")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
         from google.cloud.bigquery_storage_v1 import reader
@@ -4444,14 +4416,11 @@ def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self):
         self.assertEqual(len(got.index), total_rows)
         self.assertTrue(got.index.is_unique)
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
-    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
-    @unittest.skipIf(tqdm is None, "Requires `tqdm`")
-    @mock.patch("tqdm.tqdm")
-    def test_to_dataframe_w_bqstorage_updates_progress_bar(self, tqdm_mock):
+    def test_to_dataframe_w_bqstorage_updates_progress_bar(self):
+        bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
+        pytest.importorskip("pandas")
+        pyarrow = pytest.importorskip("pyarrow")
+        pytest.importorskip("tqdm")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
         from google.cloud.bigquery_storage_v1 import reader
@@ -4507,28 +4476,27 @@ def blocking_to_arrow(*args, **kwargs):
             selected_fields=schema,
         )
 
-        row_iterator.to_dataframe(
-            bqstorage_client=bqstorage_client, progress_bar_type="tqdm"
-        )
+        with mock.patch("tqdm.tqdm") as tqdm_mock:
+            row_iterator.to_dataframe(
+                bqstorage_client=bqstorage_client, progress_bar_type="tqdm"
+            )
+
+            # Make sure that this test updated the progress bar once per page from
+            # each stream.
+            total_pages = len(streams) * len(mock_pages)
+            expected_total_rows = total_pages * len(page_items)
+            progress_updates = [
+                args[0] for args, kwargs in tqdm_mock().update.call_args_list
+            ]
+            # Should have sent >1 update due to delay in blocking_to_arrow.
+            self.assertGreater(len(progress_updates), 1)
+            self.assertEqual(sum(progress_updates), expected_total_rows)
+            tqdm_mock().close.assert_called_once()
 
-        # Make sure that this test updated the progress bar once per page from
-        # each stream.
-        total_pages = len(streams) * len(mock_pages)
-        expected_total_rows = total_pages * len(page_items)
-        progress_updates = [
-            args[0] for args, kwargs in tqdm_mock().update.call_args_list
-        ]
-        # Should have sent >1 update due to delay in blocking_to_arrow.
-        self.assertGreater(len(progress_updates), 1)
-        self.assertEqual(sum(progress_updates), expected_total_rows)
-        tqdm_mock().close.assert_called_once()
-
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
-    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_dataframe_w_bqstorage_exits_on_keyboardinterrupt(self):
+        bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
+        pytest.importorskip("pandas")
+        pyarrow = pytest.importorskip("pyarrow")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
         from google.cloud.bigquery_storage_v1 import reader
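Moving the `tqdm.tqdm` patch from a decorator into a `with` block works because a patch target becomes a `MagicMock` whose `return_value` is shared across calls: `tqdm_mock()` in the assertions is the very object the code under test received when it constructed its progress bar. A self-contained sketch of the idiom (assumes `tqdm` is installed; `report_progress` is invented):

```python
from unittest import mock

import tqdm


def report_progress(total):
    # Stand-in for the code under test: create a bar, update it, close it.
    bar = tqdm.tqdm(total=total)
    for _ in range(total):
        bar.update(1)
    bar.close()


def test_progress_updates():
    with mock.patch("tqdm.tqdm") as tqdm_mock:
        report_progress(3)

        # tqdm_mock() returns the shared return_value instance.
        updates = [args[0] for args, kwargs in tqdm_mock().update.call_args_list]
        assert sum(updates) == 3
        tqdm_mock().close.assert_called_once()
```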
@@ -4611,8 +4579,8 @@ def blocking_to_arrow(*args, **kwargs):
         # should have been set.
         self.assertLessEqual(mock_page.to_dataframe.call_count, 2)
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
     def test_to_dataframe_tabledata_list_w_multiple_pages_return_unique_index(self):
+        pandas = pytest.importorskip("pandas")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
 
@@ -4643,11 +4611,10 @@ def test_to_dataframe_tabledata_list_w_multiple_pages_return_unique_index(self):
         self.assertEqual(df.name.dtype.name, "object")
         self.assertTrue(df.index.is_unique)
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
     def test_to_dataframe_w_bqstorage_raises_auth_error(self):
+        pytest.importorskip("google.cloud.bigquery_storage")
+        pytest.importorskip("pandas")
+        from google.cloud import bigquery_storage
         from google.cloud.bigquery import table as mut
 
         bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
@@ -4665,10 +4632,8 @@ def test_to_dataframe_w_bqstorage_raises_auth_error(self):
         with pytest.raises(google.api_core.exceptions.Forbidden):
             row_iterator.to_dataframe(bqstorage_client=bqstorage_client)
 
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
     def test_to_dataframe_w_bqstorage_partition(self):
+        bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
 
@@ -4685,10 +4650,8 @@ def test_to_dataframe_w_bqstorage_partition(self):
         with pytest.raises(ValueError):
             row_iterator.to_dataframe(bqstorage_client)
 
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
     def test_to_dataframe_w_bqstorage_snapshot(self):
+        bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
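A detail worth noting, since the hunks mix two idioms: for a dotted name, `pytest.importorskip` returns the named submodule itself, so binding its result (`bigquery_storage = pytest.importorskip(...)`) is interchangeable with the explicit `from google.cloud import bigquery_storage` that other tests add afterwards. Both names refer to the same module object:

```python
import pytest


def test_importorskip_binding_matches_import():
    mod = pytest.importorskip("google.cloud.bigquery_storage")
    from google.cloud import bigquery_storage

    assert mod is bigquery_storage
```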
@@ -4705,15 +4668,17 @@ def test_to_dataframe_w_bqstorage_snapshot(self):
         with pytest.raises(ValueError):
             row_iterator.to_dataframe(bqstorage_client)
 
-    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(
-        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
-    )
-    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_dataframe_concat_categorical_dtype_w_pyarrow(self):
+        pytest.importorskip("google.cloud.bigquery_storage")
+        pandas = pytest.importorskip("pandas")
+        pyarrow = pytest.importorskip("pyarrow")
+        from google.cloud import bigquery_storage
         from google.cloud.bigquery import schema
         from google.cloud.bigquery import table as mut
         from google.cloud.bigquery_storage_v1 import reader
+        from google.cloud.bigquery_storage_v1.services.big_query_read.transports import (
+            grpc as big_query_read_grpc_transport,
+        )
 
         arrow_fields = [
             # Not alphabetical to test column order.
@@ -4818,8 +4783,9 @@ def test_to_dataframe_concat_categorical_dtype_w_pyarrow(self):
         # Don't close the client if it was passed in.
         bqstorage_client._transport.grpc_channel.close.assert_not_called()
 
-    @unittest.skipIf(geopandas is None, "Requires `geopandas`")
     def test_to_dataframe_geography_as_object(self):
+        pandas = pytest.importorskip("pandas")
+        pytest.importorskip("geopandas")
         row_iterator = self._make_one_from_data(
             (("name", "STRING"), ("geog", "GEOGRAPHY")),
             (
@@ -4853,8 +4819,8 @@ def test_to_geodataframe_error_if_geopandas_is_none(self):
         ):
             self._make_one_from_data().to_geodataframe()
 
-    @unittest.skipIf(geopandas is None, "Requires `geopandas`")
     def test_to_geodataframe(self):
+        geopandas = pytest.importorskip("geopandas")
         row_iterator = self._make_one_from_data(
             (("name", "STRING"), ("geog", "GEOGRAPHY")),
             (
@@ -4883,8 +4849,8 @@ def test_to_geodataframe(self):
         self.assertEqual(df.geog.crs.srs, "EPSG:4326")
         self.assertEqual(df.geog.crs.name, "WGS 84")
 
-    @unittest.skipIf(geopandas is None, "Requires `geopandas`")
     def test_to_geodataframe_ambiguous_geog(self):
+        pytest.importorskip("geopandas")
         row_iterator = self._make_one_from_data(
             (("name", "STRING"), ("geog", "GEOGRAPHY"), ("geog2", "GEOGRAPHY")), ()
         )
@@ -4898,8 +4864,8 @@ def test_to_geodataframe_ambiguous_geog(self):
         ):
             row_iterator.to_geodataframe(create_bqstorage_client=False)
 
-    @unittest.skipIf(geopandas is None, "Requires `geopandas`")
     def test_to_geodataframe_bad_geography_column(self):
+        pytest.importorskip("geopandas")
        row_iterator = self._make_one_from_data(
             (("name", "STRING"), ("geog", "GEOGRAPHY"), ("geog2", "GEOGRAPHY")), ()
         )
@@ -4914,8 +4880,8 @@ def test_to_geodataframe_bad_geography_column(self):
                 create_bqstorage_client=False, geography_column="xxx"
             )
 
-    @unittest.skipIf(geopandas is None, "Requires `geopandas`")
     def test_to_geodataframe_no_geog(self):
+        pytest.importorskip("geopandas")
         row_iterator = self._make_one_from_data(
             (("name", "STRING"), ("geog", "STRING")), ()
         )
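For the GEOGRAPHY tests above: BigQuery returns GEOGRAPHY values as WKT strings, and `geography_as_object=True` (like `to_geodataframe`) parses them into shapely geometries. A tiny sketch of the parsing step those assertions rely on (shapely is pulled in as a geopandas dependency):

```python
import pytest


def test_wkt_parses_to_geometry():
    pytest.importorskip("geopandas")
    wkt = pytest.importorskip("shapely.wkt")

    # "POINT (0 0)" is the same WKT literal the tests feed through
    # row data; loads() yields a shapely geometry with coordinates.
    point = wkt.loads("POINT (0 0)")
    assert (point.x, point.y) == (0.0, 0.0)
```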
@@ -4928,8 +4894,9 @@ def test_to_geodataframe_no_geog(self):
         ):
             row_iterator.to_geodataframe(create_bqstorage_client=False)
 
-    @unittest.skipIf(geopandas is None, "Requires `geopandas`")
     def test_to_geodataframe_w_geography_column(self):
+        geopandas = pytest.importorskip("geopandas")
+        pandas = pytest.importorskip("pandas")
         row_iterator = self._make_one_from_data(
             (("name", "STRING"), ("geog", "GEOGRAPHY"), ("geog2", "GEOGRAPHY")),
             (
@@ -4974,7 +4941,6 @@ def test_to_geodataframe_w_geography_column(self):
             ["0.0", "0.0", "0.0"],
         )
 
-    @unittest.skipIf(geopandas is None, "Requires `geopandas`")
     @mock.patch("google.cloud.bigquery.table.RowIterator.to_dataframe")
     def test_rowiterator_to_geodataframe_delegation(self, to_dataframe):
         """
@@ -4983,6 +4949,8 @@ def test_rowiterator_to_geodataframe_delegation(self, to_dataframe):
         This test just demonstrates that. We don't need to test all the
         variations, which are tested for to_dataframe.
         """
+        pandas = pytest.importorskip("pandas")
+        geopandas = pytest.importorskip("geopandas")
         import numpy
         from shapely import wkt
 
@@ -5676,9 +5644,6 @@ def test_from_api_repr_only_foreign_keys_resource(self):
         self.assertIsNotNone(instance.foreign_keys)
 
 
-@pytest.mark.skipif(
-    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
-)
 @pytest.mark.parametrize(
     "table_path",
     (
@@ -5689,6 +5654,7 @@ def test_from_api_repr_only_foreign_keys_resource(self):
     ),
 )
 def test_table_reference_to_bqstorage_v1_stable(table_path):
+    pytest.importorskip("google.cloud.bigquery_storage")
     from google.cloud.bigquery import table as mut
 
     expected = "projects/my-project/datasets/my_dataset/tables/my_table"
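The final parametrized test asserts that every accepted `table_path` spelling normalizes to the same BigQuery Storage resource name. The conversion it exercises, in isolation and reusing the expected string from the test itself:

```python
import pytest


def test_to_bqstorage_resource_path():
    pytest.importorskip("google.cloud.bigquery_storage")
    from google.cloud.bigquery import table as mut

    # TableReference.to_bqstorage() renders the Storage API path for
    # a table, regardless of which input spelling produced the ref.
    ref = mut.TableReference.from_string("my-project.my_dataset.my_table")
    assert (
        ref.to_bqstorage()
        == "projects/my-project/datasets/my_dataset/tables/my_table"
    )
```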