From 3928aa0680ec03addae1f792c73abb5c9dc8586f Mon Sep 17 00:00:00 2001 From: cojenco Date: Mon, 18 Mar 2024 14:43:40 -0700 Subject: [PATCH] feat: add support for soft delete (#1229) * feat: add support for soft delete * add restore, get object, list_objects, unit tests * integration test * update restore_blob * SoftDeletePolicy data class * update docstrings; address comments --- google/cloud/storage/_helpers.py | 10 ++ google/cloud/storage/blob.py | 36 +++++ google/cloud/storage/bucket.py | 230 +++++++++++++++++++++++++++++++ google/cloud/storage/client.py | 10 ++ tests/system/test_bucket.py | 54 ++++++++ tests/unit/test_blob.py | 49 +++++++ tests/unit/test_bucket.py | 178 ++++++++++++++++++++++++ tests/unit/test_client.py | 3 + 8 files changed, 570 insertions(+) diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py index 6f8702050..b90bf4eb2 100644 --- a/google/cloud/storage/_helpers.py +++ b/google/cloud/storage/_helpers.py @@ -225,6 +225,7 @@ def reload( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + soft_deleted=None, ): """Reload properties from Cloud Storage. @@ -270,6 +271,13 @@ def reload( :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` + + :type soft_deleted: bool + :param soft_deleted: + (Optional) If True, looks for a soft-deleted object. Will only return + the object metadata if the object exists and is in a soft-deleted state. + :attr:`generation` is required to be set on the blob if ``soft_deleted`` is set to True. + See: https://cloud.google.com/storage/docs/soft-delete """ client = self._require_client(client) query_params = self._query_params @@ -283,6 +291,8 @@ def reload( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, ) + if soft_deleted is not None: + query_params["softDeleted"] = soft_deleted headers = self._encryption_headers() _add_etag_match_headers( headers, if_etag_match=if_etag_match, if_etag_not_match=if_etag_not_match diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 6cfa56190..9c0cf33ab 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -650,6 +650,7 @@ def exists( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + soft_deleted=None, ): """Determines whether or not this blob exists. @@ -694,6 +695,13 @@ def exists( :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` + :type soft_deleted: bool + :param soft_deleted: + (Optional) If True, looks for a soft-deleted object. Will only return True + if the object exists and is in a soft-deleted state. + :attr:`generation` is required to be set on the blob if ``soft_deleted`` is set to True. + See: https://cloud.google.com/storage/docs/soft-delete + :rtype: bool :returns: True if the blob exists in Cloud Storage. """ @@ -702,6 +710,8 @@ def exists( # minimize the returned payload. query_params = self._query_params query_params["fields"] = "name" + if soft_deleted is not None: + query_params["softDeleted"] = soft_deleted _add_generation_match_parameters( query_params, @@ -4700,6 +4710,32 @@ def retention(self): info = self._properties.get("retention", {}) return Retention.from_api_repr(info, self) + @property + def soft_delete_time(self): + """If this object has been soft-deleted, returns the time at which it became soft-deleted. 
+ + :rtype: :class:`datetime.datetime` or ``NoneType`` + :returns: + (readonly) The time that the object became soft-deleted. + Note this property is only set for soft-deleted objects. + """ + soft_delete_time = self._properties.get("softDeleteTime") + if soft_delete_time is not None: + return _rfc3339_nanos_to_datetime(soft_delete_time) + + @property + def hard_delete_time(self): + """If this object has been soft-deleted, returns the time at which it will be permanently deleted. + + :rtype: :class:`datetime.datetime` or ``NoneType`` + :returns: + (readonly) The time that the object will be permanently deleted. + Note this property is only set for soft-deleted objects. + """ + hard_delete_time = self._properties.get("hardDeleteTime") + if hard_delete_time is not None: + return _rfc3339_nanos_to_datetime(hard_delete_time) + def _get_host_name(connection): """Returns the host name from the given connection. diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index caa3ddd57..5855c4c8a 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1188,6 +1188,7 @@ def get_blob( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + soft_deleted=None, **kwargs, ): """Get a blob object by name. @@ -1248,6 +1249,13 @@ def get_blob( :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` + :type soft_deleted: bool + :param soft_deleted: + (Optional) If True, looks for a soft-deleted object. Will only return + the object metadata if the object exists and is in a soft-deleted state. + Object ``generation`` is required if ``soft_deleted`` is set to True. + See: https://cloud.google.com/storage/docs/soft-delete + :param kwargs: Keyword arguments to pass to the :class:`~google.cloud.storage.blob.Blob` constructor. @@ -1275,6 +1283,7 @@ def get_blob( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, retry=retry, + soft_deleted=soft_deleted, ) except NotFound: return None @@ -1297,6 +1306,7 @@ def list_blobs( timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, match_glob=None, + soft_deleted=None, ): """Return an iterator used to find blobs in the bucket. @@ -1378,6 +1388,13 @@ def list_blobs( The string value must be UTF-8 encoded. See: https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob + :type soft_deleted: bool + :param soft_deleted: + (Optional) If true, only soft-deleted objects will be listed as distinct results in order of increasing + generation number. This parameter can only be used successfully if the bucket has a soft delete policy. + Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See: + https://cloud.google.com/storage/docs/soft-delete + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` in this bucket matching the arguments. @@ -1398,6 +1415,7 @@ def list_blobs( timeout=timeout, retry=retry, match_glob=match_glob, + soft_deleted=soft_deleted, ) def list_notifications( @@ -2060,6 +2078,110 @@ def rename_blob( ) return new_blob + def restore_blob( + self, + blob_name, + client=None, + generation=None, + copy_source_acl=None, + projection=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ): + """Restores a soft-deleted object. 
+
+        If :attr:`user_project` is set on the bucket, bills the API request to that project.
+
+        See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/objects/restore)
+
+        :type blob_name: str
+        :param blob_name: The name of the blob to be restored.
+
+        :type client: :class:`~google.cloud.storage.client.Client`
+        :param client: (Optional) The client to use. If not passed, falls back
+            to the ``client`` stored on the current bucket.
+
+        :type generation: long
+        :param generation: (Optional) If present, selects a specific revision of this object.
+
+        :type copy_source_acl: bool
+        :param copy_source_acl: (Optional) If true, copy the soft-deleted object's access controls.
+
+        :type projection: str
+        :param projection: (Optional) Specifies the set of properties to return.
+            If used, must be 'full' or 'noAcl'.
+
+        :type if_generation_match: long
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
+
+        :type if_generation_not_match: long
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
+
+        :type if_metageneration_not_match: long
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
+
+        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
+        :param retry:
+            (Optional) How to retry the RPC.
+            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, which
+            means that only restore operations with ``if_generation_match`` or
+            ``generation`` set will be retried.
+
+            Users can configure non-default retry behavior. A ``None`` value will
+            disable retries. A ``DEFAULT_RETRY`` value will enable retries
+            even if restore operations are not guaranteed to be idempotent.
+            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+
+        :rtype: :class:`google.cloud.storage.blob.Blob`
+        :returns: The restored Blob.
+        """
+        client = self._require_client(client)
+        query_params = {}
+
+        if self.user_project is not None:
+            query_params["userProject"] = self.user_project
+        if generation is not None:
+            query_params["generation"] = generation
+        if copy_source_acl is not None:
+            query_params["copySourceAcl"] = copy_source_acl
+        if projection is not None:
+            query_params["projection"] = projection
+
+        _add_generation_match_parameters(
+            query_params,
+            if_generation_match=if_generation_match,
+            if_generation_not_match=if_generation_not_match,
+            if_metageneration_match=if_metageneration_match,
+            if_metageneration_not_match=if_metageneration_not_match,
+        )
+
+        blob = Blob(bucket=self, name=blob_name)
+        api_response = client._post_resource(
+            f"{blob.path}/restore",
+            None,
+            query_params=query_params,
+            timeout=timeout,
+            retry=retry,
+        )
+        blob._set_properties(api_response)
+        return blob
+
     @property
     def cors(self):
         """Retrieve or set CORS policies configured for this bucket.
@@ -2227,6 +2349,18 @@ def iam_configuration(self):
         info = self._properties.get("iamConfiguration", {})
         return IAMConfiguration.from_api_repr(info, self)

+    @property
+    def soft_delete_policy(self):
+        """Retrieve the soft delete policy for this bucket.
+
+        See https://cloud.google.com/storage/docs/soft-delete
+
+        :rtype: :class:`SoftDeletePolicy`
+        :returns: an instance for managing the bucket's soft delete policy.
+        """
+        policy = self._properties.get("softDeletePolicy", {})
+        return SoftDeletePolicy.from_api_repr(policy, self)
+
     @property
     def lifecycle_rules(self):
         """Retrieve or set lifecycle rules configured for this bucket.
@@ -3432,6 +3566,102 @@ def generate_signed_url(
         )


+class SoftDeletePolicy(dict):
+    """Map a bucket's soft delete policy.
+
+    See https://cloud.google.com/storage/docs/soft-delete
+
+    :type bucket: :class:`Bucket`
+    :param bucket: Bucket for which this instance is the policy.
+
+    :type retention_duration_seconds: int
+    :param retention_duration_seconds:
+        (Optional) The period of time in seconds that soft-deleted objects in the bucket
+        will be retained and cannot be permanently deleted.
+
+    :type effective_time: :class:`datetime.datetime`
+    :param effective_time:
+        (Optional) When the bucket's soft delete policy is effective.
+        This value should normally only be set by the back-end API.
+    """
+
+    def __init__(self, bucket, **kw):
+        data = {}
+        retention_duration_seconds = kw.get("retention_duration_seconds")
+        data["retentionDurationSeconds"] = retention_duration_seconds
+
+        effective_time = kw.get("effective_time")
+        if effective_time is not None:
+            effective_time = _datetime_to_rfc3339(effective_time)
+        data["effectiveTime"] = effective_time
+
+        super().__init__(data)
+        self._bucket = bucket
+
+    @classmethod
+    def from_api_repr(cls, resource, bucket):
+        """Factory: construct instance from resource.
+
+        :type resource: dict
+        :param resource: mapping as returned from API call.
+
+        :type bucket: :class:`Bucket`
+        :param bucket: Bucket for which this instance is the policy.
+
+        :rtype: :class:`SoftDeletePolicy`
+        :returns: Instance created from resource.
+        """
+        instance = cls(bucket)
+        instance.update(resource)
+        return instance
+
+    @property
+    def bucket(self):
+        """Bucket for which this instance is the policy.
+
+        :rtype: :class:`Bucket`
+        :returns: the instance's bucket.
+        """
+        return self._bucket
+
+    @property
+    def retention_duration_seconds(self):
+        """Get the retention duration of the bucket's soft delete policy.
+
+        :rtype: int or ``NoneType``
+        :returns: The period of time in seconds that soft-deleted objects in the bucket
+            will be retained and cannot be permanently deleted, or ``None`` if the
+            property is not set.
+        """
+        duration = self.get("retentionDurationSeconds")
+        if duration is not None:
+            return int(duration)
+
+    @retention_duration_seconds.setter
+    def retention_duration_seconds(self, value):
+        """Set the retention duration of the bucket's soft delete policy.
+
+        :type value: int
+        :param value:
+            The period of time in seconds that soft-deleted objects in the bucket
+            will be retained and cannot be permanently deleted.
+        """
+        self["retentionDurationSeconds"] = value
+        self.bucket._patch_property("softDeletePolicy", self)
+
+    @property
+    def effective_time(self):
+        """Get the effective time of the bucket's soft delete policy.
+
+        :rtype: datetime.datetime or ``NoneType``
+        :returns: point-in-time at which the bucket's soft delete policy is
+            effective, or ``None`` if the property is not set.
+ """ + timestamp = self.get("effectiveTime") + if timestamp is not None: + return _rfc3339_nanos_to_datetime(timestamp) + + def _raise_if_len_differs(expected_len, **generation_match_args): """ Raise an error if any generation match argument diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index e051b9750..73351f1f7 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -1184,6 +1184,7 @@ def list_blobs( timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, match_glob=None, + soft_deleted=None, ): """Return an iterator used to find blobs in the bucket. @@ -1282,6 +1283,12 @@ def list_blobs( The string value must be UTF-8 encoded. See: https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob + soft_deleted (bool): + (Optional) If true, only soft-deleted objects will be listed as distinct results in order of increasing + generation number. This parameter can only be used successfully if the bucket has a soft delete policy. + Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See: + https://cloud.google.com/storage/docs/soft-delete + Returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` in this bucket matching the arguments. The RPC call @@ -1318,6 +1325,9 @@ def list_blobs( if fields is not None: extra_params["fields"] = fields + if soft_deleted is not None: + extra_params["softDeleted"] = soft_deleted + if bucket.user_project is not None: extra_params["userProject"] = bucket.user_project diff --git a/tests/system/test_bucket.py b/tests/system/test_bucket.py index 19b21bac2..0fb25d54e 100644 --- a/tests/system/test_bucket.py +++ b/tests/system/test_bucket.py @@ -1141,3 +1141,57 @@ def test_config_autoclass_w_existing_bucket( assert ( bucket.autoclass_terminal_storage_class_update_time != previous_tsc_update_time ) + + +def test_soft_delete_policy( + storage_client, + buckets_to_delete, +): + from google.cloud.storage.bucket import SoftDeletePolicy + + # Create a bucket with soft delete policy. + duration_secs = 7 * 86400 + bucket = storage_client.bucket(_helpers.unique_name("w-soft-delete")) + bucket.soft_delete_policy.retention_duration_seconds = duration_secs + bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket) + buckets_to_delete.append(bucket) + + policy = bucket.soft_delete_policy + assert isinstance(policy, SoftDeletePolicy) + assert policy.retention_duration_seconds == duration_secs + assert isinstance(policy.effective_time, datetime.datetime) + + # Insert an object and get object metadata prior soft-deleted. + payload = b"DEADBEEF" + blob_name = _helpers.unique_name("soft-delete") + blob = bucket.blob(blob_name) + blob.upload_from_string(payload) + + blob = bucket.get_blob(blob_name) + gen = blob.generation + assert blob.soft_delete_time is None + assert blob.hard_delete_time is None + + # Delete the object to enter soft-deleted state. + blob.delete() + + iter_default = bucket.list_blobs() + assert len(list(iter_default)) == 0 + iter_w_soft_delete = bucket.list_blobs(soft_deleted=True) + assert len(list(iter_w_soft_delete)) > 0 + + # Get the soft-deleted object. + soft_deleted_blob = bucket.get_blob(blob_name, generation=gen, soft_deleted=True) + assert soft_deleted_blob.soft_delete_time is not None + assert soft_deleted_blob.hard_delete_time is not None + + # Restore the soft-deleted object. 
+ restored_blob = bucket.restore_blob(blob_name, generation=gen) + assert restored_blob.exists() is True + assert restored_blob.generation != gen + + # Patch the soft delete policy on an existing bucket. + new_duration_secs = 10 * 86400 + bucket.soft_delete_policy.retention_duration_seconds = new_duration_secs + bucket.patch() + assert bucket.soft_delete_policy.retention_duration_seconds == new_duration_secs diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 3bc775499..98d744d6c 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -784,6 +784,32 @@ def test_exists_hit_w_generation_w_retry(self): _target_object=None, ) + def test_exists_hit_w_generation_w_soft_deleted(self): + blob_name = "blob-name" + generation = 123456 + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client) + blob = self._make_one(blob_name, bucket=bucket, generation=generation) + + self.assertTrue(blob.exists(retry=None, soft_deleted=True)) + + expected_query_params = { + "fields": "name", + "generation": generation, + "softDeleted": True, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=None, + _target_object=None, + ) + def test_exists_w_etag_match(self): blob_name = "blob-name" etag = "kittens" @@ -5827,6 +5853,29 @@ def test_custom_time_unset(self): blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.custom_time) + def test_soft_hard_delete_time_getter(self): + from google.cloud._helpers import _RFC3339_MICROS + + BLOB_NAME = "blob-name" + bucket = _Bucket() + soft_timstamp = datetime.datetime(2024, 1, 5, 20, 34, 37, tzinfo=_UTC) + soft_delete = soft_timstamp.strftime(_RFC3339_MICROS) + hard_timstamp = datetime.datetime(2024, 1, 15, 20, 34, 37, tzinfo=_UTC) + hard_delete = hard_timstamp.strftime(_RFC3339_MICROS) + properties = { + "softDeleteTime": soft_delete, + "hardDeleteTime": hard_delete, + } + blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) + self.assertEqual(blob.soft_delete_time, soft_timstamp) + self.assertEqual(blob.hard_delete_time, hard_timstamp) + + def test_soft_hard_delte_time_unset(self): + BUCKET = object() + blob = self._make_one("blob-name", bucket=BUCKET) + self.assertIsNone(blob.soft_delete_time) + self.assertIsNone(blob.hard_delete_time) + def test_from_string_w_valid_uri(self): from google.cloud.storage.blob import Blob diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index a5d276391..7f25fee05 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -982,6 +982,40 @@ def test_get_blob_hit_w_user_project(self): _target_object=blob, ) + def test_get_blob_hit_w_generation_w_soft_deleted(self): + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "blob-name" + generation = 1512565576797178 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name) + + blob = bucket.get_blob(blob_name, generation=generation, soft_deleted=True) + + self.assertIsInstance(blob, Blob) + self.assertIs(blob.bucket, bucket) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob.generation, generation) + + expected_path = f"/b/{name}/o/{blob_name}" + expected_query_params = { + 
"generation": generation, + "projection": "noAcl", + "softDeleted": True, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=blob, + ) + def test_get_blob_hit_w_generation_w_timeout(self): from google.cloud.storage.blob import Blob @@ -1143,6 +1177,7 @@ def test_list_blobs_w_defaults(self): expected_versions = None expected_projection = "noAcl" expected_fields = None + soft_deleted = None client.list_blobs.assert_called_once_with( bucket, max_results=expected_max_results, @@ -1158,6 +1193,7 @@ def test_list_blobs_w_defaults(self): timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, match_glob=expected_match_glob, + soft_deleted=soft_deleted, ) def test_list_blobs_w_explicit(self): @@ -1171,6 +1207,7 @@ def test_list_blobs_w_explicit(self): end_offset = "g" include_trailing_delimiter = True versions = True + soft_deleted = True projection = "full" fields = "items/contentLanguage,nextPageToken" bucket = self._make_one(client=None, name=name) @@ -1194,6 +1231,7 @@ def test_list_blobs_w_explicit(self): timeout=timeout, retry=retry, match_glob=match_glob, + soft_deleted=soft_deleted, ) self.assertIs(iterator, other_client.list_blobs.return_value) @@ -1209,6 +1247,7 @@ def test_list_blobs_w_explicit(self): expected_versions = versions expected_projection = projection expected_fields = fields + expected_soft_deleted = soft_deleted other_client.list_blobs.assert_called_once_with( bucket, max_results=expected_max_results, @@ -1224,6 +1263,7 @@ def test_list_blobs_w_explicit(self): timeout=timeout, retry=retry, match_glob=expected_match_glob, + soft_deleted=expected_soft_deleted, ) def test_list_notifications_w_defaults(self): @@ -3076,6 +3116,41 @@ def test_object_retention_mode_getter(self): bucket = self._make_one(properties=properties) self.assertEqual(bucket.object_retention_mode, mode) + def test_soft_delete_policy_getter_w_entry(self): + from google.cloud.storage.bucket import SoftDeletePolicy + from google.cloud._helpers import _datetime_to_rfc3339 + + seconds = 86400 * 10 # 10 days + effective_time = _NOW(_UTC) + properties = { + "softDeletePolicy": { + "retentionDurationSeconds": seconds, + "effectiveTime": _datetime_to_rfc3339(effective_time), + } + } + bucket = self._make_one(properties=properties) + + policy = SoftDeletePolicy( + bucket=bucket, + retention_duration_seconds=seconds, + effective_time=effective_time, + ) + self.assertIsInstance(bucket.soft_delete_policy, SoftDeletePolicy) + self.assertEqual(bucket.soft_delete_policy, policy) + self.assertEqual(bucket.soft_delete_policy.retention_duration_seconds, seconds) + self.assertEqual(bucket.soft_delete_policy.effective_time, effective_time) + + def test_soft_delete_policy_setter(self): + bucket = self._make_one() + policy = bucket.soft_delete_policy + self.assertIsNone(policy.retention_duration_seconds) + self.assertIsNone(policy.effective_time) + + seconds = 86400 * 10 # 10 days + bucket.soft_delete_policy.retention_duration_seconds = seconds + self.assertTrue("softDeletePolicy" in bucket._changes) + self.assertEqual(bucket.soft_delete_policy.retention_duration_seconds, seconds) + def test_configure_website_defaults(self): NAME = "name" UNSET = {"website": {"mainPageSuffix": None, "notFoundPage": None}} @@ -4028,6 +4103,109 @@ def test_lock_retention_policy_w_user_project(self): _target_object=bucket, ) + def 
test_restore_blob_w_defaults(self): + bucket_name = "restore_bucket" + blob_name = "restore_blob" + generation = 123456 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) + + restored_blob = bucket.restore_blob(blob_name) + + self.assertIs(restored_blob.bucket, bucket) + self.assertEqual(restored_blob.name, blob_name) + expected_path = f"/b/{bucket_name}/o/{blob_name}/restore" + expected_data = None + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + + def test_restore_blob_w_explicit(self): + user_project = "user-project-123" + bucket_name = "restore_bucket" + blob_name = "restore_blob" + generation = 123456 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = self._make_one( + client=client, name=bucket_name, user_project=user_project + ) + if_generation_match = 123456 + if_generation_not_match = 654321 + if_metageneration_match = 1 + if_metageneration_not_match = 2 + projection = "noAcl" + + restored_blob = bucket.restore_blob( + blob_name, + client=client, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, + projection=projection, + ) + + self.assertEqual(restored_blob.name, blob_name) + self.assertEqual(restored_blob.bucket, bucket) + expected_path = f"/b/{bucket_name}/o/{blob_name}/restore" + expected_data = None + expected_query_params = { + "userProject": user_project, + "projection": projection, + "ifGenerationMatch": if_generation_match, + "ifGenerationNotMatch": if_generation_not_match, + "ifMetagenerationMatch": if_metageneration_match, + "ifMetagenerationNotMatch": if_metageneration_not_match, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + + def test_restore_blob_explicit_copy_source_acl(self): + bucket_name = "restore_bucket" + blob_name = "restore" + generation = 123456 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) + copy_source_acl = False + + restored_blob = bucket.restore_blob( + blob_name, + copy_source_acl=copy_source_acl, + generation=generation, + ) + + self.assertEqual(restored_blob.name, blob_name) + self.assertEqual(restored_blob.bucket, bucket) + expected_path = f"/b/{bucket_name}/o/{blob_name}/restore" + expected_data = None + expected_query_params = { + "copySourceAcl": False, + "generation": generation, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + def test_generate_signed_url_w_invalid_version(self): expiration = "2014-10-16T20:34:37.000Z" client = self._make_client() diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py 
index 0adc56e1d..c5da9e4cf 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -2015,6 +2015,7 @@ def test_list_blobs_w_explicit_w_user_project(self): start_offset = "c" end_offset = "g" include_trailing_delimiter = True + soft_deleted = False versions = True projection = "full" page_size = 2 @@ -2047,6 +2048,7 @@ def test_list_blobs_w_explicit_w_user_project(self): timeout=timeout, retry=retry, match_glob=match_glob, + soft_deleted=soft_deleted, ) self.assertIs(iterator, client._list_resource.return_value) @@ -2068,6 +2070,7 @@ def test_list_blobs_w_explicit_w_user_project(self): "versions": versions, "fields": fields, "userProject": user_project, + "softDeleted": soft_deleted, } expected_page_start = _blobs_page_start expected_page_size = 2
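
Usage sketch (editor's note, not part of the patch): the snippet below strings together the soft delete surface this change adds -- setting a bucket's soft delete policy, listing and fetching soft-deleted objects, and restoring one. It assumes default credentials; the bucket and object names are placeholders, and the exact behavior depends on the bucket having a soft delete policy enabled.

from google.cloud import storage

client = storage.Client()

# Create a bucket with a 7-day soft delete retention policy (name is a placeholder).
bucket = client.bucket("my-soft-delete-bucket")
bucket.soft_delete_policy.retention_duration_seconds = 7 * 86400
bucket = client.create_bucket(bucket)

# Upload an object, record its generation, then delete it so it enters the
# soft-deleted state.
blob = bucket.blob("example-object")
blob.upload_from_string(b"payload")
generation = bucket.get_blob("example-object").generation
blob.delete()

# Soft-deleted objects are only visible when soft_deleted=True is passed.
for deleted in bucket.list_blobs(soft_deleted=True):
    print(deleted.name, deleted.soft_delete_time, deleted.hard_delete_time)

# Fetching a soft-deleted object's metadata requires its generation.
soft_deleted = bucket.get_blob("example-object", generation=generation, soft_deleted=True)

# Restore it as the live version; the restored object gets a new generation.
restored = bucket.restore_blob("example-object", generation=generation)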