From 1d1942305bd4ec7b3e2a4c1dea664c4f91e32162 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:03:31 -0500 Subject: [PATCH 1/4] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#1233) Source-Link: https://github.com/googleapis/synthtool/commit/d895aec3679ad22aa120481f746bf9f2f325f26f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad Co-authored-by: Owl Bot Co-authored-by: cojenco --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 66 +++++++++++++++++++-------------------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2aefd0e91..e4e943e02 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8c11c9f3e..bda8e38c4 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - 
--hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage From 3928aa0680ec03addae1f792c73abb5c9dc8586f Mon Sep 17 00:00:00 2001 From: cojenco Date: Mon, 18 Mar 2024 14:43:40 -0700 Subject: [PATCH 2/4] feat: add support for soft 
delete (#1229) * feat: add support for soft delete * add restore, get object, list_objects, unit tests * integration test * update restore_blob * SoftDeletePolicy data class * update docstrings; address comments --- google/cloud/storage/_helpers.py | 10 ++ google/cloud/storage/blob.py | 36 +++++ google/cloud/storage/bucket.py | 230 +++++++++++++++++++++++++++++++ google/cloud/storage/client.py | 10 ++ tests/system/test_bucket.py | 54 ++++++++ tests/unit/test_blob.py | 49 +++++++ tests/unit/test_bucket.py | 178 ++++++++++++++++++++++++ tests/unit/test_client.py | 3 + 8 files changed, 570 insertions(+) diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py index 6f8702050..b90bf4eb2 100644 --- a/google/cloud/storage/_helpers.py +++ b/google/cloud/storage/_helpers.py @@ -225,6 +225,7 @@ def reload( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + soft_deleted=None, ): """Reload properties from Cloud Storage. @@ -270,6 +271,13 @@ def reload( :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` + + :type soft_deleted: bool + :param soft_deleted: + (Optional) If True, looks for a soft-deleted object. Will only return + the object metadata if the object exists and is in a soft-deleted state. + :attr:`generation` is required to be set on the blob if ``soft_deleted`` is set to True. + See: https://cloud.google.com/storage/docs/soft-delete """ client = self._require_client(client) query_params = self._query_params @@ -283,6 +291,8 @@ def reload( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, ) + if soft_deleted is not None: + query_params["softDeleted"] = soft_deleted headers = self._encryption_headers() _add_etag_match_headers( headers, if_etag_match=if_etag_match, if_etag_not_match=if_etag_not_match diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 6cfa56190..9c0cf33ab 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -650,6 +650,7 @@ def exists( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + soft_deleted=None, ): """Determines whether or not this blob exists. @@ -694,6 +695,13 @@ def exists( :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` + :type soft_deleted: bool + :param soft_deleted: + (Optional) If True, looks for a soft-deleted object. Will only return True + if the object exists and is in a soft-deleted state. + :attr:`generation` is required to be set on the blob if ``soft_deleted`` is set to True. + See: https://cloud.google.com/storage/docs/soft-delete + :rtype: bool :returns: True if the blob exists in Cloud Storage. """ @@ -702,6 +710,8 @@ def exists( # minimize the returned payload. query_params = self._query_params query_params["fields"] = "name" + if soft_deleted is not None: + query_params["softDeleted"] = soft_deleted _add_generation_match_parameters( query_params, @@ -4700,6 +4710,32 @@ def retention(self): info = self._properties.get("retention", {}) return Retention.from_api_repr(info, self) + @property + def soft_delete_time(self): + """If this object has been soft-deleted, returns the time at which it became soft-deleted. + + :rtype: :class:`datetime.datetime` or ``NoneType`` + :returns: + (readonly) The time that the object became soft-deleted. + Note this property is only set for soft-deleted objects. 
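+
+        Example, a minimal sketch (assumes soft delete is enabled on the bucket
+        and ``gen`` holds the soft-deleted object's generation; names are
+        illustrative)::
+
+            blob = bucket.get_blob("my-object", generation=gen, soft_deleted=True)
+            print(blob.soft_delete_time)  # when the object entered soft-deleted state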
+ """ + soft_delete_time = self._properties.get("softDeleteTime") + if soft_delete_time is not None: + return _rfc3339_nanos_to_datetime(soft_delete_time) + + @property + def hard_delete_time(self): + """If this object has been soft-deleted, returns the time at which it will be permanently deleted. + + :rtype: :class:`datetime.datetime` or ``NoneType`` + :returns: + (readonly) The time that the object will be permanently deleted. + Note this property is only set for soft-deleted objects. + """ + hard_delete_time = self._properties.get("hardDeleteTime") + if hard_delete_time is not None: + return _rfc3339_nanos_to_datetime(hard_delete_time) + def _get_host_name(connection): """Returns the host name from the given connection. diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index caa3ddd57..5855c4c8a 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1188,6 +1188,7 @@ def get_blob( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + soft_deleted=None, **kwargs, ): """Get a blob object by name. @@ -1248,6 +1249,13 @@ def get_blob( :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` + :type soft_deleted: bool + :param soft_deleted: + (Optional) If True, looks for a soft-deleted object. Will only return + the object metadata if the object exists and is in a soft-deleted state. + Object ``generation`` is required if ``soft_deleted`` is set to True. + See: https://cloud.google.com/storage/docs/soft-delete + :param kwargs: Keyword arguments to pass to the :class:`~google.cloud.storage.blob.Blob` constructor. @@ -1275,6 +1283,7 @@ def get_blob( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, retry=retry, + soft_deleted=soft_deleted, ) except NotFound: return None @@ -1297,6 +1306,7 @@ def list_blobs( timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, match_glob=None, + soft_deleted=None, ): """Return an iterator used to find blobs in the bucket. @@ -1378,6 +1388,13 @@ def list_blobs( The string value must be UTF-8 encoded. See: https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob + :type soft_deleted: bool + :param soft_deleted: + (Optional) If true, only soft-deleted objects will be listed as distinct results in order of increasing + generation number. This parameter can only be used successfully if the bucket has a soft delete policy. + Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See: + https://cloud.google.com/storage/docs/soft-delete + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` in this bucket matching the arguments. @@ -1398,6 +1415,7 @@ def list_blobs( timeout=timeout, retry=retry, match_glob=match_glob, + soft_deleted=soft_deleted, ) def list_notifications( @@ -2060,6 +2078,110 @@ def rename_blob( ) return new_blob + def restore_blob( + self, + blob_name, + client=None, + generation=None, + copy_source_acl=None, + projection=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ): + """Restores a soft-deleted object. + + If :attr:`user_project` is set on the bucket, bills the API request to that project. 
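+
+        For example, a minimal sketch (assumes soft delete is enabled on the
+        bucket and ``gen`` holds the soft-deleted object's generation)::
+
+            restored = bucket.restore_blob("my-object", generation=gen)
+            assert restored.generation != gen  # restore creates a new live generation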
+
+        See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/objects/restore)
+
+        :type blob_name: str
+        :param blob_name: The name of the blob to be restored.
+
+        :type client: :class:`~google.cloud.storage.client.Client`
+        :param client: (Optional) The client to use. If not passed, falls back
+            to the ``client`` stored on the current bucket.
+
+        :type generation: long
+        :param generation: (Optional) If present, selects a specific revision of this object.
+
+        :type copy_source_acl: bool
+        :param copy_source_acl: (Optional) If true, copy the soft-deleted object's access controls.
+
+        :type projection: str
+        :param projection: (Optional) Specifies the set of properties to return.
+            If used, must be 'full' or 'noAcl'.
+
+        :type if_generation_match: long
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
+
+        :type if_generation_not_match: long
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
+
+        :type if_metageneration_not_match: long
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
+
+        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
+        :param retry:
+            (Optional) How to retry the RPC.
+            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, under which
+            only restore operations with ``if_generation_match`` or ``generation`` set
+            will be retried.
+
+            Users can configure non-default retry behavior. A ``None`` value will
+            disable retries. A ``DEFAULT_RETRY`` value will enable retries
+            even if restore operations are not guaranteed to be idempotent.
+            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+
+        :rtype: :class:`google.cloud.storage.blob.Blob`
+        :returns: The restored Blob.
+        """
+        client = self._require_client(client)
+        query_params = {}
+
+        if self.user_project is not None:
+            query_params["userProject"] = self.user_project
+        if generation is not None:
+            query_params["generation"] = generation
+        if copy_source_acl is not None:
+            query_params["copySourceAcl"] = copy_source_acl
+        if projection is not None:
+            query_params["projection"] = projection
+
+        _add_generation_match_parameters(
+            query_params,
+            if_generation_match=if_generation_match,
+            if_generation_not_match=if_generation_not_match,
+            if_metageneration_match=if_metageneration_match,
+            if_metageneration_not_match=if_metageneration_not_match,
+        )
+
+        blob = Blob(bucket=self, name=blob_name)
+        api_response = client._post_resource(
+            f"{blob.path}/restore",
+            None,
+            query_params=query_params,
+            timeout=timeout,
+            retry=retry,
+        )
+        blob._set_properties(api_response)
+        return blob
+
     @property
     def cors(self):
         """Retrieve or set CORS policies configured for this bucket.
@@ -2227,6 +2349,18 @@ def iam_configuration(self):
         info = self._properties.get("iamConfiguration", {})
         return IAMConfiguration.from_api_repr(info, self)
 
+    @property
+    def soft_delete_policy(self):
+        """Retrieve the soft delete policy for this bucket.
+
+        See https://cloud.google.com/storage/docs/soft-delete
+
+        :rtype: :class:`SoftDeletePolicy`
+        :returns: an instance for managing the bucket's soft delete policy.
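+
+        For example, a minimal sketch of updating the policy on an existing
+        bucket (the 10-day duration is illustrative)::
+
+            bucket.soft_delete_policy.retention_duration_seconds = 10 * 86400
+            bucket.patch()  # persists the updated softDeletePolicy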
+        """
+        policy = self._properties.get("softDeletePolicy", {})
+        return SoftDeletePolicy.from_api_repr(policy, self)
+
     @property
     def lifecycle_rules(self):
         """Retrieve or set lifecycle rules configured for this bucket.
@@ -3432,6 +3566,102 @@ def generate_signed_url(
         )
 
 
+class SoftDeletePolicy(dict):
+    """Map a bucket's soft delete policy.
+
+    See https://cloud.google.com/storage/docs/soft-delete
+
+    :type bucket: :class:`Bucket`
+    :param bucket: Bucket for which this instance is the policy.
+
+    :type retention_duration_seconds: int
+    :param retention_duration_seconds:
+        (Optional) The period of time in seconds that soft-deleted objects in the bucket
+        will be retained and cannot be permanently deleted.
+
+    :type effective_time: :class:`datetime.datetime`
+    :param effective_time:
+        (Optional) When the bucket's soft delete policy is effective.
+        This value should normally only be set by the back-end API.
+    """
+
+    def __init__(self, bucket, **kw):
+        data = {}
+        retention_duration_seconds = kw.get("retention_duration_seconds")
+        data["retentionDurationSeconds"] = retention_duration_seconds
+
+        effective_time = kw.get("effective_time")
+        if effective_time is not None:
+            effective_time = _datetime_to_rfc3339(effective_time)
+        data["effectiveTime"] = effective_time
+
+        super().__init__(data)
+        self._bucket = bucket
+
+    @classmethod
+    def from_api_repr(cls, resource, bucket):
+        """Factory: construct instance from resource.
+
+        :type resource: dict
+        :param resource: mapping as returned from API call.
+
+        :type bucket: :class:`Bucket`
+        :param bucket: Bucket for which this instance is the policy.
+
+        :rtype: :class:`SoftDeletePolicy`
+        :returns: Instance created from resource.
+        """
+        instance = cls(bucket)
+        instance.update(resource)
+        return instance
+
+    @property
+    def bucket(self):
+        """Bucket for which this instance is the policy.
+
+        :rtype: :class:`Bucket`
+        :returns: the instance's bucket.
+        """
+        return self._bucket
+
+    @property
+    def retention_duration_seconds(self):
+        """Get the retention duration of the bucket's soft delete policy.
+
+        :rtype: int or ``NoneType``
+        :returns: The period of time in seconds that soft-deleted objects in the bucket
+            will be retained and cannot be permanently deleted; or ``None`` if the
+            property is not set.
+        """
+        duration = self.get("retentionDurationSeconds")
+        if duration is not None:
+            return int(duration)
+
+    @retention_duration_seconds.setter
+    def retention_duration_seconds(self, value):
+        """Set the retention duration of the bucket's soft delete policy.
+
+        :type value: int
+        :param value:
+            The period of time in seconds that soft-deleted objects in the bucket
+            will be retained and cannot be permanently deleted.
+        """
+        self["retentionDurationSeconds"] = value
+        self.bucket._patch_property("softDeletePolicy", self)
+
+    @property
+    def effective_time(self):
+        """Get the effective time of the bucket's soft delete policy.
+
+        :rtype: datetime.datetime or ``NoneType``
+        :returns: point-in-time at which the bucket's soft delete policy is
+            effective, or ``None`` if the property is not set.
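+
+        For example (illustrative; the backend populates ``effectiveTime``
+        once the policy is applied)::
+
+            policy = bucket.soft_delete_policy
+            if policy.effective_time is not None:
+                print(policy.effective_time.isoformat())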
+        """
+        timestamp = self.get("effectiveTime")
+        if timestamp is not None:
+            return _rfc3339_nanos_to_datetime(timestamp)
+
+
 def _raise_if_len_differs(expected_len, **generation_match_args):
     """
     Raise an error if any generation match argument
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index e051b9750..73351f1f7 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -1184,6 +1184,7 @@ def list_blobs(
         timeout=_DEFAULT_TIMEOUT,
         retry=DEFAULT_RETRY,
         match_glob=None,
+        soft_deleted=None,
     ):
         """Return an iterator used to find blobs in the bucket.
@@ -1282,6 +1283,12 @@
             The string value must be UTF-8 encoded. See:
             https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob
 
+            soft_deleted (bool):
+                (Optional) If true, only soft-deleted objects will be listed as distinct results in order of increasing
+                generation number. This parameter can only be used successfully if the bucket has a soft delete policy.
+                Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See:
+                https://cloud.google.com/storage/docs/soft-delete
+
         Returns:
             Iterator of all :class:`~google.cloud.storage.blob.Blob`
             in this bucket matching the arguments. The RPC call
@@ -1318,6 +1325,9 @@ def list_blobs(
         if fields is not None:
             extra_params["fields"] = fields
 
+        if soft_deleted is not None:
+            extra_params["softDeleted"] = soft_deleted
+
         if bucket.user_project is not None:
             extra_params["userProject"] = bucket.user_project
 
diff --git a/tests/system/test_bucket.py b/tests/system/test_bucket.py
index 19b21bac2..0fb25d54e 100644
--- a/tests/system/test_bucket.py
+++ b/tests/system/test_bucket.py
@@ -1141,3 +1141,57 @@ def test_config_autoclass_w_existing_bucket(
     assert (
         bucket.autoclass_terminal_storage_class_update_time != previous_tsc_update_time
     )
+
+
+def test_soft_delete_policy(
+    storage_client,
+    buckets_to_delete,
+):
+    from google.cloud.storage.bucket import SoftDeletePolicy
+
+    # Create a bucket with soft delete policy.
+    duration_secs = 7 * 86400
+    bucket = storage_client.bucket(_helpers.unique_name("w-soft-delete"))
+    bucket.soft_delete_policy.retention_duration_seconds = duration_secs
+    bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket)
+    buckets_to_delete.append(bucket)
+
+    policy = bucket.soft_delete_policy
+    assert isinstance(policy, SoftDeletePolicy)
+    assert policy.retention_duration_seconds == duration_secs
+    assert isinstance(policy.effective_time, datetime.datetime)
+
+    # Insert an object and get object metadata prior to soft delete.
+    payload = b"DEADBEEF"
+    blob_name = _helpers.unique_name("soft-delete")
+    blob = bucket.blob(blob_name)
+    blob.upload_from_string(payload)
+
+    blob = bucket.get_blob(blob_name)
+    gen = blob.generation
+    assert blob.soft_delete_time is None
+    assert blob.hard_delete_time is None
+
+    # Delete the object to enter soft-deleted state.
+    blob.delete()
+
+    iter_default = bucket.list_blobs()
+    assert len(list(iter_default)) == 0
+    iter_w_soft_delete = bucket.list_blobs(soft_deleted=True)
+    assert len(list(iter_w_soft_delete)) > 0
+
+    # Get the soft-deleted object.
+    soft_deleted_blob = bucket.get_blob(blob_name, generation=gen, soft_deleted=True)
+    assert soft_deleted_blob.soft_delete_time is not None
+    assert soft_deleted_blob.hard_delete_time is not None
+
+    # Restore the soft-deleted object.
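+    # Restore creates a new live generation of the object, so the restored
+    # blob's generation is expected to differ from the soft-deleted one.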
+    restored_blob = bucket.restore_blob(blob_name, generation=gen)
+    assert restored_blob.exists() is True
+    assert restored_blob.generation != gen
+
+    # Patch the soft delete policy on an existing bucket.
+    new_duration_secs = 10 * 86400
+    bucket.soft_delete_policy.retention_duration_seconds = new_duration_secs
+    bucket.patch()
+    assert bucket.soft_delete_policy.retention_duration_seconds == new_duration_secs
diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index 3bc775499..98d744d6c 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -784,6 +784,32 @@ def test_exists_hit_w_generation_w_retry(self):
             _target_object=None,
         )
 
+    def test_exists_hit_w_generation_w_soft_deleted(self):
+        blob_name = "blob-name"
+        generation = 123456
+        api_response = {"name": blob_name}
+        client = mock.Mock(spec=["_get_resource"])
+        client._get_resource.return_value = api_response
+        bucket = _Bucket(client)
+        blob = self._make_one(blob_name, bucket=bucket, generation=generation)
+
+        self.assertTrue(blob.exists(retry=None, soft_deleted=True))
+
+        expected_query_params = {
+            "fields": "name",
+            "generation": generation,
+            "softDeleted": True,
+        }
+        expected_headers = {}
+        client._get_resource.assert_called_once_with(
+            blob.path,
+            query_params=expected_query_params,
+            headers=expected_headers,
+            timeout=self._get_default_timeout(),
+            retry=None,
+            _target_object=None,
+        )
+
     def test_exists_w_etag_match(self):
         blob_name = "blob-name"
         etag = "kittens"
@@ -5827,6 +5853,29 @@ def test_custom_time_unset(self):
         blob = self._make_one("blob-name", bucket=BUCKET)
         self.assertIsNone(blob.custom_time)
 
+    def test_soft_hard_delete_time_getter(self):
+        from google.cloud._helpers import _RFC3339_MICROS
+
+        BLOB_NAME = "blob-name"
+        bucket = _Bucket()
+        soft_timestamp = datetime.datetime(2024, 1, 5, 20, 34, 37, tzinfo=_UTC)
+        soft_delete = soft_timestamp.strftime(_RFC3339_MICROS)
+        hard_timestamp = datetime.datetime(2024, 1, 15, 20, 34, 37, tzinfo=_UTC)
+        hard_delete = hard_timestamp.strftime(_RFC3339_MICROS)
+        properties = {
+            "softDeleteTime": soft_delete,
+            "hardDeleteTime": hard_delete,
+        }
+        blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties)
+        self.assertEqual(blob.soft_delete_time, soft_timestamp)
+        self.assertEqual(blob.hard_delete_time, hard_timestamp)
+
+    def test_soft_hard_delete_time_unset(self):
+        BUCKET = object()
+        blob = self._make_one("blob-name", bucket=BUCKET)
+        self.assertIsNone(blob.soft_delete_time)
+        self.assertIsNone(blob.hard_delete_time)
+
     def test_from_string_w_valid_uri(self):
         from google.cloud.storage.blob import Blob
 
diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py
index a5d276391..7f25fee05 100644
--- a/tests/unit/test_bucket.py
+++ b/tests/unit/test_bucket.py
@@ -982,6 +982,40 @@ def test_get_blob_hit_w_user_project(self):
             _target_object=blob,
         )
 
+    def test_get_blob_hit_w_generation_w_soft_deleted(self):
+        from google.cloud.storage.blob import Blob
+
+        name = "name"
+        blob_name = "blob-name"
+        generation = 1512565576797178
+        api_response = {"name": blob_name, "generation": generation}
+        client = mock.Mock(spec=["_get_resource"])
+        client._get_resource.return_value = api_response
+        bucket = self._make_one(client, name=name)
+
+        blob = bucket.get_blob(blob_name, generation=generation, soft_deleted=True)
+
+        self.assertIsInstance(blob, Blob)
+        self.assertIs(blob.bucket, bucket)
+        self.assertEqual(blob.name, blob_name)
+        self.assertEqual(blob.generation, generation)
+
+        expected_path = f"/b/{name}/o/{blob_name}"
+        expected_query_params = {
"generation": generation, + "projection": "noAcl", + "softDeleted": True, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=blob, + ) + def test_get_blob_hit_w_generation_w_timeout(self): from google.cloud.storage.blob import Blob @@ -1143,6 +1177,7 @@ def test_list_blobs_w_defaults(self): expected_versions = None expected_projection = "noAcl" expected_fields = None + soft_deleted = None client.list_blobs.assert_called_once_with( bucket, max_results=expected_max_results, @@ -1158,6 +1193,7 @@ def test_list_blobs_w_defaults(self): timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, match_glob=expected_match_glob, + soft_deleted=soft_deleted, ) def test_list_blobs_w_explicit(self): @@ -1171,6 +1207,7 @@ def test_list_blobs_w_explicit(self): end_offset = "g" include_trailing_delimiter = True versions = True + soft_deleted = True projection = "full" fields = "items/contentLanguage,nextPageToken" bucket = self._make_one(client=None, name=name) @@ -1194,6 +1231,7 @@ def test_list_blobs_w_explicit(self): timeout=timeout, retry=retry, match_glob=match_glob, + soft_deleted=soft_deleted, ) self.assertIs(iterator, other_client.list_blobs.return_value) @@ -1209,6 +1247,7 @@ def test_list_blobs_w_explicit(self): expected_versions = versions expected_projection = projection expected_fields = fields + expected_soft_deleted = soft_deleted other_client.list_blobs.assert_called_once_with( bucket, max_results=expected_max_results, @@ -1224,6 +1263,7 @@ def test_list_blobs_w_explicit(self): timeout=timeout, retry=retry, match_glob=expected_match_glob, + soft_deleted=expected_soft_deleted, ) def test_list_notifications_w_defaults(self): @@ -3076,6 +3116,41 @@ def test_object_retention_mode_getter(self): bucket = self._make_one(properties=properties) self.assertEqual(bucket.object_retention_mode, mode) + def test_soft_delete_policy_getter_w_entry(self): + from google.cloud.storage.bucket import SoftDeletePolicy + from google.cloud._helpers import _datetime_to_rfc3339 + + seconds = 86400 * 10 # 10 days + effective_time = _NOW(_UTC) + properties = { + "softDeletePolicy": { + "retentionDurationSeconds": seconds, + "effectiveTime": _datetime_to_rfc3339(effective_time), + } + } + bucket = self._make_one(properties=properties) + + policy = SoftDeletePolicy( + bucket=bucket, + retention_duration_seconds=seconds, + effective_time=effective_time, + ) + self.assertIsInstance(bucket.soft_delete_policy, SoftDeletePolicy) + self.assertEqual(bucket.soft_delete_policy, policy) + self.assertEqual(bucket.soft_delete_policy.retention_duration_seconds, seconds) + self.assertEqual(bucket.soft_delete_policy.effective_time, effective_time) + + def test_soft_delete_policy_setter(self): + bucket = self._make_one() + policy = bucket.soft_delete_policy + self.assertIsNone(policy.retention_duration_seconds) + self.assertIsNone(policy.effective_time) + + seconds = 86400 * 10 # 10 days + bucket.soft_delete_policy.retention_duration_seconds = seconds + self.assertTrue("softDeletePolicy" in bucket._changes) + self.assertEqual(bucket.soft_delete_policy.retention_duration_seconds, seconds) + def test_configure_website_defaults(self): NAME = "name" UNSET = {"website": {"mainPageSuffix": None, "notFoundPage": None}} @@ -4028,6 +4103,109 @@ def test_lock_retention_policy_w_user_project(self): _target_object=bucket, ) + def 
test_restore_blob_w_defaults(self): + bucket_name = "restore_bucket" + blob_name = "restore_blob" + generation = 123456 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) + + restored_blob = bucket.restore_blob(blob_name) + + self.assertIs(restored_blob.bucket, bucket) + self.assertEqual(restored_blob.name, blob_name) + expected_path = f"/b/{bucket_name}/o/{blob_name}/restore" + expected_data = None + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + + def test_restore_blob_w_explicit(self): + user_project = "user-project-123" + bucket_name = "restore_bucket" + blob_name = "restore_blob" + generation = 123456 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = self._make_one( + client=client, name=bucket_name, user_project=user_project + ) + if_generation_match = 123456 + if_generation_not_match = 654321 + if_metageneration_match = 1 + if_metageneration_not_match = 2 + projection = "noAcl" + + restored_blob = bucket.restore_blob( + blob_name, + client=client, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, + projection=projection, + ) + + self.assertEqual(restored_blob.name, blob_name) + self.assertEqual(restored_blob.bucket, bucket) + expected_path = f"/b/{bucket_name}/o/{blob_name}/restore" + expected_data = None + expected_query_params = { + "userProject": user_project, + "projection": projection, + "ifGenerationMatch": if_generation_match, + "ifGenerationNotMatch": if_generation_not_match, + "ifMetagenerationMatch": if_metageneration_match, + "ifMetagenerationNotMatch": if_metageneration_not_match, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + + def test_restore_blob_explicit_copy_source_acl(self): + bucket_name = "restore_bucket" + blob_name = "restore" + generation = 123456 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) + copy_source_acl = False + + restored_blob = bucket.restore_blob( + blob_name, + copy_source_acl=copy_source_acl, + generation=generation, + ) + + self.assertEqual(restored_blob.name, blob_name) + self.assertEqual(restored_blob.bucket, bucket) + expected_path = f"/b/{bucket_name}/o/{blob_name}/restore" + expected_data = None + expected_query_params = { + "copySourceAcl": False, + "generation": generation, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + def test_generate_signed_url_w_invalid_version(self): expiration = "2014-10-16T20:34:37.000Z" client = self._make_client() diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py 
index 0adc56e1d..c5da9e4cf 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -2015,6 +2015,7 @@ def test_list_blobs_w_explicit_w_user_project(self):
         start_offset = "c"
         end_offset = "g"
         include_trailing_delimiter = True
+        soft_deleted = False
         versions = True
         projection = "full"
         page_size = 2
@@ -2047,6 +2048,7 @@ def test_list_blobs_w_explicit_w_user_project(self):
             timeout=timeout,
             retry=retry,
             match_glob=match_glob,
+            soft_deleted=soft_deleted,
         )
 
         self.assertIs(iterator, client._list_resource.return_value)
@@ -2068,6 +2070,7 @@ def test_list_blobs_w_explicit_w_user_project(self):
             "versions": versions,
             "fields": fields,
             "userProject": user_project,
+            "softDeleted": soft_deleted,
         }
         expected_page_start = _blobs_page_start
         expected_page_size = 2

From 7bb806538cf3d7a5e16390db1983620933d5e51a Mon Sep 17 00:00:00 2001
From: cojenco
Date: Mon, 18 Mar 2024 16:18:33 -0700
Subject: [PATCH 3/4] feat: support includeFoldersAsPrefixes (#1223)

* feat: support includeFoldersAsPrefixes

* sys test

* update sys test with cleanup

---
 google/cloud/storage/bucket.py |  7 ++++++
 google/cloud/storage/client.py |  9 ++++++++
 tests/system/test_bucket.py    | 41 ++++++++++++++++++++++++++++++++++
 tests/unit/test_bucket.py      |  6 +++++
 tests/unit/test_client.py      |  3 +++
 5 files changed, 66 insertions(+)

diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index 5855c4c8a..c83e2a958 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -1306,6 +1306,7 @@ def list_blobs(
         timeout=_DEFAULT_TIMEOUT,
         retry=DEFAULT_RETRY,
         match_glob=None,
+        include_folders_as_prefixes=None,
         soft_deleted=None,
     ):
         """Return an iterator used to find blobs in the bucket.
@@ -1388,6 +1389,11 @@
             The string value must be UTF-8 encoded. See:
             https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob
 
+        :type include_folders_as_prefixes: bool
+        :param include_folders_as_prefixes:
+            (Optional) If true, includes Folders and Managed Folders in the set of
+            ``prefixes`` returned by the query. Only applicable if ``delimiter`` is set to /.
+            See: https://cloud.google.com/storage/docs/managed-folders
+
         :type soft_deleted: bool
         :param soft_deleted:
             (Optional) If true, only soft-deleted objects will be listed as distinct results in order of increasing
@@ -1415,6 +1421,7 @@
             timeout=timeout,
             retry=retry,
             match_glob=match_glob,
+            include_folders_as_prefixes=include_folders_as_prefixes,
             soft_deleted=soft_deleted,
         )
 
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 73351f1f7..57bbab008 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -1184,6 +1184,7 @@ def list_blobs(
         timeout=_DEFAULT_TIMEOUT,
         retry=DEFAULT_RETRY,
         match_glob=None,
+        include_folders_as_prefixes=None,
         soft_deleted=None,
     ):
         """Return an iterator used to find blobs in the bucket.
@@ -1283,6 +1284,11 @@
             The string value must be UTF-8 encoded. See:
             https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob
 
+            include_folders_as_prefixes (bool):
+                (Optional) If true, includes Folders and Managed Folders in the set of
+                ``prefixes`` returned by the query. Only applicable if ``delimiter`` is set to /.
+                See: https://cloud.google.com/storage/docs/managed-folders
+
             soft_deleted (bool):
                 (Optional) If true, only soft-deleted objects will be listed as distinct results in order of increasing
                 generation number. This parameter can only be used successfully if the bucket has a soft delete policy.
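+
+            Example, a minimal sketch of the new parameter (bucket and folder
+            names are illustrative; the iterator must be consumed before
+            ``prefixes`` is populated)::
+
+                iterator = client.list_blobs(
+                    "my-bucket", delimiter="/", include_folders_as_prefixes=True
+                )
+                blobs = list(iterator)
+                print(iterator.prefixes)  # includes folders and managed folders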
@@ -1325,6 +1331,9 @@ def list_blobs(
         if fields is not None:
             extra_params["fields"] = fields
 
+        if include_folders_as_prefixes is not None:
+            extra_params["includeFoldersAsPrefixes"] = include_folders_as_prefixes
+
         if soft_deleted is not None:
             extra_params["softDeleted"] = soft_deleted
 
diff --git a/tests/system/test_bucket.py b/tests/system/test_bucket.py
index 0fb25d54e..9b2fcd614 100644
--- a/tests/system/test_bucket.py
+++ b/tests/system/test_bucket.py
@@ -653,6 +653,47 @@ def test_bucket_list_blobs_w_match_glob(
     assert [blob.name for blob in blobs] == expected_names
 
 
+def test_bucket_list_blobs_include_managed_folders(
+    storage_client,
+    buckets_to_delete,
+    blobs_to_delete,
+    hierarchy_filenames,
+):
+    bucket_name = _helpers.unique_name("ubla-mf")
+    bucket = storage_client.bucket(bucket_name)
+    bucket.iam_configuration.uniform_bucket_level_access_enabled = True
+    _helpers.retry_429_503(bucket.create)()
+    buckets_to_delete.append(bucket)
+
+    payload = b"helloworld"
+    for filename in hierarchy_filenames:
+        blob = bucket.blob(filename)
+        blob.upload_from_string(payload)
+        blobs_to_delete.append(blob)
+
+    # Make API call to create a managed folder.
+    # TODO: change to use storage control client once available.
+    path = f"/b/{bucket_name}/managedFolders"
+    properties = {"name": "managedfolder1"}
+    storage_client._post_resource(path, properties)
+
+    expected_prefixes = set(["parent/"])
+    blob_iter = bucket.list_blobs(delimiter="/")
+    list(blob_iter)
+    assert blob_iter.prefixes == expected_prefixes
+
+    # Test that managed folders are only included when ``include_folders_as_prefixes`` is set.
+    expected_prefixes = set(["parent/", "managedfolder1/"])
+    blob_iter = bucket.list_blobs(delimiter="/", include_folders_as_prefixes=True)
+    list(blob_iter)
+    assert blob_iter.prefixes == expected_prefixes
+
+    # Cleanup: API call to delete a managed folder.
+    # TODO: change to use storage control client once available.
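+    # Deleting blobs does not remove the managed folder, so clean it up
+    # explicitly; a bucket with a managed folder still attached can fail to delete.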
+ path = f"/b/{bucket_name}/managedFolders/managedfolder1" + storage_client._delete_resource(path) + + def test_bucket_update_retention_period( storage_client, buckets_to_delete, diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 7f25fee05..d8ce1e0f5 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -1177,6 +1177,7 @@ def test_list_blobs_w_defaults(self): expected_versions = None expected_projection = "noAcl" expected_fields = None + expected_include_folders_as_prefixes = None soft_deleted = None client.list_blobs.assert_called_once_with( bucket, @@ -1193,6 +1194,7 @@ def test_list_blobs_w_defaults(self): timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, match_glob=expected_match_glob, + include_folders_as_prefixes=expected_include_folders_as_prefixes, soft_deleted=soft_deleted, ) @@ -1206,6 +1208,7 @@ def test_list_blobs_w_explicit(self): start_offset = "c" end_offset = "g" include_trailing_delimiter = True + include_folders_as_prefixes = True versions = True soft_deleted = True projection = "full" @@ -1231,6 +1234,7 @@ def test_list_blobs_w_explicit(self): timeout=timeout, retry=retry, match_glob=match_glob, + include_folders_as_prefixes=include_folders_as_prefixes, soft_deleted=soft_deleted, ) @@ -1247,6 +1251,7 @@ def test_list_blobs_w_explicit(self): expected_versions = versions expected_projection = projection expected_fields = fields + expected_include_folders_as_prefixes = include_folders_as_prefixes expected_soft_deleted = soft_deleted other_client.list_blobs.assert_called_once_with( bucket, @@ -1263,6 +1268,7 @@ def test_list_blobs_w_explicit(self): timeout=timeout, retry=retry, match_glob=expected_match_glob, + include_folders_as_prefixes=expected_include_folders_as_prefixes, soft_deleted=expected_soft_deleted, ) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index c5da9e4cf..b664e701d 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -2015,6 +2015,7 @@ def test_list_blobs_w_explicit_w_user_project(self): start_offset = "c" end_offset = "g" include_trailing_delimiter = True + include_folders_as_prefixes = True soft_deleted = False versions = True projection = "full" @@ -2048,6 +2049,7 @@ def test_list_blobs_w_explicit_w_user_project(self): timeout=timeout, retry=retry, match_glob=match_glob, + include_folders_as_prefixes=include_folders_as_prefixes, soft_deleted=soft_deleted, ) @@ -2070,6 +2072,7 @@ def test_list_blobs_w_explicit_w_user_project(self): "versions": versions, "fields": fields, "userProject": user_project, + "includeFoldersAsPrefixes": include_folders_as_prefixes, "softDeleted": soft_deleted, } expected_page_start = _blobs_page_start From afc433c1676f59ed127a4ff58e6089169f586024 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 16:51:49 -0700 Subject: [PATCH 4/4] chore(main): release 2.16.0 (#1241) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 8 ++++++++ google/cloud/storage/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c2c5af91c..25b6fe162 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +## [2.16.0](https://github.com/googleapis/python-storage/compare/v2.15.0...v2.16.0) (2024-03-18) + + +### Features + +* Add support for soft delete 
([#1229](https://github.com/googleapis/python-storage/issues/1229)) ([3928aa0](https://github.com/googleapis/python-storage/commit/3928aa0680ec03addae1f792c73abb5c9dc8586f)) +* Support includeFoldersAsPrefixes ([#1223](https://github.com/googleapis/python-storage/issues/1223)) ([7bb8065](https://github.com/googleapis/python-storage/commit/7bb806538cf3d7a5e16390db1983620933d5e51a)) + ## [2.15.0](https://github.com/googleapis/python-storage/compare/v2.14.0...v2.15.0) (2024-02-28) diff --git a/google/cloud/storage/version.py b/google/cloud/storage/version.py index a8381fff6..a93d72c2b 100644 --- a/google/cloud/storage/version.py +++ b/google/cloud/storage/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.15.0" +__version__ = "2.16.0"