import datetime |
|
import pytest |
|
|
|
from google.api_core import exceptions |
|
from . import _helpers |
|
|
|
|
|
def test_bucket_create_w_alt_storage_class(storage_client, buckets_to_delete): |
|
from google.cloud.storage import constants |
|
|
|
bucket_name = _helpers.unique_name("bucket-w-archive") |
|
|
|
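    # The bucket should not exist yet.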
with pytest.raises(exceptions.NotFound): |
|
storage_client.get_bucket(bucket_name) |
|
|
|
bucket = storage_client.bucket(bucket_name) |
|
bucket.storage_class = constants.ARCHIVE_STORAGE_CLASS |
|
|
|
_helpers.retry_429_503(bucket.create)() |
|
buckets_to_delete.append(bucket) |
|
|
|
created = storage_client.get_bucket(bucket_name) |
|
assert created.storage_class == constants.ARCHIVE_STORAGE_CLASS |
|
|
|
|
|
def test_bucket_lifecycle_rules(storage_client, buckets_to_delete): |
|
from google.cloud.storage import constants |
|
from google.cloud.storage.bucket import LifecycleRuleDelete |
|
from google.cloud.storage.bucket import LifecycleRuleSetStorageClass |
|
from google.cloud.storage.bucket import LifecycleRuleAbortIncompleteMultipartUpload |
|
|
|
bucket_name = _helpers.unique_name("w-lifcycle-rules") |
|
custom_time_before = datetime.date(2018, 8, 1) |
|
noncurrent_before = datetime.date(2018, 8, 1) |
|
matches_prefix = ["storage-sys-test", "gcs-sys-test"] |
|
matches_suffix = ["suffix-test"] |
|
|
|
with pytest.raises(exceptions.NotFound): |
|
storage_client.get_bucket(bucket_name) |
|
|
|
bucket = storage_client.bucket(bucket_name) |
|
bucket.add_lifecycle_delete_rule( |
|
age=42, |
|
number_of_newer_versions=3, |
|
days_since_custom_time=2, |
|
custom_time_before=custom_time_before, |
|
days_since_noncurrent_time=2, |
|
noncurrent_time_before=noncurrent_before, |
|
matches_prefix=matches_prefix, |
|
matches_suffix=matches_suffix, |
|
) |
|
bucket.add_lifecycle_set_storage_class_rule( |
|
constants.COLDLINE_STORAGE_CLASS, |
|
is_live=False, |
|
matches_storage_class=[constants.NEARLINE_STORAGE_CLASS], |
|
) |
|
bucket.add_lifecycle_abort_incomplete_multipart_upload_rule( |
|
age=42, |
|
) |
|
|
|
expected_rules = [ |
|
LifecycleRuleDelete( |
|
age=42, |
|
number_of_newer_versions=3, |
|
days_since_custom_time=2, |
|
custom_time_before=custom_time_before, |
|
days_since_noncurrent_time=2, |
|
noncurrent_time_before=noncurrent_before, |
|
matches_prefix=matches_prefix, |
|
matches_suffix=matches_suffix, |
|
), |
|
LifecycleRuleSetStorageClass( |
|
constants.COLDLINE_STORAGE_CLASS, |
|
is_live=False, |
|
matches_storage_class=[constants.NEARLINE_STORAGE_CLASS], |
|
), |
|
LifecycleRuleAbortIncompleteMultipartUpload( |
|
age=42, |
|
), |
|
] |
|
|
|
_helpers.retry_429_503(bucket.create)(location="us") |
|
buckets_to_delete.append(bucket) |
|
|
|
assert bucket.name == bucket_name |
|
assert list(bucket.lifecycle_rules) == expected_rules |
|
|
|
|
|
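    # Modify the first rule in place, then patch the bucket.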
expected_rules[0] = LifecycleRuleDelete( |
|
age=30, |
|
matches_prefix=["new-prefix"], |
|
matches_suffix=["new-suffix"], |
|
) |
|
rules = list(bucket.lifecycle_rules) |
|
rules[0]["condition"] = { |
|
"age": 30, |
|
"matchesPrefix": ["new-prefix"], |
|
"matchesSuffix": ["new-suffix"], |
|
} |
|
bucket.lifecycle_rules = rules |
|
bucket.patch() |
|
|
|
assert list(bucket.lifecycle_rules) == expected_rules |
|
|
|
|
|
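    # Clear all lifecycle rules.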
    bucket.clear_lifecycle_rules()
|
bucket.patch() |
|
|
|
assert list(bucket.lifecycle_rules) == [] |
|
|
|
|
|
@pytest.mark.skipif( |
|
_helpers.is_api_endpoint_override, |
|
reason="Test does not yet support endpoint override", |
|
) |
|
def test_bucket_update_labels(storage_client, buckets_to_delete): |
|
bucket_name = _helpers.unique_name("update-labels") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
assert bucket.exists() |
|
|
|
updated_labels = {"test-label": "label-value"} |
|
bucket.labels = updated_labels |
|
bucket.update() |
|
assert bucket.labels == updated_labels |
|
|
|
new_labels = {"another-label": "another-value"} |
|
bucket.labels = new_labels |
|
bucket.patch() |
|
assert bucket.labels == new_labels |
|
|
|
bucket.labels = {} |
|
|
|
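    # Clearing labels via update() can transiently fail with 400; retry.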
retry_400 = _helpers.RetryErrors(exceptions.BadRequest) |
|
retry_400(bucket.update)() |
|
assert bucket.labels == {} |
|
|
|
|
|
def test_bucket_get_set_iam_policy( |
|
storage_client, |
|
buckets_to_delete, |
|
service_account, |
|
): |
|
from google.cloud.storage.iam import STORAGE_OBJECT_VIEWER_ROLE |
|
from google.api_core.exceptions import BadRequest |
|
from google.api_core.exceptions import PreconditionFailed |
|
|
|
bucket_name = _helpers.unique_name("iam-policy") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
assert bucket.exists() |
|
|
|
policy_no_version = bucket.get_iam_policy() |
|
assert policy_no_version.version == 1 |
|
|
|
policy = bucket.get_iam_policy(requested_policy_version=3) |
|
assert policy == policy_no_version |
|
|
|
member = f"serviceAccount:{storage_client.get_service_account_email()}" |
|
|
|
binding_w_condition = { |
|
"role": STORAGE_OBJECT_VIEWER_ROLE, |
|
"members": {member}, |
|
"condition": { |
|
"title": "always-true", |
|
"description": "test condition always-true", |
|
"expression": "true", |
|
}, |
|
} |
|
policy.bindings.append(binding_w_condition) |
|
|
|
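    # Conditional bindings are rejected until uniform bucket-level access is enabled.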
with pytest.raises(PreconditionFailed, match="enable uniform bucket-level access"): |
|
bucket.set_iam_policy(policy) |
|
|
|
bucket.iam_configuration.uniform_bucket_level_access_enabled = True |
|
bucket.patch() |
|
|
|
policy = bucket.get_iam_policy(requested_policy_version=3) |
|
policy.bindings.append(binding_w_condition) |
|
|
|
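    # Policies carrying conditional bindings must request version 3.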
with pytest.raises(BadRequest, match="at least 3"): |
|
bucket.set_iam_policy(policy) |
|
|
|
policy.version = 3 |
|
returned_policy = bucket.set_iam_policy(policy) |
|
assert returned_policy.version == 3 |
|
assert returned_policy.bindings == policy.bindings |
|
|
|
fetched_policy = bucket.get_iam_policy(requested_policy_version=3) |
|
assert fetched_policy.bindings == returned_policy.bindings |
|
|
|
|
|
def test_bucket_crud_w_requester_pays(storage_client, buckets_to_delete, user_project): |
|
bucket_name = _helpers.unique_name("w-requester-pays") |
|
created = _helpers.retry_429_503(storage_client.create_bucket)( |
|
bucket_name, requester_pays=True |
|
) |
|
buckets_to_delete.append(created) |
|
assert created.name == bucket_name |
|
assert created.requester_pays |
|
|
|
with_user_project = storage_client.bucket( |
|
bucket_name, |
|
user_project=user_project, |
|
) |
|
|
|
try: |
|
|
|
assert with_user_project.exists() |
|
with_user_project.reload() |
|
assert with_user_project.requester_pays |
|
|
|
|
|
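        # Exercise 'patch' with userProject.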
with_user_project.configure_website( |
|
main_page_suffix="index.html", not_found_page="404.html" |
|
) |
|
with_user_project.patch() |
|
expected_website = {"mainPageSuffix": "index.html", "notFoundPage": "404.html"} |
|
assert with_user_project._properties["website"] == expected_website |
|
|
|
|
|
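        # Exercise 'update' with userProject.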
new_labels = {"another-label": "another-value"} |
|
with_user_project.labels = new_labels |
|
with_user_project.update() |
|
assert with_user_project.labels == new_labels |
|
|
|
finally: |
|
|
|
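        # Delete the bucket with userProject set, then drop it from cleanup.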
with_user_project.delete() |
|
buckets_to_delete.remove(created) |
|
|
|
|
|
def test_bucket_acls_iam_w_user_project( |
|
storage_client, buckets_to_delete, user_project |
|
): |
|
bucket_name = _helpers.unique_name("acl-w-user-project") |
|
created = _helpers.retry_429_503(storage_client.create_bucket)( |
|
bucket_name, |
|
requester_pays=True, |
|
) |
|
buckets_to_delete.append(created) |
|
|
|
with_user_project = storage_client.bucket(bucket_name, user_project=user_project) |
|
|
|
|
|
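    # Exercise bucket ACL with userProject.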
acl = with_user_project.acl |
|
acl.reload() |
|
acl.all().grant_read() |
|
acl.save() |
|
assert "READER" in acl.all().get_roles() |
|
|
|
del acl.entities["allUsers"] |
|
acl.save() |
|
assert not acl.has_entity("allUsers") |
|
|
|
|
|
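    # Exercise default object ACL with userProject.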
doa = with_user_project.default_object_acl |
|
doa.reload() |
|
doa.all().grant_read() |
|
doa.save() |
|
assert "READER" in doa.all().get_roles() |
|
|
|
|
|
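    # Exercise IAM with userProject.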
test_permissions = ["storage.buckets.get"] |
|
found = with_user_project.test_iam_permissions(test_permissions) |
|
assert found == test_permissions |
|
|
|
policy = with_user_project.get_iam_policy() |
|
viewers = policy.setdefault("roles/storage.objectViewer", set()) |
|
viewers.add(policy.all_users()) |
|
with_user_project.set_iam_policy(policy) |
|
|
|
|
|
def test_bucket_acls_w_metageneration_match(storage_client, buckets_to_delete): |
|
wrong_metageneration_number = 9 |
|
bucket_name = _helpers.unique_name("acl-w-metageneration-match") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
|
|
|
|
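    # Exercise bucket ACL with metageneration-match preconditions.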
acl = bucket.acl |
|
acl.group("cloud-developer-relations@google.com").grant_read() |
|
bucket.reload() |
|
|
|
with pytest.raises(exceptions.PreconditionFailed): |
|
acl.save(if_metageneration_match=wrong_metageneration_number) |
|
assert ( |
|
"READER" |
|
not in acl.group("cloud-developer-relations@google.com").get_roles() |
|
) |
|
|
|
acl.save(if_metageneration_match=bucket.metageneration) |
|
assert "READER" in acl.group("cloud-developer-relations@google.com").get_roles() |
|
|
|
|
|
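    # Exercise default object ACL with metageneration-match preconditions.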
doa = bucket.default_object_acl |
|
doa.group("cloud-developer-relations@google.com").grant_owner() |
|
bucket.reload() |
|
|
|
with pytest.raises(exceptions.PreconditionFailed): |
|
doa.save(if_metageneration_match=wrong_metageneration_number) |
|
assert ( |
|
"OWNER" not in doa.group("cloud-developer-relations@google.com").get_roles() |
|
) |
|
|
|
doa.save(if_metageneration_match=bucket.metageneration) |
|
assert "OWNER" in doa.group("cloud-developer-relations@google.com").get_roles() |
|
|
|
|
|
def test_bucket_copy_blob( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
user_project, |
|
): |
|
payload = b"DEADBEEF" |
|
bucket_name = _helpers.unique_name("copy-blob") |
|
created = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(created) |
|
assert created.name == bucket_name |
|
|
|
blob = created.blob("CloudLogo") |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
new_blob = _helpers.retry_bad_copy(created.copy_blob)( |
|
blob, created, "CloudLogoCopy" |
|
) |
|
blobs_to_delete.append(new_blob) |
|
|
|
copied_contents = new_blob.download_as_bytes() |
|
assert copied_contents == payload |
|
|
|
|
|
def test_bucket_copy_blob_w_user_project( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
user_project, |
|
): |
|
payload = b"DEADBEEF" |
|
bucket_name = _helpers.unique_name("copy-w-requester-pays") |
|
created = _helpers.retry_429_503(storage_client.create_bucket)( |
|
bucket_name, requester_pays=True |
|
) |
|
buckets_to_delete.append(created) |
|
assert created.name == bucket_name |
|
assert created.requester_pays |
|
|
|
blob = created.blob("simple") |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
with_user_project = storage_client.bucket(bucket_name, user_project=user_project) |
|
|
|
new_blob = _helpers.retry_bad_copy(with_user_project.copy_blob)( |
|
blob, with_user_project, "simple-copy" |
|
) |
|
blobs_to_delete.append(new_blob) |
|
|
|
assert new_blob.download_as_bytes() == payload |
|
|
|
|
|
def test_bucket_copy_blob_w_generation_match( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
payload = b"DEADBEEF" |
|
bucket_name = _helpers.unique_name("generation-match") |
|
created = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(created) |
|
assert created.name == bucket_name |
|
|
|
blob = created.blob("simple") |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
dest_bucket = storage_client.bucket(bucket_name) |
|
|
|
new_blob = dest_bucket.copy_blob( |
|
blob, |
|
dest_bucket, |
|
"simple-copy", |
|
if_source_generation_match=blob.generation, |
|
) |
|
blobs_to_delete.append(new_blob) |
|
|
|
assert new_blob.download_as_bytes() == payload |
|
|
|
|
|
def test_bucket_copy_blob_w_metageneration_match( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
payload = b"DEADBEEF" |
|
bucket_name = _helpers.unique_name("generation-match") |
|
bucket = storage_client.bucket(bucket_name) |
|
bucket.requester_pays = True |
|
created = _helpers.retry_429_503(storage_client.create_bucket)(bucket) |
|
buckets_to_delete.append(created) |
|
assert created.name == bucket_name |
|
|
|
blob = created.blob("simple") |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
dest_bucket = storage_client.bucket(bucket_name) |
|
|
|
new_blob = dest_bucket.copy_blob( |
|
blob, |
|
dest_bucket, |
|
"simple-copy", |
|
if_source_metageneration_match=blob.metageneration, |
|
) |
|
blobs_to_delete.append(new_blob) |
|
|
|
assert new_blob.download_as_bytes() == payload |
|
|
|
|
|
def test_bucket_get_blob_with_user_project( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
user_project, |
|
): |
|
blob_name = "blob-name" |
|
payload = b"DEADBEEF" |
|
bucket_name = _helpers.unique_name("w-requester-pays") |
|
created = _helpers.retry_429_503(storage_client.create_bucket)( |
|
bucket_name, requester_pays=True |
|
) |
|
buckets_to_delete.append(created) |
|
assert created.name == bucket_name |
|
assert created.requester_pays |
|
|
|
with_user_project = storage_client.bucket(bucket_name, user_project=user_project) |
|
|
|
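    # get_blob() returns None for a missing object rather than raising.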
assert with_user_project.get_blob("nonesuch") is None |
|
|
|
to_add = created.blob(blob_name) |
|
to_add.upload_from_string(payload) |
|
blobs_to_delete.append(to_add) |
|
|
|
found = with_user_project.get_blob(blob_name) |
|
assert found.download_as_bytes() == payload |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs(listable_bucket, listable_filenames): |
|
all_blobs = list(listable_bucket.list_blobs()) |
|
assert sorted(blob.name for blob in all_blobs) == sorted(listable_filenames) |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_w_user_project( |
|
storage_client, |
|
listable_bucket, |
|
listable_filenames, |
|
user_project, |
|
): |
|
with_user_project = storage_client.bucket( |
|
listable_bucket.name, user_project=user_project |
|
) |
|
all_blobs = list(with_user_project.list_blobs()) |
|
assert sorted(blob.name for blob in all_blobs) == sorted(listable_filenames) |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_paginated(listable_bucket, listable_filenames): |
|
truncation_size = 1 |
|
count = len(listable_filenames) - truncation_size |
|
iterator = listable_bucket.list_blobs(max_results=count) |
|
page_iter = iterator.pages |
|
|
|
page1 = next(page_iter) |
|
blobs = list(page1) |
|
assert len(blobs) == count |
|
assert iterator.next_page_token is not None |
|
|
|
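    # num_results reflects the blobs fetched so far, capped by max_results.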
assert iterator.num_results == iterator.max_results |
|
|
|
|
|
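    # Clear max_results so fetching the final page is not capped.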
iterator.max_results = None |
|
|
|
page2 = next(page_iter) |
|
last_blobs = list(page2) |
|
assert len(last_blobs) == truncation_size |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_paginated_w_offset(listable_bucket, listable_filenames): |
|
truncation_size = 1 |
|
inclusive_start_offset = listable_filenames[1] |
|
exclusive_end_offset = listable_filenames[-1] |
|
desired_files = listable_filenames[1:-1] |
|
count = len(desired_files) - truncation_size |
|
iterator = listable_bucket.list_blobs( |
|
max_results=count, |
|
start_offset=inclusive_start_offset, |
|
end_offset=exclusive_end_offset, |
|
) |
|
page_iter = iterator.pages |
|
|
|
page1 = next(page_iter) |
|
blobs = list(page1) |
|
assert len(blobs) == count |
|
assert blobs[0].name == desired_files[0] |
|
assert iterator.next_page_token is not None |
|
|
|
assert iterator.num_results == iterator.max_results |
|
|
|
|
|
iterator.max_results = None |
|
|
|
page2 = next(page_iter) |
|
last_blobs = list(page2) |
|
assert len(last_blobs) == truncation_size |
|
assert last_blobs[-1].name == desired_files[-1] |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_blob_exists_hierarchy(hierarchy_bucket, hierarchy_filenames): |
|
for filename in hierarchy_filenames: |
|
blob = hierarchy_bucket.blob(filename) |
|
assert blob.exists() |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_hierarchy_root_level(hierarchy_bucket, hierarchy_filenames): |
|
expected_names = ["file01.txt"] |
|
expected_prefixes = set(["parent/"]) |
|
|
|
iterator = hierarchy_bucket.list_blobs(delimiter="/") |
|
page = next(iterator.pages) |
|
blobs = list(page) |
|
|
|
assert [blob.name for blob in blobs] == expected_names |
|
assert iterator.next_page_token is None |
|
assert iterator.prefixes == expected_prefixes |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_hierarchy_first_level(hierarchy_bucket, hierarchy_filenames): |
|
expected_names = ["parent/", "parent/file11.txt"] |
|
expected_prefixes = set(["parent/child/"]) |
|
|
|
iterator = hierarchy_bucket.list_blobs(delimiter="/", prefix="parent/") |
|
page = next(iterator.pages) |
|
blobs = list(page) |
|
|
|
assert [blob.name for blob in blobs] == expected_names |
|
assert iterator.next_page_token is None |
|
assert iterator.prefixes == expected_prefixes |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_hierarchy_second_level( |
|
hierarchy_bucket, hierarchy_filenames |
|
): |
|
expected_names = ["parent/child/file21.txt", "parent/child/file22.txt"] |
|
expected_prefixes = set(["parent/child/grand/", "parent/child/other/"]) |
|
|
|
iterator = hierarchy_bucket.list_blobs(delimiter="/", prefix="parent/child/") |
|
page = next(iterator.pages) |
|
blobs = list(page) |
|
assert [blob.name for blob in blobs] == expected_names |
|
assert iterator.next_page_token is None |
|
assert iterator.prefixes == expected_prefixes |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_hierarchy_third_level(hierarchy_bucket, hierarchy_filenames): |
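    # Object namespaces are flat; the pseudo-hierarchy can nest arbitrarily
    # deep, limited only by the 1024-byte UTF-8 object name limit.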
|
|
|
|
|
|
|
|
|
expected_names = ["parent/child/grand/file31.txt"] |
|
expected_prefixes = set() |
|
|
|
iterator = hierarchy_bucket.list_blobs(delimiter="/", prefix="parent/child/grand/") |
|
page = next(iterator.pages) |
|
blobs = list(page) |
|
|
|
assert [blob.name for blob in blobs] == expected_names |
|
assert iterator.next_page_token is None |
|
assert iterator.prefixes == expected_prefixes |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_hierarchy_w_include_trailing_delimiter( |
|
hierarchy_bucket, |
|
hierarchy_filenames, |
|
): |
|
expected_names = ["file01.txt", "parent/"] |
|
expected_prefixes = set(["parent/"]) |
|
|
|
iterator = hierarchy_bucket.list_blobs( |
|
delimiter="/", include_trailing_delimiter=True |
|
) |
|
page = next(iterator.pages) |
|
blobs = list(page) |
|
|
|
assert [blob.name for blob in blobs] == expected_names |
|
assert iterator.next_page_token is None |
|
assert iterator.prefixes == expected_prefixes |
|
|
|
|
|
@_helpers.retry_failures |
|
def test_bucket_list_blobs_w_match_glob( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
bucket_name = _helpers.unique_name("w-matchglob") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
|
|
payload = b"helloworld" |
|
blob_names = ["foo/bar", "foo/baz", "foo/foobar", "foobar"] |
|
for name in blob_names: |
|
blob = bucket.blob(name) |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
match_glob_results = { |
|
"foo*bar": ["foobar"], |
|
"foo**bar": ["foo/bar", "foo/foobar", "foobar"], |
|
"**/foobar": ["foo/foobar", "foobar"], |
|
"*/ba[rz]": ["foo/bar", "foo/baz"], |
|
"*/ba[!a-y]": ["foo/baz"], |
|
"**/{foobar,baz}": ["foo/baz", "foo/foobar", "foobar"], |
|
"foo/{foo*,*baz}": ["foo/baz", "foo/foobar"], |
|
} |
|
for match_glob, expected_names in match_glob_results.items(): |
|
blob_iter = bucket.list_blobs(match_glob=match_glob) |
|
blobs = list(blob_iter) |
|
assert [blob.name for blob in blobs] == expected_names |
|
|
|
|
|
def test_bucket_list_blobs_include_managed_folders( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
hierarchy_filenames, |
|
): |
|
bucket_name = _helpers.unique_name("ubla-mf") |
|
bucket = storage_client.bucket(bucket_name) |
|
bucket.iam_configuration.uniform_bucket_level_access_enabled = True |
|
_helpers.retry_429_503(bucket.create)() |
|
buckets_to_delete.append(bucket) |
|
|
|
payload = b"helloworld" |
|
for filename in hierarchy_filenames: |
|
blob = bucket.blob(filename) |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
|
|
|
|
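    # Create a managed folder through the raw JSON API; _post_resource is a
    # private client helper, used here only for test setup.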
path = f"/b/{bucket_name}/managedFolders" |
|
properties = {"name": "managedfolder1"} |
|
storage_client._post_resource(path, properties) |
|
|
|
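    # By default, managed folders are not surfaced as prefixes.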
expected_prefixes = set(["parent/"]) |
|
blob_iter = bucket.list_blobs(delimiter="/") |
|
list(blob_iter) |
|
assert blob_iter.prefixes == expected_prefixes |
|
|
|
|
|
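    # With include_folders_as_prefixes=True, the managed folder appears as a prefix.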
expected_prefixes = set(["parent/", "managedfolder1/"]) |
|
blob_iter = bucket.list_blobs(delimiter="/", include_folders_as_prefixes=True) |
|
list(blob_iter) |
|
assert blob_iter.prefixes == expected_prefixes |
|
|
|
|
|
|
|
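    # Clean up the managed folder via the raw JSON API.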
path = f"/b/{bucket_name}/managedFolders/managedfolder1" |
|
storage_client._delete_resource(path) |
|
|
|
|
|
def test_bucket_update_retention_period( |
|
storage_client, |
|
buckets_to_delete, |
|
): |
|
period_secs = 3 |
|
bucket_name = _helpers.unique_name("w-retention-period") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
|
|
bucket.retention_period = period_secs |
|
bucket.default_event_based_hold = False |
|
bucket.patch() |
|
|
|
|
|
|
|
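    # Policy changes may take time to propagate; retry until visible.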
_helpers.retry_has_retention_period(bucket.reload)() |
|
|
|
assert bucket.retention_period == period_secs |
|
assert isinstance(bucket.retention_policy_effective_time, datetime.datetime) |
|
assert not bucket.default_event_based_hold |
|
assert not bucket.retention_policy_locked |
|
|
|
bucket.retention_period = None |
|
bucket.patch() |
|
|
|
|
|
|
|
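    # Retry until the policy removal is visible.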
_helpers.retry_no_retention_period(bucket.reload)() |
|
|
|
assert bucket.retention_period is None |
|
assert bucket.retention_policy_effective_time is None |
|
assert not bucket.default_event_based_hold |
|
assert not bucket.retention_policy_locked |
|
|
|
|
|
def test_delete_object_bucket_w_retention_period( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
|
|
period_secs = 12 |
|
bucket = storage_client.bucket(_helpers.unique_name("w-retention-period")) |
|
bucket.retention_period = period_secs |
|
bucket.default_event_based_hold = False |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket) |
|
buckets_to_delete.append(bucket) |
|
|
|
_helpers.retry_has_retention_period(bucket.reload)() |
|
assert bucket.retention_period == period_secs |
|
assert isinstance(bucket.retention_policy_effective_time, datetime.datetime) |
|
|
|
payload = b"DEADBEEF" |
|
blob = bucket.blob(_helpers.unique_name("w-retention")) |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
_helpers.retry_has_retention_expiration(blob.reload)() |
|
assert isinstance(blob.retention_expiration_time, datetime.datetime) |
|
assert not blob.event_based_hold |
|
assert not blob.temporary_hold |
|
|
|
|
|
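    # The object cannot be deleted while the retention period is in effect.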
with pytest.raises(exceptions.Forbidden): |
|
blob.delete() |
|
|
|
|
|
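    # Wait out the retention period; the delete then succeeds.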
_helpers.await_config_changes_propagate(sec=period_secs) |
|
blob.delete() |
|
blobs_to_delete.pop() |
|
|
|
|
|
def test_bucket_w_default_event_based_hold( |
|
storage_client, |
|
blobs_to_delete, |
|
default_ebh_bucket, |
|
): |
|
bucket = storage_client.get_bucket(default_ebh_bucket) |
|
assert bucket.default_event_based_hold |
|
assert bucket.retention_period is None |
|
assert bucket.retention_policy_effective_time is None |
|
assert not bucket.retention_policy_locked |
|
|
|
blob_name = "test-blob" |
|
payload = b"DEADBEEF" |
|
blob = bucket.blob(blob_name) |
|
blob.upload_from_string(payload) |
|
|
|
blobs_to_delete.append(blob) |
|
|
|
other = bucket.get_blob(blob_name) |
|
|
|
assert other.event_based_hold |
|
assert not other.temporary_hold |
|
assert other.retention_expiration_time is None |
|
|
|
with pytest.raises(exceptions.Forbidden): |
|
other.delete() |
|
|
|
other.event_based_hold = False |
|
other.patch() |
|
other.delete() |
|
|
|
bucket.default_event_based_hold = False |
|
bucket.patch() |
|
|
|
assert not bucket.default_event_based_hold |
|
assert bucket.retention_period is None |
|
assert bucket.retention_policy_effective_time is None |
|
assert not bucket.retention_policy_locked |
|
|
|
|
|
|
|
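    # Bucket metadata reads back immediately, but configuration changes may
    # take time to propagate before they affect newly written objects.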
_helpers.await_config_changes_propagate() |
|
|
|
blob.upload_from_string(payload) |
|
|
|
|
|
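    # Retry until the re-uploaded object reads back without the hold.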
_helpers.retry_no_event_based_hold(blob.reload)() |
|
|
|
assert not blob.event_based_hold |
|
assert not blob.temporary_hold |
|
assert blob.retention_expiration_time is None |
|
|
|
blob.delete() |
|
blobs_to_delete.pop() |
|
|
|
|
|
def test_blob_w_temporary_hold( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
bucket_name = _helpers.unique_name("w-tmp-hold") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
|
|
blob_name = "test-blob" |
|
payload = b"DEADBEEF" |
|
blob = bucket.blob(blob_name) |
|
blob.upload_from_string(payload) |
|
|
|
blobs_to_delete.append(blob) |
|
|
|
other = bucket.get_blob(blob_name) |
|
other.temporary_hold = True |
|
other.patch() |
|
|
|
assert other.temporary_hold |
|
assert not other.event_based_hold |
|
assert other.retention_expiration_time is None |
|
|
|
with pytest.raises(exceptions.Forbidden): |
|
other.delete() |
|
|
|
other.temporary_hold = False |
|
other.patch() |
|
|
|
other.delete() |
|
blobs_to_delete.pop() |
|
|
|
|
|
def test_bucket_lock_retention_policy( |
|
storage_client, |
|
buckets_to_delete, |
|
): |
|
period_secs = 10 |
|
bucket_name = _helpers.unique_name("loc-ret-policy") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
|
|
bucket.retention_period = period_secs |
|
bucket.patch() |
|
|
|
assert bucket.retention_period == period_secs |
|
assert isinstance(bucket.retention_policy_effective_time, datetime.datetime) |
|
assert not bucket.default_event_based_hold |
|
assert not bucket.retention_policy_locked |
|
|
|
bucket.lock_retention_policy() |
|
|
|
bucket.reload() |
|
assert bucket.retention_policy_locked |
|
|
|
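    # A locked retention policy cannot be removed.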
bucket.retention_period = None |
|
with pytest.raises(exceptions.Forbidden): |
|
bucket.patch() |
|
|
|
|
|
@pytest.mark.skipif( |
|
_helpers.is_api_endpoint_override, |
|
reason="Test does not yet support endpoint override", |
|
) |
|
def test_new_bucket_w_ubla( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
bucket_name = _helpers.unique_name("new-w-ubla") |
|
bucket = storage_client.bucket(bucket_name) |
|
bucket.iam_configuration.uniform_bucket_level_access_enabled = True |
|
_helpers.retry_429_503(bucket.create)() |
|
buckets_to_delete.append(bucket) |
|
|
|
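    # ACL reads fail with 400 while uniform bucket-level access is enabled.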
bucket_acl = bucket.acl |
|
with pytest.raises(exceptions.BadRequest): |
|
bucket_acl.reload() |
|
|
|
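    # Mark the ACL as loaded so save() still issues a request.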
bucket_acl.loaded = True |
|
bucket_acl.group("cloud-developer-relations@google.com").grant_read() |
|
with pytest.raises(exceptions.BadRequest): |
|
bucket_acl.save() |
|
|
|
blob_name = "my-blob.txt" |
|
blob = bucket.blob(blob_name) |
|
payload = b"DEADBEEF" |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
found = bucket.get_blob(blob_name) |
|
assert found.download_as_bytes() == payload |
|
|
|
blob_acl = blob.acl |
|
with pytest.raises(exceptions.BadRequest): |
|
blob_acl.reload() |
|
|
|
blob_acl.loaded = True |
|
blob_acl.group("cloud-developer-relations@google.com").grant_read() |
|
with pytest.raises(exceptions.BadRequest): |
|
blob_acl.save() |
|
|
|
|
|
def test_ubla_set_unset_preserves_acls( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
bucket_name = _helpers.unique_name("ubla-acls") |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
|
|
blob_name = "my-blob.txt" |
|
blob = bucket.blob(blob_name) |
|
payload = b"DEADBEEF" |
|
blob.upload_from_string(payload) |
|
blobs_to_delete.append(blob) |
|
|
|
|
|
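    # Capture bucket and blob ACLs before enabling UBLA.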
bucket_acl_before = list(bucket.acl) |
|
    blob_acl_before = list(blob.acl)
|
|
|
|
|
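    # Enable UBLA.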
bucket.iam_configuration.uniform_bucket_level_access_enabled = True |
|
bucket.patch() |
|
|
|
assert bucket.iam_configuration.uniform_bucket_level_access_enabled |
|
|
|
|
|
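    # While UBLA is enabled, ACL operations fail with 400.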
with pytest.raises(exceptions.BadRequest): |
|
bucket.acl.reload() |
|
|
|
|
|
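    # Disable UBLA and wait for the change to propagate.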
bucket.iam_configuration.uniform_bucket_level_access_enabled = False |
|
bucket.patch() |
|
_helpers.await_config_changes_propagate() |
|
|
|
|
|
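    # Both sets of ACLs should survive the UBLA round trip.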
bucket.acl.reload() |
|
bucket_acl_after = list(bucket.acl) |
|
blob.acl.reload() |
|
    blob_acl_after = list(blob.acl)
|
|
|
assert bucket_acl_before == bucket_acl_after |
|
assert blob_acl_before == blob_acl_after |
|
|
|
|
|
def test_new_bucket_created_w_inherited_pap( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
from google.cloud.storage import constants |
|
|
|
bucket_name = _helpers.unique_name("new-w-pap-inherited") |
|
bucket = storage_client.bucket(bucket_name) |
|
bucket.iam_configuration.uniform_bucket_level_access_enabled = True |
|
bucket.create() |
|
buckets_to_delete.append(bucket) |
|
|
|
|
|
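    # New buckets report "inherited" ("unspecified" on older backends).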
assert bucket.iam_configuration.public_access_prevention in [ |
|
constants.PUBLIC_ACCESS_PREVENTION_UNSPECIFIED, |
|
constants.PUBLIC_ACCESS_PREVENTION_INHERITED, |
|
] |
|
|
|
bucket.iam_configuration.public_access_prevention = ( |
|
constants.PUBLIC_ACCESS_PREVENTION_ENFORCED |
|
) |
|
bucket.patch() |
|
assert ( |
|
bucket.iam_configuration.public_access_prevention |
|
== constants.PUBLIC_ACCESS_PREVENTION_ENFORCED |
|
) |
|
assert bucket.iam_configuration.uniform_bucket_level_access_enabled |
|
|
|
bucket.iam_configuration.uniform_bucket_level_access_enabled = False |
|
bucket.patch() |
|
|
|
_helpers.await_config_changes_propagate() |
|
|
|
assert ( |
|
bucket.iam_configuration.public_access_prevention |
|
== constants.PUBLIC_ACCESS_PREVENTION_ENFORCED |
|
) |
|
|
|
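    # Invalid public access prevention values are rejected by the server.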
with pytest.raises(exceptions.BadRequest): |
|
bucket.iam_configuration.public_access_prevention = "unexpected value" |
|
bucket.patch() |
|
|
|
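    # With PAP enforced, neither the bucket nor its objects can be made public.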
with pytest.raises(exceptions.PreconditionFailed): |
|
bucket.make_public() |
|
|
|
blob_name = "my-blob.txt" |
|
blob = bucket.blob(blob_name) |
|
payload = b"DEADBEEF" |
|
blob.upload_from_string(payload) |
|
|
|
with pytest.raises(exceptions.PreconditionFailed): |
|
blob.make_public() |
|
|
|
|
|
@pytest.mark.skip(reason="Unspecified PAP is changing to inherited") |
|
def test_new_bucket_created_w_enforced_pap( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
from google.cloud.storage import constants |
|
|
|
bucket_name = _helpers.unique_name("new-w-pap-enforced") |
|
bucket = storage_client.bucket(bucket_name) |
|
bucket.iam_configuration.public_access_prevention = ( |
|
constants.PUBLIC_ACCESS_PREVENTION_ENFORCED |
|
) |
|
bucket.create() |
|
buckets_to_delete.append(bucket) |
|
|
|
assert ( |
|
bucket.iam_configuration.public_access_prevention |
|
== constants.PUBLIC_ACCESS_PREVENTION_ENFORCED |
|
) |
|
|
|
bucket.iam_configuration.public_access_prevention = ( |
|
constants.PUBLIC_ACCESS_PREVENTION_INHERITED |
|
) |
|
bucket.patch() |
|
|
|
|
|
assert bucket.iam_configuration.public_access_prevention in [ |
|
constants.PUBLIC_ACCESS_PREVENTION_UNSPECIFIED, |
|
constants.PUBLIC_ACCESS_PREVENTION_INHERITED, |
|
] |
|
assert not bucket.iam_configuration.uniform_bucket_level_access_enabled |
|
|
|
|
|
@pytest.mark.skipif( |
|
_helpers.is_api_endpoint_override, |
|
reason="Test does not yet support endpoint override", |
|
) |
|
def test_new_bucket_with_rpo( |
|
storage_client, |
|
buckets_to_delete, |
|
blobs_to_delete, |
|
): |
|
from google.cloud.storage import constants |
|
|
|
bucket_name = _helpers.unique_name("new-w-turbo-replication") |
|
bucket = storage_client.create_bucket(bucket_name, location="NAM4") |
|
buckets_to_delete.append(bucket) |
|
|
|
assert bucket.rpo == constants.RPO_DEFAULT |
|
|
|
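    # Turbo replication (ASYNC_TURBO) is available on dual-regions such as NAM4.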
bucket.rpo = constants.RPO_ASYNC_TURBO |
|
bucket.patch() |
|
|
|
bucket_from_server = storage_client.get_bucket(bucket_name) |
|
|
|
assert bucket_from_server.rpo == constants.RPO_ASYNC_TURBO |
|
|
|
|
|
def test_new_bucket_with_autoclass( |
|
storage_client, |
|
buckets_to_delete, |
|
): |
|
from google.cloud.storage import constants |
|
|
|
|
|
bucket_name = _helpers.unique_name("new-w-autoclass") |
|
bucket_obj = storage_client.bucket(bucket_name) |
|
bucket_obj.autoclass_enabled = True |
|
bucket = storage_client.create_bucket(bucket_obj) |
|
previous_toggle_time = bucket.autoclass_toggle_time |
|
buckets_to_delete.append(bucket) |
|
|
|
|
|
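    # NEARLINE is the default terminal storage class for autoclass.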
assert bucket.autoclass_enabled is True |
|
assert bucket.autoclass_terminal_storage_class == constants.NEARLINE_STORAGE_CLASS |
|
|
|
|
|
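    # Disabling autoclass updates the toggle time.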
bucket.autoclass_enabled = False |
|
bucket.patch(if_metageneration_match=bucket.metageneration) |
|
|
|
assert bucket.autoclass_enabled is False |
|
assert bucket.autoclass_toggle_time != previous_toggle_time |
|
|
|
|
|
def test_bucket_delete_force(storage_client): |
|
bucket_name = _helpers.unique_name("version-disabled") |
|
bucket_obj = storage_client.bucket(bucket_name) |
|
bucket = storage_client.create_bucket(bucket_obj) |
|
|
|
BLOB_NAME = "my_object" |
|
blob = bucket.blob(BLOB_NAME) |
|
blob.upload_from_string("abcd") |
|
blob.upload_from_string("efgh") |
|
|
|
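    # Without versioning, the second upload replaces the first generation.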
blobs = bucket.list_blobs(versions=True) |
|
counter = 0 |
|
for blob in blobs: |
|
counter += 1 |
|
assert blob.name == BLOB_NAME |
|
assert counter == 1 |
|
|
|
bucket.delete(force=True) |
|
|
|
|
|
def test_bucket_delete_force_works_with_versions(storage_client): |
|
bucket_name = _helpers.unique_name("version-enabled") |
|
bucket_obj = storage_client.bucket(bucket_name) |
|
bucket_obj.versioning_enabled = True |
|
bucket = storage_client.create_bucket(bucket_obj) |
|
assert bucket.versioning_enabled |
|
|
|
BLOB_NAME = "my_versioned_object" |
|
blob = bucket.blob(BLOB_NAME) |
|
blob.upload_from_string("abcd") |
|
blob.upload_from_string("efgh") |
|
|
|
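    # With versioning enabled, both generations remain listed.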
blobs = bucket.list_blobs(versions=True) |
|
counter = 0 |
|
for blob in blobs: |
|
counter += 1 |
|
assert blob.name == BLOB_NAME |
|
assert counter == 2 |
|
|
|
bucket.delete(force=True) |
|
|
|
|
|
def test_config_autoclass_w_existing_bucket( |
|
storage_client, |
|
buckets_to_delete, |
|
): |
|
from google.cloud.storage import constants |
|
|
|
bucket_name = _helpers.unique_name("for-autoclass") |
|
bucket = storage_client.create_bucket(bucket_name) |
|
buckets_to_delete.append(bucket) |
|
assert bucket.autoclass_enabled is False |
|
assert bucket.autoclass_toggle_time is None |
|
assert bucket.autoclass_terminal_storage_class is None |
|
assert bucket.autoclass_terminal_storage_class_update_time is None |
|
|
|
|
|
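    # Enable autoclass with a non-default terminal storage class.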
bucket.autoclass_enabled = True |
|
bucket.autoclass_terminal_storage_class = constants.ARCHIVE_STORAGE_CLASS |
|
bucket.patch(if_metageneration_match=bucket.metageneration) |
|
previous_tsc_update_time = bucket.autoclass_terminal_storage_class_update_time |
|
assert bucket.autoclass_enabled is True |
|
assert bucket.autoclass_terminal_storage_class == constants.ARCHIVE_STORAGE_CLASS |
|
|
|
|
|
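    # Switch the terminal storage class and verify the update time changes.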
bucket.autoclass_terminal_storage_class = constants.NEARLINE_STORAGE_CLASS |
|
bucket.patch(if_metageneration_match=bucket.metageneration) |
|
assert bucket.autoclass_enabled is True |
|
assert bucket.autoclass_terminal_storage_class == constants.NEARLINE_STORAGE_CLASS |
|
assert ( |
|
bucket.autoclass_terminal_storage_class_update_time != previous_tsc_update_time |
|
) |
|
|
|
|
|
def test_soft_delete_policy( |
|
storage_client, |
|
buckets_to_delete, |
|
): |
|
from google.cloud.storage.bucket import SoftDeletePolicy |
|
|
|
|
|
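    # Create a bucket with a 7-day soft-delete retention duration.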
duration_secs = 7 * 86400 |
|
bucket = storage_client.bucket(_helpers.unique_name("w-soft-delete")) |
|
bucket.soft_delete_policy.retention_duration_seconds = duration_secs |
|
bucket = _helpers.retry_429_503(storage_client.create_bucket)(bucket) |
|
buckets_to_delete.append(bucket) |
|
|
|
policy = bucket.soft_delete_policy |
|
assert isinstance(policy, SoftDeletePolicy) |
|
assert policy.retention_duration_seconds == duration_secs |
|
assert isinstance(policy.effective_time, datetime.datetime) |
|
|
|
|
|
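    # Insert an object and record its generation.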
payload = b"DEADBEEF" |
|
blob_name = _helpers.unique_name("soft-delete") |
|
blob = bucket.blob(blob_name) |
|
blob.upload_from_string(payload) |
|
|
|
blob = bucket.get_blob(blob_name) |
|
gen = blob.generation |
|
assert blob.soft_delete_time is None |
|
assert blob.hard_delete_time is None |
|
|
|
|
|
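    # Deleting the object soft-deletes it rather than removing it outright.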
blob.delete() |
|
|
|
iter_default = bucket.list_blobs() |
|
assert len(list(iter_default)) == 0 |
|
iter_w_soft_delete = bucket.list_blobs(soft_deleted=True) |
|
assert len(list(iter_w_soft_delete)) > 0 |
|
|
|
|
|
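    # Soft-deleted objects are retrievable with soft_deleted=True and a generation.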
soft_deleted_blob = bucket.get_blob(blob_name, generation=gen, soft_deleted=True) |
|
assert soft_deleted_blob.soft_delete_time is not None |
|
assert soft_deleted_blob.hard_delete_time is not None |
|
|
|
|
|
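    # Restoring the blob creates a new generation.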
restored_blob = bucket.restore_blob(blob_name, generation=gen) |
|
assert restored_blob.exists() is True |
|
assert restored_blob.generation != gen |
|
|
|
|
|
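    # The retention duration can be updated in place.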
new_duration_secs = 10 * 86400 |
|
bucket.soft_delete_policy.retention_duration_seconds = new_duration_secs |
|
bucket.patch() |
|
assert bucket.soft_delete_policy.retention_duration_seconds == new_duration_secs |
|
|
|
|
|
def test_new_bucket_with_hierarchical_namespace( |
|
storage_client, |
|
buckets_to_delete, |
|
): |
|
|
|
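    # Bucket created without the flag: the property reads back as None.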
bucket_name = _helpers.unique_name("new-wo-hns") |
|
bucket_obj = storage_client.bucket(bucket_name) |
|
bucket = storage_client.create_bucket(bucket_obj) |
|
buckets_to_delete.append(bucket) |
|
assert bucket.hierarchical_namespace_enabled is None |
|
|
|
|
|
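    # Bucket with hierarchical namespace explicitly disabled.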
bucket_name = _helpers.unique_name("new-hns-disabled") |
|
bucket_obj = storage_client.bucket(bucket_name) |
|
bucket_obj.hierarchical_namespace_enabled = False |
|
bucket = storage_client.create_bucket(bucket_obj) |
|
buckets_to_delete.append(bucket) |
|
assert bucket.hierarchical_namespace_enabled is False |
|
|
|
|
|
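    # Bucket with hierarchical namespace enabled (requires uniform bucket-level access).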
bucket_name = _helpers.unique_name("new-hns-enabled") |
|
bucket_obj = storage_client.bucket(bucket_name) |
|
bucket_obj.hierarchical_namespace_enabled = True |
|
bucket_obj.iam_configuration.uniform_bucket_level_access_enabled = True |
|
bucket = storage_client.create_bucket(bucket_obj) |
|
buckets_to_delete.append(bucket) |
|
assert bucket.hierarchical_namespace_enabled is True |
|
|