entry : canonicalizedExtensionHeaders.entrySet()) {
+ if ("X-Goog-Content-SHA256".equalsIgnoreCase(entry.getKey())) {
+ userProvidedHash = entry.getValue();
+ break;
+ }
+ }
canonicalRequest.append(userProvidedHash == null ? "UNSIGNED-PAYLOAD" : userProvidedHash);
return Hashing.sha256()
diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/testing/RemoteStorageHelper.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/testing/RemoteStorageHelper.java
index da4d96a119..4045641746 100644
--- a/google-cloud-storage/src/main/java/com/google/cloud/storage/testing/RemoteStorageHelper.java
+++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/testing/RemoteStorageHelper.java
@@ -16,6 +16,7 @@
package com.google.cloud.storage.testing;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.paging.Page;
import com.google.api.gax.retrying.RetrySettings;
import com.google.auth.oauth2.GoogleCredentials;
@@ -186,7 +187,27 @@ public static String generateBucketName() {
}
/**
- * Creates a {@code RemoteStorageHelper} object for the given project id and JSON key input
+ * This method is obsolete because of a potential security risk. Use the {@link #create(String,
+ * GoogleCredentials)} method instead.
+ *
+ * <p>If you know that you will be loading credential configurations of a specific type, it is
+ * recommended to use a credential-type-specific `fromStream()` method. This will ensure that an
+ * unexpected credential type with potential for malicious intent is not loaded unintentionally.
+ * You might still have to do validation for certain credential types. Please follow the
+ * recommendation for that method.
+ *
+ * <p>If you are loading your credential configuration from an untrusted source and have not
+ * mitigated the risks (e.g. by validating the configuration yourself), make these changes as soon
+ * as possible to prevent security risks to your environment.
+ *
+ * <p>Regardless of the method used, it is always your responsibility to validate configurations
+ * received from external sources.
+ *
+ * <p>See the {@see documentation}
+ * for more details.
+ *
+ * <p>Creates a {@code RemoteStorageHelper} object for the given project id and JSON key input
* stream.
*
* @param projectId id of the project to be used for running the tests
@@ -195,21 +216,13 @@ public static String generateBucketName() {
* @throws com.google.cloud.storage.testing.RemoteStorageHelper.StorageHelperException if {@code
* keyStream} is not a valid JSON key stream
*/
+ @ObsoleteApi(
+ "This method is obsolete because of a potential security risk. Use the create() variant with"
+ + " Credential parameter instead")
public static RemoteStorageHelper create(String projectId, InputStream keyStream)
throws StorageHelperException {
try {
- HttpTransportOptions transportOptions =
- HttpStorageOptions.defaults().getDefaultTransportOptions();
- transportOptions =
- transportOptions.toBuilder().setConnectTimeout(60000).setReadTimeout(60000).build();
- StorageOptions storageOptions =
- StorageOptions.http()
- .setCredentials(GoogleCredentials.fromStream(keyStream))
- .setProjectId(projectId)
- .setRetrySettings(retrySettings())
- .setTransportOptions(transportOptions)
- .build();
- return new RemoteStorageHelper(storageOptions);
+ return create(projectId, GoogleCredentials.fromStream(keyStream));
} catch (IOException ex) {
if (log.isLoggable(Level.WARNING)) {
log.log(Level.WARNING, ex.getMessage());
@@ -218,6 +231,28 @@ public static RemoteStorageHelper create(String projectId, InputStream keyStream
}
}
+ /**
+ * Creates a {@code RemoteStorageHelper} object for the given project id and credentials.
+ *
+ * @param projectId id of the project to be used for running the tests
+ * @param credentials the {@code GoogleCredentials} to set on the {@code StorageOptions}
+ * @return A {@code RemoteStorageHelper} object for the provided options
+ */
+ public static RemoteStorageHelper create(String projectId, GoogleCredentials credentials) {
+ HttpTransportOptions transportOptions =
+ HttpStorageOptions.defaults().getDefaultTransportOptions();
+ transportOptions =
+ transportOptions.toBuilder().setConnectTimeout(60000).setReadTimeout(60000).build();
+ StorageOptions storageOptions =
+ StorageOptions.http()
+ .setCredentials(credentials)
+ .setProjectId(projectId)
+ .setRetrySettings(retrySettings())
+ .setTransportOptions(transportOptions)
+ .build();
+ return new RemoteStorageHelper(storageOptions);
+ }
+
/**
* Creates a {@code RemoteStorageHelper} object using default project id and authentication
* credentials.
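Editor's note: the RemoteStorageHelper change above replaces stream-based credential loading with an explicit GoogleCredentials parameter. The following is a minimal migration sketch, not part of the patch; the project id and key-file path are placeholders, and it assumes the stream contains a service account configuration, which ServiceAccountCredentials.fromStream will reject otherwise.

import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.storage.testing.RemoteStorageHelper;
import java.io.FileInputStream;

public class RemoteStorageHelperMigration {
  public static void main(String[] args) throws Exception {
    String projectId = "my-project";                      // placeholder
    String keyPath = "/path/to/service-account-key.json"; // placeholder

    // Before: RemoteStorageHelper.create(projectId, new FileInputStream(keyPath))
    // accepted whatever credential type happened to be in the stream.
    try (FileInputStream keyStream = new FileInputStream(keyPath)) {
      // Load a specific credential type so an unexpected configuration fails fast.
      ServiceAccountCredentials credentials = ServiceAccountCredentials.fromStream(keyStream);
      // New overload added in this patch: pass the explicitly loaded credentials.
      RemoteStorageHelper helper = RemoteStorageHelper.create(projectId, credentials);
      // The helper's StorageOptions now carry the explicitly validated credentials.
      System.out.println(helper.getOptions().getProjectId());
    }
  }
}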
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/FakeServer.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/FakeServer.java
index d4ba66360b..0481f2b062 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/FakeServer.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/FakeServer.java
@@ -75,7 +75,7 @@ static FakeServer of(StorageGrpc.StorageImplBase service) throws IOException {
.setRetryDelayMultiplier(1.2)
.setMaxRetryDelayDuration(Duration.ofSeconds(16))
.setMaxAttempts(6)
- .setInitialRpcTimeoutDuration(Duration.ofSeconds(25))
+ .setInitialRpcTimeoutDuration(Duration.ofSeconds(1))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofSeconds(25))
.build())
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/GapicUploadSessionBuilderSyntaxTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/GapicUploadSessionBuilderSyntaxTest.java
index e35278b5b1..dddd5b8e16 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/GapicUploadSessionBuilderSyntaxTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/GapicUploadSessionBuilderSyntaxTest.java
@@ -20,9 +20,9 @@
import static org.mockito.Mockito.when;
import com.google.api.core.ApiFuture;
-import com.google.api.core.ApiFutures;
import com.google.api.gax.rpc.ClientStreamingCallable;
import com.google.api.gax.rpc.UnaryCallable;
+import com.google.cloud.storage.Retrying.RetrierWithAlg;
import com.google.cloud.storage.UnifiedOpts.Opts;
import com.google.storage.v2.StartResumableWriteRequest;
import com.google.storage.v2.StartResumableWriteResponse;
@@ -60,8 +60,8 @@ public final class GapicUploadSessionBuilderSyntaxTest {
@Before
public void setUp() throws Exception {
- when(startResumableWrite.futureCall(any()))
- .thenReturn(ApiFutures.immediateFuture(StartResumableWriteResponse.getDefaultInstance()));
+ when(startResumableWrite.call(any()))
+ .thenReturn(StartResumableWriteResponse.getDefaultInstance());
}
@Test
@@ -95,7 +95,9 @@ public void syntax_directBuffered_fluent() {
@Test
public void syntax_resumableUnbuffered_fluent() {
ApiFuture startAsync =
- ResumableMedia.gapic().write().resumableWrite(startResumableWrite, req, Opts.empty());
+ ResumableMedia.gapic()
+ .write()
+ .resumableWrite(startResumableWrite, req, Opts.empty(), RetrierWithAlg.attemptOnce());
UnbufferedWritableByteChannelSession session =
ResumableMedia.gapic()
.write()
@@ -111,7 +113,9 @@ public void syntax_resumableUnbuffered_fluent() {
@Test
public void syntax_resumableBuffered_fluent() {
ApiFuture startAsync =
- ResumableMedia.gapic().write().resumableWrite(startResumableWrite, req, Opts.empty());
+ ResumableMedia.gapic()
+ .write()
+ .resumableWrite(startResumableWrite, req, Opts.empty(), RetrierWithAlg.attemptOnce());
BufferedWritableByteChannelSession session =
ResumableMedia.gapic()
.write()
@@ -151,7 +155,9 @@ public void syntax_directBuffered_incremental() {
@Test
public void syntax_resumableUnbuffered_incremental() {
ApiFuture startAsync =
- ResumableMedia.gapic().write().resumableWrite(startResumableWrite, req, Opts.empty());
+ ResumableMedia.gapic()
+ .write()
+ .resumableWrite(startResumableWrite, req, Opts.empty(), RetrierWithAlg.attemptOnce());
GapicWritableByteChannelSessionBuilder b1 =
ResumableMedia.gapic()
.write()
@@ -165,7 +171,9 @@ public void syntax_resumableUnbuffered_incremental() {
@Test
public void syntax_resumableBuffered_incremental() {
ApiFuture startAsync =
- ResumableMedia.gapic().write().resumableWrite(startResumableWrite, req, Opts.empty());
+ ResumableMedia.gapic()
+ .write()
+ .resumableWrite(startResumableWrite, req, Opts.empty(), RetrierWithAlg.attemptOnce());
GapicWritableByteChannelSessionBuilder b1 =
ResumableMedia.gapic()
.write()
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedReadableByteChannelTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedReadableByteChannelTest.java
index 8006d2b534..1e1c05915a 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedReadableByteChannelTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedReadableByteChannelTest.java
@@ -18,6 +18,7 @@
import static com.google.cloud.storage.TestUtils.apiException;
import static com.google.cloud.storage.TestUtils.getChecksummedData;
+import static com.google.cloud.storage.TestUtils.xxd;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
@@ -236,7 +237,7 @@ public void readObject(
}
@Test
- public void ioException_if_crc32c_mismatch_individual_message()
+ public void ifCrc32cMismatchIndividualMessage_restartFromCorrectOffset()
throws IOException, InterruptedException {
StorageGrpc.StorageImplBase fakeStorage =
new StorageGrpc.StorageImplBase() {
@@ -245,10 +246,12 @@ public void readObject(
ReadObjectRequest request, StreamObserver<ReadObjectResponse> responseObserver) {
if (request.equals(req1)) {
responseObserver.onNext(resp1);
- ReadObjectResponse.Builder b = resp2.toBuilder();
+ responseObserver.onNext(resp2);
+ ReadObjectResponse.Builder b = resp3.toBuilder();
// set a bad checksum value
b.getChecksummedDataBuilder().setCrc32C(1);
responseObserver.onNext(b.build());
+ } else if (request.equals(req2)) {
responseObserver.onNext(resp3);
responseObserver.onNext(resp4);
responseObserver.onCompleted();
@@ -276,10 +279,10 @@ public void readObject(
retryOnly(DataLossException.class)));
byte[] actualBytes = new byte[40];
try (UnbufferedReadableByteChannel c = session.open()) {
- IOException ioException =
- assertThrows(IOException.class, () -> c.read(ByteBuffer.wrap(actualBytes)));
+ int read = c.read(ByteBuffer.wrap(actualBytes));
- assertThat(ioException).hasMessageThat().contains("Mismatch checksum");
+ assertThat(read).isEqualTo(40);
+ assertThat(xxd(actualBytes)).isEqualTo(xxd(bytes));
}
}
}
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedWritableByteChannelTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedWritableByteChannelTest.java
index e23b2a57f7..3aa567a4d6 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedWritableByteChannelTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITGapicUnbufferedWritableByteChannelTest.java
@@ -20,9 +20,12 @@
import static com.google.cloud.storage.TestUtils.getChecksummedData;
import static com.google.common.truth.Truth.assertThat;
+import com.google.api.core.ApiFuture;
import com.google.api.core.SettableApiFuture;
+import com.google.api.gax.grpc.GrpcCallContext;
import com.google.api.gax.rpc.PermissionDeniedException;
import com.google.cloud.storage.Retrying.RetrierWithAlg;
+import com.google.cloud.storage.UnifiedOpts.Opts;
import com.google.cloud.storage.WriteCtx.SimpleWriteObjectRequestBuilderFactory;
import com.google.cloud.storage.WriteCtx.WriteObjectRequestBuilderFactory;
import com.google.common.collect.ImmutableList;
@@ -47,7 +50,10 @@
import java.util.List;
import java.util.Locale;
import java.util.Set;
+import java.util.UUID;
import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
@@ -210,6 +216,45 @@ public void resumableUpload() throws IOException, InterruptedException, Executio
}
}
+ @Test
+ public void startResumableUpload_deadlineExceeded_isRetried()
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+
+ String uploadId = UUID.randomUUID().toString();
+ AtomicInteger callCount = new AtomicInteger(0);
+ StorageImplBase service =
+ new StorageImplBase() {
+ @Override
+ public void startResumableWrite(
+ StartResumableWriteRequest req, StreamObserver<StartResumableWriteResponse> respond) {
+ if (callCount.getAndIncrement() > 0) {
+ respond.onNext(
+ StartResumableWriteResponse.newBuilder().setUploadId(uploadId).build());
+ respond.onCompleted();
+ }
+ }
+ };
+ try (FakeServer fake = FakeServer.of(service)) {
+ GrpcStorageImpl gsi = (GrpcStorageImpl) fake.getGrpcStorageOptions().getService();
+ ApiFuture<ResumableWrite> f =
+ gsi.startResumableWrite(
+ GrpcCallContext.createDefault(),
+ WriteObjectRequest.newBuilder()
+ .setWriteObjectSpec(
+ WriteObjectSpec.newBuilder()
+ .setResource(
+ Object.newBuilder().setBucket("bucket").setName("name").build())
+ .setIfGenerationMatch(0)
+ .build())
+ .build(),
+ Opts.empty());
+
+ ResumableWrite resumableWrite = f.get(2, TimeUnit.MINUTES);
+ assertThat(callCount.get()).isEqualTo(2);
+ assertThat(resumableWrite.newBuilder().build().getUploadId()).isEqualTo(uploadId);
+ }
+ }
+
@Test
public void resumableUpload_chunkAutomaticRetry()
throws IOException, InterruptedException, ExecutionException {
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITUnbufferedResumableUploadTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITUnbufferedResumableUploadTest.java
index 69b876ad35..59f694fe30 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITUnbufferedResumableUploadTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITUnbufferedResumableUploadTest.java
@@ -24,6 +24,7 @@
import com.google.api.services.storage.model.StorageObject;
import com.google.cloud.storage.ITUnbufferedResumableUploadTest.ObjectSizes;
import com.google.cloud.storage.Retrying.Retrier;
+import com.google.cloud.storage.Retrying.RetrierWithAlg;
import com.google.cloud.storage.TransportCompatibility.Transport;
import com.google.cloud.storage.UnbufferedWritableByteChannelSession.UnbufferedWritableByteChannel;
import com.google.cloud.storage.UnifiedOpts.ObjectTargetOpt;
@@ -241,7 +242,8 @@ private UnbufferedWritableByteChannelSession grpcSession()
.resumableWrite(
storageClient.startResumableWriteCallable().withDefaultCallContext(merge),
request,
- opts);
+ opts,
+ RetrierWithAlg.attemptOnce());
return ResumableMedia.gapic()
.write()
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/ObjectReadSessionStreamReadTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/ObjectReadSessionStreamReadTest.java
index 3e70f9c048..9db08fd638 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/ObjectReadSessionStreamReadTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/ObjectReadSessionStreamReadTest.java
@@ -356,31 +356,9 @@ public void streamingRead_eofShouldBeReturnedIfNoOtherBytesRead() throws Excepti
1, RangeSpec.of(0, 137), Hasher.enabled(), RetryContext.neverRetry())) {
read.eof();
assertThat(read.read(ByteBuffer.allocate(1))).isEqualTo(-1);
-
- assertAll(
- () -> assertThrows(ClosedChannelException.class, () -> read.read((ByteBuffer) null)),
- () -> assertThat(read.isOpen()).isFalse());
- }
- }
-
- @Test
- public void streamingRead_closedOnceEofIsRead() throws Exception {
- try (StreamingRead read =
- ObjectReadSessionStreamRead.streamingRead(
- 1, RangeSpec.of(0, 137), Hasher.enabled(), RetryContext.neverRetry())) {
- ByteString bytes1 = ByteString.copyFrom(DataGenerator.base64Characters().genBytes(62));
- try (ResponseContentLifecycleHandle handle = noopContentHandle(bytes1)) {
- read.accept(handle.borrow(Function.identity()));
- }
-
- ByteBuffer buf = ByteBuffer.allocate(512);
- read.read(buf);
- read.eof();
- assertThat(read.read(buf)).isEqualTo(-1);
-
- assertAll(
- () -> assertThrows(ClosedChannelException.class, () -> read.read(buf)),
- () -> assertThat(read.isOpen()).isFalse());
+ assertThat(read.isOpen()).isTrue();
+ read.close();
+ assertThat(read.isOpen()).isFalse();
}
}
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/SignatureInfoTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/SignatureInfoTest.java
index 7b146a7134..711a84d041 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/SignatureInfoTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/SignatureInfoTest.java
@@ -20,7 +20,9 @@
import static org.junit.Assert.assertTrue;
import com.google.cloud.storage.SignatureInfo.Builder;
+import com.google.common.hash.Hashing;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
@@ -98,4 +100,39 @@ public void constructV4QueryString() {
+ "auto%2Fstorage%2Fgoog4_request&X-Goog-Date=20010909T014640Z&X-Goog-Expires=10&X-Goog-SignedHeaders=host",
queryString);
}
+
+ @Test
+ public void constructV4UnsignedPayloadWithContentSha256Header() {
+ Builder builder = new SignatureInfo.Builder(HttpMethod.PUT, 10L, URI.create(RESOURCE));
+ builder.setSignatureVersion(Storage.SignUrlOption.SignatureVersion.V4);
+ builder.setAccountEmail("me@google.com");
+ builder.setTimestamp(1000000000000L);
+
+ Map<String, String> extensionHeaders = new HashMap<>();
+ // Add the header with a lowercase key, which triggers the bug.
+ String contentSha256 = "sha256";
+ extensionHeaders.put("X-goog-content-sha256", contentSha256);
+ builder.setCanonicalizedExtensionHeaders(extensionHeaders);
+
+ // This is the payload hash that SHOULD be generated
+ String correctCanonicalRequest =
+ "PUT\n"
+ + "/bucketName/blobName\n"
+ + "X-Goog-Algorithm=GOOG4-RSA-SHA256&X-Goog-Credential=me%40google.com%2F20010909%2Fauto%2Fstorage%2Fgoog4_request&X-Goog-Date=20010909T014640Z&X-Goog-Expires=10&X-Goog-SignedHeaders=host%3Bx-goog-content-sha256\n"
+ + "host:storage.googleapis.com\n"
+ + "x-goog-content-sha256:"
+ + contentSha256
+ + "\n"
+ + "\n"
+ + "host;x-goog-content-sha256\n"
+ + contentSha256;
+ String expectedPayloadHash =
+ Hashing.sha256().hashString(correctCanonicalRequest, StandardCharsets.UTF_8).toString();
+
+ String unsignedPayload = builder.build().constructUnsignedPayload();
+ String[] parts = unsignedPayload.split("\n");
+ String generatedPayloadHash = parts[parts.length - 1];
+
+ assertEquals(expectedPayloadHash, generatedPayloadHash);
+ }
}
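Editor's note: the SignatureInfo change at the top of this patch, exercised by the test above, lets a user-supplied X-Goog-Content-SHA256 extension header (matched case-insensitively) replace UNSIGNED-PAYLOAD in the V4 canonical request. Below is a minimal sketch of how a caller might rely on that when signing a URL; the bucket name, object name, and hash value are placeholders, and it assumes the default credentials can sign (for example, a service account).

import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.HttpMethod;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.SignUrlOption;
import com.google.cloud.storage.StorageOptions;
import com.google.common.collect.ImmutableMap;
import java.net.URL;
import java.util.Map;
import java.util.concurrent.TimeUnit;

public class SignedUrlWithContentSha256 {
  public static void main(String[] args) {
    Storage storage = StorageOptions.getDefaultInstance().getService();
    BlobInfo blobInfo = BlobInfo.newBuilder("my-bucket", "my-object").build(); // placeholders

    // The client promises to send this header with the PUT; after the fix its value,
    // rather than UNSIGNED-PAYLOAD, is hashed into the canonical request even though
    // the key is not spelled exactly "X-Goog-Content-SHA256".
    Map<String, String> extHeaders =
        ImmutableMap.of("x-goog-content-sha256", "<hex-encoded-sha256-of-payload>");

    URL signedUrl =
        storage.signUrl(
            blobInfo,
            15,
            TimeUnit.MINUTES,
            SignUrlOption.httpMethod(HttpMethod.PUT),
            SignUrlOption.withExtHeaders(extHeaders),
            SignUrlOption.withV4Signature());
    System.out.println(signedUrl);
  }
}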
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITAccessTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITAccessTest.java
index 2b5e7b9e7b..04d74ebcff 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITAccessTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITAccessTest.java
@@ -385,12 +385,18 @@ public void testEnableAndDisableUniformBucketLevelAccessOnExistingBucket() throw
BucketTargetOption.metagenerationMatch());
Bucket remoteBucket =
- storage.get(bpoBucket, Storage.BucketGetOption.fields(BucketField.IAMCONFIGURATION));
+ storage.get(
+ bpoBucket,
+ Storage.BucketGetOption.fields(
+ BucketField.IAMCONFIGURATION, BucketField.METAGENERATION));
assertTrue(remoteBucket.getIamConfiguration().isUniformBucketLevelAccessEnabled());
assertNotNull(remoteBucket.getIamConfiguration().getUniformBucketLevelAccessLockedTime());
- remoteBucket.toBuilder().setIamConfiguration(ublaDisabledIamConfiguration).build().update();
+ remoteBucket.toBuilder()
+ .setIamConfiguration(ublaDisabledIamConfiguration)
+ .build()
+ .update(BucketTargetOption.metagenerationMatch());
remoteBucket =
storage.get(
@@ -600,7 +606,10 @@ public void testRetentionPolicyNoLock() throws Exception {
assertThat(remoteBucket.retentionPolicyIsLocked()).isAnyOf(null, false);
Bucket remoteBucket2 =
- storage.get(bucketName, Storage.BucketGetOption.fields(BucketField.RETENTION_POLICY));
+ storage.get(
+ bucketName,
+ Storage.BucketGetOption.fields(
+ BucketField.RETENTION_POLICY, BucketField.METAGENERATION));
assertEquals(RETENTION_PERIOD, remoteBucket2.getRetentionPeriod());
assertThat(remoteBucket2.getRetentionPeriodDuration()).isEqualTo(RETENTION_PERIOD_DURATION);
assertNotNull(remoteBucket2.getRetentionEffectiveTime());
@@ -611,7 +620,11 @@ public void testRetentionPolicyNoLock() throws Exception {
Blob remoteBlob = storage.create(blobInfo);
assertNotNull(remoteBlob.getRetentionExpirationTime());
- Bucket remoteBucket3 = remoteBucket2.toBuilder().setRetentionPeriod(null).build().update();
+ Bucket remoteBucket3 =
+ remoteBucket2.toBuilder()
+ .setRetentionPeriod(null)
+ .build()
+ .update(BucketTargetOption.metagenerationMatch());
assertNull(remoteBucket3.getRetentionPeriod());
}
}
@@ -653,7 +666,7 @@ public void testEnableAndDisableBucketPolicyOnlyOnExistingBucket() throws Except
.setIamConfiguration(
bpoEnabledIamConfiguration.toBuilder().setIsBucketPolicyOnlyEnabled(false).build())
.build()
- .update();
+ .update(BucketTargetOption.metagenerationMatch());
remoteBucket =
storage.get(
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBucketTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBucketTest.java
index 378f1bd45b..7cf2f7f4ee 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBucketTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBucketTest.java
@@ -17,7 +17,6 @@
package com.google.cloud.storage.it;
import static com.google.common.truth.Truth.assertThat;
-import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
@@ -40,6 +39,7 @@
import com.google.cloud.storage.Rpo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.BlobField;
+import com.google.cloud.storage.Storage.BlobTargetOption;
import com.google.cloud.storage.Storage.BucketField;
import com.google.cloud.storage.Storage.BucketGetOption;
import com.google.cloud.storage.Storage.BucketListOption;
@@ -62,12 +62,10 @@
import java.time.OffsetDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.StreamSupport;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -321,54 +319,6 @@ private void retentionPolicyLockRequesterPays(boolean requesterPays) {
}
}
- @Test
- // Bug in UpdateMask
- @CrossRun.Exclude(transports = Transport.GRPC)
- @Ignore("Make hermetic, currently mutates global bucket")
- public void testUpdateBucketLabel() {
- Bucket remoteBucket =
- storage.get(
- bucket.getName(), Storage.BucketGetOption.fields(BucketField.ID, BucketField.BILLING));
- assertNull(remoteBucket.getLabels());
- remoteBucket = remoteBucket.toBuilder().setLabels(BUCKET_LABELS).build();
- Bucket updatedBucket = storage.update(remoteBucket);
- assertEquals(BUCKET_LABELS, updatedBucket.getLabels());
- remoteBucket.toBuilder().setLabels(Collections.emptyMap()).build().update();
- assertNull(storage.get(bucket.getName()).getLabels());
- }
-
- @Test
- @CrossRun.Exclude(transports = Transport.GRPC)
- @Ignore("Make hermetic, currently mutates global bucket")
- public void testUpdateBucketRequesterPays() {
- // Bug in UpdateMask
- unsetRequesterPays();
- Bucket remoteBucket =
- storage.get(
- requesterPaysBucket.getName(),
- Storage.BucketGetOption.fields(BucketField.ID, BucketField.BILLING));
- assertTrue(remoteBucket.requesterPays() == null || !remoteBucket.requesterPays());
- remoteBucket = remoteBucket.toBuilder().setRequesterPays(true).build();
- Bucket updatedBucket = storage.update(remoteBucket);
- assertTrue(updatedBucket.requesterPays());
-
- String projectId = storage.getOptions().getProjectId();
- Bucket.BlobTargetOption option = Bucket.BlobTargetOption.userProject(projectId);
- String blobName = "test-create-empty-blob-requester-pays";
- Blob remoteBlob = updatedBucket.create(blobName, BLOB_BYTE_CONTENT, option);
- assertNotNull(remoteBlob);
- byte[] readBytes =
- storage.readAllBytes(
- requesterPaysBucket.getName(),
- blobName,
- Storage.BlobSourceOption.userProject(projectId));
- assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
- remoteBucket = remoteBucket.toBuilder().setRequesterPays(false).build();
- updatedBucket = storage.update(remoteBucket, Storage.BucketTargetOption.userProject(projectId));
-
- assertFalse(updatedBucket.requesterPays());
- }
-
@Test
public void testEnableDisableBucketDefaultEventBasedHold() {
String bucketName = generator.randomBucketName();
@@ -378,19 +328,30 @@ public void testEnableDisableBucketDefaultEventBasedHold() {
assertTrue(remoteBucket.getDefaultEventBasedHold());
remoteBucket =
storage.get(
- bucketName, Storage.BucketGetOption.fields(BucketField.DEFAULT_EVENT_BASED_HOLD));
+ bucketName,
+ Storage.BucketGetOption.fields(
+ BucketField.DEFAULT_EVENT_BASED_HOLD, BucketField.METAGENERATION));
assertTrue(remoteBucket.getDefaultEventBasedHold());
String blobName = generator.randomObjectName();
BlobInfo blobInfo = BlobInfo.newBuilder(bucketName, blobName).build();
- Blob remoteBlob = storage.create(blobInfo);
+ Blob remoteBlob = storage.create(blobInfo, BlobTargetOption.doesNotExist());
assertTrue(remoteBlob.getEventBasedHold());
remoteBlob =
storage.get(
- blobInfo.getBlobId(), Storage.BlobGetOption.fields(BlobField.EVENT_BASED_HOLD));
+ blobInfo.getBlobId(),
+ Storage.BlobGetOption.fields(BlobField.EVENT_BASED_HOLD, BlobField.METAGENERATION));
assertTrue(remoteBlob.getEventBasedHold());
- remoteBlob = remoteBlob.toBuilder().setEventBasedHold(false).build().update();
+ remoteBlob =
+ remoteBlob.toBuilder()
+ .setEventBasedHold(false)
+ .build()
+ .update(BlobTargetOption.metagenerationMatch());
assertFalse(remoteBlob.getEventBasedHold());
- remoteBucket = remoteBucket.toBuilder().setDefaultEventBasedHold(false).build().update();
+ remoteBucket =
+ remoteBucket.toBuilder()
+ .setDefaultEventBasedHold(false)
+ .build()
+ .update(BucketTargetOption.metagenerationMatch());
assertFalse(remoteBucket.getDefaultEventBasedHold());
} finally {
BucketCleaner.doCleanup(bucketName, storage);
@@ -465,7 +426,9 @@ public void testObjectRetention() {
.setRetainUntilTime(now.plusHours(1))
.build())
.build()
- .update(Storage.BlobTargetOption.overrideUnlockedRetention(true));
+ .update(
+ Storage.BlobTargetOption.overrideUnlockedRetention(true),
+ BlobTargetOption.metagenerationMatch());
remoteBlob = storage.get(bucketName, "retentionObject");
assertEquals(
@@ -480,7 +443,9 @@ public void testObjectRetention() {
remoteBlob.toBuilder()
.setRetention(null)
.build()
- .update(Storage.BlobTargetOption.overrideUnlockedRetention(true));
+ .update(
+ Storage.BlobTargetOption.overrideUnlockedRetention(true),
+ BlobTargetOption.metagenerationMatch());
remoteBlob = storage.get(bucketName, "retentionObject");
assertNull(remoteBlob.getRetention());
@@ -545,7 +510,7 @@ public void testUpdateBucket_noModification() throws Exception {
storage.get(
bucket.getName(), BucketGetOption.metagenerationMatch(bucket.getMetageneration()));
- Bucket gen2 = storage.update(gen1);
+ Bucket gen2 = storage.update(gen1, BucketTargetOption.metagenerationMatch());
assertThat(gen2).isEqualTo(gen1);
}
}
@@ -606,7 +571,7 @@ public void testSoftDeletePolicy() {
.setRetentionDuration(Duration.ofDays(20))
.build())
.build()
- .update();
+ .update(BucketTargetOption.metagenerationMatch());
assertEquals(
Duration.ofDays(20),
@@ -688,19 +653,4 @@ public void testListObjectsWithFolders() throws Exception {
BucketCleaner.doCleanup(bucketName, storage);
}
}
-
- private void unsetRequesterPays() {
- Bucket remoteBucket =
- storage.get(
- requesterPaysBucket.getName(),
- Storage.BucketGetOption.fields(BucketField.ID, BucketField.BILLING),
- Storage.BucketGetOption.userProject(storage.getOptions().getProjectId()));
- // Disable requester pays in case a test fails to clean up.
- if (remoteBucket.requesterPays() != null && remoteBucket.requesterPays() == true) {
- remoteBucket.toBuilder()
- .setRequesterPays(false)
- .build()
- .update(Storage.BucketTargetOption.userProject(storage.getOptions().getProjectId()));
- }
- }
}
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITObjectTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITObjectTest.java
index 3b3c3e7dc6..dc2797616f 100644
--- a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITObjectTest.java
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITObjectTest.java
@@ -84,7 +84,6 @@
import java.nio.file.Paths;
import java.security.Key;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
@@ -146,7 +145,7 @@ public void testCreateBlob() {
String blobName = generator.randomObjectName();
BlobInfo blob =
BlobInfo.newBuilder(bucket, blobName).setCustomTime(System.currentTimeMillis()).build();
- Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT);
+ Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT, BlobTargetOption.doesNotExist());
assertNotNull(remoteBlob);
assertNotNull(remoteBlob.getCustomTime());
assertEquals(blob.getBucket(), remoteBlob.getBucket());
@@ -165,7 +164,7 @@ public void testCreateBlobMd5Crc32cFromHexString() {
.setMd5FromHexString("3b54781b51c94835084898e821899585")
.setCrc32cFromHexString("f4ddc43d")
.build();
- Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT);
+ Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT, BlobTargetOption.doesNotExist());
assertNotNull(remoteBlob);
assertEquals(blob.getBucket(), remoteBlob.getBucket());
assertEquals(blob.getName(), remoteBlob.getName());
@@ -180,7 +179,12 @@ public void testCreateBlobMd5Crc32cFromHexString() {
public void testCreateGetBlobWithEncryptionKey() {
String blobName = generator.randomObjectName();
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).build();
- Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT, BlobTargetOption.encryptionKey(KEY));
+ Blob remoteBlob =
+ storage.create(
+ blob,
+ BLOB_BYTE_CONTENT,
+ BlobTargetOption.encryptionKey(KEY),
+ BlobTargetOption.doesNotExist());
assertNotNull(remoteBlob);
assertEquals(blob.getBucket(), remoteBlob.getBucket());
assertEquals(blob.getName(), remoteBlob.getName());
@@ -201,7 +205,7 @@ public void testCreateGetBlobWithEncryptionKey() {
public void testCreateEmptyBlob() {
String blobName = generator.randomObjectName();
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).build();
- Blob remoteBlob = storage.create(blob);
+ Blob remoteBlob = storage.create(blob, BlobTargetOption.doesNotExist());
assertNotNull(remoteBlob);
assertEquals(blob.getBucket(), remoteBlob.getBucket());
assertEquals(blob.getName(), remoteBlob.getName());
@@ -218,7 +222,8 @@ public void testZeroByteFileUpload() throws Exception {
File zeroByteFile = File.createTempFile("zerobyte", null);
zeroByteFile.deleteOnExit();
- storage.createFrom(blobInfo, Paths.get(zeroByteFile.getAbsolutePath()));
+ storage.createFrom(
+ blobInfo, Paths.get(zeroByteFile.getAbsolutePath()), BlobWriteOption.doesNotExist());
byte[] readBytes = storage.readAllBytes(bucket.getName(), blobName);
assertArrayEquals(new byte[0], readBytes);
@@ -230,7 +235,7 @@ public void testCreateBlobStream() {
String blobName = generator.randomObjectName();
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).setContentType(CONTENT_TYPE).build();
ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8));
- Blob remoteBlob = storage.create(blob, stream);
+ Blob remoteBlob = storage.create(blob, stream, BlobWriteOption.doesNotExist());
assertNotNull(remoteBlob);
assertEquals(blob.getBucket(), remoteBlob.getBucket());
assertEquals(blob.getName(), remoteBlob.getName());
@@ -245,7 +250,9 @@ public void testCreateBlobStreamDisableGzipContent() {
String blobName = generator.randomObjectName();
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).setContentType(CONTENT_TYPE).build();
ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8));
- Blob remoteBlob = storage.create(blob, stream, BlobWriteOption.disableGzipContent());
+ Blob remoteBlob =
+ storage.create(
+ blob, stream, BlobWriteOption.disableGzipContent(), BlobWriteOption.doesNotExist());
assertNotNull(remoteBlob);
assertEquals(blob.getBucket(), remoteBlob.getBucket());
assertEquals(blob.getName(), remoteBlob.getName());
@@ -258,7 +265,7 @@ public void testCreateBlobStreamDisableGzipContent() {
public void testCreateBlobFail() {
String blobName = generator.randomObjectName();
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).build();
- Blob remoteBlob = storage.create(blob);
+ Blob remoteBlob = storage.create(blob, BlobTargetOption.doesNotExist());
assertNotNull(remoteBlob);
BlobInfo wrongGenerationBlob = BlobInfo.newBuilder(bucket, blobName, -1L).build();
try {
@@ -687,75 +694,15 @@ public void testUpdateBlob() {
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).build();
Blob remoteBlob = storage.create(blob);
assertNotNull(remoteBlob);
- Blob updatedBlob = remoteBlob.toBuilder().setContentType(CONTENT_TYPE).build().update();
- assertNotNull(updatedBlob);
- assertEquals(blob.getName(), updatedBlob.getName());
- assertEquals(blob.getBucket(), updatedBlob.getBucket());
- assertEquals(CONTENT_TYPE, updatedBlob.getContentType());
- }
-
- @Test
- public void testUpdateBlobReplaceMetadata() {
- String blobName = generator.randomObjectName();
- ImmutableMap<String, String> metadata = ImmutableMap.of("k1", "a");
- ImmutableMap<String, String> newMetadata = ImmutableMap.of("k2", "b");
- BlobInfo blob =
- BlobInfo.newBuilder(bucket, blobName)
+ Blob updatedBlob =
+ remoteBlob.toBuilder()
.setContentType(CONTENT_TYPE)
- .setMetadata(metadata)
- .build();
- Blob remoteBlob = storage.create(blob);
- assertNotNull(remoteBlob);
- Blob updatedBlob = remoteBlob.toBuilder().setMetadata(null).build().update();
+ .build()
+ .update(BlobTargetOption.metagenerationMatch());
assertNotNull(updatedBlob);
- assertNull(updatedBlob.getMetadata());
- updatedBlob = remoteBlob.toBuilder().setMetadata(newMetadata).build().update();
assertEquals(blob.getName(), updatedBlob.getName());
assertEquals(blob.getBucket(), updatedBlob.getBucket());
- assertEquals(newMetadata, updatedBlob.getMetadata());
- }
-
- @Test
- public void testUpdateBlobMergeMetadata() {
- String blobName = generator.randomObjectName();
- ImmutableMap<String, String> metadata = ImmutableMap.of("k1", "a");
- ImmutableMap<String, String> newMetadata = ImmutableMap.of("k2", "b");
- ImmutableMap<String, String> expectedMetadata = ImmutableMap.of("k1", "a", "k2", "b");
- BlobInfo blob =
- BlobInfo.newBuilder(bucket, blobName)
- .setContentType(CONTENT_TYPE)
- .setMetadata(metadata)
- .build();
- Blob remoteBlob = storage.create(blob);
- assertNotNull(remoteBlob);
- Blob updatedBlob = remoteBlob.toBuilder().setMetadata(newMetadata).build().update();
- assertNotNull(updatedBlob);
- assertEquals(blob.getName(), updatedBlob.getName());
- assertEquals(blob.getBucket(), updatedBlob.getBucket());
- assertEquals(expectedMetadata, updatedBlob.getMetadata());
- }
-
- @Test
- public void testUpdateBlobUnsetMetadata() {
-
- String blobName = generator.randomObjectName();
- ImmutableMap<String, String> metadata = ImmutableMap.of("k1", "a", "k2", "b");
- Map<String, String> newMetadata = new HashMap<>();
- newMetadata.put("k1", "a");
- newMetadata.put("k2", null);
- ImmutableMap<String, String> expectedMetadata = ImmutableMap.of("k1", "a");
- BlobInfo blob =
- BlobInfo.newBuilder(bucket, blobName)
- .setContentType(CONTENT_TYPE)
- .setMetadata(metadata)
- .build();
- Blob remoteBlob = storage.create(blob);
- assertNotNull(remoteBlob);
- Blob updatedBlob = remoteBlob.toBuilder().setMetadata(newMetadata).build().update();
- assertNotNull(updatedBlob);
- assertEquals(blob.getName(), updatedBlob.getName());
- assertEquals(blob.getBucket(), updatedBlob.getBucket());
- assertEquals(expectedMetadata, updatedBlob.getMetadata());
+ assertEquals(CONTENT_TYPE, updatedBlob.getContentType());
}
@Test
@@ -767,7 +714,9 @@ public void testUpdateBlobFail() {
BlobInfo wrongGenerationBlob =
BlobInfo.newBuilder(bucket, blobName, -1L).setContentType(CONTENT_TYPE).build();
try {
- storage.update(wrongGenerationBlob, BlobTargetOption.generationMatch());
+ storage.update(
+ wrongGenerationBlob,
+ BlobTargetOption.metagenerationMatch(remoteBlob.getMetageneration()));
fail("StorageException was expected");
} catch (StorageException ex) {
// expected
@@ -957,7 +906,8 @@ public void testCopyBlobWithPredefinedAcl() {
.setSource(source)
.setTarget(
BlobId.of(bucket.getName(), targetBlobName),
- BlobTargetOption.predefinedAcl(PredefinedAcl.PUBLIC_READ))
+ BlobTargetOption.predefinedAcl(PredefinedAcl.PUBLIC_READ),
+ BlobTargetOption.doesNotExist())
.build();
CopyWriter copyWriter = storage.copy(req);
Blob gen1 = copyWriter.getResult();
@@ -992,7 +942,10 @@ public void testCopyBlobWithEncryptionKeys() {
CopyRequest req1 =
CopyRequest.newBuilder()
.setSource(source)
- .setTarget(target, BlobTargetOption.encryptionKey(OTHER_BASE64_KEY))
+ .setTarget(
+ target,
+ BlobTargetOption.encryptionKey(OTHER_BASE64_KEY),
+ BlobTargetOption.doesNotExist())
.setSourceOptions(BlobSourceOption.decryptionKey(BASE64_KEY))
.build();
CopyWriter copyWriter1 = storage.copy(req1);
@@ -1037,7 +990,11 @@ public void testCopyBlobUpdateMetadata() {
.setContentType(CONTENT_TYPE)
.setMetadata(metadata)
.build();
- CopyRequest req = CopyRequest.of(source, target);
+ CopyRequest req =
+ CopyRequest.newBuilder()
+ .setSource(source)
+ .setTarget(target, BlobTargetOption.doesNotExist())
+ .build();
CopyWriter copyWriter = storage.copy(req);
Blob gen1 = copyWriter.getResult();
assertEquals(bucket.getName(), gen1.getBucket());
@@ -1049,52 +1006,6 @@ public void testCopyBlobUpdateMetadata() {
assertTrue(storage.delete(gen1.getBlobId()));
}
- @Test
- public void testCopyBlobUpdateStorageClass() {
- String sourceBlobName = generator.randomObjectName() + "-source";
- BlobId source = BlobId.of(bucket.getName(), sourceBlobName);
- BlobInfo sourceInfo =
- BlobInfo.newBuilder(source).setStorageClass(StorageClass.STANDARD).build();
- Blob remoteSourceBlob = storage.create(sourceInfo, BLOB_BYTE_CONTENT);
- assertNotNull(remoteSourceBlob);
- assertEquals(StorageClass.STANDARD, remoteSourceBlob.getStorageClass());
-
- String targetBlobName = generator.randomObjectName() + "-target";
- BlobInfo targetInfo =
- BlobInfo.newBuilder(bucket, targetBlobName).setStorageClass(StorageClass.COLDLINE).build();
- CopyRequest req = CopyRequest.of(source, targetInfo);
- CopyWriter copyWriter = storage.copy(req);
- Blob gen1 = copyWriter.getResult();
- assertEquals(bucket.getName(), gen1.getBucket());
- assertEquals(targetBlobName, gen1.getName());
- assertEquals(StorageClass.COLDLINE, gen1.getStorageClass());
- assertTrue(copyWriter.isDone());
- assertTrue(remoteSourceBlob.delete());
- assertTrue(storage.delete(gen1.getBlobId()));
- }
-
- @Test
- public void testCopyBlobNoContentType() {
-
- String sourceBlobName = generator.randomObjectName() + "-source";
- BlobId source = BlobId.of(bucket.getName(), sourceBlobName);
- Blob remoteSourceBlob = storage.create(BlobInfo.newBuilder(source).build(), BLOB_BYTE_CONTENT);
- assertNotNull(remoteSourceBlob);
- String targetBlobName = generator.randomObjectName() + "-target";
- ImmutableMap<String, String> metadata = ImmutableMap.of("k", "v");
- BlobInfo target = BlobInfo.newBuilder(bucket, targetBlobName).setMetadata(metadata).build();
- CopyRequest req = CopyRequest.of(source, target);
- CopyWriter copyWriter = storage.copy(req);
- Blob gen1 = copyWriter.getResult();
- assertEquals(bucket.getName(), gen1.getBucket());
- assertEquals(targetBlobName, gen1.getName());
- assertTrue(gen1.getContentType() == null || gen1.getContentType().isEmpty());
- assertEquals(metadata, gen1.getMetadata());
- assertTrue(copyWriter.isDone());
- assertTrue(remoteSourceBlob.delete());
- assertTrue(storage.delete(gen1.getBlobId()));
- }
-
@Test
public void testCopyBlobFail() {
@@ -1110,7 +1021,7 @@ public void testCopyBlobFail() {
CopyRequest.newBuilder()
.setSource(bucket.getName(), sourceBlobName)
.setSourceOptions(BlobSourceOption.generationMatch(-1L))
- .setTarget(target)
+ .setTarget(target, BlobTargetOption.doesNotExist())
.build();
try {
storage.copy(req);
@@ -1137,7 +1048,9 @@ public void testReadAndWriteChannelWithEncryptionKey() throws IOException {
String blobName = generator.randomObjectName();
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).build();
byte[] stringBytes;
- try (WriteChannel writer = storage.writer(blob, BlobWriteOption.encryptionKey(BASE64_KEY))) {
+ try (WriteChannel writer =
+ storage.writer(
+ blob, BlobWriteOption.encryptionKey(BASE64_KEY), BlobWriteOption.doesNotExist())) {
stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
writer.write(ByteBuffer.wrap(stringBytes));
@@ -1197,7 +1110,7 @@ private void doTestReadAndWriteChannelsWithSize(int blobSize) throws IOException
Random rnd = new Random();
byte[] bytes = new byte[blobSize];
rnd.nextBytes(bytes);
- try (WriteChannel writer = storage.writer(blob)) {
+ try (WriteChannel writer = storage.writer(blob, BlobWriteOption.doesNotExist())) {
writer.write(ByteBuffer.wrap(bytes));
}
ByteArrayOutputStream output = new ByteArrayOutputStream();
@@ -1217,7 +1130,7 @@ public void testReadAndWriteCaptureChannels() throws IOException {
String blobName = generator.randomObjectName();
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).build();
byte[] stringBytes;
- WriteChannel writer = storage.writer(blob);
+ WriteChannel writer = storage.writer(blob, BlobWriteOption.doesNotExist());
stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
RestorableState<WriteChannel> writerState = writer.capture();
@@ -1386,12 +1299,18 @@ public void testAttemptObjectDeleteWithRetentionPolicy()
public void testEnableDisableTemporaryHold() {
String blobName = generator.randomObjectName();
BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).setTemporaryHold(true).build();
- Blob remoteBlob = storage.create(blobInfo);
+ Blob remoteBlob = storage.create(blobInfo, BlobTargetOption.doesNotExist());
assertTrue(remoteBlob.getTemporaryHold());
remoteBlob =
- storage.get(remoteBlob.getBlobId(), BlobGetOption.fields(BlobField.TEMPORARY_HOLD));
+ storage.get(
+ remoteBlob.getBlobId(),
+ BlobGetOption.fields(BlobField.TEMPORARY_HOLD, BlobField.METAGENERATION));
assertTrue(remoteBlob.getTemporaryHold());
- remoteBlob = remoteBlob.toBuilder().setTemporaryHold(false).build().update();
+ remoteBlob =
+ remoteBlob.toBuilder()
+ .setTemporaryHold(false)
+ .build()
+ .update(BlobTargetOption.metagenerationMatch());
assertFalse(remoteBlob.getTemporaryHold());
}
@@ -1399,7 +1318,7 @@ public void testEnableDisableTemporaryHold() {
public void testAttemptObjectDeleteWithEventBasedHold() {
String blobName = generator.randomObjectName();
BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).setEventBasedHold(true).build();
- Blob remoteBlob = storage.create(blobInfo);
+ Blob remoteBlob = storage.create(blobInfo, BlobTargetOption.doesNotExist());
assertTrue(remoteBlob.getEventBasedHold());
try {
remoteBlob.delete();
@@ -1415,7 +1334,7 @@ public void testAttemptObjectDeleteWithEventBasedHold() {
public void testAttemptDeletionObjectTemporaryHold() {
String blobName = generator.randomObjectName();
BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).setTemporaryHold(true).build();
- Blob remoteBlob = storage.create(blobInfo);
+ Blob remoteBlob = storage.create(blobInfo, BlobTargetOption.doesNotExist());
assertTrue(remoteBlob.getTemporaryHold());
try {
remoteBlob.delete();
@@ -1432,7 +1351,7 @@ public void testBlobReload() throws Exception {
String blobName = generator.randomObjectName();
BlobId blobId = BlobId.of(bucket.getName(), blobName);
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
- Blob blob = storage.create(blobInfo, new byte[] {0, 1, 2});
+ Blob blob = storage.create(blobInfo, new byte[] {0, 1, 2}, BlobTargetOption.doesNotExist());
Blob blobUnchanged = blob.reload();
// gRPC and json have differing defaults on projections b/258835631
@@ -1463,7 +1382,9 @@ public void testUploadWithEncryption() throws Exception {
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
ByteArrayInputStream content = new ByteArrayInputStream(BLOB_BYTE_CONTENT);
- Blob blob = storage.createFrom(blobInfo, content, BlobWriteOption.encryptionKey(KEY));
+ Blob blob =
+ storage.createFrom(
+ blobInfo, content, BlobWriteOption.encryptionKey(KEY), BlobWriteOption.doesNotExist());
try {
blob.getContent();
@@ -1482,18 +1403,25 @@ private Blob createBlob(String method, BlobInfo blobInfo, boolean detectType) th
switch (method) {
case "create":
return detectType
- ? storage.create(blobInfo, BlobTargetOption.detectContentType())
- : storage.create(blobInfo);
+ ? storage.create(
+ blobInfo, BlobTargetOption.detectContentType(), BlobTargetOption.doesNotExist())
+ : storage.create(blobInfo, BlobTargetOption.doesNotExist());
case "createFrom":
InputStream inputStream = new ByteArrayInputStream(BLOB_BYTE_CONTENT);
return detectType
- ? storage.createFrom(blobInfo, inputStream, BlobWriteOption.detectContentType())
- : storage.createFrom(blobInfo, inputStream);
+ ? storage.createFrom(
+ blobInfo,
+ inputStream,
+ BlobWriteOption.detectContentType(),
+ BlobWriteOption.doesNotExist())
+ : storage.createFrom(blobInfo, inputStream, BlobWriteOption.doesNotExist());
case "writer":
if (detectType) {
- storage.writer(blobInfo, BlobWriteOption.detectContentType()).close();
+ storage
+ .writer(blobInfo, BlobWriteOption.detectContentType(), BlobWriteOption.doesNotExist())
+ .close();
} else {
- storage.writer(blobInfo).close();
+ storage.writer(blobInfo, BlobWriteOption.doesNotExist()).close();
}
return storage.get(BlobId.of(blobInfo.getBucket(), blobInfo.getName()));
default:
@@ -1501,51 +1429,13 @@ private Blob createBlob(String method, BlobInfo blobInfo, boolean detectType) th
}
}
- private void testAutoContentType(String method) throws IOException {
- String[] names = {
- generator.randomObjectName() + ".txt",
- generator.randomObjectName() + "with space/Pic.Jpg",
- generator.randomObjectName() + "no_extension"
- };
- String[] types = {"text/plain", "image/jpeg", "application/octet-stream"};
- for (int i = 0; i < names.length; i++) {
- BlobId blobId = BlobId.of(bucket.getName(), names[i]);
- BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
- Blob blob_true = createBlob(method, blobInfo, true);
- assertEquals(types[i], blob_true.getContentType());
-
- Blob blob_false = createBlob(method, blobInfo, false);
- assertThat(blob_false.getContentType()).isAnyOf("application/octet-stream", "");
- }
- String customType = "custom/type";
- BlobId blobId = BlobId.of(bucket.getName(), names[0]);
- BlobInfo blobInfo = BlobInfo.newBuilder(blobId).setContentType(customType).build();
- Blob blob = createBlob(method, blobInfo, true);
- assertEquals(customType, blob.getContentType());
- }
-
- @Test
- public void testAutoContentTypeCreate() throws IOException {
- testAutoContentType("create");
- }
-
- @Test
- public void testAutoContentTypeCreateFrom() throws IOException {
- testAutoContentType("createFrom");
- }
-
- @Test
- public void testAutoContentTypeWriter() throws IOException {
- testAutoContentType("writer");
- }
-
@Test
public void testBlobTimeStorageClassUpdated() {
String blobName = generator.randomObjectName();
StorageClass storageClass = StorageClass.COLDLINE;
BlobInfo blob = BlobInfo.newBuilder(bucket, blobName).setStorageClass(storageClass).build();
- Blob remoteBlob = storage.create(blob);
+ Blob remoteBlob = storage.create(blob, BlobTargetOption.doesNotExist());
assertThat(remoteBlob).isNotNull();
assertEquals(blob.getBucket(), remoteBlob.getBucket());
assertThat(remoteBlob.getName()).isEqualTo(blob.getName());
@@ -1559,7 +1449,9 @@ public void testBlobTimeStorageClassUpdated() {
CopyRequest request =
CopyRequest.newBuilder()
.setSource(blobId)
- .setTarget(BlobInfo.newBuilder(blobId).setStorageClass(StorageClass.STANDARD).build())
+ .setTarget(
+ BlobInfo.newBuilder(blobId).setStorageClass(StorageClass.STANDARD).build(),
+ BlobTargetOption.generationMatch(remoteBlob.getGeneration()))
.build();
Blob updatedBlob1 = storage.copy(request).getResult();
assertThat(updatedBlob1.getTimeStorageClassUpdated()).isNotNull();
@@ -1570,7 +1462,11 @@ public void testBlobTimeStorageClassUpdated() {
// Updates the other properties of the blob's to check the difference between blob updateTime
// and timeStorageClassUpdated.
- Blob updatedBlob2 = updatedBlob1.toBuilder().setContentType(CONTENT_TYPE).build().update();
+ Blob updatedBlob2 =
+ updatedBlob1.toBuilder()
+ .setContentType(CONTENT_TYPE)
+ .build()
+ .update(BlobTargetOption.metagenerationMatch());
assertThat(updatedBlob2.getUpdateTime())
.isGreaterThan(updatedBlob2.getTimeStorageClassUpdated());
assertThat(updatedBlob2.getTimeStorageClassUpdated())
@@ -1583,9 +1479,9 @@ public void testUpdateBlob_noModification() {
BlobInfo info = BlobInfo.newBuilder(bucket, generator.randomObjectName()).build();
// in grpc, create will return acls but update does not. re-get the metadata with default fields
- Blob gen1 = storage.create(info);
+ Blob gen1 = storage.create(info, BlobTargetOption.doesNotExist());
gen1 = storage.get(gen1.getBlobId());
- Blob gen2 = storage.update(gen1);
+ Blob gen2 = storage.update(gen1, BlobTargetOption.metagenerationMatch());
assertThat(gen2).isEqualTo(gen1);
}
@@ -1601,8 +1497,8 @@ public void blob_update() throws Exception {
BlobInfo info2 =
BlobInfo.newBuilder(versionedBucket, randomObjectName).setMetadata(meta2).build();
- BlobInfo gen1 = storage.create(info1);
- BlobInfo gen2 = storage.create(info2);
+ BlobInfo gen1 = storage.create(info1, BlobTargetOption.doesNotExist());
+ BlobInfo gen2 = storage.create(info2, BlobTargetOption.generationMatch(gen1.getGeneration()));
BlobInfo update1 = gen1.toBuilder().setMetadata(meta3).build();
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITReadableByteChannelBehaviorTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITReadableByteChannelBehaviorTest.java
new file mode 100644
index 0000000000..b4082ed4fc
--- /dev/null
+++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITReadableByteChannelBehaviorTest.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.storage.it;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assertWithMessage;
+
+import com.google.cloud.ReadChannel;
+import com.google.cloud.storage.BlobId;
+import com.google.cloud.storage.BlobReadSession;
+import com.google.cloud.storage.BucketInfo;
+import com.google.cloud.storage.ReadProjectionConfig;
+import com.google.cloud.storage.ReadProjectionConfigs;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.TransportCompatibility.Transport;
+import com.google.cloud.storage.it.runner.StorageITRunner;
+import com.google.cloud.storage.it.runner.annotations.Backend;
+import com.google.cloud.storage.it.runner.annotations.CrossRun;
+import com.google.cloud.storage.it.runner.annotations.Inject;
+import com.google.cloud.storage.it.runner.registry.Generator;
+import com.google.cloud.storage.it.runner.registry.ObjectsFixture;
+import com.google.common.io.ByteStreams;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.nio.channels.ReadableByteChannel;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+@RunWith(StorageITRunner.class)
+@CrossRun(
+ backends = {Backend.PROD},
+ transports = {Transport.HTTP, Transport.GRPC})
+public final class ITReadableByteChannelBehaviorTest {
+
+ @Inject public Storage storage;
+ @Inject public BucketInfo bucket;
+ @Inject public Generator generator;
+ @Inject public ObjectsFixture objectsFixture;
+
+ @Test
+ public void eofReturnedMultipleTimes_reader() throws IOException {
+ BlobId id = objectsFixture.getObj512KiB().getInfo().getBlobId();
+
+ try (ReadChannel reader = storage.reader(id)) {
+ eofReturnedMultipleTimes_doTest(reader);
+ }
+ }
+
+ @Test
+ @CrossRun.Exclude(transports = Transport.HTTP)
+ public void eofReturnedMultipleTimes_blobReadSession_channel()
+ throws ExecutionException, InterruptedException, TimeoutException, IOException {
+ eofReturnedMultipleTimes_doTestBlobReadSession(ReadProjectionConfigs.asChannel());
+ }
+
+ @Test
+ @CrossRun.Exclude(transports = Transport.HTTP)
+ public void eofReturnedMultipleTimes_blobReadSession_seekableChannel()
+ throws ExecutionException, InterruptedException, TimeoutException, IOException {
+ eofReturnedMultipleTimes_doTestBlobReadSession(ReadProjectionConfigs.asSeekableChannel());
+ }
+
+ private void eofReturnedMultipleTimes_doTestBlobReadSession(
+ ReadProjectionConfig<? extends ReadableByteChannel> config)
+ throws IOException, ExecutionException, InterruptedException, TimeoutException {
+ BlobId id = objectsFixture.getObj512KiB().getInfo().getBlobId();
+
+ try (BlobReadSession session = storage.blobReadSession(id).get(3, TimeUnit.SECONDS)) {
+ try (ReadableByteChannel c = session.readAs(config)) {
+ eofReturnedMultipleTimes_doTest(c);
+ }
+ }
+ }
+
+ private void eofReturnedMultipleTimes_doTest(ReadableByteChannel c) throws IOException {
+ long copy = ByteStreams.copy(c, Channels.newChannel(ByteStreams.nullOutputStream()));
+ assertThat(copy).isEqualTo(objectsFixture.getObj512KiB().getInfo().getSize());
+
+ ByteBuffer buf = ByteBuffer.allocate(8);
+ int i = ThreadLocalRandom.current().nextInt(3, 10);
+ for (int j = 0; j < i; j++) {
+ assertWithMessage("expected EOF " + j).that(c.read(buf)).isEqualTo(-1);
+ }
+ }
+}
diff --git a/grpc-google-cloud-storage-control-v2/pom.xml b/grpc-google-cloud-storage-control-v2/pom.xml
index 42534e1793..9d25ee46c5 100644
--- a/grpc-google-cloud-storage-control-v2/pom.xml
+++ b/grpc-google-cloud-storage-control-v2/pom.xml
@@ -4,13 +4,13 @@
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.google.api.grpc</groupId>
  <artifactId>grpc-google-cloud-storage-control-v2</artifactId>
-  <version>2.58.1</version>
+  <version>2.59.0</version>
  <name>grpc-google-cloud-storage-control-v2</name>
  <description>GRPC library for google-cloud-storage</description>
  <parent>
    <groupId>com.google.cloud</groupId>
    <artifactId>google-cloud-storage-parent</artifactId>
-    <version>2.58.1</version>
+    <version>2.59.0</version>
  </parent>
diff --git a/grpc-google-cloud-storage-v2/pom.xml b/grpc-google-cloud-storage-v2/pom.xml
index b72149d67b..9d5fb64685 100644
--- a/grpc-google-cloud-storage-v2/pom.xml
+++ b/grpc-google-cloud-storage-v2/pom.xml
@@ -4,13 +4,13 @@
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.google.api.grpc</groupId>
  <artifactId>grpc-google-cloud-storage-v2</artifactId>
-  <version>2.58.1</version>
+  <version>2.59.0</version>
  <name>grpc-google-cloud-storage-v2</name>
  <description>GRPC library for grpc-google-cloud-storage-v2</description>
  <parent>
    <groupId>com.google.cloud</groupId>
    <artifactId>google-cloud-storage-parent</artifactId>
-    <version>2.58.1</version>
+    <version>2.59.0</version>
  </parent>
diff --git a/pom.xml b/pom.xml
index a8c81db55c..a9ef213685 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
  <groupId>com.google.cloud</groupId>
  <artifactId>google-cloud-storage-parent</artifactId>
  <packaging>pom</packaging>
-  <version>2.58.1</version>
+  <version>2.59.0</version>
  <name>Storage Parent</name>
  <url>https://github.com/googleapis/java-storage</url>
@@ -14,7 +14,7 @@
    <groupId>com.google.cloud</groupId>
    <artifactId>sdk-platform-java-config</artifactId>
-    <version>3.52.3</version>
+    <version>3.53.0</version>
@@ -82,7 +82,7 @@
        <groupId>com.google.cloud</groupId>
        <artifactId>google-cloud-storage</artifactId>
-        <version>2.58.1</version>
+        <version>2.59.0</version>
      </dependency>
      <dependency>
        <groupId>com.google.apis</groupId>
@@ -92,7 +92,7 @@
        <groupId>com.google.cloud</groupId>
        <artifactId>google-cloud-pubsub</artifactId>
-        <version>1.141.5</version>
+        <version>1.142.0</version>
        <scope>test</scope>
@@ -104,32 +104,32 @@
        <groupId>com.google.api.grpc</groupId>
        <artifactId>proto-google-cloud-storage-v2</artifactId>
-        <version>2.58.1</version>
+        <version>2.59.0</version>
      </dependency>
      <dependency>
        <groupId>com.google.api.grpc</groupId>
        <artifactId>grpc-google-cloud-storage-v2</artifactId>
-        <version>2.58.1</version>
+        <version>2.59.0</version>
      </dependency>
      <dependency>
        <groupId>com.google.api.grpc</groupId>
        <artifactId>gapic-google-cloud-storage-v2</artifactId>
-        <version>2.58.1</version>
+        <version>2.59.0</version>
      </dependency>
      <dependency>
        <groupId>com.google.api.grpc</groupId>
        <artifactId>grpc-google-cloud-storage-control-v2</artifactId>
-        <version>2.58.1</version>
+        <version>2.59.0</version>
      </dependency>
      <dependency>
        <groupId>com.google.api.grpc</groupId>
        <artifactId>proto-google-cloud-storage-control-v2</artifactId>
-        <version>2.58.1</version>
+        <version>2.59.0</version>
      </dependency>
      <dependency>
        <groupId>com.google.cloud</groupId>
        <artifactId>google-cloud-storage-control</artifactId>
-        <version>2.58.1</version>
+        <version>2.59.0</version>
      </dependency>
      <dependency>
        <groupId>com.google.cloud</groupId>
diff --git a/proto-google-cloud-storage-control-v2/pom.xml b/proto-google-cloud-storage-control-v2/pom.xml
index 6f868e12aa..a258e6b249 100644
--- a/proto-google-cloud-storage-control-v2/pom.xml
+++ b/proto-google-cloud-storage-control-v2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-storage-control-v2
- 2.58.1
+ 2.59.0
proto-google-cloud-storage-control-v2
Proto library for proto-google-cloud-storage-control-v2
com.google.cloud
google-cloud-storage-parent
- 2.58.1
+ 2.59.0
diff --git a/proto-google-cloud-storage-v2/pom.xml b/proto-google-cloud-storage-v2/pom.xml
index 85530d5cd8..7788c504e4 100644
--- a/proto-google-cloud-storage-v2/pom.xml
+++ b/proto-google-cloud-storage-v2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-storage-v2
- 2.58.1
+ 2.59.0
proto-google-cloud-storage-v2
PROTO library for proto-google-cloud-storage-v2
com.google.cloud
google-cloud-storage-parent
- 2.58.1
+ 2.59.0
diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml
index d9e5315b36..b77249bb47 100644
--- a/samples/install-without-bom/pom.xml
+++ b/samples/install-without-bom/pom.xml
@@ -30,12 +30,12 @@
com.google.cloud
google-cloud-storage
- 2.58.0
+ 2.58.1
com.google.cloud
google-cloud-storage-control
- 2.58.0
+ 2.58.1
@@ -66,19 +66,19 @@
com.google.cloud
google-cloud-pubsub
- 1.141.5
+ 1.142.0
test
com.google.cloud
google-cloud-kms
- 2.78.0
+ 2.80.0
test
com.google.cloud
google-cloud-storage
- 2.58.0
+ 2.58.1
tests
test
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index e21f905908..2ac6fe76f2 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -28,12 +28,12 @@
com.google.cloud
google-cloud-storage
- 2.58.1
+ 2.59.0
com.google.cloud
google-cloud-storage-control
- 2.58.1
+ 2.59.0
compile
@@ -58,19 +58,19 @@
com.google.cloud
google-cloud-pubsub
- 1.141.5
+ 1.142.0
test
com.google.cloud
google-cloud-kms
- 2.78.0
+ 2.80.0
test
com.google.cloud
google-cloud-storage
- 2.58.1
+ 2.59.0
tests
test
diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml
index 1740ae08b3..36544329ce 100644
--- a/samples/snippets/pom.xml
+++ b/samples/snippets/pom.xml
@@ -31,7 +31,7 @@
com.google.cloud
libraries-bom
- 26.69.0
+ 26.70.0
pom
import
@@ -76,13 +76,13 @@
com.google.cloud
google-cloud-pubsub
- 1.141.5
+ 1.142.0
test
com.google.cloud
google-cloud-kms
- 2.78.0
+ 2.80.0
test
@@ -99,7 +99,7 @@
com.google.cloud
google-cloud-storage
- 2.58.0
+ 2.58.1
tests
test
diff --git a/samples/snippets/src/main/java/com/example/storage/object/StartAppendableObjectUpload.java b/samples/snippets/src/main/java/com/example/storage/object/CreateAndWriteAppendableObject.java
similarity index 78%
rename from samples/snippets/src/main/java/com/example/storage/object/StartAppendableObjectUpload.java
rename to samples/snippets/src/main/java/com/example/storage/object/CreateAndWriteAppendableObject.java
index cda8b7c898..29495d0ec6 100644
--- a/samples/snippets/src/main/java/com/example/storage/object/StartAppendableObjectUpload.java
+++ b/samples/snippets/src/main/java/com/example/storage/object/CreateAndWriteAppendableObject.java
@@ -16,7 +16,7 @@
package com.example.storage.object;
-// [START storage_start_appendable_object_upload]
+// [START storage_create_and_write_appendable_object_upload]
import com.google.cloud.storage.BlobAppendableUpload;
import com.google.cloud.storage.BlobAppendableUpload.AppendableUploadWriteableByteChannel;
@@ -24,6 +24,7 @@
import com.google.cloud.storage.BlobAppendableUploadConfig.CloseAction;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
+import com.google.cloud.storage.FlushPolicy;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.common.io.ByteStreams;
@@ -33,8 +34,8 @@
import java.nio.file.Paths;
import java.util.Locale;
-public class StartAppendableObjectUpload {
- public static void startAppendableObjectUpload(
+public class CreateAndWriteAppendableObject {
+ public static void createAndWriteAppendableObject(
String bucketName, String objectName, String filePath) throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";
@@ -49,12 +50,18 @@ public static void startAppendableObjectUpload(
BlobId blobId = BlobId.of(bucketName, objectName);
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
+ int flushSize = 64 * 1000;
+ FlushPolicy.MaxFlushSizeFlushPolicy flushPolicy = FlushPolicy.maxFlushSize(flushSize);
BlobAppendableUploadConfig config =
- BlobAppendableUploadConfig.of().withCloseAction(CloseAction.CLOSE_WITHOUT_FINALIZING);
+ BlobAppendableUploadConfig.of()
+ .withCloseAction(CloseAction.FINALIZE_WHEN_CLOSING)
+ .withFlushPolicy(flushPolicy);
BlobAppendableUpload uploadSession = storage.blobAppendableUpload(blobInfo, config);
try (AppendableUploadWriteableByteChannel channel = uploadSession.open();
ReadableByteChannel readableByteChannel = FileChannel.open(Paths.get(filePath))) {
ByteStreams.copy(readableByteChannel, channel);
+ // Since the channel is in a try-with-resources block, channel.close()
+ // will be implicitly called here, which triggers the finalization.
} catch (IOException ex) {
throw new IOException("Failed to upload to object " + blobId.toGsUtilUri(), ex);
}
@@ -67,4 +74,4 @@ public static void startAppendableObjectUpload(
}
}
-// [END storage_start_appendable_object_upload]
+// [END storage_create_and_write_appendable_object_upload]
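For orientation, the renamed sample can be invoked roughly as follows; the bucket, object, and file path below are placeholders, not values from this change:

// Hypothetical invocation of the sample above.
CreateAndWriteAppendableObject.createAndWriteAppendableObject(
    "my-bucket", "appendable-object.log", "/tmp/local-file.log");
// Because the config uses CloseAction.FINALIZE_WHEN_CLOSING, the object is
// finalized when the try-with-resources block closes the channel.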
diff --git a/samples/snippets/src/main/java/com/example/storage/object/GetObjectContexts.java b/samples/snippets/src/main/java/com/example/storage/object/GetObjectContexts.java
new file mode 100644
index 0000000000..a0ab377763
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/storage/object/GetObjectContexts.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.storage.object;
+
+// [START storage_get_object_contexts]
+
+import com.google.cloud.storage.Blob;
+import com.google.cloud.storage.BlobInfo.ObjectContexts;
+import com.google.cloud.storage.BlobInfo.ObjectCustomContextPayload;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageOptions;
+import java.util.Map;
+
+public class GetObjectContexts {
+ public static void getObjectContexts(String projectId, String bucketName, String objectName)
+ throws Exception {
+ // The ID of your GCP project
+ // String projectId = "your-project-id";
+
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The ID of your GCS object
+ // String objectName = "your-object-name";
+
+ try (Storage storage =
+ StorageOptions.newBuilder().setProjectId(projectId).build().getService()) {
+
+ Blob blob = storage.get(bucketName, objectName);
+ if (blob == null) {
+ System.out.println("The object " + objectName + " was not found in " + bucketName);
+ return;
+ }
+ ObjectContexts objectContexts = blob.getContexts();
+
+ if (objectContexts != null) {
+ Map<String, ObjectCustomContextPayload> customContexts = objectContexts.getCustom();
+ if (customContexts == null) {
+ System.out.println("No custom contexts found for object: " + objectName);
+ return;
+ }
+ // Print blob's object contexts
+ System.out.println("\nCustom Contexts:");
+ for (Map.Entry<String, ObjectCustomContextPayload> custom : customContexts.entrySet()) {
+ System.out.println(custom.getKey() + "=" + custom.getValue());
+ }
+ }
+ }
+ }
+}
+// [END storage_get_object_contexts]
diff --git a/samples/snippets/src/main/java/com/example/storage/object/ListObjectContexts.java b/samples/snippets/src/main/java/com/example/storage/object/ListObjectContexts.java
new file mode 100644
index 0000000000..3becd448a4
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/storage/object/ListObjectContexts.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.storage.object;
+
+// [START storage_list_object_contexts]
+
+import com.google.api.gax.paging.Page;
+import com.google.cloud.storage.Blob;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageOptions;
+
+public class ListObjectContexts {
+ public static void listObjectContexts(String projectId, String bucketName, String key)
+ throws Exception {
+ // The ID of your GCP project
+ // String projectId = "your-project-id";
+
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The context key you want to filter
+ // String key = "your-context-key";
+
+ try (Storage storage =
+ StorageOptions.newBuilder().setProjectId(projectId).build().getService()) {
+ /*
+ * List any object that has a context with the specified key attached
+ * String filter = "contexts.\"KEY\":*";
+ *
+ * List any object that that does not have a context with the specified key attached
+ * String filter = "NOT contexts.\"KEY\":*";
+ *
+ * List any object that has a context with the specified key and value attached
+ * String filter = "contexts.\"KEY\"=\"VALUE\"";
+ *
+ * List any object that does not have a context with the specified key and value attached
+ * String filter = "NOT contexts.\"KEY\"=\"VALUE\"";
+ */
+
+ String filter = "contexts.\"" + key + "\":*";
+
+ System.out.println("Listing objects for bucket: " + bucketName + "with context key: " + key);
+ Page<Blob> blobs = storage.list(bucketName, Storage.BlobListOption.filter(filter));
+ for (Blob blob : blobs.iterateAll()) {
+ System.out.println(blob.getBlobId().toGsUtilUri());
+ }
+ }
+ }
+}
+// [END storage_list_object_contexts]
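The filter forms documented in the comment block above can also be composed directly against Storage.BlobListOption.filter. A minimal sketch, with KEY and VALUE as placeholders and storage/bucketName assumed to be in scope as in the sample:

// Sketch only: the four context filters described in the sample's comment.
String hasKey = "contexts.\"KEY\":*";
String lacksKey = "NOT contexts.\"KEY\":*";
String hasKeyAndValue = "contexts.\"KEY\"=\"VALUE\"";
String lacksKeyAndValue = "NOT contexts.\"KEY\"=\"VALUE\"";
Page<Blob> matches = storage.list(bucketName, Storage.BlobListOption.filter(hasKeyAndValue));
for (Blob match : matches.iterateAll()) {
  System.out.println(match.getBlobId().toGsUtilUri());
}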
diff --git a/samples/snippets/src/main/java/com/example/storage/object/OpenMultipleObjectsRangedRead.java b/samples/snippets/src/main/java/com/example/storage/object/OpenMultipleObjectsRangedRead.java
new file mode 100644
index 0000000000..33dfb916b3
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/storage/object/OpenMultipleObjectsRangedRead.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.storage.object;
+
+// [START storage_open_multiple_objects_ranged_read]
+
+import com.google.api.core.ApiFuture;
+import com.google.api.core.ApiFutures;
+import com.google.cloud.storage.BlobId;
+import com.google.cloud.storage.BlobReadSession;
+import com.google.cloud.storage.RangeSpec;
+import com.google.cloud.storage.ReadAsFutureBytes;
+import com.google.cloud.storage.ReadProjectionConfigs;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageOptions;
+import com.google.common.util.concurrent.MoreExecutors;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+public class OpenMultipleObjectsRangedRead {
+ public static void multipleObjectsSingleRangedRead(
+ String bucketName, List<String> objectNames, long startOffset, int length) throws Exception {
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The ID of your GCS objects to read
+ // List<String> objectNames = Arrays.asList("object-1", "object-2", "object-3");
+
+ RangeSpec singleRange = RangeSpec.of(startOffset, length);
+ ReadAsFutureBytes rangeConfig =
+ ReadProjectionConfigs.asFutureBytes().withRangeSpec(singleRange);
+
+ try (Storage storage = StorageOptions.grpc().build().getService()) {
+ List<ApiFuture<byte[]>> futuresToWaitOn = new ArrayList<>();
+
+ System.out.printf(
+ "Initiating single ranged read [%d, %d] on %d objects...%n",
+ startOffset, startOffset + length - 1, objectNames.size());
+
+ for (String objectName : objectNames) {
+ BlobId blobId = BlobId.of(bucketName, objectName);
+ ApiFuture<BlobReadSession> futureReadSession = storage.blobReadSession(blobId);
+
+ ApiFuture<byte[]> readAndCloseFuture =
+ ApiFutures.transformAsync(
+ futureReadSession,
+ (BlobReadSession session) -> {
+ ApiFuture<byte[]> readFuture = session.readAs(rangeConfig);
+
+ readFuture.addListener(
+ () -> {
+ try {
+ session.close();
+ } catch (java.io.IOException e) {
+ System.err.println(
+ "WARN: Background error while closing session: " + e.getMessage());
+ }
+ },
+ MoreExecutors.directExecutor());
+ return readFuture;
+ },
+ MoreExecutors.directExecutor());
+
+ futuresToWaitOn.add(readAndCloseFuture);
+ }
+ ApiFutures.allAsList(futuresToWaitOn).get(30, TimeUnit.SECONDS);
+
+ System.out.println("All concurrent single-ranged read operations are complete.");
+ }
+ }
+}
+// [END storage_open_multiple_objects_ranged_read]
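A caller might drive the new sample like this; the object names and range below are placeholders used only for illustration:

// Hypothetical invocation: read bytes [0, 1023] from three objects concurrently.
OpenMultipleObjectsRangedRead.multipleObjectsSingleRangedRead(
    "my-bucket", java.util.Arrays.asList("object-1", "object-2", "object-3"), 0L, 1024);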
diff --git a/samples/snippets/src/main/java/com/example/storage/object/AppendableObjectMultipleRangedRead.java b/samples/snippets/src/main/java/com/example/storage/object/OpenObjectMultipleRangedRead.java
similarity index 78%
rename from samples/snippets/src/main/java/com/example/storage/object/AppendableObjectMultipleRangedRead.java
rename to samples/snippets/src/main/java/com/example/storage/object/OpenObjectMultipleRangedRead.java
index ce36771ad9..f3e8233418 100644
--- a/samples/snippets/src/main/java/com/example/storage/object/AppendableObjectMultipleRangedRead.java
+++ b/samples/snippets/src/main/java/com/example/storage/object/OpenObjectMultipleRangedRead.java
@@ -16,7 +16,7 @@
package com.example.storage.object;
-// [START storage_read_appendable_object_multiple_ranges]
+// [START storage_open_object_multiple_ranged_read]
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
@@ -30,10 +30,28 @@
import java.util.List;
import java.util.concurrent.TimeUnit;
-public class AppendableObjectMultipleRangedRead {
- public static void appendableObjectMultipleRangedRead(
+public class OpenObjectMultipleRangedRead {
+ public static void openObjectMultipleRangedRead(
String bucketName, String objectName, long offset1, int length1, long offset2, int length2)
throws Exception {
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The ID of your GCS object
+ // String objectName = "your-object-name";
+
+ // The beginning of range 1
+ // long offset1 = 0;
+
+ // The maximum number of bytes to read in range 1
+ // int length1 = 16;
+
+ // The beginning of range 2
+ // long offset2 = 16;
+
+ // The maximum number of bytes to read in range 2
+ // int length2 = 32;
+
try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
ApiFuture<BlobReadSession> futureBlobReadSession = storage.blobReadSession(blobId);
@@ -62,4 +80,4 @@ public static void appendableObjectMultipleRangedRead(
}
}
-// [END storage_read_appendable_object_multiple_ranges]
+// [END storage_open_object_multiple_ranged_read]
diff --git a/samples/snippets/src/main/java/com/example/storage/object/AppendableObjectReadFullObject.java b/samples/snippets/src/main/java/com/example/storage/object/OpenObjectReadFullObject.java
similarity index 84%
rename from samples/snippets/src/main/java/com/example/storage/object/AppendableObjectReadFullObject.java
rename to samples/snippets/src/main/java/com/example/storage/object/OpenObjectReadFullObject.java
index 0b24051ca6..a1a22ffd26 100644
--- a/samples/snippets/src/main/java/com/example/storage/object/AppendableObjectReadFullObject.java
+++ b/samples/snippets/src/main/java/com/example/storage/object/OpenObjectReadFullObject.java
@@ -16,7 +16,7 @@
package com.example.storage.object;
-// [START storage_read_appendable_object_full]
+// [START storage_open_object_read_full_object]
import com.google.api.core.ApiFuture;
import com.google.cloud.storage.BlobId;
@@ -30,9 +30,15 @@
import java.util.Locale;
import java.util.concurrent.TimeUnit;
-public class AppendableObjectReadFullObject {
- public static void appendableObjectReadFullObject(String bucketName, String objectName)
+public class OpenObjectReadFullObject {
+ public static void openObjectReadFullObject(String bucketName, String objectName)
throws Exception {
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The ID of your GCS object to read
+ // String objectName = "your-object-name";
+
try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
ApiFuture<BlobReadSession> futureBlobReadSession = storage.blobReadSession(blobId);
@@ -60,4 +66,4 @@ public static void appendableObjectReadFullObject(String bucketName, String obje
}
}
}
-// [END storage_read_appendable_object_full]
+// [END storage_open_object_read_full_object]
diff --git a/samples/snippets/src/main/java/com/example/storage/object/AppendableObjectSingleRangedRead.java b/samples/snippets/src/main/java/com/example/storage/object/OpenObjectSingleRangedRead.java
similarity index 79%
rename from samples/snippets/src/main/java/com/example/storage/object/AppendableObjectSingleRangedRead.java
rename to samples/snippets/src/main/java/com/example/storage/object/OpenObjectSingleRangedRead.java
index 437f47203f..55446ea266 100644
--- a/samples/snippets/src/main/java/com/example/storage/object/AppendableObjectSingleRangedRead.java
+++ b/samples/snippets/src/main/java/com/example/storage/object/OpenObjectSingleRangedRead.java
@@ -16,7 +16,7 @@
package com.example.storage.object;
-// [START storage_read_appendable_object_single_range]
+// [START storage_open_object_single_ranged_read]
import com.google.api.core.ApiFuture;
import com.google.cloud.storage.BlobId;
@@ -27,9 +27,20 @@
import com.google.cloud.storage.StorageOptions;
import java.util.concurrent.TimeUnit;
-public class AppendableObjectSingleRangedRead {
- public static void appendableObjectSingleRangedRead(
+public class OpenObjectSingleRangedRead {
+ public static void openObjectSingleRangedRead(
String bucketName, String objectName, long offset, int length) throws Exception {
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The ID of your GCS object
+ // String objectName = "your-object-name";
+
+ // The beginning of the range
+ // long offset = 0;
+
+ // The maximum number of bytes to read from the object.
+ // int length = 64;
try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
@@ -55,4 +66,4 @@ public static void appendableObjectSingleRangedRead(
}
}
}
-// [END storage_read_appendable_object_single_range]
+// [END storage_open_object_single_ranged_read]
diff --git a/samples/snippets/src/main/java/com/example/storage/object/ResumeAppendableObjectUpload.java b/samples/snippets/src/main/java/com/example/storage/object/PauseAndResumeAppendableObjectUpload.java
similarity index 51%
rename from samples/snippets/src/main/java/com/example/storage/object/ResumeAppendableObjectUpload.java
rename to samples/snippets/src/main/java/com/example/storage/object/PauseAndResumeAppendableObjectUpload.java
index 8852b585d8..c364ee0937 100644
--- a/samples/snippets/src/main/java/com/example/storage/object/ResumeAppendableObjectUpload.java
+++ b/samples/snippets/src/main/java/com/example/storage/object/PauseAndResumeAppendableObjectUpload.java
@@ -5,7 +5,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,7 +16,7 @@
package com.example.storage.object;
-// [START storage_resume_appendable_object_upload]
+// [START storage_pause_and_resume_appendable_object_upload]
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobAppendableUpload;
@@ -26,20 +26,23 @@
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageChannelUtils;
import com.google.cloud.storage.StorageOptions;
import com.google.common.io.ByteStreams;
import java.io.IOException;
+import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Locale;
-public class ResumeAppendableObjectUpload {
- public static void resumeAppendableObjectUpload(
+public class PauseAndResumeAppendableObjectUpload {
+ public static void pauseAndResumeAppendableObjectUpload(
String bucketName, String objectName, String filePath) throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";
- // The ID of your GCS unfinalized appendable object
+ // The ID of your GCS object
// String objectName = "your-object-name";
// The path to the file to upload
@@ -47,45 +50,58 @@ public static void resumeAppendableObjectUpload(
try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
- Blob existingBlob = storage.get(blobId);
- BlobInfo blobInfoForTakeover = BlobInfo.newBuilder(existingBlob.getBlobId()).build();
+ BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
+
+ // --- Step 1: Initial string write (PAUSE) ---
+ // Default close action will be CLOSE_WITHOUT_FINALIZING
+ BlobAppendableUploadConfig initialConfig = BlobAppendableUploadConfig.of();
+ BlobAppendableUpload initialUploadSession =
+ storage.blobAppendableUpload(blobInfo, initialConfig);
+
+ try (AppendableUploadWriteableByteChannel channel = initialUploadSession.open()) {
+ String initialData = "Initial data segment.\n";
+ ByteBuffer buffer = ByteBuffer.wrap(initialData.getBytes(StandardCharsets.UTF_8));
+ long totalBytesWritten = StorageChannelUtils.blockingEmptyTo(buffer, channel);
+ channel.flush();
+
+ System.out.printf(
+ Locale.US, "Wrote %d bytes (initial string) in first segment.\n", totalBytesWritten);
+ } catch (IOException ex) {
+ throw new IOException("Failed initial upload to object " + blobId.toGsUtilUri(), ex);
+ }
+ Blob existingBlob = storage.get(blobId);
long currentObjectSize = existingBlob.getSize();
System.out.printf(
Locale.US,
- "Resuming upload for %s. Currently uploaded size: %d bytes\n",
- blobId.toGsUtilUri(),
+ "Initial upload paused. Currently uploaded size: %d bytes\n",
currentObjectSize);
- BlobAppendableUploadConfig config =
- BlobAppendableUploadConfig.of().withCloseAction(CloseAction.CLOSE_WITHOUT_FINALIZING);
+ // --- Step 2: Resume upload with file content and finalize ---
+ // Use FINALIZE_WHEN_CLOSING to ensure the object is finalized on channel closure.
+ BlobAppendableUploadConfig resumeConfig =
+ BlobAppendableUploadConfig.of().withCloseAction(CloseAction.FINALIZE_WHEN_CLOSING);
BlobAppendableUpload resumeUploadSession =
- storage.blobAppendableUpload(blobInfoForTakeover, config);
+ storage.blobAppendableUpload(existingBlob.toBuilder().build(), resumeConfig);
+
try (FileChannel fileChannel = FileChannel.open(Paths.get(filePath));
AppendableUploadWriteableByteChannel channel = resumeUploadSession.open()) {
+ long bytesToAppend = fileChannel.size();
+ System.out.printf(
+ Locale.US,
+ "Appending the entire file (%d bytes) after the initial string.\n",
+ bytesToAppend);
- if (fileChannel.size() < currentObjectSize) {
- throw new IOException(
- "Local file is smaller than the already uploaded data. File size: "
- + fileChannel.size()
- + ", Uploaded size: "
- + currentObjectSize);
- } else if (fileChannel.size() == currentObjectSize) {
- System.out.println("No more data to upload.");
- } else {
- fileChannel.position(currentObjectSize);
- System.out.printf(
- Locale.US, "Appending %d bytes\n", fileChannel.size() - currentObjectSize);
- ByteStreams.copy(fileChannel, channel);
- }
+ ByteStreams.copy(fileChannel, channel);
}
+
BlobInfo result = storage.get(blobId);
System.out.printf(
Locale.US,
- "Object %s successfully resumed. Total size: %d\n",
+ "\nObject %s successfully resumed and finalized. Total size: %d bytes\n",
result.getBlobId().toGsUtilUriWithGeneration(),
result.getSize());
}
}
}
-// [END storage_resume_appendable_object_upload]
+// [END storage_pause_and_resume_appendable_object_upload]
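The takeover step in the rewritten sample is the part most easily missed. A condensed sketch, assuming storage is a gRPC client and blobId points at an unfinalized appendable object, with imports as in the sample:

// Sketch only: resume an existing appendable object and finalize it on close.
Blob existing = storage.get(blobId);
BlobAppendableUploadConfig cfg =
    BlobAppendableUploadConfig.of().withCloseAction(CloseAction.FINALIZE_WHEN_CLOSING);
BlobAppendableUpload takeover = storage.blobAppendableUpload(existing.toBuilder().build(), cfg);
try (AppendableUploadWriteableByteChannel channel = takeover.open()) {
  channel.write(ByteBuffer.wrap("more data".getBytes(StandardCharsets.UTF_8)));
} // close() finalizes the object because of FINALIZE_WHEN_CLOSING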
diff --git a/samples/snippets/src/main/java/com/example/storage/object/ReadAppendableObjectTail.java b/samples/snippets/src/main/java/com/example/storage/object/ReadAppendableObjectTail.java
new file mode 100644
index 0000000000..98cf31e967
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/storage/object/ReadAppendableObjectTail.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.storage.object;
+
+// [START storage_read_appendable_object_tail]
+
+import com.google.api.core.ApiFuture;
+import com.google.cloud.storage.BlobAppendableUpload;
+import com.google.cloud.storage.BlobAppendableUpload.AppendableUploadWriteableByteChannel;
+import com.google.cloud.storage.BlobAppendableUploadConfig;
+import com.google.cloud.storage.BlobId;
+import com.google.cloud.storage.BlobInfo;
+import com.google.cloud.storage.BlobReadSession;
+import com.google.cloud.storage.FlushPolicy;
+import com.google.cloud.storage.RangeSpec;
+import com.google.cloud.storage.ReadProjectionConfigs;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageChannelUtils;
+import com.google.cloud.storage.StorageOptions;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+
+public class ReadAppendableObjectTail {
+ public static void readAppendableObjectTail(String bucketName, String objectName)
+ throws Exception {
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The ID of your GCS object
+ // String objectName = "your-object-name";
+
+ try (Storage storage = StorageOptions.grpc().build().getService()) {
+ BlobId blobId = BlobId.of(bucketName, objectName);
+ BlobInfo info = BlobInfo.newBuilder(blobId).build();
+ int totalToWrite = 64 * 1000;
+ // Define our flush policy to flush small increments
+ // This is useful for demonstration purposes, but you should use more appropriate values for
+ // your workload.
+ int flushSize = totalToWrite / 8;
+ FlushPolicy.MinFlushSizeFlushPolicy flushPolicy =
+ FlushPolicy.minFlushSize(flushSize).withMaxPendingBytes(flushSize);
+ BlobAppendableUploadConfig appendableUploadConfig =
+ BlobAppendableUploadConfig.of().withFlushPolicy(flushPolicy);
+ BlobAppendableUpload upload =
+ storage.blobAppendableUpload(
+ info, appendableUploadConfig, Storage.BlobWriteOption.doesNotExist());
+ // Create the object; we'll then take over the upload to write to it in this example.
+ upload.open().closeWithoutFinalizing();
+ BlobInfo gen1 = upload.getResult().get();
+ BlobAppendableUpload takeover = storage.blobAppendableUpload(gen1, appendableUploadConfig);
+
+ try (AppendableUploadWriteableByteChannel channel = takeover.open()) {
+ // Start a background thread to write some data on a periodic basis
+ // In reality, your application would probably be doing this in another scope.
+ Thread writeThread = startWriteThread(totalToWrite, channel, flushPolicy);
+ try (BlobReadSession readSession =
+ storage.blobReadSession(gen1.getBlobId()).get(10, TimeUnit.SECONDS)) {
+ int zeroCnt = 0;
+ long read = 0;
+ while (read < totalToWrite) {
+ if (zeroCnt >= 30 && !channel.isOpen()) {
+ System.out.println("breaking");
+ break;
+ }
+ ApiFuture<byte[]> future =
+ readSession.readAs(
+ ReadProjectionConfigs.asFutureBytes()
+ .withRangeSpec(RangeSpec.of(read, flushPolicy.getMinFlushSize())));
+ byte[] bytes = future.get(20, TimeUnit.SECONDS);
+
+ read += bytes.length;
+ long defaultSleep = 1_500L;
+ if (bytes.length == 0) {
+ zeroCnt++;
+ long millis = defaultSleep * zeroCnt;
+ System.out.println("millis = " + millis);
+ Thread.sleep(millis);
+ } else {
+ zeroCnt = 0;
+ System.out.println("bytes.length = " + bytes.length + " read = " + read);
+ Thread.sleep(defaultSleep);
+ }
+ }
+ assert read == totalToWrite : "not enough bytes";
+ }
+ writeThread.join();
+ }
+ }
+ }
+
+ private static Thread startWriteThread(
+ int totalToWrite,
+ AppendableUploadWriteableByteChannel channel,
+ FlushPolicy.MinFlushSizeFlushPolicy flushPolicy) {
+ Thread writeThread =
+ new Thread(
+ () -> {
+ try {
+ for (long written = 0; written < totalToWrite; ) {
+ byte alphaOffset = (byte) (written % 0x1a);
+
+ ByteBuffer buf = ByteBuffer.wrap(new byte[] {(byte) (0x41 + alphaOffset)});
+ int w = StorageChannelUtils.blockingEmptyTo(buf, channel);
+ written += w;
+ if (written % flushPolicy.getMinFlushSize() == 0) {
+ channel.flush();
+ Thread.sleep(40);
+ }
+ }
+ channel.closeWithoutFinalizing();
+
+ } catch (IOException | InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ writeThread.start();
+ return writeThread;
+ }
+}
+// [END storage_read_appendable_object_tail]
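The polling loop in ReadAppendableObjectTail is the heart of the sample; stripped of the demo write thread it reduces to the sketch below. Here session, chunkSize, and expectedTotalBytes are assumptions for illustration, and exception handling is omitted:

// Sketch only: follow the tail of an appendable object.
long offset = 0;
while (offset < expectedTotalBytes) { // the stop condition is up to the application
  byte[] bytes =
      session
          .readAs(
              ReadProjectionConfigs.asFutureBytes()
                  .withRangeSpec(RangeSpec.of(offset, chunkSize)))
          .get(20, TimeUnit.SECONDS);
  if (bytes.length == 0) {
    Thread.sleep(1_500L); // nothing flushed yet; back off and poll again
  } else {
    offset += bytes.length; // process `bytes` here
  }
}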
diff --git a/samples/snippets/src/main/java/com/example/storage/object/SetObjectContexts.java b/samples/snippets/src/main/java/com/example/storage/object/SetObjectContexts.java
new file mode 100644
index 0000000000..169399a710
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/storage/object/SetObjectContexts.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.storage.object;
+
+// [START storage_set_object_contexts]
+
+import com.google.cloud.storage.Blob;
+import com.google.cloud.storage.BlobId;
+import com.google.cloud.storage.BlobInfo;
+import com.google.cloud.storage.BlobInfo.ObjectContexts;
+import com.google.cloud.storage.BlobInfo.ObjectCustomContextPayload;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageOptions;
+import com.google.common.collect.Maps;
+import java.util.Map;
+
+public class SetObjectContexts {
+ public static void setObjectContexts(
+ String projectId, String bucketName, String objectName, String key, String value)
+ throws Exception {
+ // The ID of your GCP project
+ // String projectId = "your-project-id";
+
+ // The ID of your GCS bucket
+ // String bucketName = "your-unique-bucket-name";
+
+ // The ID of your GCS object
+ // String objectName = "your-object-name";
+
+ // The context key-value you want to add
+ // String key = "your-context-key";
+ // String value = "your-context-value";
+
+ try (Storage storage =
+ StorageOptions.newBuilder().setProjectId(projectId).build().getService()) {
+ BlobId blobId = BlobId.of(bucketName, objectName);
+ Blob blob = storage.get(blobId);
+ if (blob == null) {
+ System.out.println("The object " + objectName + " was not found in " + bucketName);
+ return;
+ }
+
+ // Recommended: Set a generation-match precondition to avoid potential race
+ // conditions and data corruptions. The request to update returns a 412 error if
+ // the object's generation number does not match your precondition.
+ Storage.BlobTargetOption precondition = Storage.BlobTargetOption.generationMatch();
+
+ // This section demonstrates how to upsert, delete all, and delete a specific context.
+
+ // To upsert a context (if the key already exists, its value is replaced;
+ // otherwise, a new key-value pair is added):
+ ObjectCustomContextPayload payload =
+ ObjectCustomContextPayload.newBuilder().setValue(value).build();
+ Map<String, ObjectCustomContextPayload> custom = Maps.newHashMap();
+ custom.put(key, payload);
+ ObjectContexts contexts = ObjectContexts.newBuilder().setCustom(custom).build();
+
+ /*
+ * To delete all existing contexts:
+ * ObjectContexts contexts = ObjectContexts.newBuilder().setCustom(null).build();
+ */
+
+ /*
+ * To delete a specific key from the context:
+ * Map<String, ObjectCustomContextPayload> custom = Maps.newHashMap();
+ * custom.put(key, null);
+ * ObjectContexts contexts = ObjectContexts.newBuilder().setCustom(custom).build();
+ */
+ BlobInfo pendingUpdate = blob.toBuilder().setContexts(contexts).build();
+ storage.update(pendingUpdate, precondition);
+
+ System.out.println(
+ "Updated custom contexts for object " + objectName + " in bucket " + bucketName);
+ }
+ }
+}
+// [END storage_set_object_contexts]
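To complement the upsert path shown above, the "delete a specific key" variant mentioned in the sample's comments looks roughly like this, with key, blob, and storage as in the sample:

// Sketch only: remove a single custom context entry by mapping its key to null.
Map<String, ObjectCustomContextPayload> custom = Maps.newHashMap();
custom.put(key, null);
ObjectContexts contexts = ObjectContexts.newBuilder().setCustom(custom).build();
storage.update(
    blob.toBuilder().setContexts(contexts).build(),
    Storage.BlobTargetOption.generationMatch());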
diff --git a/storage-shared-benchmarking/pom.xml b/storage-shared-benchmarking/pom.xml
index 134504016d..2379fec2f2 100644
--- a/storage-shared-benchmarking/pom.xml
+++ b/storage-shared-benchmarking/pom.xml
@@ -10,7 +10,7 @@
com.google.cloud
google-cloud-storage-parent
- 2.58.1
+ 2.59.0
@@ -31,7 +31,7 @@
com.google.cloud
google-cloud-storage
- 2.58.1
+ 2.59.0
tests
diff --git a/versions.txt b/versions.txt
index 1f8f3d6f0e..e73ee03edb 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,10 +1,10 @@
# Format:
# module:released-version:current-version
-google-cloud-storage:2.58.1:2.58.1
-gapic-google-cloud-storage-v2:2.58.1:2.58.1
-grpc-google-cloud-storage-v2:2.58.1:2.58.1
-proto-google-cloud-storage-v2:2.58.1:2.58.1
-google-cloud-storage-control:2.58.1:2.58.1
-proto-google-cloud-storage-control-v2:2.58.1:2.58.1
-grpc-google-cloud-storage-control-v2:2.58.1:2.58.1
+google-cloud-storage:2.59.0:2.59.0
+gapic-google-cloud-storage-v2:2.59.0:2.59.0
+grpc-google-cloud-storage-v2:2.59.0:2.59.0
+proto-google-cloud-storage-v2:2.59.0:2.59.0
+google-cloud-storage-control:2.59.0:2.59.0
+proto-google-cloud-storage-control-v2:2.59.0:2.59.0
+grpc-google-cloud-storage-control-v2:2.59.0:2.59.0