Enforce java.nio.charset.StandardCharsets against guava Charsets #32083

Merged: 2 commits, Aug 6, 2024
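The change itself is mechanical: every use of the vendored Guava Charsets constants (org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets) is replaced with the equivalent constant from java.nio.charset.StandardCharsets, which has shipped with the JDK since Java 7. Both expose java.nio.charset.Charset instances, so call sites such as String.getBytes(...), ByteString.copyFrom(...), and ByteArrayOutputStream.toString(...) only need the import swapped, and runtime behavior is unchanged. A minimal sketch of the before/after pattern (the class and variable names below are illustrative, not taken from the Beam codebase):

    import java.nio.charset.StandardCharsets;

    public class CharsetMigrationSketch {
      public static void main(String[] args) {
        String payload = "hello beam";

        // Before: payload.getBytes(
        //     org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets.UTF_8);
        // After: the JDK constant, no vendored Guava import needed.
        byte[] bytes = payload.getBytes(StandardCharsets.UTF_8);

        // Round-trip to show the two constants are interchangeable Charset instances.
        System.out.println(new String(bytes, StandardCharsets.UTF_8));
      }
    }

The hunks below show the same substitution applied across the examples, Flink runner, and Dataflow runner/worker code.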
@@ -17,6 +17,7 @@
  */
 package org.apache.beam.examples.complete.kafkatopubsub.transforms;

+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import org.apache.beam.examples.complete.kafkatopubsub.avro.AvroDataClass;
@@ -37,7 +38,6 @@
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PDone;
 import org.apache.beam.sdk.values.TypeDescriptor;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableMap;
 import org.apache.kafka.common.serialization.StringDeserializer;

@@ -120,7 +120,8 @@ public PDone expand(PCollection<String> input) {
 MapElements.into(TypeDescriptor.of(PubsubMessage.class))
 .via(
 (String json) ->
-new PubsubMessage(json.getBytes(Charsets.UTF_8), ImmutableMap.of())))
+new PubsubMessage(
+json.getBytes(StandardCharsets.UTF_8), ImmutableMap.of())))
 .apply(
 "writePubsubMessagesToPubSub", PubsubIO.writeMessages().to(options.getOutputTopic()));
 }
@@ -17,8 +17,8 @@
  */
 package org.apache.beam.runners.flink.translation.wrappers.streaming.io;

+import java.nio.charset.StandardCharsets;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -60,7 +60,8 @@ public void run(SourceContext<WindowedValue<byte[]>> ctx) {
 while (running && (messageCount == 0 || count < subtaskCount)) {
 synchronized (ctx.getCheckpointLock()) {
 ctx.collect(
-WindowedValue.valueInGlobalWindow(String.valueOf(count).getBytes(Charsets.UTF_8)));
+WindowedValue.valueInGlobalWindow(
+String.valueOf(count).getBytes(StandardCharsets.UTF_8)));
 count++;
 }

@@ -25,7 +25,7 @@

 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.junit.Test;

 /** Tests for {@link FlinkJobServerDriver}. */
@@ -104,7 +104,7 @@ public void testJobServerDriver() throws Exception {
 boolean success = false;
 while (!success) {
 newErr.flush();
-String output = baos.toString(Charsets.UTF_8.name());
+String output = baos.toString(StandardCharsets.UTF_8.name());
 if (output.contains("JobService started on localhost:")
 && output.contains("ArtifactStagingService started on localhost:")
 && output.contains("ExpansionService started on localhost:")) {
@@ -114,7 +114,8 @@ }
 }
 }
 assertThat(driver.getJobServerUrl(), is(not(nullValue())));
-assertThat(baos.toString(Charsets.UTF_8.name()), containsString(driver.getJobServerUrl()));
+assertThat(
+baos.toString(StandardCharsets.UTF_8.name()), containsString(driver.getJobServerUrl()));
 assertThat(driverThread.isAlive(), is(true));
 } catch (Throwable t) {
 // restore to print exception
@@ -149,7 +150,7 @@ public void testJobServerDriverWithoutExpansionService() throws Exception {
 boolean success = false;
 while (!success) {
 newErr.flush();
-String output = baos.toString(Charsets.UTF_8.name());
+String output = baos.toString(StandardCharsets.UTF_8.name());
 if (output.contains("JobService started on localhost:")
 && output.contains("ArtifactStagingService started on localhost:")) {
 success = true;
@@ -161,7 +162,8 @@
 }
 }
 assertThat(driver.getJobServerUrl(), is(not(nullValue())));
-assertThat(baos.toString(Charsets.UTF_8.name()), containsString(driver.getJobServerUrl()));
+assertThat(
+baos.toString(StandardCharsets.UTF_8.name()), containsString(driver.getJobServerUrl()));
 assertThat(driverThread.isAlive(), is(true));
 } catch (Throwable t) {
 // restore to print exception
@@ -38,6 +38,7 @@
 import java.lang.reflect.Method;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -56,7 +57,6 @@
 import org.apache.beam.sdk.util.construction.PTransformMatchers;
 import org.apache.beam.sdk.util.construction.PTransformTranslation;
 import org.apache.beam.sdk.util.construction.resources.PipelineResources;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableList;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.RemoteEnvironment;
@@ -373,7 +373,7 @@ public void processElement(ProcessContext ctx) {
 }
 replacementStdErr.flush();
 assertThat(
-new String(byteArrayOutputStream.toByteArray(), Charsets.UTF_8),
+new String(byteArrayOutputStream.toByteArray(), StandardCharsets.UTF_8),
 containsString(
 "UnboundedSources present which rely on checkpointing, but checkpointing is disabled."));
 }
@@ -20,6 +20,7 @@
 import java.io.File;
 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.security.Permission;
 import java.util.Collection;
@@ -30,7 +31,6 @@
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.util.construction.resources.PipelineResources;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Iterables;
@@ -200,7 +200,7 @@ private static void prepareEnvironment() throws Exception {
 RestOptions.PORT.key(),
 flinkCluster.getRestPort());

-Files.write(file.toPath(), config.getBytes(Charsets.UTF_8));
+Files.write(file.toPath(), config.getBytes(StandardCharsets.UTF_8));

 // Create a new environment with the location of the Flink config for CliFrontend
 ImmutableMap<String, String> newEnv =
@@ -103,7 +103,6 @@
 import org.apache.beam.sdk.values.WindowingStrategy;
 import org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.Struct;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Iterables;
@@ -818,7 +817,7 @@ private void testEnsureDeferredStateCleanupTimerFiring(boolean withCheckpointing
 BagState<ByteString> state = // State from the SDK Harness is stored as ByteStrings
 operator.keyedStateInternals.state(
 stateNamespace, StateTags.bag(stateId, ByteStringCoder.of()));
-state.add(ByteString.copyFrom("userstate".getBytes(Charsets.UTF_8)));
+state.add(ByteString.copyFrom("userstate".getBytes(StandardCharsets.UTF_8)));
 assertThat(testHarness.numKeyedStateEntries(), is(1));

 // user timer that fires after the end of the window and after state cleanup
@@ -966,7 +965,7 @@ public void testEnsureStateCleanupOnFinalWatermark() throws Exception {
 BagState<ByteString> state = // State from the SDK Harness is stored as ByteStrings
 operator.keyedStateInternals.state(
 stateNamespace, StateTags.bag(stateId, ByteStringCoder.of()));
-state.add(ByteString.copyFrom("userstate".getBytes(Charsets.UTF_8)));
+state.add(ByteString.copyFrom("userstate".getBytes(StandardCharsets.UTF_8)));
 // No timers have been set for cleanup
 assertThat(testHarness.numEventTimeTimers(), is(0));
 // State has been created
@@ -988,8 +987,8 @@ public void testCacheTokenHandling() throws Exception {
 new ExecutableStageDoFnOperator.BagUserStateFactory<>(
 test, stateBackend, NoopLock.get(), null);

-ByteString key1 = ByteString.copyFrom("key1", Charsets.UTF_8);
-ByteString key2 = ByteString.copyFrom("key2", Charsets.UTF_8);
+ByteString key1 = ByteString.copyFrom("key1", StandardCharsets.UTF_8);
+ByteString key2 = ByteString.copyFrom("key2", StandardCharsets.UTF_8);

 Map<String, Map<String, ProcessBundleDescriptors.BagUserStateSpec>> userStateMapMock =
 Mockito.mock(Map.class);
@@ -22,12 +22,12 @@
 import static org.hamcrest.core.Is.is;

 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.util.CoderUtils;
 import org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.junit.Test;

 /** Tests for {@link FlinkKeyUtils}. */
@@ -66,7 +66,7 @@ public void testCoderContext() throws Exception {
 @Test
 @SuppressWarnings("ByteBufferBackingArray")
 public void testFromEncodedKey() {
-ByteString input = ByteString.copyFrom("hello world".getBytes(Charsets.UTF_8));
+ByteString input = ByteString.copyFrom("hello world".getBytes(StandardCharsets.UTF_8));
 ByteBuffer encodedKey = FlinkKeyUtils.fromEncodedKey(input);
 assertThat(encodedKey.array(), is(input.toByteArray()));
 }
@@ -42,6 +42,7 @@
 import com.google.api.services.dataflow.model.Job;
 import com.google.api.services.dataflow.model.Step;
 import com.google.api.services.dataflow.model.WorkerPool;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -110,7 +111,6 @@
 import org.apache.beam.sdk.values.WindowingStrategy;
 import org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.annotations.VisibleForTesting;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Iterables;
 import org.apache.commons.codec.EncoderException;
 import org.apache.commons.codec.net.PercentCodec;
@@ -618,7 +618,7 @@ static class StepTranslator implements StepTranslationContext {
 // For compatibility with URL encoding implementations that represent space as +,
 // always encode + as %2b even though we don't encode space as +.
 private final PercentCodec percentCodec =
-new PercentCodec("+".getBytes(Charsets.US_ASCII), false);
+new PercentCodec("+".getBytes(StandardCharsets.US_ASCII), false);

 private StepTranslator(Translator translator, Step step) {
 this.translator = translator;
@@ -764,7 +764,8 @@ private void addResourceHints(ResourceHints hints) {
 try {
 urlEncodedHints.put(
 entry.getKey(),
-new String(percentCodec.encode(entry.getValue().toBytes()), Charsets.US_ASCII));
+new String(
+percentCodec.encode(entry.getValue().toBytes()), StandardCharsets.US_ASCII));
 } catch (EncoderException e) {
 // Should never happen.
 throw new RuntimeException("Invalid value for resource hint: " + entry.getKey(), e);
@@ -29,11 +29,11 @@
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
+import java.nio.charset.StandardCharsets;
 import org.apache.beam.runners.dataflow.worker.util.common.worker.Operation;
 import org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.extensions.gcp.util.Transport;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.MoreObjects;

 /** Container class for different types of network nodes. All nodes only have reference equality. */
@@ -59,7 +59,7 @@ private static String toStringWithTrimmedLiterals(GenericJson json) {
 ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
 final JsonGenerator baseGenerator =
 MoreObjects.firstNonNull(json.getFactory(), Transport.getJsonFactory())
-.createJsonGenerator(byteStream, Charsets.UTF_8);
+.createJsonGenerator(byteStream, StandardCharsets.UTF_8);
 JsonGenerator generator =
 new JsonGenerator() {
 @Override
@@ -164,7 +164,7 @@ public void enablePrettyPrint() throws IOException {
 generator.enablePrettyPrint();
 generator.serialize(json);
 generator.flush();
-return byteStream.toString(Charsets.UTF_8.name());
+return byteStream.toString(StandardCharsets.UTF_8.name());
 } catch (IOException e) {
 throw new RuntimeException(e);
 }
@@ -42,7 +42,6 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.util.common.Reiterator;
 import org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Lists;
 import org.checkerframework.checker.nullness.qual.Nullable;
 import org.junit.After;
@@ -130,10 +129,10 @@ private void setCurrentExecutionState(String mockOriginalName) {
 private static ShuffleEntry shuffleEntry(String key, String value) {
 return new ShuffleEntry(
 /* use key itself as position */
-ByteArrayShufflePosition.of(key.getBytes(Charsets.UTF_8)),
-ByteString.copyFrom(key.getBytes(Charsets.UTF_8)),
+ByteArrayShufflePosition.of(key.getBytes(StandardCharsets.UTF_8)),
+ByteString.copyFrom(key.getBytes(StandardCharsets.UTF_8)),
 ByteString.copyFrom(new byte[0]),
-ByteString.copyFrom(value.getBytes(Charsets.UTF_8)));
+ByteString.copyFrom(value.getBytes(StandardCharsets.UTF_8)));
 }

 @Test
@@ -80,7 +80,6 @@
 import org.apache.beam.sdk.util.CoderUtils;
 import org.apache.beam.sdk.values.TimestampedValue;
 import org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Supplier;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ArrayListMultimap;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableList;
@@ -225,7 +224,7 @@ public void resetUnderTest() {
 .forComputation("comp")
 .forKey(
 WindmillComputationKey.create(
-"comp", ByteString.copyFrom("dummyKey", Charsets.UTF_8), 123),
+"comp", ByteString.copyFrom("dummyKey", StandardCharsets.UTF_8), 123),
 17L,
 workToken)
 .forFamily(STATE_FAMILY),
@@ -240,7 +239,7 @@
 .forComputation("comp")
 .forKey(
 WindmillComputationKey.create(
-"comp", ByteString.copyFrom("dummyNewKey", Charsets.UTF_8), 123),
+"comp", ByteString.copyFrom("dummyNewKey", StandardCharsets.UTF_8), 123),
 17L,
 workToken)
 .forFamily(STATE_FAMILY),
@@ -255,7 +254,7 @@
 .forComputation("comp")
 .forKey(
 WindmillComputationKey.create(
-"comp", ByteString.copyFrom("dummyNewKey", Charsets.UTF_8), 123),
+"comp", ByteString.copyFrom("dummyNewKey", StandardCharsets.UTF_8), 123),
 17L,
 workToken)
 .forFamily(STATE_FAMILY),
@@ -2004,7 +2003,9 @@ false, key(NAMESPACE, tag), STATE_FAMILY, VarIntCoder.of()))
 }

 // clear cache and recreate multimapState
-cache.forComputation("comp").invalidate(ByteString.copyFrom("dummyKey", Charsets.UTF_8), 123);
+cache
+.forComputation("comp")
+.invalidate(ByteString.copyFrom("dummyKey", StandardCharsets.UTF_8), 123);
 resetUnderTest();
 multimapState = underTest.state(NAMESPACE, addr);

@@ -27,6 +27,7 @@
 import com.google.api.client.util.Lists;
 import com.google.common.collect.Maps;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.AbstractMap;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -48,7 +49,6 @@
 import org.apache.beam.sdk.util.ByteStringOutputStream;
 import org.apache.beam.sdk.values.TimestampedValue;
 import org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Iterables;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Range;
 import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.io.BaseEncoding;
@@ -1151,8 +1151,8 @@ public void testReadSortedListWithContinuations() throws Exception {
 .addFetchRanges(SortedListRange.newBuilder().setStart(beginning).setLimit(end))
 .setFetchMaxBytes(WindmillStateReader.MAX_ORDERED_LIST_BYTES));

-final ByteString CONT_1 = ByteString.copyFrom("CONTINUATION_1", Charsets.UTF_8);
-final ByteString CONT_2 = ByteString.copyFrom("CONTINUATION_2", Charsets.UTF_8);
+final ByteString CONT_1 = ByteString.copyFrom("CONTINUATION_1", StandardCharsets.UTF_8);
+final ByteString CONT_2 = ByteString.copyFrom("CONTINUATION_2", StandardCharsets.UTF_8);
 Windmill.KeyedGetDataResponse.Builder response1 =
 Windmill.KeyedGetDataResponse.newBuilder()
 .setKey(DATA_KEY)
@@ -1327,7 +1327,7 @@ public void testReadTagValuePrefixWithContinuations() throws Exception {
 .setStateFamily(STATE_FAMILY)
 .setFetchMaxBytes(WindmillStateReader.MAX_TAG_VALUE_PREFIX_BYTES));

-final ByteString CONT = ByteString.copyFrom("CONTINUATION", Charsets.UTF_8);
+final ByteString CONT = ByteString.copyFrom("CONTINUATION", StandardCharsets.UTF_8);
 Windmill.KeyedGetDataResponse.Builder response1 =
 Windmill.KeyedGetDataResponse.newBuilder()
 .setKey(DATA_KEY)