From 3493efc4333f88ede3e1be1e7037f86869c6acab Mon Sep 17 00:00:00 2001
From: PJ Fanning
Date: Sat, 28 Oct 2023 11:32:29 +0100
Subject: [PATCH] format issues

---
 .../hadoop/mapreduce/security/SecureShuffleUtils.java     |  2 +-
 .../nativetask/buffer/TestByteBufferReadWrite.java        |  1 -
 .../main/java/org/apache/hadoop/examples/pi/Util.java     | 10 ++++++----
 .../java/org/apache/hadoop/maven/plugin/util/Exec.java    |  1 -
 .../TestAbfsConfigurationFieldsValidation.java            |  1 -
 .../apache/hadoop/tools/rumen/RandomSeedGenerator.java    |  1 -
 .../java/org/apache/hadoop/streaming/PipeMapper.java      |  6 ++++--
 .../java/org/apache/hadoop/streaming/PipeReducer.java     |  6 ++++--
 .../apache/hadoop/streaming/io/TextOutputReader.java      |  1 -
 .../streaming/mapreduce/StreamBaseRecordReader.java       |  3 ++-
 .../org/apache/hadoop/yarn/client/cli/ClusterCLI.java     |  1 -
 .../org/apache/hadoop/yarn/client/cli/NodeCLI.java        |  1 -
 .../ifile/LogAggregationIndexedFileController.java        |  1 -
 .../containermanager/launcher/TestContainerLaunch.java    |  1 -
 14 files changed, 17 insertions(+), 19 deletions(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
index 79c195e6e3038..47c8d12cb4b2f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
@@ -86,7 +86,7 @@ private static boolean verifyHash(byte[] hash, byte[] msg, SecretKey key) {
    */
   public static String hashFromString(String enc_str, SecretKey key)
       throws IOException {
-      return generateHash(enc_str.getBytes(StandardCharsets.UTF_8), key);
+    return generateHash(enc_str.getBytes(StandardCharsets.UTF_8), key);
   }
 
   /**
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java
index 84a34f10008f9..8dfa5322e84e6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java
@@ -19,7 +19,6 @@
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.mapred.nativetask.NativeDataTarget;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
index c23bdf6f9d529..4e8461525ae3f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
@@ -216,7 +216,8 @@ public static PrintWriter createWriter(File dir, String prefix) throws IOExcepti
       final File f = new File(dir,
           prefix + dateFormat.format(new Date(System.currentTimeMillis())) + ".txt");
       if (!f.exists())
-        return new PrintWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8));
+        return new PrintWriter(new OutputStreamWriter(
+            new FileOutputStream(f), StandardCharsets.UTF_8));
 
       try {Thread.sleep(10);} catch (InterruptedException e) {}
     }
@@ -310,13 +311,14 @@ static List<TaskResult> readJobOutputs(FileSystem fs, Path outdir) throws IOExce
 
   static void writeResults(String name, List<TaskResult> results, FileSystem fs, String dir) throws IOException {
     final Path outfile = new Path(dir, name + ".txt");
     Util.out.println(name + "> writing results to " + outfile);
-    final PrintWriter out = new PrintWriter(new OutputStreamWriter(fs.create(outfile), StandardCharsets.UTF_8), true);
+    final PrintWriter printWriter = new PrintWriter(new OutputStreamWriter(
+        fs.create(outfile), StandardCharsets.UTF_8), true);
     try {
       for(TaskResult r : results)
-        out.println(r);
+        printWriter.println(r);
     } finally {
-      out.close();
+      printWriter.close();
     }
   }
 
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
index ffc202e08a5f7..86e8d9c2a0ed9 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
@@ -19,7 +19,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
index 0d8df48d8f7af..f041f4bccdc8c 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
@@ -22,7 +22,6 @@
 import java.lang.reflect.Field;
 import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.codec.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
 import org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys;
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
index 4c366c56ad401..817c5c8b2f704 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.tools.rumen;
 
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java
index df6ba1ccfb875..438a00057ec14 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java
@@ -76,8 +76,10 @@ public void configure(JobConf job) {
           inputFormatClassName.equals(TextInputFormat.class.getCanonicalName()));
     }
 
-    mapOutputFieldSeparator = job.get("stream.map.output.field.separator", "\t").getBytes(StandardCharsets.UTF_8);
-    mapInputFieldSeparator = job.get("stream.map.input.field.separator", "\t").getBytes(StandardCharsets.UTF_8);
+    mapOutputFieldSeparator = job.get("stream.map.output.field.separator", "\t")
+        .getBytes(StandardCharsets.UTF_8);
+    mapInputFieldSeparator = job.get("stream.map.input.field.separator", "\t")
+        .getBytes(StandardCharsets.UTF_8);
     numOfMapOutputKeyFields = job.getInt("stream.num.map.output.key.fields", 1);
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java
index 47358659e21c5..1f5a247bb2aa1 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java
@@ -72,8 +72,10 @@ public void configure(JobConf job) {
     SkipBadRecords.setAutoIncrReducerProcCount(job, false);
     skipping = job.getBoolean(MRJobConfig.SKIP_RECORDS, false);
 
-    reduceOutFieldSeparator = job_.get("stream.reduce.output.field.separator", "\t").getBytes(StandardCharsets.UTF_8);
-    reduceInputFieldSeparator = job_.get("stream.reduce.input.field.separator", "\t").getBytes(StandardCharsets.UTF_8);
+    reduceOutFieldSeparator = job_.get("stream.reduce.output.field.separator", "\t")
+        .getBytes(StandardCharsets.UTF_8);
+    reduceInputFieldSeparator = job_.get("stream.reduce.input.field.separator", "\t")
+        .getBytes(StandardCharsets.UTF_8);
     this.numOfReduceOutputKeyFields = job_.getInt("stream.num.reduce.output.key.fields", 1);
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java
index 7bc58e657908f..11c84a471f7d7 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java
@@ -21,7 +21,6 @@
 import java.io.DataInput;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.StandardCharsets;
 
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java
index 43c1b1bec0a71..e3c14743cb32b 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.streaming.mapreduce;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -107,7 +108,7 @@ void numRecStats(byte[] record, int start, int len) throws IOException {
     numRec_++;
     if (numRec_ == nextStatusRec_) {
       String recordStr = new String(record, start, Math.min(len,
-          statusMaxRecordChars_), "UTF-8");
+          statusMaxRecordChars_), StandardCharsets.UTF_8);
       nextStatusRec_ += 100;// *= 10;
       String status = getStatus(recordStr);
       LOG.info(status);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java
index b1811eabae488..a0df9d6b18bd1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java
@@ -23,7 +23,6 @@
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index a9e51c2521518..317f30cdde914 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -21,7 +21,6 @@
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
index fba873645fcc5..7c6be1305d09e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
@@ -26,7 +26,6 @@
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.Serializable;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index 0a754698af136..7c4815ff94b6f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -35,7 +35,6 @@
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
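
Reviewer note: beyond the line wrapping, the recurring cleanup above is the move from charset-name strings such as "UTF-8" to the java.nio.charset.StandardCharsets constants. The Charset-based overloads of String.getBytes, new String(byte[], ...) and OutputStreamWriter throw no checked UnsupportedEncodingException, which is why that import, the plain Charset import, and commons-codec's Charsets all become unused and are dropped. A minimal self-contained sketch of the pattern; the class name and file path below are illustrative, not taken from the patch:

  import java.io.File;
  import java.io.FileOutputStream;
  import java.io.IOException;
  import java.io.OutputStreamWriter;
  import java.io.PrintWriter;
  import java.nio.charset.StandardCharsets;

  public class Utf8Example {
    public static void main(String[] args) throws IOException {
      // Charset overload: no UnsupportedEncodingException to declare,
      // unlike the String-based new OutputStreamWriter(out, "UTF-8").
      File f = new File("example.txt"); // illustrative path
      try (PrintWriter w = new PrintWriter(new OutputStreamWriter(
          new FileOutputStream(f), StandardCharsets.UTF_8))) {
        w.println("hello");
      }
      // String.getBytes(Charset) likewise throws no checked exception:
      byte[] sep = "\t".getBytes(StandardCharsets.UTF_8);
      // new String(byte[], int, int, Charset) replaces the "UTF-8" form:
      System.out.println(new String(sep, 0, sep.length, StandardCharsets.UTF_8));
    }
  }

The out -> printWriter rename in Util.writeResults also reads as more than wrapping: the local PrintWriter named out was easy to confuse with the Util.out stream printed to on the preceding line, and the new name removes that ambiguity.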