This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new b70d35ab64 HDDS-12889. Enable AppendCharacterWithChar PMD rule (#8324)
b70d35ab64 is described below
commit b70d35ab64aeba1a772bf2ea5578895a2c911731
Author: Ivan Zlenko <[email protected]>
AuthorDate: Sun Apr 27 00:14:39 2025 +0500
HDDS-12889. Enable AppendCharacterWithChar PMD rule (#8324)
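PMD's AppendCharacterWithChar rule flags StringBuilder/StringBuffer appends of single-character String literals and recommends the char overload, which avoids the extra String handling. A minimal, hypothetical illustration of the pattern the diff below applies throughout (not code taken from the patch):

    // Hypothetical example; class name and values are for illustration only.
    public class AppendCharExample {
      public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        // sb.append("\n");                            // flagged by AppendCharacterWithChar
        sb.append("lines: ").append(2).append('\n');   // preferred: char overload
        System.out.print(sb);
      }
    }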
---
dev-support/pmd/pmd-ruleset.xml | 1 +
.../java/org/apache/hadoop/hdds/HddsUtils.java | 2 +-
.../java/org/apache/hadoop/hdds/NodeDetails.java | 2 +-
.../org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java | 2 +-
.../hadoop/hdds/scm/net/NetworkTopologyImpl.java | 6 ++---
.../apache/hadoop/hdds/scm/pipeline/Pipeline.java | 6 ++---
.../apache/hadoop/hdds/tracing/StringCodec.java | 6 ++---
.../ozone/container/common/helpers/BlockData.java | 2 +-
.../helpers/BlockDeletingServiceMetrics.java | 30 +++++++++++-----------
.../helpers/DeletedContainerBlocksSummary.java | 6 ++---
.../common/impl/StorageLocationReport.java | 2 +-
.../ECReconstructionCoordinator.java | 2 +-
.../ozone/container/keyvalue/KeyValueHandler.java | 2 +-
.../replication/AbstractReplicationTask.java | 2 +-
.../protocol/commands/CloseContainerCommand.java | 2 +-
.../protocol/commands/ClosePipelineCommand.java | 2 +-
.../protocol/commands/CreatePipelineCommand.java | 2 +-
.../protocol/commands/DeleteBlocksCommand.java | 4 +--
.../protocol/commands/DeleteContainerCommand.java | 2 +-
.../commands/FinalizeNewLayoutVersionCommand.java | 2 +-
.../commands/ReconstructECContainersCommand.java | 4 +--
.../commands/RefreshVolumeUsageCommand.java | 2 +-
.../commands/ReplicateContainerCommand.java | 2 +-
.../ozone/protocol/commands/ReregisterCommand.java | 2 +-
.../commands/SetNodeOperationalStateCommand.java | 2 +-
.../container/common/utils/TestHddsVolumeUtil.java | 4 +--
.../container/common/volume/TestDbVolume.java | 2 +-
.../container/ozoneimpl/TestContainerReader.java | 2 +-
.../container/ozoneimpl/TestOzoneContainer.java | 2 +-
.../apache/ozone/erasurecode/TestCoderBase.java | 4 +--
.../rawcoder/RawErasureCoderBenchmark.java | 2 +-
.../hadoop/hdds/server/http/HttpServer2.java | 2 +-
.../hdds/server/http/PrometheusMetricsSink.java | 6 ++---
.../apache/hadoop/hdds/utils/HddsServerUtil.java | 2 +-
.../org/apache/hadoop/hdds/utils/SignalLogger.java | 2 +-
.../rocksdiff/TestRocksDBCheckpointDiffer.java | 2 +-
.../scm/block/ScmBlockDeletingServiceMetrics.java | 16 ++++++------
.../replication/ContainerHealthResult.java | 2 +-
.../replication/ECContainerReplicaCount.java | 2 +-
.../hdds/scm/node/DatanodeAdminMonitorImpl.java | 2 +-
.../hdds/scm/server/SCMClientProtocolServer.java | 2 +-
.../hdds/scm/server/SCMDatanodeProtocolServer.java | 6 ++---
.../apache/ozone/test/TimedOutTestsListener.java | 2 +-
.../hadoop/hdds/scm/cli/TopologySubcommand.java | 4 +--
.../hdds/scm/cli/container/InfoSubcommand.java | 6 ++---
.../hdds/scm/cli/datanode/ListInfoSubcommand.java | 6 ++---
.../scm/cli/container/TestReportSubCommand.java | 2 +-
.../hadoop/ozone/client/MockDatanodeStorage.java | 2 +-
.../main/java/org/apache/hadoop/ozone/OmUtils.java | 4 +--
.../ozone/om/helpers/OmKeyLocationInfoGroup.java | 8 +++---
.../hadoop/ozone/om/helpers/OzoneFileStatus.java | 4 +--
.../ozone/om/helpers/OzoneFileStatusLight.java | 4 +--
.../java/org/apache/ozone/lib/lang/XException.java | 2 +-
.../hadoop/conf/TestConfigurationFieldsBase.java | 2 +-
.../fs/contract/AbstractContractSeekTest.java | 2 +-
.../hadoop/fs/contract/ContractTestUtils.java | 4 +--
.../rpc/TestOzoneClientMultipartUploadWithFSO.java | 2 +-
.../apache/hadoop/ozone/om/TestKeyManagerImpl.java | 4 +--
.../apache/hadoop/ozone/MiniOzoneClusterImpl.java | 4 +--
.../hadoop/ozone/MiniOzoneHAClusterImpl.java | 2 +-
.../org/apache/hadoop/ozone/om/OzoneManager.java | 2 +-
.../ozone/om/ratis/OzoneManagerStateMachine.java | 2 +-
.../ozone/om/request/key/OMKeysDeleteRequest.java | 6 ++---
.../ozone/om/service/OpenKeyCleanupService.java | 2 +-
.../hadoop/ozone/om/snapshot/OmSnapshotUtils.java | 4 +--
.../request/file/TestOMDirectoryCreateRequest.java | 2 +-
.../TestRequestFeatureValidatorProcessor.java | 4 +--
.../apache/hadoop/fs/ozone/FileStatusAdapter.java | 4 +--
.../ozone/s3/signature/StringToSignProducer.java | 4 +--
.../hadoop/ozone/s3/endpoint/TestObjectPut.java | 2 +-
.../debug/audit/parser/common/DatabaseHelper.java | 4 +--
.../hadoop/ozone/freon/OmMetadataGenerator.java | 2 +-
.../org/apache/hadoop/ozone/freon/ProgressBar.java | 4 +--
.../schemaupgrade/ContainerUpgradeResult.java | 2 +-
.../schemaupgrade/VolumeUpgradeResult.java | 2 +-
.../hadoop/ozone/utils/FormattingCLIUtils.java | 20 +++++++--------
76 files changed, 144 insertions(+), 143 deletions(-)
diff --git a/dev-support/pmd/pmd-ruleset.xml b/dev-support/pmd/pmd-ruleset.xml
index 1bdb2be45a..e3af967ff3 100644
--- a/dev-support/pmd/pmd-ruleset.xml
+++ b/dev-support/pmd/pmd-ruleset.xml
@@ -35,6 +35,7 @@
<rule ref="category/java/bestpractices.xml/SimplifiableTestAssertion" />
<rule ref="category/java/performance.xml/AddEmptyString"/>
+ <rule ref="category/java/performance.xml/AppendCharacterWithChar" />
<rule ref="category/java/performance.xml/AvoidFileStream"/>
<rule ref="category/java/performance.xml/InefficientEmptyStringCheck"/>
<rule ref="category/java/performance.xml/InefficientStringBuffering"/>
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
index c450e46e8f..690631ca31 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
@@ -845,7 +845,7 @@ public static HddsProtos.UUID toProtobuf(UUID uuid) {
if (elements != null && elements.length > startIndex) {
final StringBuilder sb = new StringBuilder();
for (int line = startIndex; line < elements.length; line++) {
- sb.append(elements[line]).append("\n");
+ sb.append(elements[line]).append('\n');
}
return sb.toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java
index 05e8ecb498..0984048bd5 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java
@@ -105,7 +105,7 @@ public String getHostAddress() {
public String getRatisHostPortStr() {
StringBuilder hostPort = new StringBuilder();
hostPort.append(getHostName())
- .append(":")
+ .append(':')
.append(ratisPort);
return hostPort.toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java
index f3626bfeaf..d0806b34d5 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java
@@ -182,7 +182,7 @@ public static List<SCMNodeInfo> buildNodeInfo(ConfigurationSource conf) {
}
private static String buildAddress(String address, int port) {
- return new StringBuilder().append(address).append(":")
+ return new StringBuilder().append(address).append(':')
.append(port).toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java
index 99e718438d..1e5c57266c 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java
@@ -893,18 +893,18 @@ public String toString() {
StringBuilder tree = new StringBuilder();
tree.append("Level: ");
tree.append(maxLevel);
- tree.append("\n");
+ tree.append('\n');
netlock.readLock().lock();
try {
// print the number of leaves
int numOfLeaves = clusterTree.getNumOfLeaves();
tree.append("Number of leaves:");
tree.append(numOfLeaves);
- tree.append("\n");
+ tree.append('\n');
// print all nodes
for (int i = 0; i < numOfLeaves; i++) {
tree.append(clusterTree.getLeaf(i).getNetworkFullPath());
- tree.append("\n");
+ tree.append('\n');
}
} finally {
netlock.readLock().unlock();
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
index 605c3bc254..1549f4f782 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
@@ -521,20 +521,20 @@ public int hashCode() {
@Override
public String toString() {
final StringBuilder b =
- new StringBuilder(getClass().getSimpleName()).append("{");
+ new StringBuilder(getClass().getSimpleName()).append('{');
b.append(" Id: ").append(id.getId());
b.append(", Nodes: [");
for (DatanodeDetails datanodeDetails : nodeStatus.keySet()) {
b.append(" {").append(datanodeDetails);
b.append(", ReplicaIndex: ").append(this.getReplicaIndex(datanodeDetails)).append("},");
}
- b.append("]");
+ b.append(']');
b.append(", ReplicationConfig: ").append(replicationConfig);
b.append(", State:").append(getPipelineState());
b.append(", leaderId:").append(leaderId != null ? leaderId.toString() :
"");
b.append(", CreationTimestamp").append(getCreationTimestamp()
.atZone(ZoneId.systemDefault()));
- b.append("}");
+ b.append('}');
return b.toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java
index 18f968a9cf..f22393a50d 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java
@@ -70,9 +70,9 @@ public JaegerSpanContext extract(StringBuilder s) {
public void inject(JaegerSpanContext context, StringBuilder string) {
int intFlag = context.getFlags() & 255;
string.append(context.getTraceId())
- .append(":").append(Long.toHexString(context.getSpanId()))
- .append(":").append(Long.toHexString(context.getParentId()))
- .append(":").append(Integer.toHexString(intFlag));
+ .append(':').append(Long.toHexString(context.getSpanId()))
+ .append(':').append(Long.toHexString(context.getParentId()))
+ .append(':').append(Integer.toHexString(intFlag));
}
private static long high(String hexString) {
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
index b718e80689..eb1a436b67 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
@@ -279,7 +279,7 @@ public void appendTo(StringBuilder sb) {
sb.append("[blockId=");
blockID.appendTo(sb);
sb.append(", size=").append(size);
- sb.append("]");
+ sb.append(']');
}
public long getBlockGroupLength() {
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java
index 03f0fec183..6e4d638e6a 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java
@@ -206,22 +206,22 @@ public long getProcessedTransactionFailCount() {
@Override
public String toString() {
StringBuilder buffer = new StringBuilder()
- .append("successCount = ").append(successCount.value()).append("\t")
- .append("successBytes = ").append(successBytes.value()).append("\t")
- .append("failureCount = ").append(failureCount.value()).append("\t")
+ .append("successCount = ").append(successCount.value()).append('\t')
+ .append("successBytes = ").append(successBytes.value()).append('\t')
+ .append("failureCount = ").append(failureCount.value()).append('\t')
.append("outOfOrderDeleteBlockTransactionCount = ")
- .append(outOfOrderDeleteBlockTransactionCount.value()).append("\t")
- .append("totalPendingBlockCount = ").append(totalPendingBlockCount.value()).append("\t")
- .append("totalBlockChosenCount = ").append(totalBlockChosenCount.value()).append("\t")
- .append("totalContainerChosenCount = ").append(totalContainerChosenCount.value()).append("\t")
- .append("receivedTransactionCount = ").append(receivedTransactionCount.value()).append("\t")
- .append("receivedRetryTransactionCount = ").append(receivedRetryTransactionCount.value()).append("\t")
- .append("processedTransactionSuccessCount = ").append(processedTransactionSuccessCount.value()).append("\t")
- .append("processedTransactionFailCount = ").append(processedTransactionFailCount.value()).append("\t")
- .append("receivedContainerCount = ").append(receivedContainerCount.value()).append("\t")
- .append("receivedBlockCount = ").append(receivedBlockCount.value()).append("\t")
- .append("markedBlockCount = ").append(markedBlockCount.value()).append("\t")
- .append("totalLockTimeoutTransactionCount = ").append(totalLockTimeoutTransactionCount.value()).append("\t");
+ .append(outOfOrderDeleteBlockTransactionCount.value()).append('\t')
+ .append("totalPendingBlockCount = ").append(totalPendingBlockCount.value()).append('\t')
+ .append("totalBlockChosenCount = ").append(totalBlockChosenCount.value()).append('\t')
+ .append("totalContainerChosenCount = ").append(totalContainerChosenCount.value()).append('\t')
+ .append("receivedTransactionCount = ").append(receivedTransactionCount.value()).append('\t')
+ .append("receivedRetryTransactionCount = ").append(receivedRetryTransactionCount.value()).append('\t')
+ .append("processedTransactionSuccessCount = ").append(processedTransactionSuccessCount.value()).append('\t')
+ .append("processedTransactionFailCount = ").append(processedTransactionFailCount.value()).append('\t')
+ .append("receivedContainerCount = ").append(receivedContainerCount.value()).append('\t')
+ .append("receivedBlockCount = ").append(receivedBlockCount.value()).append('\t')
+ .append("markedBlockCount = ").append(markedBlockCount.value()).append('\t')
+ .append("totalLockTimeoutTransactionCount = ").append(totalLockTimeoutTransactionCount.value()).append('\t');
return buffer.toString();
}
}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
index 638e113756..610e54025b 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
@@ -99,7 +99,7 @@ public String getTxIDSummary() {
@Override public String toString() {
StringBuffer sb = new StringBuffer();
for (DeletedBlocksTransaction blks : blocks) {
- sb.append(" ")
+ sb.append(' ')
.append("TXID=")
.append(blks.getTxID())
.append(", ")
@@ -108,8 +108,8 @@ public String getTxIDSummary() {
.append(", ")
.append(blks.getContainerID())
.append(" : [")
- .append(StringUtils.join(',', blks.getLocalIDList())).append("]")
- .append("\n");
+ .append(StringUtils.join(',', blks.getLocalIDList())).append(']')
+ .append('\n');
}
return sb.toString();
}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
index 5f539e121b..d3148df8c8 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
@@ -235,7 +235,7 @@ public static StorageLocationReport getFromProtobuf(StorageReportProto report)
@Override
public String toString() {
StringBuilder sb = new StringBuilder(128)
- .append("{")
+ .append('{')
.append(" id=").append(id)
.append(" dir=").append(storageLocation)
.append(" type=").append(storageType);
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java
index 4694850b93..d8856f081d 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java
@@ -373,7 +373,7 @@ private void logBlockGroupDetails(BlockLocationInfo blockLocationInfo,
int cnt = 0;
for (ContainerProtos.ChunkInfo chunkInfo : data.getChunks()) {
if (cnt > 0) {
- sb.append("\n");
+ sb.append('\n');
}
sb.append(" chunkNum: ")
.append(++cnt)
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
index 408897c6bb..8e1080dc48 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
@@ -1427,7 +1427,7 @@ private boolean logBlocksFoundOnDisk(Container container) throws IOException {
StringBuilder stringBuilder = new StringBuilder();
for (Path block : dir) {
if (notEmpty) {
- stringBuilder.append(",");
+ stringBuilder.append(',');
}
stringBuilder.append(block);
notEmpty = true;
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java
index 2ba2b676a9..05932e6edf 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java
@@ -144,7 +144,7 @@ protected Object getCommandForDebug() {
@Override
public String toString() {
StringBuilder sb = new StringBuilder()
- .append(getStatus()).append(" ")
+ .append(getStatus()).append(' ')
.append(getCommandForDebug());
if (getStatus() == Status.QUEUED) {
sb.append(", queued at ").append(getQueued());
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
index 8d4b43d1cc..6bd5adb688 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
@@ -90,7 +90,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", containerID: ").append(getContainerID())
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java
index d1c8e9444b..0c1caa3dd6 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java
@@ -75,7 +75,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", pipelineID: ").append(getPipelineID());
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java
index b6340e86c6..ebd5bcfdcc 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java
@@ -159,7 +159,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", pipelineID: ").append(getPipelineID())
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
index e955cabdfa..e6457d4d25 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
@@ -69,7 +69,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", deletedBlocksTransaction: [");
@@ -83,7 +83,7 @@ public String toString() {
if (!blocksTobeDeleted.isEmpty()) {
sb.delete(sb.length() - 2, sb.length());
}
- sb.append("]");
+ sb.append(']');
return sb.toString();
}
}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java
index d142e23c23..193c67576c 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java
@@ -108,7 +108,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", containerID: ").append(getContainerID())
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java
index b686db5ef0..0d5c02202d 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java
@@ -78,7 +78,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", finalizeUpgrade: ").append(finalizeUpgrade)
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java
index 93bbfd8202..3628c2c9e6 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java
@@ -131,7 +131,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", containerID: ").append(containerID)
@@ -139,7 +139,7 @@ public String toString() {
.append(", sources: [").append(getSources().stream()
.map(a -> a.dnDetails
+ " replicaIndex: " + a.getReplicaIndex())
- .collect(Collectors.joining(", "))).append("]")
+ .collect(Collectors.joining(", "))).append(']')
.append(", targets: ").append(getTargetDatanodes())
.append(", missingIndexes: ").append(
Arrays.toString(missingContainerIndexes.toByteArray()));
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java
index f685a8282b..6872b152e1 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java
@@ -60,7 +60,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline());
return sb.toString();
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java
index d4046bd989..826af4fdd3 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java
@@ -159,7 +159,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType());
sb.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline());
sb.append(", containerId=").append(getContainerID());
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
index adb87a3898..82961d994a 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
@@ -59,7 +59,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline());
return sb.toString();
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java
index 45e51d00e7..51aaf87ae3 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java
@@ -91,7 +91,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", opState: ").append(opState)
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java
index 9ec248e383..9cbf95f3ea 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java
@@ -73,7 +73,7 @@ public void setup() throws Exception {
for (int i = 0; i < VOLUMNE_NUM; i++) {
hddsVolumeDirs[i] =
Files.createDirectory(tempDir.resolve("volumeDir" + i)).toFile();
- hddsDirs.append(hddsVolumeDirs[i]).append(",");
+ hddsDirs.append(hddsVolumeDirs[i]).append(',');
}
conf.set(ScmConfigKeys.HDDS_DATANODE_DIR_KEY, hddsDirs.toString());
hddsVolumeSet = new MutableVolumeSet(datanodeId, clusterId, conf, null,
@@ -85,7 +85,7 @@ public void setup() throws Exception {
for (int i = 0; i < VOLUMNE_NUM; i++) {
dbVolumeDirs[i] =
Files.createDirectory(tempDir.resolve("dbVolumeDir" + i)).toFile();
- dbDirs.append(dbVolumeDirs[i]).append(",");
+ dbDirs.append(dbVolumeDirs[i]).append(',');
}
conf.set(OzoneConfigKeys.HDDS_DATANODE_CONTAINER_DB_DIR,
dbDirs.toString());
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java
index 683c2b12b1..280f8597d7 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java
@@ -164,7 +164,7 @@ private MutableVolumeSet createHddsVolumeSet(int volumeNum)
for (int i = 0; i < volumeNum; i++) {
hddsVolumeDirs[i] =
Files.createDirectory(folder.resolve("volumeDir" + i)).toFile();
- hddsDirs.append(hddsVolumeDirs[i]).append(",");
+ hddsDirs.append(hddsVolumeDirs[i]).append(',');
}
CONF.set(ScmConfigKeys.HDDS_DATANODE_DIR_KEY, hddsDirs.toString());
MutableVolumeSet hddsVolumeSet = new MutableVolumeSet(DATANODE_UUID,
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java
index dbe6df0c7b..6a48765c1a 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java
@@ -375,7 +375,7 @@ public void testMultipleContainerReader(ContainerTestVersionInfo versionInfo)
for (int i = 0; i < volumeNum; i++) {
volumeDirs[i] =
Files.createDirectory(tempDir.resolve("volumeDir" + i)).toFile();
- datanodeDirs = datanodeDirs.append(volumeDirs[i]).append(",");
+ datanodeDirs = datanodeDirs.append(volumeDirs[i]).append(',');
}
BlockUtils.shutdownCache(conf);
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
index 34619c1918..de5b2331f1 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
@@ -189,7 +189,7 @@ public void testBuildNodeReport(ContainerTestVersionInfo versionInfo)
for (int i = 0; i < 3; i++) {
dbPaths[i] =
Files.createDirectory(folder.resolve(Integer.toString(i))).toFile();
- dbDirString.append(dbPaths[i]).append(",");
+ dbDirString.append(dbPaths[i]).append(',');
}
conf.set(OzoneConfigKeys.HDDS_DATANODE_CONTAINER_DB_DIR,
dbDirString.toString());
diff --git a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java
index 3f5d66bc14..a6f6dba517 100644
--- a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java
+++ b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java
@@ -482,7 +482,7 @@ protected void dumpSetting() {
StringBuilder sb = new StringBuilder("Erasure coder test settings:\n");
sb.append(" numDataUnits=").append(numDataUnits);
sb.append(" numParityUnits=").append(numParityUnits);
- sb.append(" chunkSize=").append(chunkSize).append("\n");
+ sb.append(" chunkSize=").append(chunkSize).append('\n');
sb.append(" erasedDataIndexes=").
append(Arrays.toString(erasedDataIndexes));
@@ -490,7 +490,7 @@ protected void dumpSetting() {
append(Arrays.toString(erasedParityIndexes));
sb.append(" usingDirectBuffer=").append(usingDirectBuffer);
sb.append(" allowVerboseDump=").append(allowDump);
- sb.append("\n");
+ sb.append('\n');
System.out.println(sb.toString());
}
diff --git a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java
index ef9b59325d..89c15316ef 100644
--- a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java
+++ b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java
@@ -85,7 +85,7 @@ private static void printAvailableCoders() {
StringBuilder sb = new StringBuilder(
"Available coders with coderIndex:\n");
for (CODER coder : CODER.values()) {
- sb.append(coder.ordinal()).append(":").append(coder).append("\n");
+ sb.append(coder.ordinal()).append(':').append(coder).append('\n');
}
System.out.println(sb);
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
index aa7eb407ae..0e505633dd 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
@@ -1410,7 +1410,7 @@ public String toString() {
: STATE_DESCRIPTION_NOT_LIVE)
.append("), listening at:");
for (ServerConnector l : listeners) {
- sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
+ sb.append(l.getHost()).append(':').append(l.getPort()).append("/,");
}
return sb.toString();
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
index 6f431bfe89..89cf9adbb6 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
@@ -90,7 +90,7 @@ private String getPrometheusMetricKeyAsString(MetricsRecord metricsRecord,
String key, String username) {
StringBuilder prometheusMetricKey = new StringBuilder();
prometheusMetricKey.append(key)
- .append("{");
+ .append('{');
String sep = "";
List<MetricsTag> metricsTags =
@@ -110,10 +110,10 @@ private String getPrometheusMetricKeyAsString(MetricsRecord metricsRecord,
.append(tagName)
.append("=\"")
.append(tag.value())
- .append("\"");
+ .append('"');
sep = ",";
}
- prometheusMetricKey.append("}");
+ prometheusMetricKey.append('}');
return prometheusMetricKey.toString();
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
index b3e21db17f..9a780be8b3 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
@@ -707,7 +707,7 @@ public static String toStartupShutdownString(String prefix, String... msg) {
StringBuilder b = new StringBuilder(prefix);
b.append("\n/************************************************************");
for (String s : msg) {
- b.append("\n").append(prefix).append(s);
+ b.append('\n').append(prefix).append(s);
}
b.append("\n************************************************************/");
return b.toString();
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java
index 9e5832c9e6..02dcc136c7 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java
@@ -94,7 +94,7 @@ public void register(final Logger log) {
log.info("Error installing UNIX signal handler for {}", signal, e);
}
}
- bld.append("]");
+ bld.append(']');
if (log.isInfoEnabled()) {
log.info(bld.toString());
}
diff --git a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
index b975ea26dc..2bafef9818 100644
--- a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
+++ b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
@@ -996,7 +996,7 @@ private void printMutableGraphFromAGivenNode(
for (CompactionNode current : currentLevel) {
Set<CompactionNode> successors = mutableGraph.successors(current);
for (CompactionNode succNode : successors) {
- sb.append(succNode.getFileName()).append(" ");
+ sb.append(succNode.getFileName()).append(' ');
nextLevel.add(succNode);
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java
index 2f298a5ebc..20293bbd4a 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java
@@ -334,15 +334,15 @@ public long getNumCommandsDatanodeFailed() {
@Override
public String toString() {
StringBuilder buffer = new StringBuilder()
- .append("numBlockDeletionTransactionCreated = ").append(numBlockDeletionTransactionCreated.value()).append("\t")
+ .append("numBlockDeletionTransactionCreated = ").append(numBlockDeletionTransactionCreated.value()).append('\t')
.append("numBlockDeletionTransactionCompleted = ")
- .append(numBlockDeletionTransactionCompleted.value()).append("\t")
- .append("numBlockDeletionCommandSent = ").append(numBlockDeletionCommandSent.value()).append("\t")
- .append("numBlockDeletionCommandSuccess = ").append(numBlockDeletionCommandSuccess.value()).append("\t")
- .append("numBlockDeletionCommandFailure = ").append(numBlockDeletionCommandFailure.value()).append("\t")
- .append("numBlockDeletionTransactionSent = ").append(numBlockDeletionTransactionSent.value()).append("\t")
- .append("numBlockDeletionTransactionSuccess = ").append(numBlockDeletionTransactionSuccess.value()).append("\t")
- .append("numBlockDeletionTransactionFailure = ").append(numBlockDeletionTransactionFailure.value()).append("\t")
+ .append(numBlockDeletionTransactionCompleted.value()).append('\t')
+ .append("numBlockDeletionCommandSent = ").append(numBlockDeletionCommandSent.value()).append('\t')
+ .append("numBlockDeletionCommandSuccess = ").append(numBlockDeletionCommandSuccess.value()).append('\t')
+ .append("numBlockDeletionCommandFailure = ").append(numBlockDeletionCommandFailure.value()).append('\t')
+ .append("numBlockDeletionTransactionSent = ").append(numBlockDeletionTransactionSent.value()).append('\t')
+ .append("numBlockDeletionTransactionSuccess = ").append(numBlockDeletionTransactionSuccess.value()).append('\t')
+ .append("numBlockDeletionTransactionFailure = ").append(numBlockDeletionTransactionFailure.value()).append('\t')
.append("numDeletionCommandsPerDatanode = ").append(numCommandsDatanode);
return buffer.toString();
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java
index f4489da345..1788bf9f52 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java
@@ -294,7 +294,7 @@ public String toString() {
if (requeueCount > 0) {
sb.append(" requeued:").append(requeueCount);
}
- return sb.append("}").toString();
+ return sb.append('}').toString();
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java
index 36be55c41b..13ad1d37b4 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java
@@ -569,7 +569,7 @@ public String toString() {
if (!pendingDelete.isEmpty()) {
sb.append(", PendingDelete: ").append(pendingDelete.size());
}
- sb.append(")")
+ sb.append(')')
.append(", ReplicationConfig: ").append(repConfig)
.append(", RemainingMaintenanceRedundancy: ")
.append(remainingMaintenanceRedundancy);
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java
index 4733c0629d..f5bcf6ff63 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java
@@ -499,7 +499,7 @@ private String replicaDetails(Collection<ContainerReplica> replicas) {
sb.append(replicas.stream()
.map(Object::toString)
.collect(Collectors.joining(",")));
- sb.append("}");
+ sb.append('}');
return sb.toString();
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
index 7bc437627a..b945786ac0 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
@@ -379,7 +379,7 @@ public List<ContainerWithPipeline> getContainerWithPipelineBatch(
ContainerWithPipeline cp = getContainerWithPipelineCommon(containerID);
cpList.add(cp);
strContainerIDs.append(ContainerID.valueOf(containerID).toString());
- strContainerIDs.append(",");
+ strContainerIDs.append(',');
} catch (IOException ex) {
AUDIT.logReadFailure(buildAuditMessageForFailure(
SCMAction.GET_CONTAINER_WITH_PIPELINE_BATCH,
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
index df615eec3b..eb4feec2fc 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
@@ -276,14 +276,14 @@ public static SCMRegisteredResponseProto getRegisteredResponse(
private String constructCommandAuditMap(List<SCMCommandProto> cmds) {
StringBuilder auditMap = new StringBuilder();
- auditMap.append("[");
+ auditMap.append('[');
for (SCMCommandProto cmd : cmds) {
if (cmd.getCommandType().equals(deleteBlocksCommand)) {
auditMap.append("commandType: ").append(cmd.getCommandType());
auditMap.append(" deleteTransactionsCount: ")
.append(cmd.getDeleteBlocksCommandProto().getDeletedBlocksTransactionsCount());
auditMap.append(" cmdID: ").append(cmd.getDeleteBlocksCommandProto().getCmdId());
- auditMap.append(" encodedToken: \"").append(cmd.getEncodedToken()).append("\"");
+ auditMap.append(" encodedToken: \"").append(cmd.getEncodedToken()).append('"');
auditMap.append(" deadlineMsSinceEpoch: ").append(cmd.getDeadlineMsSinceEpoch());
} else {
auditMap.append(TextFormat.shortDebugString(cmd));
@@ -294,7 +294,7 @@ private String constructCommandAuditMap(List<SCMCommandProto> cmds) {
if (len > 2) {
auditMap.delete(len - 2, len);
}
- auditMap.append("]");
+ auditMap.append(']');
return auditMap.toString();
}
diff --git a/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java b/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java
index c2d07137a5..c7a8873c69 100644
--- a/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java
+++ b/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java
@@ -96,7 +96,7 @@ private static String buildThreadDump() {
dump.append("\n at ");
dump.append(stackTraceElement);
}
- dump.append("\n");
+ dump.append('\n');
}
return dump.toString();
}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
index b2f7ce1384..5a850551c2 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
@@ -181,9 +181,9 @@ private String formatPortOutput(List<HddsProtos.Port> ports) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < ports.size(); i++) {
HddsProtos.Port port = ports.get(i);
- sb.append(port.getName()).append("=").append(port.getValue());
+ sb.append(port.getName()).append('=').append(port.getValue());
if (i < ports.size() - 1) {
- sb.append(",");
+ sb.append(',');
}
}
return sb.toString();
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
index 38df564536..2c3ad44c97 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
@@ -195,11 +195,11 @@ private static String buildDatanodeDetails(DatanodeDetails details) {
private static String buildReplicaDetails(ContainerReplicaInfo replica) {
StringBuilder sb = new StringBuilder()
- .append("State: ").append(replica.getState()).append(";");
+ .append("State: ").append(replica.getState()).append(';');
if (replica.getReplicaIndex() != -1) {
- sb.append(" ReplicaIndex: ").append(replica.getReplicaIndex()).append(";");
+ sb.append(" ReplicaIndex: ").append(replica.getReplicaIndex()).append(';');
}
- sb.append(" Origin: ").append(replica.getPlaceOfBirth().toString()).append(";")
+ sb.append(" Origin: ").append(replica.getPlaceOfBirth().toString()).append(';')
.append(" Location: ").append(buildDatanodeDetails(replica.getDatanodeDetails()));
return sb.toString();
}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
index 929873c658..90e678f281 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
@@ -145,9 +145,9 @@ private void printDatanodeInfo(DatanodeWithAttributes dna) {
relatedPipelineNum = relatedPipelines.size();
relatedPipelines.forEach(
p -> pipelineListInfo.append(p.getId().getId().toString())
- .append("/").append(p.getReplicationConfig().toString())
- .append("/").append(p.getType().toString())
- .append("/").append(p.getPipelineState().toString()).append("/")
+ .append('/').append(p.getReplicationConfig().toString())
+ .append('/').append(p.getType().toString())
+ .append('/').append(p.getPipelineState().toString()).append('/')
.append(datanode.getUuid().equals(p.getLeaderId()) ?
"Leader" : "Follower")
.append(System.getProperty("line.separator")));
diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java
index f74850e6c4..ad661ea3c8 100644
--- a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java
+++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java
@@ -177,7 +177,7 @@ private String containerList(int start, int end) {
if (i != start) {
sb.append(", ");
}
- sb.append("#").append(i);
+ sb.append('#').append(i);
}
return sb.toString();
}
diff --git a/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java
index 92f0b6ea4c..a526cebe2c 100644
--- a/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java
+++ b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java
@@ -133,7 +133,7 @@ public BlockData getBlock(DatanodeBlockID blockID) {
if (!blocks.containsKey(id)) {
StringBuilder sb = new StringBuilder();
for (BlockID bid : blocks.keySet()) {
- sb.append(bid).append("\n");
+ sb.append(bid).append('\n');
}
throw new AssertionError("blockID " + id +
" not found in blocks. Available block ID: \n" + sb);
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java
index 789d26c888..9ca59b9c38 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java
@@ -919,10 +919,10 @@ public static String getOMAddressListPrintString(List<OMNodeDetails> omList) {
}
printString.append(omList.get(0).getOMPrintInfo());
for (int i = 1; i < omList.size(); i++) {
- printString.append(",")
+ printString.append(',')
.append(omList.get(i).getOMPrintInfo());
}
- printString.append("]");
+ printString.append(']');
return printString.toString();
}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java
index 01d5fbaa70..ddf62a45ed 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java
@@ -185,14 +185,14 @@ void addAll(long versionToAdd, List<OmKeyLocationInfo> locationInfoList) {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("version:").append(version).append(" ");
- sb.append("isMultipartKey:").append(isMultipartKey).append(" ");
+ sb.append("version:").append(version).append(' ');
+ sb.append("isMultipartKey:").append(isMultipartKey).append(' ');
for (List<OmKeyLocationInfo> kliList : locationVersionMap.values()) {
for (OmKeyLocationInfo kli: kliList) {
sb.append("conID ").append(kli.getContainerID());
- sb.append(" ");
+ sb.append(' ');
sb.append("locID ").append(kli.getLocalID());
- sb.append(" ");
+ sb.append(' ');
sb.append("bcsID ").append(kli.getBlockCommitSequenceId());
sb.append(" || ");
}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java
index cafffb4809..99ea12b1cd 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java
@@ -134,7 +134,7 @@ public int hashCode() {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
- sb.append("{");
+ sb.append('{');
if (keyInfo == null) {
sb.append("<root>");
} else {
@@ -143,7 +143,7 @@ public String toString() {
sb.append(" (dir)");
}
}
- sb.append("}");
+ sb.append('}');
return sb.toString();
}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java
index 213431188b..5d07f33108 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java
@@ -132,7 +132,7 @@ public int hashCode() {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
- sb.append("{");
+ sb.append('{');
if (keyInfo == null) {
sb.append("<root>");
} else {
@@ -141,7 +141,7 @@ public String toString() {
sb.append(" (dir)");
}
}
- sb.append("}");
+ sb.append('}');
return sb.toString();
}
diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java
index b773d2b4db..d9e0147987 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java
@@ -99,7 +99,7 @@ private static String format(ERROR error, Object... args) {
if (template == null) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < args.length; i++) {
- sb.append(" {").append(i).append("}");
+ sb.append(" {").append(i).append('}');
}
template = sb.deleteCharAt(0).toString();
}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
index 8a1fcb5b55..198874fac8 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
@@ -435,7 +435,7 @@ public void testCompareConfigurationClassAgainstXml() {
StringBuilder xmlErrorMsg = new StringBuilder();
for (Class c : configurationClasses) {
xmlErrorMsg.append(c);
- xmlErrorMsg.append(" ");
+ xmlErrorMsg.append(' ');
}
xmlErrorMsg.append("has ");
xmlErrorMsg.append(missingXmlSize);
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
index ca1450382d..39ecfdda09 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
@@ -361,7 +361,7 @@ public void testRandomSeeks() throws Throwable {
sb.append("Sequence of actions:\n");
for (int j = 0; j < seeks.length; j++) {
sb.append("seek @ ").append(seeks[j]).append(" ")
- .append("read ").append(reads[j]).append("\n");
+ .append("read ").append(reads[j]).append('\n');
}
LOG.error(sb.toString());
throw afe;
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
index 1d3fcf4ff5..1ddaa079bb 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
@@ -1456,9 +1456,9 @@ public static String pathsToString(Collection<Path> paths) {
builder.append(nl);
for (Path path : paths) {
builder.append(" \"").append(path.toString())
- .append("\"").append(nl);
+ .append('"').append(nl);
}
- builder.append("]");
+ builder.append(']');
return builder.toString();
}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java
index f69869c286..1055087d82 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java
@@ -870,7 +870,7 @@ public void testListMultipartUploadsPagination() throws Exception {
StringBuilder key = new StringBuilder();
int depth = 1 + i % 3; // Creates varying depth (1-3 levels)
for (int j = 0; j < depth; j++) {
- key.append("dir").append(j + 1).append("/");
+ key.append("dir").append(j + 1).append('/');
}
key.append("file").append(i);
keys.add(key.toString());
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
index f1568407a7..0679fcf24f 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
@@ -363,7 +363,7 @@ public void testCreateDirectory() throws IOException {
.setKeyName(keyNameBuf.toString())
.build();
for (int i = 0; i < 5; i++) {
-      keyNameBuf.append("/").append(RandomStringUtils.secure().nextAlphabetic(5));
+      keyNameBuf.append('/').append(RandomStringUtils.secure().nextAlphabetic(5));
}
String keyName = keyNameBuf.toString();
writeClient.createDirectory(keyArgs);
@@ -430,7 +430,7 @@ public void testOpenFile() throws IOException {
StringBuffer keyNameBuf = new StringBuffer();
keyNameBuf.append(RandomStringUtils.secure().nextAlphabetic(5));
for (int i = 0; i < 5; i++) {
-      keyNameBuf.append("/").append(RandomStringUtils.secure().nextAlphabetic(5));
+      keyNameBuf.append('/').append(RandomStringUtils.secure().nextAlphabetic(5));
}
keyName = keyNameBuf.toString();
keyArgs = createBuilder()
diff --git a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
index cef2724de2..ef097ab579 100644
--- a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
+++ b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
@@ -712,10 +712,10 @@ private String getSCMAddresses(List<StorageContainerManager> scms) {
while (iter.hasNext()) {
StorageContainerManager scm = iter.next();
stringBuilder.append(scm.getDatanodeRpcAddress().getHostString())
- .append(":")
+ .append(':')
.append(scm.getDatanodeRpcAddress().getPort());
if (iter.hasNext()) {
- stringBuilder.append(",");
+ stringBuilder.append(',');
}
}
diff --git a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java
index e221520333..92e1360732 100644
--- a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java
+++ b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java
@@ -634,7 +634,7 @@ private void initSCMHAConfig() {
if (i == 1) {
conf.set(ScmConfigKeys.OZONE_SCM_PRIMORDIAL_NODE_ID_KEY, scmNodeId);
}
- scmNodesKeyValue.append(",").append(scmNodeId);
+ scmNodesKeyValue.append(',').append(scmNodeId);
String scmAddrKey = ConfUtils.addKeySuffixes(
ScmConfigKeys.OZONE_SCM_ADDRESS_KEY, scmServiceId, scmNodeId);
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
index 0624f0ee0e..df3cef96a5 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
@@ -905,7 +905,7 @@ private void logVersionMismatch(OzoneConfiguration conf, ScmInfo scmInfo) {
StringBuilder scmBlockAddressBuilder = new StringBuilder();
for (SCMNodeInfo scmNodeInfo : scmNodeInfoList) {
scmBlockAddressBuilder.append(scmNodeInfo.getBlockClientAddress())
- .append(",");
+ .append(',');
}
String scmBlockAddress = scmBlockAddressBuilder.toString();
if (!StringUtils.isBlank(scmBlockAddress)) {
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java
index 71e810d78e..8eadd5bff0 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java
@@ -225,7 +225,7 @@ public void notifyConfigurationChanged(long term, long index,
.append(", index=").append(index)
.append(", New Peer list: ");
newPeers.forEach(peer -> logBuilder.append(peer.getId().toStringUtf8())
- .append("(")
+ .append('(')
.append(peer.getAddress())
.append("), "));
LOG.info(logBuilder.substring(0, logBuilder.length() - 2));
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java
index b9a09cbc75..36b61f83e7 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java
@@ -354,9 +354,9 @@ protected static void addDeletedKeys(Map<String, String> auditMap,
StringBuilder keys = new StringBuilder();
for (int i = 0; i < deletedKeyInfos.size(); i++) {
OmKeyInfo key = deletedKeyInfos.get(i);
-      keys.append("{").append(KEY).append("=").append(key.getKeyName()).append(", ");
-      keys.append(DATA_SIZE).append("=").append(key.getDataSize()).append(", ");
-      keys.append(REPLICATION_CONFIG).append("=").append(key.getReplicationConfig()).append("}");
+      keys.append('{').append(KEY).append('=').append(key.getKeyName()).append(", ");
+      keys.append(DATA_SIZE).append('=').append(key.getDataSize()).append(", ");
+      keys.append(REPLICATION_CONFIG).append('=').append(key.getReplicationConfig()).append('}');
if (i < deletedKeyInfos.size() - 1) {
keys.append(", ");
}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java
index 1583f3fef7..d4f97b23ee 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java
@@ -208,7 +208,7 @@ public BackgroundTaskResult call() throws Exception {
.append(": ")
.append(openKey.getKeysList().stream().map(OzoneManagerProtocolProtos.OpenKey::getName)
.collect(Collectors.toList()))
- .append("\n");
+ .append('\n');
}
LOG.debug("Non-hsync'ed openKeys being deleted in current
iteration: \n" + sb);
}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java
index 8fd47c998e..f5805044b7 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java
@@ -92,8 +92,8 @@ public static Path createHardLinkList(int truncateLength,
fixedFile = f.toString();
}
}
- sb.append(truncateFileName(truncateLength, entry.getKey())).append("\t")
- .append(fixedFile).append("\n");
+ sb.append(truncateFileName(truncateLength, entry.getKey())).append('\t')
+ .append(fixedFile).append('\n');
}
Files.write(data, sb.toString().getBytes(StandardCharsets.UTF_8));
return data;
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java
index 31fb48c57d..65cc1c0d7e 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java
@@ -539,7 +539,7 @@ private String genRandomKeyName() {
StringBuilder keyNameBuilder = new StringBuilder();
keyNameBuilder.append(RandomStringUtils.secure().nextAlphabetic(5));
for (int i = 0; i < 3; i++) {
-      keyNameBuilder.append("/").append(RandomStringUtils.secure().nextAlphabetic(5));
+      keyNameBuilder.append('/').append(RandomStringUtils.secure().nextAlphabetic(5));
}
return keyNameBuilder.toString();
}
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java
index d55cefc2e7..cb30adaa42 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java
@@ -487,9 +487,9 @@ private StringBuilder buildMethodSignature(
StringBuilder signature = new StringBuilder();
signature.append(" ");
for (String modifier : modifiers) {
- signature.append(modifier).append(" ");
+ signature.append(modifier).append(' ');
}
- signature.append(returnType).append(" ");
+ signature.append(returnType).append(' ');
signature.append("validatorMethod(");
signature.append(createParameterList(paramspecs));
signature.append(") ");
diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java
index cf858bb335..eba24f88bb 100644
--- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java
+++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java
@@ -145,7 +145,7 @@ public BlockLocation[] getBlockLocations() {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName())
- .append("{")
+ .append('{')
.append("path=").append(path)
.append("; isDirectory=").append(isdir);
if (isFile()) {
@@ -159,7 +159,7 @@ public String toString() {
.append("; group=").append(group)
.append("; permission=").append(permission)
.append("; isSymlink=").append(getSymlink())
- .append("}");
+ .append('}');
return sb.toString();
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
index a2cb0c6993..5a474a024e 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
@@ -177,7 +177,7 @@ public static String buildCanonicalRequest(
for (String header : StringUtils.split(signedHeaders, ';')) {
canonicalHeaders.append(header.toLowerCase());
- canonicalHeaders.append(":");
+ canonicalHeaders.append(':');
if (headers.containsKey(header)) {
String headerValue = headers.get(header);
canonicalHeaders.append(headerValue);
@@ -291,7 +291,7 @@ private static String getQueryParamString(
for (String p : params) {
if (!p.equals("X-Amz-Signature")) {
if (result.length() > 0) {
- result.append("&");
+ result.append('&');
}
result.append(urlEncode(p));
result.append('=');
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
index 4e8b9b6bad..4f22bd4184 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
@@ -312,7 +312,7 @@ public void testPutObjectWithTooManyTags() throws Exception {
for (int i = 0; i < TAG_NUM_LIMIT + 1; i++) {
sb.append(String.format("tag%d=value%d", i, i));
if (i < TAG_NUM_LIMIT) {
- sb.append("&");
+ sb.append('&');
}
}
when(headersWithTooManyTags.getHeaderString(TAG_HEADER)).thenReturn(sb.toString());
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java
index 8b68f396f6..7d6271aca3 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java
@@ -209,9 +209,9 @@ private static String executeStatement(String dbName, String sql)
while (rs.next()) {
for (int index = 1; index <= cols; index++) {
result.append(rs.getObject(index));
- result.append("\t");
+ result.append('\t');
}
- result.append("\n");
+ result.append('\n');
}
}
}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java
index 834eb60b99..e89c71ed11 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java
@@ -308,7 +308,7 @@ public Supplier<String> realTimeStatusSupplier() {
maxValueRecorder.put(name, maxValue);
valueRecorder.put(name, curValue);
instantsRecorder.put(name, now);
- sb.append(" ")
+ sb.append(' ')
.append(name)
.append(": rate ")
.append(rate)
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java
index b578a3430c..4fddb9054b 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java
@@ -177,7 +177,7 @@ private void printProgressBar(PrintStream stream, long value) {
if (!realTimeMessage.isEmpty()) {
shrinkTimes = 3;
}
- sb.append(" ").append(String.format("%.2f", percent)).append("% |");
+ sb.append(' ').append(String.format("%.2f", percent)).append("% |");
for (int i = 0; i <= percent / shrinkTimes; i++) {
sb.append('█');
}
@@ -185,7 +185,7 @@ private void printProgressBar(PrintStream stream, long value) {
sb.append(' ');
}
sb.append("| ");
- sb.append(value).append("/").append(maxValue);
+ sb.append(value).append('/').append(maxValue);
long timeInSec = TimeUnit.SECONDS.convert(
System.nanoTime() - startTime, TimeUnit.NANOSECONDS);
String timeToPrint = String.format("%d:%02d:%02d", timeInSec / 3600,
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java
index aa7b3dbb26..83f6c4d552 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java
@@ -113,7 +113,7 @@ public String toString() {
stringBuilder.append(getCostMs());
stringBuilder.append(", status=");
stringBuilder.append(status);
- stringBuilder.append("}");
+ stringBuilder.append('}');
return stringBuilder.toString();
}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java
index d7dce31e87..067de25b3b 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java
@@ -94,7 +94,7 @@ public String toString() {
if (resultMap != null) {
resultMap.forEach((k, r) -> {
stringBuilder.append(r.toString());
- stringBuilder.append("\n");
+ stringBuilder.append('\n');
total.addAndGet(r.getTotalRow());
});
}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java
index df5844ca08..6bc2284201 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java
@@ -133,12 +133,12 @@ private void buildTitle() {
if (this.title.length() > maxTitleSize) {
this.title = this.title.substring(0, maxTitleSize);
}
- this.join.append("+");
+ this.join.append('+');
for (int i = 0; i < maxTitleSize + 2; i++) {
- this.join.append("-");
+ this.join.append('-');
}
this.join.append("+\n")
- .append("|")
+ .append('|')
.append(StrUtils.center(this.title, maxTitleSize + 2, ' '))
.append("|\n");
this.lastTableRowType = TableRowType.TITLE;
@@ -177,14 +177,14 @@ private void buildTable() {
* @param data dataLine
*/
private void buildRowBorder(List<String> data) {
- this.join.append("+");
+ this.join.append('+');
for (int i = 0, len = data.size(); i < len; i++) {
for (int j = 0; j < this.maxColMap.get(i) + 2; j++) {
- this.join.append("-");
+ this.join.append('-');
}
- this.join.append("+");
+ this.join.append('+');
}
- this.join.append("\n");
+ this.join.append('\n');
}
/**
@@ -192,12 +192,12 @@ private void buildRowBorder(List<String> data) {
* @param data dataLine
*/
private void buildRowLine(List<String> data) {
- this.join.append("|");
+ this.join.append('|');
for (int i = 0, len = data.size(); i < len; i++) {
this.join.append(StrUtils.center(data.get(i), this.maxColMap.get(i) + 2,
' '))
- .append("|");
+ .append('|');
}
- this.join.append("\n");
+ this.join.append('\n');
}
/**
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]