This is an automated email from the ASF dual-hosted git repository.

jackie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/pinot.git


The following commit(s) were added to refs/heads/master by this push:
     new ec87186017 Replace More String.format() References With Concatenation (#15148)
ec87186017 is described below

commit ec871860175a672b5d10a13400dfbfa962417d1b
Author: ashishjayamohan <46698969+ashishjayamo...@users.noreply.github.com>
AuthorDate: Thu Feb 27 18:18:06 2025 -0800

    Replace More String.format() References With Concatenation (#15148)
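
    For context, every change in this commit applies the same pattern: an error or log
    message built with String.format() is rewritten as plain string concatenation, which
    avoids format-string parsing when the message is constructed. A minimal sketch of the
    before/after follows (the segmentName/tableName variables and the message text are
    illustrative placeholders, not lines from this diff):

        // Before: String.format() parses the template at runtime, even though
        // every argument is simply appended in order.
        throw new IllegalStateException(
            String.format("Failed to process segment: %s for table: %s", segmentName, tableName));

        // After: plain concatenation; javac lowers this to a StringBuilder chain
        // (or an invokedynamic string concat on modern JDKs), skipping format parsing.
        throw new IllegalStateException(
            "Failed to process segment: " + segmentName + " for table: " + tableName);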
---
 .../apache/pinot/sql/parsers/CalciteSqlParser.java |  2 +-
 .../pinot/common/metrics/MetricValueUtils.java     |  2 +-
 .../PinotAccessControlUserRestletResource.java     |  9 +-
 .../helix/core/PinotHelixResourceManager.java      | 95 ++++++++++------------
 .../tests/ClusterIntegrationTestUtils.java         | 30 ++++---
 .../pinot/integration/tests/ClusterTest.java       |  4 +-
 .../tests/MultiStageEngineIntegrationTest.java     |  4 +-
 .../tests/OfflineClusterIntegrationTest.java       |  8 +-
 .../integration/tests/TlsIntegrationTest.java      |  2 +-
 .../spark/SparkSegmentGenerationJobRunner.java     |  4 +-
 .../spark3/SparkSegmentGenerationJobRunner.java    |  9 +-
 .../standalone/SegmentGenerationJobRunner.java     |  2 +-
 .../BaseMultipleSegmentsConversionExecutor.java    | 28 +++----
 .../pinot/plugin/minion/tasks/MinionTaskUtils.java | 11 ++-
 .../tasks/mergerollup/MergeRollupTaskExecutor.java |  4 +-
 .../mergerollup/MergeRollupTaskGenerator.java      | 16 ++--
 .../UpsertCompactionTaskExecutor.java              | 11 ++-
 .../filebased/FileBasedSegmentWriter.java          | 16 ++--
 .../org/apache/pinot/tools/HybridQuickstart.java   |  5 +-
 .../pinot/tools/perf/PerfBenchmarkDriver.java      |  3 +-
 20 files changed, 121 insertions(+), 144 deletions(-)

diff --git a/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java b/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java
index 6beecdd0ca..e28708faea 100644
--- a/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java
+++ b/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java
@@ -301,7 +301,7 @@ public class CalciteSqlParser {
      List<Expression> operands = filterExpression.getFunctionCall().getOperands();
      for (int i = 1; i < operands.size(); i++) {
        if (operands.get(i).getLiteral().isSetNullValue()) {
-          throw new IllegalStateException(String.format("Using NULL in %s filter is not supported", operator));
+          throw new IllegalStateException("Using NULL in " + operator + " filter is not supported");
         }
       }
     }
diff --git a/pinot-common/src/test/java/org/apache/pinot/common/metrics/MetricValueUtils.java b/pinot-common/src/test/java/org/apache/pinot/common/metrics/MetricValueUtils.java
index 9008f8971c..33cdf4821b 100644
--- a/pinot-common/src/test/java/org/apache/pinot/common/metrics/MetricValueUtils.java
+++ b/pinot-common/src/test/java/org/apache/pinot/common/metrics/MetricValueUtils.java
@@ -115,7 +115,7 @@ public class MetricValueUtils {
     } else if (metrics instanceof MinionMetrics) {
       metricPrefix = "pinot.minion.";
     } else {
-      throw new RuntimeException(String.format("unsupported AbstractMetrics type: %s", metrics.getClass().toString()));
+      throw new RuntimeException("unsupported AbstractMetrics type: " + metrics.getClass().toString());
     }
     return metrics.getMetricsRegistry().allMetrics()
        .get(new YammerMetricName(new MetricName(metrics.getClass(), metricPrefix + metricName)));
diff --git a/pinot-controller/src/main/java/org/apache/pinot/controller/api/resources/PinotAccessControlUserRestletResource.java b/pinot-controller/src/main/java/org/apache/pinot/controller/api/resources/PinotAccessControlUserRestletResource.java
index 61da0ea860..5a8ee9a28b 100644
--- a/pinot-controller/src/main/java/org/apache/pinot/controller/api/resources/PinotAccessControlUserRestletResource.java
+++ b/pinot-controller/src/main/java/org/apache/pinot/controller/api/resources/PinotAccessControlUserRestletResource.java
@@ -149,8 +149,8 @@ public class PinotAccessControlUserRestletResource {
         }
         try {
             _pinotHelixResourceManager.addUser(userConfig);
-            return new SuccessResponse(String.format("User %s has been successfully added!",
-                userConfig.getUserName() + '_' + userConfig.getComponentType()));
+            return new SuccessResponse("User " + userConfig.getUserName() + '_' + userConfig.getComponentType()
+                + " has been successfully added!");
         } catch (Exception e) {
             if (e instanceof UserAlreadyExistsException) {
                throw new ControllerApplicationException(LOGGER, e.getMessage(), Response.Status.CONFLICT, e);
@@ -214,9 +214,8 @@ public class PinotAccessControlUserRestletResource {
             }
            String usernameWithComponentTypeFromUserConfig = userConfig.getUsernameWithComponent();
            if (!usernameWithComponentType.equals(usernameWithComponentTypeFromUserConfig)) {
-                throw new ControllerApplicationException(LOGGER,
-                    String.format("Request user %s does not match %s in the Request body",
-                        usernameWithComponentType, usernameWithComponentTypeFromUserConfig),
+                throw new ControllerApplicationException(LOGGER, "Request user " + usernameWithComponentType
+                    + " does not match " + usernameWithComponentTypeFromUserConfig + " in the Request body",
                     Response.Status.BAD_REQUEST);
             }
            if (!_pinotHelixResourceManager.hasUser(username, componentTypeStr)) {
diff --git a/pinot-controller/src/main/java/org/apache/pinot/controller/helix/core/PinotHelixResourceManager.java b/pinot-controller/src/main/java/org/apache/pinot/controller/helix/core/PinotHelixResourceManager.java
index 5351d48063..15ee91acfa 100644
--- a/pinot-controller/src/main/java/org/apache/pinot/controller/helix/core/PinotHelixResourceManager.java
+++ b/pinot-controller/src/main/java/org/apache/pinot/controller/helix/core/PinotHelixResourceManager.java
@@ -583,8 +583,7 @@ public class PinotHelixResourceManager {
     String instanceId = InstanceUtils.getHelixInstanceId(instance);
     InstanceConfig instanceConfig = getHelixInstanceConfig(instanceId);
     if (instanceConfig != null) {
-      throw new ClientErrorException(String.format("Instance: %s already exists", instanceId),
-          Response.Status.CONFLICT);
+      throw new ClientErrorException("Instance: " + instanceId + " already exists", Response.Status.CONFLICT);
     }
 
     instanceConfig = InstanceUtils.toHelixInstanceConfig(instance);
@@ -606,8 +605,8 @@ public class PinotHelixResourceManager {
      HelixHelper.updateBrokerResource(_helixZkManager, instanceId, newBrokerTags, tablesAdded, null);
      LOGGER.info("Updated broker resource for broker: {} with tags: {} in {}ms, tables added: {}", instanceId,
          newBrokerTags, System.currentTimeMillis() - startTimeMs, tablesAdded);
-      return PinotResourceManagerResponse.success(
-          String.format("Added instance: %s, and updated broker resource - tables added: %s", instanceId, tablesAdded));
+      return PinotResourceManagerResponse.success("Added instance: " + instanceId + ", and updated broker resource - "
+          + "tables added: " + tablesAdded);
    } else {
      return PinotResourceManagerResponse.success("Added instance: " + instanceId);
     }
@@ -648,9 +647,8 @@ public class PinotHelixResourceManager {
      HelixHelper.updateBrokerResource(_helixZkManager, instanceId, newBrokerTags, tablesAdded, tablesRemoved);
      LOGGER.info("Updated broker resource for broker: {} with tags: {} in {}ms, tables added: {}, tables removed: {}",
          instanceId, newBrokerTags, System.currentTimeMillis() - startTimeMs, tablesAdded, tablesRemoved);
-      return PinotResourceManagerResponse.success(
-          String.format("Updated instance: %s, and updated broker resource - tables added: %s, tables removed: %s",
-              instanceId, tablesAdded, tablesRemoved));
+      return PinotResourceManagerResponse.success("Updated instance: " + instanceId + ", and updated broker resource - "
+          + "tables added: " + tablesAdded + ", tables removed: " + tablesRemoved);
    } else {
      return PinotResourceManagerResponse.success("Updated instance: " + instanceId);
     }
@@ -689,12 +687,10 @@ public class PinotHelixResourceManager {
      HelixHelper.updateBrokerResource(_helixZkManager, instanceId, newBrokerTags, tablesAdded, tablesRemoved);
      LOGGER.info("Updated broker resource for broker: {} with tags: {} in {}ms, tables added: {}, tables removed: {}",
          instanceId, newBrokerTags, System.currentTimeMillis() - startTimeMs, tablesAdded, tablesRemoved);
-      return PinotResourceManagerResponse.success(String.format(
-          "Updated tags: %s for instance: %s, and updated broker resource - tables added: %s, tables removed: %s",
-          newTags, instanceId, tablesAdded, tablesRemoved));
+      return PinotResourceManagerResponse.success("Updated tags: " + newTags + " for instance: " + instanceId
+          + ", and updated broker resource - tables added: " + tablesAdded + ", tables removed: " + tablesRemoved);
    } else {
-      return PinotResourceManagerResponse.success(
-          String.format("Updated tags: %s for instance: %s", newTags, instanceId));
+      return PinotResourceManagerResponse.success("Updated tags: " + newTags + " for instance: " + instanceId);
     }
   }
 
@@ -719,9 +715,8 @@ public class PinotHelixResourceManager {
    HelixHelper.updateBrokerResource(_helixZkManager, instanceId, brokerTags, tablesAdded, tablesRemoved);
    LOGGER.info("Updated broker resource for broker: {} with tags: {} in {}ms, tables added: {}, tables removed: {}",
        instanceId, brokerTags, System.currentTimeMillis() - startTimeMs, tablesAdded, tablesRemoved);
-    return PinotResourceManagerResponse.success(
-        String.format("Updated broker resource for broker: %s - tables added: %s, tables removed: %s", instanceId,
-            tablesAdded, tablesRemoved));
+    return PinotResourceManagerResponse.success("Updated broker resource for broker: " + instanceId
+        + " - tables added: " + tablesAdded + ", tables removed: " + tablesRemoved);
   }
 
   /**
@@ -1381,9 +1376,8 @@ public class PinotHelixResourceManager {
     int numOfflineInstances = serverTenant.getOfflineInstances();
     int numRealtimeInstances = serverTenant.getRealtimeInstances();
    if (numInstances < numOfflineInstances || numInstances < numRealtimeInstances) {
-      throw new BadRequestException(
-          String.format("Cannot request more offline instances: %d or realtime instances: %d than total instances: %d",
-              numOfflineInstances, numRealtimeInstances, numInstances));
+      throw new BadRequestException("Cannot request more offline instances: " + numOfflineInstances
+          + " or realtime instances: " + numRealtimeInstances + " than total instances: " + numInstances);
     }
     // TODO: Consider throwing BadRequestException
     List<String> untaggedInstances = getOnlineUnTaggedServerInstanceList();
@@ -1529,7 +1523,7 @@ public class PinotHelixResourceManager {
       if (override) {
         updateSchema(schema, oldSchema, force);
       } else {
-        throw new SchemaAlreadyExistsException(String.format("Schema: %s already exists", schemaName));
+        throw new SchemaAlreadyExistsException("Schema: " + schemaName + " already exists");
       }
     } else {
       // Add new schema
@@ -1556,7 +1550,7 @@ public class PinotHelixResourceManager {
 
    Schema oldSchema = ZKMetadataProvider.getSchema(_propertyStore, schemaName);
    if (oldSchema == null) {
-      throw new SchemaNotFoundException(String.format("Schema: %s does not exist", schemaName));
+      throw new SchemaNotFoundException("Schema: " + schemaName + " does not exist");
     }
 
     updateSchema(schema, oldSchema, forceTableSchemaUpdate);
@@ -1588,8 +1582,8 @@ public class PinotHelixResourceManager {
         LOGGER.warn("Force updated schema: {} which is backward incompatible 
with the existing schema", oldSchema);
       } else {
         // TODO: Add the reason of the incompatibility
-        throw new SchemaBackwardIncompatibleException(
-            String.format("New schema: %s is not backward-compatible with the 
existing schema", schemaName));
+        throw new SchemaBackwardIncompatibleException("New schema: " + 
schemaName + " is not backward-compatible with "
+            + "the existing schema");
       }
     }
     ZKMetadataProvider.setSchema(_propertyStore, schema);
@@ -1880,9 +1874,8 @@ public class PinotHelixResourceManager {
     if (CollectionUtils.isNotEmpty(tierConfigList)) {
       for (TierConfig tierConfig : tierConfigList) {
         if (getInstancesWithTag(tierConfig.getServerTag()).isEmpty()) {
-          throw new InvalidTableConfigException(
-              String.format("Failed to find instances with tag: %s as used by tier: %s for table: %s",
-                  tierConfig.getServerTag(), tierConfig.getName(), tableNameWithType));
+          throw new InvalidTableConfigException("Failed to find instances with tag: " + tierConfig.getServerTag()
+              + " as used by tier: " + tierConfig.getName() + " for table: " + tableNameWithType);
         }
       }
     }
@@ -1902,9 +1895,8 @@ public class PinotHelixResourceManager {
        String taskInstanceTag = taskTypeConfig.getOrDefault(PinotTaskManager.MINION_INSTANCE_TAG_CONFIG,
            CommonConstants.Helix.UNTAGGED_MINION_INSTANCE);
        if (!minionInstanceTagSet.contains(taskInstanceTag)) {
-          throw new InvalidTableConfigException(
-              String.format("Failed to find minion instances with tag: %s for table: %s", taskInstanceTag,
-                  tableConfig.getTableName()));
+          throw new InvalidTableConfigException("Failed to find minion instances with tag: " + taskInstanceTag
+              + " for table: " + tableConfig.getTableName());
         }
       });
     }
@@ -2862,7 +2854,7 @@ public class PinotHelixResourceManager {
     if (liveInstance == null) {
       // check if the instance exists in the cluster
      String instanceConfigPath = PropertyPathBuilder.instanceConfig(_helixClusterName, instanceName);
-      throw new RuntimeException(String.format("Can't find instance: %s on %s", instanceName, instanceConfigPath));
+      throw new RuntimeException("Can't find instance: " + instanceName + " on " + instanceConfigPath);
     }
 
     // gather metadata for sending state transition message.
@@ -2884,9 +2876,9 @@ public class PinotHelixResourceManager {
           || !resetPartitionNames.contains(message.getPartitionName())) {
         continue;
       }
-      throw new RuntimeException(
-          String.format("Can't reset state for %s.%s on %s, because a pending message %s exists for resource %s",
-              resourceName, resetPartitionNames, instanceName, message, message.getResourceName()));
+      throw new RuntimeException("Can't reset state for " + resourceName + "." + resetPartitionNames + " on "
+          + instanceName + ", because a pending message " + message + " exists for resource "
+          + message.getResourceName());
     }
 
     String adminName = null;
@@ -3747,9 +3739,8 @@ public class PinotHelixResourceManager {
       try {
         instanceAdminEndpoint = _instanceAdminEndpointCache.get(instance);
       } catch (Exception e) {
-        String errorMessage =
-            String.format("Caught exception while getting instance admin endpoint for instance: %s. Error message: %s",
-                instance, e.getMessage());
+        String errorMessage = "Caught exception while getting instance admin endpoint for instance: " + instance
+            + ". Error message: " + e.getMessage();
         LOGGER.error(errorMessage, e);
         throw new InvalidConfigException(errorMessage);
       }
@@ -3997,8 +3988,8 @@ public class PinotHelixResourceManager {
           }
         });
       } catch (Exception e) {
-        String errorMsg = String.format("Failed to update the segment lineage 
during startReplaceSegments. "
-            + "(tableName = %s, segmentsFrom = %s, segmentsTo = %s)", 
tableNameWithType, segmentsFrom, segmentsTo);
+        String errorMsg = "Failed to update the segment lineage during 
startReplaceSegments. (tableName = "
+            + tableNameWithType + ", segmentsFrom = " + segmentsFrom + ", 
segmentsTo = " + segmentsTo + ")";
         LOGGER.error(errorMsg, e);
         throw new RuntimeException(errorMsg, e);
       }
@@ -4053,10 +4044,9 @@ public class PinotHelixResourceManager {
               + "segmentLineageEntryId = {})", tableNameWithType, 
segmentLineageEntryId);
           return true;
         } else if (lineageEntry.getState() == LineageEntryState.REVERTED) {
-          String errorMsg = String.format(
-              "The target lineage entry state is not 'IN_PROGRESS'. Cannot 
update to 'COMPLETED' state. "
-                  + "(tableNameWithType=%s, segmentLineageEntryId=%s, 
state=%s)", tableNameWithType,
-              segmentLineageEntryId, lineageEntry.getState());
+          String errorMsg = "The target lineage entry state is not 
'IN_PROGRESS'. Cannot update to 'COMPLETED' state. "
+              + "(tableNameWithType=" + tableNameWithType + ", 
segmentLineageEntryId=" + segmentLineageEntryId
+              + ", state=" + lineageEntry.getState() + ")";
           LOGGER.error(errorMsg);
           throw new RuntimeException(errorMsg);
         }
@@ -4109,8 +4099,8 @@ public class PinotHelixResourceManager {
         }
       });
     } catch (Exception e) {
-      String errorMsg = String.format("Failed to update the segment lineage 
during endReplaceSegments. "
-          + "(tableName = %s, segmentLineageEntryId = %s)", tableNameWithType, 
segmentLineageEntryId);
+      String errorMsg = "Failed to update the segment lineage during 
endReplaceSegments. (tableName = "
+          + tableNameWithType + ", segmentLineageEntryId = " + 
segmentLineageEntryId + ")";
       LOGGER.error(errorMsg, e);
       throw new RuntimeException(errorMsg, e);
     }
@@ -4253,8 +4243,8 @@ public class PinotHelixResourceManager {
         }
       });
     } catch (Exception e) {
-      String errorMsg = String.format("Failed to update the segment lineage 
during revertReplaceSegments. "
-          + "(tableName = %s, segmentLineageEntryId = %s)", tableNameWithType, 
segmentLineageEntryId);
+      String errorMsg = "Failed to update the segment lineage during 
revertReplaceSegments. (tableName = "
+          + tableNameWithType + ", segmentLineageEntryId = " + 
segmentLineageEntryId + ")";
       LOGGER.error(errorMsg, e);
       throw new RuntimeException(errorMsg, e);
     }
@@ -4290,9 +4280,8 @@ public class PinotHelixResourceManager {
 
        // If the lineage entry doesn't match with the previously fetched lineage, we need to fail the request.
        if (!currentLineageEntry.equals(lineageEntryToMatch)) {
-          String errorMsg = String.format(
-              "Aborting the to update lineage entry since we find that the entry has been modified for table %s, "
-                  + "entry id: %s", tableConfig.getTableName(), lineageEntryId);
+          String errorMsg = "Aborting the to update lineage entry since we find that the entry has been modified for "
+              + "table " + tableConfig.getTableName() + ", entry id: " + lineageEntryId;
           LOGGER.error(errorMsg);
           throw new RuntimeException(errorMsg);
         }
@@ -4309,8 +4298,8 @@ public class PinotHelixResourceManager {
                 segmentLineageToUpdate);
             break;
           default:
-            String errorMsg = String.format("Aborting the lineage entry update 
with type: %s, as the allowed update"
-                + "types in this method are END and REVERT", 
lineageUpdateType);
+            String errorMsg = "Aborting the lineage entry update with type: " 
+ lineageUpdateType + ", as the allowed "
+                + "update types in this method are END and REVERT";
             throw new IllegalStateException(errorMsg);
         }
 
@@ -4364,7 +4353,7 @@ public class PinotHelixResourceManager {
  public Set<String> getOnlineSegmentsFromExternalView(String tableNameWithType) {
    ExternalView externalView = getTableExternalView(tableNameWithType);
    if (externalView == null) {
-      LOGGER.warn(String.format("External view is null for table (%s)", tableNameWithType));
+      LOGGER.warn("External view is null for table (" + tableNameWithType + ")");
      return Collections.emptySet();
    }
    Map<String, Map<String, String>> segmentAssignment = externalView.getRecord().getMapFields();
@@ -4501,7 +4490,7 @@ public class PinotHelixResourceManager {
    TableType inputTableType = TableNameBuilder.getTableTypeFromTableName(tableName);
    if (inputTableType != null) {
      if (!hasTable(tableName)) {
-        throw new TableNotFoundException(String.format("Table=%s not found", tableName));
+        throw new TableNotFoundException("Table=" + tableName + " not found");
       }
       return getLiveBrokersForTable(ev, tableName);
     }
@@ -4510,7 +4499,7 @@ public class PinotHelixResourceManager {
     boolean hasOfflineTable = hasTable(offlineTableName);
     boolean hasRealtimeTable = hasTable(realtimeTableName);
     if (!hasOfflineTable && !hasRealtimeTable) {
-      throw new TableNotFoundException(String.format("Table=%s not found", tableName));
+      throw new TableNotFoundException("Table=" + tableName + " not found");
    }
    if (hasOfflineTable && hasRealtimeTable) {
      Set<String> offlineTables = new HashSet<>(getLiveBrokersForTable(ev, offlineTableName));
diff --git a/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterIntegrationTestUtils.java b/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterIntegrationTestUtils.java
index a54fcd2ba0..2a73c93994 100644
--- a/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterIntegrationTestUtils.java
+++ b/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterIntegrationTestUtils.java
@@ -137,8 +137,7 @@ public class ClusterIntegrationTestUtils {
              h2FieldNameAndTypes.add(buildH2FieldNameAndType(fieldName, type, true));
              break;
            }
-            Assert.fail(
-                String.format("Unsupported UNION Avro field: %s with underlying types: %s", fieldName, typesInUnion));
+            Assert.fail("Unsupported UNION Avro field: " + fieldName + " with underlying types: " + typesInUnion);
             break;
           case ARRAY:
             Schema.Type type = field.schema().getElementType().getType();
@@ -150,21 +149,21 @@ public class ClusterIntegrationTestUtils {
             if (isSingleValueAvroFieldType(fieldType)) {
              h2FieldNameAndTypes.add(buildH2FieldNameAndType(fieldName, fieldType, false));
            } else {
-              Assert.fail(String.format("Unsupported Avro field: %s with underlying types: %s", fieldName, fieldType));
+              Assert.fail("Unsupported Avro field: " + fieldName + " with underlying types: " + fieldType);
             }
             break;
         }
       }
 
-      h2Connection.prepareCall(String.format("DROP TABLE IF EXISTS %s", 
tableName)).execute();
+      h2Connection.prepareCall("DROP TABLE IF EXISTS " + tableName).execute();
       String columnsStr = StringUtil.join(",", h2FieldNameAndTypes.toArray(new 
String[0]));
-      h2Connection.prepareCall(String.format("CREATE TABLE %s (%s)", 
tableName, columnsStr)).execute();
+      h2Connection.prepareCall("CREATE TABLE " + tableName + " (" + columnsStr 
+ ")").execute();
     }
 
     // Insert Avro records into H2 table
     String params = "?" + StringUtils.repeat(",?", h2FieldNameAndTypes.size() 
- 1);
     PreparedStatement h2Statement =
-        h2Connection.prepareStatement(String.format("INSERT INTO %s VALUES 
(%s)", tableName, params));
+        h2Connection.prepareStatement("INSERT INTO " + tableName + " VALUES (" 
+ params + ")");
     for (File avroFile : avroFiles) {
      try (DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile)) {
         for (GenericRecord record : reader) {
@@ -215,7 +214,7 @@ public class ClusterIntegrationTestUtils {
       Assert.assertTrue(isSingleValueAvroFieldType(type1));
       return type1;
     }
-    Assert.fail(String.format("Unsupported UNION Avro field with underlying 
types: %s, %s", type1, type2));
+    Assert.fail("Unsupported UNION Avro field with underlying types: " + type1 
+ ", " + type2);
     return null;
   }
 
@@ -269,12 +268,12 @@ public class ClusterIntegrationTestUtils {
     }
     // if column is array data type, add Array with size.
     if (arrayType) {
-      h2FieldType = String.format("%s  ARRAY[%d]", h2FieldType, MAX_NUM_ELEMENTS_IN_MULTI_VALUE_TO_COMPARE);
+      h2FieldType = h2FieldType + "  ARRAY[" + MAX_NUM_ELEMENTS_IN_MULTI_VALUE_TO_COMPARE + "]";
     }
     if (nullable) {
-      return String.format("`%s` %s", fieldName, h2FieldType);
+      return "`" + fieldName + "` " + h2FieldType;
     } else {
-      return String.format("`%s` %s not null", fieldName, h2FieldType);
+      return "`" + fieldName + "` " + h2FieldType + " not null";
     }
   }
 
@@ -1006,16 +1005,15 @@ public class ClusterIntegrationTestUtils {
         String actualOrderByValue = actualOrderByValueBuilder.toString();
        // Check actual value in expected values set, skip comparison if query response is truncated by limit
        if ((!isLimitSet || limit > h2NumRows) && !expectedValues.contains(actualValue)) {
-          throw new RuntimeException(String.format(
-              "Selection result differ in Pinot from H2: Pinot row: [ %s ] not found in H2 result set: [%s].",
-              actualValue, expectedValues));
+          throw new RuntimeException("Selection result differ in Pinot from H2: Pinot row: [ " + actualValue
+              + " ] not found in H2 result set: [" + expectedValues + "].");
         }
         if (!orderByColumns.isEmpty()) {
          // Check actual group value is the same as expected group value in the same order.
          if (!expectedOrderByValues.get(rowIndex).equals(actualOrderByValue)) {
-            throw new RuntimeException(String.format(
-                "Selection Order by result at row index: %d in Pinot: [ %s ] is different than result in H2: [ %s ].",
-                rowIndex, actualOrderByValue, expectedOrderByValues.get(rowIndex)));
+            throw new RuntimeException("Selection Order by result at row index: " + rowIndex + " in Pinot: [ "
+                + actualOrderByValue + " ] is different than result in H2: [ " + expectedOrderByValues.get(rowIndex)
+                + " ].");
           }
         }
       }
diff --git a/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterTest.java b/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterTest.java
index 85b1383aaa..c244cb02c2 100644
--- a/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterTest.java
+++ b/pinot-integration-test-base/src/test/java/org/apache/pinot/integration/tests/ClusterTest.java
@@ -448,8 +448,8 @@ public abstract class ClusterTest extends ControllerTest {
       FileUploadDownloadClient fileUploadDownloadClient, File segmentTarFile)
       throws IOException, HttpErrorStatusException {
    List<Header> headers = List.of(new BasicHeader(FileUploadDownloadClient.CustomHeaders.DOWNLOAD_URI,
-            String.format("file://%s/%s", segmentTarFile.getParentFile().getAbsolutePath(),
-                URIUtils.encode(segmentTarFile.getName()))),
+            "file://" + segmentTarFile.getParentFile().getAbsolutePath() + "/"
+                + URIUtils.encode(segmentTarFile.getName())),
         new BasicHeader(FileUploadDownloadClient.CustomHeaders.UPLOAD_TYPE,
             FileUploadDownloadClient.FileUploadType.METADATA.toString()));
     // Add table name and table type as request parameters
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/MultiStageEngineIntegrationTest.java b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/MultiStageEngineIntegrationTest.java
index 488032e3af..85cef315c0 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/MultiStageEngineIntegrationTest.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/MultiStageEngineIntegrationTest.java
@@ -261,7 +261,7 @@ public class MultiStageEngineIntegrationTest extends BaseClusterIntegrationTestS
    Assert.assertEquals(numericResultFunctions.length, expectedNumericResults.length);
 
     for (int i = 0; i < numericResultFunctions.length; i++) {
-      String pinotQuery = String.format("SELECT %s(DaysSinceEpoch) FROM mytable", numericResultFunctions[i]);
+      String pinotQuery = "SELECT " + numericResultFunctions[i] + "(DaysSinceEpoch) FROM mytable";
      JsonNode jsonNode = postQuery(pinotQuery);
      Assert.assertEquals(jsonNode.get("resultTable").get("rows").get(0).get(0).asDouble(), expectedNumericResults[i]);
     }
@@ -274,7 +274,7 @@ public class MultiStageEngineIntegrationTest extends BaseClusterIntegrationTestS
         3904
     };
     for (int i = 0; i < binaryResultFunctions.length; i++) {
-      String pinotQuery = String.format("SELECT %s(DaysSinceEpoch) FROM mytable", binaryResultFunctions[i]);
+      String pinotQuery = "SELECT " + binaryResultFunctions[i] + "(DaysSinceEpoch) FROM mytable";
      JsonNode jsonNode = postQuery(pinotQuery);
      Assert.assertEquals(jsonNode.get("resultTable").get("rows").get(0).get(0).asText().length(),
           expectedBinarySizeResults[i]);
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/OfflineClusterIntegrationTest.java b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/OfflineClusterIntegrationTest.java
index 7b8e4168a7..8c3552ffe8 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/OfflineClusterIntegrationTest.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/OfflineClusterIntegrationTest.java
@@ -544,8 +544,8 @@ public class OfflineClusterIntegrationTest extends BaseClusterIntegrationTestSet
       }
       Thread.sleep(EXTERNAL_VIEW_CHECK_INTERVAL_MS);
     } while (System.currentTimeMillis() < endTimeMs);
-    throw new TimeoutException(
-        String.format("Time out while waiting segments become ONLINE. (tableNameWithType = %s)", tableNameWithType));
+    throw new TimeoutException("Time out while waiting segments become ONLINE. (tableNameWithType = "
+        + tableNameWithType + ")");
   }
 
   @Test(dependsOnMethods = "testRangeIndexTriggering")
@@ -596,8 +596,8 @@ public class OfflineClusterIntegrationTest extends BaseClusterIntegrationTestSet
    // with only one segment being reloaded with force download and dropping the inverted index.
     long tableSizeAfterReloadSegment = getTableSize(getTableName());
    assertTrue(tableSizeAfterReloadSegment > _tableSize && tableSizeAfterReloadSegment < tableSizeWithNewIndex,
-        String.format("Table size: %d should be between %d and %d after dropping inverted index from segment: %s",
-            tableSizeAfterReloadSegment, _tableSize, tableSizeWithNewIndex, segmentName));
+        "Table size: " + tableSizeAfterReloadSegment + " should be between " + _tableSize + " and "
+            + tableSizeWithNewIndex + " after dropping inverted index from segment: " + segmentName);
 
    // Add inverted index back to check if reloading whole table with force download works.
    // Note that because we have force downloaded a segment above, it's important to reset the table state by adding
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/TlsIntegrationTest.java b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/TlsIntegrationTest.java
index eceafc732c..a5239a29e5 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/TlsIntegrationTest.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/TlsIntegrationTest.java
@@ -608,7 +608,7 @@ public class TlsIntegrationTest extends BaseClusterIntegrationTest {
    HttpPost request = new HttpPost("https://localhost:" + port + "/query/sql");
     request.addHeader(CLIENT_HEADER);
     request.setEntity(
-        new StringEntity(String.format("{\"sql\":\"%s\", \"queryOptions\": \"useMultistageEngine=true\"}", query)));
+        new StringEntity("{\"sql\":\"" + query + "\", \"queryOptions\": \"useMultistageEngine=true\"}"));
     return request;
   }
 
diff --git a/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-2.4/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark/SparkSegmentGenerationJobRunner.java b/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-2.4/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark/SparkSegmentGenerationJobRunner.java
index edcd13e3a6..73581206d7 100644
--- a/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-2.4/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark/SparkSegmentGenerationJobRunner.java
+++ b/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-2.4/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark/SparkSegmentGenerationJobRunner.java
@@ -187,12 +187,12 @@ public class SparkSegmentGenerationJobRunner implements IngestionJobRunner, Seri
           List<String> siblingFiles = localDirIndex.get(parentPath);
           Collections.sort(siblingFiles);
           for (int i = 0; i < siblingFiles.size(); i++) {
-            pathAndIdxList.add(String.format("%s %d", siblingFiles.get(i), i));
+            pathAndIdxList.add(siblingFiles.get(i) + " " + i);
           }
         }
       } else {
         for (int i = 0; i < filteredFiles.size(); i++) {
-          pathAndIdxList.add(String.format("%s %d", filteredFiles.get(i), i));
+          pathAndIdxList.add(filteredFiles.get(i) + " " + i);
         }
       }
       int numDataFiles = pathAndIdxList.size();
diff --git a/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-3/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark3/SparkSegmentGenerationJobRunner.java b/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-3/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark3/SparkSegmentGenerationJobRunner.java
index c3ecdb3326..a40bbf652e 100644
--- a/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-3/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark3/SparkSegmentGenerationJobRunner.java
+++ b/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-spark-3/src/main/java/org/apache/pinot/plugin/ingestion/batch/spark3/SparkSegmentGenerationJobRunner.java
@@ -163,9 +163,8 @@ public class SparkSegmentGenerationJobRunner implements IngestionJobRunner, Seri
         stagingDirURI = new File(stagingDir).toURI();
       }
       if (!outputDirURI.getScheme().equals(stagingDirURI.getScheme())) {
-        throw new RuntimeException(
-            String.format("The scheme of staging directory URI [%s] and output directory URI [%s] has to be same.",
-                stagingDirURI, outputDirURI));
+        throw new RuntimeException("The scheme of staging directory URI [" + stagingDirURI + "] and output directory "
+            + "URI [" + outputDirURI + "] has to be same.");
       }
       outputDirFS.mkdir(stagingDirURI);
     }
@@ -198,12 +197,12 @@ public class SparkSegmentGenerationJobRunner implements IngestionJobRunner, Seri
           List<String> siblingFiles = localDirIndex.get(parentPath);
           Collections.sort(siblingFiles);
           for (int i = 0; i < siblingFiles.size(); i++) {
-            pathAndIdxList.add(String.format("%s %d", siblingFiles.get(i), i));
+            pathAndIdxList.add(siblingFiles.get(i) + " " + i);
           }
         }
       } else {
         for (int i = 0; i < filteredFiles.size(); i++) {
-          pathAndIdxList.add(String.format("%s %d", filteredFiles.get(i), i));
+          pathAndIdxList.add(filteredFiles.get(i) + " " + i);
         }
       }
       int numDataFiles = pathAndIdxList.size();
diff --git a/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-standalone/src/main/java/org/apache/pinot/plugin/ingestion/batch/standalone/SegmentGenerationJobRunner.java b/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-standalone/src/main/java/org/apache/pinot/plugin/ingestion/batch/standalone/SegmentGenerationJobRunner.java
index 0df6671418..dc6cc6cb73 100644
--- a/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-standalone/src/main/java/org/apache/pinot/plugin/ingestion/batch/standalone/SegmentGenerationJobRunner.java
+++ b/pinot-plugins/pinot-batch-ingestion/pinot-batch-ingestion-standalone/src/main/java/org/apache/pinot/plugin/ingestion/batch/standalone/SegmentGenerationJobRunner.java
@@ -127,7 +127,7 @@ public class SegmentGenerationJobRunner implements IngestionJobRunner {
       if (!_outputDirFS.exists(_outputDirURI)) {
         _outputDirFS.mkdir(_outputDirURI);
       } else if (!_outputDirFS.isDirectory(_outputDirURI)) {
-        throw new RuntimeException(String.format("Output Directory URI: %s is 
not a directory", _outputDirURI));
+        throw new RuntimeException("Output Directory URI: " + _outputDirURI + 
" is not a directory");
       }
     } catch (IOException e) {
       throw new RuntimeException("Failed to validate output 'outputDirURI': " 
+ _outputDirURI, e);
diff --git a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/BaseMultipleSegmentsConversionExecutor.java b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/BaseMultipleSegmentsConversionExecutor.java
index 19d145ce3d..c2d621428b 100644
--- a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/BaseMultipleSegmentsConversionExecutor.java
+++ b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/BaseMultipleSegmentsConversionExecutor.java
@@ -126,9 +126,8 @@ public abstract class BaseMultipleSegmentsConversionExecutor extends BaseTaskExe
        new HashSet<>(Arrays.asList(inputSegmentNames.split(MinionConstants.SEGMENT_NAME_SEPARATOR)));
     nonExistingSegmentNames.removeAll(segmentNamesForTable);
     if (!CollectionUtils.isEmpty(nonExistingSegmentNames)) {
-      throw new RuntimeException(
-          String.format("table: %s does not have the following segments to process: %s", tableNameWithType,
-              nonExistingSegmentNames));
+      throw new RuntimeException("table: " + tableNameWithType + " does not have the following segments to process: "
+          + nonExistingSegmentNames);
     }
   }
 
@@ -202,9 +201,8 @@ public abstract class BaseMultipleSegmentsConversionExecutor extends BaseTaskExe
       for (int i = 0; i < downloadURLs.length; i++) {
         String segmentName = segmentNames[i];
         // Download and decompress the segment file
-        _eventObserver.notifyProgress(_pinotTaskConfig,
-            String.format("Downloading and decompressing segment from: %s (%d out of %d)", downloadURLs[i], (i + 1),
-                downloadURLs.length));
+        _eventObserver.notifyProgress(_pinotTaskConfig, "Downloading and decompressing segment from: " + downloadURLs[i]
+            + " (" + (i + 1) + " out of " + downloadURLs.length + ")");
         File indexDir;
         try {
          indexDir = downloadSegmentToLocalAndUntar(tableNameWithType, segmentName, downloadURLs[i], taskType,
@@ -241,9 +239,8 @@ public abstract class BaseMultipleSegmentsConversionExecutor extends BaseTaskExe
        reportSegmentUploadMetrics(convertedSegmentDir, tableNameWithType, taskType);
 
         // Tar the converted segment
-        _eventObserver.notifyProgress(_pinotTaskConfig,
-            String.format("Compressing segment: %s (%d out of %d)", segmentConversionResult.getSegmentName(), count++,
-                numOutputSegments));
+        _eventObserver.notifyProgress(_pinotTaskConfig, "Compressing segment: "
+            + segmentConversionResult.getSegmentName() + " (" + (count++) + " out of " + numOutputSegments + ")");
         File convertedSegmentTarFile = new File(convertedTarredSegmentDir,
            segmentConversionResult.getSegmentName() + TarCompressionUtils.TAR_GZ_FILE_EXTENSION);
        TarCompressionUtils.createCompressedTarFile(convertedSegmentDir, convertedSegmentTarFile);
@@ -280,8 +277,8 @@ public abstract class BaseMultipleSegmentsConversionExecutor extends BaseTaskExe
         File convertedTarredSegmentFile = tarredSegmentFiles.get(i);
        SegmentConversionResult segmentConversionResult = segmentConversionResults.get(i);
         String resultSegmentName = segmentConversionResult.getSegmentName();
-        _eventObserver.notifyProgress(_pinotTaskConfig,
-            String.format("Uploading segment: %s (%d out of %d)", resultSegmentName, (i + 1), numOutputSegments));
+        _eventObserver.notifyProgress(_pinotTaskConfig, "Uploading segment: " + resultSegmentName + " (" + (i + 1)
+            + " out of " + numOutputSegments + ")");
        String pushMode = taskConfigs.getOrDefault(BatchConfigProperties.PUSH_MODE,
             BatchConfigProperties.SegmentPushType.TAR.name());
         URI outputSegmentTarURI;
@@ -361,8 +358,7 @@ public abstract class BaseMultipleSegmentsConversionExecutor extends BaseTaskExe
       PushJobSpec pushJobSpec) {
     String segmentName = segmentConversionResult.getSegmentName();
    if (!taskConfigs.containsKey(BatchConfigProperties.OUTPUT_SEGMENT_DIR_URI)) {
-      throw new RuntimeException(String.format("Output dir URI missing for metadata push while processing segment: %s",
-          segmentName));
+      throw new RuntimeException("Output dir URI missing for metadata push while processing segment: " + segmentName);
     }
    URI outputSegmentDirURI = URI.create(taskConfigs.get(BatchConfigProperties.OUTPUT_SEGMENT_DIR_URI));
     Map<String, String> localSegmentUriToTarPathMap =
@@ -423,7 +419,7 @@ public abstract class BaseMultipleSegmentsConversionExecutor extends BaseTaskExe
     if (tableType != null) {
      params.add(new BasicNameValuePair(FileUploadDownloadClient.QueryParameters.TABLE_TYPE, tableType.toString()));
     } else {
-      throw new RuntimeException(String.format("Failed to determine the 
tableType from name: %s", tableNameWithType));
+      throw new RuntimeException("Failed to determine the tableType from name: 
" + tableNameWithType);
     }
     return params;
   }
@@ -514,8 +510,8 @@ public abstract class BaseMultipleSegmentsConversionExecutor extends BaseTaskExe
          URI.create(MinionTaskUtils.normalizeDirectoryURI(outputSegmentDirURI) + localSegmentTarFile.getName());
      if (!Boolean.parseBoolean(taskConfigs.get(BatchConfigProperties.OVERWRITE_OUTPUT)) && outputFileFS.exists(
           outputSegmentTarURI)) {
-        throw new RuntimeException(String.format("Output file: %s already 
exists. "
-            + "Set 'overwriteOutput' to true to ignore this error", 
outputSegmentTarURI));
+        throw new RuntimeException("Output file: " + outputSegmentTarURI + " 
already exists. Set 'overwriteOutput' to "
+            + "true to ignore this error");
       } else {
        outputFileFS.copyFromLocalFile(localSegmentTarFile, outputSegmentTarURI);
       }
diff --git a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/MinionTaskUtils.java b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/MinionTaskUtils.java
index 5e41720cde..e827e25e97 100644
--- a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/MinionTaskUtils.java
+++ b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/MinionTaskUtils.java
@@ -215,9 +215,8 @@ public class MinionTaskUtils {
            serverSegmentMetadataReader.getValidDocIdsBitmapFromServer(tableNameWithType, segmentName, endpoint,
                 validDocIdsType, 60_000);
       } catch (Exception e) {
-        LOGGER.warn(
-            String.format("Unable to retrieve validDocIds bitmap for segment: %s from endpoint: %s", segmentName,
-                endpoint), e);
+        LOGGER.warn("Unable to retrieve validDocIds bitmap for segment: " + segmentName + " from endpoint: "
+            + endpoint, e);
         continue;
       }
 
@@ -231,9 +230,9 @@ public class MinionTaskUtils {
       if (!expectedCrc.equals(crcFromValidDocIdsBitmap)) {
        // In this scenario, we are hitting the other replica of the segment which did not commit to ZK or deepstore.
        // We will skip processing this bitmap to query other server to confirm if there is a valid matching CRC.
-        String message = String.format("CRC mismatch for segment: %s, expected value based on task generator: %s, "
-                + "actual crc from validDocIdsBitmapResponse from endpoint %s: %s", segmentName, expectedCrc, endpoint,
-            crcFromValidDocIdsBitmap);
+        String message = "CRC mismatch for segment: " + segmentName + ", expected value based on task generator: "
+            + expectedCrc + ", actual crc from validDocIdsBitmapResponse from endpoint " + endpoint + ": "
+            + crcFromValidDocIdsBitmap;
         LOGGER.warn(message);
         continue;
       }
diff --git a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskExecutor.java b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskExecutor.java
index 43c75b5ea7..19880be6e1 100644
--- a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskExecutor.java
+++ b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskExecutor.java
@@ -106,8 +106,8 @@ public class MergeRollupTaskExecutor extends BaseMultipleSegmentsConversionExecu
     List<RecordReader> recordReaders = new ArrayList<>(numInputSegments);
     int count = 1;
     for (File segmentDir : segmentDirs) {
-      _eventObserver.notifyProgress(_pinotTaskConfig,
-          String.format("Creating RecordReader for: %s (%d out of %d)", segmentDir, count++, numInputSegments));
+      _eventObserver.notifyProgress(_pinotTaskConfig, "Creating RecordReader for: " + segmentDir + " (" + (count++)
+          + " out of " + numInputSegments + ")");
       PinotSegmentRecordReader recordReader = new PinotSegmentRecordReader();
      // NOTE: Do not fill null field with default value to be consistent with other record readers
       recordReader.init(segmentDir, null, null, true);
diff --git a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskGenerator.java b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskGenerator.java
index 15afbdfe4f..10dcfcbf66 100644
--- a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskGenerator.java
+++ b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/mergerollup/MergeRollupTaskGenerator.java
@@ -496,8 +496,8 @@ public class MergeRollupTaskGenerator extends BaseTaskGenerator {
    // check no mis-configured columns when erasing dimensions
    Set<String> dimensionsToErase = MergeRollupTaskUtils.getDimensionsToErase(taskConfigs);
     for (String dimension : dimensionsToErase) {
-      Preconditions.checkState(columnNames.contains(dimension),
-          String.format("Column dimension to erase \"%s\" not found in schema!", dimension));
+      Preconditions.checkState(columnNames.contains(dimension), "Column dimension to erase \"" + dimension
+          + "\" not found in schema!");
     }
     // check no mis-configured aggregation function parameters
    Set<String> allowedFunctionParameterNames = ImmutableSet.of(Constants.CPCSKETCH_LGK_KEY.toLowerCase(),
@@ -507,8 +507,8 @@ public class MergeRollupTaskGenerator extends BaseTaskGenerator {
         MergeRollupTaskUtils.getAggregationFunctionParameters(taskConfigs);
     for (String fieldName : aggregationFunctionParameters.keySet()) {
       // check that function parameter field name exists
-      Preconditions.checkState(columnNames.contains(fieldName),
-          String.format("Metric column \"%s\" for aggregation function parameter not found in schema!", fieldName));
+      Preconditions.checkState(columnNames.contains(fieldName), "Metric column \"" + fieldName + "\" for aggregation "
+          + "function parameter not found in schema!");
      Map<String, String> functionParameters = aggregationFunctionParameters.get(fieldName);
       for (String functionParameterName : functionParameters.keySet()) {
         // check that function parameter name is valid
@@ -518,8 +518,8 @@ public class MergeRollupTaskGenerator extends BaseTaskGenerator {
        if (functionParameterName.equalsIgnoreCase(Constants.CPCSKETCH_LGK_KEY)
            || functionParameterName.equalsIgnoreCase(Constants.THETA_TUPLE_SKETCH_NOMINAL_ENTRIES)) {
           String value = functionParameters.get(functionParameterName);
-          String err = String.format("Aggregation function parameter \"%s\" on column \"%s\" has invalid value: %s",
-              functionParameterName, fieldName, value);
+          String err = "Aggregation function parameter \"" + functionParameterName + "\" on column \"" + fieldName
+              + "\" has invalid value: " + value;
           try {
             Preconditions.checkState(Integer.parseInt(value) > 0, err);
           } catch (NumberFormatException e) {
@@ -529,8 +529,8 @@ public class MergeRollupTaskGenerator extends BaseTaskGenerator {
        // check that function parameter value is valid for sampling probability
        if (functionParameterName.equalsIgnoreCase(Constants.THETA_TUPLE_SKETCH_SAMPLING_PROBABILITY)) {
           String value = functionParameters.get(functionParameterName);
-          String err = String.format("Aggregation function parameter \"%s\" on column \"%s\" has invalid value: %s",
-              functionParameterName, fieldName, value);
+          String err = "Aggregation function parameter \"" + functionParameterName + "\" on column \"" + fieldName
+              + "\" has invalid value: " + value;
           try {
             float p = Float.parseFloat(value);
             Preconditions.checkState(p >= 0.0f && p <= 1.0f, err);
diff --git a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/upsertcompaction/UpsertCompactionTaskExecutor.java b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/upsertcompaction/UpsertCompactionTaskExecutor.java
index e5469a22ae..e7714e50d5 100644
--- a/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/upsertcompaction/UpsertCompactionTaskExecutor.java
+++ b/pinot-plugins/pinot-minion-tasks/pinot-minion-builtin-tasks/src/main/java/org/apache/pinot/plugin/minion/tasks/upsertcompaction/UpsertCompactionTaskExecutor.java
@@ -66,9 +66,9 @@ public class UpsertCompactionTaskExecutor extends BaseSingleSegmentConversionExe
    boolean ignoreCrcMismatch = Boolean.parseBoolean(configs.getOrDefault(UpsertCompactionTask.IGNORE_CRC_MISMATCH_KEY,
        String.valueOf(UpsertCompactionTask.DEFAULT_IGNORE_CRC_MISMATCH)));
    if (!ignoreCrcMismatch && !originalSegmentCrcFromTaskGenerator.equals(crcFromDeepStorageSegment)) {
-      String message = String.format("Crc mismatched between ZK and deepstore 
copy of segment: %s. Expected crc "
-              + "from ZK: %s, crc from deepstore: %s", segmentName, 
originalSegmentCrcFromTaskGenerator,
-          crcFromDeepStorageSegment);
+      String message = "Crc mismatched between ZK and deepstore copy of 
segment: " + segmentName
+          + ". Expected crc from ZK: " + originalSegmentCrcFromTaskGenerator + 
", crc from deepstore: "
+          + crcFromDeepStorageSegment;
       LOGGER.error(message);
       throw new IllegalStateException(message);
     }
@@ -78,9 +78,8 @@ public class UpsertCompactionTaskExecutor extends BaseSingleSegmentConversionExe
     if (validDocIds == null) {
       // no valid crc match found or no validDocIds obtained from all servers
      // error out the task instead of silently failing so that we can track it via task-error metrics
-      String message = String.format("No validDocIds found from all servers. They either failed to download "
-              + "or did not match crc from segment copy obtained from deepstore / servers. " + "Expected crc: %s",
-          originalSegmentCrcFromTaskGenerator);
+      String message = "No validDocIds found from all servers. They either failed to download or did not match crc from"
+          + " segment copy obtained from deepstore / servers. Expected crc: " + originalSegmentCrcFromTaskGenerator;
       LOGGER.error(message);
       throw new IllegalStateException(message);
     }
diff --git a/pinot-plugins/pinot-segment-writer/pinot-segment-writer-file-based/src/main/java/org/apache/pinot/plugin/segmentwriter/filebased/FileBasedSegmentWriter.java b/pinot-plugins/pinot-segment-writer/pinot-segment-writer-file-based/src/main/java/org/apache/pinot/plugin/segmentwriter/filebased/FileBasedSegmentWriter.java
index 3601e76107..ab3de86aaa 100644
--- a/pinot-plugins/pinot-segment-writer/pinot-segment-writer-file-based/src/main/java/org/apache/pinot/plugin/segmentwriter/filebased/FileBasedSegmentWriter.java
+++ b/pinot-plugins/pinot-segment-writer/pinot-segment-writer-file-based/src/main/java/org/apache/pinot/plugin/segmentwriter/filebased/FileBasedSegmentWriter.java
@@ -119,8 +119,8 @@ public class FileBasedSegmentWriter implements SegmentWriter {
     _reusableRecord = new GenericData.Record(_avroSchema);
 
     // Create tmp dir
-    _stagingDir = new File(FileUtils.getTempDirectory(),
-        String.format("segment_writer_staging_%s_%d", _tableNameWithType, System.currentTimeMillis()));
+    _stagingDir = new File(FileUtils.getTempDirectory(), "segment_writer_staging_" + _tableNameWithType + "_"
+        + System.currentTimeMillis());
    Preconditions.checkState(_stagingDir.mkdirs(), "Failed to create staging dir: %s", _stagingDir.getAbsolutePath());
 
     // Create buffer file
@@ -198,14 +198,14 @@ public class FileBasedSegmentWriter implements SegmentWriter {
      File segmentTarFile = new File(_outputDirURI, segmentName + Constants.TAR_GZ_FILE_EXT);
       if (segmentTarFile.exists()) {
         if (!_batchConfig.isOverwriteOutput()) {
-          throw new IllegalArgumentException(String.format("Duplicate segment name generated '%s' in '%s', please "
-              + "adjust segment name generator config to avoid duplicates, or allow batch config overwrite",
-              segmentName, _outputDirURI));
+          throw new IllegalArgumentException("Duplicate segment name generated '" + segmentName + "' in '"
+              + _outputDirURI + "', please adjust segment name generator config to avoid duplicates, or allow batch "
+              + "config overwrite");
         } else {
-          LOGGER.warn(String.format("Duplicate segment name detected '%s' in 
file '%s', deleting old segment",
-              segmentName, segmentDir));
+          LOGGER.warn("Duplicate segment name detected '" + segmentName + "' 
in file '" + segmentDir + "', deleting "
+              + "old segment");
           if (segmentTarFile.delete()) {
-            LOGGER.warn(String.format("Segment file deleted: '%s/%s'", 
_outputDirURI, segmentName));
+            LOGGER.warn("Segment file deleted: '" + _outputDirURI + "/" + 
segmentName + "'");
           }
         }
       }
diff --git a/pinot-tools/src/main/java/org/apache/pinot/tools/HybridQuickstart.java b/pinot-tools/src/main/java/org/apache/pinot/tools/HybridQuickstart.java
index 2fb859e25b..c14845ff29 100644
--- a/pinot-tools/src/main/java/org/apache/pinot/tools/HybridQuickstart.java
+++ b/pinot-tools/src/main/java/org/apache/pinot/tools/HybridQuickstart.java
@@ -141,8 +141,7 @@ public class HybridQuickstart extends Quickstart {
    printStatus(Color.YELLOW, "***** Realtime quickstart setup complete *****");
     runSampleQueries(runner);
 
-    printStatus(Color.GREEN,
-        String.format("You can always go to http://localhost:%d to play around in the query console",
-            QuickstartRunner.DEFAULT_CONTROLLER_PORT));
+    printStatus(Color.GREEN, "You can always go to http://localhost:" + QuickstartRunner.DEFAULT_CONTROLLER_PORT
+        + " to play around in the query console");
   }
 }
diff --git a/pinot-tools/src/main/java/org/apache/pinot/tools/perf/PerfBenchmarkDriver.java b/pinot-tools/src/main/java/org/apache/pinot/tools/perf/PerfBenchmarkDriver.java
index 4328cec746..526b500854 100644
--- a/pinot-tools/src/main/java/org/apache/pinot/tools/perf/PerfBenchmarkDriver.java
+++ b/pinot-tools/src/main/java/org/apache/pinot/tools/perf/PerfBenchmarkDriver.java
@@ -321,8 +321,7 @@ public class PerfBenchmarkDriver {
       helixManager.connect();
       return helixManager;
     } catch (Exception e) {
-      String errorMsg =
-          String.format("Exception when connecting the instance %s as Spectator role to Helix.", instanceId);
+      String errorMsg = "Exception when connecting the instance " + instanceId + " as Spectator role to Helix.";
       LOGGER.error(errorMsg, e);
       throw new RuntimeException(errorMsg);
     }

