This is an automated email from the ASF dual-hosted git repository.

mcvsubbu pushed a commit to branch add-config-file-support
in repository https://gitbox.apache.org/repos/asf/incubator-pinot.git

commit 6eba9df5b3271aee885ddf7bac6fb32b55e164a7
Author: Subbu Subramaniam <ssubr...@linkedin.com>
AuthorDate: Fri Jun 18 15:33:10 2021 -0700

    Adding support for config files in compat tests
    
    Pinot installation administrators can now specify configuration files to
    run the tests as per their environment.
    
    Changed the Java files to read the ports declared in the config files.
    Cleaned up the scripts to use variables for ports.
    Updated the scripts to compile with Java 8, so that the tests can be run
    on desktops with a single Java environment for both the older and newer
    releases.
---
 compatibility-verifier/checkoutAndBuild.sh         |  4 +-
 compatibility-verifier/compCheck.sh                | 76 +++++++++++++++++-----
 .../config/BrokerConfig.properties                 | 22 +++++++
 .../config/ControllerConfig.properties             | 24 +++++++
 .../config/ServerConfig.properties                 | 25 +++++++
 .../pinot/compat/tests/ClusterDescriptor.java      | 58 ++++++++++++++---
 .../pinot/compat/tests/CompatibilityOpsRunner.java |  4 ++
 .../org/apache/pinot/compat/tests/QueryOp.java     |  2 +-
 .../org/apache/pinot/compat/tests/SegmentOp.java   | 10 +--
 .../org/apache/pinot/compat/tests/StreamOp.java    |  6 +-
 .../org/apache/pinot/compat/tests/TableOp.java     |  6 +-
 11 files changed, 196 insertions(+), 41 deletions(-)
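
For illustration only (not part of the commit itself): the override flow is that compCheck.sh greps a "key = value" pair out of the admin-supplied properties file and re-exports the value as a JVM system property, which the compat test runner then reads back via System.getProperty(). A minimal sketch of that flow, using a hypothetical config file path and a placeholder port value:

    # Hypothetical config file; the path and the port 9001 are placeholders,
    # not defaults shipped with this commit.
    $ cat /tmp/myControllerConfig.properties
    controller.port = 9001

    # Same extraction pattern as setupControllerVariables() in compCheck.sh:
    # "controller.port = 9001" splits into three awk fields, so $3 is the port.
    $ grep -F controller.port /tmp/myControllerConfig.properties | awk '{print $3}'
    9001

    # compCheck.sh exports the value as a system property for the Java runner;
    # CompatibilityOpsRunner reads it with System.getProperty("ControllerPort").
    $ export JAVA_OPTS="-DControllerPort=9001"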

diff --git a/compatibility-verifier/checkoutAndBuild.sh b/compatibility-verifier/checkoutAndBuild.sh
index 36d9edf..47eb4ae 100755
--- a/compatibility-verifier/checkoutAndBuild.sh
+++ b/compatibility-verifier/checkoutAndBuild.sh
@@ -50,7 +50,7 @@ function checkoutAndBuild() {
   # Pull the tag list so that we can check out by tag name
   git fetch --tags || exit 1
   git checkout $commitHash || exit 1
-  mvn install package -DskipTests -Pbin-dist || exit 1
+  mvn install package -DskipTests -Pbin-dist -T 4 -Djdk.version=8 || exit 1
   popd || exit 1
   exit 0
 }
@@ -117,7 +117,7 @@ workingDir=$(absPath "$workingDir")
 newTargetDir="$workingDir"/newTargetDir
 if [ -z "$newerCommit" ]; then
   echo "Compiling current tree as newer version"
-  (cd $cmdDir/.. && mvn install package -DskipTests -Pbin-dist && mvn -pl pinot-tools package -DskipTests && mvn -pl pinot-integration-tests package -DskipTests)
+  (cd $cmdDir/.. && mvn install package -DskipTests -Pbin-dist -T 4 -D jdk.version=8 && mvn -pl pinot-tools package -T 4 -DskipTests -Djdk.version=8 && mvn -pl pinot-integration-tests package -T 4 -DskipTests -Djdk.version=8)
   if [ $? -ne 0 ]; then
     echo Compile failed.
     exit 1
diff --git a/compatibility-verifier/compCheck.sh b/compatibility-verifier/compCheck.sh
index 9d6cc97..3f77cdf 100755
--- a/compatibility-verifier/compCheck.sh
+++ b/compatibility-verifier/compCheck.sh
@@ -60,19 +60,18 @@ function waitForZkReady() {
   status=1
   while [ $status -ne 0 ]; do
     sleep 1
-    echo Checking port 2181 for zk ready
-    echo x | nc localhost 2181 1>/dev/null 2>&1
+    echo Checking port ${ZK_PORT} for zk ready
+    echo x | nc localhost ${ZK_PORT} 1>/dev/null 2>&1
     status=$(echo $?)
   done
 }
 
 function waitForControllerReady() {
-  # TODO: Check real controller port if config file is specified.
   status=1
   while [ $status -ne 0 ]; do
     sleep 1
-    echo Checking port 9000 for controller ready
-    curl localhost:9000/health 1>/dev/null 2>&1
+    echo Checking port ${CONTROLLER_PORT} for controller ready
+    curl localhost:${CONTROLLER_PORT}/health 1>/dev/null 2>&1
     status=$(echo $?)
   done
 }
@@ -88,23 +87,21 @@ function waitForKafkaReady() {
 }
 
 function waitForBrokerReady() {
-  # TODO: We are checking for port 8099. Check for real broker port from config if needed.
   local status=1
   while [ $status -ne 0 ]; do
     sleep 1
-    echo Checking port 8099 for broker ready
-    curl localhost:8099/debug/routingTable 1>/dev/null 2>&1
+    echo Checking port ${BROKER_QUERY_PORT} for broker ready
+    curl localhost:${BROKER_QUERY_PORT}/debug/routingTable 1>/dev/null 2>&1
     status=$(echo $?)
   done
 }
 
 function waitForServerReady() {
-  # TODO: We are checking for port 8097. Check for real server port from config if needed,
   local status=1
   while [ $status -ne 0 ]; do
     sleep 1
-    echo Checking port 8097 for server ready
-    curl localhost:8097/health 1>/dev/null 2>&1
+    echo Checking port ${SERVER_ADMIN_PORT} for server ready
+    curl localhost:${SERVER_ADMIN_PORT}/health 1>/dev/null 2>&1
     status=$(echo $?)
   done
 }
@@ -150,7 +147,7 @@ function startService() {
    ./pinot-admin.sh StartServer ${configFileArg} 1>${LOG_DIR}/server.${logCount}.log 2>&1 &
     echo $! >${PID_DIR}/server.pid
   elif [ "$serviceName" = "kafka" ]; then
-    ./pinot-admin.sh StartKafka -zkAddress localhost:2181/kafka 1>${LOG_DIR}/kafka.${logCount}.log 2>&1 &
+    ./pinot-admin.sh StartKafka -zkAddress localhost:${ZK_PORT}/kafka 1>${LOG_DIR}/kafka.${logCount}.log 2>&1 &
     echo $! >${PID_DIR}/kafka.pid
   fi
   # Keep log files distinct so we can debug
@@ -219,6 +216,38 @@ function setupCompatTester() {
   export CLASSPATH_PREFIX
 }
 
+function setupControllerVariables() {
+  if [ -f ${CONTROLLER_CONF} ]; then
+    local port=$(grep -F controller.port ${CONTROLLER_CONF} | awk '{print $3}')
+    if [ ! -z "$port" ]; then
+      CONTROLLER_PORT=$port
+    fi
+  fi
+}
+
+function setupBrokerVariables() {
+  if [ -f ${BROKER_CONF} ]; then
+    local port=$(grep -F pinot.broker.client.queryPort ${BROKER_CONF} | awk '{print $3}')
+    if [ ! -z "$port" ]; then
+      BROKER_QUERY_PORT=$port
+    fi
+  fi
+}
+
+function setupServerVariables() {
+  if [ -f ${SERVER_CONF} ]; then
+    local port
+    port=$(grep -F pinot.server.adminapi.port ${SERVER_CONF} | awk '{print $3}')
+    if [ ! -z "$port" ]; then
+      SERVER_ADMIN_PORT=$port
+    fi
+    port=$(grep -F pinot.server.netty.port ${SERVER_CONF} | awk '{print $3}')
+    if [ ! -z "$port" ]; then
+      SERVER_NETTY_PORT=$port
+    fi
+  fi
+}
+
 #
 # Main
 #
@@ -262,14 +291,27 @@ fi
 
 COMPAT_TESTER_PATH="pinot-integration-tests/target/pinot-integration-tests-pkg/bin/pinot-compat-test-runner.sh"
 
-BROKER_CONF=${testSuiteDir}/config/BrokerConfig.conf
-CONTROLLER_CONF=${testSuiteDir}/config/ControllerConfig.conf
-SERVER_CONF=${testSuiteDir}/config/ServerConfig.conf
+BROKER_CONF=${testSuiteDir}/config/BrokerConfig.properties
+CONTROLLER_CONF=${testSuiteDir}/config/ControllerConfig.properties
+SERVER_CONF=${testSuiteDir}/config/ServerConfig.properties
+
+BROKER_QUERY_PORT=8099
+ZK_PORT=2181
+CONTROLLER_PORT=9000
+SERVER_ADMIN_PORT=8097
+SERVER_NETTY_PORT=8098
+
 PID_DIR=${workingDir}/pids
 LOG_DIR=${workingDir}/logs
 ${RM} -rf ${PID_DIR}
 ${RM} -rf ${LOG_DIR}
 
+setupControllerVariables
+setupBrokerVariables
+setupServerVariables
+
+export JAVA_OPTS="-DControllerPort=${CONTROLLER_PORT} -DBrokerQueryPort=${BROKER_QUERY_PORT} -DServerAdminPort=${SERVER_ADMIN_PORT}"
+
 mkdir ${PID_DIR}
 mkdir ${LOG_DIR}
 
@@ -279,8 +321,8 @@ newTargetDir="$workingDir"/newTargetDir
 setupCompatTester
 
 # check that the default ports are open
-if [ "$(lsof -t -i:8097 -s TCP:LISTEN)" ] || [ "$(lsof -t -i:8098 
-sTCP:LISTEN)" ] || [ "$(lsof -t -i:8099 -sTCP:LISTEN)" ] ||
-  [ "$(lsof -t -i:9000 -sTCP:LISTEN)" ] || [ "$(lsof -t -i:2181 -sTCP:LISTEN)" 
]; then
+if [ "$(lsof -t -i:${SERVER_ADMIN_PORT} -s TCP:LISTEN)" ] || [ "$(lsof -t 
-i:${SERVER_NETTY_PORT} -sTCP:LISTEN)" ] || [ "$(lsof -t 
-i:${BROKER_QUERY_PORT} -sTCP:LISTEN)" ] ||
+  [ "$(lsof -t -i:${CONTROLLER_PORT} -sTCP:LISTEN)" ] || [ "$(lsof -t 
-i:${ZK_PORT} -sTCP:LISTEN)" ]; then
   echo "Cannot start the components since the default ports are not open. 
Check any existing process that may be using the default ports."
   exit 1
 fi
diff --git a/compatibility-verifier/sample-test-suite/config/BrokerConfig.properties b/compatibility-verifier/sample-test-suite/config/BrokerConfig.properties
new file mode 100644
index 0000000..35dc7b4
--- /dev/null
+++ b/compatibility-verifier/sample-test-suite/config/BrokerConfig.properties
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+pinot.broker.client.queryPort = 8099
+pinot.zk.server = localhost:2181
+pinot.cluster.name = PinotCluster
diff --git a/compatibility-verifier/sample-test-suite/config/ControllerConfig.properties b/compatibility-verifier/sample-test-suite/config/ControllerConfig.properties
new file mode 100644
index 0000000..86d14c2
--- /dev/null
+++ b/compatibility-verifier/sample-test-suite/config/ControllerConfig.properties
@@ -0,0 +1,24 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+controller.host = localhost
+controller.port = 9000
+controller.zk.str = localhost:2181
+controller.data.dir = /tmp/PinotController
+controller.helix.cluster.name = PinotCluster
diff --git a/compatibility-verifier/sample-test-suite/config/ServerConfig.properties b/compatibility-verifier/sample-test-suite/config/ServerConfig.properties
new file mode 100644
index 0000000..0bf3a9b
--- /dev/null
+++ b/compatibility-verifier/sample-test-suite/config/ServerConfig.properties
@@ -0,0 +1,25 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+pinot.server.adminapi.port = 8097
+pinot.server.netty.port = 8098
+pinot.zk.server = localhost:2181
+pinot.cluster.name = PinotCluster
+pinot.server.instance.dataDir = /tmp/PinotServer/data
+pinot.server.instance.segmentTarDir = /tmp/PinotServer/segments
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/ClusterDescriptor.java b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/ClusterDescriptor.java
index d927e11..ec0edfe 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/ClusterDescriptor.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/ClusterDescriptor.java
@@ -21,14 +21,52 @@ package org.apache.pinot.compat.tests;
 
 public class ClusterDescriptor {
 
-  public static final String DEFAULT_HOST = "localhost";
-  public static final String ZOOKEEPER_PORT = "2181";
-  public static final String KAFKA_PORT = "19092";
-  public static final String CONTROLLER_PORT = "9000";
-  public static final String BROKER_PORT = "8099";
-
-  public static final String ZOOKEEPER_URL = String.format("http://%s:%s", DEFAULT_HOST, ZOOKEEPER_PORT);
-  public static final String KAFKA_URL = String.format("http://%s:%s", DEFAULT_HOST, KAFKA_PORT);
-  public static final String CONTROLLER_URL = String.format("http://%s:%s", DEFAULT_HOST, CONTROLLER_PORT);
-  public static final String BROKER_URL = String.format("http://%s:%s", DEFAULT_HOST, BROKER_PORT);
+  private static final String DEFAULT_HOST = "localhost";
+  private static final String ZOOKEEPER_PORT = "2181";
+  private static final String KAFKA_PORT = "19092";
+  private static final String ZOOKEEPER_URL = String.format("http://%s:%s", DEFAULT_HOST, ZOOKEEPER_PORT);
+  private static final String KAFKA_URL = String.format("http://%s:%s", DEFAULT_HOST, KAFKA_PORT);
+
+  private static String controllerPort = "9000";
+  private static String brokerQueryPort = "8099";
+  private static String serverAdminPort = "8097";
+
+  public static void setBrokerQueryPort(String port) {
+    if (port != null && !port.isEmpty()) {
+      brokerQueryPort = port;
+    }
+  }
+
+  public static void setControllerPort(String port) {
+    if (port != null && !port.isEmpty()) {
+      controllerPort = port;
+    }
+  }
+
+  public static void setServerAdminPort(String port) {
+    if (port != null && !port.isEmpty()) {
+      serverAdminPort = port;
+    }
+  }
+
+  public static String getControllerUrl() {
+    return String.format("http://%s:%s", DEFAULT_HOST, controllerPort);
+  }
+
+  public static String getBrokerUrl() {
+    return String.format("http://%s:%s", DEFAULT_HOST, brokerQueryPort);
+  }
+
+  public static String getDefaultHost() {
+    return DEFAULT_HOST;
+  }
+
+  public static String getKafkaPort() {
+    return KAFKA_PORT;
+  }
+
+  public static String getServerAdminUrl() {
+    return String.format("http://%s:%s", DEFAULT_HOST, serverAdminPort);
+  }
+
 }
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/CompatibilityOpsRunner.java b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/CompatibilityOpsRunner.java
index e1e5996..9c47aa4 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/CompatibilityOpsRunner.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/CompatibilityOpsRunner.java
@@ -65,6 +65,10 @@ public class CompatibilityOpsRunner {
     if (args.length != 2) {
       throw new IllegalArgumentException("Need exactly one file name and one generation_number as arguments");
     }
+    String port;
+    ClusterDescriptor.setControllerPort(System.getProperty("ControllerPort"));
+    ClusterDescriptor.setBrokerQueryPort(System.getProperty("BrokerQueryPort"));
+    ClusterDescriptor.setServerAdminPort(System.getProperty("ServerAdminPort"));
 
     CompatibilityOpsRunner runner = new CompatibilityOpsRunner(args[0], Integer.valueOf(args[1]));
     int exitStatus = 1;
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/QueryOp.java b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/QueryOp.java
index 178bede..55e1b2a 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/QueryOp.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/QueryOp.java
@@ -123,7 +123,7 @@ public class QueryOp extends BaseOp {
         JsonNode actualJson = null;
         if (expectedJson != null) {
           try {
-            actualJson = ClusterTest.postSqlQuery(query, ClusterDescriptor.BROKER_URL);
+            actualJson = ClusterTest.postSqlQuery(query, ClusterDescriptor.getBrokerUrl());
           } catch (Exception e) {
             LOGGER.error("Comparison FAILED: Line: {} Exception caught while 
running query: '{}'", queryLineNum, query,
                 e);
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/SegmentOp.java b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/SegmentOp.java
index b9fd90c..7deb567 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/SegmentOp.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/SegmentOp.java
@@ -220,7 +220,7 @@ public class SegmentOp extends BaseOp {
    */
   private void uploadSegment(File segmentTarFile)
       throws Exception {
-    URI controllerURI = FileUploadDownloadClient.getUploadSegmentURI(new URI(ClusterDescriptor.CONTROLLER_URL));
+    URI controllerURI = FileUploadDownloadClient.getUploadSegmentURI(new URI(ClusterDescriptor.getControllerUrl()));
     try (FileUploadDownloadClient fileUploadDownloadClient = new FileUploadDownloadClient()) {
       fileUploadDownloadClient.uploadSegment(controllerURI, segmentTarFile.getName(), segmentTarFile, _tableName);
     }
@@ -260,7 +260,7 @@ public class SegmentOp extends BaseOp {
   private boolean verifyRoutingTableUpdated()
       throws Exception {
     String query = "SELECT count(*) FROM " + _tableName;
-    JsonNode result = ClusterTest.postSqlQuery(query, ClusterDescriptor.BROKER_URL);
+    JsonNode result = ClusterTest.postSqlQuery(query, ClusterDescriptor.getBrokerUrl());
     long startTime = System.currentTimeMillis();
     while (SqlResultComparator.isEmpty(result)) {
      if ((System.currentTimeMillis() - startTime) > DEFAULT_MAX_SLEEP_TIME_MS) {
@@ -271,7 +271,7 @@ public class SegmentOp extends BaseOp {
       }
       LOGGER.warn("Routing table has not been updated yet, will retry after {} 
ms.", DEFAULT_SLEEP_INTERVAL_MS);
       Thread.sleep(DEFAULT_SLEEP_INTERVAL_MS);
-      result = ClusterTest.postSqlQuery(query, ClusterDescriptor.BROKER_URL);
+      result = ClusterTest.postSqlQuery(query, ClusterDescriptor.getBrokerUrl());
     }
     LOGGER.info("Routing table has been updated.");
     return true;
@@ -290,7 +290,7 @@ public class SegmentOp extends BaseOp {
         _segmentName = _tableName + "_" + _generationNumber;
       }
 
-      ControllerTest.sendDeleteRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.CONTROLLER_URL)
+      ControllerTest.sendDeleteRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.getControllerUrl())
           .forSegmentDelete(_tableName, _segmentName));
       return verifySegmentDeleted();
     } catch (Exception e) {
@@ -330,7 +330,7 @@ public class SegmentOp extends BaseOp {
   private TableViews.TableView getExternalViewForTable()
       throws IOException {
     return JsonUtils.stringToObject(ControllerTest.sendGetRequest(
-        ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.CONTROLLER_URL).forTableExternalView(_tableName)),
+        ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.getControllerUrl()).forTableExternalView(_tableName)),
         TableViews.TableView.class);
   }
 
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java
index 1a4361a..ad35cc7 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java
@@ -164,7 +164,7 @@ public class StreamOp extends BaseOp {
 
       final Map<String, Object> config = new HashMap<>();
       config.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
-          ClusterDescriptor.DEFAULT_HOST + ":" + ClusterDescriptor.KAFKA_PORT);
+          ClusterDescriptor.getDefaultHost() + ":" + ClusterDescriptor.getKafkaPort());
      config.put(AdminClientConfig.CLIENT_ID_CONFIG, "Kafka2AdminClient-" + UUID.randomUUID().toString());
       config.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 15000);
       AdminClient adminClient = KafkaAdminClient.create(config);
@@ -219,7 +219,7 @@ public class StreamOp extends BaseOp {
       String schemaName = TableNameBuilder.extractRawTableName(tableName);
       String schemaString = ControllerTest.
           sendGetRequest(
-              ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.CONTROLLER_URL).forSchemaGet(schemaName));
+              ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.getControllerUrl()).forSchemaGet(schemaName));
       Schema schema = JsonUtils.stringToObject(schemaString, Schema.class);
       DateTimeFormatSpec dateTimeFormatSpec =
          new DateTimeFormatSpec(schema.getSpecForTimeColumn(timeColumn).getFormat());
@@ -264,7 +264,7 @@ public class StreamOp extends BaseOp {
   private long fetchExistingTotalDocs(String tableName)
       throws Exception {
     String query = "SELECT count(*) FROM " + tableName;
-    JsonNode response = ClusterTest.postSqlQuery(query, ClusterDescriptor.BROKER_URL);
+    JsonNode response = ClusterTest.postSqlQuery(query, ClusterDescriptor.getBrokerUrl());
     if (response == null) {
       String errorMsg = String.format("Failed to query Table: %s", tableName);
       LOGGER.error(errorMsg);
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/TableOp.java b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/TableOp.java
index 4fd20a7..705dff2 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/TableOp.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/TableOp.java
@@ -107,7 +107,7 @@ public class TableOp extends BaseOp {
         put("Content-type", "application/json");
       }};
       ControllerTest
-          .sendPostRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.CONTROLLER_URL).forSchemaCreate(),
+          .sendPostRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.getControllerUrl()).forSchemaCreate(),
              FileUtils.readFileToString(new File(getAbsoluteFileName(_schemaFileName))), headers);
       return true;
     } catch (IOException e) {
@@ -119,7 +119,7 @@ public class TableOp extends BaseOp {
   private boolean createTable() {
     try {
       ControllerTest
-          .sendPostRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.CONTROLLER_URL).forTableCreate(),
+          .sendPostRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.getControllerUrl()).forTableCreate(),
              FileUtils.readFileToString(new File(getAbsoluteFileName(_tableConfigFileName))));
       return true;
     } catch (IOException e) {
@@ -131,7 +131,7 @@ public class TableOp extends BaseOp {
   private boolean deleteTable() {
     try {
      TableConfig tableConfig = JsonUtils.fileToObject(new File(getAbsoluteFileName(_tableConfigFileName)), TableConfig.class);
-      ControllerTest.sendDeleteRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.CONTROLLER_URL)
+      ControllerTest.sendDeleteRequest(ControllerRequestURLBuilder.baseUrl(ClusterDescriptor.getControllerUrl())
           .forTableDelete(tableConfig.getTableName()));
       return true;
     } catch (IOException e) {
