This is an automated email from the ASF dual-hosted git repository. mcvsubbu pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-pinot.git
The following commit(s) were added to refs/heads/master by this push: new dbaac00 Fix errors happening due to parallel builds (#7172) dbaac00 is described below commit dbaac00b6f77161f079f07ff7d337cbde55d2cfd Author: Subbu Subramaniam <mcvsu...@users.noreply.github.com> AuthorDate: Sun Jul 18 10:09:01 2021 -0700 Fix errors happening due to parallel builds (#7172) Maven cache interferes when we build two trees in parallel, causing (for example) pinot-spi of one tree to be linked with that of the other tree (since they are of the same version) Changed the build to use different version ID for old and new builds so that they don't pull in each other's jars. Current build always uses the same ID as the current tree so that any changes made can be tested easily without backing out pom files. A different build ID each time can cause maven cache to explode if we run the compile multiple times, so changed the build to use a dedicated cache that is deleted after builds. Also added code to cleanup the controller and server's data directories before starting compat checker. 
--- compatibility-verifier/checkoutAndBuild.sh | 48 +++++++++++++++++----- compatibility-verifier/compCheck.sh | 21 ++++++++++ .../org/apache/pinot/compat/tests/StreamOp.java | 11 ++++- 3 files changed, 69 insertions(+), 11 deletions(-) diff --git a/compatibility-verifier/checkoutAndBuild.sh b/compatibility-verifier/checkoutAndBuild.sh index 5e67128..b536ff2 100755 --- a/compatibility-verifier/checkoutAndBuild.sh +++ b/compatibility-verifier/checkoutAndBuild.sh @@ -21,6 +21,7 @@ cmdName=`basename $0` cmdDir=`dirname $0` source ${cmdDir}/utils.inc +MVN_CACHE_DIR="mvn-compat-cache" # get usage of the script function usage() { @@ -60,21 +61,42 @@ function checkOut() { # This function builds pinot code and tools necessary to run pinot-admin commands # Optionally, it also builds the integration test jars needed to run the pinot # compatibility tester +# It uses a different version for each build, since we build trees in parallel. +# Building the same version on all trees in parallel causes unstable builds. +# Using independent buildIds will cause maven cache to fill up, so we use a +# dedicated maven cache for these builds, and remove the cache after build is +# completed. +# If buildId is less than 0, then the mvn version is not changed in pom files. 
function build() { local outFile=$1 local buildTests=$2 + local buildId=$3 + local repoOption="" + local versionOption="-Djdk.version=8" - mvn install package -DskipTests -Pbin-dist -D jdk.version=8 ${PINOT_MAVEN_OPTS} 1>${outFile} 2>&1 + if [ ${buildId} -gt 0 ]; then + # Build it in a different env under different version so that maven cache does + # not collide + local pomVersion=$(grep -E "<version>(.*)-SNAPSHOT</version>" pom.xml | cut -d'>' -f2 | cut -d'<' -f1 | cut -d'-' -f1) + mvn versions:set -DnewVersion="${pomVersion}-compat-${buildId}" -q -B 1>${outFile} 2>&1 + mvn versions:commit -q -B 1>${outFile} 2>&1 + repoOption="-Dmaven.repo.local=${mvnCache}/${buildId}" + fi + + mvn install package -DskipTests -Pbin-dist ${versionOption} ${repoOption} ${PINOT_MAVEN_OPTS} 1>${outFile} 2>&1 if [ $? -ne 0 ]; then exit 1; fi - mvn -pl pinot-tools package -DskipTests -Djdk.version=8 ${PINOT_MAVEN_OPTS} 1>>${outFile} 2>&1 + mvn -pl pinot-tools package -DskipTests ${versionOption} ${repoOption} ${PINOT_MAVEN_OPTS} 1>>${outFile} 2>&1 if [ $? -ne 0 ]; then exit 1; fi if [ $buildTests -eq 1 ]; then - mvn -pl pinot-integration-tests package -DskipTests -Djdk.version=8 ${PINOT_MAVEN_OPTS} 1>>${outFile} 2>&1 + mvn -pl pinot-integration-tests package -DskipTests ${versionOption} ${repoOption} ${PINOT_MAVEN_OPTS} 1>>${outFile} 2>&1 if [ $? -ne 0 ]; then exit 1; fi fi - } +# +# Main +# + # get arguments # Args while-loop while [ "$1" != "" ]; do @@ -128,6 +150,8 @@ oldTargetDir="$workingDir"/oldTargetDir newBuildOutFile="${workingDir}/newBuild.out" newTargetDir="$workingDir"/newTargetDir curBuildOutFile="${workingDir}/currentBuild.out" +mvnCache=${workingDir}/${MVN_CACHE_DIR} +mkdir -p ${mvnCache} # Find value of olderCommit hash if [ -z "$olderCommit" -a ! -z "$newerCommit" ]; then @@ -166,21 +190,23 @@ checkOut "$olderCommit" "$oldTargetDir" # Start builds in parallel. # First build the current tree. We need it so that we can # run the compatibiity tester. 
-echo Starting build for compat checker at ${cmdDir} -(cd ${cmdDir}/..; build ${curBuildOutFile} 1) & +echo Starting build for compat checker at ${cmdDir}, buildId none. +(cd ${cmdDir}/..; build ${curBuildOutFile} 1 -1) & curBuildPid=$! # The old commit has been cloned in oldTargetDir, build it. -echo Starting build for old version at ${oldTargetDir} -(cd ${oldTargetDir}; build ${oldBuildOutFile} 0) & +buildId=$(date +%s) +echo Starting build for old version at ${oldTargetDir} buildId ${buildId} +(cd ${oldTargetDir}; build ${oldBuildOutFile} 0 ${buildId}) & oldBuildPid=$! # In case the user specified the current tree as newer commit, then # We don't need to build newer commit tree (we have already built the # current tree above and linked newTargetDir). Otherwise, build the newTargetDir if [ ${buildNewTarget} -eq 1 ]; then - echo Starting build for new version at ${newTargetDir} - (cd ${newTargetDir}; build ${newBuildOutFile} 0) & + buildId=$((buildId+1)) + echo Starting build for new version at ${newTargetDir} buildId ${buildId} + (cd ${newTargetDir}; build ${newBuildOutFile} 0 ${buildId}) & newBuildPid=$! fi @@ -251,4 +277,6 @@ if [ ${buildNewTarget} -eq 1 ]; then fi fi +/bin/rm -r ${mvnCache} + exit 0 diff --git a/compatibility-verifier/compCheck.sh b/compatibility-verifier/compCheck.sh index 3f77cdf..763cbab 100755 --- a/compatibility-verifier/compCheck.sh +++ b/compatibility-verifier/compCheck.sh @@ -45,6 +45,24 @@ logCount=1 cmdName=`basename $0` source `dirname $0`/utils.inc +function cleanupControllerDirs() { + local dirName=$(grep -F controller.data.dir ${CONTROLLER_CONF} | awk '{print $3}') + if [ ! -z "$dirName" ]; then + ${RM} -rf ${dirName} + fi +} + +function cleanupServerDirs() { + local dirName=$(grep -F pinot.server.instance.dataDir ${SERVER_CONF} | awk '{print $3}') + if [ ! -z "$dirName" ]; then + ${RM} -rf ${dirName} + fi + dirName=$(grep -F pinot.server.instance.segmentTarDir ${SERVER_CONF} | awk '{print $3}') + if [ ! 
-z "$dirName" ]; then + ${RM} -rf ${dirName} + fi +} + # get usage of the script function usage() { echo "Usage: $cmdName -w <workingDir> -t <testSuiteDir> [-k]" @@ -295,6 +313,9 @@ BROKER_CONF=${testSuiteDir}/config/BrokerConfig.properties CONTROLLER_CONF=${testSuiteDir}/config/ControllerConfig.properties SERVER_CONF=${testSuiteDir}/config/ServerConfig.properties +cleanupControllerDirs +cleanupServerDirs + BROKER_QUERY_PORT=8099 ZK_PORT=2181 CONTROLLER_PORT=9000 diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java index fb49de3..338ed31 100644 --- a/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java +++ b/pinot-integration-tests/src/test/java/org/apache/pinot/compat/tests/StreamOp.java @@ -36,6 +36,7 @@ import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.admin.KafkaAdminClient; import org.apache.kafka.clients.admin.NewTopic; +import org.apache.pinot.common.exception.QueryException; import org.apache.pinot.controller.helix.ControllerRequestURLBuilder; import org.apache.pinot.controller.helix.ControllerTest; import org.apache.pinot.integration.tests.ClusterTest; @@ -87,6 +88,7 @@ public class StreamOp extends BaseOp { private static final String NUM_PARTITIONS = "numPartitions"; private static final String PARTITION_COLUMN = "partitionColumn"; private static final String EXCEPTIONS = "exceptions"; + private static final String ERROR_CODE = "errorCode"; private static final String NUM_SERVERS_QUERIED = "numServersQueried"; private static final String NUM_SERVERS_RESPONEDED = "numServersResponded"; private static final String TOTAL_DOCS = "totalDocs"; @@ -273,7 +275,14 @@ public class StreamOp extends BaseOp { } if (response.has(EXCEPTIONS) && response.get(EXCEPTIONS).size() > 0) { - String errorMsg = String.format("Failed when 
running query: %s; the response contains exceptions", query); + String errorMsg = String.format("Failed when running query: '%s'; got exceptions:\n%s\n", query, + response.toPrettyString()); + JsonNode exceptions = response.get(EXCEPTIONS); + if (String.valueOf(QueryException.BROKER_INSTANCE_MISSING_ERROR) + .equals(exceptions.get(ERROR_CODE).toString())) { + LOGGER.warn(errorMsg + ".Trying again"); + return 0; + } LOGGER.error(errorMsg); throw new RuntimeException(errorMsg); } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@pinot.apache.org For additional commands, e-mail: commits-h...@pinot.apache.org