jackjlli commented on a change in pull request #7331:
URL: https://github.com/apache/pinot/pull/7331#discussion_r692512894
##########
File path: pinot-controller/src/main/java/org/apache/pinot/controller/api/resources/ControllerFilePathProvider.java
##########
@@ -38,20 +38,20 @@
   private static final String UNTARRED_FILE_TEMP_DIR = "untarredFileTemp";
   private static final String FILE_DOWNLOAD_TEMP_DIR = "fileDownloadTemp";

-  private static ControllerFilePathProvider INSTANCE;
+  private static ControllerFilePathProvider _instance;

   /**
    * NOTE: this should be called only once when starting the controller. We don't check whether INSTANCE is null because

Review comment:
    rename it to `_instance` in the comment?

##########
File path: pinot-controller/src/main/java/org/apache/pinot/controller/recommender/rules/impl/KafkaPartitionRule.java
##########
@@ -35,29 +35,27 @@
  * Divide the messages/sec (total aggregate in the topic) by 250 to get an optimal value of the number of kafka partitions
  */
 public class KafkaPartitionRule extends AbstractRule {
-  private final Logger LOGGER = LoggerFactory.getLogger(KafkaPartitionRule.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPartitionRule.class);

   PartitionRuleParams _params;

   public KafkaPartitionRule(InputManager input, ConfigManager output) {
     super(input, output);
-    this._params = input.getPartitionRuleParams();
+    _params = input.getPartitionRuleParams();
   }

   @Override
   public void run() {
     String tableType = _input.getTableType();
-    if ((tableType.equalsIgnoreCase(HYBRID) || tableType
-        .equalsIgnoreCase(REALTIME))) { //The table is real-time or hybrid
-      if (_input.getNumKafkaPartitions()
-          == DEFAULT_NUM_KAFKA_PARTITIONS) // Recommend NumKafkaPartitions if it is not given
-      {
+    if ((tableType.equalsIgnoreCase(HYBRID) || tableType.equalsIgnoreCase(REALTIME))) {
+      //The table is real-time or hybrid

Review comment:
    nit: put a space after `//`.
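For context on the first comment above: the ask is just to keep the javadoc in sync with the renamed `_instance` field. A minimal sketch of what that might look like; the rest of the original comment is truncated in the hunk, so everything past "because" is left as a placeholder rather than guessed:

```java
// Hypothetical sketch of the suggested doc-only fix in ControllerFilePathProvider (not the actual patch).
private static ControllerFilePathProvider _instance;

/**
 * NOTE: this should be called only once when starting the controller. We don't check whether _instance is null
 * because ... (remainder of the original comment, unchanged)
 */
```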
##########
File path: pinot-controller/src/main/java/org/apache/pinot/controller/recommender/rules/impl/KafkaPartitionRule.java
##########
@@ -35,29 +35,27 @@
  * Divide the messages/sec (total aggregate in the topic) by 250 to get an optimal value of the number of kafka partitions
  */
 public class KafkaPartitionRule extends AbstractRule {
-  private final Logger LOGGER = LoggerFactory.getLogger(KafkaPartitionRule.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPartitionRule.class);

   PartitionRuleParams _params;

   public KafkaPartitionRule(InputManager input, ConfigManager output) {
     super(input, output);
-    this._params = input.getPartitionRuleParams();
+    _params = input.getPartitionRuleParams();
   }

   @Override
   public void run() {
     String tableType = _input.getTableType();
-    if ((tableType.equalsIgnoreCase(HYBRID) || tableType
-        .equalsIgnoreCase(REALTIME))) { //The table is real-time or hybrid
-      if (_input.getNumKafkaPartitions()
-          == DEFAULT_NUM_KAFKA_PARTITIONS) // Recommend NumKafkaPartitions if it is not given
-      {
+    if ((tableType.equalsIgnoreCase(HYBRID) || tableType.equalsIgnoreCase(REALTIME))) {
+      //The table is real-time or hybrid
+      if (_input.getNumKafkaPartitions() == DEFAULT_NUM_KAFKA_PARTITIONS) {
+        // Recommend NumKafkaPartitions if it is not given
         LOGGER.info("Recommending kafka partition configurations");
         LOGGER.info("*No kafka partition number found, recommending kafka partition number");
         _output.getPartitionConfig().setNumKafkaPartitions((int) Math
-            .ceil((double) _input.getNumMessagesPerSecInKafkaTopic() / _params.KAFKA_NUM_MESSAGES_PER_SEC_PER_PARTITION));
+            .ceil((double) _input.getNumMessagesPerSecInKafkaTopic() / _params._kafkaNumMessagesPerSecPerPartition));
         //Divide the messages/sec (total aggregate in the topic) by 250 to get an optimal value of the number of kafka partitions.

Review comment:
    nit: put a space after `//`.
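Since the recommendation in this hunk boils down to a single ceiling division (the class javadoc above gives the ~250 messages/sec-per-partition figure), here is a minimal, self-contained sketch of that arithmetic; the names are illustrative, while the real rule reads these values from `InputManager` and `PartitionRuleParams`:

```java
public class KafkaPartitionMathSketch {
  // Figure taken from the KafkaPartitionRule javadoc above: roughly 250 messages/sec per partition.
  private static final int MESSAGES_PER_SEC_PER_PARTITION = 250;

  // ceil(messagesPerSec / 250): 250 -> 1, 251 -> 2, 1000 -> 4.
  static int recommendNumKafkaPartitions(long numMessagesPerSecInTopic) {
    return (int) Math.ceil((double) numMessagesPerSecInTopic / MESSAGES_PER_SEC_PER_PARTITION);
  }

  public static void main(String[] args) {
    System.out.println(recommendNumKafkaPartitions(1000)); // prints 4
  }
}
```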
##########
File path: pinot-controller/src/main/java/org/apache/pinot/controller/recommender/rules/io/params/BloomFilterRuleParams.java
##########
@@ -27,26 +27,26 @@
  */
 public class BloomFilterRuleParams {
   // The minimum percentage of queries using a EQ predicate on a given dimension, which we want to optimize with BloomFilter
-  public Double THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER = RecommenderConstants.BloomFilterRule.DEFAULT_THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER;
+  public Double _thresholdMinPercentEqBloomfilter = RecommenderConstants.BloomFilterRule.DEFAULT_THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER;

   //Beyond this cardinality the bloom filter grows larger than 1MB, and we currently limit the size to 1MB
-  public Long THRESHOLD_MAX_CARDINALITY_BLOOMFILTER = RecommenderConstants.BloomFilterRule.DEFAULT_THRESHOLD_MAX_CARDINALITY_BLOOMFILTER;
+  public Long _thresholdMaxCardinalityBloomfilter = RecommenderConstants.BloomFilterRule.DEFAULT_THRESHOLD_MAX_CARDINALITY_BLOOMFILTER;

-  public Double getTHRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER() {
-    return THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER;
+  public Double getThresholdMinPercentEqBloomfilter() {
+    return _thresholdMinPercentEqBloomfilter;
   }

   @JsonSetter(value = "THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER(Double THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER) {
-    this.THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER = THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER;
+  public void setThresholdMinPercentEqBloomfilter(Double thresholdMinPercentEqBloomfilter) {
+    _thresholdMinPercentEqBloomfilter = thresholdMinPercentEqBloomfilter;
   }

-  public Long getTHRESHOLD_MAX_CARDINALITY_BLOOMFILTER() {
-    return THRESHOLD_MAX_CARDINALITY_BLOOMFILTER;
+  public Long getThresholdMaxCardinalityBloomfilter() {
+    return _thresholdMaxCardinalityBloomfilter;
   }

   @JsonSetter(value = "THRESHOLD_MAX_CARDINALITY_BLOOMFILTER", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MAX_CARDINALITY_BLOOMFILTER(Long THRESHOLD_MAX_CARDINALITY_BLOOMFILTER) {
-    this.THRESHOLD_MAX_CARDINALITY_BLOOMFILTER = THRESHOLD_MAX_CARDINALITY_BLOOMFILTER;
+  public void setThresholdMaxCardinalityBloomfilter(Long thresholdMaxCardinalityBloomfilter) {
+    _thresholdMaxCardinalityBloomfilter = thresholdMaxCardinalityBloomfilter;
   }
 }

Review comment:
    Missing an empty line at the end.

##########
File path: pinot-controller/src/main/java/org/apache/pinot/controller/api/resources/PinotTableSegmentConfigs.java
##########
@@ -49,30 +49,31 @@
   private static final Logger LOGGER = LoggerFactory.getLogger(PinotTableSegmentConfigs.class);

   @Inject
-  PinotHelixResourceManager pinotHelixResourceManager;
+  PinotHelixResourceManager _pinotHelixResourceManager;

   @Inject
-  ControllerMetrics metrics;
+  ControllerMetrics _metrics;

Review comment:
    Does this `_metrics` get used in this class at all? If so, it'd be better to rename it to `_controllerMetrics`. If not, maybe we can remove it.
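As a side note on what the two `BloomFilterRuleParams` thresholds above gate: the field comments say a bloom filter is only worth recommending when a column shows up in enough EQ predicates and its cardinality keeps the filter under the ~1MB cap. The actual `BloomFilterRule` logic is not part of this hunk, so the sketch below is only an illustration of that reading, with made-up default values:

```java
public final class BloomFilterCheckSketch {
  // Illustrative values only; the real defaults come from RecommenderConstants.BloomFilterRule.
  static final double THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER = 0.5;
  static final long THRESHOLD_MAX_CARDINALITY_BLOOMFILTER = 1_000_000L;

  // Recommend a bloom filter only for columns hit often enough by EQ predicates and whose
  // cardinality is low enough to keep the filter within the 1MB size limit mentioned above.
  static boolean shouldRecommendBloomFilter(double percentQueriesWithEqOnColumn, long cardinality) {
    return percentQueriesWithEqOnColumn > THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER
        && cardinality <= THRESHOLD_MAX_CARDINALITY_BLOOMFILTER;
  }
}
```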
##########
File path: pinot-controller/src/main/java/org/apache/pinot/controller/recommender/rules/io/params/NoDictionaryOnHeapDictionaryJointRuleParams.java
##########
@@ -23,97 +23,97 @@
 import static org.apache.pinot.controller.recommender.rules.io.params.RecommenderConstants.NoDictionaryOnHeapDictionaryJointRule.*;
+
 /**
  * Thresholds and parameters used in NoDictionaryOnHeapDictionaryJointRule
  */
 public class NoDictionaryOnHeapDictionaryJointRuleParams {
-
   // We won't consider on heap dictionaries if table QPS < this threshold
-  public Long THRESHOLD_MIN_QPS_ON_HEAP = DEFAULT_THRESHOLD_MIN_QPS_ON_HEAP;
+  public Long _thresholdMinQpsOnHeap = DEFAULT_THRESHOLD_MIN_QPS_ON_HEAP;

   // We won't consider on heap dictionaries the frequency of this column used in filter < this threshold
-  public Double THRESHOLD_MIN_FILTER_FREQ_ON_HEAP = DEFAULT_THRESHOLD_MIN_FILTER_FREQ_ON_HEAP;
+  public Double _thresholdMinFilterFreqOnHeap = DEFAULT_THRESHOLD_MIN_FILTER_FREQ_ON_HEAP;

   // The maximum acceptable memory footprint on heap
-  public Long THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP = DEFAULT_THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP;
+  public Long _thresholdMaxDictionarySizeOnHeap = DEFAULT_THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP;

   // For columns used in selection, if frequency >this threshold, we will apply no dictionary on it
-  public Double THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY = DEFAULT_THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY;
+  public Double _thresholdMinSelectionFreqNoDictionary = DEFAULT_THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY;

   // For cols frequently used in filter or groupby, we will add dictionary on that, now default to 0,
   // meaning all cols ever used in filter or groupby will have dictionary
-  public Double THRESHOLD_MIN_FILTER_FREQ_DICTIONARY = DEFAULT_THRESHOLD_MIN_FILTER_FREQ_DICTIONARY;
+  public Double _thresholdMinFilterFreqDictionary = DEFAULT_THRESHOLD_MIN_FILTER_FREQ_DICTIONARY;

   // The accumulated size of dictionaries of all segments in one push is generally smaller than the whole big dictionary size
   // (due to that the cardinality we have is the cardianlity for the whole dataset not per segment)
   // Use factor to shrink the size
   // TODO: improve this estimation if possible
-  public Double DICTIONARY_COEFFICIENT = DEFAULT_DICTIONARY_COEFFICIENT;
+  public Double _dictionaryCoefficient = DEFAULT_DICTIONARY_COEFFICIENT;

   // For colums not used in filter and selection, apply on heap dictionary only if it can save storage % > this threshold
-  public Double THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE = DEFAULT_THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE;
+  public Double _thresholdMinPercentDictionaryStorageSave = DEFAULT_THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE;

-  public Double getDICTIONARY_COEFFICIENT() {
-    return DICTIONARY_COEFFICIENT;
+  public Double getDictionaryCoefficient() {
+    return _dictionaryCoefficient;
   }

   @JsonSetter(value = "DICTIONARY_COEFFICIENT", nulls = Nulls.SKIP)
-  public void setDICTIONARY_COEFFICIENT(Double DICTIONARY_COEFFICIENT) {
-    this.DICTIONARY_COEFFICIENT = DICTIONARY_COEFFICIENT;
+  public void setDictionaryCoefficient(Double dictionaryCoefficient) {
+    _dictionaryCoefficient = dictionaryCoefficient;
   }

-  public Double getTHRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE() {
-    return THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE;
+  public Double getThresholdMinPercentDictionaryStorageSave() {
+    return _thresholdMinPercentDictionaryStorageSave;
   }

   @JsonSetter(value = "THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE(Double THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE) {
-    this.THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE = THRESHOLD_MIN_PERCENT_DICTIONARY_STORAGE_SAVE;
+  public void setThresholdMinPercentDictionaryStorageSave(Double thresholdMinPercentDictionaryStorageSave) {
+    _thresholdMinPercentDictionaryStorageSave = thresholdMinPercentDictionaryStorageSave;
   }

-  public Double getTHRESHOLD_MIN_FILTER_FREQ_ON_HEAP() {
-    return THRESHOLD_MIN_FILTER_FREQ_ON_HEAP;
+  public Double getThresholdMinFilterFreqOnHeap() {
+    return _thresholdMinFilterFreqOnHeap;
   }

   @JsonSetter(value = "THRESHOLD_MIN_FILTER_FREQ_ON_HEAP", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MIN_FILTER_FREQ_ON_HEAP(Double THRESHOLD_MIN_FILTER_FREQ_ON_HEAP) {
-    this.THRESHOLD_MIN_FILTER_FREQ_ON_HEAP = THRESHOLD_MIN_FILTER_FREQ_ON_HEAP;
+  public void setThresholdMinFilterFreqOnHeap(Double thresholdMinFilterFreqOnHeap) {
+    _thresholdMinFilterFreqOnHeap = thresholdMinFilterFreqOnHeap;
   }

-  public Long getTHRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP() {
-    return THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP;
+  public Long getThresholdMaxDictionarySizeOnHeap() {
+    return _thresholdMaxDictionarySizeOnHeap;
   }

   @JsonSetter(value = "THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP(Long THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP) {
-    this.THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP = THRESHOLD_MAX_DICTIONARY_SIZE_ON_HEAP;
+  public void setThresholdMaxDictionarySizeOnHeap(Long thresholdMaxDictionarySizeOnHeap) {
+    _thresholdMaxDictionarySizeOnHeap = thresholdMaxDictionarySizeOnHeap;
   }

-  public Long getTHRESHOLD_MIN_QPS_ON_HEAP() {
-    return THRESHOLD_MIN_QPS_ON_HEAP;
+  public Long getThresholdMinQpsOnHeap() {
    return _thresholdMinQpsOnHeap;
   }

   @JsonSetter(value = "THRESHOLD_MIN_QPS_ON_HEAP", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MIN_QPS_ON_HEAP(Long THRESHOLD_MIN_QPS_ON_HEAP) {
-    this.THRESHOLD_MIN_QPS_ON_HEAP = THRESHOLD_MIN_QPS_ON_HEAP;
+  public void setThresholdMinQpsOnHeap(Long thresholdMinQpsOnHeap) {
+    _thresholdMinQpsOnHeap = thresholdMinQpsOnHeap;
   }

-  public Double getTHRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY() {
-    return THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY;
+  public Double getThresholdMinSelectionFreqNoDictionary() {
+    return _thresholdMinSelectionFreqNoDictionary;
  }

   @JsonSetter(value = "THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY(Double THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY) {
-    this.THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY = THRESHOLD_MIN_SELECTION_FREQ_NO_DICTIONARY;
+  public void setThresholdMinSelectionFreqNoDictionary(Double thresholdMinSelectionFreqNoDictionary) {
+    _thresholdMinSelectionFreqNoDictionary = thresholdMinSelectionFreqNoDictionary;
   }

-  public Double getTHRESHOLD_MIN_FILTER_FREQ_DICTIONARY() {
-    return THRESHOLD_MIN_FILTER_FREQ_DICTIONARY;
+  public Double getThresholdMinFilterFreqDictionary() {
+    return _thresholdMinFilterFreqDictionary;
   }

   @JsonSetter(value = "THRESHOLD_MIN_FILTER_FREQ_DICTIONARY", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MIN_FILTER_FREQ_DICTIONARY(Double THRESHOLD_MIN_FILTER_FREQ_DICTIONARY) {
-    this.THRESHOLD_MIN_FILTER_FREQ_DICTIONARY = THRESHOLD_MIN_FILTER_FREQ_DICTIONARY;
+  public void setThresholdMinFilterFreqDictionary(Double thresholdMinFilterFreqDictionary) {
+    _thresholdMinFilterFreqDictionary = thresholdMinFilterFreqDictionary;
   }
 }

Review comment:
    Missing an empty line at the end.
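One thing worth spelling out about the renames in these params classes (this one, `BloomFilterRuleParams`, and `InvertedSortedIndexJointRuleParams` below): the `@JsonSetter` annotations keep the original upper-case JSON keys, and `nulls = Nulls.SKIP` leaves the initialized default in place when the key is explicitly null, so switching the Java names to camelCase should not change the JSON contract on the read path. A standalone sketch of that pattern with a hypothetical one-field class and made-up values:

```java
import com.fasterxml.jackson.annotation.JsonSetter;
import com.fasterxml.jackson.annotation.Nulls;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonSetterSkipNullsDemo {
  // Hypothetical params class mirroring the pattern in the hunks above.
  public static class Params {
    public Double _thresholdMinPercentEqBloomfilter = 0.5; // illustrative default

    public Double getThresholdMinPercentEqBloomfilter() {
      return _thresholdMinPercentEqBloomfilter;
    }

    // The JSON key stays THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER even though the Java names are camelCase;
    // Nulls.SKIP means an explicit null in the payload leaves the default untouched.
    @JsonSetter(value = "THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER", nulls = Nulls.SKIP)
    public void setThresholdMinPercentEqBloomfilter(Double thresholdMinPercentEqBloomfilter) {
      _thresholdMinPercentEqBloomfilter = thresholdMinPercentEqBloomfilter;
    }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    Params fromValue = mapper.readValue("{\"THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER\": 0.9}", Params.class);
    Params fromNull = mapper.readValue("{\"THRESHOLD_MIN_PERCENT_EQ_BLOOMFILTER\": null}", Params.class);
    System.out.println(fromValue.getThresholdMinPercentEqBloomfilter()); // 0.9
    System.out.println(fromNull.getThresholdMinPercentEqBloomfilter());  // 0.5 (default kept)
  }
}
```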
##########
File path: pinot-controller/src/main/java/org/apache/pinot/controller/recommender/rules/io/params/InvertedSortedIndexJointRuleParams.java
##########
@@ -31,123 +32,122 @@
   // When the number of indices we recommend increment 1,
   // the corresponding nESI saved should be > THRESHOLD_GAIN_DIFF_BETWEEN_ITERATION * totalNESI
   // to be consider a valid gain
-  public Double THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION = DEFAULT_THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION;
+  public Double _thresholdRatioMinGainDiffBetweenIteration = DEFAULT_THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION;

   // When we do not have a valid gain for MAX_NUM_ITERATION_WITHOUT_GAIN of iterations
   // The process will stop because adding more indices does not bring down the nESI
-  public Integer MAX_NUM_ITERATION_WITHOUT_GAIN = DEFAULT_MAX_NUM_ITERATION_WITHOUT_GAIN;
+  public Integer _maxNumIterationWithoutGain = DEFAULT_MAX_NUM_ITERATION_WITHOUT_GAIN;

   // Algorithm will increase the number of recommended indices for AND predicate if
   // the nESI saved by adding one more index exceeds THRESHOLD_AND_PREDICATE_INCREMENTAL_VOTE
-  public Double THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE = DEFAULT_THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE;
+  public Double _thresholdMinAndPredicateIncrementalVote = DEFAULT_THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE;

   // For AND connected predicates, iff the nESI saved of top N-th candidate is larger than
   // THRESHOLD_AND_PREDICATE_TOP_VOTES * nESI_saved_of_top_one_candidate
   // then candidates [1st, nth] will from a exclusive vote
   // Meaning that during the overall vote counting, only one candidate can be counted
-  public Double THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES = DEFAULT_THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES;
+  public Double _thresholdRatioMinAndPredicateTopCandidates = DEFAULT_THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES;

   // In the over all recommendation for sorted and inverted indices, iff the nESI saved of top N-th candidate is larger than
   // THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES * nESI_saved_of_top_one_candidate,
   // we will pick from [1st, nth] candidates with the largest cardinality as sorted index
-  public Double THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES = DEFAULT_THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES;
+  public Double _thresholdRatioMinNesiForTopCandidates = DEFAULT_THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES;

   // For the predicates it is impractical to estimate the portion
   // of documents selected out. Thus we use default values.
-  public Double PERCENT_SELECT_FOR_FUNCTION = DEFAULT_PERCENT_SELECT_FOR_FUNCTION;
-  public Double PERCENT_SELECT_FOR_TEXT_MATCH = DEFAULT_PERCENT_SELECT_FOR_TEXT_MATCH;
-  public Double PERCENT_SELECT_FOR_RANGE = DEFAULT_PERCENT_SELECT_FOR_RANGE;
-  public Double PERCENT_SELECT_FOR_REGEX = DEFAULT_PERCENT_SELECT_FOR_REGEX;
-  public Double PERCENT_SELECT_FOR_ISNULL = DEFAULT_PERCENT_SELECT_FOR_ISNULL;
-
+  public Double _percentSelectForFunction = DEFAULT_PERCENT_SELECT_FOR_FUNCTION;
+  public Double _percentSelectForTextMatch = DEFAULT_PERCENT_SELECT_FOR_TEXT_MATCH;
+  public Double _percentSelectForRange = DEFAULT_PERCENT_SELECT_FOR_RANGE;
+  public Double _percentSelectForRegex = DEFAULT_PERCENT_SELECT_FOR_REGEX;
+  public Double _percentSelectForIsnull = DEFAULT_PERCENT_SELECT_FOR_ISNULL;

-  public Double getTHRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES() {
-    return THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES;
+  public Double getThresholdRatioMinNesiForTopCandidates() {
+    return _thresholdRatioMinNesiForTopCandidates;
   }

   @JsonSetter(value = "THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES(Double THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES) {
-    this.THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES = THRESHOLD_RATIO_MIN_NESI_FOR_TOP_CANDIDATES;
+  public void setThresholdRatioMinNesiForTopCandidates(Double thresholdRatioMinNesiForTopCandidates) {
+    _thresholdRatioMinNesiForTopCandidates = thresholdRatioMinNesiForTopCandidates;
   }

-  public Double getTHRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION() {
-    return THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION;
+  public Double getThresholdRatioMinGainDiffBetweenIteration() {
+    return _thresholdRatioMinGainDiffBetweenIteration;
   }

   @JsonSetter(value = "THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION(Double THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION) {
-    this.THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION = THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION;
+  public void setThresholdRatioMinGainDiffBetweenIteration(Double thresholdRatioMinGainDiffBetweenIteration) {
+    _thresholdRatioMinGainDiffBetweenIteration = thresholdRatioMinGainDiffBetweenIteration;
   }

-  public Integer getMAX_NUM_ITERATION_WITHOUT_GAIN() {
-    return MAX_NUM_ITERATION_WITHOUT_GAIN;
+  public Integer getMaxNumIterationWithoutGain() {
+    return _maxNumIterationWithoutGain;
   }

   @JsonSetter(value = "MAX_NUM_ITERATION_WITHOUT_GAIN", nulls = Nulls.SKIP)
-  public void setMAX_NUM_ITERATION_WITHOUT_GAIN(Integer MAX_NUM_ITERATION_WITHOUT_GAIN) {
-    this.MAX_NUM_ITERATION_WITHOUT_GAIN = MAX_NUM_ITERATION_WITHOUT_GAIN;
+  public void setMaxNumIterationWithoutGain(Integer maxNumIterationWithoutGain) {
+    _maxNumIterationWithoutGain = maxNumIterationWithoutGain;
   }

-  public Double getTHRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE() {
-    return THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE;
+  public Double getThresholdMinAndPredicateIncrementalVote() {
+    return _thresholdMinAndPredicateIncrementalVote;
   }

   @JsonSetter(value = "THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE(Double THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE) {
-    this.THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE = THRESHOLD_MIN_AND_PREDICATE_INCREMENTAL_VOTE;
+  public void setThresholdMinAndPredicateIncrementalVote(Double thresholdMinAndPredicateIncrementalVote) {
+    _thresholdMinAndPredicateIncrementalVote = thresholdMinAndPredicateIncrementalVote;
   }

-  public Double getTHRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES() {
-    return THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES;
+  public Double getThresholdRatioMinAndPredicateTopCandidates() {
+    return _thresholdRatioMinAndPredicateTopCandidates;
   }

   @JsonSetter(value = "THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES", nulls = Nulls.SKIP)
-  public void setTHRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES(Double THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES) {
-    this.THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES = THRESHOLD_RATIO_MIN_AND_PREDICATE_TOP_CANDIDATES;
+  public void setThresholdRatioMinAndPredicateTopCandidates(Double thresholdRatioMinAndPredicateTopCandidates) {
+    _thresholdRatioMinAndPredicateTopCandidates = thresholdRatioMinAndPredicateTopCandidates;
   }

-  public Double getPERCENT_SELECT_FOR_FUNCTION() {
-    return PERCENT_SELECT_FOR_FUNCTION;
+  public Double getPercentSelectForFunction() {
+    return _percentSelectForFunction;
   }

   @JsonSetter(value = "PERCENT_SELECT_FOR_FUNCTION", nulls = Nulls.SKIP)
-  public void setPERCENT_SELECT_FOR_FUNCTION(Double PERCENT_SELECT_FOR_FUNCTION) {
-    this.PERCENT_SELECT_FOR_FUNCTION = PERCENT_SELECT_FOR_FUNCTION;
+  public void setPercentSelectForFunction(Double percentSelectForFunction) {
+    _percentSelectForFunction = percentSelectForFunction;
   }

-  public Double getPERCENT_SELECT_FOR_TEXT_MATCH() {
-    return PERCENT_SELECT_FOR_TEXT_MATCH;
+  public Double getPercentSelectForTextMatch() {
+    return _percentSelectForTextMatch;
   }

   @JsonSetter(value = "PERCENT_SELECT_FOR_TEXT_MATCH", nulls = Nulls.SKIP)
-  public void setPERCENT_SELECT_FOR_TEXT_MATCH(Double PERCENT_SELECT_FOR_TEXT_MATCH) {
-    this.PERCENT_SELECT_FOR_TEXT_MATCH = PERCENT_SELECT_FOR_TEXT_MATCH;
+  public void setPercentSelectForTextMatch(Double percentSelectForTextMatch) {
+    _percentSelectForTextMatch = percentSelectForTextMatch;
   }

-  public Double getPERCENT_SELECT_FOR_RANGE() {
-    return PERCENT_SELECT_FOR_RANGE;
+  public Double getPercentSelectForRange() {
+    return _percentSelectForRange;
   }

   @JsonSetter(value = "PERCENT_SELECT_FOR_RANGE", nulls = Nulls.SKIP)
-  public void setPERCENT_SELECT_FOR_RANGE(Double PERCENT_SELECT_FOR_RANGE) {
-    this.PERCENT_SELECT_FOR_RANGE = PERCENT_SELECT_FOR_RANGE;
+  public void setPercentSelectForRange(Double percentSelectForRange) {
+    _percentSelectForRange = percentSelectForRange;
   }

-  public Double getPERCENT_SELECT_FOR_REGEX() {
-    return PERCENT_SELECT_FOR_REGEX;
+  public Double getPercentSelectForRegex() {
+    return _percentSelectForRegex;
   }

   @JsonSetter(value = "PERCENT_SELECT_FOR_REGEX", nulls = Nulls.SKIP)
-  public void setPERCENT_SELECT_FOR_REGEX(Double PERCENT_SELECT_FOR_REGEX) {
-    this.PERCENT_SELECT_FOR_REGEX = PERCENT_SELECT_FOR_REGEX;
+  public void setPercentSelectForRegex(Double percentSelectForRegex) {
+    _percentSelectForRegex = percentSelectForRegex;
   }

-  public Double getPERCENT_SELECT_FOR_ISNULL() {
-    return PERCENT_SELECT_FOR_ISNULL;
+  public Double getPercentSelectForIsnull() {
+    return _percentSelectForIsnull;
   }

   @JsonSetter(value = "PERCENT_SELECT_FOR_ISNULL", nulls = Nulls.SKIP)
-  public void setPERCENT_SELECT_FOR_ISNULL(Double PERCENT_SELECT_FOR_ISNULL) {
-    this.PERCENT_SELECT_FOR_ISNULL = PERCENT_SELECT_FOR_ISNULL;
+  public void setPercentSelectForIsnull(Double percentSelectForIsnull) {
+    _percentSelectForIsnull = percentSelectForIsnull;
   }
 }

Review comment:
    Missing an empty line at the end.
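The field comments in this last class describe a stopping rule: keep adding recommended indices while each addition saves at least `thresholdRatioMinGainDiffBetweenIteration * totalNESI`, and give up after `maxNumIterationWithoutGain` iterations without such a gain. The sketch below only illustrates that loop shape with made-up constants and a stubbed "nESI saved" function; it is not the real InvertedSortedIndexJointRule implementation:

```java
import java.util.function.IntToDoubleFunction;

public class IndexCountSelectionSketch {
  // Illustrative values; the real defaults live in RecommenderConstants.InvertedSortedIndexJointRule.
  static final double THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION = 0.05;
  static final int MAX_NUM_ITERATION_WITHOUT_GAIN = 2;

  // nEsiSavedWith.applyAsDouble(n) stands in for "total nESI saved when recommending n indices".
  static int pickNumIndexes(double totalNesi, int maxCandidates, IntToDoubleFunction nEsiSavedWith) {
    int recommended = 0;
    int iterationsWithoutGain = 0;
    for (int n = 1; n <= maxCandidates; n++) {
      double gain = nEsiSavedWith.applyAsDouble(n) - nEsiSavedWith.applyAsDouble(n - 1);
      if (gain > THRESHOLD_RATIO_MIN_GAIN_DIFF_BETWEEN_ITERATION * totalNesi) {
        recommended = n;          // valid gain: one more index is worth it
        iterationsWithoutGain = 0;
      } else if (++iterationsWithoutGain >= MAX_NUM_ITERATION_WITHOUT_GAIN) {
        break;                    // too many iterations without a valid gain: stop adding indices
      }
    }
    return recommended;
  }
}
```

For instance, with totalNesi = 100 and cumulative savings of 40, 60, 63, 65 nESI for one through four indices, the per-step threshold is 5, so the loop accepts two indices and then stops after two consecutive sub-threshold gains.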
-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@pinot.apache.org
For queries about this service, please contact Infrastructure at: us...@infra.apache.org
For additional commands, e-mail: commits-h...@pinot.apache.org