This is an automated email from the ASF dual-hosted git repository.

rongr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/pinot.git


The following commit(s) were added to refs/heads/master by this push:
     new b3d1ac3d28 [multistage] Resolve case sensitivity issue on functions (#9937)
b3d1ac3d28 is described below

commit b3d1ac3d28dd64f168231272e70919e55e75e473
Author: Rong Rong <ro...@apache.org>
AuthorDate: Fri Dec 9 18:30:25 2022 -0800

    [multistage] Resolve case sensitivity issue on functions (#9937)
    
    * resolve case sensitivity issue on functions
    * adding more tests
    * better comments for calcite copied code modification
    
    Co-authored-by: Rong Rong <ro...@startree.ai>
---
 .../calcite/prepare/PinotCalciteCatalogReader.java | 466 +++++++++++++++++++++
 .../apache/calcite/sql/fun/PinotOperatorTable.java |   3 -
 .../sql/util/PinotChainedSqlOperatorTable.java     |  92 ++++
 .../org/apache/pinot/query/QueryEnvironment.java   |   8 +-
 .../runtime/queries/ResourceBasedQueriesTest.java  |   9 +-
 .../test/resources/queries/SelectExpressions.json  |  27 +-
 .../src/test/resources/queries/SelectHaving.json   |   2 +-
 .../src/test/resources/queries/SpecialSyntax.json  |  30 ++
 8 files changed, 618 insertions(+), 19 deletions(-)

diff --git a/pinot-query-planner/src/main/java/org/apache/calcite/prepare/PinotCalciteCatalogReader.java b/pinot-query-planner/src/main/java/org/apache/calcite/prepare/PinotCalciteCatalogReader.java
new file mode 100644
index 0000000000..84c71be601
--- /dev/null
+++ b/pinot-query-planner/src/main/java/org/apache/calcite/prepare/PinotCalciteCatalogReader.java
@@ -0,0 +1,466 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.calcite.prepare;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.Objects;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import org.apache.calcite.config.CalciteConnectionConfig;
+import org.apache.calcite.jdbc.CalciteSchema;
+import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
+import org.apache.calcite.linq4j.function.Hints;
+import org.apache.calcite.model.ModelHandler;
+import org.apache.calcite.plan.RelOptPlanner;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeFactoryImpl;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.schema.AggregateFunction;
+import org.apache.calcite.schema.ScalarFunction;
+import org.apache.calcite.schema.Table;
+import org.apache.calcite.schema.TableFunction;
+import org.apache.calcite.schema.TableMacro;
+import org.apache.calcite.schema.Wrapper;
+import org.apache.calcite.schema.impl.ScalarFunctionImpl;
+import org.apache.calcite.sql.SqlFunctionCategory;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorTable;
+import org.apache.calcite.sql.SqlSyntax;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.InferTypes;
+import org.apache.calcite.sql.type.OperandTypes;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlOperandTypeInference;
+import org.apache.calcite.sql.type.SqlReturnTypeInference;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.util.ListSqlOperatorTable;
+import org.apache.calcite.sql.validate.SqlMoniker;
+import org.apache.calcite.sql.validate.SqlMonikerImpl;
+import org.apache.calcite.sql.validate.SqlMonikerType;
+import org.apache.calcite.sql.validate.SqlNameMatcher;
+import org.apache.calcite.sql.validate.SqlNameMatchers;
+import org.apache.calcite.sql.validate.SqlUserDefinedAggFunction;
+import org.apache.calcite.sql.validate.SqlUserDefinedFunction;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableFunction;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidatorUtil;
+import org.apache.calcite.util.Optionality;
+import org.apache.calcite.util.Util;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+
+/**
+ * ============================================================================
+ * THIS CLASS IS COPIED FROM Calcite's {@link org.apache.calcite.prepare.CalciteCatalogReader} and modifies the
+ * case sensitivity of function lookup, which is ALWAYS case-insensitive regardless of the case-sensitivity
+ * convention used for column/table identifiers.
+ * ============================================================================
+ *
+ * Pinot's implementation of {@link org.apache.calcite.prepare.Prepare.CatalogReader}
+ * and also {@link org.apache.calcite.sql.SqlOperatorTable} based on tables and
+ * functions defined in schemas.
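+ *
+ * For example, a scalar function registered in the schema as {@code add} can be referenced in a query as
+ * {@code ADD}, {@code Add} or {@code aDD}, while table and column identifiers still honor the configured
+ * case sensitivity.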
+ */
+public class PinotCalciteCatalogReader implements Prepare.CatalogReader {
+  protected final CalciteSchema _rootSchema;
+  protected final RelDataTypeFactory _typeFactory;
+  private final List<List<String>> _schemaPaths;
+  protected final SqlNameMatcher _nameMatcher;
+  protected final CalciteConnectionConfig _config;
+
+  public PinotCalciteCatalogReader(CalciteSchema rootSchema,
+      List<String> defaultSchema, RelDataTypeFactory typeFactory, CalciteConnectionConfig config) {
+    this(rootSchema, SqlNameMatchers.withCaseSensitive(config != null && config.caseSensitive()),
+        ImmutableList.of(Objects.requireNonNull(defaultSchema, "defaultSchema"),
+            ImmutableList.of()),
+        typeFactory, config);
+  }
+
+  protected PinotCalciteCatalogReader(CalciteSchema rootSchema,
+      SqlNameMatcher nameMatcher, List<List<String>> schemaPaths,
+      RelDataTypeFactory typeFactory, CalciteConnectionConfig config) {
+    _rootSchema = Objects.requireNonNull(rootSchema, "rootSchema");
+    _nameMatcher = nameMatcher;
+    _schemaPaths =
+        Util.immutableCopy(Util.isDistinct(schemaPaths)
+            ? schemaPaths
+            : new LinkedHashSet<>(schemaPaths));
+    _typeFactory = typeFactory;
+    _config = config;
+  }
+
+  @Override public PinotCalciteCatalogReader withSchemaPath(List<String> schemaPath) {
+    return new PinotCalciteCatalogReader(_rootSchema, _nameMatcher,
+        ImmutableList.of(schemaPath, ImmutableList.of()), _typeFactory, _config);
+  }
+
+  @Override public Prepare.@Nullable PreparingTable getTable(final List<String> names) {
+    // First look in the default schema, if any.
+    // If not found, look in the root schema.
+    CalciteSchema.TableEntry entry = SqlValidatorUtil.getTableEntry(this, names);
+    if (entry != null) {
+      final Table table = entry.getTable();
+      if (table instanceof Wrapper) {
+        final Prepare.PreparingTable relOptTable =
+            ((Wrapper) table).unwrap(Prepare.PreparingTable.class);
+        if (relOptTable != null) {
+          return relOptTable;
+        }
+      }
+      return RelOptTableImpl.create(this,
+          table.getRowType(_typeFactory), entry, null);
+    }
+    return null;
+  }
+
+  @Override public CalciteConnectionConfig getConfig() {
+    return _config;
+  }
+
+  private Collection<org.apache.calcite.schema.Function> getFunctionsFrom(
+      List<String> names) {
+    final List<org.apache.calcite.schema.Function> functions2 =
+        new ArrayList<>();
+    final List<List<String>> schemaNameList = new ArrayList<>();
+    if (names.size() > 1) {
+      // Name qualified: ignore path. But we do look in "/catalog" and "/",
+      // the last 2 items in the path.
+      if (_schemaPaths.size() > 1) {
+        schemaNameList.addAll(Util.skip(_schemaPaths));
+      } else {
+        schemaNameList.addAll(_schemaPaths);
+      }
+    } else {
+      for (List<String> schemaPath : _schemaPaths) {
+        CalciteSchema schema =
+            SqlValidatorUtil.getSchema(_rootSchema, schemaPath, _nameMatcher);
+        if (schema != null) {
+          schemaNameList.addAll(schema.getPath());
+        }
+      }
+    }
+    for (List<String> schemaNames : schemaNameList) {
+      CalciteSchema schema =
+          SqlValidatorUtil.getSchema(_rootSchema,
+              Iterables.concat(schemaNames, Util.skipLast(names)), _nameMatcher);
+      if (schema != null) {
+        final String name = Util.last(names);
+        // ====================================================================
+        // LINES CHANGED BELOW
+        // ====================================================================
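+        // This is the modification relative to the upstream CalciteCatalogReader (see class-level javadoc):
+        // caseSensitive is passed as false so that function names always resolve case-insensitively,
+        // regardless of the catalog's identifier case-sensitivity setting.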
+        functions2.addAll(schema.getFunctions(name, false));
+        // ====================================================================
+        // LINES CHANGED ABOVE
+        // ====================================================================
+      }
+    }
+    return functions2;
+  }
+
+  @Override public @Nullable RelDataType getNamedType(SqlIdentifier typeName) {
+    CalciteSchema.TypeEntry typeEntry = SqlValidatorUtil.getTypeEntry(getRootSchema(), typeName);
+    if (typeEntry != null) {
+      return typeEntry.getType().apply(_typeFactory);
+    } else {
+      return null;
+    }
+  }
+
+  @Override public List<SqlMoniker> getAllSchemaObjectNames(List<String> names) {
+    final CalciteSchema schema =
+        SqlValidatorUtil.getSchema(_rootSchema, names, _nameMatcher);
+    if (schema == null) {
+      return ImmutableList.of();
+    }
+    final ImmutableList.Builder<SqlMoniker> result = new ImmutableList.Builder<>();
+
+    // Add root schema if not anonymous
+    if (!schema.name.equals("")) {
+      result.add(moniker(schema, null, SqlMonikerType.SCHEMA));
+    }
+
+    final Map<String, CalciteSchema> schemaMap = schema.getSubSchemaMap();
+
+    for (String subSchema : schemaMap.keySet()) {
+      result.add(moniker(schema, subSchema, SqlMonikerType.SCHEMA));
+    }
+
+    for (String table : schema.getTableNames()) {
+      result.add(moniker(schema, table, SqlMonikerType.TABLE));
+    }
+
+    final NavigableSet<String> functions = schema.getFunctionNames();
+    for (String function : functions) { // views are here as well
+      result.add(moniker(schema, function, SqlMonikerType.FUNCTION));
+    }
+    return result.build();
+  }
+
+  private static SqlMonikerImpl moniker(CalciteSchema schema, @Nullable String name,
+      SqlMonikerType type) {
+    final List<String> path = schema.path(name);
+    if (path.size() == 1
+        && !schema.root().name.equals("")
+        && type == SqlMonikerType.SCHEMA) {
+      type = SqlMonikerType.CATALOG;
+    }
+    return new SqlMonikerImpl(path, type);
+  }
+
+  @Override public List<List<String>> getSchemaPaths() {
+    return _schemaPaths;
+  }
+
+  @Override public Prepare.@Nullable PreparingTable getTableForMember(List<String> names) {
+    return getTable(names);
+  }
+
+  @SuppressWarnings("deprecation")
+  @Override public @Nullable RelDataTypeField field(RelDataType rowType, String alias) {
+    return _nameMatcher.field(rowType, alias);
+  }
+
+  @SuppressWarnings("deprecation")
+  @Override public boolean matches(String string, String name) {
+    return _nameMatcher.matches(string, name);
+  }
+
+  @Override public RelDataType createTypeFromProjection(final RelDataType type,
+      final List<String> columnNameList) {
+    return SqlValidatorUtil.createTypeFromProjection(type, columnNameList, _typeFactory,
+        _nameMatcher.isCaseSensitive());
+  }
+
+  @Override public void lookupOperatorOverloads(final SqlIdentifier opName,
+      @Nullable SqlFunctionCategory category,
+      SqlSyntax syntax,
+      List<SqlOperator> operatorList,
+      SqlNameMatcher nameMatcher) {
+    if (syntax != SqlSyntax.FUNCTION) {
+      return;
+    }
+
+    final Predicate<org.apache.calcite.schema.Function> predicate;
+    if (category == null) {
+      predicate = function -> true;
+    } else if (category.isTableFunction()) {
+      predicate = function ->
+          function instanceof TableMacro
+              || function instanceof TableFunction;
+    } else {
+      predicate = function ->
+          !(function instanceof TableMacro
+              || function instanceof TableFunction);
+    }
+    getFunctionsFrom(opName.names)
+        .stream()
+        .filter(predicate)
+        .map(function -> toOp(opName, function))
+        .forEachOrdered(operatorList::add);
+  }
+
+  /** Creates an operator table that contains functions in the given class
+   * or classes.
+   *
+   * @see ModelHandler#addFunctions */
+  public static SqlOperatorTable operatorTable(String... classNames) {
+    // Dummy schema to collect the functions
+    final CalciteSchema schema =
+        CalciteSchema.createRootSchema(false, false);
+    for (String className : classNames) {
+      ModelHandler.addFunctions(schema.plus(), null, ImmutableList.of(),
+          className, "*", true);
+    }
+
+    final ListSqlOperatorTable table = new ListSqlOperatorTable();
+    for (String name : schema.getFunctionNames()) {
+      schema.getFunctions(name, true).forEach(function -> {
+        final SqlIdentifier id = new SqlIdentifier(name, SqlParserPos.ZERO);
+        table.add(toOp(id, function));
+      });
+    }
+    return table;
+  }
+
+  /** Converts a function to a {@link org.apache.calcite.sql.SqlOperator}. */
+  private static SqlOperator toOp(SqlIdentifier name,
+      final org.apache.calcite.schema.Function function) {
+    final Function<RelDataTypeFactory, List<RelDataType>> argTypesFactory =
+        typeFactory -> function.getParameters()
+            .stream()
+            .map(o -> o.getType(typeFactory))
+            .collect(Util.toImmutableList());
+    final Function<RelDataTypeFactory, List<SqlTypeFamily>> typeFamiliesFactory =
+        typeFactory -> argTypesFactory.apply(typeFactory)
+            .stream()
+            .map(type ->
+                Util.first(type.getSqlTypeName().getFamily(),
+                    SqlTypeFamily.ANY))
+            .collect(Util.toImmutableList());
+    final Function<RelDataTypeFactory, List<RelDataType>> paramTypesFactory =
+        typeFactory ->
+            argTypesFactory.apply(typeFactory)
+                .stream()
+                .map(type -> toSql(typeFactory, type))
+                .collect(Util.toImmutableList());
+
+    // Use a short-lived type factory to populate "typeFamilies" and "argTypes".
+    // SqlOperandMetadata.paramTypes will use the real type factory, during
+    // validation.
+    final RelDataTypeFactory dummyTypeFactory = new JavaTypeFactoryImpl();
+    final List<RelDataType> argTypes = argTypesFactory.apply(dummyTypeFactory);
+    final List<SqlTypeFamily> typeFamilies =
+        typeFamiliesFactory.apply(dummyTypeFactory);
+
+    final SqlOperandTypeInference operandTypeInference =
+        InferTypes.explicit(argTypes);
+
+    final SqlOperandMetadata operandMetadata =
+        OperandTypes.operandMetadata(typeFamilies, paramTypesFactory,
+            i -> function.getParameters().get(i).getName(),
+            i -> function.getParameters().get(i).isOptional());
+
+    final SqlKind kind = kind(function);
+    if (function instanceof ScalarFunction) {
+      final SqlReturnTypeInference returnTypeInference =
+          infer((ScalarFunction) function);
+      return new SqlUserDefinedFunction(name, kind, returnTypeInference,
+          operandTypeInference, operandMetadata, function);
+    } else if (function instanceof AggregateFunction) {
+      final SqlReturnTypeInference returnTypeInference =
+          infer((AggregateFunction) function);
+      return new SqlUserDefinedAggFunction(name, kind,
+          returnTypeInference, operandTypeInference,
+          operandMetadata, (AggregateFunction) function, false, false,
+          Optionality.FORBIDDEN);
+    } else if (function instanceof TableMacro) {
+      return new SqlUserDefinedTableMacro(name, kind, ReturnTypes.CURSOR,
+          operandTypeInference, operandMetadata, (TableMacro) function);
+    } else if (function instanceof TableFunction) {
+      return new SqlUserDefinedTableFunction(name, kind, ReturnTypes.CURSOR,
+          operandTypeInference, operandMetadata, (TableFunction) function);
+    } else {
+      throw new AssertionError("unknown function type " + function);
+    }
+  }
+
+  /** Deduces the {@link org.apache.calcite.sql.SqlKind} of a user-defined
+   * function based on a {@link Hints} annotation, if present. */
+  private static SqlKind kind(org.apache.calcite.schema.Function function) {
+    if (function instanceof ScalarFunctionImpl) {
+      Hints hints =
+          ((ScalarFunctionImpl) function).method.getAnnotation(Hints.class);
+      if (hints != null) {
+        for (String hint : hints.value()) {
+          if (hint.startsWith("SqlKind:")) {
+            return SqlKind.valueOf(hint.substring("SqlKind:".length()));
+          }
+        }
+      }
+    }
+    return SqlKind.OTHER_FUNCTION;
+  }
+
+  private static SqlReturnTypeInference infer(final ScalarFunction function) {
+    return opBinding -> {
+      final RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
+      final RelDataType type;
+      if (function instanceof ScalarFunctionImpl) {
+        type = ((ScalarFunctionImpl) function).getReturnType(typeFactory,
+            opBinding);
+      } else {
+        type = function.getReturnType(typeFactory);
+      }
+      return toSql(typeFactory, type);
+    };
+  }
+
+  private static SqlReturnTypeInference infer(
+      final AggregateFunction function) {
+    return opBinding -> {
+      final RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
+      final RelDataType type = function.getReturnType(typeFactory);
+      return toSql(typeFactory, type);
+    };
+  }
+
+  private static RelDataType toSql(RelDataTypeFactory typeFactory,
+      RelDataType type) {
+    if (type instanceof RelDataTypeFactoryImpl.JavaType
+        && ((RelDataTypeFactoryImpl.JavaType) type).getJavaClass()
+        == Object.class) {
+      return typeFactory.createTypeWithNullability(
+          typeFactory.createSqlType(SqlTypeName.ANY), true);
+    }
+    return JavaTypeFactoryImpl.toSql(typeFactory, type);
+  }
+
+  @Override public List<SqlOperator> getOperatorList() {
+    final ImmutableList.Builder<SqlOperator> builder = ImmutableList.builder();
+    for (List<String> schemaPath : _schemaPaths) {
+      CalciteSchema schema =
+          SqlValidatorUtil.getSchema(_rootSchema, schemaPath, _nameMatcher);
+      if (schema != null) {
+        for (String name : schema.getFunctionNames()) {
+          schema.getFunctions(name, true).forEach(f ->
+              builder.add(toOp(new SqlIdentifier(name, SqlParserPos.ZERO), f)));
+        }
+      }
+    }
+    return builder.build();
+  }
+
+  @Override public CalciteSchema getRootSchema() {
+    return _rootSchema;
+  }
+
+  @Override public RelDataTypeFactory getTypeFactory() {
+    return _typeFactory;
+  }
+
+  @Override public void registerRules(RelOptPlanner planner) {
+  }
+
+  @SuppressWarnings("deprecation")
+  @Override public boolean isCaseSensitive() {
+    return _nameMatcher.isCaseSensitive();
+  }
+
+  @Override public SqlNameMatcher nameMatcher() {
+    return _nameMatcher;
+  }
+
+  @Override public <C extends Object> @Nullable C unwrap(Class<C> aClass) {
+    if (aClass.isInstance(this)) {
+      return aClass.cast(this);
+    }
+    return null;
+  }
+}
diff --git a/pinot-query-planner/src/main/java/org/apache/calcite/sql/fun/PinotOperatorTable.java b/pinot-query-planner/src/main/java/org/apache/calcite/sql/fun/PinotOperatorTable.java
index aa0cf95284..8f46a0db4e 100644
--- a/pinot-query-planner/src/main/java/org/apache/calcite/sql/fun/PinotOperatorTable.java
+++ b/pinot-query-planner/src/main/java/org/apache/calcite/sql/fun/PinotOperatorTable.java
@@ -49,9 +49,6 @@ public class PinotOperatorTable extends SqlStdOperatorTable {
   public static final SqlAggFunction BOOL_AND = new PinotBoolAndAggregateFunction();
   public static final SqlAggFunction BOOL_OR = new PinotBoolOrAggregateFunction();
 
-  // TODO: remove this once https://github.com/apache/pinot/issues/9900 is fixed
-  public static final SqlFunction REGEXP_REPLACE = new SqlRegexpReplaceFunction();
-
   // TODO: clean up lazy init by using Suppliers.memoize(this::computeInstance) and make getter wrapped around
   // supplier instance. this should replace all lazy init static objects in the codebase
   public static synchronized PinotOperatorTable instance() {
diff --git a/pinot-query-planner/src/main/java/org/apache/calcite/sql/util/PinotChainedSqlOperatorTable.java b/pinot-query-planner/src/main/java/org/apache/calcite/sql/util/PinotChainedSqlOperatorTable.java
new file mode 100644
index 0000000000..8e09d232d8
--- /dev/null
+++ b/pinot-query-planner/src/main/java/org/apache/calcite/sql/util/PinotChainedSqlOperatorTable.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.calcite.sql.util;
+
+import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.calcite.sql.SqlFunctionCategory;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorTable;
+import org.apache.calcite.sql.SqlSyntax;
+import org.apache.calcite.sql.validate.SqlNameMatcher;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+/**
+ * ============================================================================
+ * THIS CLASS IS COPIED FROM Calcite's {@link org.apache.calcite.sql.util.ChainedSqlOperatorTable} and modifies the
+ * operator lookup to terminate early once a match is found in the ordered SqlOperatorTable list. This avoids
+ * hard-coded casts that assume all operators looked up for the same SQL identifier are of the same SqlOperator
+ * subtype.
+ * ============================================================================
+ *
+ * PinotChainedSqlOperatorTable implements the {@link SqlOperatorTable} interface by
+ * chaining together any number of underlying operator table instances.
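+ *
+ * As wired in QueryEnvironment, the chain is {@code PinotOperatorTable} followed by the catalog reader, so a
+ * match from Pinot's built-in operator table takes precedence over schema-registered functions of the same name.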
+ */
+public class PinotChainedSqlOperatorTable implements SqlOperatorTable {
+  //~ Instance fields --------------------------------------------------------
+
+  protected final List<SqlOperatorTable> _tableList;
+
+  //~ Constructors -----------------------------------------------------------
+
+  public PinotChainedSqlOperatorTable(List<SqlOperatorTable> tableList) {
+    this(ImmutableList.copyOf(tableList));
+  }
+
+  /** Internal constructor; call {@link SqlOperatorTables#chain}. */
+  protected PinotChainedSqlOperatorTable(ImmutableList<SqlOperatorTable> tableList) {
+    _tableList = ImmutableList.copyOf(tableList);
+  }
+
+  //~ Methods ----------------------------------------------------------------
+
+  @Deprecated // to be removed before 2.0
+  public void add(SqlOperatorTable table) {
+    if (!_tableList.contains(table)) {
+      _tableList.add(table);
+    }
+  }
+
+  @Override public void lookupOperatorOverloads(SqlIdentifier opName,
+      @Nullable SqlFunctionCategory category, SqlSyntax syntax,
+      List<SqlOperator> operatorList, SqlNameMatcher nameMatcher) {
+    for (SqlOperatorTable table : _tableList) {
+      table.lookupOperatorOverloads(opName, category, syntax, operatorList,
+          nameMatcher);
+      // ====================================================================
+      // LINES CHANGED BELOW
+      // ====================================================================
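+      // This is the modification relative to the upstream ChainedSqlOperatorTable (see class-level javadoc):
+      // stop at the first table in the chain that yields a match, so that later tables cannot contribute
+      // overloads of a different SqlOperator subtype for the same identifier.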
+      if (!operatorList.isEmpty()) {
+        break;
+      }
+      // ====================================================================
+      // LINES CHANGED ABOVE
+      // ====================================================================
+    }
+  }
+
+  @Override public List<SqlOperator> getOperatorList() {
+    List<SqlOperator> list = new ArrayList<>();
+    for (SqlOperatorTable table : _tableList) {
+      list.addAll(table.getOperatorList());
+    }
+    return list;
+  }
+}
diff --git a/pinot-query-planner/src/main/java/org/apache/pinot/query/QueryEnvironment.java b/pinot-query-planner/src/main/java/org/apache/pinot/query/QueryEnvironment.java
index d84a70d3f8..a5f90e74a1 100644
--- a/pinot-query-planner/src/main/java/org/apache/pinot/query/QueryEnvironment.java
+++ b/pinot-query-planner/src/main/java/org/apache/pinot/query/QueryEnvironment.java
@@ -30,7 +30,7 @@ import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.RelOptUtil;
 import org.apache.calcite.plan.hep.HepProgram;
 import org.apache.calcite.plan.hep.HepProgramBuilder;
-import org.apache.calcite.prepare.CalciteCatalogReader;
+import org.apache.calcite.prepare.PinotCalciteCatalogReader;
 import org.apache.calcite.prepare.Prepare;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.RelRoot;
@@ -45,7 +45,7 @@ import org.apache.calcite.sql.SqlExplainLevel;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlNode;
 import org.apache.calcite.sql.fun.PinotOperatorTable;
-import org.apache.calcite.sql.util.ChainedSqlOperatorTable;
+import org.apache.calcite.sql.util.PinotChainedSqlOperatorTable;
 import org.apache.calcite.sql2rel.SqlToRelConverter;
 import org.apache.calcite.sql2rel.StandardConvertletTable;
 import org.apache.calcite.tools.FrameworkConfig;
@@ -88,11 +88,11 @@ public class QueryEnvironment {
     // catalog
     Properties catalogReaderConfigProperties = new Properties();
     catalogReaderConfigProperties.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), "true");
-    _catalogReader = new CalciteCatalogReader(_rootSchema, _rootSchema.path(null), _typeFactory,
+    _catalogReader = new PinotCalciteCatalogReader(_rootSchema, _rootSchema.path(null), _typeFactory,
         new CalciteConnectionConfigImpl(catalogReaderConfigProperties));
 
     _config = Frameworks.newConfigBuilder().traitDefs()
-        .operatorTable(new ChainedSqlOperatorTable(Arrays.asList(
+        .operatorTable(new PinotChainedSqlOperatorTable(Arrays.asList(
             PinotOperatorTable.instance(),
             _catalogReader)))
         .defaultSchema(_rootSchema.plus())
diff --git a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java
index ccc68b7622..5910416ac4 100644
--- a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java
+++ b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java
@@ -28,6 +28,7 @@ import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -280,7 +281,13 @@ public class ResourceBasedQueriesTest extends QueryRunnerTestBase {
       if (testFileUrl != null && new File(testFileUrl.getFile()).exists()) {
         Map<String, QueryTestCase> testCases = MAPPER.readValue(new File(testFileUrl.getFile()),
             new TypeReference<Map<String, QueryTestCase>>() { });
-        // TODO: potential test case conflicts between files, address later by throwing during setUp if already exist.
+        {
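+          // Fail fast when two query test resource files define a test case with the same name, instead of
+          // silently overwriting the earlier one in testCaseMap (replaces the TODO removed above).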
+          HashSet<String> hashSet = new HashSet<>(testCaseMap.keySet());
+          hashSet.retainAll(testCases.keySet());
+          if (!hashSet.isEmpty()) {
+            throw new IllegalArgumentException("testCase already exists for the following names: " + hashSet);
+          }
+        }
         testCaseMap.putAll(testCases);
       }
     }
diff --git a/pinot-query-runtime/src/test/resources/queries/SelectExpressions.json b/pinot-query-runtime/src/test/resources/queries/SelectExpressions.json
index af925382ef..24fb9ab2c7 100644
--- a/pinot-query-runtime/src/test/resources/queries/SelectExpressions.json
+++ b/pinot-query-runtime/src/test/resources/queries/SelectExpressions.json
@@ -38,19 +38,26 @@
       { "sql": "SELECT intCol as key, SUM(doubleCol + floatCol) AS aggSum FROM 
{tbl1} GROUP BY intCol"},
       { "sql": "SELECT intCol, SUM(avgVal) FROM (SELECT strCol, intCol, 
AVG(doubleCol) AS avgVal FROM {tbl1} GROUP BY intCol, strCol) GROUP BY intCol"},
       { "sql": "SELECT strCol, MAX(sumVal), MAX(sumVal + avgVal) AS transVal 
FROM (SELECT strCol, intCol, SUM(floatCol + 2 * intCol) AS sumVal, 
AVG(doubleCol) AS avgVal FROM {tbl1} GROUP BY strCol, intCol) GROUP BY strCol 
ORDER BY MAX(sumVal)" },
-      { "sql": "SELECT intCol, intCol FROM {tbl1} WHERE intCol < 100" },
-      { "sql": "SELECT intCol, intCol, doubleCol, strCol, strCol FROM {tbl1}" 
},
-      { "sql": "SELECT intCol, intCol FROM {tbl1} WHERE intCol < 100 ORDER BY 
doubleCol" },
-      { "sql": "SELECT intCol, intCol FROM {tbl1} WHERE intCol < 100 ORDER BY 
intCol" },
-      { "sql": "SELECT intCol, intCol, doubleCol, strCol, strCol FROM {tbl1} 
ORDER BY intCol" },
-      { "sql": "SELECT intCol, intCol, doubleCol, strCol, strCol FROM {tbl1} 
ORDER BY strCol, intCol" },
-      { "sql": "SELECT intCol, intCol, doubleCol, strCol, strCol FROM {tbl1} 
ORDER BY floatCol, intCol" },
-      { "sql": "SELECT intCol, intCol, doubleCol, strCol, strCol FROM {tbl1} 
ORDER BY floatCol" },
-
+      { "sql": "SELECT intCol, intCol AS intAlias FROM {tbl1} WHERE intCol < 
100" },
+      { "sql": "SELECT intCol, intCol AS intAlias, doubleCol, strCol, strCol 
AS strAlias FROM {tbl1}" },
+      { "sql": "SELECT intCol, intCol AS intAlias FROM {tbl1} WHERE intCol < 
100 ORDER BY doubleCol" },
+      { "sql": "SELECT intCol, intCol AS intAlias FROM {tbl1} WHERE intCol < 
100 ORDER BY intCol" },
+      { "sql": "SELECT intCol, intCol AS intAlias FROM {tbl1} WHERE intCol < 
100 ORDER BY intAlias" },
+      { "sql": "SELECT intCol, intCol AS intAlias, doubleCol, strCol, strCol 
AS strAlias FROM {tbl1} ORDER BY intCol" },
+      { "sql": "SELECT intCol, intCol AS intAlias, doubleCol, strCol, strCol 
AS strAlias FROM {tbl1} ORDER BY strCol, intCol" },
+      { "sql": "SELECT intCol, intCol AS intAlias, doubleCol, strCol, strCol 
AS strAlias FROM {tbl1} ORDER BY strAlias, intAlias" },
+      { "sql": "SELECT intCol, intCol AS intAlias, doubleCol, strCol, strCol 
AS strAlias FROM {tbl1} ORDER BY floatCol, intCol" },
+      { "sql": "SELECT intCol, intCol AS intAlias, doubleCol, strCol, strCol 
AS strAlias FROM {tbl1} ORDER BY floatCol" },
+      { "sql": "SELECT intCol + intAlias FROM (SELECT intCol, intCol as 
intAlias FROM {tbl1}) WHERE intCol < 100" },
       { "sql": "SELECT intCol + alias FROM (SELECT intCol, intCol as alias 
FROM {tbl1}) WHERE intCol < 100" },
       { "sql": "SELECT intCol, intCol, doubleCol, strCol, strCol FROM {tbl1}" 
},
       { "sql": "SELECT {tbl1}.intCol, {tbl1}.intCol, {tbl1}.doubleCol, 
{tbl2}.strCol, {tbl2}.strCol FROM {tbl1}, {tbl2} WHERE {tbl1}.intCol = 
{tbl2}.intCol" },
-      { "sql": "SELECT {tbl2}.intCol, {tbl2}.intCol FROM {tbl1}, {tbl2} WHERE 
{tbl1}.intCol = {tbl2}.intCol AND {tbl1}.intCol < 100 ORDER BY 
{tbl1}.doubleCol" }
+      { "sql": "SELECT {tbl2}.intCol, {tbl2}.intCol FROM {tbl1}, {tbl2} WHERE 
{tbl1}.intCol = {tbl2}.intCol AND {tbl1}.intCol < 100 ORDER BY 
{tbl1}.doubleCol" },
+      {
+        "ignored": true,
+        "comment": "select intCol, intCol without aliasing one of them is not 
supported.",
+        "sql": "SELECT intCol, intCol FROM {tbl1} WHERE intCol < 100"
+      }
     ]
   }
 }
diff --git a/pinot-query-runtime/src/test/resources/queries/SelectHaving.json b/pinot-query-runtime/src/test/resources/queries/SelectHaving.json
index f4b9d307dd..9aad9542ca 100644
--- a/pinot-query-runtime/src/test/resources/queries/SelectHaving.json
+++ b/pinot-query-runtime/src/test/resources/queries/SelectHaving.json
@@ -1,5 +1,5 @@
 {
-  "select_expression_test": {
+  "select_having_expression_test": {
     "tables": {
       "test_having": {
         "schema":[
diff --git a/pinot-query-runtime/src/test/resources/queries/SpecialSyntax.json b/pinot-query-runtime/src/test/resources/queries/SpecialSyntax.json
index 9f1020cd51..7fb77c5ad7 100644
--- a/pinot-query-runtime/src/test/resources/queries/SpecialSyntax.json
+++ b/pinot-query-runtime/src/test/resources/queries/SpecialSyntax.json
@@ -39,6 +39,36 @@
           ["bar", 3],
           ["foo", 4]
         ]
+      },
+      {
+        "description": "test scalar function with STD SQL operator, and scalar 
function without STD SQL operator can be found properly",
+        "sql": "SELECT UpPeR(col1), round_deCiMal(col3_r), 
aDD(pluS(CAST(col3_l AS DOUBLE), CAST(col3_r AS DOUBLE)), CAST(10 AS DOUBLE)) 
FROM (SELECT {tbl1}.col1 AS col1, {tbl1}.col3 AS col3_l, {tbl2}.col3 AS col3_r 
FROM {tbl1} JOIN {tbl2} USING (col2))",
+        "outputs": [
+          ["BAR", 3, 15.0],
+          ["FOO", 4, 15.0]
+        ]
+      },
+      {
+        "description": "test scalar function with STD SQL operator, and scalar 
function without STD SQL operator can be found properly",
+        "sql": "SELECT UpPeR(col1), round_deCiMal(col3), pluS(CAST(col3 AS 
DOUBLE), CAST(10 AS DOUBLE)) FROM {tbl1}",
+        "outputs": [
+          ["BAR", 2, 12.0],
+          ["FOO", 1, 11.0]
+        ]
+      },
+      {
+        "description": "test agg function with STD SQL operator works 
properly",
+        "sql": "SELECT sUm(col3_r), AvG(col3_r), MAX(PLUS(Add(CAST(col3_l AS 
DOUBLE), CAST(col3_r AS DOUBLE)), CAST(10 AS DOUBLE))) FROM (SELECT {tbl1}.col1 
AS col1, {tbl1}.col3 AS col3_l, {tbl2}.col3 AS col3_r FROM {tbl1} JOIN {tbl2} 
USING (col2))",
+        "outputs": [
+          [7, 3, 15.0]
+        ]
+      },
+      {
+        "description": "test scalar function with STD SQL operator, and scalar 
function without STD SQL operator can be found properly",
+        "sql": "SELECT sUm(col3), AvG(col3), MAX(pluS(CAST(col3 AS DOUBLE), 
CAST(10 AS DOUBLE))) FROM {tbl1}",
+        "outputs": [
+          [3, 1, 12.0]
+        ]
       }
     ]
   }

