This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 66c16e04f3005fa4fed407daca11df85d3ca3e37
Author: Yu Gan <yu....@kyligence.io>
AuthorDate: Wed May 24 20:48:45 2023 +0800

    KYLIN-5696 support special characters for logical view
---
 .../apache/kylin/engine/spark/source/NSparkCubingSourceInput.java   | 2 +-
 .../src/main/java/org/apache/spark/sql/LogicalViewLoader.java       | 6 ++++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkCubingSourceInput.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkCubingSourceInput.java
index fe31888d74..7af65fa29c 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkCubingSourceInput.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkCubingSourceInput.java
@@ -69,7 +69,7 @@ public class NSparkCubingSourceInput implements NSparkCubingEngine.NSparkCubingS
 
     private List<ColumnDesc> extractEffectiveColumns(TableDesc table, SparkSession ss) {
         List<ColumnDesc> ret = new ArrayList<>();
-        Dataset<Row> sourceTableDS = ss.table(table.getTableAlias());
+        Dataset<Row> sourceTableDS = ss.table(table.getBackTickIdentity());
         Set<String> sourceTableColumns = Arrays.stream(sourceTableDS.columns()).map(String::toUpperCase)
                 .collect(Collectors.toSet());
         for (ColumnDesc col : table.getColumns()) {
diff --git a/src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java b/src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java
index 6444e7b175..e72aecc873 100644
--- a/src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java
+++ b/src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java
@@ -29,6 +29,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.NamedThreadFactory;
+import org.apache.kylin.common.util.StringHelper;
 import org.apache.kylin.metadata.view.LogicalView;
 import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.apache.kylin.source.SourceFactory;
@@ -156,8 +157,9 @@ public class LogicalViewLoader {
   }
 
   private static void dropLogicalViewIfExist(String tableName, SparkSession spark) {
-    String logicalViewDatabase = KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB();
-    spark.sql("DROP LOGICAL VIEW IF EXISTS " + logicalViewDatabase + "." + tableName);
+    String quotedDatabase = StringHelper.backtickQuote(KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB());
+    String quotedTableName = StringHelper.backtickQuote(tableName);
+    spark.sql("DROP LOGICAL VIEW IF EXISTS " + quotedDatabase + "." + quotedTableName);
   }
 
   public static void checkConfigIfNeed() {

Reply via email to