This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit ba2efe1a6ae9e4def756b5b82d2a5ffcab0f4036
Author: Yu Gan <yu....@kyligence.io>
AuthorDate: Mon May 29 10:29:48 2023 +0800

    KYLIN-5699 support special character for bitmap measure column
---
 .../org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala     | 6 +++---
 .../org/apache/kylin/engine/spark/builder/DFDictionaryBuilder.scala | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala
index e58a803c62..615a42fbca 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala
@@ -51,7 +51,7 @@ object DFBuilderHelper extends Logging {
 
  def selectColumnsInTable(table: Dataset[Row], columns: Set[TblColRef]): Set[TblColRef] = {
     columns.filter(col =>
-      isColumnInTable(NSparkCubingUtil.convertFromDot(col.getBackTickExp), table))
+      isColumnInTable(NSparkCubingUtil.convertFromDotWithBackTick(col.getBackTickExp), table))
   }
 
  // ============================= Used by {@link DFBuildJob}. Functions are deprecated. ========================= //
@@ -66,7 +66,7 @@ object DFBuilderHelper extends Logging {
      isColumnInTable(NSparkCubingUtil.convertFromDot(cc.getBackTickExp), table))
   }
 
-  def isColumnInTable(colExpr: String, table: Dataset[Row]): Boolean = {
+  private def isColumnInTable(colExpr: String, table: Dataset[Row]): Boolean = {
     Try(table.select(expr(colExpr))) match {
       case Success(_) =>
         true
@@ -77,7 +77,7 @@ object DFBuilderHelper extends Logging {
 
  def chooseSuitableCols(ds: Dataset[Row], needCheckCols: Iterable[TblColRef]): Seq[Column] = {
     needCheckCols
-      .filter(ref => isColumnInTable(ref.getExpressionInSourceDB, ds))
+      .filter(ref => isColumnInTable(ref.getBackTickExp, ds))
      .map(ref => expr(NSparkCubingUtil.convertFromDotWithBackTick(ref.getBackTickExp))
         .alias(NSparkCubingUtil.convertFromDot(ref.getBackTickIdentity)))
       .toSeq
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFDictionaryBuilder.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFDictionaryBuilder.scala
index 36437a7916..ae95344cd0 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFDictionaryBuilder.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFDictionaryBuilder.scala
@@ -155,7 +155,7 @@ class DFDictionaryBuilder(
  private def getLockPath(pathName: String) = s"/${seg.getProject}${HadoopUtil.GLOBAL_DICT_STORAGE_ROOT}/$pathName/lock"
 
   def wrapCol(ref: TblColRef): Column = {
-    val colName = NSparkCubingUtil.convertFromDot(ref.getBackTickIdentity)
+    val colName = NSparkCubingUtil.convertFromDotWithBackTick(ref.getBackTickIdentity)
     expr(colName).cast(StringType)
   }
 
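For readers skimming the patch: Spark's expr() parses its argument as SQL, so a column name containing a special character only resolves when it is backtick-quoted, which is what switching from convertFromDot to convertFromDotWithBackTick preserves. Below is a minimal, self-contained sketch of the behavior the fix relies on; it is not Kylin code, and the object name, DataFrame, and "USER-NAME" column are invented for illustration:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.expr
    import org.apache.spark.sql.types.StringType

    import scala.util.Try

    object BackTickQuotingSketch extends App {
      val spark = SparkSession.builder().master("local[1]").appName("backtick-sketch").getOrCreate()
      import spark.implicits._

      // A column whose name contains a special character, as in KYLIN-5699.
      val df = Seq((1, "x"), (2, "y")).toDF("ID", "USER-NAME")

      // Unquoted, expr() reads "USER-NAME" as the subtraction USER - NAME and
      // analysis fails with unresolved columns; the Try here mirrors the
      // Try(table.select(expr(colExpr))) probe in isColumnInTable.
      assert(Try(df.select(expr("USER-NAME"))).isFailure)

      // Backticks make the whole name one quoted identifier, so the select
      // succeeds; the cast to StringType mirrors wrapCol in DFDictionaryBuilder.
      df.select(expr("`USER-NAME`").cast(StringType)).show()

      spark.stop()
    }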
