This is an automated email from the ASF dual-hosted git repository.
pfzhan pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git
The following commit(s) were added to refs/heads/kylin5 by this push:
new 0c5ad437e6 KYLIN-6052 Internal table loading supports jdbc logical view (#2302)
0c5ad437e6 is described below
commit 0c5ad437e656e6f537fc07924ae0c6decf115d51
Author: Guoliang Sun <[email protected]>
AuthorDate: Thu Feb 27 10:21:10 2025 +0800
KYLIN-6052 Internal table loading supports jdbc logical view (#2302)
1. Internal table loading supports jdbc logical view
2. Fix replace logical view
Co-authored-by: Yinghao Lin <[email protected]>
---
.../kylin/metadata/view/LogicalViewManager.java | 1 +
.../apache/kylin/rest/service/SparkDDLService.java | 2 +-
.../engine/spark/job/InternalTableLoadingStep.java | 1 +
.../engine/spark/builder/InternalTableLoader.scala | 3 +-
.../java/org/apache/spark/ddl/DDLCheckContext.java | 12 +++++
.../java/org/apache/spark/ddl/DDLConstant.java | 8 ++-
.../org/apache/spark/ddl/DDLCheckContextTest.java | 59 ++++++++++++++++++++++
7 files changed, 83 insertions(+), 3 deletions(-)
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java
index da42b58ae2..22f825189c 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java
@@ -89,6 +89,7 @@ public class LogicalViewManager {
LogicalView exist = get(view.getTableName());
LogicalView copy = copyForWrite(view);
if (exist != null) {
+ copy.setCreatedSql(view.getCreatedSql());
copy.setLastModified(exist.getLastModified());
copy.setMvcc(exist.getMvcc());
}
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
index 89c1c798fd..6bccc181b0 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
@@ -126,7 +126,7 @@ public class SparkDDLService extends BasicService {
private void saveLogicalView(DDLCheckContext context) {
EnhancedUnitOfWork.doInTransactionWithCheckAndRetry(() -> {
LogicalViewManager manager = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv());
- LogicalView logicalView = new LogicalView(context.getLogicalViewName(), context.getSql(),
+ LogicalView logicalView = new LogicalView(context.getLogicalViewName(), context.getLogicalViewPersistSql(),
context.getUserName(), context.getProject());
manager.update(logicalView);
return null;
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/InternalTableLoadingStep.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/InternalTableLoadingStep.java
index 76cb22f649..6b98b0cd6d 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/InternalTableLoadingStep.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/InternalTableLoadingStep.java
@@ -72,6 +72,7 @@ public class InternalTableLoadingStep extends NSparkExecutable {
dumpList.add(tableDesc.getResourcePath());
dumpList.add(internalTable.getResourcePath());
dumpList.add(projectInstance.getResourcePath());
+ dumpList.addAll(getLogicalViewMetaDumpList(config));
return dumpList;
}
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/InternalTableLoader.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/InternalTableLoader.scala
index 7a7745f854..284c619ab7 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/InternalTableLoader.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/InternalTableLoader.scala
@@ -113,7 +113,8 @@ class InternalTableLoader extends Logging {
startDate: String,
endDate: String,
incremental: Boolean): Dataset[Row] = {
- val tableDS = if (this.onlyLoadSchema) ss.table(table.getTableDesc).limit(0) else ss.table(table.getTableDesc)
+ val tableDS = if (onlyLoadSchema) ss.table(table.getTableDesc).limit(0) else ss.table(table.getTableDesc)
+
if (incremental) {
val partitionColumn = table.getTablePartition.getPartitionColumns()(0)
val dateFormat = table.getTablePartition.getDatePartitionFormat
diff --git a/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheckContext.java b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheckContext.java
index cd2d42e24e..b0dd3a3ada 100644
--- a/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheckContext.java
+++ b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheckContext.java
@@ -23,6 +23,7 @@ import static org.apache.spark.ddl.DDLConstant.HIVE_VIEW;
import static org.apache.spark.ddl.DDLConstant.REPLACE_LOGICAL_VIEW;
import java.util.Set;
+import java.util.regex.Matcher;
import lombok.Data;
@@ -56,6 +57,17 @@ public class DDLCheckContext {
return sql;
}
+ public String getLogicalViewPersistSql() {
+ if (isLogicalViewCommand()) {
+ String trimmedSql = sql.trim();
+ Matcher matcher = DDLConstant.LOGICAL_VIEW_DDL_CREATE_OR_REPLACE_SYNTAX.matcher(trimmedSql);
+ if (matcher.find()) {
+ return matcher.replaceFirst(DDLConstant.DDL_CREATE_LOGICAL_VIEW);
+ }
+ }
+ return sql;
+ }
+
public String getProject() {
return project;
}
diff --git a/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java
index 31d351256f..aa93a7ea00 100644
--- a/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java
+++ b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java
@@ -15,8 +15,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package org.apache.spark.ddl;
+import java.util.regex.Pattern;
+
public class DDLConstant {
public static final String LOGICAL_VIEW = "logic";
public static final String REPLACE_LOGICAL_VIEW = "replaceLogicalView";
@@ -27,7 +30,10 @@ public class DDLConstant {
public static final Integer VIEW_RULE_PRIORITY = 1;
public static final Integer SOURCE_TABLE_RULE_PRIORITY = 2;
- private DDLConstant() {
+ public static final Pattern LOGICAL_VIEW_DDL_CREATE_OR_REPLACE_SYNTAX = Pattern
+ .compile("(create|replace)\\s+logical\\s+view\\s+", Pattern.CASE_INSENSITIVE);
+ public static final String DDL_CREATE_LOGICAL_VIEW = "CREATE LOGICAL VIEW "; // keep the tail space
+ private DDLConstant() {
}
}
diff --git a/src/spark-project/sparder/src/test/java/org/apache/spark/ddl/DDLCheckContextTest.java b/src/spark-project/sparder/src/test/java/org/apache/spark/ddl/DDLCheckContextTest.java
new file mode 100644
index 0000000000..c2b5ab80ec
--- /dev/null
+++ b/src/spark-project/sparder/src/test/java/org/apache/spark/ddl/DDLCheckContextTest.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ddl;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class DDLCheckContextTest {
+
+ @Test
+ void testGetLogicalViewPersistSql() {
+ {
+ DDLCheckContext context = new DDLCheckContext("create logical view
test123 as select * from ssb.lineorder",
+ "default", null, null, null, false);
+ context.setCommandType(DDLConstant.CREATE_LOGICAL_VIEW);
+ assertEquals("CREATE LOGICAL VIEW test123 as select * from
ssb.lineorder",
+ context.getLogicalViewPersistSql());
+ }
+
+ {
+ DDLCheckContext context = new DDLCheckContext("replace logical
view test123 as select * from ssb.lineorder",
+ "default", null, null, null, false);
+ context.setCommandType(DDLConstant.REPLACE_LOGICAL_VIEW);
+ assertEquals("CREATE LOGICAL VIEW test123 as select * from
ssb.lineorder",
+ context.getLogicalViewPersistSql());
+ }
+
+ {
+ DDLCheckContext context = new DDLCheckContext("select * from
ssb.lineorder", "default", null, null, null,
+ false);
+ assertEquals("select * from ssb.lineorder",
context.getLogicalViewPersistSql());
+ }
+
+ {
+ DDLCheckContext context = new DDLCheckContext("drop logical view
test123 as select * from ssb.lineorder",
+ "default", null, null, null, false);
+ context.setCommandType(DDLConstant.DROP_LOGICAL_VIEW);
+ assertEquals("drop logical view test123 as select * from
ssb.lineorder",
+ context.getLogicalViewPersistSql());
+ }
+ }
+}
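
For reference, a minimal standalone sketch (not part of the commit) of the normalization that getLogicalViewPersistSql performs with the new LOGICAL_VIEW_DDL_CREATE_OR_REPLACE_SYNTAX pattern and DDL_CREATE_LOGICAL_VIEW constant: a "replace logical view ..." statement is rewritten to its "CREATE LOGICAL VIEW ..." form before being persisted. The class name LogicalViewSqlNormalizeSketch is illustrative only.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LogicalViewSqlNormalizeSketch {

    // Same pattern and replacement text as DDLConstant in this commit
    private static final Pattern CREATE_OR_REPLACE =
            Pattern.compile("(create|replace)\\s+logical\\s+view\\s+", Pattern.CASE_INSENSITIVE);
    private static final String DDL_CREATE_LOGICAL_VIEW = "CREATE LOGICAL VIEW "; // keep the tail space

    public static void main(String[] args) {
        String sql = "replace logical view test123 as select * from ssb.lineorder";
        Matcher matcher = CREATE_OR_REPLACE.matcher(sql.trim());
        // Rewrite the leading "create/replace logical view " prefix to the canonical CREATE form;
        // non-matching statements are returned unchanged.
        String persisted = matcher.find() ? matcher.replaceFirst(DDL_CREATE_LOGICAL_VIEW) : sql;
        System.out.println(persisted); // CREATE LOGICAL VIEW test123 as select * from ssb.lineorder
    }
}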