This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git
commit a81cecac2c0fd775777df64dfe64ed7546ef6088
Author: ChenLiang.Lu <31469905+yab...@users.noreply.github.com>
AuthorDate: Fri Jan 6 16:54:42 2023 +0800

    KYLIN-5461 Improve logical view descriptions
---
 .../apache/kylin/rest/ddl/SourceTableCheck.java    | 20 ++++++++++-----
 .../kylin/rest/response/LogicalViewResponse.java   |  8 +++++-
 .../apache/kylin/rest/service/SparkDDLService.java | 27 +++++++++++++-------
 .../apache/kylin/rest/service/TableExtService.java |  3 ++-
 .../org/apache/kylin/rest/ddl/ViewCheck.scala      | 29 ++++++++++++++--------
 .../apache/kylin/rest/service/SparkDDLTest.java    |  2 +-
 .../spark/source/NSparkMetadataExplorer.java       | 14 ++++++++---
 .../tool/security/KylinPasswordResetCLITest.java   |  8 +++---
 8 files changed, 74 insertions(+), 37 deletions(-)

diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
index 2ad1223719..c8acb0a65f 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
@@ -17,6 +17,8 @@
  */
 package org.apache.kylin.rest.ddl;
 
+import static org.apache.spark.ddl.DDLConstant.SOURCE_TABLE_RULE_PRIORITY;
+
 import java.util.List;
 import java.util.stream.Collectors;
 
@@ -38,16 +40,22 @@ import lombok.val;
 
 import scala.collection.Seq;
 
-import static org.apache.spark.ddl.DDLConstant.SOURCE_TABLE_RULE_PRIORITY;
-
 public class SourceTableCheck implements DDLCheck {
 
     @Override
     public String[] description(String project, String pageType) {
-        return new String[] {
-                "The source table used to define the view needs to be loaded into the data source already",
-                "定义 view 用到的来源表需要已经加载到数据源"
-        };
+        if ("hive".equalsIgnoreCase(pageType)) {
+            return new String[] {
+                    "The source table used to define the view needs to be loaded into the data source already",
+                    "定义 view 用到的来源表需要已经加载到数据源"
+            };
+        } else {
+            return new String[] {
+                    "The source tables in Logical View should already be loaded into the project data source."
+                            + "Users can only load Logical View created in the same project into the data source",
+                    "定义 Logical View 用到的来源表需要已经加载到数据源,且用户仅能加载同一项目下创建的Logical View"
+            };
+        }
     }
 
     @Override
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
index 2343207aa3..0691fb18a7 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.response;
 
 import org.apache.kylin.metadata.view.LogicalView;
+import org.jetbrains.annotations.NotNull;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -32,7 +33,7 @@ import lombok.NoArgsConstructor;
 @NoArgsConstructor
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
-public class LogicalViewResponse {
+public class LogicalViewResponse implements Comparable<LogicalViewResponse> {
 
     @JsonProperty("table_name")
     private String tableName;
@@ -51,4 +52,9 @@
         this.modifiedUser = view.getModifiedUser();
         this.createdProject = view.getCreatedProject();
     }
+
+    @Override
+    public int compareTo(@NotNull LogicalViewResponse o) {
+        return this.getTableName().compareTo(o.getTableName());
+    }
 }
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
index ef76e21e55..e8f2341770 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
@@ -110,8 +110,8 @@ public class SparkDDLService extends BasicService {
         List<String> descriptionCN = Lists.newArrayList();
         for (DDLCheck checker : ddlChecks) {
             String[] description = checker.description(project, pageType);
-            descriptionEN.addAll(Arrays.asList(description[0].split("\n")));
-            descriptionCN.addAll(Arrays.asList(description[1].split("\n")));
+            descriptionEN.addAll(Arrays.asList(description[0].split("\t")));
+            descriptionCN.addAll(Arrays.asList(description[1].split("\t")));
         }
         return Lists.newArrayList(descriptionEN, descriptionCN);
     }
@@ -143,13 +143,22 @@ public class SparkDDLService extends BasicService {
                     .filter(table -> table.getTableName().toLowerCase().contains(tableName.toLowerCase()))
                     .collect(Collectors.toList());
         }
-        List<LogicalViewResponse> viewResponses =
-                logicalViews.stream().map(LogicalViewResponse::new).collect(Collectors.toList());
-        viewResponses.forEach(table -> {
-            if (!table.getCreatedProject().equalsIgnoreCase(project)) {
-                table.setCreatedSql("***");
-            }
-        });
+        List<LogicalViewResponse> viewResponses = Lists.newArrayList();
+        List<LogicalViewResponse> viewResponsesInProject =
+                logicalViews.stream()
+                        .filter(table -> table.getCreatedProject().equalsIgnoreCase(project))
+                        .map(LogicalViewResponse::new)
+                        .collect(Collectors.toList());
+        List<LogicalViewResponse> viewResponsesNotInProject =
+                logicalViews.stream()
+                        .filter(table -> !table.getCreatedProject().equalsIgnoreCase(project))
+                        .map(LogicalViewResponse::new)
+                        .collect(Collectors.toList());
+        viewResponsesNotInProject.forEach(table -> table.setCreatedSql("***"));
+        Collections.sort(viewResponsesInProject);
+        Collections.sort(viewResponsesNotInProject);
+        viewResponses.addAll(viewResponsesInProject);
+        viewResponses.addAll(viewResponsesNotInProject);
         return viewResponses;
     }
 }
\ No newline at end of file
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
index aabb60b613..cb13bf5944 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
@@ -138,6 +138,7 @@ public class TableExtService extends BasicService {
             canLoadTables.addAll(toLoadTables);
             return;
         }
+        String viewDB = config.getDDLLogicalViewDB();
         LogicalViewManager viewManager = LogicalViewManager.getInstance(config);
         toLoadTables.stream()
                 .filter(table -> !table.getFirst().isLogicalView())
@@ -151,7 +152,7 @@
                     if (logicalTable != null && viewProject.equalsIgnoreCase(project)) {
                         canLoadTables.add(table);
                     } else {
-                        tableResponse.getFailed().add(tableName);
+                        tableResponse.getFailed().add(viewDB + "." + tableName);
                     }
                 });
     }
diff --git a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
index 879283b427..f86f2e0267 100644
--- a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
+++ b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
@@ -53,24 +53,31 @@ class ViewCheck extends DDLCheck {
     if ("hive".equalsIgnoreCase(pageType)) {
       databasesHasAccess.append(listAllDatabasesHasAccess(project))
       syntaxSupport.append("`create view`,`alter view`,`drop view`,`show create table`")
-      cnDescription.append("Hive View 名称需要以 `KE_` 开头\n")
-      enDescription.append("Hive View name should start with `KE_`\n")
+      cnDescription.append("Hive View 名称需要以 `KE_` 开头\t")
+      enDescription.append("Hive View name should start with `KE_`\t")
       cnDescription
-        .append(s"仅支持 ${syntaxSupport} 语法\n")
+        .append(s"仅支持 ${syntaxSupport} 语法\t")
       enDescription
-        .append(s"Only supports ${syntaxSupport} syntax\n")
-      cnDescription.append(s"仅支持创建 Hive View 在如下数据库: ${databasesHasAccess}\n")
-      enDescription.append(s"Only supports creating Hive Views in ${databasesHasAccess}\n")
+        .append(s"Only supports ${syntaxSupport} syntax\t")
+      cnDescription.append(s"仅支持创建 Hive View 在如下数据库: ${databasesHasAccess}\t")
+      enDescription.append(s"Only supports creating Hive Views in ${databasesHasAccess}\t")
     } else {
       cnDescription.append(s"创建不要加 database 名称,系统自动创建到 ${config.getDDLLogicalViewDB} 库中,"
-        + s"删除要加 ${config.getDDLLogicalViewDB} 库名称 \n")
-      enDescription.append(s"Creating does not require adding database, it is automatically created in"
-        + s" ${config.getDDLLogicalViewDB} , deleting should add ${config.getDDLLogicalViewDB} database\n")
+        + s"删除要加 ${config.getDDLLogicalViewDB} 库名称 \t")
+      enDescription.append(s"When creating a new Logical View,please do not use database name,it will be automatically"
+        + s" created in ${config.getDDLLogicalViewDB} database. When dropping a Logical View,"
+        + s"please add ${config.getDDLLogicalViewDB} database name in SQL.\t")
       syntaxSupport.append(" `create logical view`, `drop logical view` ")
       cnDescription
-        .append(s"仅支持 ${syntaxSupport} 语法\n")
+        .append(s"仅支持 ${syntaxSupport} 语法\t")
       enDescription
-        .append(s"Only supports ${syntaxSupport} syntax\n")
+        .append(s"Only ${syntaxSupport} SQL sentences are allowed to execute\t")
+      cnDescription
+        .append(s"操作举例:\n创建视图:CREATE LOGICAL VIEW your_logical_view AS select * from your_loaded_table\n"
+          + s"删除视图:DROP LOGICAL VIEW ${config.getDDLLogicalViewDB}.your_logical_view")
+      enDescription
+        .append(s"Operation Examples:\nCreate:CREATE LOGICAL VIEW your_logical_view AS select * from your_loaded_table"
+          + s";\nDrop:DROP LOGICAL VIEW ${config.getDDLLogicalViewDB}.your_logical_view;")
     }
diff --git a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
index 80e9a5f114..c0cf1de847 100644
--- a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
+++ b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
@@ -281,7 +281,7 @@
         Assert.assertEquals(4, description.get(0).size());
 
         description = ddlService.pluginsDescription("ssb", "logic");
-        Assert.assertEquals(3, description.get(0).size());
+        Assert.assertEquals(4, description.get(0).size());
 
         // view list in project
         List<LogicalViewResponse> logicalViewsInProject = ddlService.listAll("ssb", "");
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
index c9543e11e0..c037c37bfe 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
@@ -17,6 +17,8 @@
  */
 package org.apache.kylin.engine.spark.source;
 
+import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -41,11 +43,12 @@ import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.ISourceAware;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.source.ISampleDataDeployer;
 import org.apache.kylin.source.ISourceMetadataExplorer;
-import org.apache.kylin.metadata.model.NTableMetadataManager;
+
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
@@ -54,6 +57,7 @@ import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.catalog.Database;
 import org.apache.spark.sql.catalyst.catalog.CatalogTableType;
 import org.apache.spark.sql.internal.SQLConf;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -61,7 +65,6 @@ import com.clearspring.analytics.util.Lists;
 import com.google.common.collect.Sets;
 
 import lombok.val;
-import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
 
 public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleDataDeployer, Serializable {
@@ -104,12 +107,15 @@ public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleD
         if (KylinConfig.getInstanceFromEnv().isDDLLogicalViewEnabled()) {
             String logicalViewDB = KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB();
             databases.forEach(db -> {
-                if(db.equalsIgnoreCase(logicalViewDB)){
+                if (db.equalsIgnoreCase(logicalViewDB)) {
                     throw new KylinException(DDL_CHECK_ERROR, "Logical view database should not be duplicated "
                             + "with normal hive database!!!");
                 }
             });
-            databases.add(logicalViewDB);
+            List<String> databasesWithLogicalDB = Lists.newArrayList();
+            databasesWithLogicalDB.add(logicalViewDB);
+            databasesWithLogicalDB.addAll(databases);
+            databases = databasesWithLogicalDB;
         }
         return databases;
     }
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
index 6e677dd0e4..5b067f91fb 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
@@ -27,9 +27,9 @@ import java.nio.charset.Charset;
 import org.apache.commons.dbcp2.BasicDataSourceFactory;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.common.persistence.metadata.jdbc.AuditLogRowMapper;
 import org.apache.kylin.common.util.LogOutputTestCase;
+import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.user.NKylinUserManager;
 import org.apache.kylin.tool.garbage.StorageCleaner;
@@ -65,7 +65,6 @@ public class KylinPasswordResetCLITest extends LogOutputTestCase {
 
     @Test
     public void testResetAdminPassword() throws Exception {
-        overwriteSystemProp("kylin.metadata.random-admin-password.enabled", "true");
         val pwdEncoder = new BCryptPasswordEncoder();
         overwriteSystemProp("kylin.security.user-password-encoder", pwdEncoder.getClass().getName());
         overwriteSystemProp("kylin.metadata.random-admin-password.enabled", "true");
@@ -93,9 +92,10 @@ public class KylinPasswordResetCLITest extends LogOutputTestCase {
         val afterManager = NKylinUserManager.getInstance(config);
 
         Assert.assertFalse(pwdEncoder.matches("KYLIN", afterManager.get(user.getUsername()).getPassword()));
+        Assert.assertTrue(output.toString(Charset.defaultCharset().name()).startsWith("The metadata backup path is"));
         Assert.assertTrue(output.toString(Charset.defaultCharset().name())
-                .startsWith(StorageCleaner.ANSI_RED + "Reset password of [" + StorageCleaner.ANSI_RESET + "ADMIN"
-                        + StorageCleaner.ANSI_RED + "] succeed. The password is "));
+                .contains(StorageCleaner.ANSI_RED + "Reset password of [" + StorageCleaner.ANSI_RESET + "ADMIN"
+                        + StorageCleaner.ANSI_RED + "] succeed. The password is "));
         Assert.assertTrue(output.toString(Charset.defaultCharset().name())
                 .endsWith("Please keep the password properly." + StorageCleaner.ANSI_RESET + "\n"));
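
The switch from split("\n") to split("\t") in SparkDDLService, together with the matching \t separators appended in ViewCheck and SourceTableCheck, is what lets a single description entry carry multi-line text: \n now formats lines inside an entry (the CREATE/DROP examples), while \t delimits the entries themselves. It is also why the "logic" page description count asserted in SparkDDLTest grows from 3 to 4. A minimal self-contained sketch of this behavior; the class name and strings below are illustrative, not Kylin code:

import java.util.Arrays;
import java.util.List;

public class DescriptionSplitSketch {
    public static void main(String[] args) {
        // Two entries joined with \t; the second entry spans several display lines.
        String description = "Only `create logical view`, `drop logical view` are allowed\t"
                + "Operation Examples:\nCreate:CREATE LOGICAL VIEW v AS select * from t;\n"
                + "Drop:DROP LOGICAL VIEW LOGICAL_VIEW_DB.v;";

        // Splitting on \n would wrongly cut the examples entry into three pieces.
        List<String> byNewline = Arrays.asList(description.split("\n"));
        // Splitting on \t keeps each entry intact, embedded newlines and all.
        List<String> byTab = Arrays.asList(description.split("\t"));

        System.out.println(byNewline.size() + " vs " + byTab.size()); // prints "3 vs 2"
    }
}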
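The reworked listAll builds its result in two groups: views created in the requesting project keep their SQL and are listed first, views created elsewhere have their SQL masked with "***" and follow, and each group is sorted by table name through the new Comparable on LogicalViewResponse. A compact sketch of that ordering under those assumptions; ViewSketch and its fields are hypothetical stand-ins for the Kylin classes:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

class ViewSketch implements Comparable<ViewSketch> {
    final String tableName;
    final String createdProject;
    String createdSql;

    ViewSketch(String tableName, String createdProject, String createdSql) {
        this.tableName = tableName;
        this.createdProject = createdProject;
        this.createdSql = createdSql;
    }

    @Override
    public int compareTo(ViewSketch o) {
        // Mirrors LogicalViewResponse.compareTo: alphabetical by table name.
        return tableName.compareTo(o.tableName);
    }

    static List<ViewSketch> listAll(List<ViewSketch> views, String project) {
        List<ViewSketch> inProject = views.stream()
                .filter(v -> v.createdProject.equalsIgnoreCase(project))
                .collect(Collectors.toList());
        List<ViewSketch> others = views.stream()
                .filter(v -> !v.createdProject.equalsIgnoreCase(project))
                .collect(Collectors.toList());
        others.forEach(v -> v.createdSql = "***"); // hide SQL owned by other projects
        Collections.sort(inProject);
        Collections.sort(others);
        List<ViewSketch> result = new ArrayList<>(inProject);
        result.addAll(others); // current project's views first, masked ones after
        return result;
    }
}

With this ordering a caller always sees its own views, alphabetically, ahead of any masked foreign ones.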
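In NSparkMetadataExplorer, listDatabases now fails fast when a physical Hive database collides with the configured logical view database, and it returns the logical view database at the head of the list instead of appending it at the end. A sketch of that flow under those assumptions; IllegalStateException stands in for Kylin's KylinException(DDL_CHECK_ERROR, ...):

import java.util.ArrayList;
import java.util.List;

class DatabaseListingSketch {
    static List<String> withLogicalViewDb(List<String> hiveDatabases, String logicalViewDb) {
        for (String db : hiveDatabases) {
            if (db.equalsIgnoreCase(logicalViewDb)) {
                // Same rule as the patch: a Hive DB must not shadow the logical view DB.
                throw new IllegalStateException(
                        "Logical view database should not be duplicated with normal hive database!!!");
            }
        }
        List<String> result = new ArrayList<>();
        result.add(logicalViewDb);    // logical view DB is now listed first, not last
        result.addAll(hiveDatabases);
        return result;
    }
}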