This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch doc5.0
in repository https://gitbox.apache.org/repos/asf/kylin.git
The following commit(s) were added to refs/heads/doc5.0 by this push:
     new c4c016845b  Update download for 5.0-alpha
c4c016845b is described below

commit c4c016845b7f8665d6b508b21fe8f4b182576f28
Author: XiaoxiangYu <x...@apache.org>
AuthorDate: Tue Apr 25 16:13:41 2023 +0800

    Update download for 5.0-alpha
---
 .../protocol-buffer/metadata.proto         | 83 ++++++++++++++--------
 website/docs/development/how_to_release.md |  2 +-
 website/docs/development/how_to_test.md    | 19 ++++-
 website/docs/download.md                   | 10 +--
 website/docs/quickstart/deploy_kylin.md    | 15 ++--
 5 files changed, 85 insertions(+), 44 deletions(-)

diff --git a/website/blog/2022-12-18-Introduction_of_Metadata/protocol-buffer/metadata.proto b/website/blog/2022-12-18-Introduction_of_Metadata/protocol-buffer/metadata.proto
index 1173f6e676..899243e362 100644
--- a/website/blog/2022-12-18-Introduction_of_Metadata/protocol-buffer/metadata.proto
+++ b/website/blog/2022-12-18-Introduction_of_Metadata/protocol-buffer/metadata.proto
@@ -69,18 +69,14 @@ message ProjectInstance {
 
     string defaultDatabase = 6;
     string description = 7;
 
-    /**
-     * TODO
-     */
+    // [TODO]
     string principal = 8;
 
-    /**
-     * TODO
-     */
+    // [TODO]
     string keytab = 9;
-
-    MaintainModelType maintain_model_type = 10; // [Trivial]
+    // [Trivial]
+    MaintainModelType maintain_model_type = 10;
 
     /**
      * Project-level configuration (key-value pairs), which can override the global configuration
@@ -93,16 +89,12 @@ message ProjectInstance {
     SegmentConfig segment_config = 12;
 
     enum ProjectStatusEnum {
-        /**
-         * TODO: what is the impact of disabling a project
-         */
+        // [TODO]
         DISABLED = 0;
         ENABLED = 1;
     };
 
-    /**
-     * TODO
-     */
+    // [TODO]
     enum MaintainModelType {
         MANUAL_MAINTAIN = 0;
     }
@@ -121,13 +113,13 @@ message TableDesc {
      */
     CatalogTableType tableType = 5;
 
-    bool isTop = 6; // [Trivial] for front end only
+    // [Trivial] for front end only
+    bool isTop = 6;
 
-    string data_gen = 7; // [Trivial]
+    // [Trivial]
+    string data_gen = 7;
 
-    /**
-     * TODO
-     */
+    // [TODO]
     string increment_loading = 8;
 
     string last_snapshot_path = 9;
@@ -183,9 +175,6 @@ message TableDesc {
  */
 message NDataModel {
 
-    /**
-     * common properties (from RootPersistentEntity)
-     */
     RootPersistentEntity basicProp = 1;
 
     // model basic properties
@@ -210,7 +199,7 @@ message NDataModel {
     string filterCondition = 23;
     PartitionDesc partitionDesc = 24;
 
-    // dimension and other columns
+    // all dimension columns and deleted columns
    repeated NamedColumn allNamedColumns = 25;
     repeated Measure allMeasures = 26;
     repeated ComputedColumnDesc computedColumnDescs = 27;
@@ -224,6 +213,7 @@ message NDataModel {
     // check https://kylin.apache.org/5.0/blog/introduction_of_metadata_cn#significant-change
     int32 semanticVersion = 42;
 
+    // [Trivial]
     int32 storageType = 43;
 
     // [Trivial]
@@ -237,15 +227,20 @@ message NDataModel {
 
     // Broken reason because of schema change
     BrokenReason brokenReason = 47;
 
-    RealizationCapacity capacity = 48; // [Trivial]
+    // [Trivial]
+    RealizationCapacity capacity = 48;
+
+    // TODO
     string multiPartitionDesc = 49;
+
+    // TODO
     string multiPartitionKeyMapping = 50;
 
     // [Trivial] for streaming feature
     string fusionId = 51;
 
-    // Some configuration
+    // Configuration for automatic segment merge
     SegmentConfig segmentConfig = 61;
 
     enum ModelType {
@@ -254,7 +249,7 @@ message NDataModel {
         HYBRID = 2; // Source is Kafka and Hive, still under development
         UNKNOWN = 3;
     }
-    enum RealizationCapacity {// Useless
+    enum RealizationCapacity {
         SMALL = 0;
         MEDIUM = 1;
         LARGE = 2;
@@ -273,19 +268,45 @@ message NDataModel {
 
 message IndexPlan {
     string description = 1;
     int64 retentionRange = 8;
+
+    // [Trivial]
     int32 engineType = 9;
     repeated int64 autoMergeTimeRanges = 7;
 
-    RuleBasedIndex ruleBasedIndex = 3; // agg group
+    /**
+     * Static index pruning rules, e.g. the aggregation group rules
+     */
+    RuleBasedIndex ruleBasedIndex = 3;
+
+    /**
+     * The indexes field manages
+     * 1. table indexes defined by the user
+     * 2. the base table index and the base aggregate index
+     * 3. indexes generated automatically by Kylin extensions, e.g. CubePlanner
+     */
     repeated IndexEntity indexes = 4;
+
+    /**
+     * Properties related to the global dictionary
+     */
     repeated DictionaryDesc dictionaries = 10;
 
+    /**
+     * Indexes that need to be deleted because of schema changes
+     */
     repeated IndexEntity toBeDeletedIndexes = 6;
-    int64 nextAggregationIndexId = 11;
-    int64 nextTableIndexId = 12;
+
+    // TODO
+    map<int32, string> indexPlanOverrideIndexes = 10;
+
+    // TODO
     repeated int32 aggShardByColumns = 13;
+
+    // TODO
     map<int64, int32> layoutBucketNumMapping = 15;
 
+    int64 nextAggregationIndexId = 11;
+    int64 nextTableIndexId = 12;
     map<string, string> overrideProps = 5;
 }
@@ -333,7 +354,6 @@ message NamedColumn {
     string name = 2;
     string aliasDotColumn = 3;
 
-    // logical delete symbol
     ColumnStatus status = 4;
     enum ColumnStatus {
         TOMB = 0;
@@ -541,11 +561,12 @@ message PartitionDesc {
     string partitionDateColumn = 1;
     string partitionDateFormat = 2;
     PartitionType partitionType = 3;
+    string partitionConditionBuilderClz = 4;
+
     enum PartitionType {
         APPEND = 0;
         UPDATE_INSERT = 1;
     }
-    string partitionConditionBuilderClz = 4;
 }
 
 message ColumnDesc {
diff --git a/website/docs/development/how_to_release.md b/website/docs/development/how_to_release.md
index d0d8fba09d..c1611e5477 100644
--- a/website/docs/development/how_to_release.md
+++ b/website/docs/development/how_to_release.md
@@ -18,4 +18,4 @@ last_update:
 
 # How to release
 
-to be continued
\ No newline at end of file
+Please check the [How to release in docker container](https://github.com/apache/kylin/tree/kylin5/build/release/release-pipeline-docker) guide.
\ No newline at end of file
diff --git a/website/docs/development/how_to_test.md b/website/docs/development/how_to_test.md
index e0cc1e6905..16d46cfa53 100644
--- a/website/docs/development/how_to_test.md
+++ b/website/docs/development/how_to_test.md
@@ -16,8 +16,21 @@ last_update:
   date: 08/24/2022
 ---
 
-# How to test
+# How to run tests
 
 ```shell
-sh dev-support/unit_testing.sh
-```
\ No newline at end of file
+bash dev-support/unit_testing.sh
+```
+
+This script takes about 1 to 1.5 hours to finish. The output is saved in `ci-results-YYYY-mm-dd.txt`.
+
+If all tests pass, the console prints:
+
+```text
+...
+[INFO] --- maven-surefire-plugin:3.0.0-M5:test (default-test) @ kylin-sparder ---
+[INFO] --- maven-surefire-plugin:3.0.0-M5:test (default-test) @ kylin-spark-common ---
+[INFO] --- maven-surefire-plugin:3.0.0-M5:test (default-test) @ kylin-spark-it ---
+<Failed tests on following modules>
+<Failed cases statistics>
+```
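As a side note on the test script above, the end-of-run summary can be scanned from the saved results file with standard tools; a minimal sketch, assuming the `ci-results-YYYY-mm-dd.txt` naming mentioned above and a POSIX shell with `date`:

```shell
# Show the test summary lines and any failure markers from today's results file
grep -iE "tests run:|fail" "ci-results-$(date +%Y-%m-%d).txt"
```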
diff --git a/website/docs/download.md b/website/docs/download.md
index 42e9e6304d..e1244c31b4 100644
--- a/website/docs/download.md
+++ b/website/docs/download.md
@@ -4,13 +4,13 @@ sidebar_position: 1
 ---
 
 # Download
 
+### v5.0.0-alpha
+- This is the first release of Kylin 5.X, with 112 new features, improvements, and bug fixes.
+- Source download: [apache-kylin-5.0.0-alpha-source-release.zip](https://www.apache.org/dyn/closer.cgi/kylin/apache-kylin-5.0.0-alpha/apache-kylin-5.0.0-alpha-source-release.zip) \[[asc](https://www.apache.org/dist/kylin/apache-kylin-5.0.0-alpha/apache-kylin-5.0.0-alpha-source-release.zip.asc)\] \[[sha256](https://www.apache.org/dist/kylin/apache-kylin-5.0.0-alpha/apache-kylin-5.0.0-alpha-source-release.zip.sha256)\]
+- Binary download: [apache-kylin-5.0.0-alpha-bin.tar.gz](https://www.apache.org/dyn/closer.cgi/kylin/apache-kylin-5.0.0-alpha/apache-kylin-5.0.0-alpha-bin.tar.gz) \[[asc](https://www.apache.org/dist/kylin/apache-kylin-5.0.0-alpha/apache-kylin-5.0.0-alpha-bin.tar.gz.asc)\] \[[sha256](https://www.apache.org/dist/kylin/apache-kylin-5.0.0-alpha/apache-kylin-5.0.0-alpha-bin.tar.gz.sha256)\]
 
-## Release Download
-Please wait.
-
-
-## Dockerhub for learning purpose
+### Dockerhub for learning purposes
 
 If you want to learn about the new features Kylin 5 provides and you only have a laptop, we recommend pulling the docker image and checking the [standalone image in dockerhub](https://hub.docker.com/r/apachekylin/apache-kylin-standalone).
 
diff --git a/website/docs/quickstart/deploy_kylin.md b/website/docs/quickstart/deploy_kylin.md
index b559ff76bc..cc42547fc2 100644
--- a/website/docs/quickstart/deploy_kylin.md
+++ b/website/docs/quickstart/deploy_kylin.md
@@ -18,13 +18,13 @@ In this guide, we will explain how to quickly install and start Kylin 5.
 
 Before proceeding, please make sure the [Prerequisite](../deployment/on-premises/prerequisite.md) is met.
 
-### Pull docker for learning
+### Try Kylin in one command
 
 If you want to learn about the new features Kylin 5 provides and you only have a laptop, we recommend pulling the docker image and checking the [standalone image in dockerhub](https://hub.docker.com/r/apachekylin/apache-kylin-standalone).
 
 ```shell
-docker pull apachekylin/apache-kylin-standalone:5.0.0
+docker pull apachekylin/apache-kylin-standalone:5.0.0-alpha
 ```
 
@@ -49,6 +49,13 @@ docker pull apachekylin/apache-kylin-standalone:5.0.0
    ```
    The decompressed directory is referred to as **$KYLIN_HOME** or **root directory**.
 
+4. Download Spark.
+
+   ```shell
+   bash $KYLIN_HOME/sbin/download-spark-user.sh
+   ```
+   After executing the above script, a `spark` directory will be created under `$KYLIN_HOME`.
+
 5. Prepare RDBMS metastore.
 
    If PostgreSQL or MySQL has been installed already in your environment, you can choose one of them as the metastore.
@@ -62,7 +69,7 @@ docker pull apachekylin/apache-kylin-standalone:5.0.0
    * [Use PostgreSQL as Metastore](../deployment/on-premises/rdbms_metastore/postgresql/default_metastore.md).
    * [Use MySQL as Metastore](../deployment/on-premises/rdbms_metastore/mysql/mysql_metastore.md).
 
-6. (optional) Install InfluxDB.
+6. (outdated) Install InfluxDB.
 
    Kylin uses InfluxDB to save various system monitoring information. If you do not need to view related information, you can skip this step. It is strongly recommended to complete this step in a production environment and use related monitoring functions.
 
@@ -78,7 +85,7 @@ docker pull apachekylin/apache-kylin-standalone:5.0.0
 
    For more details, please refer to [Use InfluxDB as Time-Series Database](../operations/monitoring/influxdb/influxdb.md).
 
-6. Create a working directory on HDFS and grant permissions.
+7. Create a working directory on HDFS and grant permissions.
 
    The default working directory is `/kylin`. Also ensure the Linux account has access to its home directory on HDFS.
    Meanwhile, create the directory `/kylin/spark-history` to store the Spark log files.
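To make the last step concrete, the HDFS preparation could look roughly like the following. This is a minimal sketch, assuming the default `/kylin` working directory, a `hadoop` client on the PATH, and a placeholder `kylin_user` Linux account (adjust both to your environment):

```shell
# Create the Kylin working directory and the Spark history directory on HDFS
hadoop fs -mkdir -p /kylin/spark-history

# Give the Kylin Linux account ownership of the working directory (placeholder user name)
hadoop fs -chown -R kylin_user /kylin

# Make sure the account's HDFS home directory exists and is owned by it
hadoop fs -mkdir -p /user/kylin_user
hadoop fs -chown -R kylin_user /user/kylin_user
```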