This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git
commit 9c8484bcc701636097918ff865857d7f3f744c89
Author: XiaoxiangYu <x...@apache.org>
AuthorDate: Thu Sep 1 10:03:35 2022 +0800

    Refine release.sh
---
 README.md                                           |   3 +-
 build/{apache_release => release}/build.sh          |   2 +-
 build/{apache_release => release}/compress.sh       |  29 +-
 .../download-grafana.sh                             |   2 +-
 .../download-influxdb.sh                            |   2 +-
 .../download-postgresql.sh                          |   2 +-
 build/{apache_release => release}/download-spark.sh |   4 +-
 build/{apache_release => release}/functions.sh      |   0
 build/{apache_release => release}/package.sh        |  23 +-
 build/{apache_release => release}/prepare-libs.sh   |   2 +-
 build/{apache_release => release}/prepare.sh        |   4 +-
 build/{apache_release => release}/release.sh        |  29 +-
 build/sbin/spark-test.sh                            |   2 +-
 dev-support/sandbox/sandbox.sh                      |   4 +-
 document/protocol-buffer/metadata.proto             | 466 ---------------------
 15 files changed, 60 insertions(+), 514 deletions(-)

diff --git a/README.md b/README.md
index 07e610c704..867e688515 100644
--- a/README.md
+++ b/README.md
@@ -25,6 +25,5 @@ sh dev-support/unit_testing.sh
 3. Build a Kylin 5 binary
 
 ```shell
-cd build/apache_release
-./release.sh
+./build/release/release.sh
 ```
\ No newline at end of file
diff --git a/build/apache_release/build.sh b/build/release/build.sh
similarity index 96%
rename from build/apache_release/build.sh
rename to build/release/build.sh
index 98c47f066c..4549745e7d 100755
--- a/build/apache_release/build.sh
+++ b/build/release/build.sh
@@ -29,7 +29,7 @@ echo 'Build front-end'
 if [ "${WITH_FRONT}" = "1" ]; then
     cd kystudio
     echo 'Install front-end dependencies'
-    npm install || { exit 1; }
+    npm install || { exit 1; }
     echo 'Install front-end end'
     npm run build || { exit 1; }
     echo 'build front-end dist end'
diff --git a/build/apache_release/compress.sh b/build/release/compress.sh
similarity index 86%
rename from build/apache_release/compress.sh
rename to build/release/compress.sh
index 170bc6dc25..61c7f9eb9d 100755
--- a/build/apache_release/compress.sh
+++ b/build/release/compress.sh
@@ -20,15 +20,9 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 
 exportProjectVersions
 
-# get package name
-current_branch=${branch}
-if [[ "${current_branch}" = "" ]]; then
-    current_branch=$(git branch | sed -n -e 's/^\* \(.*\)/\1/p')
-fi
-
 # package as *.tar.gz
 echo "package name: ${package_name}"
 cd build/
@@ -42,9 +36,12 @@ mkdir -p ${package_name}/lib/ext
 cp -rf spark ${package_name}/
 cp -rf sample_project ${package_name}/
 cp -rf samples ${package_name}/
-cp -rf influxdb ${package_name}/
-cp -rf grafana ${package_name}/
-cp -rf postgresql ${package_name}/
+
+if [[ -d "influxdb" ]]; then
+    cp -rf influxdb ${package_name}/
+    cp -rf grafana ${package_name}/
+    cp -rf postgresql ${package_name}/
+fi
 
 # Add ssb data preparation files
 mkdir -p ${package_name}/tool/ssb
@@ -57,9 +54,6 @@ cp -rf ../build/deploy/grafana/dashboards ${package_name}/tool/grafana/
 cp -rf ../build/deploy/grafana/provisioning ${package_name}/tool/grafana/
 cp -rf ../build/deploy/grafana/custom.ini ${package_name}/tool/grafana/
 
-# Add ops_plan files
-cp -rf ../ops_plan ${package_name}/
-
 # Add conf profiles
 mkdir -p ${package_name}/conf
 mkdir -p ${package_name}/tool/conf
@@ -94,9 +88,12 @@ find ${package_name} -type d -exec chmod 755 {} \;
 find ${package_name} -type f -exec chmod 644 {} \;
 find ${package_name} -type f -name "*.sh" -exec chmod 755 {} \;
 find ${package_name}/spark -type f -exec chmod 755 {} \;
-find ${package_name}/influxdb -type f -exec chmod 755 {} \;
-find ${package_name}/grafana -type f -exec chmod 755 {} \;
-find ${package_name}/postgresql -type f -exec chmod 755 {} \;
+
+if [[ -d "${package_name}/postgresql" ]]; then
+    find ${package_name}/influxdb -type f -exec chmod 755 {} \;
+    find ${package_name}/grafana -type f -exec chmod 755 {} \;
+    find ${package_name}/postgresql -type f -exec chmod 755 {} \;
+fi
 
 rm -rf ../dist
 mkdir -p ../dist
diff --git a/build/apache_release/download-grafana.sh b/build/release/download-grafana.sh
similarity index 98%
rename from build/apache_release/download-grafana.sh
rename to build/release/download-grafana.sh
index b82243b36b..830af0aacc 100755
--- a/build/apache_release/download-grafana.sh
+++ b/build/release/download-grafana.sh
@@ -20,7 +20,7 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 
 rm -rf build/grafana
diff --git a/build/apache_release/download-influxdb.sh b/build/release/download-influxdb.sh
similarity index 97%
rename from build/apache_release/download-influxdb.sh
rename to build/release/download-influxdb.sh
index 2cc14be699..e03dacbf55 100755
--- a/build/apache_release/download-influxdb.sh
+++ b/build/release/download-influxdb.sh
@@ -20,7 +20,7 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 
 rm -rf build/influxdb
diff --git a/build/apache_release/download-postgresql.sh b/build/release/download-postgresql.sh
similarity index 98%
rename from build/apache_release/download-postgresql.sh
rename to build/release/download-postgresql.sh
index ead7d90252..deed819299 100755
--- a/build/apache_release/download-postgresql.sh
+++ b/build/release/download-postgresql.sh
@@ -20,7 +20,7 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 
 mkdir -p build/postgresql
diff --git a/build/apache_release/download-spark.sh b/build/release/download-spark.sh
similarity index 97%
rename from build/apache_release/download-spark.sh
rename to build/release/download-spark.sh
index 023eb8cc96..951d3f6bea 100755
--- a/build/apache_release/download-spark.sh
+++ b/build/release/download-spark.sh
@@ -20,7 +20,7 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 
 rm -rf build/spark
@@ -54,8 +54,6 @@ rm -rf build/spark/data
 rm -rf build/spark/R
 rm -rf build/spark/hive_1_2_2
 
-cp -rf build/hadoop3 build/spark/
-
 if [[ "${WITH_HIVE1}" != "0" ]]; then
     if [ ! -f "build/hive_1_2_2.tar.gz" ]
     then
diff --git a/build/apache_release/functions.sh b/build/release/functions.sh
similarity index 100%
rename from build/apache_release/functions.sh
rename to build/release/functions.sh
diff --git a/build/apache_release/package.sh b/build/release/package.sh
similarity index 75%
rename from build/apache_release/package.sh
rename to build/release/package.sh
index 5568a62982..a9aead7167 100755
--- a/build/apache_release/package.sh
+++ b/build/release/package.sh
@@ -20,9 +20,9 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 
-echo "Packing for KE..."
+echo "Packing for Apache Kylin ..."
 
 # Make share commands exist in environment
 echo "BUILD STAGE 1 - Checking environment..."
@@ -30,22 +30,26 @@ checkCommandExists mvn
 checkCommandExists git
 checkCommandExists npm
 
+# Fetch ${release_version} from pom.xml
 exportProjectVersions
 
-kap_commit_sha1=`git rev-parse HEAD`
-echo "${kap_commit_sha1}@KAP" > build/commit_SHA1
+kylin_commit_sha1=`git rev-parse HEAD`
+if [[ "${current_branch}" = "" ]]; then
+    current_branch=$(git branch | sed -n -e 's/^\* \(.*\)/\1/p')
+fi
+echo "${kylin_commit_sha1}@${current_branch}" > build/commit_SHA1
 
 if [ -z "$BUILD_SYSTEM" ]; then
     BUILD_SYSTEM="MANUAL"
 fi
 echo "Build with ${BUILD_SYSTEM} at" `date "+%Y-%m-%d %H:%M:%S"` >> build/commit_SHA1
 
-KYLIN_VERSION_NAME="Kylin 5 ${release_version}"
+KYLIN_VERSION_NAME="Apache Kylin ${release_version}"
 echo "${KYLIN_VERSION_NAME}" > build/VERSION
 echo "VERSION file content:" ${KYLIN_VERSION_NAME}
 
 echo "BUILD STAGE 2 - Build binaries..."
-sh build/apache_release/build.sh $@ || { exit 1; }
+sh build/release/build.sh $@ || { exit 1; }
 
 if [[ "${WITH_SPARK}" = "1" ]]; then
     echo "BUILD STAGE 3 - Prepare spark..."
@@ -62,16 +66,17 @@ if [[ "${WITH_THIRDPARTY}" = "1" ]]; then
     sh build/apache_release/download-grafana.sh || { exit 1; }
 
     echo "BUILD STAGE 6 - Prepare postgresql..."
-    sh build/apache_release/download-postgresql.sh || { exit 1; }
+    sh build/release/download-postgresql.sh || { exit 1; }
 else
+    echo "BUILD STAGE 4-6 is skipped ..."
     rm -rf build/influxdb
     rm -rf build/grafana
     rm -rf build/postgresql
 fi
 
 echo "BUILD STAGE 7 - Prepare and compress package..."
-sh build/apache_release/prepare.sh ${MVN_PROFILE} || { exit 1; }
-sh build/apache_release/compress.sh || { exit 1; }
+sh build/release/prepare.sh || { exit 1; }
+sh build/release/compress.sh || { exit 1; }
 
 echo "BUILD STAGE 8 - Clean up..."
diff --git a/build/apache_release/prepare-libs.sh b/build/release/prepare-libs.sh
similarity index 96%
rename from build/apache_release/prepare-libs.sh
rename to build/release/prepare-libs.sh
index b6fb19902f..f7d028f4b8 100755
--- a/build/apache_release/prepare-libs.sh
+++ b/build/release/prepare-libs.sh
@@ -20,7 +20,7 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 exportProjectVersions
 
 echo "copy lib file"
diff --git a/build/apache_release/prepare.sh b/build/release/prepare.sh
similarity index 93%
rename from build/apache_release/prepare.sh
rename to build/release/prepare.sh
index 513713239c..77093bc98e 100755
--- a/build/apache_release/prepare.sh
+++ b/build/release/prepare.sh
@@ -21,10 +21,10 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
-source build/apache_release/functions.sh
+source build/release/functions.sh
 exportProjectVersions
 
-sh build/apache_release/prepare-libs.sh || { exit 1; }
+sh build/release/prepare-libs.sh || { exit 1; }
 
 #create ext dir
 mkdir -p build/ext
diff --git a/build/apache_release/release.sh b/build/release/release.sh
similarity index 73%
rename from build/apache_release/release.sh
rename to build/release/release.sh
index 70283d575c..238f872ef7 100755
--- a/build/apache_release/release.sh
+++ b/build/release/release.sh
@@ -19,17 +19,22 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
+source build/release/functions.sh
 
-export PACKAGE_TIMESTAMP=1
 export WITH_SPARK=1
 export WITH_HIVE1=1
 export WITH_THIRDPARTY=0
 export WITH_FRONT=1
+export PACKAGE_OFFICIAL=0
+export WITH_HIVE1=0
+export WITH_THIRDPARTY=0
+export SKIP_COMPILE=0
+
 for PARAM in $@; do
-    if [[ "$PARAM" == "-noTimestamp" ]]; then
-        echo "Package without timestamp..."
-        export PACKAGE_TIMESTAMP=0
+    if [[ "$PARAM" == "-official" ]]; then
+        echo "Package for official release..."
+        export PACKAGE_OFFICIAL=1
         shift
     fi
 
@@ -56,22 +61,30 @@ for PARAM in $@; do
         export WITH_FRONT=0
         shift
     fi
+
+    if [[ "$PARAM" == "-skipCompile" ]]; then
+        echo 'Skip install backend-end dependencies...'
+        export SKIP_COMPILE=1
+        shift
+    fi
 done
 
+## Fetch kylin version from pom
+exportProjectVersions
+
 if [[ -z ${release_version} ]]; then
     release_version='staging'
 fi
 
-if [[ "${PACKAGE_TIMESTAMP}" = "1" ]]; then
+if [[ "${PACKAGE_OFFICIAL}" = "0" ]]; then
     timestamp=`date '+%Y%m%d%H%M%S'`
     export release_version=${release_version}.${timestamp}
 fi
 
-export package_name="Kylin5-beta-${release_version}"
+export package_name="apache-kylin-${release_version}"
 
-sh build/apache_release/package.sh $@ || { echo "package failed!"; exit 1; }
+sh build/release/package.sh $@ || { echo "package failed!"; exit 1; }
 
 echo "Release Version: ${release_version}"
 
-package_name="Kylin5-beta-${release_version}.tar.gz"
+package_name="apache-kylin-${release_version}.tar.gz"
 sha256sum dist/$package_name > dist/${package_name}.sha256sum
 echo "sha256: `cat dist/${package_name}.sha256sum`"
diff --git a/build/sbin/spark-test.sh b/build/sbin/spark-test.sh
index a57fa72e71..be72da546f 100755
--- a/build/sbin/spark-test.sh
+++ b/build/sbin/spark-test.sh
@@ -228,7 +228,7 @@ then
 else
     hadoop ${KYLIN_HADOOP_PARAM} fs -rm -r -skipTrash ${KYLIN_WORKING_DIR}/${input_file}
     rm -rf ${full_input_file}
-    quit "ERROR: Test of submitting spark job failed,error when testing spark with spark configurations in Kylin!"
+    quit "ERROR: Test of submitting spark job failed,error when testing spark with spark configurations in Apache Kylin!"
 fi
 
 SPARK_SUBMIT_CLUSTER_MODE=$(echo "$SPARK_ENGINE_CONF_PROPS" | grep -c -E "spark.submit.deployMode=cluster")
diff --git a/dev-support/sandbox/sandbox.sh b/dev-support/sandbox/sandbox.sh
index 85779c413a..d5f6dab4c3 100755
--- a/dev-support/sandbox/sandbox.sh
+++ b/dev-support/sandbox/sandbox.sh
@@ -82,14 +82,14 @@ function main() {
     fi
 
     info "* Downloading spark..."
-    ${PROJECT_DIR}/build/apache_release/download-spark.sh
+    ${PROJECT_DIR}/build/release/download-spark.sh
     if [[ $? != 0 ]]; then
        warn " Download spark failed, please manually execute 'download-spark.sh'"
     fi
 
     info "* Setting spark dependency..."
     cp ${PROJECT_DIR}/src/server/target/jars/log4j* ${SPARK_HOME}/jars
-    cp ${WORKDIR}/libs/mysql-connector-java-8.0.16.jar ${SPARK_HOME}/jars
+# cp ${WORKDIR}/libs/mysql-connector-java-8.0.16.jar ${SPARK_HOME}/jars
 
     info "* Setting IDEA run configurations..."
     if [[ ! -d "${PROJECT_DIR}/.idea/runConfigurations" ]]; then
diff --git a/document/protocol-buffer/metadata.proto b/document/protocol-buffer/metadata.proto
deleted file mode 100644
index f1dc3f877d..0000000000
--- a/document/protocol-buffer/metadata.proto
+++ /dev/null
@@ -1,466 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-syntax = "proto3";
-
-package org.apache.kylin.protobuf;
-
-option java_multiple_files = true;
-option java_package = "org.apache.kylin.protobuf";
-option java_outer_classname = "KylinDataModel";
-
-
-message ProjectInstance {
-    string name = 1;
-    enum ProjectStatusEnum {
-        DISABLED = 0;
-        ENABLED = 1;
-    };
-    ProjectStatusEnum status = 2;
-    string default_database = 3;
-    string description = 9;
-
-    // common properties
-    string uuid = 4;
-    string owner = 5;
-    int64 createTime = 6;
-    int64 lastModified = 7;
-    string version = 8;
-
-    // Configuration settings
-    map<string, string> settings = 10;
-}
-
-message TableDesc {
-    string uuid = 1;
-    int64 lastModified = 2;
-    int64 createTime = 3;
-    string name = 4;
-
-    SourceTypeEnum sourceType = 5;
-    enum SourceTypeEnum {
-        ID_HIVE = 0;
-        ID_STREAMING = 1;
-        ID_SPARKSQL = 5;
-        ID_EXTERNAL = 7;
-        ID_JDBC = 8;
-        ID_SPARK = 9;
-        ID_CSV = 11;
-        ID_FILE = 13;
-    }
-
-    CatalogTableType tableType = 6;
-    enum CatalogTableType {
-        EXTERNAL = 0;
-        MANAGED = 1;
-        VIEW = 2;
-    }
-
-    string project = 10;
-    string database = 11;
-    repeated ColumnDesc columns = 9;
-    repeated ColumnDesc partitionColumn = 14;
-
-    string lastSnapshotPath = 12;
-    int64 lastSnapshotSize = 13;
-    map<string, int64> snapshotPartitions = 15;
-    map<string, SnapshotPartitionInfo> snapshotPartitionInfo = 16;
-    int64 snapshotTotalRows = 17;
-    string selectedSnapshotPartitionCol = 18;
-    string snapshotPartitionCol = 19;
-    int64 snapshotLastModified = 20;
-    bool snapshotHasBroken = 21;
-}
-
-message DataModel {
-    // common properties
-    string uuid = 1;
-    int64 createTime = 2;
-    int64 lastModified = 3;
-    string version = 4;
-    string alias = 5;
-    string owner = 6;
-    string description = 7;
-
-    string rootFactTableName = 8;
-    repeated JoinTableDesc joinTables = 9;
-    string filterCondition = 10;
-
-    repeated NamedColumn allNamedColumns = 13; //dimensions
-    repeated Measure allMeasures = 14; //measures
-    repeated ComputedColumnDesc computedColumnDescs = 15;
-    PartitionDesc partitionDesc = 11;
-    DataCheckDesc dataCheckDesc = 18;
-
-    SegmentConfig segmentConfig = 17;
-    ModelType modelType = 26;
-    enum ModelType {
-        BATCH = 0;
-        STREAMING = 1;
-        HYBRID = 2;
-        UNKNOWN = 3;
-    }
-    RealizationCapacity capacity = 12;
-    enum RealizationCapacity {
-        SMALL = 0;
-        MEDIUM = 1;
-        LARGE = 2;
-    }
-    BrokenReason brokenReason = 20;
-    enum BrokenReason {
-        SCHEMA = 0;
-        NULL = 1;
-        EVENT = 2;
-    }
-}
-
-message JoinTableDesc{
-    string table = 1;
-    TableKind kind = 2;
-    enum TableKind {
-        FACT = 0;
-        LOOKUP = 1;
-    }
-    string alias = 3;
-    JoinDesc join = 4;
-    ModelJoinRelationTypeEnum joinRelationTypeEnum = 6;
-    enum ModelJoinRelationTypeEnum {
-        MANY_TO_ONE = 0;
-        ONE_TO_ONE = 1;
-        ONE_TO_MANY = 2;
-        MANY_TO_MANY = 3;
-    }
-}
-
-message NamedColumn {
-    int32 id = 1;
-    string name = 2;
-    string aliasDotColumn = 3;
-
-    // logical delete symbol
-    ColumnStatus status = 4;
-    enum ColumnStatus {
-        TOMB = 0;
-        EXIST = 1;
-        DIMENSION = 2;
-    }
-}
-
-message Measure {
-    string name = 1;
-    int32 id = 2;
-
-    bool tomb = 3;
-    FunctionDesc function = 4;
-    string column = 5;
-    string comment = 6;
-}
-
-message ComputedColumnDesc {
-    // the table identity DB.TABLE (ignoring alias) in the model where the computed column be int64 to
-    // this field is more useful for frontend, for backend code, usage should be avoided
-    string tableIdentity = 1;
-    string tableAlias = 2;
-    string columnName = 3; // the new col name
-    string expression = 4;
-    string datatype = 5;
-
-    string comment = 6;
-    string uuid = 7;
-}
-
-message IndexPlan {
-    string description = 1;
-    int64 retentionRange = 8;
-    int32 engineType = 9;
-    repeated int64 autoMergeTimeRanges = 7;
-
-    RuleBasedIndex ruleBasedIndex = 3; // agg group
-    repeated IndexEntity indexes = 4;
-    repeated DictionaryDesc dictionaries = 10;
-
-    repeated IndexEntity toBeDeletedIndexes = 6;
-    int64 nextAggregationIndexId = 11;
-    int64 nextTableIndexId = 12;
-    repeated int32 aggShardByColumns = 13;
-    map<int64, int32> layoutBucketNumMapping = 15;
-
-    map<string, string> overrideProps = 5;
-}
-
-message RuleBasedIndex {
-    repeated int32 dimensions = 2; // dimension id
-    repeated int32 measures = 3; //measure id
-    repeated int32 globalDimCap = 4;
-    repeated AggregationGroup aggregationGroups = 5;
-    repeated int32 layoutIdMapping = 6; // all of the layout id in agg group
-
-    int32 parentForward = 7;
-    int64 indexStartId = 8;
-    int32 schedulerVersion = 11;
-    bool indexUpdateEnabled = 12;
-
-    int64 lastModifiedTime = 9;
-}
-
-message IndexEntity {
-    int64 id = 1;
-    repeated int32 dimensions = 2;
-    repeated int32 measures = 3;
-    repeated LayoutEntity layouts = 4;
-    int64 nextLayoutOffset = 5;
-}
-
-message LayoutEntity {
-    int64 id = 1;
-    int64 updateTime = 10;
-    string name = 2;
-    string owner = 3;
-
-    repeated int32 colOrder = 4;
-    repeated int32 shardByColumns = 6;
-    repeated int32 partitionByColumns = 7;
-    repeated int32 sortByColumns = 8;
-
-    int32 storageType = 9;
-    bool isBase = 13;
-    string draftVersion = 14;
-    Range indexRange = 15;
-}
-
-message DataCheckDesc {
-    CheckOptions checkOptions = 1;
-    enum CheckOptions {
-        PK_DUPLICATE = 0;
-        DATA_SKEW = 1;
-        NULL_OR_BLANK_VALUE = 2;
-        FORCE_ANALYSIS_LOOKUP = 3;
-    }
-
-    int64 faultThreshold = 2;
-    int64 faultActions = 3;
-}
-
-message DataFlow {
-    RealizationStatusEnum status = 1;
-    enum RealizationStatusEnum {
-        OFFLINE = 0;
-        ONLINE = 1;
-        BROKEN = 2;
-    }
-
-    int32 cost = 2;
-    int32 queryHitCount = 3;
-    int64 lastQueryTime = 4;
-    repeated DataSegment segments = 6;
-}
-
-message DataSegment {
-    string id = 1;
-    string name = 2;
-    int64 createTimeUTC = 3;
-
-    SegmentStatusEnum status = 4;
-    enum SegmentStatusEnum {
-        NEW = 0;
-        READY = 1;
-        WARNING = 2;
-    }
-    SegmentRange segmentRange = 5;
-    TimeRange timeRange = 6;
-
-    map<string, DimensionRangeInfo> dimensionRangeInfoMap = 7;
-    map<string, string> dictionaries = 9; // table/column ==> dictionary resource path
-    map<string, string> snapshots = 10; // table name ==> snapshot resource path
-    int64 lastBuildTime = 11; // last segment incr build job
-
-    // stats
-    int64 sourceCount = 12;
-    int64 sourceBytesSize = 13;
-    map<string, int64> columnSourceBytes = 14;
-    map<string, int64> oriSnapshotSize = 15;
-    int64 storageSize = 16;
-    int64 storageFileCount = 17;
-    map<string, string> additionalInfo = 18;
-
-
-    // resumable flag, don't cross building jobs
-    // worked only in HDFSMeteStore
-    bool isSnapshotReady = 20;
-
-    // resumable flag, don't cross building jobs
-    // worked only in HDFSMeteStore
-    bool isDictReady = 21;
-
-    // resumable flag, don't cross building jobs
-    // worked only in HDFSMeteStore
-    bool isFlatTableReady = 22;
-
-    // resumable flag, don't cross building jobs
-    // worked only in HDFSMeteStore
-    bool isFactViewReady = 23;
-
-    int64 maxBucketId = 25;
-    map<string, string> parameters = 8;
-}
-
-
-message DataFlowDetails {
-    string dataflowId = 1;
-    repeated DataLayout layouts = 2;
-}
-
-message DataLayout {
-    int64 layoutId = 1;
-    int64 createTime = 11;
-
-    string buildJobId = 2;
-
-    int64 rows = 3;
-    int64 byteSize = 4;
-    int64 fileCount = 5;
-    int64 sourceRows = 6;
-    int64 sourceByteSize = 7;
-    // partition num may be diff with file num
-    int32 partitionNum = 8;
-    repeated string partitionValues = 9;
-
-    bool isReady = 10;
-}
-
-
-
-message JoinDesc {
-    string type = 1;
-    repeated string primaryKey = 2;
-    repeated string foreignKey = 3;
-    NonEquiJoinCondition nonEquiJoinCondition = 4;
-    string primaryTable = 5;
-    string foreignTable = 6;
-}
-
-message NonEquiJoinCondition {
-    NonEquiJoinConditionType type = 1;
-    enum NonEquiJoinConditionType {
-        EXPRESSION = 0; // expression with other NonEquiJoinCondition as operands
-        COLUMN = 1;
-        LITERAL = 2;
-    }
-    string dataType = 2;
-    SqlKind op = 3; // kind of the operator
-    enum SqlKind {
-        OTHER = 0;
-        SELECT = 1;
-        JOIN = 2;
-        IDENTIFIER = 3;
-        // .etc
-    }
-    string opName = 4; // name of the operator
-    repeated NonEquiJoinCondition operands = 5; // nested operands
-    string value = 6; // literal or column identity at leaf node
-    string expr = 7; // set at runtime with model init
-}
-
-message FunctionDesc {
-    string expression = 1;
-    repeated ParameterDesc parameters = 2;
-    string returnType = 3;
-
-    map<string, string> configuration = 4;
-}
-
-message ParameterDesc {
-    string type = 1;
-    string value = 2;
-}
-
-message PartitionDesc {
-    string partitionDateColumn = 1;
-    string partitionDateFormat = 2;
-    PartitionType partitionType = 3;
-    enum PartitionType {
-        APPEND = 0;
-        UPDATE_INSERT = 1;
-    }
-    string partitionConditionBuilderClz = 4;
-}
-
-message ColumnDesc {
-    string id = 1;
-    string name = 2;
-
-    string datatype = 3;
-    string comment = 5;
-    string caseSensitiveName = 8;
-}
-
-message SnapshotPartitionInfo {
-    int64 totalRows = 1;
-}
-
-message StorageDescription {
-    string path = 1;
-}
-
-message AggregationGroup {
-    repeated int32 includes = 1;
-    repeated int32 measures = 2;
-    SelectRule selectRule = 3;
-    Range indexRange = 4;
-}
-
-enum Range {
-    BATCH = 0;
-    STREAMING = 1;
-    HYBRID = 2;
-    EMPTY = 3;
-}
-
-message SelectRule {
-    repeated int32 hierarchyDims = 1;
-    repeated int32 mandatoryDims = 2;
-    repeated int32 jointDims = 3;
-    repeated int32 dimCap = 4;
-}
-
-message DictionaryDesc {
-    int32 id = 1;
-    int32 reuseId = 2;
-    string builderClass = 3;
-}
-
-message DimensionRangeInfo {
-    int64 min = 1;
-    int64 max = 2;
-}
-
-message SegmentRange {
-    int64 start = 1;
-    int64 end = 2;
-}
-
-message TimeRange {
-    int64 start = 1;
-    int64 end = 2;
-}
-
-message SegmentConfig {
-}
-
-
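
For anyone trying the refactored packaging flow, here is a minimal usage sketch assembled only from what this patch shows (the renamed build/release/release.sh entry point, the new -official and -skipCompile flags, and the apache-kylin-* package naming); treat it as an illustration, not authoritative documentation:

    # The script resolves the repository root itself (cd ${dir}/../..), so it
    # can be invoked as the updated README does. By default PACKAGE_OFFICIAL=0,
    # so a timestamp suffix is appended, producing
    # dist/apache-kylin-<release_version>.<timestamp>.tar.gz
    ./build/release/release.sh

    # -official sets PACKAGE_OFFICIAL=1: no timestamp suffix, producing
    # dist/apache-kylin-<release_version>.tar.gz plus a .sha256sum file
    ./build/release/release.sh -official

    # -skipCompile sets SKIP_COMPILE=1 to skip re-installing back-end
    # dependencies on a repeat build
    ./build/release/release.sh -skipCompile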