This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 41a230b721 [fix] iceberg catalog to specify the version and time (#22209)
41a230b721 is described below

commit 41a230b7212e1b5aa6f759426a0b3dc59e0a29a8
Author: wuwenchi <wuwenchi...@hotmail.com>
AuthorDate: Thu Jul 27 12:04:41 2023 +0800

    [fix] iceberg catalog to specify the version and time (#22209)
    
    problem:
    1. Create an iceberg-type catalog.
    2. Use the iceberg catalog to specify a version:
    ```
    mysql> show catalog iceberg;
    +----------------------+--------------------------+
    | Key                  | Value                    |
    +----------------------+--------------------------+
    | type                 | iceberg                  |
    | iceberg.catalog.type | hms                      |
    | hive.metastore.uris  | thrift://127.0.0.1:9083  |
    | hadoop.username      | hadoop                   |
    | create_time          | 2023-07-25 16:51:00.522  |
    +----------------------+--------------------------+
    5 rows in set (0.02 sec)
    
    mysql> select * from iceberg.iceberg_db.tb1 FOR VERSION AS OF 8783036402036752909;
    ERROR 5090 (42000): errCode = 2, detailMessage = Only iceberg/hudi external table supports time travel in current version
    ```
    
    change:
    Add the `ICEBERG_EXTERNAL_TABLE` type to the table types that support specifying the version and time.
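    
    With this change, the same time-travel queries against the iceberg-type catalog are expected to pass analysis. A minimal usage sketch (catalog, table, and snapshot ID taken from the example above; the timestamp value is illustrative):
    ```
    -- time travel by snapshot ID against the iceberg-type catalog (rejected before this fix)
    select * from iceberg.iceberg_db.tb1 FOR VERSION AS OF 8783036402036752909;
    -- time travel by timestamp; the value must match the Doris datetime format
    select * from iceberg.iceberg_db.tb1 FOR TIME AS OF '2023-07-25 16:51:00';
    ```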
---
 .../java/org/apache/doris/analysis/TableRef.java   | 57 +++++++++++++---------
 .../iceberg/test_external_catalog_icebergv2.out    | 11 +++++
 .../iceberg/test_external_catalog_icebergv2.groovy | 28 +++++++++--
 3 files changed, 68 insertions(+), 28 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
index 730826fa2a..cd3637ff96 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
@@ -534,32 +534,41 @@ public class TableRef implements ParseNode, Writable {
             return;
         }
         TableIf.TableType tableType = this.getTable().getType();
-        if (tableType != TableIf.TableType.HMS_EXTERNAL_TABLE) {
-            ErrorReport.reportAnalysisException(ErrorCode.ERR_NONSUPPORT_TIME_TRAVEL_TABLE);
-        }
-        HMSExternalTable extTable = (HMSExternalTable) this.getTable();
-        switch (extTable.getDlaType()) {
-            case ICEBERG:
-                if (tableSnapshot.getType() == TableSnapshot.VersionType.TIME) {
-                    String asOfTime = tableSnapshot.getTime();
-                    Matcher matcher = TimeUtils.DATETIME_FORMAT_REG.matcher(asOfTime);
-                    if (!matcher.matches()) {
-                        throw new AnalysisException("Invalid datetime string: " + asOfTime);
+        if (tableType == TableIf.TableType.HMS_EXTERNAL_TABLE) {
+            HMSExternalTable extTable = (HMSExternalTable) this.getTable();
+            switch (extTable.getDlaType()) {
+                case ICEBERG:
+                    if (tableSnapshot.getType() == TableSnapshot.VersionType.TIME) {
+                        String asOfTime = tableSnapshot.getTime();
+                        Matcher matcher = TimeUtils.DATETIME_FORMAT_REG.matcher(asOfTime);
+                        if (!matcher.matches()) {
+                            throw new AnalysisException("Invalid datetime string: " + asOfTime);
+                        }
                     }
+                    break;
+                case HUDI:
+                    if (tableSnapshot.getType() == TableSnapshot.VersionType.VERSION) {
+                        throw new AnalysisException("Hudi table only supports timestamp as snapshot ID");
+                    }
+                    try {
+                        tableSnapshot.setTime(HudiUtils.formatQueryInstant(tableSnapshot.getTime()));
+                    } catch (Exception e) {
+                        throw new AnalysisException("Failed to parse hudi timestamp: " + e.getMessage(), e);
+                    }
+                    break;
+                default:
+                    ErrorReport.reportAnalysisException(ErrorCode.ERR_NONSUPPORT_TIME_TRAVEL_TABLE);
+            }
+        } else if (tableType == TableIf.TableType.ICEBERG_EXTERNAL_TABLE) {
+            if (tableSnapshot.getType() == TableSnapshot.VersionType.TIME) {
+                String asOfTime = tableSnapshot.getTime();
+                Matcher matcher = TimeUtils.DATETIME_FORMAT_REG.matcher(asOfTime);
+                if (!matcher.matches()) {
+                    throw new AnalysisException("Invalid datetime string: " + asOfTime);
                 }
-                break;
-            case HUDI:
-                if (tableSnapshot.getType() == TableSnapshot.VersionType.VERSION) {
-                    throw new AnalysisException("Hudi table only supports timestamp as snapshot ID");
-                }
-                try {
-                    tableSnapshot.setTime(HudiUtils.formatQueryInstant(tableSnapshot.getTime()));
-                } catch (Exception e) {
-                    throw new AnalysisException("Failed to parse hudi timestamp: " + e.getMessage(), e);
-                }
-                break;
-            default:
-                ErrorReport.reportAnalysisException(ErrorCode.ERR_NONSUPPORT_TIME_TRAVEL_TABLE);
+            }
+        } else {
+            ErrorReport.reportAnalysisException(ErrorCode.ERR_NONSUPPORT_TIME_TRAVEL_TABLE);
         }
     }
 
diff --git a/regression-test/data/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.out b/regression-test/data/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.out
index a36c78742c..a6e68d2f62 100644
--- a/regression-test/data/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.out
+++ b/regression-test/data/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.out
@@ -61,3 +61,14 @@
 Customer#000000004
 Customer#000000007
 
+-- !q15 --
+150000
+
+-- !q16 --
+150000
+
+-- !q17 --
+150000
+
+-- !q18 --
+150000
diff --git a/regression-test/suites/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.groovy b/regression-test/suites/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.groovy
index e2de233386..4175259c36 100644
--- a/regression-test/suites/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.groovy
+++ b/regression-test/suites/external_table_emr_p2/iceberg/test_external_catalog_icebergv2.groovy
@@ -20,17 +20,27 @@ suite("test_external_catalog_icebergv2", "p2") {
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
         String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
         String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "test_external_catalog_iceberg"
+        String hms_catalog_name = "test_external_hms_catalog_iceberg"
+        String iceberg_catalog_name = "test_external_iceberg_catalog_iceberg"
 
-        sql """drop catalog if exists ${catalog_name};"""
+        sql """drop catalog if exists ${hms_catalog_name};"""
         sql """
-            create catalog if not exists ${catalog_name} properties (
+            create catalog if not exists ${hms_catalog_name} properties (
                 'type'='hms',
                 'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
             );
         """
 
-        sql """switch ${catalog_name};"""
+        sql """drop catalog if exists ${iceberg_catalog_name};"""
+        sql """
+            create catalog if not exists ${iceberg_catalog_name} properties (
+                'type'='iceberg',
+                'iceberg.catalog.type'='hms',
+                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
+            );
+        """
+
+        sql """switch ${hms_catalog_name};"""
         // test parquet format format
         def q01 = {
             qt_q01 """ select count(1) as c from customer_small """
@@ -54,9 +64,19 @@ suite("test_external_catalog_icebergv2", "p2") {
             qt_q13 """ select c_custkey from customer_small where c_custkey in (1, 2, 4, 7) order by c_custkey """
             qt_q14 """ select c_name from customer_small where c_name in ('Customer#000000004', 'Customer#000000007') order by c_custkey """
         }
+
+        // test for 'FOR TIME AS OF' and 'FOR VERSION AS OF'
+        def q04 = {
+            qt_q15 """ select count(*) from ${hms_catalog_name}.tpch_1000_icebergv2.customer_small FOR TIME AS OF '2022-12-22 02:29:30' """
+            qt_q16 """ select count(*) from ${hms_catalog_name}.tpch_1000_icebergv2.customer_small FOR VERSION AS OF 6113938156088124425 """
+            qt_q17 """ select count(*) from ${iceberg_catalog_name}.tpch_1000_icebergv2.customer_small FOR TIME AS OF '2022-12-22 02:29:30' """
+            qt_q18 """ select count(*) from ${iceberg_catalog_name}.tpch_1000_icebergv2.customer_small FOR VERSION AS OF 6113938156088124425 """
+        }
+        
         sql """ use `tpch_1000_icebergv2`; """
         q01()
         q02()
         q03()
+        q04()
     }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
