This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 5715a8518aa [fix](jdbc) Fix PG array null detection and SQL Server 
date format pushdown (#60954)
5715a8518aa is described below

commit 5715a8518aad0cbed1d72be45e5f3df20139a1ce
Author: Mingyu Chen (Rayner) <[email protected]>
AuthorDate: Tue Mar 3 12:48:59 2026 +0800

    [fix](jdbc) Fix PG array null detection and SQL Server date format pushdown 
(#60954)
    
    ### What problem does this PR solve?
    
    Fix two issues in JDBC catalog:
    
    1. PostgreSQL array type detection: When the first row of a PostgreSQL
       table has NULL in an array column, `array_ndims(col)` returns NULL,
       causing arrayDimensions to remain 0 and the column type to be
       mapped as UNSUPPORTED. Add `WHERE col IS NOT NULL` to the detection
       query so that it finds a non-NULL row to correctly determine array
       dimensions.
    
    2. SQL Server date/datetime predicate pushdown: Date and datetime
       literals were pushed down to SQL Server as plain string literals.
       Depending on SQL Server's language setting, this can cause conversion
       errors. Use CONVERT(DATE, '...', 23) and CONVERT(DATETIME, '...', 121)
       for language-independent date format handling.
---
 .../postgresql/init/02-create-table.sql            |   7 +-
 .../docker-compose/postgresql/init/04-insert.sql   |   5 +
 .../sqlserver/init/03-create-table.sql             |   7 ++
 .../docker-compose/sqlserver/init/04-insert.sql    |   9 ++
 .../jdbc/client/JdbcPostgreSQLClient.java          |   6 +-
 .../doris/datasource/jdbc/source/JdbcScanNode.java |  19 +++
 .../datasource/jdbc/source/JdbcScanNodeTest.java   | 133 +++++++++++++++++++++
 .../jdbc/test_pg_jdbc_catalog.out                  |  33 ++---
 .../jdbc/test_sqlserver_jdbc_catalog.out           |  26 +++-
 .../jdbc/test_pg_jdbc_catalog.groovy               |  10 +-
 .../jdbc/test_sqlserver_jdbc_catalog.groovy        |  24 ++++
 11 files changed, 250 insertions(+), 29 deletions(-)

diff --git 
a/docker/thirdparties/docker-compose/postgresql/init/02-create-table.sql 
b/docker/thirdparties/docker-compose/postgresql/init/02-create-table.sql
index b3ba617d6c1..ac35f2847d2 100644
--- a/docker/thirdparties/docker-compose/postgresql/init/02-create-table.sql
+++ b/docker/thirdparties/docker-compose/postgresql/init/02-create-table.sql
@@ -437,4 +437,9 @@ CREATE TABLE test_timestamp_tz_db.ts_test (
     ts_ntz  timestamp NULL
 );
 
-
+CREATE TABLE catalog_pg_test.test_array_null (
+    id int4 NOT NULL,
+    text_arr text[] NULL,
+    int_arr int4[] NULL,
+    varchar_arr varchar[] NULL
+);
diff --git a/docker/thirdparties/docker-compose/postgresql/init/04-insert.sql 
b/docker/thirdparties/docker-compose/postgresql/init/04-insert.sql
index 14d3321c86e..c6b4fafe896 100644
--- a/docker/thirdparties/docker-compose/postgresql/init/04-insert.sql
+++ b/docker/thirdparties/docker-compose/postgresql/init/04-insert.sql
@@ -3026,3 +3026,8 @@ SET TIME ZONE 'Asia/Shanghai';
 INSERT INTO test_timestamp_tz_db.ts_test VALUES (1,'2025-01-01 
12:00:00+08','2025-01-01 12:00:00');
 INSERT INTO test_timestamp_tz_db.ts_test VALUES (2,NULL,NULL);
 SET TIME ZONE DEFAULT;
+
+-- Test data for array null handling (first row has NULL arrays, second has 
data)
+INSERT INTO catalog_pg_test.test_array_null VALUES (1, NULL, NULL, NULL);
+INSERT INTO catalog_pg_test.test_array_null VALUES (2, ARRAY['hello', 
'world'], ARRAY[1, 2, 3], ARRAY['a', 'b']);
+INSERT INTO catalog_pg_test.test_array_null VALUES (3, ARRAY['foo', NULL], 
ARRAY[NULL, 4], ARRAY[NULL, 'c']);
diff --git 
a/docker/thirdparties/docker-compose/sqlserver/init/03-create-table.sql 
b/docker/thirdparties/docker-compose/sqlserver/init/03-create-table.sql
index 8d62085b1b3..d0b1989ce30 100644
--- a/docker/thirdparties/docker-compose/sqlserver/init/03-create-table.sql
+++ b/docker/thirdparties/docker-compose/sqlserver/init/03-create-table.sql
@@ -277,3 +277,10 @@ CREATE TABLE dbo.test_identity_decimal (
        id decimal(18,0) IDENTITY(1,1),
        col int
 );
+
+CREATE TABLE dbo.test_date_filter (
+    id int PRIMARY KEY NOT NULL,
+    date_value date NULL,
+    datetime_value datetime NULL,
+    datetime2_value datetime2 NULL
+);
diff --git a/docker/thirdparties/docker-compose/sqlserver/init/04-insert.sql 
b/docker/thirdparties/docker-compose/sqlserver/init/04-insert.sql
index aadacd884a3..894773761c6 100644
--- a/docker/thirdparties/docker-compose/sqlserver/init/04-insert.sql
+++ b/docker/thirdparties/docker-compose/sqlserver/init/04-insert.sql
@@ -123,3 +123,12 @@ insert into dbo.extreme_test_multi_block select * from 
dbo.extreme_test_multi_bl
 insert into dbo.extreme_test_multi_block select * from dbo.extreme_test;
 
 INSERT INTO dbo.test_identity_decimal(col) select 1;
+
+-- Data for testing handleSQLServerDateFormat predicate pushdown
+-- Rows have different date and datetime values to verify filter correctness
+Insert into dbo.test_date_filter values
+(1, '2023-01-17', '2023-01-17 10:30:00', '2023-01-17 10:30:00.123'),
+(2, '2023-06-25', '2023-06-25 14:30:45', '2023-06-25 14:30:45.456789'),
+(3, '2024-12-31', '2024-12-31 23:59:59', '2024-12-31 23:59:59.999'),
+(4, '2023-01-17', '2023-01-17 08:00:00', '2023-01-17 08:00:00'),
+(5, '2025-03-15', '2025-03-15 12:00:00', '2025-03-15 12:00:00.500');
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcPostgreSQLClient.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcPostgreSQLClient.java
index 5505c434004..8d449ad33f1 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcPostgreSQLClient.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcPostgreSQLClient.java
@@ -69,8 +69,10 @@ public class JdbcPostgreSQLClient extends JdbcClient {
                     ResultSet arrayRs = null;
                     try {
                         pstmt = conn.prepareStatement(
-                                String.format("SELECT array_ndims(%s) FROM 
%s.%s LIMIT 1",
-                                        columnName, remoteDbName, 
remoteTableName));
+                                String.format("SELECT array_ndims(\"%s\") FROM 
\"%s\".\"%s\""
+                                                + " WHERE \"%s\" IS NOT NULL 
LIMIT 1",
+                                        columnName, remoteDbName, 
remoteTableName,
+                                        columnName));
                         arrayRs = pstmt.executeQuery();
                         if (arrayRs.next()) {
                             arrayDimensions = arrayRs.getInt(1);
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
index d809045373e..fb832119139 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
@@ -338,6 +338,8 @@ public class JdbcScanNode extends ExternalScanNode {
                 filter += handleOracleDateFormat(children.get(1), tbl);
             } else if (tableType.equals(TOdbcTableType.TRINO) || 
tableType.equals(TOdbcTableType.PRESTO)) {
                 filter += handleTrinoDateFormat(children.get(1), tbl);
+            } else if (tableType.equals(TOdbcTableType.SQLSERVER)) {
+                filter += handleSQLServerDateFormat(children.get(1), tbl);
             } else {
                 filter += 
children.get(1).toExternalSql(TableType.JDBC_EXTERNAL_TABLE, tbl);
             }
@@ -362,6 +364,8 @@ public class JdbcScanNode extends ExternalScanNode {
                     inItemStrings.add(handleOracleDateFormat(inItem, tbl));
                 } else if (tableType.equals(TOdbcTableType.TRINO) || 
tableType.equals(TOdbcTableType.PRESTO)) {
                     inItemStrings.add(handleTrinoDateFormat(inItem, tbl));
+                } else if (tableType.equals(TOdbcTableType.SQLSERVER)) {
+                    inItemStrings.add(handleSQLServerDateFormat(inItem, tbl));
                 } else {
                     
inItemStrings.add(inItem.toExternalSql(TableType.JDBC_EXTERNAL_TABLE, tbl));
                 }
@@ -425,6 +429,21 @@ public class JdbcScanNode extends ExternalScanNode {
         return expr.toExternalSql(TableType.JDBC_EXTERNAL_TABLE, tbl);
     }
 
+    private static String handleSQLServerDateFormat(Expr expr, TableIf tbl) {
+        if (expr.isConstant()) {
+            if (expr.getType().isDatetime() || expr.getType().isDatetimeV2()) {
+                // Use CONVERT with style 121 (ODBC canonical: yyyy-mm-dd 
hh:mi:ss.mmm)
+                // which is language-independent and handles fractional seconds
+                return "CONVERT(DATETIME, '" + expr.getStringValue() + "', 
121)";
+            } else if (expr.getType().isDate() || expr.getType().isDateV2()) {
+                // Use CONVERT with style 23 (ISO8601: yyyy-mm-dd)
+                // which is language-independent
+                return "CONVERT(DATE, '" + expr.getStringValue() + "', 23)";
+            }
+        }
+        return expr.toExternalSql(TableType.JDBC_EXTERNAL_TABLE, tbl);
+    }
+
     private static boolean containsNullLiteral(Expr expr) {
         List<NullLiteral> nullExprList = Lists.newArrayList();
         expr.collect(NullLiteral.class, nullExprList);
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/datasource/jdbc/source/JdbcScanNodeTest.java
 
b/fe/fe-core/src/test/java/org/apache/doris/datasource/jdbc/source/JdbcScanNodeTest.java
index ac78a2c6e29..09cbb2786d7 100644
--- 
a/fe/fe-core/src/test/java/org/apache/doris/datasource/jdbc/source/JdbcScanNodeTest.java
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/datasource/jdbc/source/JdbcScanNodeTest.java
@@ -22,13 +22,16 @@ import org.apache.doris.analysis.BinaryPredicate.Operator;
 import org.apache.doris.analysis.BoolLiteral;
 import org.apache.doris.analysis.CompoundPredicate;
 import org.apache.doris.analysis.DateLiteral;
+import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.FloatLiteral;
+import org.apache.doris.analysis.InPredicate;
 import org.apache.doris.analysis.IntLiteral;
 import org.apache.doris.analysis.IsNullPredicate;
 import org.apache.doris.analysis.LikePredicate;
 import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.StringLiteral;
 import org.apache.doris.catalog.JdbcTable;
+import org.apache.doris.catalog.ScalarType;
 import org.apache.doris.catalog.Type;
 import org.apache.doris.thrift.TOdbcTableType;
 
@@ -37,6 +40,9 @@ import mockit.Mocked;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.Arrays;
+import java.util.List;
+
 public class JdbcScanNodeTest {
 
     @Mocked
@@ -284,6 +290,11 @@ public class JdbcScanNodeTest {
         String mysqlResult = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.MYSQL, outerComp, mockTable);
         Assert.assertTrue(mysqlResult.contains("'2023-01-01 00:00:00'"));
         Assert.assertTrue(mysqlResult.contains("'2023-12-31 23:59:59'"));
+
+        // Test for SQL Server (CONVERT with style 121)
+        String sqlserverResult = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.SQLSERVER, outerComp, 
mockTable);
+        Assert.assertTrue(sqlserverResult.contains("CONVERT(DATETIME, 
'2023-01-01 00:00:00', 121)"));
+        Assert.assertTrue(sqlserverResult.contains("CONVERT(DATETIME, 
'2023-12-31 23:59:59', 121)"));
     }
 
     @Test
@@ -493,4 +504,126 @@ public class JdbcScanNodeTest {
         String result = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.MYSQL, outerComp, mockTable);
         Assert.assertEquals("((\"ID\" = 1) OR ((\"NAME\" = 'test') AND 
((\"AGE\" > 18) OR (\"DEPT\" = 'HR'))))", result);
     }
+
+    @Test
+    public void testDateLiteralSQLServerDatetime() throws Exception {
+        new Expectations() {{
+                mockTable.getProperRemoteColumnName((TOdbcTableType) any, 
anyString);
+                result = new mockit.Delegate() {
+                    String getProperColumnName(TOdbcTableType tableType, 
String colName) {
+                        return "\"" + colName + "\"";
+                    }
+                };
+            }};
+
+        // Test DATETIME type with SQL Server
+        DateLiteral dateLiteral = new DateLiteral("2026-02-28 12:30:12", 
Type.DATETIME);
+
+        SlotRef dateSlot = new SlotRef(null, "data_time");
+        BinaryPredicate datePred = new BinaryPredicate(Operator.GT, dateSlot, 
dateLiteral);
+
+        String result = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.SQLSERVER, datePred, 
mockTable);
+        Assert.assertTrue(result.contains("CONVERT(DATETIME, '2026-02-28 
12:30:12', 121)"));
+        Assert.assertTrue(result.startsWith("\"data_time\" > "));
+    }
+
+    @Test
+    public void testDateLiteralSQLServerDate() throws Exception {
+        new Expectations() {{
+                mockTable.getProperRemoteColumnName((TOdbcTableType) any, 
anyString);
+                result = new mockit.Delegate() {
+                    String getProperColumnName(TOdbcTableType tableType, 
String colName) {
+                        return "\"" + colName + "\"";
+                    }
+                };
+            }};
+
+        // Test DATE type with SQL Server
+        DateLiteral dateLiteral = new DateLiteral("2026-02-28", Type.DATEV2);
+
+        SlotRef dateSlot = new SlotRef(null, "create_date");
+        BinaryPredicate datePred = new BinaryPredicate(Operator.GE, dateSlot, 
dateLiteral);
+
+        String result = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.SQLSERVER, datePred, 
mockTable);
+        Assert.assertTrue(result.contains("CONVERT(DATE, '2026-02-28', 23)"));
+        Assert.assertTrue(result.startsWith("\"create_date\" >= "));
+    }
+
+    @Test
+    public void testDateLiteralSQLServerDatetimeV2WithFractionalSeconds() 
throws Exception {
+        new Expectations() {{
+                mockTable.getProperRemoteColumnName((TOdbcTableType) any, 
anyString);
+                result = new mockit.Delegate() {
+                    String getProperColumnName(TOdbcTableType tableType, 
String colName) {
+                        return "\"" + colName + "\"";
+                    }
+                };
+            }};
+
+        // Test DATETIMEV2 type with fractional seconds (as SQL Server 
datetime maps to DATETIMEV2)
+        DateLiteral dateLiteral = new DateLiteral("2026-02-28 12:30:12.123",
+                ScalarType.createDatetimeV2Type(3));
+
+        SlotRef dateSlot = new SlotRef(null, "data_time");
+        BinaryPredicate datePred = new BinaryPredicate(Operator.GT, dateSlot, 
dateLiteral);
+
+        String result = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.SQLSERVER, datePred, 
mockTable);
+        Assert.assertTrue(result.contains("CONVERT(DATETIME, '2026-02-28 
12:30:12.123', 121)"));
+        Assert.assertTrue(result.startsWith("\"data_time\" > "));
+    }
+
+    @Test
+    public void testDateLiteralSQLServerInPredicate() throws Exception {
+        new Expectations() {{
+                mockTable.getProperRemoteColumnName((TOdbcTableType) any, 
anyString);
+                result = new mockit.Delegate() {
+                    String getProperColumnName(TOdbcTableType tableType, 
String colName) {
+                        return "\"" + colName + "\"";
+                    }
+                };
+            }};
+
+        // Test IN predicate with datetime literals for SQL Server
+        SlotRef dateSlot = new SlotRef(null, "data_time");
+        DateLiteral date1 = new DateLiteral("2026-01-01 00:00:00", 
Type.DATETIME);
+        DateLiteral date2 = new DateLiteral("2026-06-15 12:00:00", 
Type.DATETIME);
+        DateLiteral date3 = new DateLiteral("2026-12-31 23:59:59", 
Type.DATETIME);
+
+        List<Expr> inList = Arrays.asList(date1, date2, date3);
+        InPredicate inPred = new InPredicate(dateSlot, inList, false);
+
+        String result = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.SQLSERVER, inPred, mockTable);
+        Assert.assertTrue(result.contains("CONVERT(DATETIME, '2026-01-01 
00:00:00', 121)"));
+        Assert.assertTrue(result.contains("CONVERT(DATETIME, '2026-06-15 
12:00:00', 121)"));
+        Assert.assertTrue(result.contains("CONVERT(DATETIME, '2026-12-31 
23:59:59', 121)"));
+        Assert.assertTrue(result.contains("IN ("));
+    }
+
+    @Test
+    public void testDateLiteralSQLServerCompoundPredicate() throws Exception {
+        new Expectations() {{
+                mockTable.getProperRemoteColumnName((TOdbcTableType) any, 
anyString);
+                result = new mockit.Delegate() {
+                    String getProperColumnName(TOdbcTableType tableType, 
String colName) {
+                        return "\"" + colName + "\"";
+                    }
+                };
+            }};
+
+        // Test compound predicate: ID = 1 AND data_time > '2026-02-28 
12:30:12'
+        SlotRef idSlot = new SlotRef(null, "ID");
+        IntLiteral intLiteral = new IntLiteral(1);
+        BinaryPredicate idPred = new BinaryPredicate(Operator.EQ, idSlot, 
intLiteral);
+
+        DateLiteral dateLiteral = new DateLiteral("2026-02-28 12:30:12", 
Type.DATETIME);
+        SlotRef dateSlot = new SlotRef(null, "data_time");
+        BinaryPredicate datePred = new BinaryPredicate(Operator.GT, dateSlot, 
dateLiteral);
+
+        CompoundPredicate compPred = new 
CompoundPredicate(CompoundPredicate.Operator.AND, idPred, datePred);
+
+        String result = 
JdbcScanNode.conjunctExprToString(TOdbcTableType.SQLSERVER, compPred, 
mockTable);
+        Assert.assertTrue(result.contains("CONVERT(DATETIME, '2026-02-28 
12:30:12', 121)"));
+        Assert.assertTrue(result.contains("\"ID\" = 1"));
+        Assert.assertTrue(result.contains(" AND "));
+    }
 }
diff --git 
a/regression-test/data/external_table_p0/jdbc/test_pg_jdbc_catalog.out 
b/regression-test/data/external_table_p0/jdbc/test_pg_jdbc_catalog.out
index 7950b410c58..7ad356af7c7 100644
--- a/regression-test/data/external_table_p0/jdbc/test_pg_jdbc_catalog.out
+++ b/regression-test/data/external_table_p0/jdbc/test_pg_jdbc_catalog.out
@@ -1,14 +1,4 @@
 -- This file is automatically generated. You should know what you did if you 
want to edit this
--- !show_db --
-catalog_pg_test
-cdc_test
-doris_test
-information_schema
-mysql
-pg_catalog
-public
-test_timestamp_tz_db
-
 -- !test0 --
 123    abc
 123    abc
@@ -2149,9 +2139,6 @@ true      abc     def     2022-10-11      1.234   1       
2       99      2022-10-22T10:59:59     34.123
 1      980dd890-f7fe-4fff-999d-873516108b2e
 2      980dd890-f7fe-4fff-999d-873516108b2e
 
--- !wkb_test --
-1      
\\x01030000000100000005000000000000000000000000000000000000000000000000000000000000000000f03f000000000000f03f000000000000f03f000000000000f03f000000000000000000000000000000000000000000000000
-
 -- !dt_test --
 2023-06-16T12:34:56.123        2023-06-16T12:34:56.123
 
@@ -2251,6 +2238,17 @@ doris3   20
 3      [["A", null], ["C", "D"]]       [["Hello", null], ["Foo", "Bar"]]       
[["Text1", null], ["Text3", "Text4"]]   [["2024-01-01", null], ["2024-02-01", 
"2024-02-02"]]    [[10, null], [30, 40]]  [[100, null], [300, 400]]       
[[1000, null], [3000, 4000]]    [["2024-01-01 10:00:00.000000", null], 
["2024-03-01 10:00:00.000000", "2024-03-02 12:00:00.000000"]]    [["2024-01-01 
18:00:00.000000", null], ["2024-03-01 18:00:00.000000", "2024-03-02 
20:00:00.000000"]]    [[1, null], [0, 1]]     [[1.1, null], [3.3, 4.4]]       
[[5.5, null], [...]
 4      [["A", "B"], [null, null]]      [["Hello", "World"], [null, null]]      
[["Text1", "Text2"], [null, null]]      [["2024-01-01", "2024-01-02"], [null, 
null]]    [[10, 20], [null, null]]        [[100, 200], [null, null]]      
[[1000, 2000], [null, null]]    [["2024-01-01 10:00:00.000000", "2024-01-02 
12:00:00.000000"], [null, null]]    [["2024-01-01 18:00:00.000000", "2024-01-02 
20:00:00.000000"], [null, null]]    [[1, 0], [null, null]]  [[1.1, 2.2], [null, 
null]]      [[5.5, 6.6], [null, null]]
 
+-- !test_array_null_desc --
+id     int     No      true    \N      
+int_arr        array<int>      Yes     true    \N      
+text_arr       array<text>     Yes     true    \N      
+varchar_arr    array<text>     Yes     true    \N      
+
+-- !test_array_null --
+1      \N      \N      \N
+2      ["hello", "world"]      [1, 2, 3]       ["a", "b"]
+3      ["foo", null]   [null, 4]       [null, "c"]
+
 -- !ctas --
 1      abc     def     2022-10-11      1       2       3       
2022-10-22T10:59:59     34.123  false   12.12346        10.16.10.14/32  
10.16.10.14     ff:ff:ff:ff:ff:aa       1010101010      01010   1       {"id": 
1}       (1.0,1.0)       {1.0,1.0,1.0}   [(1.0,1.0),(2.0,2.0)]   
(2.0,2.0),(1.0,1.0)     ((1.0,1.0),(2.0,2.0),(2.0,1.0)) 
((1.0,1.0),(2.0,2.0),(2.0,1.0)) <(0.0,0.0),1.0>
 2      \N      \N      \N      \N      \N      \N      \N      \N      \N      
\N      \N      \N      \N      \N      \N      2       \N      \N      \N      
\N      \N      \N      \N      \N
@@ -2296,15 +2294,6 @@ doris_test
 information_schema
 mysql
 
--- !specified_database_3 --
-catalog_pg_test
-cdc_test
-information_schema
-mysql
-pg_catalog
-public
-test_timestamp_tz_db
-
 -- !specified_database_4 --
 information_schema
 mysql
diff --git 
a/regression-test/data/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.out 
b/regression-test/data/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.out
index a62d9bce1d4..368a7b99390 100644
--- 
a/regression-test/data/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.out
+++ 
b/regression-test/data/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.out
@@ -33,7 +33,7 @@ sys
 
 -- !test2 --
 1      123.123 123.123 123.123 123.12300       123.12300
-2      12345.123       12345.12345     12345.123       12345.12345     
12345.12345
+2      12345.12        12345.12345     12345.12        12345.12345     
12345.12345
 3      -123.123        -123.123        -123.123        -123.12300      
-123.12300
 
 -- !test3 --
@@ -133,6 +133,30 @@ timestamp_col      text    Yes     true    \N
 -- !identity_decimal --
 1      1
 
+-- !datetime_eq --
+1      2023-01-17      2023-01-17T10:30        2023-01-17T10:30:00.123
+
+-- !datetime_range --
+2      2023-06-25      2023-06-25T14:30:45     2023-06-25T14:30:45.456789
+3      2024-12-31      2024-12-31T23:59:59     2024-12-31T23:59:59.999
+
+-- !datetime_in --
+1      2023-01-17      2023-01-17T10:30        2023-01-17T10:30:00.123
+5      2025-03-15      2025-03-15T12:00        2025-03-15T12:00:00.500
+
+-- !date_eq --
+1      2023-01-17      2023-01-17T10:30        2023-01-17T10:30:00.123
+4      2023-01-17      2023-01-17T08:00        2023-01-17T08:00
+
+-- !date_range --
+2      2023-06-25      2023-06-25T14:30:45     2023-06-25T14:30:45.456789
+3      2024-12-31      2024-12-31T23:59:59     2024-12-31T23:59:59.999
+
+-- !date_in --
+1      2023-01-17      2023-01-17T10:30        2023-01-17T10:30:00.123
+3      2024-12-31      2024-12-31T23:59:59     2024-12-31T23:59:59.999
+4      2023-01-17      2023-01-17T08:00        2023-01-17T08:00
+
 -- !sql --
 db_accessadmin
 db_backupoperator
diff --git 
a/regression-test/suites/external_table_p0/jdbc/test_pg_jdbc_catalog.groovy 
b/regression-test/suites/external_table_p0/jdbc/test_pg_jdbc_catalog.groovy
index 3326dac9d06..8842bf5d42b 100644
--- a/regression-test/suites/external_table_p0/jdbc/test_pg_jdbc_catalog.groovy
+++ b/regression-test/suites/external_table_p0/jdbc/test_pg_jdbc_catalog.groovy
@@ -21,6 +21,7 @@ suite("test_pg_jdbc_catalog", 
"p0,external,pg,external_docker,external_docker_pg
     String s3_endpoint = getS3Endpoint()
     String bucket = getS3BucketName()
     String driver_url = 
"https://${bucket}.${s3_endpoint}/regression/jdbc_driver/postgresql-42.5.0.jar";
+    // String driver_url = "postgresql-42.5.0.jar"
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
         String catalog_name = "pg_jdbc_catalog";
         String internal_db_name = "regression_test_jdbc_catalog_p0";
@@ -45,7 +46,6 @@ suite("test_pg_jdbc_catalog", 
"p0,external,pg,external_docker,external_docker_pg
             "driver_url" = "${driver_url}",
             "driver_class" = "org.postgresql.Driver"
         );"""
-        order_qt_show_db """ show databases from ${catalog_name}; """
         sql """use ${internal_db_name}"""
         sql  """ drop table if exists ${internal_db_name}.${inDorisTable} """
         sql  """
@@ -112,7 +112,6 @@ suite("test_pg_jdbc_catalog", 
"p0,external,pg,external_docker,external_docker_pg
         order_qt_test12  """ select * from test10 order by id; """
         order_qt_test13  """ select * from test11 order by id; """
         order_qt_test14  """ select * from test12 order by id; """
-        order_qt_wkb_test  """ select * from wkb_test order by id; """
         order_qt_dt_test  """ select * from dt_test order by 1; """
         order_qt_json_test  """ select * from json_test order by 1; """
         order_qt_jsonb_test  """ select * from jsonb_test order by 1; """
@@ -153,6 +152,10 @@ suite("test_pg_jdbc_catalog", 
"p0,external,pg,external_docker,external_docker_pg
 
         order_qt_select_all_arr2d_types """select *  from 
test_all_support_types_array_2d order by 1;"""
 
+        // test array with null values in first row (regression test for 
UNSUPPORTED_TYPE bug)
+        order_qt_test_array_null_desc """desc test_array_null;"""
+        order_qt_test_array_null """select * from test_array_null order by 
id;"""
+
         // test test ctas
         sql """ drop table if exists internal.${internal_db_name}.${test_ctas} 
"""
         sql """ create table internal.${internal_db_name}.${test_ctas}
@@ -214,7 +217,7 @@ suite("test_pg_jdbc_catalog", 
"p0,external,pg,external_docker,external_docker_pg
             "exclude_database_list" = "doris_test"
         );"""
         sql """switch ${catalog_name} """
-        qt_specified_database_3 """ show databases; """
+        // qt_specified_database_3 """ show databases; """
 
         sql """drop catalog if exists ${catalog_name} """
 
@@ -260,6 +263,7 @@ suite("test_pg_jdbc_catalog", 
"p0,external,pg,external_docker,external_docker_pg
         );"""
         sql """ switch test_pg_with_varbinary """
         sql """use catalog_pg_test """
+        sql """ CALL EXECUTE_STMT("test_pg_with_varbinary", "delete from 
catalog_pg_test.wkb_test where id=3;");"""
         order_qt_varbinary_test  """ select * from wkb_test order by id; """
         sql """ insert into wkb_test values (3, 
X'0101000000000000000000F03F0000000000000040'); """
         order_qt_varbinary_test_after_insert  """ select * from wkb_test order 
by id; """
diff --git 
a/regression-test/suites/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.groovy
 
b/regression-test/suites/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.groovy
index 703029beed9..6c167be4c3f 100644
--- 
a/regression-test/suites/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.groovy
+++ 
b/regression-test/suites/external_table_p0/jdbc/test_sqlserver_jdbc_catalog.groovy
@@ -21,6 +21,7 @@ suite("test_sqlserver_jdbc_catalog", 
"p0,external,sqlserver,external_docker,exte
     String s3_endpoint = getS3Endpoint()
     String bucket = getS3BucketName()
     String driver_url = 
"https://${bucket}.${s3_endpoint}/regression/jdbc_driver/mssql-jdbc-11.2.3.jre8.jar";
+    // String driver_url = "mssql-jdbc-11.2.3.jre8.jar"
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
         String catalog_name = "sqlserver_catalog";
         String internal_db_name = "sqlserver_jdbc_catalog_p0";
@@ -85,6 +86,29 @@ suite("test_sqlserver_jdbc_catalog", 
"p0,external,sqlserver,external_docker,exte
 
         order_qt_identity_decimal """ select * from test_identity_decimal 
order by id; """
 
+        // Test cases for SQL Server date format pushdown 
(handleSQLServerDateFormat)
+        // Uses test_date_filter table which has diverse date/datetime values 
across rows
+        // to verify that filters genuinely include/exclude the correct rows.
+
+        // Case 1: datetime equality — BinaryPredicate triggers 
CONVERT(DATETIME, '...', 121)
+        // Should return only rows matching '2023-01-17 10:30:00' (id=1)
+        order_qt_datetime_eq """ select * from test_date_filter where 
datetime_value = '2023-01-17 10:30:00' order by id; """
+        // Case 2: datetime range — two BinaryPredicates trigger 
CONVERT(DATETIME, ..., 121)
+        // Should return rows with datetime between '2023-06-25 14:30:45' and 
'2024-12-31 23:59:59' (id=2,3)
+        order_qt_datetime_range """ select * from test_date_filter where 
datetime_value >= '2023-06-25 14:30:45' and datetime_value <= '2024-12-31 
23:59:59' order by id; """
+        // Case 3: datetime IN — InPredicate triggers CONVERT(DATETIME, ..., 
121) for each item
+        // Should return rows matching either value (id=1,5)
+        order_qt_datetime_in """ select * from test_date_filter where 
datetime_value in ('2023-01-17 10:30:00', '2025-03-15 12:00:00') order by id; 
"""
+        // Case 4: date equality — BinaryPredicate triggers CONVERT(DATE, 
'...', 23)
+        // Should return rows with date_value='2023-01-17' (id=1,4)
+        order_qt_date_eq """ select * from test_date_filter where date_value = 
'2023-01-17' order by id; """
+        // Case 5: date range — two BinaryPredicates trigger CONVERT(DATE, 
..., 23)
+        // Should return rows with date between '2023-06-25' and '2024-12-31' 
(id=2,3)
+        order_qt_date_range """ select * from test_date_filter where 
date_value >= '2023-06-25' and date_value <= '2024-12-31' order by id; """
+        // Case 6: date IN — InPredicate triggers CONVERT(DATE, ..., 23) for 
each item
+        // Should return rows with date_value in the list (id=1,3,4)
+        order_qt_date_in """ select * from test_date_filter where date_value 
in ('2023-01-17', '2024-12-31') order by id; """
+
         sql """ drop catalog if exists ${catalog_name} """
 
         sql """ create catalog if not exists ${catalog_name} properties(


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to