This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 47c2cc5c74 [vectorized](udf) java udf support with return map type 
(#22300)
47c2cc5c74 is described below

commit 47c2cc5c74c20fd998b35e7a7a908fbc512892ac
Author: Mryange <59914473+mrya...@users.noreply.github.com>
AuthorDate: Sat Jul 29 12:52:27 2023 +0800

    [vectorized](udf) java udf support with return map type (#22300)
---
 be/src/vec/functions/function_java_udf.cpp         |  68 ++++++-
 be/src/vec/functions/function_java_udf.h           |   1 +
 .../apache/doris/common/jni/utils/UdfUtils.java    |   8 +-
 .../java/org/apache/doris/udf/BaseExecutor.java    |   3 +-
 .../main/java/org/apache/doris/udf/UdfConvert.java |   2 +-
 .../java/org/apache/doris/udf/UdfExecutor.java     | 197 ++++++++++++++++++++-
 .../data/javaudf_p0/test_javaudf_ret_map.out       |  17 ++
 .../main/java/org/apache/doris/udf/MapidTest.java  |  33 ++++
 .../java/org/apache/doris/udf/MapidssTest.java     |  31 ++++
 .../main/java/org/apache/doris/udf/MapiiTest.java  |  33 ++++
 .../main/java/org/apache/doris/udf/MapssTest.java  |  33 ++++
 .../suites/javaudf_p0/test_javaudf_ret_map.groovy  | 120 +++++++++++++
 12 files changed, 529 insertions(+), 17 deletions(-)

diff --git a/be/src/vec/functions/function_java_udf.cpp 
b/be/src/vec/functions/function_java_udf.cpp
index 0df2806026..46bf887515 100644
--- a/be/src/vec/functions/function_java_udf.cpp
+++ b/be/src/vec/functions/function_java_udf.cpp
@@ -78,6 +78,8 @@ Status JavaFunctionCall::open(FunctionContext* context, 
FunctionContext::Functio
                 jni_env->executor_cl, "copyBatchBasicResult", 
"(ZI[Ljava/lang/Object;JJJ)V");
         jni_env->executor_result_array_batch_id = env->GetMethodID(
                 jni_env->executor_cl, "copyBatchArrayResult", 
"(ZI[Ljava/lang/Object;JJJJJ)V");
+        jni_env->executor_result_map_batch_id = env->GetMethodID(
+                jni_env->executor_cl, "copyBatchMapResult", 
"(ZI[Ljava/lang/Object;JJJJJJJJ)V");
         jni_env->executor_close_id =
                 env->GetMethodID(jni_env->executor_cl, "close", 
EXECUTOR_CLOSE_SIGNATURE);
         RETURN_ERROR_IF_EXC(env);
@@ -150,7 +152,7 @@ Status JavaFunctionCall::execute(FunctionContext* context, 
Block& block,
     ColumnPtr null_cols[arg_size];
     jclass obj_class = env->FindClass("[Ljava/lang/Object;");
     jclass arraylist_class = env->FindClass("Ljava/util/ArrayList;");
-    // jclass hashmap_class = env->FindClass("Ljava/util/HashMap;");
+    jclass hashmap_class = env->FindClass("Ljava/util/HashMap;");
     jobjectArray arg_objects = env->NewObjectArray(arg_size, obj_class, 
nullptr);
     int64_t nullmap_address = 0;
     for (size_t arg_idx = 0; arg_idx < arg_size; ++arg_idx) {
@@ -352,6 +354,69 @@ Status JavaFunctionCall::execute(FunctionContext* context, 
Block& block,
                 jni_ctx->executor, jni_env->executor_cl, 
jni_env->executor_result_array_batch_id,
                 result_nullable, num_rows, result_obj, nullmap_address, 
offset_address,
                 nested_nullmap_address, nested_data_address, 
nested_offset_address);
+    } else if (res_col->is_column_map()) {
+        ColumnMap* map_col = assert_cast<ColumnMap*>(res_col.get());
+        auto& offset_column = map_col->get_offsets_column();
+        auto offset_address = 
reinterpret_cast<int64_t>(offset_column.get_raw_data().data);
+        ColumnNullable& map_key_column_nullable = 
assert_cast<ColumnNullable&>(map_col->get_keys());
+        auto key_data_column_null_map = 
map_key_column_nullable.get_null_map_column_ptr();
+        auto key_data_column = map_key_column_nullable.get_nested_column_ptr();
+        auto& key_null_map_data =
+                
assert_cast<ColumnVector<UInt8>*>(key_data_column_null_map.get())->get_data();
+        auto key_nested_nullmap_address = 
reinterpret_cast<int64_t>(key_null_map_data.data());
+        int64_t key_nested_data_address = 0, key_nested_offset_address = 0;
+        if (key_data_column->is_column_string()) {
+            ColumnString* str_col = 
assert_cast<ColumnString*>(key_data_column.get());
+            ColumnString::Chars& chars = 
assert_cast<ColumnString::Chars&>(str_col->get_chars());
+            ColumnString::Offsets& offsets =
+                    
assert_cast<ColumnString::Offsets&>(str_col->get_offsets());
+            key_nested_data_address = reinterpret_cast<int64_t>(&chars);
+            key_nested_offset_address = 
reinterpret_cast<int64_t>(offsets.data());
+        } else {
+            key_nested_data_address =
+                    
reinterpret_cast<int64_t>(key_data_column->get_raw_data().data);
+        }
+
+        ColumnNullable& map_value_column_nullable =
+                assert_cast<ColumnNullable&>(map_col->get_values());
+        auto value_data_column_null_map = 
map_value_column_nullable.get_null_map_column_ptr();
+        auto value_data_column = 
map_value_column_nullable.get_nested_column_ptr();
+        auto& value_null_map_data =
+                
assert_cast<ColumnVector<UInt8>*>(value_data_column_null_map.get())->get_data();
+        auto value_nested_nullmap_address = 
reinterpret_cast<int64_t>(value_null_map_data.data());
+        int64_t value_nested_data_address = 0, value_nested_offset_address = 0;
+        // array type need pass address: [nullmap_address], offset_address, 
nested_nullmap_address, 
nested_data_address/nested_char_address,nested_offset_address
+        if (value_data_column->is_column_string()) {
+            ColumnString* str_col = 
assert_cast<ColumnString*>(value_data_column.get());
+            ColumnString::Chars& chars = 
assert_cast<ColumnString::Chars&>(str_col->get_chars());
+            ColumnString::Offsets& offsets =
+                    
assert_cast<ColumnString::Offsets&>(str_col->get_offsets());
+            value_nested_data_address = reinterpret_cast<int64_t>(&chars);
+            value_nested_offset_address = 
reinterpret_cast<int64_t>(offsets.data());
+        } else {
+            value_nested_data_address =
+                    
reinterpret_cast<int64_t>(value_data_column->get_raw_data().data);
+        }
+        jmethodID map_size = env->GetMethodID(hashmap_class, "size", "()I");
+        int element_size = 0; // get all element size in num_rows of map column
+        for (int i = 0; i < num_rows; ++i) {
+            jobject obj = env->GetObjectArrayElement(result_obj, i);
+            if (obj == nullptr) {
+                continue;
+            }
+            element_size = element_size + env->CallIntMethod(obj, map_size);
+            env->DeleteLocalRef(obj);
+        }
+        map_key_column_nullable.resize(element_size);
+        memset(key_null_map_data.data(), 0, element_size);
+        map_value_column_nullable.resize(element_size);
+        memset(value_null_map_data.data(), 0, element_size);
+        env->CallNonvirtualVoidMethod(jni_ctx->executor, jni_env->executor_cl,
+                                      jni_env->executor_result_map_batch_id, 
result_nullable,
+                                      num_rows, result_obj, nullmap_address, 
offset_address,
+                                      key_nested_nullmap_address, 
key_nested_data_address,
+                                      key_nested_offset_address, 
value_nested_nullmap_address,
+                                      value_nested_data_address, 
value_nested_offset_address);
     } else {
         return Status::InvalidArgument(strings::Substitute(
                 "Java UDF doesn't support return type $0 now !", 
return_type->get_name()));
@@ -359,6 +424,7 @@ Status JavaFunctionCall::execute(FunctionContext* context, 
Block& block,
     env->DeleteLocalRef(result_obj);
     env->DeleteLocalRef(obj_class);
     env->DeleteLocalRef(arraylist_class);
+    env->DeleteLocalRef(hashmap_class);
     if (result_nullable) {
         block.replace_by_position(result,
                                   ColumnNullable::create(std::move(res_col), 
std::move(null_col)));
diff --git a/be/src/vec/functions/function_java_udf.h 
b/be/src/vec/functions/function_java_udf.h
index 4398fa038d..c0828a2a3f 100644
--- a/be/src/vec/functions/function_java_udf.h
+++ b/be/src/vec/functions/function_java_udf.h
@@ -102,6 +102,7 @@ private:
         jmethodID executor_convert_map_argument_id;
         jmethodID executor_result_basic_batch_id;
         jmethodID executor_result_array_batch_id;
+        jmethodID executor_result_map_batch_id;
         jmethodID executor_close_id;
     };
 
diff --git 
a/fe/be-java-extensions/java-common/src/main/java/org/apache/doris/common/jni/utils/UdfUtils.java
 
b/fe/be-java-extensions/java-common/src/main/java/org/apache/doris/common/jni/utils/UdfUtils.java
index c546750bcf..5f7c050136 100644
--- 
a/fe/be-java-extensions/java-common/src/main/java/org/apache/doris/common/jni/utils/UdfUtils.java
+++ 
b/fe/be-java-extensions/java-common/src/main/java/org/apache/doris/common/jni/utils/UdfUtils.java
@@ -253,11 +253,11 @@ public class UdfUtils {
                 break;
             }
             case MAP: {
-                Preconditions.checkState(nodeIdx + 1 < 
typeDesc.getTypesSize());
+                Preconditions.checkState(nodeIdx + 2 < 
typeDesc.getTypesSize());
                 Pair<Type, Integer> keyType = fromThrift(typeDesc, nodeIdx + 
1);
-                Pair<Type, Integer> valueType = fromThrift(typeDesc, nodeIdx + 
1 + keyType.value());
-                type = new MapType(keyType.key(), valueType.key());
-                nodeIdx = 1 + keyType.value() + valueType.value();
+                Pair<Type, Integer> valueType = fromThrift(typeDesc, 
keyType.second);
+                type = new MapType(keyType.first, valueType.first);
+                nodeIdx = valueType.second;
                 break;
             }
 
diff --git 
a/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/BaseExecutor.java
 
b/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/BaseExecutor.java
index abbddab454..df5026742d 100644
--- 
a/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/BaseExecutor.java
+++ 
b/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/BaseExecutor.java
@@ -119,8 +119,7 @@ public abstract class BaseExecutor {
             parameterTypes[i] = Type.fromThrift(request.fn.arg_types.get(i));
         }
         String jarFile = request.location;
-        Type funcRetType = UdfUtils.fromThrift(request.fn.ret_type, 0).first;
-
+        Type funcRetType = Type.fromThrift(request.fn.ret_type);
         init(request, jarFile, funcRetType, parameterTypes);
     }
 
diff --git 
a/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfConvert.java
 
b/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfConvert.java
index fb2ead5a3f..dc83540885 100644
--- 
a/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfConvert.java
+++ 
b/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfConvert.java
@@ -705,7 +705,7 @@ public class UdfConvert {
     }
 
 
-    
////////////////////////////////////copyBatchArray//////////////////////////////////////////////////////////
+    //////////////////////////////////// 
copyBatchArray//////////////////////////////////////////////////////////
 
     public static long copyBatchArrayBooleanResult(long hasPutElementNum, 
boolean isNullable, int row, Object[] result,
             long nullMapAddr, long offsetsAddr, long nestedNullMapAddr, long 
dataAddr) {
diff --git 
a/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfExecutor.java
 
b/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfExecutor.java
index 34333ba1eb..1140d1824b 100644
--- 
a/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfExecutor.java
+++ 
b/fe/be-java-extensions/java-udf/src/main/java/org/apache/doris/udf/UdfExecutor.java
@@ -41,6 +41,7 @@ import java.time.LocalDate;
 import java.time.LocalDateTime;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.Map;
 
 public class UdfExecutor extends BaseExecutor {
     // private static final java.util.logging.Logger LOG =
@@ -88,7 +89,8 @@ public class UdfExecutor extends BaseExecutor {
         int batchSize = UdfUtils.UNSAFE.getInt(null, batchSizePtr);
         try {
             if (retType.equals(JavaUdfDataType.STRING) || 
retType.equals(JavaUdfDataType.VARCHAR)
-                    || retType.equals(JavaUdfDataType.CHAR) || 
retType.equals(JavaUdfDataType.ARRAY_TYPE)) {
+                    || retType.equals(JavaUdfDataType.CHAR) || 
retType.equals(JavaUdfDataType.ARRAY_TYPE)
+                    || retType.equals(JavaUdfDataType.MAP_TYPE)) {
                 // If this udf return variable-size type (e.g.) String, we 
have to allocate output
                 // buffer multiple times until buffer size is enough to store 
output column. So we
                 // always begin with the last evaluated row instead of 
beginning of this batch.
@@ -111,12 +113,14 @@ public class UdfExecutor extends BaseExecutor {
                 }
             }
         } catch (Exception e) {
-            if (retType.equals(JavaUdfDataType.STRING) || 
retType.equals(JavaUdfDataType.ARRAY_TYPE)) {
+            if (retType.equals(JavaUdfDataType.STRING) || 
retType.equals(JavaUdfDataType.ARRAY_TYPE)
+                    || retType.equals(JavaUdfDataType.MAP_TYPE)) {
                 UdfUtils.UNSAFE.putLong(null, outputIntermediateStatePtr + 8, 
batchSize);
             }
             throw new UdfRuntimeException("UDF::evaluate() ran into a 
problem.", e);
         }
-        if (retType.equals(JavaUdfDataType.STRING) || 
retType.equals(JavaUdfDataType.ARRAY_TYPE)) {
+        if (retType.equals(JavaUdfDataType.STRING) || 
retType.equals(JavaUdfDataType.ARRAY_TYPE)
+                || retType.equals(JavaUdfDataType.MAP_TYPE)) {
             UdfUtils.UNSAFE.putLong(null, outputIntermediateStatePtr + 8, 
rowIdx);
         }
     }
@@ -311,14 +315,12 @@ public class UdfExecutor extends BaseExecutor {
         }
     }
 
-
-    public void copyBatchArrayResult(boolean isNullable, int numRows, Object[] 
result, long nullMapAddr,
-            long offsetsAddr, long nestedNullMapAddr, long dataAddr, long 
strOffsetAddr) {
-        Preconditions.checkState(result.length == numRows,
-                "copyBatchArrayResult result size should equal;");
+    public void copyBatchArrayResultImpl(boolean isNullable, int numRows, 
Object[] result, long nullMapAddr,
+            long offsetsAddr, long nestedNullMapAddr, long dataAddr, long 
strOffsetAddr,
+            PrimitiveType type) {
         long hasPutElementNum = 0;
         for (int row = 0; row < numRows; ++row) {
-            switch (retType.getItemType().getPrimitiveType()) {
+            switch (type) {
                 case BOOLEAN: {
                     hasPutElementNum = UdfConvert
                             .copyBatchArrayBooleanResult(hasPutElementNum, 
isNullable, row, result, nullMapAddr,
@@ -434,6 +436,95 @@ public class UdfExecutor extends BaseExecutor {
         }
     }
 
+    public void copyBatchArrayResult(boolean isNullable, int numRows, Object[] 
result, long nullMapAddr,
+            long offsetsAddr, long nestedNullMapAddr, long dataAddr, long 
strOffsetAddr) {
+        Preconditions.checkState(result.length == numRows,
+                "copyBatchArrayResult result size should equal;");
+        copyBatchArrayResultImpl(isNullable, numRows, result, nullMapAddr, 
offsetsAddr, nestedNullMapAddr, dataAddr,
+                strOffsetAddr, retType.getItemType().getPrimitiveType());
+    }
+
+    public void copyBatchMapResult(boolean isNullable, int numRows, Object[] 
result, long nullMapAddr,
+            long offsetsAddr, long keyNsestedNullMapAddr, long keyDataAddr, 
long keyStrOffsetAddr,
+            long valueNsestedNullMapAddr, long valueDataAddr, long 
valueStrOffsetAddr) {
+        Preconditions.checkState(result.length == numRows,
+                "copyBatchMapResult result size should equal;");
+        PrimitiveType keyType = retType.getKeyType().getPrimitiveType();
+        PrimitiveType valueType = retType.getValueType().getPrimitiveType();
+        Object[] keyCol = new Object[result.length];
+        Object[] valueCol = new Object[result.length];
+        switch (keyType) {
+            case BOOLEAN: {
+                new ArrayListBuilder<Boolean>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case TINYINT: {
+                new ArrayListBuilder<Byte>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case SMALLINT: {
+                new ArrayListBuilder<Short>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case INT: {
+                new ArrayListBuilder<Integer>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case BIGINT: {
+                new ArrayListBuilder<Long>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case LARGEINT: {
+                new ArrayListBuilder<BigInteger>().get(result, keyCol, 
valueCol, valueType);
+                break;
+            }
+            case FLOAT: {
+                new ArrayListBuilder<Float>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case DOUBLE: {
+                new ArrayListBuilder<Double>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case CHAR:
+            case VARCHAR:
+            case STRING: {
+                new ArrayListBuilder<String>().get(result, keyCol, valueCol, 
valueType);
+                break;
+            }
+            case DATEV2:
+            case DATE: {
+                new ArrayListBuilder<LocalDate>().get(result, keyCol, 
valueCol, valueType);
+                break;
+            }
+            case DATETIMEV2:
+            case DATETIME: {
+                new ArrayListBuilder<LocalDateTime>().get(result, keyCol, 
valueCol, valueType);
+                break;
+            }
+            case DECIMAL32:
+            case DECIMAL64:
+            case DECIMALV2:
+            case DECIMAL128: {
+                new ArrayListBuilder<BigDecimal>().get(result, keyCol, 
valueCol, valueType);
+                break;
+            }
+            default: {
+                LOG.info("Not support: " + keyType);
+                Preconditions.checkState(false, "Not support type " + 
keyType.toString());
+                break;
+            }
+        }
+
+        copyBatchArrayResultImpl(isNullable, numRows, valueCol, nullMapAddr, 
offsetsAddr, valueNsestedNullMapAddr,
+                valueDataAddr,
+                valueStrOffsetAddr, valueType);
+        copyBatchArrayResultImpl(isNullable, numRows, keyCol, nullMapAddr, 
offsetsAddr, keyNsestedNullMapAddr,
+                keyDataAddr,
+                keyStrOffsetAddr, keyType);
+
+    }
+
     /**
      * Evaluates the UDF with 'args' as the input to the UDF.
      */
@@ -539,6 +630,8 @@ public class UdfExecutor extends BaseExecutor {
                 } else {
                     retType = returnType.second;
                 }
+                Type keyType = retType.getKeyType();
+                Type valueType = retType.getValueType();
                 Pair<Boolean, JavaUdfDataType[]> inputType = 
UdfUtils.setArgTypes(parameterTypes, argClass, false);
                 if (!inputType.first) {
                     continue;
@@ -546,6 +639,8 @@ public class UdfExecutor extends BaseExecutor {
                     argTypes = inputType.second;
                 }
                 LOG.debug("Loaded UDF '" + className + "' from " + jarPath);
+                retType.setKeyType(keyType);
+                retType.setValueType(valueType);
                 return;
             }
 
@@ -649,4 +744,88 @@ public class UdfExecutor extends BaseExecutor {
             return retHashMap;
         }
     }
+
+    public static class ArrayListBuilder<keyType> {
+        public void get(Object[] map, Object[] keyCol, Object[] valueCol, 
PrimitiveType valueType) {
+            switch (valueType) {
+                case BOOLEAN: {
+                    new BuildArrayFromType<keyType, Boolean>().get(map, 
keyCol, valueCol);
+                    break;
+                }
+                case TINYINT: {
+                    new BuildArrayFromType<keyType, Byte>().get(map, keyCol, 
valueCol);
+                    break;
+                }
+                case SMALLINT: {
+                    new BuildArrayFromType<keyType, Short>().get(map, keyCol, 
valueCol);
+                    break;
+                }
+                case INT: {
+                    new BuildArrayFromType<keyType, Integer>().get(map, 
keyCol, valueCol);
+                    break;
+                }
+                case BIGINT: {
+                    new BuildArrayFromType<keyType, Long>().get(map, keyCol, 
valueCol);
+                    break;
+                }
+                case LARGEINT: {
+                    new BuildArrayFromType<keyType, BigInteger>().get(map, 
keyCol, valueCol);
+                    break;
+                }
+                case FLOAT: {
+                    new BuildArrayFromType<keyType, Float>().get(map, keyCol, 
valueCol);
+                    break;
+                }
+                case DOUBLE: {
+                    new BuildArrayFromType<keyType, Double>().get(map, keyCol, 
valueCol);
+                    break;
+                }
+                case CHAR:
+                case VARCHAR:
+                case STRING: {
+                    new BuildArrayFromType<keyType, String>().get(map, keyCol, 
valueCol);
+                    break;
+                }
+                case DATEV2:
+                case DATE: {
+                    new BuildArrayFromType<keyType, LocalDate>().get(map, 
keyCol, valueCol);
+                    break;
+                }
+                case DATETIMEV2:
+                case DATETIME: {
+                    new BuildArrayFromType<keyType, LocalDateTime>().get(map, 
keyCol, valueCol);
+                    break;
+                }
+                case DECIMAL32:
+                case DECIMAL64:
+                case DECIMALV2:
+                case DECIMAL128: {
+                    new BuildArrayFromType<keyType, BigDecimal>().get(map, 
keyCol, valueCol);
+                    break;
+                }
+                default: {
+                    LOG.info("Not support: " + valueType);
+                    Preconditions.checkState(false, "Not support type " + 
valueType.toString());
+                    break;
+                }
+            }
+        }
+    }
+
+    public static class BuildArrayFromType<T1, T2> {
+        public void get(Object[] map, Object[] keyCol, Object[] valueCol) {
+            for (int colIdx = 0; colIdx < map.length; colIdx++) {
+                HashMap<T1, T2> hashMap = (HashMap<T1, T2>) map[colIdx];
+                ArrayList<T1> keys = new ArrayList<>();
+                ArrayList<T2> values = new ArrayList<>();
+                for (Map.Entry<T1, T2> entry : hashMap.entrySet()) {
+                    keys.add(entry.getKey());
+                    values.add(entry.getValue());
+                }
+                keyCol[colIdx] = keys;
+                valueCol[colIdx] = values;
+            }
+        }
+    }
+
 }
diff --git a/regression-test/data/javaudf_p0/test_javaudf_ret_map.out 
b/regression-test/data/javaudf_p0/test_javaudf_ret_map.out
new file mode 100644
index 0000000000..ae583f8e98
--- /dev/null
+++ b/regression-test/data/javaudf_p0/test_javaudf_ret_map.out
@@ -0,0 +1,17 @@
+-- This file is automatically generated. You should know what you did if you 
want to edit this
+-- !select_1 --
+{1:1.1, 11:11.1}       {10:11, 110:111}
+{2:2.2, 22:22.2}       {20:22, 220:222}
+
+-- !select_2 --
+{1:1, 10:1, 100:1}     {100:10, 1000:10, 10:10}
+{2:2, 20:2, 200:2}     {2000:20, 20:20, 200:20}
+
+-- !select_3 --
+10     1.1     {"11410":"5141.1"}
+20     2.2     {"11420":"5142.2"}
+
+-- !select_4 --
+{"abc":"efg", "h":"i"} {"abc114":"efg514", "h114":"i514"}
+{"j":"k"}      {"j114":"k514"}
+
diff --git 
a/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapidTest.java
 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapidTest.java
new file mode 100644
index 0000000000..4e2981af7e
--- /dev/null
+++ 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapidTest.java
@@ -0,0 +1,33 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.udf;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.orc.impl.IntegerReader;
+
+import java.util.*;
+
+public class MapidTest extends UDF {
+    public HashMap<Integer, Double> evaluate(HashMap<Integer, Double> mid) {
+        HashMap<Integer, Double> ans = new HashMap<>();
+        for (Map.Entry<Integer, Double> it : mid.entrySet()) {
+            ans.put(it.getKey() * 10, it.getValue() * 10);
+        }
+        return ans;
+    }
+}
diff --git 
a/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapidssTest.java
 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapidssTest.java
new file mode 100644
index 0000000000..75e3085a8c
--- /dev/null
+++ 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapidssTest.java
@@ -0,0 +1,31 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.udf;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.orc.impl.IntegerReader;
+
+import java.util.*;
+
+public class MapidssTest extends UDF {
+    public HashMap<String, String> evaluate(Integer i, Double d) {
+        HashMap<String, String> ans = new HashMap<>();
+        ans.put("114" + i, "514" + d);
+        return ans;
+    }
+}
diff --git 
a/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapiiTest.java
 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapiiTest.java
new file mode 100644
index 0000000000..d1b00bf7b2
--- /dev/null
+++ 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapiiTest.java
@@ -0,0 +1,33 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.udf;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.orc.impl.IntegerReader;
+
+import java.util.*;
+
+public class MapiiTest extends UDF {
+    public HashMap<Integer, Integer> evaluate(HashMap<Integer, Integer> mii) {
+        HashMap<Integer, Integer> ans = new HashMap<>();
+        for (Map.Entry<Integer, Integer> it : mii.entrySet()) {
+            ans.put(it.getKey() * 10, it.getValue() * 10);
+        }
+        return ans;
+    }
+}
diff --git 
a/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapssTest.java
 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapssTest.java
new file mode 100644
index 0000000000..6027a0e75a
--- /dev/null
+++ 
b/regression-test/java-udf-src/src/main/java/org/apache/doris/udf/MapssTest.java
@@ -0,0 +1,33 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.udf;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.orc.impl.IntegerReader;
+
+import java.util.*;
+
+public class MapssTest extends UDF {
+    public HashMap<String, String> evaluate(HashMap<String, String> mp) {
+        HashMap<String, String> ans = new HashMap<>();
+        for (Map.Entry<String, String> it : mp.entrySet()) {
+            ans.put(it.getKey() + "114", it.getValue() + "514");
+        }
+        return ans;
+    }
+}
diff --git a/regression-test/suites/javaudf_p0/test_javaudf_ret_map.groovy 
b/regression-test/suites/javaudf_p0/test_javaudf_ret_map.groovy
new file mode 100644
index 0000000000..df8baa37f9
--- /dev/null
+++ b/regression-test/suites/javaudf_p0/test_javaudf_ret_map.groovy
@@ -0,0 +1,120 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.nio.file.Paths
+
+// Regression suite: verifies Java UDFs can RETURN map types (map<int,int>,
+// map<String,String>, map<int,Double>) and can build a map from scalar args.
+suite("test_javaudf_ret_map") {
+    def jarPath = 
"""${context.file.parent}/jars/java-udf-case-jar-with-dependencies.jar"""
+    log.info("Jar path: ${jarPath}".toString())
+    try {
+        // Table `db`: scalar columns plus two map columns used as UDF inputs.
+        sql """
+             CREATE TABLE IF NOT EXISTS db(
+                        `id` INT NULL COMMENT "",
+                        `i` INT NULL COMMENT "",
+                                               `d` Double NULL COMMENT "",
+                                           `mii` Map<INT, INT> NULL COMMENT "",
+                                           `mid` Map<INT, Double> NULL COMMENT 
""
+            ) ENGINE=OLAP
+            DUPLICATE KEY(`id`)
+            DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES (
+            "replication_allocation" = "tag.location.default: 1",
+            "storage_format" = "V2");
+        """
+        sql """ INSERT INTO db VALUES(1, 
10,1.1,{1:1,10:1,100:1},{1:1.1,11:11.1});   """
+        sql """ INSERT INTO db VALUES(2, 
20,2.2,{2:2,20:2,200:2},{2:2.2,22:22.2});   """
+
+        // Table `dbss`: exercises the map<String,String> input/output path.
+        sql """
+              CREATE TABLE IF NOT EXISTS dbss(
+              `id` INT NULL COMMENT "",
+                                       `m` Map<String, String> NULL COMMENT ""
+            ) ENGINE=OLAP
+            DUPLICATE KEY(`id`)
+            DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES (
+            "replication_allocation" = "tag.location.default: 1",
+            "storage_format" = "V2");
+        """
+
+        sql """ INSERT INTO dbss VALUES(1,{"abc":"efg","h":"i"}); """
+        sql """ INSERT INTO dbss VALUES(2,{"j":"k"}); """
+      
+
+        // Register the four map-returning UDFs from the test jar.
+        // retii: map<int,int> -> map<int,int> (MapiiTest multiplies keys/values by 10).
+          sql """
+          
+        CREATE FUNCTION retii(map<int,int>) RETURNS map<int,int> PROPERTIES (
+            "file"="file://${jarPath}",
+            "symbol"="org.apache.doris.udf.MapiiTest",
+            "type"="JAVA_UDF"
+        ); 
+        
+        """
+        
+        // retss: map<String,String> -> map<String,String> (MapssTest appends suffixes).
+        sql """
+          
+        CREATE FUNCTION retss(map<String,String>) RETURNS map<String,String> 
PROPERTIES (
+                    "file"="file://${jarPath}",
+                    "symbol"="org.apache.doris.udf.MapssTest",
+                    "always_nullable"="true",
+                    "type"="JAVA_UDF"
+        ); 
+        
+        """
+
+
+        // retid: map<int,Double> -> map<int,Double>.
+        sql """
+          
+            CREATE FUNCTION retid(map<int,Double>) RETURNS map<int,Double> 
PROPERTIES (
+                        "file"="file://${jarPath}",
+                        "symbol"="org.apache.doris.udf.MapidTest",
+                        "always_nullable"="true",
+                        "type"="JAVA_UDF"
+            ); 
+        
+        """
+
+        // retidss: builds a map<String,String> from scalar (int, double) arguments.
+        sql """
+          
+        CREATE FUNCTION retidss(int ,double) RETURNS map<String,String> 
PROPERTIES (
+                    "file"="file://${jarPath}",
+                    "symbol"="org.apache.doris.udf.MapidssTest",
+                    "always_nullable"="true",
+                    "type"="JAVA_UDF"
+        ); 
+        
+        """
+
+        // qt_* queries compare output against test_javaudf_ret_map.out baselines.
+        qt_select_1 """ select mid , retid(mid) from db order by id; """
+
+        qt_select_2 """ select mii , retii(mii) from db order by id; """
+
+        qt_select_3 """ select i,d,retidss(i,d) from db order by id; """
+
+        qt_select_4 """ select m,retss(m) from dbss order by id; """
+    } finally {
+        // Best-effort cleanup so reruns start from a clean catalog.
+        try_sql("DROP FUNCTION IF EXISTS retii(map<int,int>);")
+        try_sql("DROP FUNCTION IF EXISTS retss(map<String,String>);")
+        try_sql("DROP FUNCTION IF EXISTS retid(map<int,Double>);")
+        try_sql("DROP FUNCTION IF EXISTS retidss(int ,double);")
+        try_sql("DROP TABLE IF EXISTS db")
+        try_sql("DROP TABLE IF EXISTS dbss")
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org


Reply via email to