diqiu50 commented on code in PR #10494:
URL: https://github.com/apache/gravitino/pull/10494#discussion_r3008392582


##########
trino-connector/trino-connector/src/main/java/org/apache/gravitino/trino/connector/catalog/CatalogConnectorMetadata.java:
##########
@@ -396,4 +412,46 @@ public void setColumnType(SchemaTableName schemaTableName, String columnName, Ty
     String[] columnNames = {columnName};
     applyAlter(schemaTableName, TableChange.updateColumnType(columnNames, type));
   }
+
+  /**
+   * Checks whether the catalog supports function operations.
+   *
+   * @return true if the catalog supports function operations, false otherwise
+   */
+  public boolean supportsFunctions() {
+    return functionCatalog != null;
+  }
+
+  /**
+   * Lists all functions with details in the specified schema.
+   *
+   * @param schemaName the name of the schema
+   * @return an array of functions, or an empty array if functions are not supported

Review Comment:
   Please update the docs as well.



##########
trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoUDFIT.java:
##########
@@ -0,0 +1,318 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.trino.connector.integration.test;
+
+import static java.lang.Thread.sleep;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.gravitino.Catalog;
+import org.apache.gravitino.NameIdentifier;
+import org.apache.gravitino.Namespace;
+import org.apache.gravitino.function.Function;
+import org.apache.gravitino.function.FunctionCatalog;
+import org.apache.gravitino.function.FunctionDefinitions;
+import org.apache.gravitino.function.FunctionImpl;
+import org.apache.gravitino.function.FunctionImpls;
+import org.apache.gravitino.function.FunctionParams;
+import org.apache.gravitino.function.FunctionType;
+import org.apache.gravitino.rel.types.Types;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Integration test for Trino connector UDF adaptation. Verifies that functions registered in
+ * Gravitino with TRINO runtime are visible via Trino's language function API.
+ */
+@Tag("gravitino-docker-test")
+public class TrinoUDFIT extends TrinoQueryITBase {
+
+  private static final Logger LOG = LoggerFactory.getLogger(TrinoUDFIT.class);
+
+  private static final String CATALOG_NAME = "gt_hive_udf";
+  private static final String SCHEMA_NAME = "gt_udf_schema";
+  private static Catalog catalog;
+
+  @BeforeAll
+  public static void setUp() throws Exception {
+    TrinoUDFIT instance = new TrinoUDFIT();
+    instance.setup();
+
+    createHiveCatalog();
+    createSchema();
+  }
+
+  @AfterAll
+  public static void tearDown() {
+    try {
+      cleanupFunctionsAndSchema();
+      dropCatalog(CATALOG_NAME);
+    } catch (Exception e) {
+      LOG.error("Error during teardown", e);
+    }
+    TrinoQueryITBase.cleanup();
+  }
+
+  private static void createHiveCatalog() throws Exception {
+    Map<String, String> properties = new HashMap<>();
+    properties.put("metastore.uris", hiveMetastoreUri);
+
+    boolean exists = metalake.catalogExists(CATALOG_NAME);
+    if (!exists) {
+      metalake.createCatalog(
+          CATALOG_NAME, Catalog.Type.RELATIONAL, "hive", "UDF test catalog", properties);
+    }
+
+    // Wait for catalog to sync to Trino
+    boolean catalogReady = false;
+    int tries = 180;
+    while (!catalogReady && tries-- >= 0) {
+      try {
+        String result = trinoQueryRunner.runQuery("show catalogs");
+        if (result.contains(metalakeName + "." + CATALOG_NAME)) {
+          catalogReady = true;
+          break;
+        }
+      } catch (Exception e) {
+        LOG.info("Waiting for catalog to sync to Trino");
+      }
+      sleep(1000);
+    }
+
+    if (!catalogReady) {
+      throw new Exception("Catalog " + CATALOG_NAME + " sync timeout");
+    }
+
+    catalog = metalake.loadCatalog(CATALOG_NAME);
+  }
+
+  private static void createSchema() {
+    boolean exists = catalog.asSchemas().schemaExists(SCHEMA_NAME);
+    if (!exists) {
+      catalog.asSchemas().createSchema(SCHEMA_NAME, "UDF test schema", Collections.emptyMap());
+    }
+  }
+
+  private static void cleanupFunctionsAndSchema() {
+    try {
+      FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+      NameIdentifier[] functions = functionCatalog.listFunctions(Namespace.of(SCHEMA_NAME));
+      for (NameIdentifier fn : functions) {
+        functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, fn.name()));
+      }
+    } catch (Exception e) {
+      LOG.error("Error cleaning up functions", e);
+    }
+
+    try {
+      catalog.asSchemas().dropSchema(SCHEMA_NAME, false);
+    } catch (Exception e) {
+      LOG.error("Error dropping schema", e);
+    }
+  }
+
+  @Test
+  public void testListLanguageFunctionsShowsRegisteredUDF() throws Exception {
+    String functionName = "test_add_one";
+    FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+
+    // Register a scalar function: test_add_one(x INTEGER) -> INTEGER
+    // Uses TRINO runtime + SQL language so it maps to a Trino LanguageFunction
+    // SQL body "RETURN x + 1" adds 1 to the input integer
+    Function function =
+        functionCatalog.registerFunction(
+            NameIdentifier.of(SCHEMA_NAME, functionName),
+            "Adds one to input",
+            FunctionType.SCALAR,
+            true,
+            FunctionDefinitions.of(
+                FunctionDefinitions.of(
+                    FunctionParams.of(FunctionParams.of("x", Types.IntegerType.get())),
+                    Types.IntegerType.get(),
+                    FunctionImpls.of(
+                        FunctionImpls.ofSql(FunctionImpl.RuntimeType.TRINO, "RETURN x + 1")))));
+    Assertions.assertNotNull(function);
+
+    // Query Trino to verify the function is listed
+    String trinoCatalogName = metalakeName + "." + CATALOG_NAME;
+    String showFunctionsQuery =
+        String.format("SHOW FUNCTIONS FROM %s.%s", trinoCatalogName, 
SCHEMA_NAME);
+    String result = trinoQueryRunner.runQuery(showFunctionsQuery);
+
+    LOG.info("SHOW FUNCTIONS result: {}", result);
+    Assertions.assertTrue(
+        result.contains(functionName),
+        "Expected function " + functionName + " to be listed. Got: " + result);
+
+    // Cleanup
+    functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, functionName));
+  }
+
+  @Test
+  public void testSelectUDFReturnsCorrectResult() throws Exception {
+    String functionName = "test_add_five";
+    FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+
+    // Register a scalar function: test_add_five(x INTEGER) -> INTEGER
+    // SQL body "RETURN x + 5" adds 5 to the input integer
+    Function function =
+        functionCatalog.registerFunction(
+            NameIdentifier.of(SCHEMA_NAME, functionName),
+            "Adds five to input",
+            FunctionType.SCALAR,
+            true,
+            FunctionDefinitions.of(
+                FunctionDefinitions.of(
+                    FunctionParams.of(FunctionParams.of("x", Types.IntegerType.get())),
+                    Types.IntegerType.get(),
+                    FunctionImpls.of(
+                        FunctionImpls.ofSql(FunctionImpl.RuntimeType.TRINO, "RETURN x + 5")))));
+    Assertions.assertNotNull(function);
+
+    // Invoke the function via SELECT and verify the result
+    String trinoCatalogName = metalakeName + "." + CATALOG_NAME;
+    String selectQuery =
+        String.format("SELECT %s.%s.%s(5)", trinoCatalogName, SCHEMA_NAME, 
functionName);
+    String result = trinoQueryRunner.runQuery(selectQuery);
+
+    LOG.info("SELECT result: {}", result);
+    Assertions.assertTrue(
+        result.contains("10"), "Expected SELECT test_add_five(5) to return 10. 
Got: " + result);
+
+    // Cleanup
+    functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, functionName));
+  }
+
+  @Test
+  public void testListLanguageFunctionsFiltersNonTrinoRuntime() throws Exception {
+    String functionName = "spark_only_func";
+    FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+
+    // Register a scalar function: spark_only_func(x INTEGER) -> INTEGER
+    // Uses SPARK runtime, so this should NOT be visible in Trino
+    Function function =
+        functionCatalog.registerFunction(
+            NameIdentifier.of(SCHEMA_NAME, functionName),
+            "Spark-only function",
+            FunctionType.SCALAR,
+            true,
+            FunctionDefinitions.of(
+                FunctionDefinitions.of(
+                    FunctionParams.of(FunctionParams.of("x", Types.IntegerType.get())),
+                    Types.IntegerType.get(),
+                    FunctionImpls.of(
+                        FunctionImpls.ofSql(FunctionImpl.RuntimeType.SPARK, "RETURN x + 1")))));
+    Assertions.assertNotNull(function);
+
+    // Query Trino - SPARK runtime function should be filtered out
+    String trinoCatalogName = metalakeName + "." + CATALOG_NAME;
+    String showFunctionsQuery =
+        String.format("SHOW FUNCTIONS FROM %s.%s", trinoCatalogName, 
SCHEMA_NAME);
+    String result = trinoQueryRunner.runQuery(showFunctionsQuery);
+
+    LOG.info("SHOW FUNCTIONS result (should not contain spark_only_func): {}", 
result);
+    Assertions.assertFalse(
+        result.contains(functionName),
+        "SPARK runtime function should not appear in Trino. Got: " + result);
+
+    // Cleanup
+    functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, functionName));
+  }
+
+  @Test
+  public void testMultipleUDFsInSameSchema() throws Exception {
+    String func1Name = "udf_multiply";
+    String func2Name = "udf_concat";
+    FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+
+    // Register two TRINO SQL functions:
+    // udf_multiply(x INTEGER) -> INTEGER: multiplies input by 2
+    // udf_concat(a STRING, b STRING) -> STRING: concatenates two strings
+    functionCatalog.registerFunction(
+        NameIdentifier.of(SCHEMA_NAME, func1Name),
+        "Multiply by 2",
+        FunctionType.SCALAR,
+        true,
+        FunctionDefinitions.of(
+            FunctionDefinitions.of(
+                FunctionParams.of(FunctionParams.of("x", Types.IntegerType.get())),
+                Types.IntegerType.get(),
+                FunctionImpls.of(
+                    FunctionImpls.ofSql(FunctionImpl.RuntimeType.TRINO, "RETURN x * 2")))));
+
+    functionCatalog.registerFunction(
+        NameIdentifier.of(SCHEMA_NAME, func2Name),
+        "Concat strings",
+        FunctionType.SCALAR,
+        true,
+        FunctionDefinitions.of(
+            FunctionDefinitions.of(
+                FunctionParams.of(
+                    FunctionParams.of("a", Types.StringType.get()),
+                    FunctionParams.of("b", Types.StringType.get())),
+                Types.StringType.get(),
+                FunctionImpls.of(
+                    FunctionImpls.ofSql(FunctionImpl.RuntimeType.TRINO, "RETURN concat(a, b)")))));
+
+    // Query Trino to verify both functions are listed
+    String trinoCatalogName = metalakeName + "." + CATALOG_NAME;
+    String showFunctionsQuery =
+        String.format("SHOW FUNCTIONS FROM %s.%s", trinoCatalogName, 
SCHEMA_NAME);
+    String result = trinoQueryRunner.runQuery(showFunctionsQuery);
+
+    LOG.info("SHOW FUNCTIONS result: {}", result);
+    Assertions.assertTrue(
+        result.contains(func1Name), "Expected " + func1Name + " to be listed. Got: " + result);
+    Assertions.assertTrue(
+        result.contains(func2Name), "Expected " + func2Name + " to be listed. Got: " + result);
+
+    // Cleanup
+    functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, func1Name));
+    functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, func2Name));
+  }
+
+  @Test
+  public void testNoFunctionsWhenSchemaIsEmpty() {
+    // Create a separate empty schema
+    String emptySchema = "gt_empty_udf_schema";
+    boolean exists = catalog.asSchemas().schemaExists(emptySchema);
+    if (!exists) {
+      catalog.asSchemas().createSchema(emptySchema, "empty schema", Collections.emptyMap());
+    }
+
+    String trinoCatalogName = metalakeName + "." + CATALOG_NAME;
+    String showFunctionsQuery =
+        String.format("SHOW FUNCTIONS FROM %s.%s", trinoCatalogName, 
emptySchema);
+    String result = trinoQueryRunner.runQuery(showFunctionsQuery);
+
+    LOG.info("SHOW FUNCTIONS for empty schema: {}", result);
+    // For an empty schema, the result should not contain any custom function names
+    Assertions.assertFalse(
+        result.contains("udf_"), "Expected no UDFs in empty schema. Got: " + 
result);

Review Comment:
   Can you clarify the criteria here? `udf_` is too vague.
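
   One possible way to make the criterion explicit (a sketch only; it reuses the function names that are registered elsewhere in this test class):

   ```java
   // Assert that none of the functions this test class registers show up for the
   // empty schema, instead of matching the loose "udf_" prefix.
   for (String registered :
       new String[] {"test_add_one", "test_add_five", "udf_multiply", "udf_concat", "spark_only_func"}) {
     Assertions.assertFalse(
         result.contains(registered),
         "Expected no registered UDF " + registered + " in empty schema. Got: " + result);
   }
   ```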



##########
trino-connector/trino-connector/src/main/java/org/apache/gravitino/trino/connector/GravitinoMetadata.java:
##########
@@ -679,4 +695,85 @@ private String getColumnName(
     }
     return internalMetadataColumnMetadata.getName();
   }
+
+  @Override
+  public Collection<LanguageFunction> listLanguageFunctions(
+      ConnectorSession session, String schemaName) {
+    if (!catalogConnectorMetadata.supportsFunctions()) {
+      return List.of();
+    }
+    return Arrays.stream(catalogConnectorMetadata.listFunctionInfos(schemaName))
+        .flatMap(function -> toLanguageFunctions(function).stream())
+        .toList();
+  }
+
+  @Override
+  public Collection<LanguageFunction> getLanguageFunctions(
+      ConnectorSession session, SchemaFunctionName name) {
+    if (!catalogConnectorMetadata.supportsFunctions()) {
+      return List.of();
+    }
+    try {
+      Function function =
+          catalogConnectorMetadata.getFunction(name.getSchemaName(), name.getFunctionName());
+      if (function == null) {
+        return List.of();
+      }
+      return toLanguageFunctions(function);
+    } catch (NoSuchFunctionException e) {
+      LOG.debug("Function {} not found in schema {}", name.getFunctionName(), 
name.getSchemaName());
+      return List.of();
+    }
+  }
+
+  /**
+   * Converts a Gravitino function to a collection of Trino LanguageFunction instances. Only SQL
+   * implementations with TRINO runtime are included. Each definition with a Trino SQL
+   * implementation produces one LanguageFunction. The signature token is generated from the
+   * function name and parameter types.
+   */
+  private Collection<LanguageFunction> toLanguageFunctions(Function function) {
+    List<LanguageFunction> result = new ArrayList<>();
+    for (FunctionDefinition definition : function.definitions()) {
+      for (FunctionImpl impl : definition.impls()) {
+        if (!isTrinoSqlImplementation(impl)) {
+          continue;
+        }
+        String sql = ((SQLImpl) impl).sql();
+        try {
+          String signatureToken = buildSignatureToken(function.name(), definition.parameters());
+          result.add(new LanguageFunction(signatureToken, sql, List.of(), Optional.empty()));
+        } catch (Exception e) {
+          LOG.warn(
+              "Failed to build signature token for function {}: {}",
+              function.name(),
+              e.getMessage());

Review Comment:
   The stack trace is missing; pass the exception itself as the last logging argument.
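
   With SLF4J, passing the throwable as the final logging argument makes the full stack trace part of the warning. A minimal sketch of the suggested change, using the same names as in the diff above:

   ```java
   // Drop the getMessage() placeholder and hand the exception itself to LOG.warn so
   // SLF4J appends the complete stack trace.
   LOG.warn("Failed to build signature token for function {}", function.name(), e);
   ```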



##########
trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoUDFIT.java:
##########
@@ -0,0 +1,318 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.trino.connector.integration.test;
+
+import static java.lang.Thread.sleep;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.gravitino.Catalog;
+import org.apache.gravitino.NameIdentifier;
+import org.apache.gravitino.Namespace;
+import org.apache.gravitino.function.Function;
+import org.apache.gravitino.function.FunctionCatalog;
+import org.apache.gravitino.function.FunctionDefinitions;
+import org.apache.gravitino.function.FunctionImpl;
+import org.apache.gravitino.function.FunctionImpls;
+import org.apache.gravitino.function.FunctionParams;
+import org.apache.gravitino.function.FunctionType;
+import org.apache.gravitino.rel.types.Types;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Integration test for Trino connector UDF adaptation. Verifies that functions registered in
+ * Gravitino with TRINO runtime are visible via Trino's language function API.
+ */
+@Tag("gravitino-docker-test")
+public class TrinoUDFIT extends TrinoQueryITBase {
+
+  private static final Logger LOG = LoggerFactory.getLogger(TrinoUDFIT.class);
+
+  private static final String CATALOG_NAME = "gt_hive_udf";
+  private static final String SCHEMA_NAME = "gt_udf_schema";
+  private static Catalog catalog;
+
+  @BeforeAll
+  public static void setUp() throws Exception {
+    TrinoUDFIT instance = new TrinoUDFIT();
+    instance.setup();
+
+    createHiveCatalog();
+    createSchema();
+  }
+
+  @AfterAll
+  public static void tearDown() {
+    try {
+      cleanupFunctionsAndSchema();
+      dropCatalog(CATALOG_NAME);
+    } catch (Exception e) {
+      LOG.error("Error during teardown", e);
+    }
+    TrinoQueryITBase.cleanup();
+  }
+
+  private static void createHiveCatalog() throws Exception {
+    Map<String, String> properties = new HashMap<>();
+    properties.put("metastore.uris", hiveMetastoreUri);
+
+    boolean exists = metalake.catalogExists(CATALOG_NAME);
+    if (!exists) {
+      metalake.createCatalog(
+          CATALOG_NAME, Catalog.Type.RELATIONAL, "hive", "UDF test catalog", properties);
+    }
+
+    // Wait for catalog to sync to Trino
+    boolean catalogReady = false;
+    int tries = 180;
+    while (!catalogReady && tries-- >= 0) {
+      try {
+        String result = trinoQueryRunner.runQuery("show catalogs");
+        if (result.contains(metalakeName + "." + CATALOG_NAME)) {
+          catalogReady = true;
+          break;
+        }
+      } catch (Exception e) {
+        LOG.info("Waiting for catalog to sync to Trino");
+      }
+      sleep(1000);
+    }
+
+    if (!catalogReady) {
+      throw new Exception("Catalog " + CATALOG_NAME + " sync timeout");
+    }
+
+    catalog = metalake.loadCatalog(CATALOG_NAME);
+  }
+
+  private static void createSchema() {
+    boolean exists = catalog.asSchemas().schemaExists(SCHEMA_NAME);
+    if (!exists) {
+      catalog.asSchemas().createSchema(SCHEMA_NAME, "UDF test schema", Collections.emptyMap());
+    }
+  }
+
+  private static void cleanupFunctionsAndSchema() {
+    try {
+      FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+      NameIdentifier[] functions = functionCatalog.listFunctions(Namespace.of(SCHEMA_NAME));
+      for (NameIdentifier fn : functions) {
+        functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, fn.name()));
+      }
+    } catch (Exception e) {
+      LOG.error("Error cleaning up functions", e);
+    }
+
+    try {
+      catalog.asSchemas().dropSchema(SCHEMA_NAME, false);
+    } catch (Exception e) {
+      LOG.error("Error dropping schema", e);
+    }
+  }
+
+  @Test
+  public void testListLanguageFunctionsShowsRegisteredUDF() throws Exception {
+    String functionName = "test_add_one";
+    FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+
+    // Register a scalar function: test_add_one(x INTEGER) -> INTEGER
+    // Uses TRINO runtime + SQL language so it maps to a Trino LanguageFunction
+    // SQL body "RETURN x + 1" adds 1 to the input integer
+    Function function =
+        functionCatalog.registerFunction(
+            NameIdentifier.of(SCHEMA_NAME, functionName),
+            "Adds one to input",
+            FunctionType.SCALAR,
+            true,
+            FunctionDefinitions.of(
+                FunctionDefinitions.of(
+                    FunctionParams.of(FunctionParams.of("x", Types.IntegerType.get())),
+                    Types.IntegerType.get(),
+                    FunctionImpls.of(
+                        FunctionImpls.ofSql(FunctionImpl.RuntimeType.TRINO, "RETURN x + 1")))));
+    Assertions.assertNotNull(function);
+
+    // Query Trino to verify the function is listed
+    String trinoCatalogName = metalakeName + "." + CATALOG_NAME;
+    String showFunctionsQuery =
+        String.format("SHOW FUNCTIONS FROM %s.%s", trinoCatalogName, 
SCHEMA_NAME);
+    String result = trinoQueryRunner.runQuery(showFunctionsQuery);
+
+    LOG.info("SHOW FUNCTIONS result: {}", result);
+    Assertions.assertTrue(
+        result.contains(functionName),
+        "Expected function " + functionName + " to be listed. Got: " + result);
+
+    // Cleanup
+    functionCatalog.dropFunction(NameIdentifier.of(SCHEMA_NAME, functionName));
+  }
+
+  @Test
+  public void testSelectUDFReturnsCorrectResult() throws Exception {
+    String functionName = "test_add_five";
+    FunctionCatalog functionCatalog = catalog.asFunctionCatalog();
+
+    // Register a scalar function: test_add_five(x INTEGER) -> INTEGER
+    // SQL body "RETURN x + 5" adds 5 to the input integer
+    Function function =
+        functionCatalog.registerFunction(
+            NameIdentifier.of(SCHEMA_NAME, functionName),
+            "Adds five to input",
+            FunctionType.SCALAR,
+            true,
+            FunctionDefinitions.of(
+                FunctionDefinitions.of(
+                    FunctionParams.of(FunctionParams.of("x", Types.IntegerType.get())),
+                    Types.IntegerType.get(),
+                    FunctionImpls.of(
+                        FunctionImpls.ofSql(FunctionImpl.RuntimeType.TRINO, "RETURN x + 5")))));
+    Assertions.assertNotNull(function);
+
+    // Invoke the function via SELECT and verify the result
+    String trinoCatalogName = metalakeName + "." + CATALOG_NAME;
+    String selectQuery =
+        String.format("SELECT %s.%s.%s(5)", trinoCatalogName, SCHEMA_NAME, 
functionName);
+    String result = trinoQueryRunner.runQuery(selectQuery);
+
+    LOG.info("SELECT result: {}", result);
+    Assertions.assertTrue(
+        result.contains("10"), "Expected SELECT test_add_five(5) to return 10. 
Got: " + result);

Review Comment:
   Same issue as above: `result.contains("10")` is too loose a check.
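
   A possible tightening, assuming `trinoQueryRunner.runQuery()` returns just the single result cell for this query (adjust to the actual output format):

   ```java
   // Sketch only: an exact comparison avoids false positives such as "100" or "210"
   // satisfying contains("10").
   Assertions.assertEquals(
       "10", result.trim(), "Expected SELECT test_add_five(5) to return exactly 10");
   ```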



##########
trino-connector/trino-connector/src/test/java/org/apache/gravitino/trino/connector/TestGravitinoMetadataFunction.java:
##########
@@ -0,0 +1,252 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.trino.connector;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import io.trino.spi.connector.ConnectorMetadata;
+import io.trino.spi.connector.ConnectorSession;
+import io.trino.spi.function.LanguageFunction;
+import io.trino.spi.function.SchemaFunctionName;
+import java.util.Collection;
+import java.util.List;
+import org.apache.gravitino.Audit;
+import org.apache.gravitino.exceptions.NoSuchFunctionException;
+import org.apache.gravitino.function.Function;
+import org.apache.gravitino.function.FunctionDefinition;
+import org.apache.gravitino.function.FunctionImpl;
+import org.apache.gravitino.function.FunctionImpls;
+import org.apache.gravitino.function.FunctionParam;
+import org.apache.gravitino.function.FunctionType;
+import org.apache.gravitino.rel.types.Types;
+import org.apache.gravitino.trino.connector.catalog.CatalogConnectorMetadata;
+import org.apache.gravitino.trino.connector.catalog.CatalogConnectorMetadataAdapter;
+import org.apache.gravitino.trino.connector.util.GeneralDataTypeTransformer;
+import org.junit.jupiter.api.Test;
+
+public class TestGravitinoMetadataFunction {
+
+  @Test
+  public void testListLanguageFunctionsReturnsTrinoSqlFunctions() {
+    Function function =
+        createMockFunction("my_func", "RETURN x + 1", 
FunctionImpl.RuntimeType.TRINO);
+    CatalogConnectorMetadata catalogMetadata = mock(CatalogConnectorMetadata.class);
+    when(catalogMetadata.supportsFunctions()).thenReturn(true);
+    when(catalogMetadata.listFunctionInfos("test_schema")).thenReturn(new 
Function[] {function});
+
+    GravitinoMetadata metadata = createTestMetadata(catalogMetadata);
+    ConnectorSession session = mock(ConnectorSession.class);
+
+    Collection<LanguageFunction> functions = 
metadata.listLanguageFunctions(session, "test_schema");
+    assertEquals(1, functions.size());
+
+    LanguageFunction langFunc = functions.iterator().next();
+    assertEquals("RETURN x + 1", langFunc.sql());
+    assertEquals("my_func(integer)", langFunc.signatureToken());
+  }
+
+  @Test
+  public void testGetLanguageFunctionsReturnsTrinoSqlFunctions() {
+    Function function =
+        createMockFunction("my_func", "RETURN x + 1", 
FunctionImpl.RuntimeType.TRINO);
+    CatalogConnectorMetadata catalogMetadata = 
mock(CatalogConnectorMetadata.class);
+    when(catalogMetadata.supportsFunctions()).thenReturn(true);
+    when(catalogMetadata.getFunction("test_schema", 
"my_func")).thenReturn(function);
+
+    GravitinoMetadata metadata = createTestMetadata(catalogMetadata);
+    ConnectorSession session = mock(ConnectorSession.class);
+
+    Collection<LanguageFunction> functions =
+        metadata.getLanguageFunctions(session, new SchemaFunctionName("test_schema", "my_func"));
+    assertEquals(1, functions.size());
+
+    LanguageFunction langFunc = functions.iterator().next();
+    assertEquals("RETURN x + 1", langFunc.sql());
+  }
+
+  @Test
+  public void testListLanguageFunctionsFiltersNonTrinoRuntime() {
+    Function sparkFunction =
+        createMockFunction("spark_func", "RETURN 1", 
FunctionImpl.RuntimeType.SPARK);
+    Function trinoFunction =
+        createMockFunction("trino_func", "RETURN 2", 
FunctionImpl.RuntimeType.TRINO);
+
+    CatalogConnectorMetadata catalogMetadata = mock(CatalogConnectorMetadata.class);
+    when(catalogMetadata.supportsFunctions()).thenReturn(true);
+    when(catalogMetadata.listFunctionInfos("test_schema"))
+        .thenReturn(new Function[] {sparkFunction, trinoFunction});
+
+    GravitinoMetadata metadata = createTestMetadata(catalogMetadata);
+    ConnectorSession session = mock(ConnectorSession.class);
+
+    Collection<LanguageFunction> functions = 
metadata.listLanguageFunctions(session, "test_schema");
+    assertEquals(1, functions.size());
+    assertEquals("RETURN 2", functions.iterator().next().sql());
+  }
+
+  @Test
+  public void testListLanguageFunctionsWhenUnsupported() {
+    CatalogConnectorMetadata catalogMetadata = mock(CatalogConnectorMetadata.class);
+    when(catalogMetadata.supportsFunctions()).thenReturn(false);
+
+    GravitinoMetadata metadata = createTestMetadata(catalogMetadata);
+    ConnectorSession session = mock(ConnectorSession.class);
+
+    Collection<LanguageFunction> functions = 
metadata.listLanguageFunctions(session, "test_schema");
+    assertTrue(functions.isEmpty());
+  }
+
+  @Test
+  public void testGetLanguageFunctionsWhenFunctionNotFound() {
+    CatalogConnectorMetadata catalogMetadata = mock(CatalogConnectorMetadata.class);
+    when(catalogMetadata.supportsFunctions()).thenReturn(true);
+    when(catalogMetadata.getFunction("test_schema", "no_such_func"))
+        .thenThrow(new NoSuchFunctionException("Function not found"));
+
+    GravitinoMetadata metadata = createTestMetadata(catalogMetadata);
+    ConnectorSession session = mock(ConnectorSession.class);
+
+    Collection<LanguageFunction> functions =
+        metadata.getLanguageFunctions(
+            session, new SchemaFunctionName("test_schema", "no_such_func"));
+    assertTrue(functions.isEmpty());
+  }

Review Comment:
   Add some tests for `supportsFunctions=false` as well, e.g. for `getLanguageFunctions`.
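
   For example, a sketch that mirrors the style of the tests above and reuses the same `createTestMetadata` helper:

   ```java
   @Test
   public void testGetLanguageFunctionsWhenUnsupported() {
     // supportsFunctions() == false should short-circuit getLanguageFunctions as well.
     CatalogConnectorMetadata catalogMetadata = mock(CatalogConnectorMetadata.class);
     when(catalogMetadata.supportsFunctions()).thenReturn(false);

     GravitinoMetadata metadata = createTestMetadata(catalogMetadata);
     ConnectorSession session = mock(ConnectorSession.class);

     Collection<LanguageFunction> functions =
         metadata.getLanguageFunctions(session, new SchemaFunctionName("test_schema", "my_func"));
     assertTrue(functions.isEmpty());
   }
   ```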


