danhuawang opened a new issue, #10900:
URL: https://github.com/apache/gravitino/issues/10900

   ### Version
   
   main branch
   
   ### Describe what's wrong
   
   ```
   wangdanhua@wangdanhuadeMBP env1-cross-metalake % trino --server 
http://34.138.225.171:8080 --debug
   trino> SHOW CATALOGS;
       -> 
       Catalog    
   ---------------
    gravitino_irc 
    jmx           
    memory        
    remote_irc    
    system        
    tpcds         
    tpch          
   (7 rows)
   
   Query 20260429_082146_00000_nrzdn, FINISHED, 1 node
   http://34.138.225.171:8080/ui/query.html?20260429_082146_00000_nrzdn
   Splits: 5 total, 5 done (100.00%)
   CPU Time: 0.0s total,     0 rows/s, 8.01KB/s, 43% active
   Per Node: 0.0 parallelism,     0 rows/s,    98B/s
   Parallelism: 0.0
   Peak Memory: 334B
   2.52 [0 rows, 246B] [0 rows/s, 98B/s]
   
   trino> CREATE SCHEMA gravitino_irc.company_gcs;
   CREATE SCHEMA
   trino> SHOW CREATE SCHEMA gravitino_irc.company_gcs; 
                                           Create Schema                        
                 
   
----------------------------------------------------------------------------------------------
    CREATE SCHEMA gravitino_irc.company_gcs                                     
                 
    WITH (                                                                      
                 
       location = 
'gs://irc-e2e-env1-metalake-a-warehouse/env1/metalake_a/catalog_1/company_gcs' 
    )                                                                           
                 
   (1 row)
   
   Query 20260429_082211_00002_nrzdn, FINISHED, 1 node
   http://34.138.225.171:8080/ui/query.html?20260429_082211_00002_nrzdn
   Splits: 1 total, 1 done (100.00%)
   CPU Time: 0.0s total,     0 rows/s,     0B/s, 100% active
   Per Node: 0.0 parallelism,     0 rows/s,     0B/s
   Parallelism: 0.0
   Peak Memory: 432B
   1.21 [0 rows, 0B] [0 rows/s, 0B/s]
   
   trino> CREATE TABLE gravitino_irc.company_gcs.employees_gcs ( name varchar, 
salary decimal(10,2) ) WITH ( format = 'PARQUET' );
   Query 20260429_082317_00003_nrzdn failed: no JSON input found
   java.lang.IllegalArgumentException: no JSON input found
        at 
com.google.common.base.Preconditions.checkArgument(Preconditions.java:141)
        at 
com.google.api.client.util.Preconditions.checkArgument(Preconditions.java:47)
        at 
com.google.api.client.json.JsonParser.startParsing(JsonParser.java:215)
        at com.google.api.client.json.JsonParser.parse(JsonParser.java:358)
        at com.google.api.client.json.JsonParser.parse(JsonParser.java:335)
        at 
com.google.api.client.json.JsonObjectParser.parseAndClose(JsonObjectParser.java:79)
        at 
com.google.api.client.json.JsonObjectParser.parseAndClose(JsonObjectParser.java:73)
        at 
com.google.auth.oauth2.GoogleCredentials.fromStream(GoogleCredentials.java:285)
        at 
com.google.auth.oauth2.GoogleCredentials.fromStream(GoogleCredentials.java:235)
        at 
io.trino.filesystem.gcs.GcsAccessTokenAuth.setAuth(GcsAccessTokenAuth.java:36)
        at 
io.trino.filesystem.gcs.GcsStorageFactory.create(GcsStorageFactory.java:67)
        at 
io.trino.filesystem.gcs.GcsFileSystemFactory.create(GcsFileSystemFactory.java:59)
        at 
io.trino.filesystem.switching.SwitchingFileSystem.lambda$createFileSystem$0(SwitchingFileSystem.java:200)
        at java.base/java.util.Optional.orElseGet(Optional.java:364)
        at 
io.trino.filesystem.switching.SwitchingFileSystem.createFileSystem(SwitchingFileSystem.java:199)
        at 
io.trino.filesystem.switching.SwitchingFileSystem.fileSystem(SwitchingFileSystem.java:194)
        at 
io.trino.filesystem.switching.SwitchingFileSystem.listFiles(SwitchingFileSystem.java:116)
        at 
io.trino.filesystem.tracing.TracingFileSystem.lambda$listFiles$0(TracingFileSystem.java:119)
        at io.trino.filesystem.tracing.Tracing.withTracing(Tracing.java:51)
        at 
io.trino.filesystem.tracing.TracingFileSystem.listFiles(TracingFileSystem.java:119)
        at 
io.trino.filesystem.cache.CacheFileSystem.listFiles(CacheFileSystem.java:106)
        at 
io.trino.plugin.iceberg.IcebergMetadata.beginCreateTable(IcebergMetadata.java:1309)
        at 
io.trino.plugin.iceberg.IcebergMetadata.createTable(IcebergMetadata.java:1196)
        at 
io.trino.plugin.base.classloader.ClassLoaderSafeConnectorMetadata.createTable(ClassLoaderSafeConnectorMetadata.java:461)
        at 
io.trino.tracing.TracingConnectorMetadata.createTable(TracingConnectorMetadata.java:388)
        at 
io.trino.metadata.MetadataManager.createTable(MetadataManager.java:878)
        at 
io.trino.tracing.TracingMetadata.createTable(TracingMetadata.java:436)
        at 
io.trino.execution.CreateTableTask.internalExecute(CreateTableTask.java:306)
        at io.trino.execution.CreateTableTask.execute(CreateTableTask.java:128)
        at io.trino.execution.CreateTableTask.execute(CreateTableTask.java:94)
        at 
io.trino.execution.DataDefinitionExecution.start(DataDefinitionExecution.java:152)
        at 
io.trino.execution.SqlQueryManager.createQuery(SqlQueryManager.java:284)
        at 
io.trino.dispatcher.LocalDispatchQuery.startExecution(LocalDispatchQuery.java:150)
        at 
io.trino.dispatcher.LocalDispatchQuery.lambda$waitForMinimumWorkers$1(LocalDispatchQuery.java:134)
        at 
io.airlift.concurrent.MoreFutures.lambda$addSuccessCallback$0(MoreFutures.java:570)
        at io.airlift.concurrent.MoreFutures$3.onSuccess(MoreFutures.java:545)
        at 
com.google.common.util.concurrent.Futures$CallbackListener.run(Futures.java:1132)
        at io.trino.$gen.Trino_478____20260429_080531_2.run(Unknown Source)
        at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1090)
        at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:614)
        at java.base/java.lang.Thread.run(Thread.java:1474)
   
   trino> exit
   
   ```
   
   ### Error message and/or stacktrace
   
   gravitino-server.log
   
   ```
        at java.base/java.lang.Thread.run(Thread.java:840) [?:?]
   2026-04-29 08:22:08.357 INFO [ForkJoinPool.commonPool-worker-9] 
[org.apache.gravitino.iceberg.service.IcebergCatalogWrapperManager.lambda$new$0(IcebergCatalogWrapperManager.java:66)]
 - Remove IcebergCatalogWrapper cache default_catalog.
   2026-04-29 08:22:08.358 INFO [ForkJoinPool.commonPool-worker-9] 
[org.apache.gravitino.iceberg.common.ops.IcebergCatalogWrapper.close(IcebergCatalogWrapper.java:355)]
 - Closing IcebergCatalogWrapper for catalog: jdbc
   2026-04-29 08:22:09.431 INFO [ForkJoinPool.commonPool-worker-9] 
[org.apache.gravitino.iceberg.service.IcebergCatalogWrapperManager.lambda$new$0(IcebergCatalogWrapperManager.java:66)]
 - Remove IcebergCatalogWrapper cache catalog_1.
   2026-04-29 08:22:09.432 INFO [ForkJoinPool.commonPool-worker-9] 
[org.apache.gravitino.iceberg.common.ops.IcebergCatalogWrapper.close(IcebergCatalogWrapper.java:355)]
 - Closing IcebergCatalogWrapper for catalog: jdbc
   2026-04-29 08:22:11.945 INFO [iceberg-rest-43] 
[org.apache.gravitino.iceberg.common.utils.IcebergCatalogUtil.loadCatalogBackend(IcebergCatalogUtil.java:163)]
 - Load catalog backend of JDBC
   2026-04-29 08:22:11.972 INFO [iceberg-rest-43] 
[org.apache.iceberg.CatalogUtil.loadFileIO(CatalogUtil.java:382)] - Loading 
custom FileIO implementation: org.apache.iceberg.gcp.gcs.GCSFileIO
   2026-04-29 08:22:12.007 INFO [iceberg-rest-43] 
[org.apache.gravitino.iceberg.service.rest.IcebergNamespaceOperations.namespaceExists(IcebergNamespaceOperations.java:173)]
 - Check Iceberg namespace exists, catalog: catalog_1, namespace: company_gcs
   2026-04-29 08:22:12.068 INFO [iceberg-rest-38] 
[org.apache.gravitino.iceberg.service.rest.IcebergNamespaceOperations.namespaceExists(IcebergNamespaceOperations.java:173)]
 - Check Iceberg namespace exists, catalog: catalog_1, namespace: company_gcs
   2026-04-29 08:22:12.117 INFO [iceberg-rest-42] 
[org.apache.gravitino.iceberg.service.rest.IcebergNamespaceOperations.loadNamespace(IcebergNamespaceOperations.java:142)]
 - Load Iceberg namespace, catalog: catalog_1, namespace: company_gcs
   2026-04-29 08:22:12.241 INFO [iceberg-rest-39] 
[org.apache.gravitino.iceberg.service.rest.IcebergNamespaceOperations.namespaceExists(IcebergNamespaceOperations.java:173)]
 - Check Iceberg namespace exists, catalog: catalog_1, namespace: company_gcs
   2026-04-29 08:22:17.999 INFO [ForkJoinPool.commonPool-worker-9] 
[org.apache.gravitino.iceberg.service.IcebergCatalogWrapperManager.lambda$new$0(IcebergCatalogWrapperManager.java:66)]
 - Remove IcebergCatalogWrapper cache catalog_1.
   2026-04-29 08:22:18.000 INFO [ForkJoinPool.commonPool-worker-9] 
[org.apache.gravitino.iceberg.common.ops.IcebergCatalogWrapper.close(IcebergCatalogWrapper.java:355)]
 - Closing IcebergCatalogWrapper for catalog: jdbc
   2026-04-29 08:23:18.260 INFO [iceberg-rest-42] 
[org.apache.gravitino.iceberg.common.utils.IcebergCatalogUtil.loadCatalogBackend(IcebergCatalogUtil.java:163)]
 - Load catalog backend of JDBC
   2026-04-29 08:23:18.281 INFO [iceberg-rest-42] 
[org.apache.iceberg.CatalogUtil.loadFileIO(CatalogUtil.java:382)] - Loading 
custom FileIO implementation: org.apache.iceberg.gcp.gcs.GCSFileIO
   2026-04-29 08:23:18.457 INFO [iceberg-rest-42] 
[org.apache.gravitino.iceberg.service.rest.IcebergTableOperations.loadTable(IcebergTableOperations.java:301)]
 - Load Iceberg table, catalog: catalog_1, namespace: company_gcs, table: 
employees_gcs, access delegation: vended-credentials, credential vending: true
   2026-04-29 08:23:18.467 INFO [iceberg-rest-42] 
[org.apache.gravitino.iceberg.service.IcebergExceptionMapper.toRESTResponse(IcebergExceptionMapper.java:136)]
 - Iceberg REST server error maybe caused by user request, response http 
status: 404, exception: class 
org.apache.iceberg.exceptions.NoSuchTableException, exception message: Table 
does not exist: company_gcs.employees_gcs
   2026-04-29 08:23:18.563 INFO [iceberg-rest-36] 
[org.apache.gravitino.iceberg.service.rest.IcebergNamespaceOperations.namespaceExists(IcebergNamespaceOperations.java:173)]
 - Check Iceberg namespace exists, catalog: catalog_1, namespace: company_gcs
   2026-04-29 08:23:18.661 INFO [iceberg-rest-38] 
[org.apache.gravitino.iceberg.service.rest.IcebergNamespaceOperations.loadNamespace(IcebergNamespaceOperations.java:142)]
 - Load Iceberg namespace, catalog: catalog_1, namespace: company_gcs
   2026-04-29 08:23:18.768 INFO [iceberg-rest-41] 
[org.apache.gravitino.iceberg.service.rest.IcebergTableOperations.createTable(IcebergTableOperations.java:169)]
 - Create Iceberg table, catalog: catalog_1, namespace: company_gcs, create 
table request: CreateTableRequest{name=employees_gcs, 
location=gs://irc-e2e-env1-metalake-a-warehouse/env1/metalake_a/catalog_1/company_gcs/employees_gcs-7e6ce0d9412d4bf6aa38f2610e2335a0,
 properties={write.format.default=PARQUET, format-version=2, 
write.parquet.compression-codec=}, schema=table {
     1: name: optional string
     2: salary: optional decimal(10, 2)
   }, partitionSpec=org.apache.iceberg.UnboundPartitionSpec@1a874839, 
writeOrder=org.apache.iceberg.UnboundSortOrder@272e4b88, stageCreate=true}, 
accessDelegation: vended-credentials, isCredentialVending: true
   2026-04-29 08:23:21.264 INFO [iceberg-rest-41] 
[org.apache.gravitino.iceberg.service.CatalogWrapperForREST.injectCredentialConfig(CatalogWrapperForREST.java:295)]
 - Generate credential: gcs-token for Iceberg table: company_gcs.employees_gcs
   2026-04-29 08:23:24.263 INFO [ForkJoinPool.commonPool-worker-10] 
[org.apache.gravitino.iceberg.service.IcebergCatalogWrapperManager.lambda$new$0(IcebergCatalogWrapperManager.java:66)]
 - Remove IcebergCatalogWrapper cache catalog_1.
   2026-04-29 08:23:24.264 INFO [ForkJoinPool.commonPool-worker-10] 
[org.apache.gravitino.iceberg.common.ops.IcebergCatalogWrapper.close(IcebergCatalogWrapper.java:355)]
 - Closing IcebergCatalogWrapper for catalog: jdbc
   2026-04-29 08:32:34.828 INFO [ForkJoinPool.commonPool-worker-11] 
[org.apache.gravitino.catalog.CatalogManager.lambda$new$0(CatalogManager.java:320)]
 - Closing catalog metalake_a.catalog_2.
   2026-04-29 08:32:34.838 INFO [ForkJoinPool.commonPool-worker-11] 
[org.apache.gravitino.iceberg.common.ops.IcebergCatalogWrapper.close(IcebergCatalogWrapper.java:355)]
 - Closing IcebergCatalogWrapper for catalog: rest
   2026-04-29 08:32:35.179 WARN 
[org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner] 
[org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner.run(FileSystem.java:4024)]
 - Cleaner thread interrupted, will stop
   java.lang.InterruptedException: null
        at java.base/java.lang.Object.wait(Native Method) ~[?:?]
        at 
java.base/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:155) ~[?:?]
        at 
java.base/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:176) ~[?:?]
        at 
org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner.run(FileSystem.java:4021)
 [hadoop-client-api-3.3.1.jar:?]
        at java.base/java.lang.Thread.run(Thread.java:840) [?:?]
   
   ```
   
   ### How to reproduce
   
   Trino catalog property
   ```
   connector.name=iceberg
   iceberg.catalog.type=rest
   iceberg.rest-catalog.uri=http://gravitino.irc-e2e-env1:9001/iceberg/
   
   # OAuth2 client credentials flow — Trino automatically obtains and refreshes
   # the token using Keycloak's client_credentials grant.
   # Trino >= 435 required.
   iceberg.rest-catalog.security=OAUTH2
   
iceberg.rest-catalog.oauth2.server-uri=http://XXXX:8080/realms/myrealm/protocol/openid-connect/token
   iceberg.rest-catalog.oauth2.credential=trino-client:XXXXX
   iceberg.rest-catalog.oauth2.scope=openid profile email
   
   # Credential vending: ask IRC to vend short-lived cloud credentials
   iceberg.rest-catalog.vended-credentials-enabled=true
   iceberg.rest-catalog.nested-namespace-enabled=true
   # Native GCS filesystem (Trino 430+) – credentials come from vended tokens
   fs.native-gcs.enabled=true
   gcs.auth-type=ACCESS_TOKEN
   
   ```
   
   ### Additional context
   
   _No response_


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to