zhangjiuyang1993 opened a new issue, #6708:
URL: https://github.com/apache/iceberg/issues/6708

   ### Apache Iceberg version
   
   0.14.1
   
   ### Query engine
   
   Spark
   
   ### Please describe the bug 🐞
   
   @nastra 
   version: "3"

   services:
     spark-iceberg:
       image: tabulario/spark-iceberg
       container_name: spark-iceberg
       build: spark/
       depends_on:
         - rest
         - minio
       volumes:
         - ./warehouse:/home/iceberg/warehouse
         - ./notebooks:/home/iceberg/notebooks/notebooks
       environment:
         - AWS_ACCESS_KEY_ID=admin
         - AWS_SECRET_ACCESS_KEY=password
         - AWS_REGION=us-east-1
       ports:
         - 8888:8888
         - 8080:8080
       links:
         - rest:rest
         - minio:minio
     rest:
       image: tabulario/iceberg-rest:0.1.0
       ports:
         - 8181:8181
       environment:
         - AWS_ACCESS_KEY_ID=admin
         - AWS_SECRET_ACCESS_KEY=password
         - AWS_REGION=us-east-1
         - CATALOG_WAREHOUSE=s3a://warehouse/wh/
         - CATALOG_IO__IMPL=org.apache.iceberg.aws.s3.S3FileIO
         - CATALOG_S3_ENDPOINT=http://minio:9000/
     minio:
       image: minio/minio
       container_name: minio
       environment:
         - MINIO_ROOT_USER=admin
         - MINIO_ROOT_PASSWORD=password
       ports:
         - 9001:9001
         - 9000:9000
       command: ["server", "/data", "--console-address", ":9001"]
     mc:
       depends_on:
         - minio
       image: minio/mc
       container_name: mc
       environment:
         - AWS_ACCESS_KEY_ID=admin
         - AWS_SECRET_ACCESS_KEY=password
         - AWS_REGION=us-east-1
       entrypoint: >
         /bin/sh -c "
         until (/usr/bin/mc config host add minio http://minio:9000/ admin password) do echo '...waiting...' && sleep 1; done;
         /usr/bin/mc rm -r --force minio/warehouse;
         /usr/bin/mc mb minio/warehouse;
         /usr/bin/mc policy set public minio/warehouse;
         exit 0;
         "
   Using the configuration above, I got the following error:
   23/01/31 01:43:58 WARN RESTSessionCatalog: Failed to report metrics to REST 
endpoint for table nyc.taxis
   org.apache.iceberg.exceptions.BadRequestException: Malformed request: No 
route for request: POST v1/namespaces/nyc/tables/taxis/metrics
   at 
org.apache.iceberg.rest.ErrorHandlers$DefaultErrorHandler.accept(ErrorHandlers.java:152)
   at 
org.apache.iceberg.rest.ErrorHandlers$DefaultErrorHandler.accept(ErrorHandlers.java:135)
   at org.apache.iceberg.rest.HTTPClient.throwFailure(HTTPClient.java:150)
   at org.apache.iceberg.rest.HTTPClient.execute(HTTPClient.java:224)
   at org.apache.iceberg.rest.HTTPClient.post(HTTPClient.java:269)
   at org.apache.iceberg.rest.RESTClient.post(RESTClient.java:112)
   at 
org.apache.iceberg.rest.RESTSessionCatalog.reportMetrics(RESTSessionCatalog.java:321)
   at 
org.apache.iceberg.rest.RESTSessionCatalog.lambda$loadTable$2(RESTSessionCatalog.java:307)
   at 
org.apache.iceberg.BaseTableScan.lambda$planFiles$0(BaseTableScan.java:168)
   at org.apache.iceberg.io.CloseableIterable$3.close(CloseableIterable.java:95)
   at 
org.apache.iceberg.spark.source.SparkBatchQueryScan.files(SparkBatchQueryScan.java:125)
   at 
org.apache.iceberg.spark.source.SparkBatchQueryScan.tasks(SparkBatchQueryScan.java:138)
   at org.apache.iceberg.spark.source.SparkScan.toBatch(SparkScan.java:111)
   at 
org.apache.spark.sql.execution.datasources.v2.BatchScanExec.batch$lzycompute(BatchScanExec.scala:42)
   at 
org.apache.spark.sql.execution.datasources.v2.BatchScanExec.batch(BatchScanExec.scala:42)
   at 
org.apache.spark.sql.execution.datasources.v2.BatchScanExec.inputPartitions$lzycompute(BatchScanExec.scala:54)
   at 
org.apache.spark.sql.execution.datasources.v2.BatchScanExec.inputPartitions(BatchScanExec.scala:54)
   at 
org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExecBase.supportsColumnar(DataSourceV2ScanExecBase.scala:142)
   at 
org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExecBase.supportsColumnar$(DataSourceV2ScanExecBase.scala:141)
   at 
org.apache.spark.sql.execution.datasources.v2.BatchScanExec.supportsColumnar(BatchScanExec.scala:36)
   at 
org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy.apply(DataSourceV2Strategy.scala:143)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$1(QueryPlanner.scala:63)
   at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
   at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
   at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:491)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
   at 
org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$3(QueryPlanner.scala:78)
   at scala.collection.TraversableOnce$folder$1.apply(TraversableOnce.scala:196)
   at scala.collection.TraversableOnce$folder$1.apply(TraversableOnce.scala:194)
   at scala.collection.Iterator.foreach(Iterator.scala:943)
   at scala.collection.Iterator.foreach$(Iterator.scala:943)
   at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
   at scala.collection.TraversableOnce.foldLeft(TraversableOnce.scala:199)
   at scala.collection.TraversableOnce.foldLeft$(TraversableOnce.scala:192)
   at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1431)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$2(QueryPlanner.scala:75)
   at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
   at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
   at 
org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$3(QueryPlanner.scala:78)
   at scala.collection.TraversableOnce$folder$1.apply(TraversableOnce.scala:196)
   at scala.collection.TraversableOnce$folder$1.apply(TraversableOnce.scala:194)
   at scala.collection.Iterator.foreach(Iterator.scala:943)
   at scala.collection.Iterator.foreach$(Iterator.scala:943)
   at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
   at scala.collection.TraversableOnce.foldLeft(TraversableOnce.scala:199)
   at scala.collection.TraversableOnce.foldLeft$(TraversableOnce.scala:192)
   at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1431)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$2(QueryPlanner.scala:75)
   at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
   at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
   at 
org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
   at 
org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69)
   at 
org.apache.spark.sql.execution.QueryExecution$.createSparkPlan(QueryExecution.scala:459)
   at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$sparkPlan$1(QueryExecution.scala:145)
   at 
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
   at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:185)
   at 
org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:510)
   at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:185)
   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
   at 
org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:184)
   at 
org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:145)
   at 
org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:138)
   at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executedPlan$1(QueryExecution.scala:158)
   at 
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
   at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:185)
   at 
org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:510)
   at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:185)
   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
   at 
org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:184)
   at 
org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:158)
   at 
org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:151)
   at 
org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:204)
   at 
org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:249)
   at 
org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:218)
   at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:103)
   at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
   at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
   at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
   at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3856)
   at org.apache.spark.sql.Dataset.collectToPython(Dataset.scala:3685)
   at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native 
Method)
   at 
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   at 
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   at java.base/java.lang.reflect.Method.invoke(Method.java:566)
   at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
   at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
   at py4j.Gateway.invoke(Gateway.java:282)
   at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
   at py4j.commands.CallCommand.execute(CallCommand.java:79)
   at 
py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
   at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
   at java.base/java.lang.Thread.run(Thread.java:829)
   Sorry if this was wrongly reported as a bug.


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org
For additional commands, e-mail: issues-h...@iceberg.apache.org

Reply via email to