yuangjiang opened a new issue, #6265: URL: https://github.com/apache/iceberg/issues/6265
### Apache Iceberg version

_No response_

### Query engine

_No response_

### Please describe the bug 🐞

I created an Iceberg table with Spark 3.3.1 using the Iceberg Hadoop catalog. The following exception appears when I run a delete:

```
22/11/24 17:22:29 WARN DFSClient: Failed to connect to /192.168.x.x:1004 for block, add to deadNodes and continue. java.nio.channels.ClosedByInterruptException
java.nio.channels.ClosedByInterruptException
	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
	at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:659)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3441)
	at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:777)
	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:694)
	at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:355)
	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:665)
	at org.apache.hadoop.hdfs.DFSInputStream.seekToBlockSource(DFSInputStream.java:1567)
	at org.apache.hadoop.hdfs.DFSInputStream.readBuffer(DFSInputStream.java:847)
	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:883)
	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:926)
	at java.io.DataInputStream.read(DataInputStream.java:149)
	at org.apache.iceberg.hadoop.HadoopStreams$HadoopSeekableInputStream.read(HadoopStreams.java:112)
	at org.apache.iceberg.avro.AvroIO$AvroInputStreamAdapter.read(AvroIO.java:117)
	at org.apache.iceberg.shaded.org.apache.avro.file.DataFileReader$SeekableInputStream.read(DataFileReader.java:292)
	at org.apache.iceberg.shaded.org.apache.avro.io.BinaryDecoder$InputStreamByteSource.tryReadRaw(BinaryDecoder.java:895)
	at org.apache.iceberg.shaded.org.apache.avro.io.BinaryDecoder.isEnd(BinaryDecoder.java:518)
	at org.apache.iceberg.shaded.org.apache.avro.file.DataFileStream.hasNextBlock(DataFileStream.java:287)
	at org.apache.iceberg.shaded.org.apache.avro.file.DataFileStream.hasNext(DataFileStream.java:212)
	at org.apache.iceberg.avro.AvroIterable$AvroReuseIterator.hasNext(AvroIterable.java:191)
	at org.apache.iceberg.io.CloseableIterable$4$1.hasNext(CloseableIterable.java:110)
	at org.apache.iceberg.io.FilterIterator.advance(FilterIterator.java:64)
	at org.apache.iceberg.io.FilterIterator.hasNext(FilterIterator.java:49)
	at org.apache.iceberg.io.FilterIterator.advance(FilterIterator.java:64)
	at org.apache.iceberg.io.FilterIterator.hasNext(FilterIterator.java:49)
	at org.apache.iceberg.io.FilterIterator.advance(FilterIterator.java:64)
	at org.apache.iceberg.io.FilterIterator.hasNext(FilterIterator.java:49)
	at org.apache.iceberg.io.CloseableIterable$4$1.hasNext(CloseableIterable.java:110)
	at org.apache.iceberg.util.ParallelIterable$ParallelIterator.lambda$null$1(ParallelIterable.java:69)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
22/11/24 17:22:29 WARN BlockReaderFactory: I/O error constructing remote block reader.
java.nio.channels.ClosedByInterruptException
	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
	at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:659)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3441)
	at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:777)
	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:694)
	at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:355)
	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:665)
	at org.apache.hadoop.hdfs.DFSInputStream.seekToBlockSource(DFSInputStream.java:1567)
	at org.apache.hadoop.hdfs.DFSInputStream.readBuffer(DFSInputStream.java:847)
	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:883)
	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:926)
	at java.io.DataInputStream.read(DataInputStream.java:149)
	at org.apache.iceberg.hadoop.HadoopStreams$HadoopSeekableInputStream.read(HadoopStreams.java:112)
	at org.apache.iceberg.avro.AvroIO$AvroInputStreamAdapter.read(AvroIO.java:117)
	at org.apache.iceberg.shaded.org.apache.avro.file.DataFileReader$SeekableInputStream.read(DataFileReader.java:292)
	at org.apache.iceberg.shaded.org.apache.avro.io.BinaryDecoder$InputStreamByteSource.tryReadRaw(BinaryDecoder.java:895)
	at org.apache.iceberg.shaded.org.apache.avro.io.BinaryDecoder.isEnd(BinaryDecoder.java:518)
	at org.apache.iceberg.shaded.org.apache.avro.file.DataFileStream.hasNextBlock(DataFileStream.java:287)
	at org.apache.iceberg.shaded.org.apache.avro.file.DataFileStream.hasNext(DataFileStream.java:212)
	at org.apache.iceberg.avro.AvroIterable$AvroReuseIterator.hasNext(AvroIterable.java:191)
	at org.apache.iceberg.io.CloseableIterable$4$1.hasNext(CloseableIterable.java:110)
	at org.apache.iceberg.io.FilterIterator.advance(FilterIterator.java:64)
	at org.apache.iceberg.io.FilterIterator.hasNext(FilterIterator.java:49)
	at org.apache.iceberg.io.FilterIterator.advance(FilterIterator.java:64)
	at org.apache.iceberg.io.FilterIterator.hasNext(FilterIterator.java:49)
	at org.apache.iceberg.io.FilterIterator.advance(FilterIterator.java:64)
	at org.apache.iceberg.io.FilterIterator.hasNext(FilterIterator.java:49)
	at org.apache.iceberg.io.CloseableIterable$4$1.hasNext(CloseableIterable.java:110)
	at org.apache.iceberg.util.ParallelIterable$ParallelIterator.lambda$null$1(ParallelIterable.java:69)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
```
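The report does not include the exact job, so the sketch below is a minimal, hypothetical reconstruction of the setup being described: a Hadoop catalog registered as `hadoop_cat`, an HDFS warehouse path, and a table `db.sample` on which a `DELETE` is run. The catalog name, warehouse path, namespace, schema, and predicate are all illustrative assumptions, not details taken from the issue.

```scala
// Hypothetical reproduction sketch; names, paths, schema, and predicate are
// assumptions rather than values from the report.
import org.apache.spark.sql.SparkSession

object IcebergHadoopCatalogDelete {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("iceberg-hadoop-catalog-delete")
      // Enable Iceberg's SQL extensions so DELETE FROM is handled by Iceberg.
      .config("spark.sql.extensions",
        "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
      // Register a Hadoop catalog backed by an HDFS warehouse directory.
      .config("spark.sql.catalog.hadoop_cat", "org.apache.iceberg.spark.SparkCatalog")
      .config("spark.sql.catalog.hadoop_cat.type", "hadoop")
      .config("spark.sql.catalog.hadoop_cat.warehouse", "hdfs://namenode:8020/warehouse")
      .getOrCreate()

    // Create a namespace and a table in the Hadoop catalog (illustrative schema).
    spark.sql("CREATE NAMESPACE IF NOT EXISTS hadoop_cat.db")
    spark.sql(
      "CREATE TABLE IF NOT EXISTS hadoop_cat.db.sample (id BIGINT, data STRING) USING iceberg")

    // Per the report, the DELETE is the operation during which the DFSClient
    // warnings appear while Iceberg reads Avro manifest files (visible in the
    // stack trace above).
    spark.sql("DELETE FROM hadoop_cat.db.sample WHERE id = 1")

    spark.stop()
  }
}
```

For what it's worth, `ClosedByInterruptException` is what the HDFS client reports when the reading thread is interrupted mid-connect; in the trace it occurs inside `ParallelIterable`'s worker pool while manifests are being scanned, so the failure that triggered the interruption may be logged elsewhere in the driver or executor logs.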