huaxingao commented on code in PR #11040:
URL: https://github.com/apache/iceberg/pull/11040#discussion_r1738093741
##########
spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkScan.java:
##########
@@ -293,6 +293,73 @@ public void testTableWithTwoColStats() throws NoSuchTableException {
     withSQLConf(reportColStatsEnabled, () -> checkColStatisticsReported(scan, 4L, expectedTwoNDVs));
   }
 
+  @TestTemplate
+  public void testMultipleSnapshotsWithColStats() throws NoSuchTableException {
+    sql("CREATE TABLE %s (id int, data string) USING iceberg", tableName);
+
+    List<SimpleRecord> records =
+        Lists.newArrayList(
+            new SimpleRecord(1, "a"),
+            new SimpleRecord(2, "b"),
+            new SimpleRecord(3, "a"),
+            new SimpleRecord(4, "b"));
+    spark
+        .createDataset(records, Encoders.bean(SimpleRecord.class))
+        .coalesce(1)
+        .writeTo(tableName)
+        .append();
+    Table table = validationCatalog.loadTable(tableIdent);
+    long snapshotId1 = table.currentSnapshot().snapshotId();
+
+    spark
+        .createDataset(List.of(new SimpleRecord(5, "a")), Encoders.bean(SimpleRecord.class))
+        .coalesce(1)
+        .writeTo(tableName)
+        .append();
+    table.refresh();
+    long snapshotId2 = table.currentSnapshot().snapshotId();
+
+    SparkScanBuilder scanBuilder =
+        new SparkScanBuilder(spark, table, CaseInsensitiveStringMap.empty());
+    SparkScan scan = (SparkScan) scanBuilder.build();
+
+    Map<String, String> reportColStatsEnabled =
+        ImmutableMap.of(SQLConf.CBO_ENABLED().key(), "true");
+
+    GenericStatisticsFile statisticsFile =
+        new GenericStatisticsFile(
+            snapshotId1,
+            "/test/statistics/file1.puffin",
+            100,
+            42,
+            ImmutableList.of(
+                new GenericBlobMetadata(
+                    APACHE_DATASKETCHES_THETA_V1,
+                    snapshotId1,
+                    1,
+                    ImmutableList.of(1),
+                    ImmutableMap.of("ndv", "4"))));
+    table.updateStatistics().setStatistics(snapshotId1, statisticsFile).commit();
+
+    statisticsFile =
+        new GenericStatisticsFile(
+            snapshotId2,
+            "/test/statistics/file2.puffin",
+            100,
+            42,
+            ImmutableList.of(
+                new GenericBlobMetadata(
+                    APACHE_DATASKETCHES_THETA_V1,
+                    snapshotId2,
+                    1,
+                    ImmutableList.of(1),
+                    ImmutableMap.of("ndv", "5"))));
+    table.updateStatistics().setStatistics(snapshotId2, statisticsFile).commit();
+    Map<String, Long> expectedNDV = Maps.newHashMap();
+    expectedNDV.put("id", 5L);
+    withSQLConf(reportColStatsEnabled, () -> checkColStatisticsReported(scan, 5L, expectedNDV));

Review Comment:
   Shall we also retrieve the Statistics for snapshotId1 and verify it?
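   For example, a rough, untested sketch of that extra check, assuming the `snapshot-id` read option is honored by `SparkScanBuilder` when reporting statistics, and reusing the test's `table`, `snapshotId1`, `reportColStatsEnabled`, and the existing `checkColStatisticsReported` helper (names like `scanSnapshot1` are just placeholders):

   ```java
   // Hypothetical addition to testMultipleSnapshotsWithColStats (not part of the PR diff):
   // build a second scan pinned to snapshotId1 and verify its row count and NDV are reported.
   SparkScanBuilder scanBuilderSnapshot1 =
       new SparkScanBuilder(
           spark,
           table,
           new CaseInsensitiveStringMap(
               ImmutableMap.of("snapshot-id", String.valueOf(snapshotId1))));
   SparkScan scanSnapshot1 = (SparkScan) scanBuilderSnapshot1.build();

   Map<String, Long> expectedNDVSnapshot1 = Maps.newHashMap();
   expectedNDVSnapshot1.put("id", 4L); // ndv recorded in file1.puffin for snapshotId1

   // 4 rows were written before snapshotId1, so expect rowCount = 4 and ndv(id) = 4
   withSQLConf(
       reportColStatsEnabled,
       () -> checkColStatisticsReported(scanSnapshot1, 4L, expectedNDVSnapshot1));
   ```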