ebyhr commented on code in PR #11744:
URL: https://github.com/apache/iceberg/pull/11744#discussion_r1879109181
##########
flink/v1.19/flink/src/test/java/org/apache/iceberg/flink/TestHelpers.java:
##########
@@ -602,7 +602,7 @@ public static void assertEquals(ContentFile<?> expected, ContentFile<?> actual)
     assertThat(actual).isNotNull();
     assertThat(actual.specId()).as("SpecId").isEqualTo(expected.specId());
     assertThat(actual.content()).as("Content").isEqualTo(expected.content());
-    assertThat(actual.path()).as("Path").isEqualTo(expected.path());
+    assertThat(actual.location()).as("Path").isEqualTo(expected.location());

Review Comment:
   ```suggestion
       assertThat(actual.location()).as("Location").isEqualTo(expected.location());
   ```

##########
spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestExpireSnapshotsProcedure.java:
##########
@@ -300,7 +300,7 @@ public void testExpireDeleteFiles() throws Exception {
     Assert.assertEquals("Should have 1 delete file", 1, TestHelpers.deleteFiles(table).size());
     Path deleteManifestPath = new Path(TestHelpers.deleteManifests(table).iterator().next().path());
     DeleteFile deleteFile = TestHelpers.deleteFiles(table).iterator().next();
-    Path deleteFilePath = new Path(String.valueOf(deleteFile.path()));
+    Path deleteFilePath = new Path(String.valueOf(deleteFile.location()));

Review Comment:
   Why do we need `String.valueOf` even though `deleteFile.location()` already returns a `String`?

##########
spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkCleanupUtil.java:
##########
@@ -81,7 +81,7 @@ private static String taskInfo() {
    * @param files a list of files to delete
    */
   public static void deleteFiles(String context, FileIO io, List<? extends ContentFile<?>> files) {
-    List<String> paths = Lists.transform(files, file -> file.path().toString());
+    List<String> paths = Lists.transform(files, file -> file.location());

Review Comment:
   Spark v3.5 uses a method reference here.

##########
spark/v3.3/spark/src/test/java/org/apache/iceberg/ValidationHelpers.java:
##########
@@ -42,7 +42,7 @@ public static List<Long> snapshotIds(Long... ids) {
   }

   public static List<String> files(ContentFile<?>... files) {
-    return Arrays.stream(files).map(file -> file.path().toString()).collect(Collectors.toList());
+    return Arrays.stream(files).map(file -> file.location()).collect(Collectors.toList());

Review Comment:
   nit: We could use a method reference. Feel free to ignore this comment. I know Spark v3.5 uses this style.

##########
spark/v3.3/spark/src/main/java/org/apache/iceberg/spark/source/SparkCleanupUtil.java:
##########
@@ -81,7 +81,7 @@ private static String taskInfo() {
    * @param files a list of files to delete
    */
   public static void deleteFiles(String context, FileIO io, List<? extends ContentFile<?>> files) {
-    List<String> paths = Lists.transform(files, file -> file.path().toString());
+    List<String> paths = Lists.transform(files, file -> file.location());

Review Comment:
   Spark v3.5 uses a method reference here.

##########
spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java:
##########
@@ -635,7 +635,7 @@ private static void importUnpartitionedSparkTable(
     if (checkDuplicateFiles) {
       Dataset<Row> importedFiles =
           spark
-              .createDataset(Lists.transform(files, f -> f.path().toString()), Encoders.STRING())
+              .createDataset(Lists.transform(files, f -> f.location()), Encoders.STRING())

Review Comment:
   Spark v3.5 uses a method reference here.
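For reference, a minimal sketch of the two points raised above: once `location()` returns a `String`, the `String.valueOf` wrapper becomes redundant, and the `Lists.transform` lambdas can be written as method references. The class and method names below are hypothetical, the relocated Guava `Lists` import is assumed from Iceberg's usual conventions, and this is not the actual Spark v3.5 code.

```java
import java.util.List;

import org.apache.hadoop.fs.Path;
import org.apache.iceberg.ContentFile;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;

// Hypothetical helper class, for illustration only.
class LocationStyleSketch {

  // location() already returns String, so no String.valueOf is needed.
  static Path deleteFilePath(DeleteFile deleteFile) {
    return new Path(deleteFile.location());
  }

  // Lambda form, as written in the diffs above.
  static List<String> pathsWithLambda(List<? extends ContentFile<?>> files) {
    return Lists.transform(files, file -> file.location());
  }

  // Equivalent method-reference form, the style the comments attribute to Spark v3.5.
  static List<String> pathsWithMethodReference(List<? extends ContentFile<?>> files) {
    return Lists.transform(files, ContentFile::location);
  }
}
```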
##########
spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/BaseReader.java:
##########
@@ -153,7 +153,7 @@ public boolean next() throws IOException {
         if (currentTask != null && !currentTask.isDataTask()) {
           String filePaths =
               referencedFiles(currentTask)
-                  .map(file -> file.path().toString())
+                  .map(file -> file.location())

Review Comment:
   Spark v3.5 uses a method reference here.

##########
spark/v3.3/spark/src/main/java/org/apache/iceberg/spark/source/BaseReader.java:
##########
@@ -148,7 +148,7 @@ public boolean next() throws IOException {
         if (currentTask != null && !currentTask.isDataTask()) {
           String filePaths =
               referencedFiles(currentTask)
-                  .map(file -> file.path().toString())
+                  .map(file -> file.location())

Review Comment:
   Spark v3.5 uses a method reference here. Feel free to ignore this and the other comments about method references if the lambda style is intentional.
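The same change in the `BaseReader` stream pipelines above would look roughly like the following sketch; the `referencedFiles` parameter and the `Collectors.joining` call are stand-ins for the surrounding code, not the actual implementation.

```java
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.iceberg.ContentFile;

// Hypothetical helper, for illustration only.
class FilePathsSketch {

  static String filePaths(Stream<ContentFile<?>> referencedFiles) {
    return referencedFiles
        .map(ContentFile::location) // method reference instead of file -> file.location()
        .collect(Collectors.joining(", "));
  }
}
```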