liziyan-lzy commented on code in PR #12254:
URL: https://github.com/apache/iceberg/pull/12254#discussion_r2033194329


##########
spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/actions/DeleteOrphanFilesSparkAction.java:
##########
@@ -372,7 +413,7 @@ private static void listDirRecursively(
       }
 
       for (String subDir : subDirs) {
-        listDirRecursively(
+        listDirRecursivelyWithHadoop(

Review Comment:
   This method lists file statuses via the Hadoop FileSystem, hence the WithHadoop suffix. Do you have any suggestions for a better method name?
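   For context, here is a minimal sketch of what a Hadoop-FileSystem-driven recursive listing typically looks like. It is illustrative only, not the exact method in this PR; the class name, parameter list, and predicate type are assumptions.

```java
// Illustrative sketch only -- not the actual DeleteOrphanFilesSparkAction code.
// It shows why the "WithHadoop" suffix fits: the listing is driven by
// org.apache.hadoop.fs.FileSystem rather than by Iceberg's FileIO.
import java.io.IOException;
import java.util.List;
import java.util.function.Predicate;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

class HadoopListingSketch {

  static void listDirRecursivelyWithHadoop(
      String dir,
      Predicate<FileStatus> predicate,
      Configuration conf,
      PathFilter pathFilter,
      List<String> matchingFiles)
      throws IOException {
    Path path = new Path(dir);
    FileSystem fs = path.getFileSystem(conf);

    for (FileStatus file : fs.listStatus(path, pathFilter)) {
      if (file.isDirectory()) {
        // recurse into non-hidden subdirectories
        listDirRecursivelyWithHadoop(
            file.getPath().toString(), predicate, conf, pathFilter, matchingFiles);
      } else if (file.isFile() && predicate.test(file)) {
        matchingFiles.add(file.getPath().toString());
      }
    }
  }
}
```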



##########
spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/actions/DeleteOrphanFilesSparkAction.java:
##########
@@ -335,7 +344,39 @@ private Dataset<String> listedFileDS() {
     return spark().createDataset(completeMatchingFileRDD.rdd(), Encoders.STRING());
   }
 
-  private static void listDirRecursively(
+  private static void listDirRecursivelyWithFileIO(
+      SupportsPrefixOperations io,
+      String dir,
+      Predicate<org.apache.iceberg.io.FileInfo> predicate,
+      PathFilter pathFilter,
+      List<String> matchingFiles) {
+    String listPath = dir;
+    if (!dir.endsWith("/")) {
+      listPath = dir + "/";
+    }
+    Iterable<org.apache.iceberg.io.FileInfo> files = io.listPrefix(listPath);

Review Comment:
   Thanks for highlighting this concern. I share the same worry. Since SupportsPrefixOperations currently only supports full recursive listing, directory-level filtering on top of it could indeed cause performance issues. Perhaps we should consider adding depth support to SupportsPrefixOperations later?
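   To make the concern concrete: under the current contract, a depth limit can only be applied client-side, after the full recursive listing has already been fetched. A rough sketch under that assumption (the helper class and method below are hypothetical, not an existing Iceberg API):

```java
// Hypothetical client-side depth filter on top of SupportsPrefixOperations.listPrefix.
// listPrefix still returns every object under the prefix, so the full listing cost
// is paid regardless -- which is exactly the performance concern discussed above.
import java.util.ArrayList;
import java.util.List;
import org.apache.iceberg.io.FileInfo;
import org.apache.iceberg.io.SupportsPrefixOperations;

class PrefixDepthFilterSketch {

  static List<String> listUpToDepth(SupportsPrefixOperations io, String dir, int maxDepth) {
    String base = dir.endsWith("/") ? dir : dir + "/";
    List<String> matchingFiles = new ArrayList<>();

    for (FileInfo file : io.listPrefix(base)) {
      // assumes returned locations start with the listed prefix
      String relative = file.location().substring(base.length());
      // depth 1 = directly under the prefix, depth 2 = one level deeper, etc.
      long depth = relative.chars().filter(c -> c == '/').count() + 1;
      if (depth <= maxDepth) {
        matchingFiles.add(file.location());
      }
    }

    return matchingFiles;
  }
}
```

   If depth (or delimiter) support were added to SupportsPrefixOperations itself, object-store-backed FileIO implementations could stop at a directory boundary server-side instead of enumerating everything.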



##########
spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/actions/DeleteOrphanFilesSparkAction.java:
##########
@@ -335,7 +344,39 @@ private Dataset<String> listedFileDS() {
     return spark().createDataset(completeMatchingFileRDD.rdd(), Encoders.STRING());
   }
 
-  private static void listDirRecursively(
+  private static void listDirRecursivelyWithFileIO(
+      SupportsPrefixOperations io,
+      String dir,
+      Predicate<org.apache.iceberg.io.FileInfo> predicate,
+      PathFilter pathFilter,
+      List<String> matchingFiles) {
+    String listPath = dir;
+    if (!dir.endsWith("/")) {
+      listPath = dir + "/";
+    }
+    Iterable<org.apache.iceberg.io.FileInfo> files = io.listPrefix(listPath);
+    for (org.apache.iceberg.io.FileInfo file : files) {
+      Path path = new Path(file.location());
+      if (!isHiddenPath(dir, path, pathFilter) && predicate.test(file)) {
+        matchingFiles.add(file.location());
+      }
+    }
+  }
+
+  private static boolean isHiddenPath(String baseDir, Path path, PathFilter pathFilter) {
+    boolean isHiddenPath = false;
+    Path currentPath = path;
+    while (currentPath.getParent().toString().contains(baseDir)) {
+      if (!pathFilter.accept(currentPath)) {
+        isHiddenPath = true;
+        break;
+      }
+      currentPath = currentPath.getParent();
+    }

Review Comment:
   Fixed. Thanks!



##########
spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/actions/DeleteOrphanFilesSparkAction.java:
##########
@@ -335,7 +344,39 @@ private Dataset<String> listedFileDS() {
     return spark().createDataset(completeMatchingFileRDD.rdd(), Encoders.STRING());
   }
 
-  private static void listDirRecursively(
+  private static void listDirRecursivelyWithFileIO(
+      SupportsPrefixOperations io,
+      String dir,
+      Predicate<org.apache.iceberg.io.FileInfo> predicate,
+      PathFilter pathFilter,
+      List<String> matchingFiles) {
+    String listPath = dir;
+    if (!dir.endsWith("/")) {
+      listPath = dir + "/";
+    }
+    Iterable<org.apache.iceberg.io.FileInfo> files = io.listPrefix(listPath);
+    for (org.apache.iceberg.io.FileInfo file : files) {
+      Path path = new Path(file.location());
+      if (!isHiddenPath(dir, path, pathFilter) && predicate.test(file)) {
+        matchingFiles.add(file.location());
+      }
+    }
+  }
+
+  private static boolean isHiddenPath(String baseDir, Path path, PathFilter pathFilter) {
+    boolean isHiddenPath = false;
+    Path currentPath = path;
+    while (currentPath.getParent().toString().contains(baseDir)) {
+      if (!pathFilter.accept(currentPath)) {
+        isHiddenPath = true;
+        break;
+      }

Review Comment:
   Fixed. Thanks!



##########
spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/actions/DeleteOrphanFilesSparkAction.java:
##########
@@ -335,7 +344,39 @@ private Dataset<String> listedFileDS() {
     return spark().createDataset(completeMatchingFileRDD.rdd(), Encoders.STRING());
   }
 
-  private static void listDirRecursively(
+  private static void listDirRecursivelyWithFileIO(
+      SupportsPrefixOperations io,
+      String dir,
+      Predicate<org.apache.iceberg.io.FileInfo> predicate,
+      PathFilter pathFilter,
+      List<String> matchingFiles) {
+    String listPath = dir;
+    if (!dir.endsWith("/")) {
+      listPath = dir + "/";
+    }

Review Comment:
   Fixed. Thanks!



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

