This is an automated email from the ASF dual-hosted git repository. rnewson pushed a commit to branch enhance-nouveau-cleanup in repository https://gitbox.apache.org/repos/asf/couchdb.git.
commit f801fd32822af1e3b1f08ee7fd17d727856e09f4 Author: Robert Newson <[email protected]> AuthorDate: Wed Aug 27 15:22:34 2025 +0100 enhance _nouveau_cleanup Allow a glob pattern so we can delete indexes for previous versions of a database. --- .../apache/couchdb/nouveau/core/IndexManager.java | 61 +++++++++++++--------- src/nouveau/src/nouveau_fabric_cleanup.erl | 5 +- src/nouveau/src/nouveau_rpc.erl | 4 +- 3 files changed, 43 insertions(+), 27 deletions(-) diff --git a/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java b/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java index 85230d322..c4f59f7a1 100644 --- a/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java +++ b/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java @@ -33,7 +33,6 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.stream.Stream; import org.apache.couchdb.nouveau.api.IndexDefinition; import org.apache.couchdb.nouveau.lucene9.Lucene9AnalyzerFactory; import org.apache.couchdb.nouveau.lucene9.Lucene9Index; @@ -250,35 +249,49 @@ public final class IndexManager implements Managed { } public void deleteAll(final String path, final List<String> exclusions) throws IOException { - LOGGER.info("deleting indexes below {} (excluding {})", path, exclusions == null ? "nothing" : exclusions); + LOGGER.info( + "deleting indexes matching {} (excluding {})", + path, + exclusions == null || exclusions.isEmpty() ? 
"nothing" : exclusions); + var parts = path.split("/"); + deleteAll(rootDir, parts, 0, exclusions); + } - final Path indexRootPath = indexRootPath(path); - if (!indexRootPath.toFile().exists()) { + private void deleteAll(final Path path, final String[] parts, final int index, final List<String> exclusions) + throws IOException { + // End of the path + if (index == parts.length - 1) { + try (var stream = Files.newDirectoryStream(path, parts[index])) { + stream.forEach(p -> { + if (exclusions != null && exclusions.indexOf(p.getFileName().toString()) != -1) { + return; + } + final String relativeName = rootDir.relativize(p).toString(); + try { + deleteIndex(relativeName); + } catch (final IOException | InterruptedException e) { + LOGGER.error("Exception deleting {}", p, e); + } + // Clean any newly empty directories. + do { + final File f = p.toFile(); + if (f.isDirectory() && f.list().length == 0) { + f.delete(); + } + } while ((p = p.getParent()) != null && !rootDir.equals(p)); + }); + } return; } - Stream<Path> stream = Files.find(indexRootPath, 100, (p, attr) -> attr.isDirectory() && isIndex(p)); - try { - stream.forEach((p) -> { - final String relativeToExclusions = indexRootPath.relativize(p).toString(); - if (exclusions != null && exclusions.indexOf(relativeToExclusions) != -1) { - return; - } - final String relativeName = rootDir.relativize(p).toString(); + // Recurse + try (var stream = Files.newDirectoryStream(path, parts[index])) { + stream.forEach(p -> { try { - deleteIndex(relativeName); - } catch (final IOException | InterruptedException e) { - LOGGER.error("Exception deleting {}", p, e); + deleteAll(p, parts, index + 1, exclusions); + } catch (IOException e) { + LOGGER.warn("Exception during delete of " + rootDir.relativize(p), e); } - // Clean any newly empty directories. 
- do { - final File f = p.toFile(); - if (f.isDirectory() && f.list().length == 0) { - f.delete(); - } - } while ((p = p.getParent()) != null && !rootDir.equals(p)); }); - } finally { - stream.close(); } } diff --git a/src/nouveau/src/nouveau_fabric_cleanup.erl b/src/nouveau/src/nouveau_fabric_cleanup.erl index ea1e28eb3..07167e8c7 100644 --- a/src/nouveau/src/nouveau_fabric_cleanup.erl +++ b/src/nouveau/src/nouveau_fabric_cleanup.erl @@ -40,7 +40,10 @@ go(DbName) -> Shards = mem3:shards(DbName), lists:foreach( fun(Shard) -> - rexi:cast(Shard#shard.node, {nouveau_rpc, cleanup, [Shard#shard.name, ActiveSigs]}) + Path = + <<"shards/", (mem3_util:range_to_hex(Shard#shard.range))/binary, "/", DbName/binary, + ".*/*">>, + rexi:cast(Shard#shard.node, {nouveau_rpc, cleanup, [Path, ActiveSigs]}) end, Shards ). diff --git a/src/nouveau/src/nouveau_rpc.erl b/src/nouveau/src/nouveau_rpc.erl index b7e0eb509..f7ab5a433 100644 --- a/src/nouveau/src/nouveau_rpc.erl +++ b/src/nouveau/src/nouveau_rpc.erl @@ -89,6 +89,6 @@ info(DbName, #index{} = Index0) -> rexi:reply({error, Reason}) end. -cleanup(DbName, Exclusions) -> - nouveau_api:delete_path(nouveau_util:index_name(DbName), Exclusions), +cleanup(Path, Exclusions) -> + nouveau_api:delete_path(nouveau_util:index_name(Path), Exclusions), rexi:reply(ok).
