mikemccand commented on code in PR #14178:
URL: https://github.com/apache/lucene/pull/14178#discussion_r2152136427
##########
lucene/sandbox/src/java/org/apache/lucene/sandbox/codecs/faiss/package-info.java:
##########
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Provides a Faiss-based vector format via {@link

Review Comment:
Link out to Faiss' source code?
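
For illustration, a minimal sketch of how such a link could look in package-info.java, assuming the upstream repository at https://github.com/facebookresearch/faiss is the intended target (the javadoc wording below is placeholder text, not the PR's):

```java
// Hypothetical sketch, not the PR's actual package javadoc.
/**
 * Provides a Faiss-based vector format.
 *
 * @see <a href="https://github.com/facebookresearch/faiss">Faiss source code on GitHub</a>
 */
package org.apache.lucene.sandbox.codecs.faiss;
```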

##########
lucene/sandbox/src/java/org/apache/lucene/sandbox/codecs/faiss/FaissKnnVectorsWriter.java:
##########
@@ -0,0 +1,240 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.codecs.faiss;
+
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.DATA_CODEC_NAME;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.DATA_EXTENSION;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.META_CODEC_NAME;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.META_EXTENSION;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.VERSION_CURRENT;
+import static org.apache.lucene.sandbox.codecs.faiss.LibFaissC.createIndex;
+import static org.apache.lucene.sandbox.codecs.faiss.LibFaissC.indexWrite;
+
+import java.io.IOException;
+import java.lang.foreign.Arena;
+import java.lang.foreign.MemorySegment;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.codecs.KnnFieldVectorsWriter;
+import org.apache.lucene.codecs.KnnVectorsWriter;
+import org.apache.lucene.codecs.hnsw.FlatFieldVectorsWriter;
+import org.apache.lucene.codecs.hnsw.FlatVectorsWriter;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FloatVectorValues;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.MergeState;
+import org.apache.lucene.index.SegmentWriteState;
+import org.apache.lucene.index.Sorter;
+import org.apache.lucene.index.VectorSimilarityFunction;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.hnsw.IntToIntFunction;
+
+/**
+ * Write per-segment Faiss indexes and associated metadata.
+ *
+ * @lucene.experimental
+ */
+final class FaissKnnVectorsWriter extends KnnVectorsWriter {
+  private final String description, indexParams;
+  private final FlatVectorsWriter rawVectorsWriter;
+  private final IndexOutput meta, data;
+  private final Map<FieldInfo, FlatFieldVectorsWriter<?>> rawFields;
+  private boolean closed, finished;
+
+  public FaissKnnVectorsWriter(
+      String description,
+      String indexParams,
+      SegmentWriteState state,
+      FlatVectorsWriter rawVectorsWriter)
+      throws IOException {
+
+    this.description = description;
+    this.indexParams = indexParams;
+    this.rawVectorsWriter = rawVectorsWriter;
+    this.rawFields = new HashMap<>();
+    this.closed = false;
+    this.finished = false;
+
+    boolean failure = true;
+    try {
+      this.meta = openOutput(state, META_EXTENSION, META_CODEC_NAME);
+      this.data = openOutput(state, DATA_EXTENSION, DATA_CODEC_NAME);
+      failure = false;
+    } finally {
+      if (failure) {
+        IOUtils.closeWhileHandlingException(this);
+      }
+    }
+  }
+
+  private IndexOutput openOutput(SegmentWriteState state, String extension, String codecName)
+      throws IOException {
+    String fileName =
+        IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, extension);
+    IndexOutput output = state.directory.createOutput(fileName, state.context);
+    CodecUtil.writeIndexHeader(
+        output, codecName, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+    return output;
+  }
+
+  @Override
+  public void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException {
+    rawVectorsWriter.mergeOneField(fieldInfo, mergeState);
+    switch (fieldInfo.getVectorEncoding()) {
+      case BYTE ->
+          // TODO: Support using SQ8 quantization, see:
+          //  - https://github.com/opensearch-project/k-NN/pull/2425
+          throw new UnsupportedOperationException("Byte vectors not supported");
+      case FLOAT32 -> {
+        FloatVectorValues merged =
+            KnnVectorsWriter.MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState);
+        writeFloatField(fieldInfo, merged, doc -> doc);
+      }
+    }
+  }
+
+  @Override
+  public KnnFieldVectorsWriter<?> addField(FieldInfo fieldInfo) throws IOException {
+    FlatFieldVectorsWriter<?> rawFieldVectorsWriter = rawVectorsWriter.addField(fieldInfo);
+    rawFields.put(fieldInfo, rawFieldVectorsWriter);
+    return rawFieldVectorsWriter;
+  }
+
+  @Override
+  public void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException {
+    rawVectorsWriter.flush(maxDoc, sortMap);
+    for (Map.Entry<FieldInfo, FlatFieldVectorsWriter<?>> entry : rawFields.entrySet()) {
+      FieldInfo fieldInfo = entry.getKey();
+      switch (fieldInfo.getVectorEncoding()) {
+        case BYTE ->
+            // TODO: Support using SQ8 quantization, see:
+            //  - https://github.com/opensearch-project/k-NN/pull/2425
+            throw new UnsupportedOperationException("Byte vectors not supported");
+
+        case FLOAT32 -> {
+          @SuppressWarnings("unchecked")
+          FlatFieldVectorsWriter<float[]> rawWriter =
+              (FlatFieldVectorsWriter<float[]>) entry.getValue();
+
+          List<float[]> vectors = rawWriter.getVectors();
+          int dimension = fieldInfo.getVectorDimension();
+          DocIdSet docIdSet = rawWriter.getDocsWithFieldSet();
+
+          writeFloatField(
+              fieldInfo,
+              new BufferedFloatVectorValues(vectors, dimension, docIdSet),
+              (sortMap != null) ? sortMap::oldToNew : doc -> doc);
+        }
+      }
+    }
+  }
+
+  private void writeFloatField(
+      FieldInfo fieldInfo, FloatVectorValues floatVectorValues, IntToIntFunction oldToNewDocId)
+      throws IOException {
+    int number = fieldInfo.number;
+    meta.writeInt(number);
+
+    // Write index to temp file and deallocate from memory
+    try (Arena temp = Arena.ofConfined()) {
+      VectorSimilarityFunction function = fieldInfo.getVectorSimilarityFunction();
+      MemorySegment indexPointer =
+          createIndex(description, indexParams, function, floatVectorValues, oldToNewDocId)
+              // Ensure timely cleanup
+              .reinterpret(temp, LibFaissC::freeIndex);
+
+      // See flags defined in c_api/index_io_c.h
+      int ioFlags = 3;
+
+      // Write index
+      long dataOffset = data.getFilePointer();
+      indexWrite(indexPointer, data, ioFlags);
+      long dataLength = data.getFilePointer() - dataOffset;
+
+      meta.writeLong(dataOffset);
+      meta.writeLong(dataLength);
+    }
+  }
+
+  @Override
+  public void finish() throws IOException {
+    if (finished) {
+      throw new IllegalStateException("Already finished");
+    }
+    finished = true;
+
+    rawVectorsWriter.finish();
+    meta.writeInt(-1);
+    CodecUtil.writeFooter(meta);
+    CodecUtil.writeFooter(data);
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (closed == false) {
+      IOUtils.close(rawVectorsWriter, meta, data);
+      closed = true;
+    }
+  }
+
+  @Override
+  public long ramBytesUsed() {
+    // TODO: How to estimate Faiss usage?

Review Comment:
Normally I would say "run the JVM with smaller and smaller heap sizes during indexing to see at what point pathological GC or OOME starts and then you know how much RAM is required". But Faiss pulls its RAM from sbrk/malloc, not restricted by the JVM heap, so it's trickier. One could maybe use `ulimit` so the kernel will return `null` if the process tries to allocate too much RAM, and subtract the JVM heap from that total. Or maybe Faiss documents RAM usage somewhere?

Lucene is somewhat RAM hungry when building HNSW graphs ([example](https://github.com/apache/lucene/issues/14208), and there have been [recent improvements](https://github.com/apache/lucene/pull/14527)). This is mostly a problem during large segment merges, when we build a new (ish?) HNSW graph containing all vectors from the merging segments...

Anyway, I don't think this is a blocker for merging to sandbox -- we can learn over time the RAM usage.
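
To make the constraint concrete: since Faiss allocates natively via malloc, a Java-side `ramBytesUsed()` can only account for what the writer holds on the JVM heap, chiefly the raw vectors buffered by the delegate flat writer. A hedged sketch of such an interim estimate (assuming the delegate is the dominant on-heap consumer; this is not the PR's implementation):

```java
// Sketch only: report the delegate writer's on-heap buffers, and accept that
// the native Faiss index built during flush/merge is not accounted for here.
@Override
public long ramBytesUsed() {
  // TODO: How to estimate Faiss usage? Its allocations bypass the JVM heap.
  return rawVectorsWriter.ramBytesUsed();
}
```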

##########
lucene/sandbox/src/java/org/apache/lucene/sandbox/codecs/faiss/FaissKnnVectorsReader.java:
##########
@@ -0,0 +1,195 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.codecs.faiss;
+
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.DATA_CODEC_NAME;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.DATA_EXTENSION;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.META_CODEC_NAME;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.META_EXTENSION;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.VERSION_CURRENT;
+import static org.apache.lucene.sandbox.codecs.faiss.FaissKnnVectorsFormat.VERSION_START;
+import static org.apache.lucene.sandbox.codecs.faiss.LibFaissC.indexRead;
+import static org.apache.lucene.sandbox.codecs.faiss.LibFaissC.indexSearch;
+
+import java.io.IOException;
+import java.lang.foreign.Arena;
+import java.lang.foreign.MemorySegment;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.codecs.KnnVectorsReader;
+import org.apache.lucene.codecs.hnsw.FlatVectorsReader;
+import org.apache.lucene.index.ByteVectorValues;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FloatVectorValues;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.SegmentReadState;
+import org.apache.lucene.index.VectorSimilarityFunction;
+import org.apache.lucene.search.KnnCollector;
+import org.apache.lucene.store.DataAccessHint;
+import org.apache.lucene.store.FileTypeHint;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.IOUtils;
+
+/**
+ * Read per-segment Faiss indexes and associated metadata.
+ *
+ * @lucene.experimental
+ */
+final class FaissKnnVectorsReader extends KnnVectorsReader {
+  private final FlatVectorsReader rawVectorsReader;
+  private final IndexInput meta, data;
+  private final Map<String, IndexEntry> indexMap;
+  private final Arena arena;
+  private boolean closed;
+
+  public FaissKnnVectorsReader(SegmentReadState state, FlatVectorsReader rawVectorsReader)
+      throws IOException {
+    this.rawVectorsReader = rawVectorsReader;
+    this.indexMap = new HashMap<>();
+    this.arena = Arena.ofShared();
+    this.closed = false;
+
+    boolean failure = true;
+    try {
+      meta =
+          openInput(
+              state,
+              META_EXTENSION,
+              META_CODEC_NAME,
+              VERSION_START,
+              VERSION_CURRENT,
+              state.context);
+      data =
+          openInput(
+              state,
+              DATA_EXTENSION,
+              DATA_CODEC_NAME,
+              VERSION_START,
+              VERSION_CURRENT,
+              state.context.withHints(FileTypeHint.DATA, DataAccessHint.RANDOM));
+
+      Map.Entry<String, IndexEntry> entry;
+      while ((entry = parseNextField(state)) != null) {
+        this.indexMap.put(entry.getKey(), entry.getValue());
+      }
+
+      failure = false;
+    } finally {
+      if (failure) {
+        IOUtils.closeWhileHandlingException(this);
+      }
+    }
+  }
+
+  @SuppressWarnings("SameParameterValue")
+  private IndexInput openInput(
+      SegmentReadState state,
+      String extension,
+      String codecName,
+      int versionStart,
+      int versionEnd,
+      IOContext context)
+      throws IOException {
+
+    String fileName =
+        IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, extension);
+    IndexInput input = state.directory.openInput(fileName, context);
+    CodecUtil.checkIndexHeader(
+        input, codecName, versionStart, versionEnd, state.segmentInfo.getId(), state.segmentSuffix);
+    return input;
+  }
+
+  private Map.Entry<String, IndexEntry> parseNextField(SegmentReadState state) throws IOException {
+    int fieldNumber = meta.readInt();
+    if (fieldNumber == -1) {
+      return null;
+    }
+
+    FieldInfo fieldInfo = state.fieldInfos.fieldInfo(fieldNumber);
+    if (fieldInfo == null) {
+      throw new IllegalStateException("Invalid field");
+    }
+
+    long dataOffset = meta.readLong();
+    long dataLength = meta.readLong();
+
+    // See flags defined in c_api/index_io_c.h
+    int ioFlags = 3;
+
+    // Read index into memory
+    MemorySegment indexPointer =
+        indexRead(data.slice(fieldInfo.name, dataOffset, dataLength), ioFlags)
+            // Ensure timely cleanup
+            .reinterpret(arena, LibFaissC::freeIndex);
+
+    return Map.entry(
+        fieldInfo.name, new IndexEntry(indexPointer, fieldInfo.getVectorSimilarityFunction()));
+  }
+
+  @Override
+  public void checkIntegrity() throws IOException {

Review Comment:
Maybe leave a `TODO` asking how we could check integrity of Faiss's data structures too? But we are confirming checksums match, so at least we'll catch an errant bit flip, even if it happens in Faiss's on-disk structures.
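
For reference, a hedged sketch of what a typical `checkIntegrity()` over this pair of files could look like (the actual body is elided in the excerpt above), with the suggested `TODO` left inline; `CodecUtil.checksumEntireFile` is the usual Lucene helper for the checksum part:

```java
// Sketch only; the PR's real implementation is not shown in the quoted diff.
@Override
public void checkIntegrity() throws IOException {
  // TODO: can Faiss also validate its own data structures?
  // Lucene-level checksums still catch an errant bit flip anywhere in these
  // files, including inside the embedded Faiss index blob.
  rawVectorsReader.checkIntegrity();
  CodecUtil.checksumEntireFile(meta);
  CodecUtil.checksumEntireFile(data);
}
```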