epotyom commented on code in PR #13568:
URL: https://github.com/apache/lucene/pull/13568#discussion_r1684302960
########## lucene/demo/src/java/org/apache/lucene/demo/facet/SandboxFacetsExample.java: ##########
@@ -0,0 +1,714 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.demo.facet;
+
+import static org.apache.lucene.facet.FacetsConfig.DEFAULT_INDEX_FIELD_NAME;
+import static org.apache.lucene.sandbox.facet.ComparableUtils.rankCountOrdToComparable;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.facet.DrillDownQuery;
+import org.apache.lucene.facet.DrillSideways;
+import org.apache.lucene.facet.FacetField;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.MultiLongValuesSource;
+import org.apache.lucene.facet.range.LongRange;
+import org.apache.lucene.facet.taxonomy.FacetLabel;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.sandbox.facet.ComparableUtils;
+import org.apache.lucene.sandbox.facet.FacetFieldCollector;
+import org.apache.lucene.sandbox.facet.FacetFieldCollectorManager;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.abstracts.OrdToComparable;
+import org.apache.lucene.sandbox.facet.abstracts.OrdinalIterator;
+import org.apache.lucene.sandbox.facet.abstracts.Reducer;
+import org.apache.lucene.sandbox.facet.ordinal_iterators.TopnOrdinalIterator;
+import org.apache.lucene.sandbox.facet.ranges.LongRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.RangeOrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import org.apache.lucene.sandbox.facet.recorders.LongAggregationsFacetRecorder;
+import org.apache.lucene.sandbox.facet.recorders.MultiFacetsRecorder;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyChildrenOrdinalIterator;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyFacetsCutter;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyOrdLabelBiMap;
+import org.apache.lucene.search.CollectorOwner;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LongValuesSource;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollectorManager;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopScoreDocCollectorManager;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
+
+/** Demo for sandbox faceting. */
+public class SandboxFacetsExample {
+
+  private final Directory indexDir = new ByteBuffersDirectory();
+  private final Directory taxoDir = new ByteBuffersDirectory();
+  private final FacetsConfig config = new FacetsConfig();
+
+  /** Empty constructor */
+  public SandboxFacetsExample() {
+    config.setHierarchical("Publish Date", true);
+  }
+
+  /** Build the example index. */
+  void index() throws IOException {
+    IndexWriter indexWriter =
+        new IndexWriter(
+            indexDir, new IndexWriterConfig(new WhitespaceAnalyzer()).setOpenMode(OpenMode.CREATE));
+
+    // Writes facet ords to a separate directory from the main index
+    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
+
+    Document doc = new Document();
+    doc.add(new FacetField("Author", "Bob"));
+    doc.add(new FacetField("Publish Date", "2010", "10", "15"));
+    doc.add(new NumericDocValuesField("Price", 10));
+    doc.add(new NumericDocValuesField("Units", 9));
+    doc.add(new DoubleDocValuesField("Popularity", 3.5d));
+    indexWriter.addDocument(config.build(taxoWriter, doc));
+
+    doc = new Document();
+    doc.add(new FacetField("Author", "Lisa"));
+    doc.add(new FacetField("Publish Date", "2010", "10", "20"));
+    doc.add(new NumericDocValuesField("Price", 4));
+    doc.add(new NumericDocValuesField("Units", 2));
+    doc.add(new DoubleDocValuesField("Popularity", 4.1D));
+    indexWriter.addDocument(config.build(taxoWriter, doc));
+
+    doc = new Document();
+    doc.add(new FacetField("Author", "Lisa"));
+    doc.add(new FacetField("Publish Date", "2012", "1", "1"));
+    doc.add(new NumericDocValuesField("Price", 3));
+    doc.add(new NumericDocValuesField("Units", 5));
+    doc.add(new DoubleDocValuesField("Popularity", 3.9D));
+    indexWriter.addDocument(config.build(taxoWriter, doc));
+
+    doc = new Document();
+    doc.add(new FacetField("Author", "Susan"));
+    doc.add(new FacetField("Publish Date", "2012", "1", "7"));
+    doc.add(new NumericDocValuesField("Price", 8));
+    doc.add(new NumericDocValuesField("Units", 7));
+    doc.add(new DoubleDocValuesField("Popularity", 4D));
+    indexWriter.addDocument(config.build(taxoWriter, doc));
+
+    doc = new Document();
+    doc.add(new FacetField("Author", "Frank"));
+    doc.add(new FacetField("Publish Date", "1999", "5", "5"));
+    doc.add(new NumericDocValuesField("Price", 9));
+    doc.add(new NumericDocValuesField("Units", 6));
+    doc.add(new DoubleDocValuesField("Popularity", 4.9D));
+    indexWriter.addDocument(config.build(taxoWriter, doc));
+
+    IOUtils.close(indexWriter, taxoWriter);
+  }
+
+  /** User runs a query and counts facets only without collecting the matching documents. */
+  List<FacetResult> facetsOnly() throws IOException {
+    //// (1) init readers
+    DirectoryReader indexReader = DirectoryReader.open(indexDir);
+    IndexSearcher searcher = new IndexSearcher(indexReader);
+    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
+
+    //// (2) init collector
+    TaxonomyFacetsCutter defaultTaxoCutter =
+        new TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, taxoReader);
+    CountFacetRecorder defaultRecorder = new CountFacetRecorder(false);
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(defaultTaxoCutter, defaultTaxoCutter, defaultRecorder);
+
+    //// (2.1) if we need to collect data using multiple different collectors, e.g. taxonomy and
+    // ranges,
+    //// or even two taxonomy facets that use different Category List Field, we can use
+    // MultiCollectorManager, e.g.:
+    // TODO: add a demo for it.
+    // TaxonomyFacetsCutter publishDateCutter = new
+    // TaxonomyFacetsCutter(config.getDimConfig("Publish Date"), taxoReader);
+    // CountFacetRecorder publishDateRecorder = new CountFacetRecorder(false);
+    // FacetFieldCollectorManager<CountFacetRecorder> publishDateCollectorManager = new
+    // FacetFieldCollectorManager<>(publishDateCutter, publishDateRecorder);
+    // MultiCollectorManager drillDownCollectorManager = new
+    // MultiCollectorManager(authorCollectorManager, publishDateCollectorManager);
+    // Object[] results = searcher.search(new MatchAllDocsQuery(), drillDownCollectorManager);
+
+    //// (3) search
+    // Search returns the same Recorder we created - so we can ignore results
+    searcher.search(new MatchAllDocsQuery(), collectorManager);
+
+    //// (4) Get top 10 results by count for Author and Publish Date
+    // This object is used to get topN results by count
+    OrdToComparable<ComparableUtils.IntOrdComparable> countComparable =
+        ComparableUtils.countOrdToComparable(defaultRecorder);
+    // We don't actually need to use FacetResult, it is up to client what to do with the results.
+    // Here we just want to demo that we can still do FacetResult as well
+    List<FacetResult> results = new ArrayList<>(2);
+    // This object provides labels for ordinals.
+    OrdLabelBiMap ordLabels = new TaxonomyOrdLabelBiMap(taxoReader);
+    for (String dimension : List.of("Author", "Publish Date")) {
+      //// (4.1) Chain two ordinal iterators to get top N children
+      OrdinalIterator childrenIternator =
+          new TaxonomyChildrenOrdinalIterator(
+              defaultRecorder.recordedOrds(),
+              taxoReader.getParallelTaxonomyArrays().parents(),
+              ordLabels.getOrd(new FacetLabel(dimension)));
+      OrdinalIterator topByCountOrds =
+          new TopnOrdinalIterator<>(childrenIternator, countComparable, 10);
+      // Get array of final ordinals - we need to use all of them to get labels first, and then to
+      // get counts,
+      // but OrdinalIterator only allows reading ordinals once.
+      int[] resultOrdinals = topByCountOrds.toArray();
+
+      //// (4.2) Use faceting results
+      FacetLabel[] labels = ordLabels.getLabels(resultOrdinals);
+      List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
+      for (int i = 0; i < resultOrdinals.length; i++) {
+        labelsAndValues.add(
+            new LabelAndValue(labels[i].getLeaf(), defaultRecorder.getCount(resultOrdinals[i])));
+      }
+      // TODO fix value and childCount
+      results.add(
+          new FacetResult(
+              dimension, new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
+    }
+
+    IOUtils.close(indexReader, taxoReader);
+    return results;
+  }
+
+  /**
+   * User runs a query and counts facets for exclusive ranges without collecting the matching
+   * documents
+   */
+  List<FacetResult> exclusiveRangesCountFacetsOnly() throws IOException {
+    DirectoryReader indexReader = DirectoryReader.open(indexDir);
+    IndexSearcher searcher = new IndexSearcher(indexReader);
+
+    MultiLongValuesSource valuesSource = MultiLongValuesSource.fromLongField("Price");
+
+    // Exclusive ranges example
+    LongRange[] inputRanges = new LongRange[2];
+    inputRanges[0] = new LongRange("0-5", 0, true, 5, true);
+    inputRanges[1] = new LongRange("5-10", 5, false, 10, true);
+
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("Price", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new CountFacetRecorder(false);
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, countRecorder);
+    searcher.search(new MatchAllDocsQuery(), collectorManager);
+    RangeOrdLabelBiMap ordToLabels = new RangeOrdLabelBiMap(inputRanges);
+
+    OrdToComparable<ComparableUtils.IntOrdComparable> countComparable =
+        ComparableUtils.countOrdToComparable(countRecorder);
+    OrdinalIterator topByCountOrds =
+        new TopnOrdinalIterator<>(countRecorder.recordedOrds(), countComparable, 10);
+
+    List<FacetResult> results = new ArrayList<>(2);
+
+    int[] resultOrdinals = topByCountOrds.toArray();
+    FacetLabel[] labels = ordToLabels.getLabels(resultOrdinals);
+    List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
+    for (int i = 0; i < resultOrdinals.length; i++) {
+      labelsAndValues.add(
+          new LabelAndValue(labels[i].getLeaf(), countRecorder.getCount(resultOrdinals[i])));
+    }
+
+    results.add(
+        new FacetResult(
+            "Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
+
+    System.out.println("Computed counts");
+    IOUtils.close(indexReader);
+    return results;
+  }
+
+  List<FacetResult> overlappingRangesCountFacetsOnly() throws IOException {
+    DirectoryReader indexReader = DirectoryReader.open(indexDir);
+    IndexSearcher searcher = new IndexSearcher(indexReader);
+
+    MultiLongValuesSource valuesSource = MultiLongValuesSource.fromLongField("Price");
+
+    // overlapping ranges example
+    LongRange[] inputRanges = new LongRange[2];
+    inputRanges[0] = new LongRange("0-5", 0, true, 5, true);
+    inputRanges[1] = new LongRange("0-10", 0, true, 10, true);
+
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("Price", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new CountFacetRecorder(false);
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, countRecorder);
+    searcher.search(new MatchAllDocsQuery(), collectorManager);
+    RangeOrdLabelBiMap ordToLabels = new RangeOrdLabelBiMap(inputRanges);
+
+    OrdToComparable<ComparableUtils.IntOrdComparable> countComparable =
+        ComparableUtils.countOrdToComparable(countRecorder);
+    OrdinalIterator topByCountOrds =
+        new TopnOrdinalIterator<>(countRecorder.recordedOrds(), countComparable, 10);
+
+    List<FacetResult> results = new ArrayList<>(2);
+
+    int[] resultOrdinals = topByCountOrds.toArray();
+    FacetLabel[] labels = ordToLabels.getLabels(resultOrdinals);
+    List<LabelAndValue> labelsAndValues = new ArrayList<>(labels.length);
+    for (int i = 0; i < resultOrdinals.length; i++) {
+      labelsAndValues.add(
+          new LabelAndValue(labels[i].getLeaf(), countRecorder.getCount(resultOrdinals[i])));
+    }
+
+    results.add(
+        new FacetResult(
+            "Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
+
+    System.out.println("Computed counts");
+    IOUtils.close(indexReader);
+    return results;
+  }
+
+  List<FacetResult> exclusiveRangesAggregationFacets() throws IOException {
+    DirectoryReader indexReader = DirectoryReader.open(indexDir);
+    IndexSearcher searcher = new IndexSearcher(indexReader);
+
+    MultiLongValuesSource valuesSource = MultiLongValuesSource.fromLongField("Price");
+
+    // Exclusive ranges example
+    LongRange[] inputRanges = new LongRange[2];
+    inputRanges[0] = new LongRange("0-5", 0, true, 5, true);
+    inputRanges[1] = new LongRange("5-10", 5, false, 10, true);
+
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("Price", valuesSource, inputRanges);
+
+    // initialise the aggregations to be computed - a values source + reducer
+    LongValuesSource[] longValuesSources = new LongValuesSource[2];
+    Reducer[] reducers = new Reducer[2];
+    // popularity:max
+    longValuesSources[0] = DoubleValuesSource.fromDoubleField("Popularity").toLongValuesSource();
+    reducers[0] = Reducer.MAX;
+    // units:sum
+    longValuesSources[1] = LongValuesSource.fromLongField("Units");
+    reducers[1] = Reducer.SUM;
+
+    LongAggregationsFacetRecorder longAggregationsFacetRecorder =
+        new LongAggregationsFacetRecorder(longValuesSources, reducers);
+
+    CountFacetRecorder countRecorder = new CountFacetRecorder(false);
+
+    // Compute both counts and aggregations
+    MultiFacetsRecorder multiFacetsRecorder =
+        new MultiFacetsRecorder(countRecorder, longAggregationsFacetRecorder);
+
+    FacetFieldCollectorManager<MultiFacetsRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, multiFacetsRecorder);
+    searcher.search(new MatchAllDocsQuery(), collectorManager);
+    RangeOrdLabelBiMap ordToLabels = new RangeOrdLabelBiMap(inputRanges);
+
+    // Get recorded ords - use either count/aggregations recorder
+    OrdinalIterator recordedOrds = longAggregationsFacetRecorder.recordedOrds();
+
+    // We don't actually need to use FacetResult, it is up to client what to do with the results.
+    // Here we just want to demo that we can still do FacetResult as well
+    List<FacetResult> results = new ArrayList<>(2);
+    OrdToComparable<ComparableUtils.LongIntOrdComparable> ordToComparable;
+    OrdinalIterator topOrds;
+    int[] resultOrdinals;
+    FacetLabel[] labels;
+    List<LabelAndValue> labelsAndValues;
+
+    // Sort results by units:sum and tie-break by count
+    ordToComparable = rankCountOrdToComparable(countRecorder, longAggregationsFacetRecorder, 1);
+    topOrds = new TopnOrdinalIterator<>(recordedOrds, ordToComparable, 10);
+
+    resultOrdinals = topOrds.toArray();
+    labels = ordToLabels.getLabels(resultOrdinals);
+    labelsAndValues = new ArrayList<>(labels.length);
+    for (int i = 0; i < resultOrdinals.length; i++) {
+      labelsAndValues.add(
+          new LabelAndValue(
+              labels[i].getLeaf(),
+              longAggregationsFacetRecorder.getRecordedValue(resultOrdinals[i], 1)));
+    }
+    results.add(
+        new FacetResult(
+            "Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
+
+    // note: previous ordinal iterator was exhausted
+    recordedOrds = longAggregationsFacetRecorder.recordedOrds();
+    // Sort results by popularity:max and tie-break by count
+    ordToComparable = rankCountOrdToComparable(countRecorder, longAggregationsFacetRecorder, 0);
+    topOrds = new TopnOrdinalIterator<>(recordedOrds, ordToComparable, 10);
+    resultOrdinals = topOrds.toArray();
+    labels = ordToLabels.getLabels(resultOrdinals);
+    labelsAndValues = new ArrayList<>(labels.length);
+    for (int i = 0; i < resultOrdinals.length; i++) {
+      labelsAndValues.add(
+          new LabelAndValue(
+              labels[i].getLeaf(),
+              longAggregationsFacetRecorder.getRecordedValue(resultOrdinals[i], 0)));
+    }
+    // TODO: is the tie-break by ord correct? Right now, it gives the higher ord as output
+    results.add(
+        new FacetResult(
+            "Price", new String[0], 0, labelsAndValues.toArray(new LabelAndValue[0]), 0));
+
+    return results;
+  }
+
+  /** User runs a query and counts facets. */
+  private List<FacetResult> facetsWithSearch() throws IOException {

Review Comment:
   It demonstrates collecting hits and facets at the same time.

-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@lucene.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@lucene.apache.org
For additional commands, e-mail: issues-h...@lucene.apache.org
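The quoted hunk ends at the `facetsWithSearch()` signature, so the method body is not shown above. A minimal sketch of what "collecting hits and facets at the same time" can look like, using only classes that already appear in this diff (`TaxonomyFacetsCutter`, `CountFacetRecorder`, `FacetFieldCollectorManager`) together with the imported `MultiCollectorManager` and `TopScoreDocCollectorManager`; the `hitsCollectorManager` name and the 10/10 collector parameters are illustrative assumptions, not taken from the PR:

```java
// Sketch only - not the PR's actual facetsWithSearch() body.
// Assumes the demo class's indexDir, taxoDir and config fields from the diff above.
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);

// Facet side: same cutter/recorder pair as in facetsOnly()
TaxonomyFacetsCutter defaultTaxoCutter =
    new TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, taxoReader);
CountFacetRecorder defaultRecorder = new CountFacetRecorder(false);
FacetFieldCollectorManager<CountFacetRecorder> facetsCollectorManager =
    new FacetFieldCollectorManager<>(defaultTaxoCutter, defaultTaxoCutter, defaultRecorder);

// Hits side: top 10 docs by score (numHits and totalHitsThreshold values are arbitrary here)
TopScoreDocCollectorManager hitsCollectorManager = new TopScoreDocCollectorManager(10, 10);

// Run both collector managers in a single search() call
MultiCollectorManager collectorManager =
    new MultiCollectorManager(hitsCollectorManager, facetsCollectorManager);
Object[] reduced = searcher.search(new MatchAllDocsQuery(), collectorManager);

// reduced[0] holds the collected hits; reduced[1] is the same CountFacetRecorder created above,
// so counts can be read from defaultRecorder exactly as facetsOnly() does
// (e.g. via TaxonomyChildrenOrdinalIterator + TopnOrdinalIterator).
TopDocs topDocs = (TopDocs) reduced[0];

IOUtils.close(indexReader, taxoReader);
```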