stefanvodita commented on code in PR #13568:
URL: https://github.com/apache/lucene/pull/13568#discussion_r1677574763


##########
lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java:
##########
@@ -300,35 +302,28 @@ public DrillSidewaysResult(
     }
   }
 
-  private static class CallableCollector implements Callable<CallableResult> {
-
-    private final int pos;
+  private static class CallableCollector implements Callable<Object> {

Review Comment:
   I find it a bit strange to parametrise with `Object` and always return 
`null`. Would parametrising with `Void` make sense?



##########
lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java:
##########
@@ -398,130 +497,56 @@ private <R> ConcurrentDrillSidewaysResult<R> 
searchSequentially(
     }
     Query[] drillDownQueries = query.getDrillDownQueries();
 
-    int numDims = drillDownDims.size();
-
-    FacetsCollectorManager drillDownCollectorManager = 
createDrillDownFacetsCollectorManager();
-
-    FacetsCollectorManager[] drillSidewaysFacetsCollectorManagers =
-        new FacetsCollectorManager[numDims];
-    for (int i = 0; i < numDims; i++) {
-      drillSidewaysFacetsCollectorManagers[i] = 
createDrillSidewaysFacetsCollectorManager();
-    }
-
     DrillSidewaysQuery dsq =
         new DrillSidewaysQuery(
             baseQuery,
-            drillDownCollectorManager,
-            drillSidewaysFacetsCollectorManagers,
+            // drillDownCollectorOwner,
+            // Don't pass drill down collector because drill down is collected 
by IndexSearcher
+            // itself.
+            // TODO: deprecate drillDown collection in DrillSidewaysQuery?

Review Comment:
   Overall, the changes in this file make me think we would write things 
differently if we didn't have to maintain the API for the other faceting 
implementations. Is that so?



##########
lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java:
##########
@@ -398,130 +497,56 @@ private <R> ConcurrentDrillSidewaysResult<R> 
searchSequentially(
     }
     Query[] drillDownQueries = query.getDrillDownQueries();
 
-    int numDims = drillDownDims.size();
-
-    FacetsCollectorManager drillDownCollectorManager = 
createDrillDownFacetsCollectorManager();
-
-    FacetsCollectorManager[] drillSidewaysFacetsCollectorManagers =
-        new FacetsCollectorManager[numDims];
-    for (int i = 0; i < numDims; i++) {
-      drillSidewaysFacetsCollectorManagers[i] = 
createDrillSidewaysFacetsCollectorManager();
-    }
-
     DrillSidewaysQuery dsq =
         new DrillSidewaysQuery(
             baseQuery,
-            drillDownCollectorManager,
-            drillSidewaysFacetsCollectorManagers,
+            // drillDownCollectorOwner,
+            // Don't pass drill down collector because drill down is collected 
by IndexSearcher
+            // itself.
+            // TODO: deprecate drillDown collection in DrillSidewaysQuery?
+            null,
+            drillSidewaysCollectorOwners,
             drillDownQueries,
             scoreSubDocsAtOnce());
 
-    R collectorResult = searcher.search(dsq, hitCollectorManager);
-
-    FacetsCollector drillDownCollector;
-    if (drillDownCollectorManager != null) {
-      drillDownCollector = 
drillDownCollectorManager.reduce(dsq.managedDrillDownCollectors);
-    } else {
-      drillDownCollector = null;
-    }
-
-    FacetsCollector[] drillSidewaysCollectors = new FacetsCollector[numDims];
-    int numSlices = dsq.managedDrillSidewaysCollectors.size();
-
-    for (int dim = 0; dim < numDims; dim++) {
-      List<FacetsCollector> facetsCollectorsForDim = new 
ArrayList<>(numSlices);
-
-      for (int slice = 0; slice < numSlices; slice++) {
-        
facetsCollectorsForDim.add(dsq.managedDrillSidewaysCollectors.get(slice)[dim]);
-      }
-
-      drillSidewaysCollectors[dim] =
-          
drillSidewaysFacetsCollectorManagers[dim].reduce(facetsCollectorsForDim);
-    }
-
-    String[] drillSidewaysDims = drillDownDims.keySet().toArray(new String[0]);
-
-    return new ConcurrentDrillSidewaysResult<>(
-        buildFacetsResult(drillDownCollector, drillSidewaysCollectors, 
drillSidewaysDims),
-        null,
-        collectorResult,
-        drillDownCollector,
-        drillSidewaysCollectors,
-        drillSidewaysDims);
+    searcher.searchNoReduce(dsq, drillDownCollectorOwner);
   }
 
-  @SuppressWarnings("unchecked")
-  private <R> ConcurrentDrillSidewaysResult<R> searchConcurrently(
-      final DrillDownQuery query, final CollectorManager<?, R> 
hitCollectorManager)
+  private void searchConcurrently(
+      final DrillDownQuery query,
+      final CollectorOwner<?, ?> drillDownCollectorOwner,
+      final List<CollectorOwner<?, ?>> drillSidewaysCollectorOwners)
       throws IOException {
 
     final Map<String, Integer> drillDownDims = query.getDims();
     final List<CallableCollector> callableCollectors = new 
ArrayList<>(drillDownDims.size() + 1);
 
-    // Add the main DrillDownQuery
-    FacetsCollectorManager drillDownFacetsCollectorManager =
-        createDrillDownFacetsCollectorManager();
-    CollectorManager<?, ?> mainCollectorManager;
-    if (drillDownFacetsCollectorManager != null) {
-      // Make sure we populate a facet collector corresponding to the base 
query if desired:
-      mainCollectorManager =
-          new MultiCollectorManager(drillDownFacetsCollectorManager, 
hitCollectorManager);
-    } else {
-      mainCollectorManager = hitCollectorManager;
-    }
-    callableCollectors.add(new CallableCollector(-1, searcher, query, 
mainCollectorManager));
+    callableCollectors.add(new CallableCollector(-1, searcher, query, 
drillDownCollectorOwner));
     int i = 0;
     final Query[] filters = query.getDrillDownQueries();
-    for (String dim : drillDownDims.keySet())
+    for (String dim : drillDownDims.keySet()) {
       callableCollectors.add(
           new CallableCollector(
-              i++,
+              i,
               searcher,
               getDrillDownQuery(query, filters, dim),
-              createDrillSidewaysFacetsCollectorManager()));
-
-    final FacetsCollector mainFacetsCollector;
-    final FacetsCollector[] facetsCollectors = new 
FacetsCollector[drillDownDims.size()];
-    final R collectorResult;
+              drillSidewaysCollectorOwners.get(i)));
+      i++; // TODO: refactor maybe?

Review Comment:
   Can we resolve this?



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/SandboxFacetTestCase.java:
##########
@@ -0,0 +1,407 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsCollector;
+import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.taxonomy.FacetLabel;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels.FacetLabelReader;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.abstracts.OrdToComparable;
+import org.apache.lucene.sandbox.facet.abstracts.OrdinalIterator;
+import org.apache.lucene.sandbox.facet.ordinal_iterators.TopnOrdinalIterator;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import 
org.apache.lucene.sandbox.facet.taxonomy.TaxonomyChildrenOrdinalIterator;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyOrdLabelBiMap;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.tests.util.LuceneTestCase;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.BytesRef;
+
+public abstract class SandboxFacetTestCase extends LuceneTestCase {
+  // we don't have access to overall count for all facets from count recorder,
+  // and we can't compute it as a SUM of values for each facet ID because we 
need to respect cases
+  // where
+  // the same doc belongs to multiple facets (e.g. overlapping ranges and
+  // multi value fields). We can add an extra range that includes everything,
+  // or consider supporting overall count in CountFacetRecorder. But it is not 
exactly the value
+  // we can get now, as this value wouldn't respect top-n cutoff. Is this 
value a must have facets
+  // feature?
+  static final int VALUE_CANT_BE_COMPUTED = -5;
+
+  /**
+   * Utility method that uses {@link FacetLabelReader} to get facet labels for 
each hit in {@link
+   * MatchingDocs}. The method returns {@code List<List<FacetLabel>>} where 
outer list has one entry
+   * per document and inner list has all {@link FacetLabel} entries that 
belong to a document. The
+   * inner list may be empty if no {@link FacetLabel} are found for a hit.
+   *
+   * @param taxoReader {@link TaxonomyReader} used to read taxonomy during 
search. This instance is
+   *     expected to be open for reading.
+   * @param fc {@link FacetsCollector} A collector with matching hits.
+   * @param dimension facet dimension for which labels are requested. A null 
value fetches labels
+   *     for all dimensions.
+   * @return {@code List<List<FacetLabel>} where outer list has one non-null 
entry per document. and
+   *     inner list contain all {@link FacetLabel} entries that belong to a 
document.
+   * @throws IOException when a low-level IO issue occurs.
+   */
+  public List<List<FacetLabel>> getAllTaxonomyFacetLabels(
+      String dimension, TaxonomyReader taxoReader, FacetsCollector fc) throws 
IOException {
+    List<List<FacetLabel>> actualLabels = new ArrayList<>();
+    TaxonomyFacetLabels taxoLabels =
+        new TaxonomyFacetLabels(taxoReader, 
FacetsConfig.DEFAULT_INDEX_FIELD_NAME);
+    for (MatchingDocs m : fc.getMatchingDocs()) {
+      FacetLabelReader facetLabelReader = 
taxoLabels.getFacetLabelReader(m.context);
+      DocIdSetIterator disi = m.bits.iterator();
+      while (disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+        actualLabels.add(allFacetLabels(disi.docID(), dimension, 
facetLabelReader));
+      }
+    }
+    return actualLabels;
+  }
+
+  /**
+   * Utility method to get all facet labels for an input docId and dimension 
using the supplied
+   * {@link FacetLabelReader}.
+   *
+   * @param docId docId for which facet labels are needed.
+   * @param dimension Retain facet labels for supplied dimension only. A null 
value fetches all
+   *     facet labels.
+   * @param facetLabelReader {@FacetLabelReader} instance use to get facet 
labels for input docId.
+   * @return {@code List<FacetLabel>} containing matching facet labels.
+   * @throws IOException when a low-level IO issue occurs while reading facet 
labels.
+   */
+  List<FacetLabel> allFacetLabels(int docId, String dimension, 
FacetLabelReader facetLabelReader)
+      throws IOException {
+    List<FacetLabel> facetLabels = new ArrayList<>();
+    FacetLabel facetLabel;
+    if (dimension != null) {
+      for (facetLabel = facetLabelReader.nextFacetLabel(docId, dimension); 
facetLabel != null; ) {
+        facetLabels.add(facetLabel);
+        facetLabel = facetLabelReader.nextFacetLabel(docId, dimension);
+      }
+    } else {
+      for (facetLabel = facetLabelReader.nextFacetLabel(docId); facetLabel != 
null; ) {
+        facetLabels.add(facetLabel);
+        facetLabel = facetLabelReader.nextFacetLabel(docId);
+      }
+    }
+    return facetLabels;
+  }
+
+  protected String[] getRandomTokens(int count) {

Review Comment:
   There are many unused methods in this file. Do we want to remove them?



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/SandboxFacetTestCase.java:
##########
@@ -0,0 +1,407 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsCollector;
+import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.taxonomy.FacetLabel;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels.FacetLabelReader;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.abstracts.OrdToComparable;
+import org.apache.lucene.sandbox.facet.abstracts.OrdinalIterator;
+import org.apache.lucene.sandbox.facet.ordinal_iterators.TopnOrdinalIterator;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import 
org.apache.lucene.sandbox.facet.taxonomy.TaxonomyChildrenOrdinalIterator;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyOrdLabelBiMap;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.tests.util.LuceneTestCase;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.BytesRef;
+
+public abstract class SandboxFacetTestCase extends LuceneTestCase {
+  // we don't have access to overall count for all facets from count recorder,
+  // and we can't compute it as a SUM of values for each facet ID because we 
need to respect cases
+  // where
+  // the same doc belongs to multiple facets (e.g. overlapping ranges and
+  // multi value fields). We can add an extra range that includes everything,
+  // or consider supporting overall count in CountFacetRecorder. But it is not 
exactly the value
+  // we can get now, as this value wouldn't respect top-n cutoff. Is this 
value a must have facets
+  // feature?
+  static final int VALUE_CANT_BE_COMPUTED = -5;

Review Comment:
   Also, what is our plan to address this permanently?



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/TestRangeFacet.java:
##########
@@ -0,0 +1,1654 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import static org.apache.lucene.facet.FacetsConfig.DEFAULT_INDEX_FIELD_NAME;
+
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
+import java.io.IOException;
+import java.util.List;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.facet.DrillDownQuery;
+import org.apache.lucene.facet.DrillSideways;
+import org.apache.lucene.facet.FacetField;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.MultiDoubleValuesSource;
+import org.apache.lucene.facet.MultiLongValuesSource;
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.facet.range.LongRange;
+import org.apache.lucene.facet.range.Range;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.ranges.DoubleRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.LongRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.RangeOrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyFacetsCutter;
+import org.apache.lucene.search.CollectorOwner;
+import org.apache.lucene.search.DoubleValues;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LongValuesSource;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollectorManager;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.tests.search.DummyTotalHitCountCollector;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * Test sandbox facet ranges. Mostly test cases from LongRangeFacetCounts 
adopted for sandbox
+ * faceting.
+ */
+public class TestRangeFacet extends SandboxFacetTestCase {
+
+  public void testBasicLong() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    // Also add Long.MAX_VALUE
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true),
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  private int[] getRangeOrdinals(Range[] inputRanges) {
+    // TODO: it can be fragile, we need better way of getting all ordinals for 
provided ranges?

Review Comment:
   Should we sort this out?



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/TestRangeFacet.java:
##########
@@ -0,0 +1,1654 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import static org.apache.lucene.facet.FacetsConfig.DEFAULT_INDEX_FIELD_NAME;
+
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
+import java.io.IOException;
+import java.util.List;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.facet.DrillDownQuery;
+import org.apache.lucene.facet.DrillSideways;
+import org.apache.lucene.facet.FacetField;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.MultiDoubleValuesSource;
+import org.apache.lucene.facet.MultiLongValuesSource;
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.facet.range.LongRange;
+import org.apache.lucene.facet.range.Range;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.ranges.DoubleRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.LongRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.RangeOrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyFacetsCutter;
+import org.apache.lucene.search.CollectorOwner;
+import org.apache.lucene.search.DoubleValues;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LongValuesSource;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollectorManager;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.tests.search.DummyTotalHitCountCollector;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * Test sandbox facet ranges. Mostly test cases from LongRangeFacetCounts 
adopted for sandbox
+ * faceting.
+ */
+public class TestRangeFacet extends SandboxFacetTestCase {
+
+  public void testBasicLong() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    // Also add Long.MAX_VALUE
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true),
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  private int[] getRangeOrdinals(Range[] inputRanges) {
+    // TODO: it can be fragile, we need better way of getting all ordinals for 
provided ranges?
+    int[] result = new int[inputRanges.length];
+    for (int i = 0; i < inputRanges.length; i++) {
+      result[i] = i;
+    }
+    return result;
+  }
+
+  public void testBasicLongMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    // just index the same value twice each time and make sure we don't double 
count
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    doc.add(field1);
+    doc.add(field2);
+    for (long l = 100; l < 200; l++) {
+      field1.setLongValue(l);
+      // Make second value sometimes smaller, sometimes bigger, and sometimes 
equal
+      if (l % 3 == 0) {

Review Comment:
   Worth a switch statement?



##########
lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java:
##########
@@ -349,45 +344,149 @@ private DrillDownQuery getDrillDownQuery(
   public <R> ConcurrentDrillSidewaysResult<R> search(
       final DrillDownQuery query, final CollectorManager<?, R> 
hitCollectorManager)
       throws IOException {
+    // Main query
+    FacetsCollectorManager drillDownFacetsCollectorManager =
+        createDrillDownFacetsCollectorManager();
+    final CollectorOwner<?, ?> mainCollectorOwner;
+    if (drillDownFacetsCollectorManager != null) {
+      // Make sure we populate a facet collector corresponding to the base 
query if desired:
+      mainCollectorOwner =
+          CollectorOwner.hire(
+              new MultiCollectorManager(drillDownFacetsCollectorManager, 
hitCollectorManager));
+    } else {
+      mainCollectorOwner = CollectorOwner.hire(hitCollectorManager);
+    }
+    // Drill sideways dimensions
+    final List<CollectorOwner<?, ?>> drillSidewaysCollectorOwners;
+    if (query.getDims().isEmpty() == false) {
+      drillSidewaysCollectorOwners = new ArrayList<>(query.getDims().size());
+      for (int i = 0; i < query.getDims().size(); i++) {
+        drillSidewaysCollectorOwners.add(
+            CollectorOwner.hire(createDrillSidewaysFacetsCollectorManager()));
+      }
+    } else {
+      drillSidewaysCollectorOwners = null;
+    }
+    // Execute query
     if (executor != null) {
-      return searchConcurrently(query, hitCollectorManager);
+      searchConcurrently(query, mainCollectorOwner, 
drillSidewaysCollectorOwners);
+    } else {
+      searchSequentially(query, mainCollectorOwner, 
drillSidewaysCollectorOwners);
+    }
+
+    // Collect results
+    final FacetsCollector facetsCollectorResult;
+    final R hitCollectorResult;
+    if (drillDownFacetsCollectorManager != null) {
+      // drill down collected using MultiCollector
+      // Extract the results:
+      Object[] drillDownResult = (Object[]) mainCollectorOwner.reduce();
+      facetsCollectorResult = (FacetsCollector) drillDownResult[0];
+      hitCollectorResult = (R) drillDownResult[1];
+    } else {
+      facetsCollectorResult = null;
+      hitCollectorResult = (R) mainCollectorOwner.reduce();
+    }
+
+    // Getting results for drill sideways dimensions (if any)
+    final String[] drillSidewaysDims;
+    final FacetsCollector[] drillSidewaysCollectors;
+    if (query.getDims().isEmpty() == false) {
+      drillSidewaysDims = query.getDims().keySet().toArray(new String[0]);
+      int numDims = query.getDims().size();
+      assert drillSidewaysCollectorOwners != null;
+      assert drillSidewaysCollectorOwners.size() == numDims;
+      drillSidewaysCollectors = new FacetsCollector[numDims];
+      for (int dim = 0; dim < numDims; dim++) {
+        drillSidewaysCollectors[dim] =
+            (FacetsCollector) drillSidewaysCollectorOwners.get(dim).reduce();
+      }
     } else {
-      return searchSequentially(query, hitCollectorManager);
+      drillSidewaysDims = null;
+      drillSidewaysCollectors = null;
     }
+
+    return new ConcurrentDrillSidewaysResult<>(
+        buildFacetsResult(facetsCollectorResult, drillSidewaysCollectors, 
drillSidewaysDims),
+        null,
+        hitCollectorResult,
+        facetsCollectorResult,
+        drillSidewaysCollectors,
+        drillSidewaysDims);
   }
 
-  @SuppressWarnings("unchecked")
-  private <R> ConcurrentDrillSidewaysResult<R> searchSequentially(
-      final DrillDownQuery query, final CollectorManager<?, R> 
hitCollectorManager)
+  /**
+   * Search using DrillDownQuery with custom collectors. This method can be 
used with any {@link
+   * CollectorOwner}s. It doesn't return anything because it is expected that 
you read results from
+   * provided {@link CollectorOwner}s.
+   *
+   * <p>To read the results, run {@link CollectorOwner#reduce()} for drill 
down and all drill
+   * sideways dimensions.
+   *
+   * <p>If {@code doReduce} is set to true, this method itself calls {@link
+   * CollectorOwner#reduce()}. Note that results of the call are not returned 
by this method, so you
+   * can only do that if there is some other way of accessing results from the 
reduce call.
+   *
+   * <p>Note: use {@link Collections#unmodifiableList(List)} to wrap {@code
+   * drillSidewaysCollectorOwners} to convince compiler that it is safe to use 
List here.
+   *
+   * <p>TODO: Class CollectorOwner was created so that we can ignore 
CollectorManager type C,
+   * because we want each dimensions to be able to use their own types. 
Alternatively, we can use
+   * typesafe heterogeneous container and provide CollectorManager type for 
each dimension to this
+   * method? I do like CollectorOwner approach as it seems more intuitive?
+   *
+   * <p>TODO: deprecate doReduce - always reduce, {@link 
CollectorOwner#getResult()} can be used by

Review Comment:
   Let's figure out how we want to go about this.



##########
lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java:
##########
@@ -195,11 +156,11 @@ public ScorerSupplier scorerSupplier(LeafReaderContext 
context) throws IOExcepti
         // a null scorer in this case, but we need to make sure #finish gets 
called on all facet
         // collectors since IndexSearcher won't handle this for us:
         if (baseScorerSupplier == null || nullCount > 1) {
-          if (drillDownCollector != null) {
-            drillDownCollector.finish();
+          if (drillDownLeafCollector != null) {

Review Comment:
   I don't understand why we switched to the leaf collector here.



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/SandboxFacetTestCase.java:
##########
@@ -0,0 +1,407 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsCollector;
+import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.taxonomy.FacetLabel;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels.FacetLabelReader;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.abstracts.OrdToComparable;
+import org.apache.lucene.sandbox.facet.abstracts.OrdinalIterator;
+import org.apache.lucene.sandbox.facet.ordinal_iterators.TopnOrdinalIterator;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import 
org.apache.lucene.sandbox.facet.taxonomy.TaxonomyChildrenOrdinalIterator;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyOrdLabelBiMap;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.tests.util.LuceneTestCase;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.BytesRef;
+
+public abstract class SandboxFacetTestCase extends LuceneTestCase {
+  // we don't have access to overall count for all facets from count recorder,
+  // and we can't compute it as a SUM of values for each facet ID because we 
need to respect cases
+  // where
+  // the same doc belongs to multiple facets (e.g. overlapping ranges and
+  // multi value fields). We can add an extra range that includes everything,
+  // or consider supporting overall count in CountFacetRecorder. But it is not 
exactly the value
+  // we can get now, as this value wouldn't respect top-n cutoff. Is this 
value a must have facets
+  // feature?
+  static final int VALUE_CANT_BE_COMPUTED = -5;

Review Comment:
   `-5` seems like a value I could reasonably run into with a test. How about 
`Integer.MIN_VALUE`?



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/SandboxFacetTestCase.java:
##########
@@ -0,0 +1,407 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsCollector;
+import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.taxonomy.FacetLabel;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels;
+import org.apache.lucene.facet.taxonomy.TaxonomyFacetLabels.FacetLabelReader;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.abstracts.OrdToComparable;
+import org.apache.lucene.sandbox.facet.abstracts.OrdinalIterator;
+import org.apache.lucene.sandbox.facet.ordinal_iterators.TopnOrdinalIterator;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import 
org.apache.lucene.sandbox.facet.taxonomy.TaxonomyChildrenOrdinalIterator;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyOrdLabelBiMap;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.tests.util.LuceneTestCase;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.BytesRef;
+
+public abstract class SandboxFacetTestCase extends LuceneTestCase {
+  // we don't have access to overall count for all facets from count recorder,
+  // and we can't compute it as a SUM of values for each facet ID because we 
need to respect cases
+  // where
+  // the same doc belongs to multiple facets (e.g. overlapping ranges and
+  // multi value fields). We can add an extra range that includes everything,
+  // or consider supporting overall count in CountFacetRecorder. But it is not 
exactly the value
+  // we can get now, as this value wouldn't respect top-n cutoff. Is this 
value a must have facets
+  // feature?
+  static final int VALUE_CANT_BE_COMPUTED = -5;
+
+  /**
+   * Utility method that uses {@link FacetLabelReader} to get facet labels for 
each hit in {@link
+   * MatchingDocs}. The method returns {@code List<List<FacetLabel>>} where 
outer list has one entry
+   * per document and inner list has all {@link FacetLabel} entries that 
belong to a document. The
+   * inner list may be empty if no {@link FacetLabel} are found for a hit.
+   *
+   * @param taxoReader {@link TaxonomyReader} used to read taxonomy during 
search. This instance is
+   *     expected to be open for reading.
+   * @param fc {@link FacetsCollector} A collector with matching hits.
+   * @param dimension facet dimension for which labels are requested. A null 
value fetches labels
+   *     for all dimensions.
+   * @return {@code List<List<FacetLabel>>} where outer list has one non-null 
entry per document, and
+   *     inner list contains all {@link FacetLabel} entries that belong to a 
document.
+   * @throws IOException when a low-level IO issue occurs.
+   */
+  public List<List<FacetLabel>> getAllTaxonomyFacetLabels(
+      String dimension, TaxonomyReader taxoReader, FacetsCollector fc) throws 
IOException {
+    List<List<FacetLabel>> actualLabels = new ArrayList<>();
+    TaxonomyFacetLabels taxoLabels =
+        new TaxonomyFacetLabels(taxoReader, 
FacetsConfig.DEFAULT_INDEX_FIELD_NAME);
+    for (MatchingDocs m : fc.getMatchingDocs()) {
+      FacetLabelReader facetLabelReader = 
taxoLabels.getFacetLabelReader(m.context);
+      DocIdSetIterator disi = m.bits.iterator();
+      while (disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+        actualLabels.add(allFacetLabels(disi.docID(), dimension, 
facetLabelReader));
+      }
+    }
+    return actualLabels;
+  }
+
+  /**
+   * Utility method to get all facet labels for an input docId and dimension 
using the supplied
+   * {@link FacetLabelReader}.
+   *
+   * @param docId docId for which facet labels are needed.
+   * @param dimension Retain facet labels for supplied dimension only. A null 
value fetches all
+   *     facet labels.
+   * @param facetLabelReader {@link FacetLabelReader} instance used to get facet 
labels for input docId.
+   * @return {@code List<FacetLabel>} containing matching facet labels.
+   * @throws IOException when a low-level IO issue occurs while reading facet 
labels.
+   */
+  List<FacetLabel> allFacetLabels(int docId, String dimension, 
FacetLabelReader facetLabelReader)
+      throws IOException {
+    List<FacetLabel> facetLabels = new ArrayList<>();
+    FacetLabel facetLabel;
+    if (dimension != null) {
+      for (facetLabel = facetLabelReader.nextFacetLabel(docId, dimension); 
facetLabel != null; ) {
+        facetLabels.add(facetLabel);
+        facetLabel = facetLabelReader.nextFacetLabel(docId, dimension);
+      }
+    } else {
+      for (facetLabel = facetLabelReader.nextFacetLabel(docId); facetLabel != 
null; ) {
+        facetLabels.add(facetLabel);
+        facetLabel = facetLabelReader.nextFacetLabel(docId);
+      }
+    }
+    return facetLabels;
+  }
+
+  protected String[] getRandomTokens(int count) {
+    String[] tokens = new String[count];
+    for (int i = 0; i < tokens.length; i++) {
+      tokens[i] = TestUtil.randomRealisticUnicodeString(random(), 1, 10);
+      // tokens[i] = _TestUtil.randomSimpleString(random(), 1, 10);
+    }
+    return tokens;
+  }
+
+  protected String pickToken(String[] tokens) {

Review Comment:
   This isn't picking all tokens with equal probability (but looks unused 
anyway).



##########
lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java:
##########
@@ -252,9 +213,9 @@ public int hashCode() {
     final int prime = 31;

Review Comment:
   Any thoughts on the TODO above this method?



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/TestTaxonomyFacet.java:
##########
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import static org.apache.lucene.facet.FacetsConfig.DEFAULT_INDEX_FIELD_NAME;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.DrillDownQuery;
+import org.apache.lucene.facet.FacetField;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyFacetsCutter;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyOrdLabelBiMap;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.util.IOUtils;
+
+/** Test for taxonomy facets */
+public class TestTaxonomyFacet extends SandboxFacetTestCase {
+
+  public void testConstants() {
+    // It is essential for TaxonomyOrdLabelBiMap that invalid ordinal is the 
same as for
+    // TaxonomyReader
+    assertEquals(TaxonomyOrdLabelBiMap.INVALID_ORD, 
TaxonomyReader.INVALID_ORDINAL);

Review Comment:
   I commented about this elsewhere, but it's a bit annoying that these are 
different constants if they are always meant to have the same value.



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/TestRangeFacet.java:
##########
@@ -0,0 +1,1654 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import static org.apache.lucene.facet.FacetsConfig.DEFAULT_INDEX_FIELD_NAME;
+
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
+import java.io.IOException;
+import java.util.List;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.facet.DrillDownQuery;
+import org.apache.lucene.facet.DrillSideways;
+import org.apache.lucene.facet.FacetField;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.MultiDoubleValuesSource;
+import org.apache.lucene.facet.MultiLongValuesSource;
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.facet.range.LongRange;
+import org.apache.lucene.facet.range.Range;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.ranges.DoubleRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.LongRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.RangeOrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyFacetsCutter;
+import org.apache.lucene.search.CollectorOwner;
+import org.apache.lucene.search.DoubleValues;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LongValuesSource;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollectorManager;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.tests.search.DummyTotalHitCountCollector;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * Test sandbox facet ranges. Mostly test cases from LongRangeFacetCounts 
adopted for sandbox
+ * faceting.
+ */
+public class TestRangeFacet extends SandboxFacetTestCase {
+
+  public void testBasicLong() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    // Also add Long.MAX_VALUE
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true),
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  private int[] getRangeOrdinals(Range[] inputRanges) {
+    // TODO: it can be fragile, we need better way of getting all ordinals for 
provided ranges?
+    int[] result = new int[inputRanges.length];
+    for (int i = 0; i < inputRanges.length; i++) {
+      result[i] = i;
+    }
+    return result;
+  }
+
+  public void testBasicLongMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    // just index the same value twice each time and make sure we don't double 
count
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    doc.add(field1);
+    doc.add(field2);
+    for (long l = 100; l < 200; l++) {
+      field1.setLongValue(l);
+      // Make second value sometimes smaller, sometimes bigger, and sometimes 
equal
+      if (l % 3 == 0) {
+        field2.setLongValue(l - 100);
+      } else if (l % 3 == 1) {
+        field2.setLongValue(l + 100);
+      } else {
+        field2.setLongValue(l);
+      }
+      w.addDocument(doc);
+    }
+
+    // Also add Long.MAX_VALUE
+    field1.setLongValue(Long.MAX_VALUE);
+    field2.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    ////////// Not overlapping ranges
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("110-120", 110L, true, 120L, true),
+          new LongRange("121-130", 121L, true, 130L, true),
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=2\n" + "  110-120 (11)\n" + "  
121-130 (10)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    ///////// Overlapping ranges
+    inputRanges =
+        new LongRange[] {
+          new LongRange("110-120", 110L, true, 120L, true),
+          new LongRange("115-125", 115L, true, 125L, true),
+        };
+
+    valuesSource = MultiLongValuesSource.fromLongField("field");
+    longRangeFacetCutter = LongRangeFacetCutter.create("field", valuesSource, 
inputRanges);
+    countRecorder = new CountFacetRecorder(random().nextBoolean());
+
+    collectorManager = new FacetFieldCollectorManager<>(longRangeFacetCutter, 
null, countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=2\n" + "  110-120 (11)\n" + "  
115-125 (11)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    ////////// Multiple ranges (similar to original test)
+    inputRanges =
+        new LongRange[] {
+          new LongRange("[100-110)", 100L, true, 110L, false),
+          new LongRange("[100-110]", 100L, true, 110L, true),
+          new LongRange("(190-200)", 190L, false, 200L, false),
+          new LongRange("[190-200]", 190L, true, 200L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true)
+        };
+
+    valuesSource = MultiLongValuesSource.fromLongField("field");
+    longRangeFacetCutter = LongRangeFacetCutter.create("field", valuesSource, 
inputRanges);
+    countRecorder = new CountFacetRecorder(random().nextBoolean());
+
+    collectorManager = new FacetFieldCollectorManager<>(longRangeFacetCutter, 
null, countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n"
+            + "  [100-110) (10)\n"
+            + "  [100-110] (11)\n"
+            + "  (190-200) (9)\n"
+            + "  [190-200] (10)\n"
+            + "  over 1000 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testBasicLongMultiValuedMixedSegmentTypes() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    // write docs as two segments (50 in each). the first segment will contain 
a mix of single- and
+    // multi-value cases, while the second segment will be all single values.
+    for (int l = 0; l < 100; l++) {
+      field1.setLongValue(l);
+      field2.setLongValue(l);
+      Document doc = new Document();
+      doc.add(field1);
+      if (l == 0) {
+        doc.add(field2);
+      } else if (l < 50) {
+        if (random().nextBoolean()) {
+          doc.add(field2);
+        }
+      }
+      w.addDocument(doc);
+      if (l == 50) {
+        w.commit();
+      }
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testLongMinMax() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    field.setLongValue(Long.MIN_VALUE);
+    w.addDocument(doc);
+    field.setLongValue(0);
+    w.addDocument(doc);
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("min", Long.MIN_VALUE, true, Long.MIN_VALUE, true),
+          new LongRange("max", Long.MAX_VALUE, true, Long.MAX_VALUE, true),
+          new LongRange("all0", Long.MIN_VALUE, true, Long.MAX_VALUE, true),
+          new LongRange("all1", Long.MIN_VALUE, false, Long.MAX_VALUE, true),
+          new LongRange("all2", Long.MIN_VALUE, true, Long.MAX_VALUE, false),
+          new LongRange("all3", Long.MIN_VALUE, false, Long.MAX_VALUE, false)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=6\n  min (1)\n  max (1)\n  all0 
(3)\n  all1 (2)\n  all2 (2)\n  all3 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testOverlappedEndStart() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("0-10", 0L, true, 10L, true),
+          new LongRange("10-20", 10L, true, 20L, true),
+          new LongRange("20-30", 20L, true, 30L, true),
+          new LongRange("30-40", 30L, true, 40L, true)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=4\n  0-10 (11)\n  10-20 (11)\n  
20-30 (11)\n  30-40 (11)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testEmptyRangesSingleValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges = new LongRange[0];
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=0\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testEmptyRangesMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    doc.add(field1);
+    doc.add(field2);
+    for (long l = 0; l < 100; l++) {
+      field1.setLongValue(l);
+      field2.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges = new LongRange[0];
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=0\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  /**
+   * Tests single request that mixes Range and non-Range faceting, with 
DrillSideways and taxonomy.
+   */
+  public void testMixedRangeAndNonRangeTaxonomy() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Directory td = newDirectory();
+    DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(td, 
IndexWriterConfig.OpenMode.CREATE);
+
+    FacetsConfig config = new FacetsConfig();
+
+    for (long l = 0; l < 100; l++) {
+      Document doc = new Document();
+      // For computing range facet counts:
+      doc.add(new NumericDocValuesField("field", l));
+      // For drill down by numeric range:
+      doc.add(new LongPoint("field", l));
+
+      if ((l & 3) == 0) {
+        doc.add(new FacetField("dim", "a"));
+      } else {
+        doc.add(new FacetField("dim", "b"));
+      }
+      w.addDocument(config.build(tw, doc));
+    }
+
+    final IndexReader r = w.getReader();
+    final TaxonomyReader tr = new DirectoryTaxonomyReader(tw);
+
+    IndexSearcher s = newSearcher(r, false, false);
+    // DrillSideways requires the entire range of docs to be scored at once, 
so it doesn't support
+    // timeouts whose implementation scores one window of doc IDs at a time.
+    s.setTimeout(null);
+
+    if (VERBOSE) {
+      System.out.println("TEST: searcher=" + s);
+    }
+
+    DrillSideways ds =
+        new DrillSideways(s, config, tr) {
+          @Override
+          protected boolean scoreSubDocsAtOnce() {
+            return random().nextBoolean();
+          }
+        };
+
+    // Data for range facets
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, false)
+        };
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter fieldCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder fieldCountRecorder = new 
CountFacetRecorder(random().nextBoolean());
+    FacetFieldCollectorManager<CountFacetRecorder> fieldCollectorManager =
+        new FacetFieldCollectorManager<>(fieldCutter, null, 
fieldCountRecorder);
+    OrdLabelBiMap fieldOrdLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    // Data for taxonomy facets
+    TaxonomyFacetsCutter dimCutter = new 
TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, tr);
+    CountFacetRecorder dimCountRecorder = new 
CountFacetRecorder(random().nextBoolean());
+    FacetFieldCollectorManager<CountFacetRecorder> dimCollectorManager =
+        new FacetFieldCollectorManager<>(dimCutter, dimCutter, 
dimCountRecorder);
+
+    MultiCollectorManager collectorManager =
+        new MultiCollectorManager(fieldCollectorManager, dimCollectorManager);
+
+    ////// First search, no drill-downs:
+    DrillDownQuery ddq = new DrillDownQuery(config);
+    ds.search(ddq, CollectorOwner.hire(collectorManager), List.of(), true);
+
+    // assertEquals(100, dsr.hits.totalHits.value);
+    assertEquals(
+        "dim=dim path=[] value=-5 childCount=2\n  b (75)\n  a (25)\n",
+        getTopChildrenByCount(dimCountRecorder, tr, 10, "dim").toString());
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(
+                getRangeOrdinals(inputRanges), fieldCountRecorder, "field", 
fieldOrdLabelBiMap)
+            .toString());
+
+    ////// Second search, drill down on dim=b:
+    fieldCountRecorder = new CountFacetRecorder(random().nextBoolean());
+    fieldCollectorManager = new FacetFieldCollectorManager<>(fieldCutter, 
null, fieldCountRecorder);
+    dimCountRecorder = new CountFacetRecorder(random().nextBoolean());
+    dimCollectorManager = new FacetFieldCollectorManager<>(dimCutter, 
dimCutter, dimCountRecorder);
+    ddq = new DrillDownQuery(config);
+    ddq.add("dim", "b");
+    ds.search(
+        ddq,
+        CollectorOwner.hire(fieldCollectorManager),
+        List.of(CollectorOwner.hire(dimCollectorManager)),
+        true);
+
+    // assertEquals(75, dsr.hits.totalHits.value);
+    assertEquals(
+        "dim=dim path=[] value=-5 childCount=2\n  b (75)\n  a (25)\n",
+        getTopChildrenByCount(dimCountRecorder, tr, 10, "dim").toString());
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (7)\n  less 
than or equal to 10 (8)\n  over 90 (7)\n  90 or above (8)\n  over 1000 (0)\n",
+        getAllSortByOrd(
+                getRangeOrdinals(inputRanges), fieldCountRecorder, "field", 
fieldOrdLabelBiMap)
+            .toString());
+
+    ////// Third search, drill down on "less than or equal to 10":
+    fieldCountRecorder = new CountFacetRecorder(random().nextBoolean());
+    fieldCollectorManager = new FacetFieldCollectorManager<>(fieldCutter, 
null, fieldCountRecorder);
+    dimCountRecorder = new CountFacetRecorder(random().nextBoolean());
+    dimCollectorManager = new FacetFieldCollectorManager<>(dimCutter, 
dimCutter, dimCountRecorder);
+    ddq = new DrillDownQuery(config);
+    ddq.add("field", LongPoint.newRangeQuery("field", 0L, 10L));
+    ds.search(
+        ddq,
+        CollectorOwner.hire(dimCollectorManager),
+        List.of(CollectorOwner.hire(fieldCollectorManager)),
+        true);
+
+    // assertEquals(11, dsr.hits.totalHits.value);
+    assertEquals(
+        "dim=dim path=[] value=-5 childCount=2\n  b (8)\n  a (3)\n",
+        getTopChildrenByCount(dimCountRecorder, tr, 10, "dim").toString());
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(
+                getRangeOrdinals(inputRanges), fieldCountRecorder, "field", 
fieldOrdLabelBiMap)
+            .toString());
+
+    w.close();
+    IOUtils.close(tw, tr, td, r, d);
+  }
+
+  public void testBasicDouble() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    DoubleDocValuesField field = new DoubleDocValuesField("field", 0.0);
+    doc.add(field);
+    for (int i = 0; i < 100; i++) {
+      field.setDoubleValue(i);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r);
+    DoubleRange[] inputRanges =
+        new DoubleRange[] {
+          new DoubleRange("less than 10", 0.0, true, 10.0, false),
+          new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true),
+          new DoubleRange("over 90", 90.0, false, 100.0, false),
+          new DoubleRange("90 or above", 90.0, true, 100.0, false),
+          new DoubleRange("over 1000", 1000.0, false, 
Double.POSITIVE_INFINITY, false)
+        };
+
+    MultiDoubleValuesSource valuesSource = 
MultiDoubleValuesSource.fromDoubleField("field");
+    DoubleRangeFacetCutter doubleRangeFacetCutter =
+        new DoubleRangeFacetCutter("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    w.close();
+    IOUtils.close(r, d);
+  }
+
+  public void testBasicDoubleMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    // index the same value twice and make sure we don't double count
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0);
+    doc.add(field1);
+    doc.add(field2);
+    for (int i = 0; i < 100; i++) {
+      field1.setLongValue(NumericUtils.doubleToSortableLong(i));
+      field2.setLongValue(NumericUtils.doubleToSortableLong(i));
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r);
+    DoubleRange[] inputRanges =
+        new DoubleRange[] {
+          new DoubleRange("less than 10", 0.0, true, 10.0, false),
+          new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true),
+          new DoubleRange("over 90", 90.0, false, 100.0, false),
+          new DoubleRange("90 or above", 90.0, true, 100.0, false),
+          new DoubleRange("over 1000", 1000.0, false, 
Double.POSITIVE_INFINITY, false)
+        };
+
+    MultiDoubleValuesSource valuesSource = 
MultiDoubleValuesSource.fromDoubleField("field");
+    DoubleRangeFacetCutter doubleRangeFacetCutter =
+        new DoubleRangeFacetCutter("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    w.close();
+    IOUtils.close(r, d);
+  }
+
+  public void testBasicDoubleMultiValuedMixedSegmentTypes() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    // write docs as two segments (51 in the first, 49 in the second, since the
+    // commit happens after adding doc l == 50). the first segment will contain
+    // a mix of single- and
+    // multi-value cases, while the second segment will be all single values.
+    for (int l = 0; l < 100; l++) {
+      field1.setLongValue(NumericUtils.doubleToSortableLong(l));
+      field2.setLongValue(NumericUtils.doubleToSortableLong(l));
+      Document doc = new Document();
+      doc.add(field1);
+      if (l == 0) {
+        doc.add(field2);
+      } else if (l < 50) {
+        if (random().nextBoolean()) {
+          doc.add(field2);
+        }
+      }
+      w.addDocument(doc);
+      if (l == 50) {
+        w.commit();
+      }
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    DoubleRange[] inputRanges =
+        new DoubleRange[] {
+          new DoubleRange("less than 10", 0.0, true, 10.0, false),
+          new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true),
+          new DoubleRange("over 90", 90.0, false, 100.0, false),
+          new DoubleRange("90 or above", 90.0, true, 100.0, false),
+          new DoubleRange("over 1000", 1000.0, false, 
Double.POSITIVE_INFINITY, false)
+        };
+
+    MultiDoubleValuesSource valuesSource = 
MultiDoubleValuesSource.fromDoubleField("field");
+    DoubleRangeFacetCutter doubleRangeFacetCutter =
+        new DoubleRangeFacetCutter("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+    r.close();
+    d.close();
+  }
+
+  public void testRandomLongsSingleValued() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    int numDocs = atLeast(1000);
+    if (VERBOSE) {
+      System.out.println("TEST: numDocs=" + numDocs);
+    }
+    long[] values = new long[numDocs];
+    long minValue = Long.MAX_VALUE;
+    long maxValue = Long.MIN_VALUE;
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      long v = random().nextLong();
+      values[i] = v;
+      doc.add(new NumericDocValuesField("field", v));
+      doc.add(new LongPoint("field", v));
+      w.addDocument(doc);
+      minValue = Math.min(minValue, v);
+      maxValue = Math.max(maxValue, v);
+    }
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r, false);
+    FacetsConfig config = new FacetsConfig();
+
+    int numIters = atLeast(10);
+    for (int iter = 0; iter < numIters; iter++) {
+      if (VERBOSE) {
+        System.out.println("TEST: iter=" + iter);
+      }
+      int numRange = TestUtil.nextInt(random(), 1, 100);
+      LongRange[] ranges = new LongRange[numRange];
+      int[] expectedCounts = new int[numRange];
+      long minAcceptedValue = Long.MAX_VALUE;
+      long maxAcceptedValue = Long.MIN_VALUE;
+      for (int rangeID = 0; rangeID < numRange; rangeID++) {
+        long min;
+        if (rangeID > 0 && random().nextInt(10) == 7) {
+          // Use an existing boundary:
+          LongRange prevRange = ranges[random().nextInt(rangeID)];
+          if (random().nextBoolean()) {
+            min = prevRange.min;
+          } else {
+            min = prevRange.max;
+          }
+        } else {
+          min = random().nextLong();
+        }
+        long max;
+        if (rangeID > 0 && random().nextInt(10) == 7) {
+          // Use an existing boundary:
+          LongRange prevRange = ranges[random().nextInt(rangeID)];
+          if (random().nextBoolean()) {
+            max = prevRange.min;
+          } else {
+            max = prevRange.max;
+          }
+        } else {
+          max = random().nextLong();
+        }
+
+        if (min > max) {
+          long x = min;
+          min = max;
+          max = x;
+        }
+        boolean minIncl;
+        boolean maxIncl;
+
+        // NOTE: max - min >= 0 is here to handle the common overflow case!
+        if (max - min >= 0 && max - min < 2) {
+          // If max == min or max == min+1, we always do inclusive, else we 
might pass an empty
+          // range and hit an exception from LongRange's constructor:
+          minIncl = true;
+          maxIncl = true;
+        } else {
+          minIncl = random().nextBoolean();
+          maxIncl = random().nextBoolean();
+        }
+        ranges[rangeID] = new LongRange("r" + rangeID, min, minIncl, max, 
maxIncl);
+        if (VERBOSE) {
+          System.out.println("  range " + rangeID + ": " + ranges[rangeID]);
+        }
+
+        // Do "slow but hopefully correct" computation of
+        // expected count:
+        for (int i = 0; i < numDocs; i++) {
+          boolean accept = true;
+          if (minIncl) {
+            accept &= values[i] >= min;
+          } else {
+            accept &= values[i] > min;
+          }
+          if (maxIncl) {
+            accept &= values[i] <= max;
+          } else {
+            accept &= values[i] < max;
+          }
+          if (accept) {
+            expectedCounts[rangeID]++;
+            minAcceptedValue = Math.min(minAcceptedValue, values[i]);
+            maxAcceptedValue = Math.max(maxAcceptedValue, values[i]);
+          }
+        }
+      }
+
+      // TODO: fastMatchQuery functionality is not implemented for sandbox 
faceting yet, do we need

Review Comment:
   Interesting. Do you think fast match query makes sense in the context of 
faceting while matching?



##########
lucene/sandbox/src/test/org/apache/lucene/sandbox/facet/TestRangeFacet.java:
##########
@@ -0,0 +1,1654 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.sandbox.facet;
+
+import static org.apache.lucene.facet.FacetsConfig.DEFAULT_INDEX_FIELD_NAME;
+
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
+import java.io.IOException;
+import java.util.List;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.facet.DrillDownQuery;
+import org.apache.lucene.facet.DrillSideways;
+import org.apache.lucene.facet.FacetField;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.FacetsConfig;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.MultiDoubleValuesSource;
+import org.apache.lucene.facet.MultiLongValuesSource;
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.facet.range.LongRange;
+import org.apache.lucene.facet.range.Range;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.sandbox.facet.abstracts.OrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.ranges.DoubleRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.LongRangeFacetCutter;
+import org.apache.lucene.sandbox.facet.ranges.RangeOrdLabelBiMap;
+import org.apache.lucene.sandbox.facet.recorders.CountFacetRecorder;
+import org.apache.lucene.sandbox.facet.taxonomy.TaxonomyFacetsCutter;
+import org.apache.lucene.search.CollectorOwner;
+import org.apache.lucene.search.DoubleValues;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LongValuesSource;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollectorManager;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.tests.search.DummyTotalHitCountCollector;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * Test sandbox facet ranges. Mostly test cases from LongRangeFacetCounts 
adapted for sandbox
+ * faceting.
+ */
+public class TestRangeFacet extends SandboxFacetTestCase {
+
+  public void testBasicLong() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    // Also add Long.MAX_VALUE
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true),
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  private int[] getRangeOrdinals(Range[] inputRanges) {
+    // TODO: this can be fragile; we need a better way of getting all ordinals 
for the provided ranges.
+    int[] result = new int[inputRanges.length];
+    for (int i = 0; i < inputRanges.length; i++) {
+      result[i] = i;
+    }
+    return result;
+  }
+
+  public void testBasicLongMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    // just index the same value twice each time and make sure we don't double 
count
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    doc.add(field1);
+    doc.add(field2);
+    for (long l = 100; l < 200; l++) {
+      field1.setLongValue(l);
+      // Make second value sometimes smaller, sometimes bigger, and sometimes 
equal
+      if (l % 3 == 0) {
+        field2.setLongValue(l - 100);
+      } else if (l % 3 == 1) {
+        field2.setLongValue(l + 100);
+      } else {
+        field2.setLongValue(l);
+      }
+      w.addDocument(doc);
+    }
+
+    // Also add Long.MAX_VALUE
+    field1.setLongValue(Long.MAX_VALUE);
+    field2.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    ////////// Not overlapping ranges
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("110-120", 110L, true, 120L, true),
+          new LongRange("121-130", 121L, true, 130L, true),
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=2\n" + "  110-120 (11)\n" + "  
121-130 (10)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    ///////// Overlapping ranges
+    inputRanges =
+        new LongRange[] {
+          new LongRange("110-120", 110L, true, 120L, true),
+          new LongRange("115-125", 115L, true, 125L, true),
+        };
+
+    valuesSource = MultiLongValuesSource.fromLongField("field");
+    longRangeFacetCutter = LongRangeFacetCutter.create("field", valuesSource, 
inputRanges);
+    countRecorder = new CountFacetRecorder(random().nextBoolean());
+
+    collectorManager = new FacetFieldCollectorManager<>(longRangeFacetCutter, 
null, countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=2\n" + "  110-120 (11)\n" + "  
115-125 (11)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    ////////// Multiple ranges (similar to original test)
+    inputRanges =
+        new LongRange[] {
+          new LongRange("[100-110)", 100L, true, 110L, false),
+          new LongRange("[100-110]", 100L, true, 110L, true),
+          new LongRange("(190-200)", 190L, false, 200L, false),
+          new LongRange("[190-200]", 190L, true, 200L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true)
+        };
+
+    valuesSource = MultiLongValuesSource.fromLongField("field");
+    longRangeFacetCutter = LongRangeFacetCutter.create("field", valuesSource, 
inputRanges);
+    countRecorder = new CountFacetRecorder(random().nextBoolean());
+
+    collectorManager = new FacetFieldCollectorManager<>(longRangeFacetCutter, 
null, countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n"
+            + "  [100-110) (10)\n"
+            + "  [100-110] (11)\n"
+            + "  (190-200) (9)\n"
+            + "  [190-200] (10)\n"
+            + "  over 1000 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testBasicLongMultiValuedMixedSegmentTypes() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    // write docs as two segments (51 in the first, 49 in the second, since the
+    // commit happens after adding doc l == 50). the first segment will contain
+    // a mix of single- and
+    // multi-value cases, while the second segment will be all single values.
+    for (int l = 0; l < 100; l++) {
+      field1.setLongValue(l);
+      field2.setLongValue(l);
+      Document doc = new Document();
+      doc.add(field1);
+      if (l == 0) {
+        doc.add(field2);
+      } else if (l < 50) {
+        if (random().nextBoolean()) {
+          doc.add(field2);
+        }
+      }
+      w.addDocument(doc);
+      if (l == 50) {
+        w.commit();
+      }
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testLongMinMax() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    field.setLongValue(Long.MIN_VALUE);
+    w.addDocument(doc);
+    field.setLongValue(0);
+    w.addDocument(doc);
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("min", Long.MIN_VALUE, true, Long.MIN_VALUE, true),
+          new LongRange("max", Long.MAX_VALUE, true, Long.MAX_VALUE, true),
+          new LongRange("all0", Long.MIN_VALUE, true, Long.MAX_VALUE, true),
+          new LongRange("all1", Long.MIN_VALUE, false, Long.MAX_VALUE, true),
+          new LongRange("all2", Long.MIN_VALUE, true, Long.MAX_VALUE, false),
+          new LongRange("all3", Long.MIN_VALUE, false, Long.MAX_VALUE, false)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=6\n  min (1)\n  max (1)\n  all0 
(3)\n  all1 (2)\n  all2 (2)\n  all3 (1)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testOverlappedEndStart() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+    field.setLongValue(Long.MAX_VALUE);
+    w.addDocument(doc);
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("0-10", 0L, true, 10L, true),
+          new LongRange("10-20", 10L, true, 20L, true),
+          new LongRange("20-30", 20L, true, 30L, true),
+          new LongRange("30-40", 30L, true, 40L, true)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=4\n  0-10 (11)\n  10-20 (11)\n  
20-30 (11)\n  30-40 (11)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testEmptyRangesSingleValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges = new LongRange[0];
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=0\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
+  public void testEmptyRangesMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    doc.add(field1);
+    doc.add(field2);
+    for (long l = 0; l < 100; l++) {
+      field1.setLongValue(l);
+      field2.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    LongRange[] inputRanges = new LongRange[0];
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=0\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    r.close();
+    d.close();
+  }
+
  /**
   * Tests single request that mixes Range and non-Range faceting, with DrillSideways and taxonomy.
   *
   * <p>Indexes 100 docs with a numeric "field" (doc values + point, for range faceting and
   * numeric drill-down) and a taxonomy "dim" facet (every 4th doc gets "a", the rest "b"), then
   * runs three searches: no drill-down, drill-down on the taxonomy dim, and drill-down on a
   * numeric range — verifying sideways counts each time.
   */
  public void testMixedRangeAndNonRangeTaxonomy() throws Exception {
    Directory d = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d);
    Directory td = newDirectory();
    DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(td, IndexWriterConfig.OpenMode.CREATE);

    FacetsConfig config = new FacetsConfig();

    for (long l = 0; l < 100; l++) {
      Document doc = new Document();
      // For computing range facet counts:
      doc.add(new NumericDocValuesField("field", l));
      // For drill down by numeric range:
      doc.add(new LongPoint("field", l));

      // Every 4th doc (l % 4 == 0) gets dim=a (25 docs), the rest dim=b (75 docs):
      if ((l & 3) == 0) {
        doc.add(new FacetField("dim", "a"));
      } else {
        doc.add(new FacetField("dim", "b"));
      }
      w.addDocument(config.build(tw, doc));
    }

    final IndexReader r = w.getReader();
    final TaxonomyReader tr = new DirectoryTaxonomyReader(tw);

    IndexSearcher s = newSearcher(r, false, false);
    // DrillSideways requires the entire range of docs to be scored at once, so it doesn't support
    // timeouts whose implementation scores one window of doc IDs at a time.
    s.setTimeout(null);

    if (VERBOSE) {
      System.out.println("TEST: searcher=" + s);
    }

    // Randomize whether sub-docs are scored at once to exercise both DrillSideways code paths:
    DrillSideways ds =
        new DrillSideways(s, config, tr) {
          @Override
          protected boolean scoreSubDocsAtOnce() {
            return random().nextBoolean();
          }
        };

    // Data for range facets
    LongRange[] inputRanges =
        new LongRange[] {
          new LongRange("less than 10", 0L, true, 10L, false),
          new LongRange("less than or equal to 10", 0L, true, 10L, true),
          new LongRange("over 90", 90L, false, 100L, false),
          new LongRange("90 or above", 90L, true, 100L, false),
          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, false)
        };
    MultiLongValuesSource valuesSource = MultiLongValuesSource.fromLongField("field");
    LongRangeFacetCutter fieldCutter =
        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
    CountFacetRecorder fieldCountRecorder = new CountFacetRecorder(random().nextBoolean());
    FacetFieldCollectorManager<CountFacetRecorder> fieldCollectorManager =
        new FacetFieldCollectorManager<>(fieldCutter, null, fieldCountRecorder);
    OrdLabelBiMap fieldOrdLabelBiMap = new RangeOrdLabelBiMap(inputRanges);

    // Data for taxonomy facets
    TaxonomyFacetsCutter dimCutter = new TaxonomyFacetsCutter(DEFAULT_INDEX_FIELD_NAME, config, tr);
    CountFacetRecorder dimCountRecorder = new CountFacetRecorder(random().nextBoolean());
    FacetFieldCollectorManager<CountFacetRecorder> dimCollectorManager =
        new FacetFieldCollectorManager<>(dimCutter, dimCutter, dimCountRecorder);

    // Collect both facet types in a single pass over the hits:
    MultiCollectorManager collectorManager =
        new MultiCollectorManager(fieldCollectorManager, dimCollectorManager);

    ////// First search, no drill-downs:
    DrillDownQuery ddq = new DrillDownQuery(config);
    ds.search(ddq, CollectorOwner.hire(collectorManager), List.of(), true);

    // assertEquals(100, dsr.hits.totalHits.value);
    assertEquals(
        "dim=dim path=[] value=-5 childCount=2\n  b (75)\n  a (25)\n",
        getTopChildrenByCount(dimCountRecorder, tr, 10, "dim").toString());
    assertEquals(
        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
        getAllSortByOrd(
                getRangeOrdinals(inputRanges), fieldCountRecorder, "field", fieldOrdLabelBiMap)
            .toString());

    ////// Second search, drill down on dim=b:
    // Fresh recorders/managers — they accumulate counts and cannot be reused across searches.
    fieldCountRecorder = new CountFacetRecorder(random().nextBoolean());
    fieldCollectorManager = new FacetFieldCollectorManager<>(fieldCutter, null, fieldCountRecorder);
    dimCountRecorder = new CountFacetRecorder(random().nextBoolean());
    dimCollectorManager = new FacetFieldCollectorManager<>(dimCutter, dimCutter, dimCountRecorder);
    ddq = new DrillDownQuery(config);
    ddq.add("dim", "b");
    // Field facets are drill-down (restricted to dim=b); dim facets are sideways (ignore the
    // dim restriction), so dim counts stay at the full 75/25:
    ds.search(
        ddq,
        CollectorOwner.hire(fieldCollectorManager),
        List.of(CollectorOwner.hire(dimCollectorManager)),
        true);

    // assertEquals(75, dsr.hits.totalHits.value);
    assertEquals(
        "dim=dim path=[] value=-5 childCount=2\n  b (75)\n  a (25)\n",
        getTopChildrenByCount(dimCountRecorder, tr, 10, "dim").toString());
    assertEquals(
        "dim=field path=[] value=-5 childCount=5\n  less than 10 (7)\n  less than or equal to 10 (8)\n  over 90 (7)\n  90 or above (8)\n  over 1000 (0)\n",
        getAllSortByOrd(
                getRangeOrdinals(inputRanges), fieldCountRecorder, "field", fieldOrdLabelBiMap)
            .toString());

    ////// Third search, drill down on "less than or equal to 10":
    fieldCountRecorder = new CountFacetRecorder(random().nextBoolean());
    fieldCollectorManager = new FacetFieldCollectorManager<>(fieldCutter, null, fieldCountRecorder);
    dimCountRecorder = new CountFacetRecorder(random().nextBoolean());
    dimCollectorManager = new FacetFieldCollectorManager<>(dimCutter, dimCutter, dimCountRecorder);
    ddq = new DrillDownQuery(config);
    ddq.add("field", LongPoint.newRangeQuery("field", 0L, 10L));
    // Now the roles are reversed: dim facets are drill-down, field facets are sideways:
    ds.search(
        ddq,
        CollectorOwner.hire(dimCollectorManager),
        List.of(CollectorOwner.hire(fieldCollectorManager)),
        true);

    // assertEquals(11, dsr.hits.totalHits.value);
    assertEquals(
        "dim=dim path=[] value=-5 childCount=2\n  b (8)\n  a (3)\n",
        getTopChildrenByCount(dimCountRecorder, tr, 10, "dim").toString());
    assertEquals(
        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
        getAllSortByOrd(
                getRangeOrdinals(inputRanges), fieldCountRecorder, "field", fieldOrdLabelBiMap)
            .toString());

    w.close();
    IOUtils.close(tw, tr, td, r, d);
  }
+
+  public void testBasicDouble() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    DoubleDocValuesField field = new DoubleDocValuesField("field", 0.0);
+    doc.add(field);
+    for (int i = 0; i < 100; i++) {
+      field.setDoubleValue(i);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r);
+    DoubleRange[] inputRanges =
+        new DoubleRange[] {
+          new DoubleRange("less than 10", 0.0, true, 10.0, false),
+          new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true),
+          new DoubleRange("over 90", 90.0, false, 100.0, false),
+          new DoubleRange("90 or above", 90.0, true, 100.0, false),
+          new DoubleRange("over 1000", 1000.0, false, 
Double.POSITIVE_INFINITY, false)
+        };
+
+    MultiDoubleValuesSource valuesSource = 
MultiDoubleValuesSource.fromDoubleField("field");
+    DoubleRangeFacetCutter doubleRangeFacetCutter =
+        new DoubleRangeFacetCutter("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    w.close();
+    IOUtils.close(r, d);
+  }
+
+  public void testBasicDoubleMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    // index the same value twice and make sure we don't double count
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0);
+    doc.add(field1);
+    doc.add(field2);
+    for (int i = 0; i < 100; i++) {
+      field1.setLongValue(NumericUtils.doubleToSortableLong(i));
+      field2.setLongValue(NumericUtils.doubleToSortableLong(i));
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r);
+    DoubleRange[] inputRanges =
+        new DoubleRange[] {
+          new DoubleRange("less than 10", 0.0, true, 10.0, false),
+          new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true),
+          new DoubleRange("over 90", 90.0, false, 100.0, false),
+          new DoubleRange("90 or above", 90.0, true, 100.0, false),
+          new DoubleRange("over 1000", 1000.0, false, 
Double.POSITIVE_INFINITY, false)
+        };
+
+    MultiDoubleValuesSource valuesSource = 
MultiDoubleValuesSource.fromDoubleField("field");
+    DoubleRangeFacetCutter doubleRangeFacetCutter =
+        new DoubleRangeFacetCutter("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    w.close();
+    IOUtils.close(r, d);
+  }
+
+  public void testBasicDoubleMultiValuedMixedSegmentTypes() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    // write docs as two segments (50 in each). the first segment will contain 
a mix of single- and
+    // multi-value cases, while the second segment will be all single values.
+    for (int l = 0; l < 100; l++) {
+      field1.setLongValue(NumericUtils.doubleToSortableLong(l));
+      field2.setLongValue(NumericUtils.doubleToSortableLong(l));
+      Document doc = new Document();
+      doc.add(field1);
+      if (l == 0) {
+        doc.add(field2);
+      } else if (l < 50) {
+        if (random().nextBoolean()) {
+          doc.add(field2);
+        }
+      }
+      w.addDocument(doc);
+      if (l == 50) {
+        w.commit();
+      }
+    }
+
+    IndexReader r = w.getReader();
+    w.close();
+
+    IndexSearcher s = newSearcher(r);
+
+    DoubleRange[] inputRanges =
+        new DoubleRange[] {
+          new DoubleRange("less than 10", 0.0, true, 10.0, false),
+          new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true),
+          new DoubleRange("over 90", 90.0, false, 100.0, false),
+          new DoubleRange("90 or above", 90.0, true, 100.0, false),
+          new DoubleRange("over 1000", 1000.0, false, 
Double.POSITIVE_INFINITY, false)
+        };
+
+    MultiDoubleValuesSource valuesSource = 
MultiDoubleValuesSource.fromDoubleField("field");
+    DoubleRangeFacetCutter doubleRangeFacetCutter =
+        new DoubleRangeFacetCutter("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (10)\n  less 
than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+    r.close();
+    d.close();
+  }
+
  /**
   * Randomized test: indexes random single-valued longs, builds random (possibly overlapping)
   * ranges, and checks facet counts against a brute-force recount plus a point-query drill-down.
   */
  public void testRandomLongsSingleValued() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);

    int numDocs = atLeast(1000);
    if (VERBOSE) {
      System.out.println("TEST: numDocs=" + numDocs);
    }
    // One random long per doc, indexed both as doc values (for faceting) and as a point (for
    // the drill-down queries below). values[] keeps them for the brute-force recount.
    long[] values = new long[numDocs];
    // min/max are only referenced by the commented-out fastMatchQuery code below; kept so that
    // code can be re-enabled without edits.
    long minValue = Long.MAX_VALUE;
    long maxValue = Long.MIN_VALUE;
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      long v = random().nextLong();
      values[i] = v;
      doc.add(new NumericDocValuesField("field", v));
      doc.add(new LongPoint("field", v));
      w.addDocument(doc);
      minValue = Math.min(minValue, v);
      maxValue = Math.max(maxValue, v);
    }
    IndexReader r = w.getReader();

    IndexSearcher s = newSearcher(r, false);
    FacetsConfig config = new FacetsConfig();

    int numIters = atLeast(10);
    for (int iter = 0; iter < numIters; iter++) {
      if (VERBOSE) {
        System.out.println("TEST: iter=" + iter);
      }
      int numRange = TestUtil.nextInt(random(), 1, 100);
      LongRange[] ranges = new LongRange[numRange];
      int[] expectedCounts = new int[numRange];
      long minAcceptedValue = Long.MAX_VALUE;
      long maxAcceptedValue = Long.MIN_VALUE;
      for (int rangeID = 0; rangeID < numRange; rangeID++) {
        long min;
        // ~10% of the time reuse an earlier range's endpoint, to exercise shared boundaries:
        if (rangeID > 0 && random().nextInt(10) == 7) {
          // Use an existing boundary:
          LongRange prevRange = ranges[random().nextInt(rangeID)];
          if (random().nextBoolean()) {
            min = prevRange.min;
          } else {
            min = prevRange.max;
          }
        } else {
          min = random().nextLong();
        }
        long max;
        if (rangeID > 0 && random().nextInt(10) == 7) {
          // Use an existing boundary:
          LongRange prevRange = ranges[random().nextInt(rangeID)];
          if (random().nextBoolean()) {
            max = prevRange.min;
          } else {
            max = prevRange.max;
          }
        } else {
          max = random().nextLong();
        }

        // Keep min <= max:
        if (min > max) {
          long x = min;
          min = max;
          max = x;
        }
        boolean minIncl;
        boolean maxIncl;

        // NOTE: max - min >= 0 is here to handle the common overflow case!
        if (max - min >= 0 && max - min < 2) {
          // If max == min or max == min+1, we always do inclusive, else we might pass an empty
          // range and hit exc from LongRange's ctor:
          minIncl = true;
          maxIncl = true;
        } else {
          minIncl = random().nextBoolean();
          maxIncl = random().nextBoolean();
        }
        ranges[rangeID] = new LongRange("r" + rangeID, min, minIncl, max, maxIncl);
        if (VERBOSE) {
          System.out.println("  range " + rangeID + ": " + ranges[rangeID]);
        }

        // Do "slow but hopefully correct" computation of
        // expected count:
        for (int i = 0; i < numDocs; i++) {
          boolean accept = true;
          if (minIncl) {
            accept &= values[i] >= min;
          } else {
            accept &= values[i] > min;
          }
          if (maxIncl) {
            accept &= values[i] <= max;
          } else {
            accept &= values[i] < max;
          }
          if (accept) {
            expectedCounts[rangeID]++;
            minAcceptedValue = Math.min(minAcceptedValue, values[i]);
            maxAcceptedValue = Math.max(maxAcceptedValue, values[i]);
          }
        }
      }

      // TODO: fastMatchQuery functionality is not implemented for sandbox faceting yet, do we need
      // it?
      /*Query fastMatchQuery;
      if (random().nextBoolean()) {
        if (random().nextBoolean()) {
          fastMatchQuery = LongPoint.newRangeQuery("field", minValue, maxValue);
        } else {
          fastMatchQuery = LongPoint.newRangeQuery("field", minAcceptedValue, maxAcceptedValue);
        }
      } else {
        fastMatchQuery = null;
      }*/

      // Randomly exercise both the single-valued-source wrapper and the native multi-valued
      // source — they must produce identical counts for single-valued data:
      final MultiLongValuesSource mvs;
      if (random().nextBoolean()) {
        LongValuesSource vs = LongValuesSource.fromLongField("field");
        mvs = MultiLongValuesSource.fromSingleValued(vs);
      } else {
        mvs = MultiLongValuesSource.fromLongField("field");
      }

      LongRangeFacetCutter longRangeFacetCutter = LongRangeFacetCutter.create("field", mvs, ranges);
      CountFacetRecorder countRecorder = new CountFacetRecorder(random().nextBoolean());

      FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
          new FacetFieldCollectorManager<>(longRangeFacetCutter, null, countRecorder);
      s.search(new MatchAllDocsQuery(), collectorManager);

      OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(ranges);
      FacetResult result =
          getAllSortByOrd(getRangeOrdinals(ranges), countRecorder, "field", ordLabelBiMap);
      assertEquals(numRange, result.labelValues.length);
      for (int rangeID = 0; rangeID < numRange; rangeID++) {
        if (VERBOSE) {
          System.out.println("  range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
        }
        LabelAndValue subNode = result.labelValues[rangeID];
        assertEquals("r" + rangeID, subNode.label);
        assertEquals(expectedCounts[rangeID], subNode.value.intValue());

        LongRange range = ranges[rangeID];

        // Test drill-down:
        DrillDownQuery ddq = new DrillDownQuery(config);
        ddq.add("field", LongPoint.newRangeQuery("field", range.min, range.max));
        assertEquals(expectedCounts[rangeID], s.count(ddq));
      }
    }

    w.close();
    IOUtils.close(r, dir);
  }
+
  /**
   * Randomized test like {@code testRandomLongsSingleValued}, but each doc carries 1..50 random
   * long values; verifies a doc matching a range through several of its values is counted once.
   */
  public void testRandomLongsMultiValued() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);

    int numDocs = atLeast(1000);
    if (VERBOSE) {
      System.out.println("TEST: numDocs=" + numDocs);
    }
    // Jagged array: values[i] holds all values of doc i, for the brute-force recount below.
    long[][] values = new long[numDocs][];
    // min/max are only referenced by the commented-out fastMatchQuery code below; kept so that
    // code can be re-enabled without edits.
    long minValue = Long.MAX_VALUE;
    long maxValue = Long.MIN_VALUE;
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      int numVals = RandomNumbers.randomIntBetween(random(), 1, 50);
      if (random().nextInt(10) == 0) {
        numVals = 1; // make sure we have ample testing of single-value cases
      }
      values[i] = new long[numVals];
      for (int j = 0; j < numVals; j++) {
        long v = random().nextLong();
        values[i][j] = v;
        doc.add(new SortedNumericDocValuesField("field", v));
        doc.add(new LongPoint("field", v));
        minValue = Math.min(minValue, v);
        maxValue = Math.max(maxValue, v);
      }
      w.addDocument(doc);
    }
    IndexReader r = w.getReader();

    IndexSearcher s = newSearcher(r, false);
    FacetsConfig config = new FacetsConfig();

    int numIters = atLeast(10);
    for (int iter = 0; iter < numIters; iter++) {
      if (VERBOSE) {
        System.out.println("TEST: iter=" + iter);
      }
      int numRange = TestUtil.nextInt(random(), 1, 100);
      LongRange[] ranges = new LongRange[numRange];
      int[] expectedCounts = new int[numRange];
      long minAcceptedValue = Long.MAX_VALUE;
      long maxAcceptedValue = Long.MIN_VALUE;
      for (int rangeID = 0; rangeID < numRange; rangeID++) {
        long min;
        // ~10% of the time reuse an earlier range's endpoint, to exercise shared boundaries:
        if (rangeID > 0 && random().nextInt(10) == 7) {
          // Use an existing boundary:
          LongRange prevRange = ranges[random().nextInt(rangeID)];
          if (random().nextBoolean()) {
            min = prevRange.min;
          } else {
            min = prevRange.max;
          }
        } else {
          min = random().nextLong();
        }
        long max;
        if (rangeID > 0 && random().nextInt(10) == 7) {
          // Use an existing boundary:
          LongRange prevRange = ranges[random().nextInt(rangeID)];
          if (random().nextBoolean()) {
            max = prevRange.min;
          } else {
            max = prevRange.max;
          }
        } else {
          max = random().nextLong();
        }

        // Keep min <= max:
        if (min > max) {
          long x = min;
          min = max;
          max = x;
        }
        boolean minIncl;
        boolean maxIncl;

        // NOTE: max - min >= 0 is here to handle the common overflow case!
        if (max - min >= 0 && max - min < 2) {
          // If max == min or max == min+1, we always do inclusive, else we might pass an empty
          // range and hit exc from LongRange's ctor:
          minIncl = true;
          maxIncl = true;
        } else {
          minIncl = random().nextBoolean();
          maxIncl = random().nextBoolean();
        }
        ranges[rangeID] = new LongRange("r" + rangeID, min, minIncl, max, maxIncl);
        if (VERBOSE) {
          System.out.println("  range " + rangeID + ": " + ranges[rangeID]);
        }

        // Do "slow but hopefully correct" computation of
        // expected count:
        for (int i = 0; i < numDocs; i++) {
          for (int j = 0; j < values[i].length; j++) {
            boolean accept = true;
            if (minIncl) {
              accept &= values[i][j] >= min;
            } else {
              accept &= values[i][j] > min;
            }
            if (maxIncl) {
              accept &= values[i][j] <= max;
            } else {
              accept &= values[i][j] < max;
            }
            if (accept) {
              expectedCounts[rangeID]++;
              minAcceptedValue = Math.min(minAcceptedValue, values[i][j]);
              maxAcceptedValue = Math.max(maxAcceptedValue, values[i][j]);
              break; // ensure each doc can contribute at most 1 count to each range
            }
          }
        }
      }

      // TODO: fastMatchQuery functionality is not implemented for sandbox faceting yet, do we need
      // it?
      /*Query fastMatchQuery;
      if (random().nextBoolean()) {
        if (random().nextBoolean()) {
          fastMatchQuery = LongPoint.newRangeQuery("field", minValue, maxValue);
        } else {
          fastMatchQuery = LongPoint.newRangeQuery("field", minAcceptedValue, maxAcceptedValue);
        }
      } else {
        fastMatchQuery = null;
      }*/
      final MultiLongValuesSource mvs = MultiLongValuesSource.fromLongField("field");

      LongRangeFacetCutter longRangeFacetCutter = LongRangeFacetCutter.create("field", mvs, ranges);
      CountFacetRecorder countRecorder = new CountFacetRecorder(random().nextBoolean());

      FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
          new FacetFieldCollectorManager<>(longRangeFacetCutter, null, countRecorder);
      s.search(new MatchAllDocsQuery(), collectorManager);

      OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(ranges);
      FacetResult result =
          getAllSortByOrd(getRangeOrdinals(ranges), countRecorder, "field", ordLabelBiMap);
      assertEquals(numRange, result.labelValues.length);
      for (int rangeID = 0; rangeID < numRange; rangeID++) {
        if (VERBOSE) {
          System.out.println("  range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
        }
        LabelAndValue subNode = result.labelValues[rangeID];
        assertEquals("r" + rangeID, subNode.label);
        assertEquals(expectedCounts[rangeID], subNode.value.intValue());

        LongRange range = ranges[rangeID];

        // Test drill-down: randomly via the point index or the (slower) doc-values range query;
        // both must agree with the brute-force count:
        DrillDownQuery ddq = new DrillDownQuery(config);
        if (random().nextBoolean()) {
          ddq.add("field", LongPoint.newRangeQuery("field", range.min, range.max));
        } else {
          ddq.add(
              "field",
              SortedNumericDocValuesField.newSlowRangeQuery("field", range.min, range.max));
        }
        assertEquals(expectedCounts[rangeID], s.count(ddq));
      }
    }

    w.close();
    IOUtils.close(r, dir);
  }
+
+  public void testRandomDoublesSingleValued() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    int numDocs = atLeast(1000);
+    double[] values = new double[numDocs];
+    double minValue = Double.POSITIVE_INFINITY;
+    double maxValue = Double.NEGATIVE_INFINITY;
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      double v = random().nextDouble();
+      values[i] = v;
+      doc.add(new DoubleDocValuesField("field", v));
+      doc.add(new DoublePoint("field", v));
+      w.addDocument(doc);
+      minValue = Math.min(minValue, v);
+      maxValue = Math.max(maxValue, v);
+    }
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r, false);
+    FacetsConfig config = new FacetsConfig();
+
+    int numIters = atLeast(10);
+    for (int iter = 0; iter < numIters; iter++) {
+      if (VERBOSE) {
+        System.out.println("TEST: iter=" + iter);
+      }
+      int numRange = TestUtil.nextInt(random(), 1, 5);
+      DoubleRange[] ranges = new DoubleRange[numRange];
+      int[] expectedCounts = new int[numRange];
+      double minAcceptedValue = Double.POSITIVE_INFINITY;
+      double maxAcceptedValue = Double.NEGATIVE_INFINITY;
+      for (int rangeID = 0; rangeID < numRange; rangeID++) {
+        double min;
+        if (rangeID > 0 && random().nextInt(10) == 7) {
+          // Use an existing boundary:
+          DoubleRange prevRange = ranges[random().nextInt(rangeID)];
+          if (random().nextBoolean()) {
+            min = prevRange.min;
+          } else {
+            min = prevRange.max;
+          }
+        } else {
+          min = random().nextDouble();
+        }
+        double max;
+        if (rangeID > 0 && random().nextInt(10) == 7) {
+          // Use an existing boundary:
+          DoubleRange prevRange = ranges[random().nextInt(rangeID)];
+          if (random().nextBoolean()) {
+            max = prevRange.min;
+          } else {
+            max = prevRange.max;
+          }
+        } else {
+          max = random().nextDouble();
+        }
+
+        if (min > max) {
+          double x = min;
+          min = max;
+          max = x;
+        }
+
+        boolean minIncl;
+        boolean maxIncl;
+
+        long minAsLong = NumericUtils.doubleToSortableLong(min);
+        long maxAsLong = NumericUtils.doubleToSortableLong(max);
+        // NOTE: maxAsLong - minAsLong >= 0 is here to handle the common 
overflow case!
+        if (maxAsLong - minAsLong >= 0 && maxAsLong - minAsLong < 2) {
+          minIncl = true;
+          maxIncl = true;
+        } else {
+          minIncl = random().nextBoolean();
+          maxIncl = random().nextBoolean();
+        }
+        ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, 
maxIncl);
+
+        // Do "slow but hopefully correct" computation of
+        // expected count:
+        for (int i = 0; i < numDocs; i++) {
+          boolean accept = true;
+          if (minIncl) {
+            accept &= values[i] >= min;
+          } else {
+            accept &= values[i] > min;
+          }
+          if (maxIncl) {
+            accept &= values[i] <= max;
+          } else {
+            accept &= values[i] < max;
+          }
+          if (accept) {
+            expectedCounts[rangeID]++;
+            minAcceptedValue = Math.min(minAcceptedValue, values[i]);
+            maxAcceptedValue = Math.max(maxAcceptedValue, values[i]);
+          }
+        }
+      }
+
+      // TODO: fastMatchQuery functionality is not implemented for sandbox 
faceting yet, do we need
+      // it?
+      /*Query fastMatchFilter;
+      if (random().nextBoolean()) {
+        if (random().nextBoolean()) {
+          fastMatchFilter = DoublePoint.newRangeQuery("field", minValue, 
maxValue);
+        } else {
+          fastMatchFilter = DoublePoint.newRangeQuery("field", 
minAcceptedValue, maxAcceptedValue);
+        }
+      } else {
+        fastMatchFilter = null;
+      }*/
+
+      final MultiDoubleValuesSource mvs;
+      if (random().nextBoolean()) {
+        DoubleValuesSource vs = DoubleValuesSource.fromDoubleField("field");
+        mvs = MultiDoubleValuesSource.fromSingleValued(vs);
+      } else {
+        mvs = MultiDoubleValuesSource.fromDoubleField("field");
+      }
+
+      DoubleRangeFacetCutter doubleRangeFacetCutter =
+          new DoubleRangeFacetCutter("field", mvs, ranges);
+      CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+      FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+          new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+      s.search(new MatchAllDocsQuery(), collectorManager);
+
+      OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(ranges);
+      FacetResult result =
+          getAllSortByOrd(getRangeOrdinals(ranges), countRecorder, "field", 
ordLabelBiMap);
+      assertEquals(numRange, result.labelValues.length);
+      for (int rangeID = 0; rangeID < numRange; rangeID++) {
+        if (VERBOSE) {
+          System.out.println("  range " + rangeID + " expectedCount=" + 
expectedCounts[rangeID]);
+        }
+        LabelAndValue subNode = result.labelValues[rangeID];
+        assertEquals("r" + rangeID, subNode.label);
+        assertEquals(expectedCounts[rangeID], subNode.value.intValue());
+
+        DoubleRange range = ranges[rangeID];
+
+        // Test drill-down:
+        DrillDownQuery ddq = new DrillDownQuery(config);
+        ddq.add("field", DoublePoint.newRangeQuery("field", range.min, 
range.max));
+
+        assertEquals(expectedCounts[rangeID], s.count(ddq));
+      }
+    }
+
+    w.close();
+    IOUtils.close(r, dir);
+  }
+
+  /**
+   * Randomized end-to-end check of multi-valued double range faceting: indexes docs carrying
+   * 1-50 random values each, builds random ranges (sometimes reusing existing boundaries to
+   * exercise degenerate/shared-edge cases), compares the sandbox facet counts against a
+   * brute-force per-doc count, and cross-checks each range with an equivalent drill-down query.
+   */
+  public void testRandomDoublesMultiValued() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    int numDocs = atLeast(1000);
+    double[][] values = new double[numDocs][];
+    double minValue = Double.POSITIVE_INFINITY;
+    double maxValue = Double.NEGATIVE_INFINITY;
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      int numVals = RandomNumbers.randomIntBetween(random(), 1, 50);
+      if (random().nextInt(10) == 0) {
+        numVals = 1; // make sure we have ample testing of single-value cases
+      }
+      values[i] = new double[numVals];
+      for (int j = 0; j < numVals; j++) {
+        double v = random().nextDouble();
+        values[i][j] = v;
+        // Store the raw bits; doubleToLongBits preserves ordering for the
+        // non-negative values produced by nextDouble() here.
+        doc.add(new SortedNumericDocValuesField("field", 
Double.doubleToLongBits(v)));
+        doc.add(new DoublePoint("field", v));
+        minValue = Math.min(minValue, v);
+        maxValue = Math.max(maxValue, v);
+      }
+      w.addDocument(doc);
+    }
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r, false);
+    FacetsConfig config = new FacetsConfig();
+
+    int numIters = atLeast(10);
+    for (int iter = 0; iter < numIters; iter++) {
+      if (VERBOSE) {
+        System.out.println("TEST: iter=" + iter);
+      }
+      int numRange = TestUtil.nextInt(random(), 1, 5);
+      DoubleRange[] ranges = new DoubleRange[numRange];
+      int[] expectedCounts = new int[numRange];
+      double minAcceptedValue = Double.POSITIVE_INFINITY;
+      double maxAcceptedValue = Double.NEGATIVE_INFINITY;
+      for (int rangeID = 0; rangeID < numRange; rangeID++) {
+        double min;
+        if (rangeID > 0 && random().nextInt(10) == 7) {
+          // Use an existing boundary:
+          DoubleRange prevRange = ranges[random().nextInt(rangeID)];
+          if (random().nextBoolean()) {
+            min = prevRange.min;
+          } else {
+            min = prevRange.max;
+          }
+        } else {
+          min = random().nextDouble();
+        }
+        double max;
+        if (rangeID > 0 && random().nextInt(10) == 7) {
+          // Use an existing boundary:
+          DoubleRange prevRange = ranges[random().nextInt(rangeID)];
+          if (random().nextBoolean()) {
+            max = prevRange.min;
+          } else {
+            max = prevRange.max;
+          }
+        } else {
+          max = random().nextDouble();
+        }
+
+        if (min > max) {
+          double x = min;
+          min = max;
+          max = x;
+        }
+
+        boolean minIncl;
+        boolean maxIncl;
+
+        long minAsLong = NumericUtils.doubleToSortableLong(min);
+        long maxAsLong = NumericUtils.doubleToSortableLong(max);
+        // NOTE: maxAsLong - minAsLong >= 0 is here to handle the common overflow case!
+        // When the range spans fewer than 2 sortable-long steps, force both ends
+        // inclusive so the range cannot end up empty/invalid:
+        if (maxAsLong - minAsLong >= 0 && maxAsLong - minAsLong < 2) {
+          minIncl = true;
+          maxIncl = true;
+        } else {
+          minIncl = random().nextBoolean();
+          maxIncl = random().nextBoolean();
+        }
+        ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, 
maxIncl);
+
+        // Do "slow but hopefully correct" computation of
+        // expected count:
+        for (int i = 0; i < numDocs; i++) {
+          for (int j = 0; j < values[i].length; j++) {
+            boolean accept = true;
+            if (minIncl) {
+              accept &= values[i][j] >= min;
+            } else {
+              accept &= values[i][j] > min;
+            }
+            if (maxIncl) {
+              accept &= values[i][j] <= max;
+            } else {
+              accept &= values[i][j] < max;
+            }
+            if (accept) {
+              expectedCounts[rangeID]++;
+              minAcceptedValue = Math.min(minAcceptedValue, values[i][j]);
+              maxAcceptedValue = Math.max(maxAcceptedValue, values[i][j]);
+              break; // ensure each doc can contribute at most 1 count to each range
+            }
+          }
+        }
+      }
+      // TODO: fastMatchQuery functionality is not implemented for sandbox faceting yet,
+      // do we need it?
+      /*Query fastMatchFilter;
+      if (random().nextBoolean()) {
+        if (random().nextBoolean()) {
+          fastMatchFilter = DoublePoint.newRangeQuery("field", minValue, maxValue);
+        } else {
+          fastMatchFilter = DoublePoint.newRangeQuery("field", minAcceptedValue, maxAcceptedValue);
+        }
+      } else {
+        fastMatchFilter = null;
+      }*/
+      final MultiDoubleValuesSource mvs = 
MultiDoubleValuesSource.fromDoubleField("field");
+      DoubleRangeFacetCutter doubleRangeFacetCutter =
+          new DoubleRangeFacetCutter("field", mvs, ranges);
+      CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+      FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+          new FacetFieldCollectorManager<>(doubleRangeFacetCutter, null, 
countRecorder);
+      s.search(new MatchAllDocsQuery(), collectorManager);
+
+      // Verify each range's label and count against the brute-force expectation:
+      OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(ranges);
+      FacetResult result =
+          getAllSortByOrd(getRangeOrdinals(ranges), countRecorder, "field", 
ordLabelBiMap);
+      assertEquals(numRange, result.labelValues.length);
+      for (int rangeID = 0; rangeID < numRange; rangeID++) {
+        if (VERBOSE) {
+          System.out.println("  range " + rangeID + " expectedCount=" + 
expectedCounts[rangeID]);
+        }
+        LabelAndValue subNode = result.labelValues[rangeID];
+        assertEquals("r" + rangeID, subNode.label);
+        assertEquals(expectedCounts[rangeID], subNode.value.intValue());
+
+        DoubleRange range = ranges[rangeID];
+
+        // Test drill-down:
+        // randomly via points or doc values; both must agree with the facet count.
+        DrillDownQuery ddq = new DrillDownQuery(config);
+        if (random().nextBoolean()) {
+          ddq.add("field", DoublePoint.newRangeQuery("field", range.min, 
range.max));
+        } else {
+          ddq.add(
+              "field",
+              SortedNumericDocValuesField.newSlowRangeQuery(
+                  "field", Double.doubleToLongBits(range.min), 
Double.doubleToLongBits(range.max)));
+        }
+
+        assertEquals(expectedCounts[rangeID], s.count(ddq));
+      }
+    }
+
+    w.close();
+    IOUtils.close(r, dir);
+  }
+
+  // LUCENE-5178
+  /**
+   * Verifies that docs missing the numeric doc value (every 5th of 100 here) are simply
+   * excluded from all range counts, rather than being treated as zero or causing errors.
+   */
+  public void testMissingValues() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    NumericDocValuesField field = new NumericDocValuesField("field", 0L);
+    doc.add(field);
+    for (long l = 0; l < 100; l++) {
+      if (l % 5 == 0) {
+        // Every 5th doc is missing the value:
+        w.addDocument(new Document());
+        continue;
+      }
+      field.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r);
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, false)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    // Indexed values are the non-multiples of 5 in [1,99], so e.g. "less than 10"
+    // matches 1,2,3,4,6,7,8,9 = 8 docs; 0 and 5 are among the missing docs.
+    // NOTE(review): the top-level value=-5 is reproduced verbatim from the recorder's
+    // output — confirm that sentinel is the intended aggregated value here.
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (8)\n  less 
than or equal to 10 (8)\n  over 90 (8)\n  90 or above (8)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    w.close();
+    IOUtils.close(r, d);
+  }
+
+  /**
+   * Multi-valued variant of {@code testMissingValues}: each doc carries the same value twice
+   * via two SortedNumericDocValuesField instances. The expected counts are identical to the
+   * single-valued test, so a doc whose values fall in a range still contributes exactly one
+   * count per range, and docs missing the field stay out of every range.
+   */
+  public void testMissingValuesMultiValued() throws Exception {
+    Directory d = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), d);
+    Document doc = new Document();
+    // index the same field twice to test multi-valued logic
+    SortedNumericDocValuesField field1 = new 
SortedNumericDocValuesField("field", 0L);
+    SortedNumericDocValuesField field2 = new 
SortedNumericDocValuesField("field", 0L);
+    doc.add(field1);
+    doc.add(field2);
+    for (long l = 0; l < 100; l++) {
+      if (l % 5 == 0) {
+        // Every 5th doc is missing the value:
+        w.addDocument(new Document());
+        continue;
+      }
+      field1.setLongValue(l);
+      field2.setLongValue(l);
+      w.addDocument(doc);
+    }
+
+    IndexReader r = w.getReader();
+
+    IndexSearcher s = newSearcher(r);
+    LongRange[] inputRanges =
+        new LongRange[] {
+          new LongRange("less than 10", 0L, true, 10L, false),
+          new LongRange("less than or equal to 10", 0L, true, 10L, true),
+          new LongRange("over 90", 90L, false, 100L, false),
+          new LongRange("90 or above", 90L, true, 100L, false),
+          new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, false)
+        };
+
+    MultiLongValuesSource valuesSource = 
MultiLongValuesSource.fromLongField("field");
+    LongRangeFacetCutter longRangeFacetCutter =
+        LongRangeFacetCutter.create("field", valuesSource, inputRanges);
+    CountFacetRecorder countRecorder = new 
CountFacetRecorder(random().nextBoolean());
+
+    FacetFieldCollectorManager<CountFacetRecorder> collectorManager =
+        new FacetFieldCollectorManager<>(longRangeFacetCutter, null, 
countRecorder);
+    s.search(new MatchAllDocsQuery(), collectorManager);
+    OrdLabelBiMap ordLabelBiMap = new RangeOrdLabelBiMap(inputRanges);
+
+    // Same expected string as the single-valued test: duplicated values must not
+    // double-count a doc within a range.
+    assertEquals(
+        "dim=field path=[] value=-5 childCount=5\n  less than 10 (8)\n  less 
than or equal to 10 (8)\n  over 90 (8)\n  90 or above (8)\n  over 1000 (0)\n",
+        getAllSortByOrd(getRangeOrdinals(inputRanges), countRecorder, "field", 
ordLabelBiMap)
+            .toString());
+
+    w.close();
+    IOUtils.close(r, d);
+  }
+
+  /// TODO: continue from here

Review Comment:
   What are we continuing?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscribe@lucene.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@lucene.apache.org
For additional commands, e-mail: issues-help@lucene.apache.org


Reply via email to