Hi,
we are currently facing a new problem while reindexing one of our Solr 4.4 instances:
We are using Solr 4.4 and pulling the data via DIH out of a MySQL server.
The data is constantly growing.
We have reindexed our data many times without any trouble.
The problem is reproducible.
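For reference, the DIH setup looks roughly like this; this is a simplified sketch, and the connection details, entity, table and field names are placeholders, not our real schema:

<dataConfig>
  <!-- MySQL connection; host and credentials are placeholders -->
  <dataSource type="JdbcDataSource"
              driver="com.mysql.jdbc.Driver"
              url="jdbc:mysql://dbhost:3306/ourdb"
              user="solr" password="***"/>
  <document>
    <!-- one entity per SQL query; the query below is only illustrative -->
    <entity name="item" query="SELECT id, title, body FROM items">
      <field column="id"    name="id"/>
      <field column="title" name="title"/>
      <field column="body"  name="body"/>
    </entity>
  </document>
</dataConfig>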
There is another server, configured exactly the same way (via git), which was reindexed 3 days ago against the same MySQL server without problems.
But: that server has more RAM and more powerful CPUs than the one causing headaches today.
The error log says:
java.lang.ArrayIndexOutOfBoundsException
    at org.apache.lucene.util.packed.Packed64SingleBlock$Packed64SingleBlock4.get(Packed64SingleBlock.java:336)
    at org.apache.lucene.util.packed.GrowableWriter.get(GrowableWriter.java:56)
    at org.apache.lucene.util.packed.AbstractPagedMutable.get(AbstractPagedMutable.java:88)
    at org.apache.lucene.util.fst.NodeHash.addNew(NodeHash.java:151)
    at org.apache.lucene.util.fst.NodeHash.rehash(NodeHash.java:169)
    at org.apache.lucene.util.fst.NodeHash.add(NodeHash.java:133)
    at org.apache.lucene.util.fst.Builder.compileNode(Builder.java:197)
    at org.apache.lucene.util.fst.Builder.freezeTail(Builder.java:289)
    at org.apache.lucene.util.fst.Builder.add(Builder.java:394)
    at org.apache.lucene.codecs.BlockTreeTermsWriter$PendingBlock.append(BlockTreeTermsWriter.java:474)
    at org.apache.lucene.codecs.BlockTreeTermsWriter$PendingBlock.compileIndex(BlockTreeTermsWriter.java:438)
    at org.apache.lucene.codecs.BlockTreeTermsWriter$TermsWriter.writeBlocks(BlockTreeTermsWriter.java:569)
    at org.apache.lucene.codecs.BlockTreeTermsWriter$TermsWriter$FindBlocks.freeze(BlockTreeTermsWriter.java:544)
    at org.apache.lucene.util.fst.Builder.freezeTail(Builder.java:214)
    at org.apache.lucene.util.fst.Builder.finish(Builder.java:463)
    at org.apache.lucene.codecs.BlockTreeTermsWriter$TermsWriter.finish(BlockTreeTermsWriter.java:1010)
    at org.apache.lucene.index.FreqProxTermsWriterPerField.flush(FreqProxTermsWriterPerField.java:553)
    at org.apache.lucene.index.FreqProxTermsWriter.flush(FreqProxTermsWriter.java:85)
    at org.apache.lucene.index.TermsHash.flush(TermsHash.java:116)
    at org.apache.lucene.index.DocInverter.flush(DocInverter.java:53)
    at org.apache.lucene.index.DocFieldProcessor.flush(DocFieldProcessor.java:81)
    at org.apache.lucene.index.DocumentsWriterPerThread.flush(DocumentsWriterPerThread.java:501)
    at org.apache.lucene.index.DocumentsWriter.doFlush(DocumentsWriter.java:478)
    at org.apache.lucene.index.DocumentsWriter.postUpdate(DocumentsWriter.java:372)
    at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:445)
    at org.apache.lucene.index.IndexWriter.updateDocument(IndexWriter.java)
    at org.apache.solr.update.DirectUpdateHandler2.addDoc(DirectUpdateHandler2.java:212)
    at org.apache.solr.update.processor.RunUpdateProcessor.processAdd(RunUpdateProcessorFactory.java:69)
    at org.apache.solr.update.processor.UpdateRequestProcessor.processAdd(UpdateRequestProcessor.java:51)
    at org.apache.solr.update.processor.DistributedUpdateProcessor.versionAdd(DistributedUpdateProcessor.java:572)
    at org.apache.solr.update.processor.DistributedUpdateProcessor.processAdd(DistributedUpdateProcessor.java:435)
    at org.apache.solr.update.processor.LogUpdateProcessor.processAdd(LogUpdateProcessorFactory.java:100)
    at org.apache.solr.handler.dataimport.SolrWriter.upload(SolrWriter.java:70)
    at org.apache.solr.handler.dataimport.DataImportHandler$1.upload(DataImportHandler.java:237)
    at org.apache.solr.handler.dataimport.DocBuilder.buildDocument(DocBuilder.java:504)
    at org.apache.solr.handler.dataimport.DocBuilder.buildDocument(DocBuilder.java:408)
    at org.apache.solr.handler.dataimport.DocBuilder.doFullDump(DocBuilder.java:323)
    at org.apache.solr.handler.dataimport.DocBuilder.execute(DocBuilder.java:231)
    at org.apache.solr.handler.dataimport.DataImporter.doFullImport(DataImporter.java:411)
    at org.apache.solr.handler.dataimport.DataImporter.runCmd(DataImporter.java:476)
    at org.apache.solr.handler.dataimport.DataImporter$1.run(DataImporter.java:457)
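For completeness: the reindex is triggered as a plain DIH full import, i.e. a request along these lines (host, port and core name are placeholders):

http://localhost:8983/solr/ourcore/dataimport?command=full-import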
Any suggestions are welcome.
Best regards
Ralf