ACCUMULO-2446: many updates for Accumulo 1.8.0
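
Updates the wikisearch example so it builds and runs against Accumulo 1.8.0: module versions are bumped, the obsolete Hadoop 1.x/2.x build profiles are dropped from ingest/pom.xml, MapReduce job setup moves to the token-based client API (ClientConfiguration, PasswordToken, AccumuloOutputFormat.setConnectorInfo), the buffering record writer now flushes through a BatchWriter instead of writing RFiles directly, the TermWeight and Uid protobuf classes are regenerated, and a sample wikipedia.xml.uno configuration for a local Uno instance is added.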
Project: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/commit/e32632ca
Tree: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/tree/e32632ca
Diff: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/diff/e32632ca

Branch: refs/heads/1.8
Commit: e32632cafc9a15cce716c7cb43423a4e48e0d2c3
Parents: 9c30660
Author: Mike Miller <mmil...@apache.org>
Authored: Thu Dec 1 15:18:30 2016 -0500
Committer: Mike Miller <mmil...@apache.org>
Committed: Tue Dec 13 13:22:02 2016 -0500

----------------------------------------------------------------------
 .gitignore                                         |   4 +
 ingest/bin/ingest.sh                               |   2 +-
 ingest/conf/wikipedia.xml.uno                      |  43 +
 ingest/pom.xml                                     |  57 +-
 .../wikisearch/ingest/WikipediaIngester.java       |  13 +-
 .../ingest/WikipediaPartitionedIngester.java       |  12 +-
 .../output/BufferingRFileRecordWriter.java         |  47 +-
 .../output/SortingRFileOutputFormat.java           |   9 +-
 .../wikisearch/protobuf/TermWeight.java            | 721 ++++++++++------
 .../examples/wikisearch/protobuf/Uid.java          | 838 ++++++++++++-------
 ingest/src/main/protobuf/TermWeight.proto          |   4 +-
 ingest/src/main/protobuf/Uid.proto                 |   4 +-
 pom.xml                                            | 104 +--
 query-war/pom.xml                                  |   2 +-
 query/pom.xml                                      |  95 +-
 .../iterator/DefaultIteratorEnvironment.java       |  22 +
 .../examples/wikisearch/query/Query.java           |   5 +-
 17 files changed, 1168 insertions(+), 814 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 698a832..8a09ac0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,7 @@
 .idea
 **/*.iml
 **/lib
+.project
+.settings/
+.classpath
+

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/bin/ingest.sh
----------------------------------------------------------------------
diff --git a/ingest/bin/ingest.sh b/ingest/bin/ingest.sh
index 73d582d..f9f3495 100755
--- a/ingest/bin/ingest.sh
+++ b/ingest/bin/ingest.sh
@@ -38,7 +38,7 @@ LIBJARS=`echo $CLASSPATH | sed 's/^://' | sed 's/:/,/g'`
 #
 # Map/Reduce job
 #
-JAR=$SCRIPT_DIR/../lib/wikisearch-ingest-1.5.0.jar
+JAR=$SCRIPT_DIR/../lib/wikisearch-ingest-*.jar
 CONF=$SCRIPT_DIR/../conf/wikipedia.xml
 HDFS_DATA_DIR=$1
 export HADOOP_CLASSPATH=$CLASSPATH

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/conf/wikipedia.xml.uno
----------------------------------------------------------------------
diff --git a/ingest/conf/wikipedia.xml.uno b/ingest/conf/wikipedia.xml.uno
new file mode 100644
index 0000000..093551c
--- /dev/null
+++ b/ingest/conf/wikipedia.xml.uno
@@ -0,0 +1,43 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>wikipedia.accumulo.zookeepers</name>
+    <value>localhost:2181</value>
+  </property>
+  <property>
+    <name>wikipedia.accumulo.instance_name</name>
+    <value>uno</value>
+  </property>
+  <property>
+    <name>wikipedia.accumulo.user</name>
+    <value>root</value>
+  </property>
+  <property>
+    <name>wikipedia.accumulo.password</name>
+    <value>secret</value>
+  </property>
+  <property>
+    <name>wikipedia.accumulo.table</name>
+    <value>wikipedia</value>
+  </property>
+  <property>
+    <name>wikipedia.ingest.partitions</name>
+    <value>1</value>
+  </property>
+</configuration>
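
The new wikipedia.xml.uno above is a ready-made configuration for a local Uno development instance; the ingest jobs read these properties out of a Hadoop Configuration via the WikipediaConfiguration helpers that appear in the diffs below. A minimal sketch of loading and inspecting such a file — the resource path here is illustrative, not taken from the repo:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class LoadWikipediaConf {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Illustrative path; ingest.sh passes $SCRIPT_DIR/../conf/wikipedia.xml to the job
        conf.addResource(new Path("ingest/conf/wikipedia.xml"));
        System.out.println(conf.get("wikipedia.accumulo.instance_name")); // "uno" in the sample config
        System.out.println(conf.get("wikipedia.accumulo.zookeepers"));    // "localhost:2181"
      }
    }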

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/pom.xml
----------------------------------------------------------------------
diff --git a/ingest/pom.xml b/ingest/pom.xml
index 1d17e27..426cff2 100644
--- a/ingest/pom.xml
+++ b/ingest/pom.xml
@@ -20,7 +20,7 @@
   <parent>
     <groupId>org.apache.accumulo</groupId>
     <artifactId>accumulo-wikisearch</artifactId>
-    <version>1.5.0</version>
+    <version>1.8.0</version>
   </parent>
   <artifactId>wikisearch-ingest</artifactId>
   <name>wikisearch-ingest</name>
@@ -48,6 +48,12 @@
     <dependency>
      <groupId>org.apache.accumulo</groupId>
      <artifactId>accumulo-core</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-digester</groupId>
+          <artifactId>commons-digester</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
      <groupId>org.apache.lucene</groupId>
@@ -101,53 +107,4 @@
       </plugin>
     </plugins>
   </build>
-  <profiles>
-    <!-- profile for building against Hadoop 1.0.x
-    Activate by not specifying hadoop.profile -->
-    <profile>
-      <id>hadoop-1.0</id>
-      <activation>
-        <property>
-          <name>!hadoop.profile</name>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${version.hadoop}</version>
-          <exclusions>
-            <exclusion>
-              <groupId>commons-logging</groupId>
-              <artifactId>commons-logging</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-      </dependencies>
-    </profile>
-    <!-- profile for building against Hadoop 2.0.x
-    Activate using: mvn -Dhadoop.profile=2.0 -->
-    <profile>
-      <id>hadoop-2.0</id>
-      <activation>
-        <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
-          <version>${version.hadoop}</version>
-          <exclusions>
-            <exclusion>
-              <groupId>org.codehaus.jackson</groupId>
-              <artifactId>jackson-mapper-asl</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java
----------------------------------------------------------------------
diff --git a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java
index d4fa1c6..1a495ed 100644
--- a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java
+++ b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java
@@ -29,13 +29,16 @@ import java.util.regex.Pattern;

 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.IteratorSetting.Column;
 import org.apache.accumulo.core.client.TableExistsException;
 import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
 import org.apache.accumulo.core.client.admin.TableOperations;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
@@ -135,9 +138,9 @@ public class WikipediaIngester extends Configured implements Tool {
     conf.set("mapred.map.tasks.speculative.execution", "false");

     String tablename = WikipediaConfiguration.getTableName(conf);
-
-    String zookeepers = WikipediaConfiguration.getZookeepers(conf);
-    String instanceName = WikipediaConfiguration.getInstanceName(conf);
+    ClientConfiguration clientConfig = new ClientConfiguration();
+    clientConfig.setProperty(ClientProperty.INSTANCE_NAME, WikipediaConfiguration.getInstanceName(conf));
+    clientConfig.setProperty(ClientProperty.INSTANCE_ZK_HOST, WikipediaConfiguration.getZookeepers(conf));

     String user = WikipediaConfiguration.getUser(conf);
     byte[] password = WikipediaConfiguration.getPassword(conf);
@@ -168,8 +171,8 @@ public class WikipediaIngester extends Configured implements Tool {
     job.setMapOutputKeyClass(Text.class);
     job.setMapOutputValueClass(Mutation.class);
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), user, password, true, tablename);
-    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), instanceName, zookeepers);
+    AccumuloOutputFormat.setConnectorInfo(job, user, new PasswordToken(password));
+    AccumuloOutputFormat.setZooKeeperInstance(job, clientConfig);

     return job.waitForCompletion(true) ? 0 : 1;
   }
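
The WikipediaIngester hunks above capture the core client-API migration in this commit: the removed AccumuloOutputFormat.setOutputInfo/setZooKeeperInstance(Configuration, ...) calls, which took a raw byte[] password, give way to the token-based methods that operate on the Job and a ClientConfiguration. A self-contained sketch of the new pattern — instance name, ZooKeeper hosts, and credentials here are placeholders borrowed from the sample uno config, and no default table is set because with a Text,Mutation output the map key names the destination table:

    import org.apache.accumulo.core.client.ClientConfiguration;
    import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
    import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
    import org.apache.accumulo.core.client.security.tokens.PasswordToken;
    import org.apache.accumulo.core.data.Mutation;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;

    public class OutputFormatSetup {
      // Configure a MapReduce job to write Mutations to Accumulo 1.8.
      public static void configure(Job job) throws Exception {
        ClientConfiguration clientConfig = new ClientConfiguration();
        clientConfig.setProperty(ClientProperty.INSTANCE_NAME, "uno");               // placeholder
        clientConfig.setProperty(ClientProperty.INSTANCE_ZK_HOST, "localhost:2181"); // placeholder

        job.setMapOutputKeyClass(Text.class);      // the key names the destination table
        job.setMapOutputValueClass(Mutation.class);
        job.setOutputFormatClass(AccumuloOutputFormat.class);
        AccumuloOutputFormat.setConnectorInfo(job, "root", new PasswordToken("secret")); // placeholder credentials
        AccumuloOutputFormat.setZooKeeperInstance(job, clientConfig);
        AccumuloOutputFormat.setCreateTables(job, true); // mirrors the old setOutputInfo(..., true, ...)
      }
    }

WikipediaPartitionedIngester below receives the identical treatment.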

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitionedIngester.java
----------------------------------------------------------------------
diff --git a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitionedIngester.java b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitionedIngester.java
index 59035dc..841f169 100644
--- a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitionedIngester.java
+++ b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitionedIngester.java
@@ -29,13 +29,16 @@ import java.util.regex.Pattern;

 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.IteratorSetting.Column;
 import org.apache.accumulo.core.client.TableExistsException;
 import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
 import org.apache.accumulo.core.client.admin.TableOperations;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
@@ -236,12 +239,13 @@ public class WikipediaPartitionedIngester extends Configured implements Tool {
       SortingRFileOutputFormat.setPathName(ingestConf, WikipediaConfiguration.bulkIngestDir(ingestConf));
     } else {
       ingestJob.setOutputFormatClass(AccumuloOutputFormat.class);
-      String zookeepers = WikipediaConfiguration.getZookeepers(ingestConf);
-      String instanceName = WikipediaConfiguration.getInstanceName(ingestConf);
+      ClientConfiguration clientConfig = new ClientConfiguration();
+      clientConfig.setProperty(ClientProperty.INSTANCE_NAME, WikipediaConfiguration.getInstanceName(ingestConf));
+      clientConfig.setProperty(ClientProperty.INSTANCE_ZK_HOST, WikipediaConfiguration.getZookeepers(ingestConf));
       String user = WikipediaConfiguration.getUser(ingestConf);
       byte[] password = WikipediaConfiguration.getPassword(ingestConf);
-      AccumuloOutputFormat.setOutputInfo(ingestJob.getConfiguration(), user, password, true, tablename);
-      AccumuloOutputFormat.setZooKeeperInstance(ingestJob.getConfiguration(), instanceName, zookeepers);
+      AccumuloOutputFormat.setConnectorInfo(ingestJob, user, new PasswordToken(password));
+      AccumuloOutputFormat.setZooKeeperInstance(ingestJob, clientConfig);
     }

     return ingestJob.waitForCompletion(true) ? 0 : 1;

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/BufferingRFileRecordWriter.java
----------------------------------------------------------------------
diff --git a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/BufferingRFileRecordWriter.java b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/BufferingRFileRecordWriter.java
index 9b663de..aa6e357 100644
--- a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/BufferingRFileRecordWriter.java
+++ b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/BufferingRFileRecordWriter.java
@@ -22,13 +22,18 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.TreeMap;

+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.BatchWriter;
+import org.apache.accumulo.core.client.BatchWriterConfig;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
 import org.apache.accumulo.core.data.ColumnUpdate;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.file.FileSKVWriter;
-import org.apache.accumulo.core.file.rfile.RFileOperations;
+import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.Text;
@@ -37,12 +42,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;

 final class BufferingRFileRecordWriter extends RecordWriter<Text,Mutation> {
   private final long maxSize;
-  private final AccumuloConfiguration acuconf;
   private final Configuration conf;
-  private final String filenamePrefix;
-  private final String taskID;
-  private final FileSystem fs;
-  private int fileCount = 0;
   private long size;

   private Map<Text,TreeMap<Key,Value>> buffers = new HashMap<Text,TreeMap<Key,Value>>();
@@ -79,31 +79,32 @@ final class BufferingRFileRecordWriter extends RecordWriter<Text,Mutation> {
     if (buffer.size() == 0)
       return;

-    String file = filenamePrefix + "/" + tablename + "/" + taskID + "_" + (fileCount++) + ".rf";
+    Connector conn;
+    try {
+      conn = WikipediaConfiguration.getConnector(conf);
+      BatchWriterConfig bwconfig = new BatchWriterConfig();
+      BatchWriter writer = conn.createBatchWriter(tablename.toString(), bwconfig);
+      for (Entry<Key,Value> e : buffer.entrySet()) {
+        Key k = e.getKey();
+        Mutation m = new Mutation();
+        m.put(k.getColumnFamily(), k.getColumnQualifier(), e.getValue());
+        writer.addMutation(m);
+      }
+      writer.close();
+    } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException e1) {
+      // TODO Auto-generated catch block
+      e1.printStackTrace();
+    }
     // TODO get the table configuration for the given table?
-    FileSKVWriter writer = RFileOperations.getInstance().openWriter(file, fs, conf, acuconf);
-
-    // forget locality groups for now, just write everything to the default
-    writer.startDefaultLocalityGroup();
-
-    for (Entry<Key,Value> e : buffer.entrySet()) {
-      writer.append(e.getKey(), e.getValue());
-    }
-
-    writer.close();

     size -= bufferSize;
     buffer.clear();
     bufferSizes.put(tablename, 0l);
   }

-  BufferingRFileRecordWriter(long maxSize, AccumuloConfiguration acuconf, Configuration conf, String filenamePrefix, String taskID, FileSystem fs) {
+  BufferingRFileRecordWriter(long maxSize, Configuration conf) {
     this.maxSize = maxSize;
-    this.acuconf = acuconf;
     this.conf = conf;
-    this.filenamePrefix = filenamePrefix;
-    this.taskID = taskID;
-    this.fs = fs;
   }

   @Override
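
The rewritten flush path above swaps direct RFile creation for a live BatchWriter obtained from WikipediaConfiguration.getConnector(conf). One thing worth flagging: the committed loop builds each Mutation with the no-argument constructor, so the buffered Key's row is not carried across. A standalone sketch of the same flush idea that does carry the row through — an illustration, not the committed code:

    import java.util.Map.Entry;
    import java.util.TreeMap;

    import org.apache.accumulo.core.client.BatchWriter;
    import org.apache.accumulo.core.client.BatchWriterConfig;
    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Mutation;
    import org.apache.accumulo.core.data.Value;

    public class BufferFlush {
      // Flush a sorted buffer of Key/Value pairs into a table through a BatchWriter.
      static void flush(Connector conn, String table, TreeMap<Key,Value> buffer) throws Exception {
        BatchWriter writer = conn.createBatchWriter(table, new BatchWriterConfig());
        try {
          for (Entry<Key,Value> e : buffer.entrySet()) {
            Key k = e.getKey();
            Mutation m = new Mutation(k.getRow()); // keep the row from the buffered Key
            m.put(k.getColumnFamily(), k.getColumnQualifier(), e.getValue());
            writer.addMutation(m);
          }
        } finally {
          writer.close(); // close() flushes any queued mutations
        }
        buffer.clear();
      }
    }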

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/SortingRFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/SortingRFileOutputFormat.java b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/SortingRFileOutputFormat.java
index 1fa8fdc..c54db06 100644
--- a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/SortingRFileOutputFormat.java
+++ b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/SortingRFileOutputFormat.java
@@ -105,17 +105,10 @@ public class SortingRFileOutputFormat extends OutputFormat<Text,Mutation> {
     // grab the configuration
     final Configuration conf = attempt.getConfiguration();

-    // create a filename
-    final String filenamePrefix = getPathName(conf);
-    final String taskID = attempt.getTaskAttemptID().toString();

     // grab the max size
     final long maxSize = getMaxBufferSize(conf);

-    // grab the FileSystem
-    final FileSystem fs = FileSystem.get(conf);
-
-    // create a default AccumuloConfiguration
-    final AccumuloConfiguration acuconf = AccumuloConfiguration.getDefaultConfiguration();
-
-    return new BufferingRFileRecordWriter(maxSize, acuconf, conf, filenamePrefix, taskID, fs);
+    return new BufferingRFileRecordWriter(maxSize, conf);
   }
 }

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/e32632ca/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/protobuf/TermWeight.java
----------------------------------------------------------------------
diff --git a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/protobuf/TermWeight.java b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/protobuf/TermWeight.java
index bf5133f..7ae7aca 100644
--- a/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/protobuf/TermWeight.java
+++ b/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/protobuf/TermWeight.java
@@ -1,19 +1,3 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and - * limitations under the License. - */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: TermWeight.proto @@ -21,98 +5,239 @@ package org.apache.accumulo.examples.wikisearch.protobuf; public final class TermWeight { private TermWeight() {} - - public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {} - - public static final class Info extends com.google.protobuf.GeneratedMessage { + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface InfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required float normalizedTermFrequency = 1; + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ + boolean hasNormalizedTermFrequency(); + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ + float getNormalizedTermFrequency(); + + // repeated uint32 wordOffset = 2; + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ + java.util.List<java.lang.Integer> getWordOffsetList(); + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ + int getWordOffsetCount(); + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ + int getWordOffset(int index); + } + /** + * Protobuf type {@code org.apache.accumulo.examples.wikisearch.protobuf.Info} + */ + public static final class Info extends + com.google.protobuf.GeneratedMessage + implements InfoOrBuilder { // Use Info.newBuilder() to construct. - private Info() { - initFields(); + private Info(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); } - - private Info(boolean noInit) {} - + private Info(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private static final Info defaultInstance; - public static Info getDefaultInstance() { return defaultInstance; } - + public Info getDefaultInstanceForType() { return defaultInstance; } - - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internal_static_protobuf_Info_descriptor; + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Info( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 13: { + bitField0_ |= 0x00000001; + normalizedTermFrequency_ = input.readFloat(); + break; + } + case 16: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + wordOffset_ = new java.util.ArrayList<java.lang.Integer>(); + mutable_bitField0_ |= 0x00000002; + } + wordOffset_.add(input.readUInt32()); + break; + } + case 18: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && 
input.getBytesUntilLimit() > 0) { + wordOffset_ = new java.util.ArrayList<java.lang.Integer>(); + mutable_bitField0_ |= 0x00000002; + } + while (input.getBytesUntilLimit() > 0) { + wordOffset_.add(input.readUInt32()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + wordOffset_ = java.util.Collections.unmodifiableList(wordOffset_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_descriptor; } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internal_static_protobuf_Info_fieldAccessorTable; + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.class, org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.Builder.class); } - + + public static com.google.protobuf.Parser<Info> PARSER = + new com.google.protobuf.AbstractParser<Info>() { + public Info parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Info(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<Info> getParserForType() { + return PARSER; + } + + private int bitField0_; // required float normalizedTermFrequency = 1; public static final int NORMALIZEDTERMFREQUENCY_FIELD_NUMBER = 1; - private boolean hasNormalizedTermFrequency; - private float normalizedTermFrequency_ = 0F; - + private float normalizedTermFrequency_; + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ public boolean hasNormalizedTermFrequency() { - return hasNormalizedTermFrequency; + return ((bitField0_ & 0x00000001) == 0x00000001); } - + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ public float getNormalizedTermFrequency() { return normalizedTermFrequency_; } - + // repeated uint32 wordOffset = 2; public static final int WORDOFFSET_FIELD_NUMBER = 2; - private java.util.List<java.lang.Integer> wordOffset_ = java.util.Collections.emptyList(); - - public java.util.List<java.lang.Integer> getWordOffsetList() { + private java.util.List<java.lang.Integer> wordOffset_; + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ + public java.util.List<java.lang.Integer> + getWordOffsetList() { return wordOffset_; } - + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ public int getWordOffsetCount() { return wordOffset_.size(); } - + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ public int getWordOffset(int index) { return wordOffset_.get(index); } - - private void initFields() {} - + + private void 
initFields() { + normalizedTermFrequency_ = 0F; + wordOffset_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; public final boolean isInitialized() { - if (!hasNormalizedTermFrequency) + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasNormalizedTermFrequency()) { + memoizedIsInitialized = 0; return false; + } + memoizedIsInitialized = 1; return true; } - - public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { getSerializedSize(); - if (hasNormalizedTermFrequency()) { - output.writeFloat(1, getNormalizedTermFrequency()); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeFloat(1, normalizedTermFrequency_); } - for (int element : getWordOffsetList()) { - output.writeUInt32(2, element); + for (int i = 0; i < wordOffset_.size(); i++) { + output.writeUInt32(2, wordOffset_.get(i)); } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; - public int getSerializedSize() { int size = memoizedSerializedSize; - if (size != -1) - return size; - + if (size != -1) return size; + size = 0; - if (hasNormalizedTermFrequency()) { - size += com.google.protobuf.CodedOutputStream.computeFloatSize(1, getNormalizedTermFrequency()); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeFloatSize(1, normalizedTermFrequency_); } { int dataSize = 0; - for (int element : getWordOffsetList()) { - dataSize += com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(element); + for (int i = 0; i < wordOffset_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeUInt32SizeNoTag(wordOffset_.get(i)); } size += dataSize; size += 1 * getWordOffsetList().size(); @@ -121,304 +246,368 @@ public final class TermWeight { memoizedSerializedSize = size; return size; } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom( + com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry).buildParsed(); + return PARSER.parseFrom(data); } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); } - - public static 
org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry).buildParsed(); + return PARSER.parseFrom(data); } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(java.io.InputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry).buildParsed(); - } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseFrom(input); } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseDelimitedFrom(java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } + return PARSER.parseFrom(input, extensionRegistry); } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); } - - public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry).buildParsed(); + return PARSER.parseDelimitedFrom(input, extensionRegistry); } - - public static Builder newBuilder() { - return Builder.create(); + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); } - - public Builder newBuilderForType() 
{ - return newBuilder(); + public static org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); } - + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info prototype) { return newBuilder().mergeFrom(prototype); } - - public Builder toBuilder() { - return newBuilder(this); - } - - public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> { - private org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info result; - - // Construct using protobuf.TermWeight.Info.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info(); - return builder; + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.accumulo.examples.wikisearch.protobuf.Info} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.InfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_descriptor; } - - protected org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info internalGetResult() { - return result; + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.class, org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.Builder.class); } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException("Cannot call clear() after build()."); + + // Construct using org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } - result = new org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info(); + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + normalizedTermFrequency_ = 0F; + bitField0_ = (bitField0_ & ~0x00000001); + wordOffset_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); return this; } - + public Builder clone() { - return create().mergeFrom(result); + return create().mergeFrom(buildPartial()); } - - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.getDescriptor(); + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_descriptor; } - + public org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info getDefaultInstanceForType() { return org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.getDefaultInstance(); } - - public boolean isInitialized() { - return result.isInitialized(); - } - + public org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info build() { - if (result != null && !isInitialized()) { + org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info result = buildPartial(); + if (!result.isInitialized()) { throw newUninitializedMessageException(result); } - return buildPartial(); - } - - private org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException(result).asInvalidProtocolBufferException(); - } - return buildPartial(); + return result; } - + public org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info buildPartial() { - if (result == null) { - throw new IllegalStateException("build() has already been called on this Builder."); + org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info result = new org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; } - if (result.wordOffset_ != java.util.Collections.EMPTY_LIST) { - result.wordOffset_ = java.util.Collections.unmodifiableList(result.wordOffset_); + result.normalizedTermFrequency_ = normalizedTermFrequency_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + wordOffset_ = java.util.Collections.unmodifiableList(wordOffset_); + bitField0_ = (bitField0_ & ~0x00000002); } - org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info returnMe = result; - result = null; - return returnMe; + result.wordOffset_ = wordOffset_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info) { - return mergeFrom((org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info) other); + return mergeFrom((org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info)other); } else { super.mergeFrom(other); return this; } } - + public Builder mergeFrom(org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info other) { - if (other == org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.getDefaultInstance()) - return this; + if (other == org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.getDefaultInstance()) return this; if (other.hasNormalizedTermFrequency()) { setNormalizedTermFrequency(other.getNormalizedTermFrequency()); } if (!other.wordOffset_.isEmpty()) { - if (result.wordOffset_.isEmpty()) { - result.wordOffset_ = new java.util.ArrayList<java.lang.Integer>(); + if (wordOffset_.isEmpty()) { + wordOffset_ = other.wordOffset_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureWordOffsetIsMutable(); + 
wordOffset_.addAll(other.wordOffset_); } - result.wordOffset_.addAll(other.wordOffset_); + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } - - public Builder mergeFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + + public final boolean isInitialized() { + if (!hasNormalizedTermFrequency()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 13: { - setNormalizedTermFrequency(input.readFloat()); - break; - } - case 16: { - addWordOffset(input.readUInt32()); - break; - } - case 18: { - int length = input.readRawVarint32(); - int limit = input.pushLimit(length); - while (input.getBytesUntilLimit() > 0) { - addWordOffset(input.readUInt32()); - } - input.popLimit(limit); - break; - } + org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); } } + return this; } - + private int bitField0_; + // required float normalizedTermFrequency = 1; + private float normalizedTermFrequency_ ; + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ public boolean hasNormalizedTermFrequency() { - return result.hasNormalizedTermFrequency(); + return ((bitField0_ & 0x00000001) == 0x00000001); } - + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ public float getNormalizedTermFrequency() { - return result.getNormalizedTermFrequency(); + return normalizedTermFrequency_; } - + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ public Builder setNormalizedTermFrequency(float value) { - result.hasNormalizedTermFrequency = true; - result.normalizedTermFrequency_ = value; + bitField0_ |= 0x00000001; + normalizedTermFrequency_ = value; + onChanged(); return this; } - + /** + * <code>required float normalizedTermFrequency = 1;</code> + */ public Builder clearNormalizedTermFrequency() { - result.hasNormalizedTermFrequency = false; - result.normalizedTermFrequency_ = 0F; + bitField0_ = (bitField0_ & ~0x00000001); + normalizedTermFrequency_ = 0F; + onChanged(); return this; } - + // repeated uint32 wordOffset = 2; - public java.util.List<java.lang.Integer> getWordOffsetList() { - return java.util.Collections.unmodifiableList(result.wordOffset_); + private java.util.List<java.lang.Integer> wordOffset_ = java.util.Collections.emptyList(); + private void ensureWordOffsetIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + wordOffset_ = new java.util.ArrayList<java.lang.Integer>(wordOffset_); + bitField0_ |= 0x00000002; + } + } + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ + public 
java.util.List<java.lang.Integer> + getWordOffsetList() { + return java.util.Collections.unmodifiableList(wordOffset_); } - + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ public int getWordOffsetCount() { - return result.getWordOffsetCount(); + return wordOffset_.size(); } - + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ public int getWordOffset(int index) { - return result.getWordOffset(index); + return wordOffset_.get(index); } - - public Builder setWordOffset(int index, int value) { - result.wordOffset_.set(index, value); + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ + public Builder setWordOffset( + int index, int value) { + ensureWordOffsetIsMutable(); + wordOffset_.set(index, value); + onChanged(); return this; } - + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ public Builder addWordOffset(int value) { - if (result.wordOffset_.isEmpty()) { - result.wordOffset_ = new java.util.ArrayList<java.lang.Integer>(); - } - result.wordOffset_.add(value); + ensureWordOffsetIsMutable(); + wordOffset_.add(value); + onChanged(); return this; } - - public Builder addAllWordOffset(java.lang.Iterable<? extends java.lang.Integer> values) { - if (result.wordOffset_.isEmpty()) { - result.wordOffset_ = new java.util.ArrayList<java.lang.Integer>(); - } - super.addAll(values, result.wordOffset_); + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ + public Builder addAllWordOffset( + java.lang.Iterable<? extends java.lang.Integer> values) { + ensureWordOffsetIsMutable(); + super.addAll(values, wordOffset_); + onChanged(); return this; } - + /** + * <code>repeated uint32 wordOffset = 2;</code> + */ public Builder clearWordOffset() { - result.wordOffset_ = java.util.Collections.emptyList(); + wordOffset_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); return this; } - - // @@protoc_insertion_point(builder_scope:protobuf.Info) + + // @@protoc_insertion_point(builder_scope:org.apache.accumulo.examples.wikisearch.protobuf.Info) } - + static { defaultInstance = new Info(true); - org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.internalForceInit(); defaultInstance.initFields(); } - - // @@protoc_insertion_point(class_scope:protobuf.Info) + + // @@protoc_insertion_point(class_scope:org.apache.accumulo.examples.wikisearch.protobuf.Info) } - - private static com.google.protobuf.Descriptors.Descriptor internal_static_protobuf_Info_descriptor; - private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_protobuf_Info_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { return descriptor; } - - private static com.google.protobuf.Descriptors.FileDescriptor descriptor; + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; static { - java.lang.String[] descriptorData = {"\n\020TermWeight.proto\022\010protobuf\";\n\004Info\022\037\n\027" - + "normalizedTermFrequency\030\001 \002(\002\022\022\n\nwordOff" + "set\030\002 \003(\rB\014\n\010protobufH\001"}; - 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors(com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_protobuf_Info_descriptor = getDescriptor().getMessageTypes().get(0); - internal_static_protobuf_Info_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_protobuf_Info_descriptor, new java.lang.String[] {"NormalizedTermFrequency", "WordOffset",}, org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.class, - org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.Builder.class); - return null; - } + java.lang.String[] descriptorData = { + "\n\020TermWeight.proto\0220org.apache.accumulo." + + "examples.wikisearch.protobuf\";\n\004Info\022\037\n\027" + + "normalizedTermFrequency\030\001 \002(\002\022\022\n\nwordOff" + + "set\030\002 \003(\rB4\n0org.apache.accumulo.example" + + "s.wikisearch.protobufH\001" }; - com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] {}, - assigner); + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_accumulo_examples_wikisearch_protobuf_Info_descriptor, + new java.lang.String[] { "NormalizedTermFrequency", "WordOffset", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); } - - public static void internalForceInit() {} - + // @@protoc_insertion_point(outer_class_scope) }
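
TermWeight.java above (and, per the diffstat, Uid.java) is regenerated output from a newer protoc in the protobuf 2.5 style — a static PARSER, an InfoOrBuilder interface, bitField0_ presence tracking, and fully qualified descriptor names — while the builder surface that callers use is unchanged. A small round-trip sketch against the regenerated class:

    import org.apache.accumulo.examples.wikisearch.protobuf.TermWeight;

    public class TermWeightRoundTrip {
      public static void main(String[] args) throws Exception {
        TermWeight.Info info = TermWeight.Info.newBuilder()
            .setNormalizedTermFrequency(0.25f) // required field; build() fails without it
            .addWordOffset(11)
            .addWordOffset(42)
            .build();

        byte[] bytes = info.toByteArray();                         // serialize
        TermWeight.Info parsed = TermWeight.Info.parseFrom(bytes); // parse via the new PARSER
        System.out.println(parsed.getWordOffsetList());            // [11, 42]
      }
    }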