Updated Branches:
  refs/heads/master 2e4ce5e52 -> c3befc23d

ACCUMULO-1854 Remove compiler warnings


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/f8e14c79
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/f8e14c79
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/f8e14c79

Branch: refs/heads/master
Commit: f8e14c794992bd5d8d530b50338b16436088c243
Parents: dd55dc7
Author: Christopher Tubbs <ctubb...@apache.org>
Authored: Tue Nov 26 13:16:17 2013 -0500
Committer: Christopher Tubbs <ctubb...@apache.org>
Committed: Tue Nov 26 13:16:17 2013 -0500

----------------------------------------------------------------------
 .../core/client/mapreduce/RangeInputSplit.java  | 81 ++++++++++----------
 .../mapreduce/AccumuloInputFormatTest.java      | 18 +++--
 .../accumulo/test/functional/ExamplesIT.java    | 27 ++++---
 3 files changed, 70 insertions(+), 56 deletions(-)
----------------------------------------------------------------------
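For context, the compiler warnings removed by this commit fall into three common javac/IDE categories: an unused import (java.nio.charset.Charset), missing @Override annotations on methods implementing InputSplit/Writable, and a call to the deprecated Job(Configuration, String) constructor in the test, which is suppressed rather than replaced. The sketch below is a hypothetical, self-contained illustration of the two annotation-based fixes; the class, method, and variable names are invented and are not part of this commit.

----------------------------------------------------------------------
import java.util.Date;

// Hypothetical example (not Accumulo code): shows the same annotation-based
// warning fixes this commit applies.
public class WarningFixesExample implements Runnable {

  @Override // clears the "missing @Override annotation" warning many IDEs report
  public void run() {
    // Date(int, int, int) has been deprecated since JDK 1.1; suppress the
    // deprecation warning only on this declaration, mirroring the
    // @SuppressWarnings("deprecation") added around new Job(...) in
    // AccumuloInputFormatTest.
    @SuppressWarnings("deprecation")
    Date commitDate = new Date(113, 10, 26); // 2013-11-26
    System.out.println(commitDate);
  }

  public static void main(String[] args) {
    new WarningFixesExample().run();
  }
}
----------------------------------------------------------------------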


http://git-wip-us.apache.org/repos/asf/accumulo/blob/f8e14c79/core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
index 59cc8d8..98b1a32 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
@@ -20,7 +20,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.math.BigInteger;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
@@ -132,6 +131,7 @@ public class RangeInputSplit extends InputSplit implements Writable {
   /**
    * This implementation of length is only an estimate, it does not provide exact values. Do not have your code rely on this return value.
    */
+  @Override
   public long getLength() throws IOException {
     Text startRow = range.isInfiniteStartKey() ? new Text(new byte[] {Byte.MIN_VALUE}) : range.getStartKey().getRow();
     Text stopRow = range.isInfiniteStopKey() ? new Text(new byte[] {Byte.MAX_VALUE}) : range.getEndKey().getRow();
@@ -151,10 +151,12 @@ public class RangeInputSplit extends InputSplit implements Writable {
     return diff + 1;
   }
 
+  @Override
   public String[] getLocations() throws IOException {
     return locations;
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     range.readFields(in);
     tableName = in.readUTF();
@@ -163,42 +165,42 @@ public class RangeInputSplit extends InputSplit implements Writable {
     locations = new String[numLocs];
     for (int i = 0; i < numLocs; ++i)
       locations[i] = in.readUTF();
-    
+
     if (in.readBoolean()) {
       isolatedScan = in.readBoolean();
     }
-    
+
     if (in.readBoolean()) {
       offline = in.readBoolean();
     }
-    
+
     if (in.readBoolean()) {
       localIterators = in.readBoolean();
     }
-    
+
     if (in.readBoolean()) {
       mockInstance = in.readBoolean();
     }
-    
+
     if (in.readBoolean()) {
       int numColumns = in.readInt();
       List<String> columns = new ArrayList<String>(numColumns);
       for (int i = 0; i < numColumns; i++) {
         columns.add(in.readUTF());
       }
-      
+
       fetchedColumns = InputConfigurator.deserializeFetchedColumns(columns);
     }
-    
+
     if (in.readBoolean()) {
       String strAuths = in.readUTF();
       auths = new Authorizations(strAuths.getBytes(Constants.UTF8));
     }
-    
+
     if (in.readBoolean()) {
       principal = in.readUTF();
     }
-    
+
     if (in.readBoolean()) {
       int ordinal = in.readInt();
       this.tokenSource = TokenSource.values()[ordinal];
@@ -208,32 +210,33 @@ public class RangeInputSplit extends InputSplit implements Writable {
           String tokenClass = in.readUTF();
           byte[] base64TokenBytes = in.readUTF().getBytes(Constants.UTF8);
           byte[] tokenBytes = Base64.decodeBase64(base64TokenBytes);
-          
+
           this.token = AuthenticationTokenSerializer.deserialize(tokenClass, tokenBytes);
           break;
-          
+
         case FILE:
           this.tokenFile = in.readUTF();
-          
+
           break;
         default:
-          throw new IOException("Cannot parse unknown TokenSource ordinal");      
+          throw new IOException("Cannot parse unknown TokenSource ordinal");
       }
     }
-    
+
     if (in.readBoolean()) {
       instanceName = in.readUTF();
     }
-    
+
     if (in.readBoolean()) {
       zooKeepers = in.readUTF();
     }
-    
+
     if (in.readBoolean()) {
       level = Level.toLevel(in.readInt());
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     range.write(out);
     out.writeUTF(tableName);
@@ -241,27 +244,27 @@ public class RangeInputSplit extends InputSplit implements Writable {
     out.writeInt(locations.length);
     for (int i = 0; i < locations.length; ++i)
       out.writeUTF(locations[i]);
-    
+
     out.writeBoolean(null != isolatedScan);
     if (null != isolatedScan) {
       out.writeBoolean(isolatedScan);
     }
-    
+
     out.writeBoolean(null != offline);
     if (null != offline) {
       out.writeBoolean(offline);
     }
-    
+
     out.writeBoolean(null != localIterators);
     if (null != localIterators) {
       out.writeBoolean(localIterators);
     }
-    
+
     out.writeBoolean(null != mockInstance);
     if (null != mockInstance) {
       out.writeBoolean(mockInstance);
     }
-    
+
     out.writeBoolean(null != fetchedColumns);
     if (null != fetchedColumns) {
       String[] cols = InputConfigurator.serializeColumns(fetchedColumns);
@@ -270,21 +273,21 @@ public class RangeInputSplit extends InputSplit implements Writable {
         out.writeUTF(col);
       }
     }
-    
+
     out.writeBoolean(null != auths);
     if (null != auths) {
       out.writeUTF(auths.serialize());
     }
-    
+
     out.writeBoolean(null != principal);
     if (null != principal) {
       out.writeUTF(principal);
     }
-    
+
     out.writeBoolean(null != tokenSource);
     if (null != tokenSource) {
       out.writeInt(tokenSource.ordinal());
-      
+
       if (null != token && null != tokenFile) {
         throw new IOException("Cannot use both inline AuthenticationToken and 
file-based AuthenticationToken");
       } else if (null != token) {
@@ -294,17 +297,17 @@ public class RangeInputSplit extends InputSplit implements Writable {
         out.writeUTF(tokenFile);
       }
     }
-    
+
     out.writeBoolean(null != instanceName);
     if (null != instanceName) {
       out.writeUTF(instanceName);
     }
-    
+
     out.writeBoolean(null != zooKeepers);
     if (null != zooKeepers) {
       out.writeUTF(zooKeepers);
     }
-    
+
     out.writeBoolean(null != level);
     if (null != level) {
       out.writeInt(level.toInt());
@@ -350,20 +353,20 @@ public class RangeInputSplit extends InputSplit implements Writable {
   public String getTableId() {
     return tableId;
   }
-  
+
   public Instance getInstance() {
     if (null == instanceName) {
       return null;
     }
-    
-    if (isMockInstance()) {  
+
+    if (isMockInstance()) {
       return new MockInstance(getInstanceName());
     }
-    
+
     if (null == zooKeepers) {
       return null;
     }
-    
+
     return new ZooKeeperInstance(ClientConfiguration.loadDefault().withInstance(getInstanceName()).withZkHosts(getZooKeepers()));
   }
 
@@ -390,16 +393,16 @@ public class RangeInputSplit extends InputSplit implements Writable {
   public void setPrincipal(String principal) {
     this.principal = principal;
   }
-  
+
   public AuthenticationToken getToken() {
     return token;
   }
-  
+
   public void setToken(AuthenticationToken token) {
     this.tokenSource = TokenSource.INLINE;
     this.token = token;
   }
-  
+
   public void setToken(String tokenFile) {
     this.tokenSource = TokenSource.FILE;
     this.tokenFile = tokenFile;
@@ -456,7 +459,7 @@ public class RangeInputSplit extends InputSplit implements Writable {
   public Set<Pair<Text,Text>> getFetchedColumns() {
     return fetchedColumns;
   }
-  
+
   public void setFetchedColumns(Collection<Pair<Text,Text>> fetchedColumns) {
     this.fetchedColumns = new HashSet<Pair<Text,Text>>();
     for (Pair<Text,Text> columns : fetchedColumns) {
@@ -479,7 +482,7 @@ public class RangeInputSplit extends InputSplit implements Writable {
   public Level getLogLevel() {
     return level;
   }
-  
+
   public void setLogLevel(Level level) {
     this.level = level;
   }
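
Most of the readFields/write hunks above only strip trailing whitespace, but they make the serialization pattern easier to read: every optional field is written behind a boolean presence flag (out.writeBoolean(null != field)) and read back only when that flag is set. A minimal sketch of the same presence-flag pattern follows; OptionalStringWritable is a hypothetical class, assuming only Hadoop's org.apache.hadoop.io.Writable interface, not part of this commit.

----------------------------------------------------------------------
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Hypothetical minimal Writable using the presence-flag pattern seen in
// RangeInputSplit.write()/readFields() for optional (nullable) fields.
public class OptionalStringWritable implements Writable {

  private String value; // may legitimately be null

  public void setValue(String value) {
    this.value = value;
  }

  public String getValue() {
    return value;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeBoolean(null != value); // presence flag, as in out.writeBoolean(null != zooKeepers)
    if (null != value) {
      out.writeUTF(value);
    }
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    if (in.readBoolean()) { // payload was written only when the flag is true
      value = in.readUTF();
    } else {
      value = null; // reset in case this instance is reused across records
    }
  }
}
----------------------------------------------------------------------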

http://git-wip-us.apache.org/repos/asf/accumulo/blob/f8e14c79/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
index 397b203..20b5f95 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
@@ -302,6 +302,7 @@ public class AccumuloInputFormatTest {
 
   @Test
   public void testCorrectRangeInputSplits() throws Exception {
+    @SuppressWarnings("deprecation")
     Job job = new Job(new Configuration(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
 
     String username = "user", table = "table", instance = "instance";
@@ -363,10 +364,13 @@ public class AccumuloInputFormatTest {
     }
     bw.close();
 
-    Assert.assertEquals(0, MRTester.main(new String[] {user, "", "testtable", "testPartialInputSplitDelegationToConfiguration",
-        EmptySplitsAccumuloInputFormat.class.getCanonicalName()}));
+    Assert.assertEquals(
+        0,
+        MRTester.main(new String[] {user, "", "testtable", "testPartialInputSplitDelegationToConfiguration",
+            EmptySplitsAccumuloInputFormat.class.getCanonicalName()}));
     assertNull(e1);
-    assertNull(e2);  }
+    assertNull(e2);
+  }
 
   @Test
   public void testPartialFailedInputSplitDelegationToConfiguration() throws Exception {
@@ -384,9 +388,11 @@ public class AccumuloInputFormatTest {
     }
     bw.close();
 
-    // We should fail before we even get into the Mapper because we can't make the RecordReader 
-    Assert.assertEquals(1, MRTester.main(new String[] {user, "", "testtable", "testPartialFailedInputSplitDelegationToConfiguration",
-        BadPasswordSplitsAccumuloInputFormat.class.getCanonicalName()}));
+    // We should fail before we even get into the Mapper because we can't make the RecordReader
+    Assert.assertEquals(
+        1,
+        MRTester.main(new String[] {user, "", "testtable", "testPartialFailedInputSplitDelegationToConfiguration",
+            BadPasswordSplitsAccumuloInputFormat.class.getCanonicalName()}));
     assertNull(e1);
     assertNull(e2);
   }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/f8e14c79/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java b/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
index 84803f1..16b425f 100644
--- a/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
@@ -74,7 +74,6 @@ import org.apache.accumulo.minicluster.MiniAccumuloCluster.LogWriter;
 import org.apache.accumulo.minicluster.MiniAccumuloConfig;
 import org.apache.accumulo.server.util.Admin;
 import org.apache.accumulo.test.TestIngest;
-import org.apache.accumulo.test.VerifyIngest;
 import org.apache.accumulo.tracer.TraceServer;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -173,14 +172,16 @@ public class ExamplesIT extends ConfigurableMacIT {
     // try the speed test a couple times in case the system is loaded with other tests
     for (int i = 0; i < 2; i++) {
       long now = System.currentTimeMillis();
-      assertEquals(0,  cluster.exec(RandomBatchScanner.class,"--seed", "7", "-i", instance, "-z",
-          keepers, "-u", user, "-p", ROOT_PASSWORD, "--num", "10000", "--min", "0", "--max", "1000000000", "--size", "50",
-          "--scanThreads", "4","-t", "bloom_test").waitFor());
+      assertEquals(
+          0,
+          cluster.exec(RandomBatchScanner.class, "--seed", "7", "-i", instance, "-z", keepers, "-u", user, "-p", ROOT_PASSWORD, "--num", "10000", "--min", "0",
+              "--max", "1000000000", "--size", "50", "--scanThreads", "4", "-t", "bloom_test").waitFor());
       diff = System.currentTimeMillis() - now;
       now = System.currentTimeMillis();
-      assertEquals(1,  cluster.exec(RandomBatchScanner.class,"--seed", "8", "-i", instance, "-z",
-          keepers, "-u", user, "-p", ROOT_PASSWORD, "--num", "10000", "--min", "0", "--max", "1000000000", "--size", "50",
-          "--scanThreads", "4","-t", "bloom_test").waitFor());
+      assertEquals(
+          1,
+          cluster.exec(RandomBatchScanner.class, "--seed", "8", "-i", instance, "-z", keepers, "-u", user, "-p", ROOT_PASSWORD, "--num", "10000", "--min", "0",
+              "--max", "1000000000", "--size", "50", "--scanThreads", "4", "-t", "bloom_test").waitFor());
       diff2 = System.currentTimeMillis() - now;
       if (diff2 < diff)
         break;
@@ -205,12 +206,13 @@ public class ExamplesIT extends ConfigurableMacIT {
     assertTrue(thisFile);
     // create a reverse index
     c.tableOperations().create("doc2Term");
-    assertEquals(0, cluster.exec(Reverse.class, "-i", instance, "-z", keepers, "--shardTable", "shard", "--doc2Term", "doc2Term", "-u", "root", "-p", passwd).waitFor());
+    assertEquals(0, cluster.exec(Reverse.class, "-i", instance, "-z", keepers, "--shardTable", "shard", "--doc2Term", "doc2Term", "-u", "root", "-p", passwd)
+        .waitFor());
     // run some queries
     assertEquals(
         0,
-        cluster.exec(ContinuousQuery.class, "-i", instance, "-z", keepers, "--shardTable", "shard", "--doc2Term", "doc2Term", "-u", "root", "-p", passwd, "--terms", "5", "--count",
-            "1000").waitFor());
+        cluster.exec(ContinuousQuery.class, "-i", instance, "-z", keepers, "--shardTable", "shard", "--doc2Term", "doc2Term", "-u", "root", "-p", passwd,
+            "--terms", "5", "--count", "1000").waitFor());
 
     log.info("Testing MaxMutation constraint");
     c.tableOperations().create("test_ingest");
@@ -227,7 +229,10 @@ public class ExamplesIT extends ConfigurableMacIT {
     log.info("Starting bulk ingest example");
     assertEquals(0, cluster.exec(GenerateTestData.class, "--start-row", "0", "--count", "10000", "--output", dir + "/tmp/input/data").waitFor());
     assertEquals(0, cluster.exec(SetupTable.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "bulkTable").waitFor());
-    assertEquals(0, cluster.exec(BulkIngestExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "bulkTable", "--inputDir", dir + "/tmp/input", "--workDir", dir + "/tmp").waitFor());
+    assertEquals(
+        0,
+        cluster.exec(BulkIngestExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "bulkTable", "--inputDir",
+            dir + "/tmp/input", "--workDir", dir + "/tmp").waitFor());
 
     log.info("Running TeraSortIngest example");
     exec(TeraSortIngest.class, new String[] {"--count", (1000 * 1000) + "", "-nk", "10", "-xk", "10", "-nv", "10", "-xv", "10", "-t", "sorted", "-i", instance,
