Repository: accumulo
Updated Branches:
  refs/heads/1.6 5f29c9f65 -> f84108268
  refs/heads/master 3353a4596 -> 7d9373b61


ACCUMULO-3212 Fix hadoop-1 support.


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/f8410826
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/f8410826
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/f8410826

Branch: refs/heads/1.6
Commit: f8410826883c4ff1a14573cc8b6fbc50f1238522
Parents: 5f29c9f
Author: Josh Elser <els...@apache.org>
Authored: Mon Oct 20 19:19:03 2014 -0400
Committer: Josh Elser <els...@apache.org>
Committed: Mon Oct 20 19:19:03 2014 -0400

----------------------------------------------------------------------
 .../conf/CredentialProviderFactoryShimTest.java | 25 ++++++++++++++------
 1 file changed, 18 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/f8410826/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java b/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java
index a154e6d..6bfd6e8 100644
--- a/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java
@@ -18,6 +18,7 @@ package org.apache.accumulo.core.conf;
 
 import java.io.File;
 import java.io.IOException;
+import java.lang.reflect.Method;
 import java.net.URL;
 import java.util.Collections;
 import java.util.HashMap;
@@ -26,9 +27,10 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.log4j.Logger;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.BeforeClass;
@@ -38,6 +40,7 @@ import org.junit.Test;
  *
  */
 public class CredentialProviderFactoryShimTest {
+  private static final Logger log = Logger.getLogger(CredentialProviderFactoryShimTest.class);
 
   private static final String populatedKeyStoreName = "/accumulo.jceks", emptyKeyStoreName = "/empty.jceks";
   private static File emptyKeyStore, populatedKeyStore;
@@ -162,9 +165,9 @@ public class CredentialProviderFactoryShimTest {
   public void extractFromHdfs() throws Exception {
     File target = new File(System.getProperty("user.dir"), "target");
     String prevValue = System.setProperty("test.build.data", new File(target, this.getClass().getName() + "_minidfs").toString());
-    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(new Configuration());
-    builder.numDataNodes(1);
-    MiniDFSCluster dfsCluster = builder.build();
+    // TODO Remove when Hadoop1 support is dropped
+    @SuppressWarnings("deprecation")
+    MiniDFSCluster dfsCluster = new MiniDFSCluster(new Configuration(), 1, true, null);
     try {
       if (null != prevValue) {
         System.setProperty("test.build.data", prevValue);
@@ -173,9 +176,17 @@ public class CredentialProviderFactoryShimTest {
       }
 
       // One namenode, One configuration
-      Configuration dfsConfiguration = dfsCluster.getConfiguration(0);
+      Configuration dfsConfiguration;
+      Method m = MiniDFSCluster.class.getMethod("getConfiguration", int.class);
+      try {
+        Object result = m.invoke(dfsCluster, 0);
+        dfsConfiguration = (Configuration) result;
+      } catch (Exception e) {
+        log.info("Couldn't get configuration from MiniDFS cluster, assuming 
hadoop-1 and ignoring test", e);
+        return;
+      }
       Path destPath = new Path("/accumulo.jceks");
-      DistributedFileSystem dfs = dfsCluster.getFileSystem();
+      FileSystem dfs = dfsCluster.getFileSystem();
       // Put the populated keystore in hdfs
       dfs.copyFromLocalFile(new Path(populatedKeyStore.toURI()), destPath);
 
@@ -191,7 +202,7 @@ public class CredentialProviderFactoryShimTest {
       dfsCluster.shutdown();
     }
   }
-  
+
   @Test
   public void existingConfigurationReturned() {
     Configuration conf = new Configuration(false);
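
----------------------------------------------------------------------

For readers following along: the compatibility trick above is a runtime
reflection probe. MiniDFSCluster.getConfiguration(int) only exists in the
hadoop-2 line, so the test looks the method up reflectively and quietly
skips itself when the lookup fails under hadoop-1, while the deprecated
MiniDFSCluster constructor and the FileSystem supertype compile against
both lines. A minimal, self-contained sketch of the same pattern follows;
the class name and the main() harness are illustrative, not part of the
commit.

import java.lang.reflect.Method;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class Hadoop1CompatSketch {

  // Returns the cluster's Configuration on hadoop-2, or null on hadoop-1,
  // where MiniDFSCluster has no getConfiguration(int) to look up.
  static Configuration configurationOrNull(MiniDFSCluster cluster) {
    try {
      Method m = MiniDFSCluster.class.getMethod("getConfiguration", int.class);
      return (Configuration) m.invoke(cluster, 0);
    } catch (Exception e) {
      return null;
    }
  }

  public static void main(String[] args) throws Exception {
    // The deprecated constructor exists on both lines, unlike the
    // hadoop-2-only MiniDFSCluster.Builder that the patch removes.
    @SuppressWarnings("deprecation")
    MiniDFSCluster cluster = new MiniDFSCluster(new Configuration(), 1, true, null);
    try {
      Configuration conf = configurationOrNull(cluster);
      if (conf == null) {
        System.out.println("hadoop-1 detected, skipping");
        return;
      }
      // Declaring the common FileSystem supertype keeps this compiling even
      // though getFileSystem() returns DistributedFileSystem on hadoop-2.
      FileSystem fs = cluster.getFileSystem();
      System.out.println("default FS: " + fs.getUri());
    } finally {
      cluster.shutdown();
    }
  }
}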
