http://git-wip-us.apache.org/repos/asf/accumulo/blob/a4afd1bf/test/src/main/java/org/apache/accumulo/test/functional/KerberosProxyIT.java
----------------------------------------------------------------------
diff --cc test/src/main/java/org/apache/accumulo/test/functional/KerberosProxyIT.java
index 7264a42,0000000..59a6762
mode 100644,000000..100644
--- a/test/src/main/java/org/apache/accumulo/test/functional/KerberosProxyIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/functional/KerberosProxyIT.java
@@@ -1,485 -1,0 +1,485 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements.  See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.accumulo.test.functional;
 +
 +import static java.nio.charset.StandardCharsets.UTF_8;
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertTrue;
 +
 +import java.io.File;
 +import java.io.FileWriter;
 +import java.io.IOException;
 +import java.net.ConnectException;
 +import java.net.InetAddress;
 +import java.nio.ByteBuffer;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Properties;
 +
 +import org.apache.accumulo.cluster.ClusterUser;
 +import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 +import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 +import org.apache.accumulo.core.conf.Property;
 +import org.apache.accumulo.core.rpc.UGIAssumingTransport;
 +import org.apache.accumulo.harness.AccumuloITBase;
 +import org.apache.accumulo.harness.MiniClusterConfigurationCallback;
 +import org.apache.accumulo.harness.MiniClusterHarness;
 +import org.apache.accumulo.harness.TestingKdc;
 +import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
 +import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
 +import org.apache.accumulo.proxy.Proxy;
 +import org.apache.accumulo.proxy.ProxyServer;
 +import org.apache.accumulo.proxy.thrift.AccumuloProxy;
 +import org.apache.accumulo.proxy.thrift.AccumuloProxy.Client;
 +import org.apache.accumulo.proxy.thrift.AccumuloSecurityException;
 +import org.apache.accumulo.proxy.thrift.ColumnUpdate;
 +import org.apache.accumulo.proxy.thrift.Key;
 +import org.apache.accumulo.proxy.thrift.KeyValue;
 +import org.apache.accumulo.proxy.thrift.ScanOptions;
 +import org.apache.accumulo.proxy.thrift.ScanResult;
 +import org.apache.accumulo.proxy.thrift.TimeType;
 +import org.apache.accumulo.proxy.thrift.WriterOptions;
 +import org.apache.accumulo.server.util.PortUtils;
- import org.apache.accumulo.test.categories.MiniClusterOnlyTest;
++import org.apache.accumulo.test.categories.MiniClusterOnlyTests;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 +import org.apache.hadoop.security.UserGroupInformation;
 +import org.apache.thrift.protocol.TCompactProtocol;
 +import org.apache.thrift.transport.TSaslClientTransport;
 +import org.apache.thrift.transport.TSocket;
 +import org.apache.thrift.transport.TTransportException;
 +import org.hamcrest.Description;
 +import org.hamcrest.TypeSafeMatcher;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.Before;
 +import org.junit.BeforeClass;
 +import org.junit.Rule;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.junit.rules.ExpectedException;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +/**
 + * Tests impersonation of clients by the proxy over SASL
 + */
- @Category(MiniClusterOnlyTest.class)
++@Category(MiniClusterOnlyTests.class)
 +public class KerberosProxyIT extends AccumuloITBase {
 +  private static final Logger log = LoggerFactory.getLogger(KerberosProxyIT.class);
 +
 +  @Rule
 +  public ExpectedException thrown = ExpectedException.none();
 +
 +  private static TestingKdc kdc;
 +  private static String krbEnabledForITs = null;
 +  private static File proxyKeytab;
 +  private static String hostname, proxyPrimary, proxyPrincipal;
 +
 +  @Override
 +  protected int defaultTimeoutSeconds() {
 +    return 60 * 5;
 +  }
 +
 +  @BeforeClass
 +  public static void startKdc() throws Exception {
 +    kdc = new TestingKdc();
 +    kdc.start();
 +    krbEnabledForITs = System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION);
 +    if (null == krbEnabledForITs || !Boolean.parseBoolean(krbEnabledForITs)) {
 +      System.setProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION, "true");
 +    }
 +
 +    // Create a principal+keytab for the proxy
 +    proxyKeytab = new File(kdc.getKeytabDir(), "proxy.keytab");
 +    hostname = InetAddress.getLocalHost().getCanonicalHostName();
 +    // Set the primary because the client needs to know it
 +    proxyPrimary = "proxy";
 +    // Qualify with an instance
 +    proxyPrincipal = proxyPrimary + "/" + hostname;
 +    kdc.createPrincipal(proxyKeytab, proxyPrincipal);
 +    // Tack on the realm too
 +    proxyPrincipal = kdc.qualifyUser(proxyPrincipal);
 +  }
 +
 +  @AfterClass
 +  public static void stopKdc() throws Exception {
 +    if (null != kdc) {
 +      kdc.stop();
 +    }
 +    if (null != krbEnabledForITs) {
 +      System.setProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION, krbEnabledForITs);
 +    }
 +    UserGroupInformation.setConfiguration(new Configuration(false));
 +  }
 +
 +  private MiniAccumuloClusterImpl mac;
 +  private Process proxyProcess;
 +  private int proxyPort;
 +
 +  @Before
 +  public void startMac() throws Exception {
 +    MiniClusterHarness harness = new MiniClusterHarness();
 +    mac = harness.create(getClass().getName(), testName.getMethodName(), new PasswordToken("unused"), new MiniClusterConfigurationCallback() {
 +
 +      @Override
 +      public void configureMiniCluster(MiniAccumuloConfigImpl cfg, Configuration coreSite) {
 +        cfg.setNumTservers(1);
 +        Map<String,String> siteCfg = cfg.getSiteConfig();
 +        // Allow the proxy to impersonate the client user, but no one else
 +        siteCfg.put(Property.INSTANCE_RPC_SASL_ALLOWED_USER_IMPERSONATION.getKey(), proxyPrincipal + ":" + kdc.getRootUser().getPrincipal());
 +        siteCfg.put(Property.INSTANCE_RPC_SASL_ALLOWED_HOST_IMPERSONATION.getKey(), "*");
 +        cfg.setSiteConfig(siteCfg);
 +      }
 +
 +    }, kdc);
 +
 +    mac.start();
 +    MiniAccumuloConfigImpl cfg = mac.getConfig();
 +
 +    // Generate Proxy configuration and start the proxy
 +    proxyProcess = startProxy(cfg);
 +
 +    // Enabled kerberos auth
 +    Configuration conf = new Configuration(false);
 +    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
 +    UserGroupInformation.setConfiguration(conf);
 +
 +    boolean success = false;
 +    ClusterUser rootUser = kdc.getRootUser();
 +    // Rely on the junit timeout rule
 +    while (!success) {
 +      UserGroupInformation ugi;
 +      try {
 +        ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath());
 +      } catch (IOException ex) {
 +        log.info("Login as root is failing", ex);
 +        Thread.sleep(3000);
 +        continue;
 +      }
 +
 +      TSocket socket = new TSocket(hostname, proxyPort);
 +      log.info("Connecting to proxy with server primary '" + proxyPrimary + "' running on " + hostname);
 +      TSaslClientTransport transport = new TSaslClientTransport("GSSAPI", null, proxyPrimary, hostname, Collections.singletonMap("javax.security.sasl.qop",
 +          "auth"), null, socket);
 +
 +      final UGIAssumingTransport ugiTransport = new UGIAssumingTransport(transport, ugi);
 +
 +      try {
 +        // UGI transport will perform the doAs for us
 +        ugiTransport.open();
 +        success = true;
 +      } catch (TTransportException e) {
 +        Throwable cause = e.getCause();
 +        if (null != cause && cause instanceof ConnectException) {
 +          log.info("Proxy not yet up, waiting");
 +          Thread.sleep(3000);
 +          proxyProcess = checkProxyAndRestart(proxyProcess, cfg);
 +          continue;
 +        }
 +      } finally {
 +        if (null != ugiTransport) {
 +          ugiTransport.close();
 +        }
 +      }
 +    }
 +
 +    assertTrue("Failed to connect to the proxy repeatedly", success);
 +  }
 +
 +  /**
 +   * Starts the thrift proxy using the given MAConfig.
 +   *
 +   * @param cfg
 +   *          configuration for MAC
 +   * @return Process for the thrift proxy
 +   */
 +  private Process startProxy(MiniAccumuloConfigImpl cfg) throws IOException {
 +    File proxyPropertiesFile = generateNewProxyConfiguration(cfg);
 +    return mac.exec(Proxy.class, "-p", proxyPropertiesFile.getCanonicalPath());
 +  }
 +
 +  /**
 +   * Generates a proxy configuration file for the MAC instance. Implicitly updates {@link #proxyPort} when choosing the port the proxy will listen on.
 +   *
 +   * @param cfg
 +   *          The MAC configuration
 +   * @return The proxy's configuration file
 +   */
 +  private File generateNewProxyConfiguration(MiniAccumuloConfigImpl cfg) throws IOException {
 +    // Chooses a new port for the proxy as side-effect
 +    proxyPort = PortUtils.getRandomFreePort();
 +
 +    // Proxy configuration
 +    File proxyPropertiesFile = new File(cfg.getConfDir(), "proxy.properties");
 +    if (proxyPropertiesFile.exists()) {
 +      assertTrue("Failed to delete proxy.properties file", proxyPropertiesFile.delete());
 +    }
 +    Properties proxyProperties = new Properties();
 +    proxyProperties.setProperty("useMockInstance", "false");
 +    proxyProperties.setProperty("useMiniAccumulo", "false");
 +    proxyProperties.setProperty("protocolFactory", TCompactProtocol.Factory.class.getName());
 +    proxyProperties.setProperty("tokenClass", KerberosToken.class.getName());
 +    proxyProperties.setProperty("port", Integer.toString(proxyPort));
 +    proxyProperties.setProperty("maxFrameSize", "16M");
 +    proxyProperties.setProperty("instance", mac.getInstanceName());
 +    proxyProperties.setProperty("zookeepers", mac.getZooKeepers());
 +    proxyProperties.setProperty("thriftServerType", "sasl");
 +    proxyProperties.setProperty("kerberosPrincipal", proxyPrincipal);
 +    proxyProperties.setProperty("kerberosKeytab", proxyKeytab.getCanonicalPath());
 +
 +    // Write out the proxy.properties file
 +    FileWriter writer = new FileWriter(proxyPropertiesFile);
 +    proxyProperties.store(writer, "Configuration for Accumulo proxy");
 +    writer.close();
 +
 +    log.info("Created configuration for proxy listening on {}", proxyPort);
 +
 +    return proxyPropertiesFile;
 +  }
 +
 +  /**
 +   * Restarts the thrift proxy if the previous instance is no longer running. If the proxy is still running, this method does nothing.
 +   *
 +   * @param proxy
 +   *          The thrift proxy process
 +   * @param cfg
 +   *          The MAC configuration
 +   * @return The process for the Proxy, either the previous instance or a new instance.
 +   */
 +  private Process checkProxyAndRestart(Process proxy, MiniAccumuloConfigImpl cfg) throws IOException {
 +    try {
 +      // Get the return code
 +      proxy.exitValue();
 +    } catch (IllegalThreadStateException e) {
 +      log.info("Proxy is still running");
 +      // OK, process is still running, don't restart
 +      return proxy;
 +    }
 +
 +    log.info("Restarting proxy because it is no longer alive");
 +
 +    // We got a return code which means the proxy exited. We'll assume this is because it failed
 +    // to bind the port due to the known race condition between choosing a port and having the
 +    // proxy bind it.
 +    return startProxy(cfg);
 +  }
 +
 +  @After
 +  public void stopMac() throws Exception {
 +    if (null != proxyProcess) {
 +      log.info("Destroying proxy process");
 +      proxyProcess.destroy();
 +      log.info("Waiting for proxy termination");
 +      proxyProcess.waitFor();
 +      log.info("Proxy terminated");
 +    }
 +    if (null != mac) {
 +      mac.stop();
 +    }
 +  }
 +
 +  @Test
 +  public void testProxyClient() throws Exception {
 +    ClusterUser rootUser = kdc.getRootUser();
 +    UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath());
 +
 +    TSocket socket = new TSocket(hostname, proxyPort);
 +    log.info("Connecting to proxy with server primary '" + proxyPrimary + "' running on " + hostname);
 +    TSaslClientTransport transport = new TSaslClientTransport("GSSAPI", null, proxyPrimary, hostname, Collections.singletonMap("javax.security.sasl.qop",
 +        "auth"), null, socket);
 +
 +    final UGIAssumingTransport ugiTransport = new UGIAssumingTransport(transport, ugi);
 +
 +    // UGI transport will perform the doAs for us
 +    ugiTransport.open();
 +
 +    AccumuloProxy.Client.Factory factory = new AccumuloProxy.Client.Factory();
 +    Client client = factory.getClient(new TCompactProtocol(ugiTransport), new TCompactProtocol(ugiTransport));
 +
 +    // Will fail if the proxy can't impersonate the client
 +    ByteBuffer login = client.login(rootUser.getPrincipal(), Collections.<String,String> emptyMap());
 +
 +    // For all of the below actions, the proxy user doesn't have permission to do any of them, but the client user does.
 +    // The fact that any of them actually run tells us that impersonation is working.
 +
 +    // Create a table
 +    String table = "table";
 +    if (!client.tableExists(login, table)) {
 +      client.createTable(login, table, true, TimeType.MILLIS);
 +    }
 +
 +    // Write two records to the table
 +    String writer = client.createWriter(login, table, new WriterOptions());
 +    Map<ByteBuffer,List<ColumnUpdate>> updates = new HashMap<>();
 +    ColumnUpdate update = new ColumnUpdate(ByteBuffer.wrap("cf1".getBytes(UTF_8)), ByteBuffer.wrap("cq1".getBytes(UTF_8)));
 +    update.setValue(ByteBuffer.wrap("value1".getBytes(UTF_8)));
 +    updates.put(ByteBuffer.wrap("row1".getBytes(UTF_8)), Collections.<ColumnUpdate> singletonList(update));
 +    update = new ColumnUpdate(ByteBuffer.wrap("cf2".getBytes(UTF_8)), ByteBuffer.wrap("cq2".getBytes(UTF_8)));
 +    update.setValue(ByteBuffer.wrap("value2".getBytes(UTF_8)));
 +    updates.put(ByteBuffer.wrap("row2".getBytes(UTF_8)), Collections.<ColumnUpdate> singletonList(update));
 +    client.update(writer, updates);
 +
 +    // Flush and close the writer
 +    client.flush(writer);
 +    client.closeWriter(writer);
 +
 +    // Open a scanner to the table
 +    String scanner = client.createScanner(login, table, new ScanOptions());
 +    ScanResult results = client.nextK(scanner, 10);
 +    assertEquals(2, results.getResults().size());
 +
 +    // Check the first key-value
 +    KeyValue kv = results.getResults().get(0);
 +    Key k = kv.key;
 +    ByteBuffer v = kv.value;
 +    assertEquals(ByteBuffer.wrap("row1".getBytes(UTF_8)), k.row);
 +    assertEquals(ByteBuffer.wrap("cf1".getBytes(UTF_8)), k.colFamily);
 +    assertEquals(ByteBuffer.wrap("cq1".getBytes(UTF_8)), k.colQualifier);
 +    assertEquals(ByteBuffer.wrap(new byte[0]), k.colVisibility);
 +    assertEquals(ByteBuffer.wrap("value1".getBytes(UTF_8)), v);
 +
 +    // And then the second
 +    kv = results.getResults().get(1);
 +    k = kv.key;
 +    v = kv.value;
 +    assertEquals(ByteBuffer.wrap("row2".getBytes(UTF_8)), k.row);
 +    assertEquals(ByteBuffer.wrap("cf2".getBytes(UTF_8)), k.colFamily);
 +    assertEquals(ByteBuffer.wrap("cq2".getBytes(UTF_8)), k.colQualifier);
 +    assertEquals(ByteBuffer.wrap(new byte[0]), k.colVisibility);
 +    assertEquals(ByteBuffer.wrap("value2".getBytes(UTF_8)), v);
 +
 +    // Close the scanner
 +    client.closeScanner(scanner);
 +
 +    ugiTransport.close();
 +  }
 +
 +  @Test
 +  public void testDisallowedClientForImpersonation() throws Exception {
 +    String user = testName.getMethodName();
 +    File keytab = new File(kdc.getKeytabDir(), user + ".keytab");
 +    kdc.createPrincipal(keytab, user);
 +
 +    // Login as the new user
 +    UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, keytab.getAbsolutePath());
 +
 +    log.info("Logged in as " + ugi);
 +
 +    // Expect an AccumuloSecurityException
 +    thrown.expect(AccumuloSecurityException.class);
 +    // Error msg would look like:
 +    //
 +    // org.apache.accumulo.core.client.AccumuloSecurityException: Error BAD_CREDENTIALS for user Principal in credentials object should match kerberos
 +    // principal.
 +    // Expected 'proxy/hw10447.lo...@example.com' but was 'testdisallowedclientforimpersonat...@example.com' - Username or Password is Invalid)
 +    thrown.expect(new ThriftExceptionMatchesPattern(".*Error BAD_CREDENTIALS.*"));
 +    thrown.expect(new ThriftExceptionMatchesPattern(".*Expected '" + proxyPrincipal + "' but was '" + kdc.qualifyUser(user) + "'.*"));
 +
 +    TSocket socket = new TSocket(hostname, proxyPort);
 +    log.info("Connecting to proxy with server primary '" + proxyPrimary + "' running on " + hostname);
 +
 +    // Should fail to open the transport
 +    TSaslClientTransport transport = new TSaslClientTransport("GSSAPI", null, proxyPrimary, hostname, Collections.singletonMap("javax.security.sasl.qop",
 +        "auth"), null, socket);
 +
 +    final UGIAssumingTransport ugiTransport = new UGIAssumingTransport(transport, ugi);
 +
 +    // UGI transport will perform the doAs for us
 +    ugiTransport.open();
 +
 +    AccumuloProxy.Client.Factory factory = new AccumuloProxy.Client.Factory();
 +    Client client = factory.getClient(new TCompactProtocol(ugiTransport), new TCompactProtocol(ugiTransport));
 +
 +    // Will fail because the proxy can't impersonate this user (per the site configuration)
 +    try {
 +      client.login(kdc.qualifyUser(user), Collections.<String,String> emptyMap());
 +    } finally {
 +      if (null != ugiTransport) {
 +        ugiTransport.close();
 +      }
 +    }
 +  }
 +
 +  @Test
 +  public void testMismatchPrincipals() throws Exception {
 +    ClusterUser rootUser = kdc.getRootUser();
 +    // Should get an AccumuloSecurityException and the given message
 +    thrown.expect(AccumuloSecurityException.class);
 +    thrown.expect(new ThriftExceptionMatchesPattern(ProxyServer.RPC_ACCUMULO_PRINCIPAL_MISMATCH_MSG));
 +
 +    // Make a new user
 +    String user = testName.getMethodName();
 +    File keytab = new File(kdc.getKeytabDir(), user + ".keytab");
 +    kdc.createPrincipal(keytab, user);
 +
 +    // Login as the new user
 +    UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, keytab.getAbsolutePath());
 +
 +    log.info("Logged in as " + ugi);
 +
 +    TSocket socket = new TSocket(hostname, proxyPort);
 +    log.info("Connecting to proxy with server primary '" + proxyPrimary + "' running on " + hostname);
 +
 +    // Should fail to open the transport
 +    TSaslClientTransport transport = new TSaslClientTransport("GSSAPI", null, proxyPrimary, hostname, Collections.singletonMap("javax.security.sasl.qop",
 +        "auth"), null, socket);
 +
 +    final UGIAssumingTransport ugiTransport = new UGIAssumingTransport(transport, ugi);
 +
 +    // UGI transport will perform the doAs for us
 +    ugiTransport.open();
 +
 +    AccumuloProxy.Client.Factory factory = new AccumuloProxy.Client.Factory();
 +    Client client = factory.getClient(new TCompactProtocol(ugiTransport), new TCompactProtocol(ugiTransport));
 +
 +    // The proxy needs to recognize that the requested principal isn't the same as the SASL principal and fail
 +    // Accumulo should let this through -- we need to rely on the proxy to reject us before talking to accumulo
 +    try {
 +      client.login(rootUser.getPrincipal(), Collections.<String,String> emptyMap());
 +    } finally {
 +      if (null != ugiTransport) {
 +        ugiTransport.close();
 +      }
 +    }
 +  }
 +
 +  private static class ThriftExceptionMatchesPattern extends TypeSafeMatcher<AccumuloSecurityException> {
 +    private String pattern;
 +
 +    public ThriftExceptionMatchesPattern(String pattern) {
 +      this.pattern = pattern;
 +    }
 +
 +    @Override
 +    protected boolean matchesSafely(AccumuloSecurityException item) {
 +      return item.isSetMsg() && item.msg.matches(pattern);
 +    }
 +
 +    @Override
 +    public void describeTo(Description description) {
 +      description.appendText("matches pattern ").appendValue(pattern);
 +    }
 +
 +    @Override
 +    protected void describeMismatchSafely(AccumuloSecurityException item, Description mismatchDescription) {
 +      mismatchDescription.appendText("does not match");
 +    }
 +  }
 +}

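For reference, the client-side connection pattern this test exercises reduces to the sketch below. The host, port, principal, and keytab path are placeholders; the classes and calls (UGIAssumingTransport, TSaslClientTransport, AccumuloProxy.Client) are the same ones used in the test above.

    import java.nio.ByteBuffer;
    import java.util.Collections;

    import org.apache.accumulo.core.rpc.UGIAssumingTransport;
    import org.apache.accumulo.proxy.thrift.AccumuloProxy;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.thrift.protocol.TCompactProtocol;
    import org.apache.thrift.transport.TSaslClientTransport;
    import org.apache.thrift.transport.TSocket;

    public class ProxyClientSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder principal and keytab; the IT gets these from its TestingKdc.
        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
            "client@EXAMPLE.COM", "/path/to/client.keytab");

        // "proxy" must match the primary of the proxy's Kerberos principal (proxy/host@REALM).
        TSocket socket = new TSocket("proxy.example.com", 42424);
        TSaslClientTransport saslTransport = new TSaslClientTransport("GSSAPI", null, "proxy",
            "proxy.example.com", Collections.singletonMap("javax.security.sasl.qop", "auth"), null, socket);

        // Wraps the SASL transport so open/read/write run inside ugi.doAs(...).
        UGIAssumingTransport transport = new UGIAssumingTransport(saslTransport, ugi);
        transport.open();

        AccumuloProxy.Client client = new AccumuloProxy.Client.Factory()
            .getClient(new TCompactProtocol(transport), new TCompactProtocol(transport));
        ByteBuffer login = client.login("client@EXAMPLE.COM", Collections.<String,String> emptyMap());
        // ... table, writer, and scanner calls go through 'client' using the 'login' handle ...
        transport.close();
      }
    }
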
http://git-wip-us.apache.org/repos/asf/accumulo/blob/a4afd1bf/test/src/main/java/org/apache/accumulo/test/functional/KerberosRenewalIT.java
----------------------------------------------------------------------
diff --cc test/src/main/java/org/apache/accumulo/test/functional/KerberosRenewalIT.java
index 0e60501,0000000..4774f69
mode 100644,000000..100644
--- a/test/src/main/java/org/apache/accumulo/test/functional/KerberosRenewalIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/functional/KerberosRenewalIT.java
@@@ -1,191 -1,0 +1,191 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements.  See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.accumulo.test.functional;
 +
 +import static org.junit.Assert.assertEquals;
 +
 +import java.util.Map;
 +import java.util.Map.Entry;
 +
 +import org.apache.accumulo.cluster.ClusterUser;
 +import org.apache.accumulo.core.client.AccumuloException;
 +import org.apache.accumulo.core.client.AccumuloSecurityException;
 +import org.apache.accumulo.core.client.BatchWriter;
 +import org.apache.accumulo.core.client.BatchWriterConfig;
 +import org.apache.accumulo.core.client.Connector;
 +import org.apache.accumulo.core.client.Scanner;
 +import org.apache.accumulo.core.client.TableExistsException;
 +import org.apache.accumulo.core.client.TableNotFoundException;
 +import org.apache.accumulo.core.client.admin.CompactionConfig;
 +import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 +import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 +import org.apache.accumulo.core.conf.Property;
 +import org.apache.accumulo.core.data.Key;
 +import org.apache.accumulo.core.data.Mutation;
 +import org.apache.accumulo.core.data.PartialKey;
 +import org.apache.accumulo.core.data.Value;
 +import org.apache.accumulo.core.security.Authorizations;
 +import org.apache.accumulo.harness.AccumuloITBase;
 +import org.apache.accumulo.harness.MiniClusterConfigurationCallback;
 +import org.apache.accumulo.harness.MiniClusterHarness;
 +import org.apache.accumulo.harness.TestingKdc;
 +import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
 +import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
- import org.apache.accumulo.test.categories.MiniClusterOnlyTest;
++import org.apache.accumulo.test.categories.MiniClusterOnlyTests;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 +import org.apache.hadoop.minikdc.MiniKdc;
 +import org.apache.hadoop.security.UserGroupInformation;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.Before;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import com.google.common.collect.Iterables;
 +
 +/**
 + * MAC test which uses {@link MiniKdc} to simulate a secure environment. Can be used as a sanity check for Kerberos/SASL testing.
 + */
- @Category(MiniClusterOnlyTest.class)
++@Category(MiniClusterOnlyTests.class)
 +public class KerberosRenewalIT extends AccumuloITBase {
 +  private static final Logger log = LoggerFactory.getLogger(KerberosRenewalIT.class);
 +
 +  private static TestingKdc kdc;
 +  private static String krbEnabledForITs = null;
 +  private static ClusterUser rootUser;
 +
 +  private static final long TICKET_LIFETIME = 6 * 60 * 1000; // Anything less seems to fail when generating the ticket
 +  private static final long TICKET_TEST_LIFETIME = 8 * 60 * 1000; // Run a test for 8 mins
 +  private static final long TEST_DURATION = 9 * 60 * 1000; // The test should finish within 9 mins
 +
 +  @BeforeClass
 +  public static void startKdc() throws Exception {
 +    // 30s renewal time window
 +    kdc = new TestingKdc(TestingKdc.computeKdcDir(), TestingKdc.computeKeytabDir(), TICKET_LIFETIME);
 +    kdc.start();
 +    krbEnabledForITs = System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION);
 +    if (null == krbEnabledForITs || !Boolean.parseBoolean(krbEnabledForITs)) {
 +      System.setProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION, "true");
 +    }
 +    rootUser = kdc.getRootUser();
 +  }
 +
 +  @AfterClass
 +  public static void stopKdc() throws Exception {
 +    if (null != kdc) {
 +      kdc.stop();
 +    }
 +    if (null != krbEnabledForITs) {
 +      System.setProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION, krbEnabledForITs);
 +    }
 +  }
 +
 +  @Override
 +  public int defaultTimeoutSeconds() {
 +    return (int) TEST_DURATION / 1000;
 +  }
 +
 +  private MiniAccumuloClusterImpl mac;
 +
 +  @Before
 +  public void startMac() throws Exception {
 +    MiniClusterHarness harness = new MiniClusterHarness();
 +    mac = harness.create(this, new PasswordToken("unused"), kdc, new MiniClusterConfigurationCallback() {
 +
 +      @Override
 +      public void configureMiniCluster(MiniAccumuloConfigImpl cfg, Configuration coreSite) {
 +        Map<String,String> site = cfg.getSiteConfig();
 +        site.put(Property.INSTANCE_ZK_TIMEOUT.getKey(), "15s");
 +        // Reduce the period just to make sure we trigger renewal fast
 +        site.put(Property.GENERAL_KERBEROS_RENEWAL_PERIOD.getKey(), "5s");
 +        cfg.setSiteConfig(site);
 +      }
 +
 +    });
 +
 +    mac.getConfig().setNumTservers(1);
 +    mac.start();
 +    // Enabled kerberos auth
 +    Configuration conf = new Configuration(false);
 +    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
 +    UserGroupInformation.setConfiguration(conf);
 +  }
 +
 +  @After
 +  public void stopMac() throws Exception {
 +    if (null != mac) {
 +      mac.stop();
 +    }
 +  }
 +
 +  // Intentionally setting the Test annotation timeout. We do not want to scale the timeout.
 +  @Test(timeout = TEST_DURATION)
 +  public void testReadAndWriteThroughTicketLifetime() throws Exception {
 +    // Attempt to use Accumulo for a duration of time that exceeds the Kerberos ticket lifetime.
 +    // This is a functional test to verify that Accumulo services renew their ticket.
 +    // If the test doesn't finish on its own, this signifies that Accumulo services failed
 +    // and the test should fail. If Accumulo services renew their ticket, the test case
 +    // should exit gracefully on its own.
 +
 +    // Login as the "root" user
 +    UserGroupInformation.loginUserFromKeytab(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath());
 +    log.info("Logged in as {}", rootUser.getPrincipal());
 +
 +    Connector conn = mac.getConnector(rootUser.getPrincipal(), new KerberosToken());
 +    log.info("Created connector as {}", rootUser.getPrincipal());
 +    assertEquals(rootUser.getPrincipal(), conn.whoami());
 +
 +    long duration = 0;
 +    long last = System.currentTimeMillis();
 +    // Make sure we have a couple renewals happen
 +    while (duration < TICKET_TEST_LIFETIME) {
 +      // Create a table, write a record, compact, read the record, drop the table.
 +      createReadWriteDrop(conn);
 +      // Wait a bit after
 +      Thread.sleep(5000);
 +
 +      // Update the duration
 +      long now = System.currentTimeMillis();
 +      duration += now - last;
 +      last = now;
 +    }
 +  }
 +
 +  /**
 +   * Creates a table, adds a record to it, and then compacts the table. A simple way to make sure that the system user exists (since the master does an RPC to
 +   * the tserver which will create the system user if it doesn't already exist).
 +   */
 +  private void createReadWriteDrop(Connector conn) throws TableNotFoundException, AccumuloSecurityException, AccumuloException, TableExistsException {
 +    final String table = testName.getMethodName() + "_table";
 +    conn.tableOperations().create(table);
 +    BatchWriter bw = conn.createBatchWriter(table, new BatchWriterConfig());
 +    Mutation m = new Mutation("a");
 +    m.put("b", "c", "d");
 +    bw.addMutation(m);
 +    bw.close();
 +    conn.tableOperations().compact(table, new CompactionConfig().setFlush(true).setWait(true));
 +    Scanner s = conn.createScanner(table, Authorizations.EMPTY);
 +    Entry<Key,Value> entry = Iterables.getOnlyElement(s);
 +    assertEquals("Did not find the expected key", 0, new Key("a", "b", "c").compareTo(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL));
 +    assertEquals("d", entry.getValue().toString());
 +    conn.tableOperations().delete(table);
 +  }
 +}

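Outside the mini-cluster harness, the Kerberos client login this test relies on looks roughly like the sketch below. The instance name, ZooKeeper quorum, principal, and keytab are placeholders, and ZooKeeperInstance/ClientConfiguration are assumed here as the usual 1.x client entry points; the no-arg KerberosToken picks up whichever user is currently logged in via UserGroupInformation, exactly as in the test.

    import org.apache.accumulo.core.client.ClientConfiguration;
    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.ZooKeeperInstance;
    import org.apache.accumulo.core.client.security.tokens.KerberosToken;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosClientSketch {
      public static void main(String[] args) throws Exception {
        // Log in from a keytab; the IT uses principals generated by TestingKdc instead.
        UserGroupInformation.loginUserFromKeytab("user@EXAMPLE.COM", "/path/to/user.keytab");

        // SASL must be enabled client-side to talk to a Kerberos-enabled instance.
        ClientConfiguration clientConf = ClientConfiguration.loadDefault()
            .withInstance("accumulo").withZkHosts("zk1:2181,zk2:2181,zk3:2181").withSasl(true);

        // KerberosToken() uses the credentials of the currently logged-in UGI user.
        Connector conn = new ZooKeeperInstance(clientConf).getConnector("user@EXAMPLE.COM", new KerberosToken());
        System.out.println("Connected as " + conn.whoami());
      }
    }
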
http://git-wip-us.apache.org/repos/asf/accumulo/blob/a4afd1bf/test/src/main/java/org/apache/accumulo/test/functional/NativeMapIT.java
----------------------------------------------------------------------
diff --cc test/src/main/java/org/apache/accumulo/test/functional/NativeMapIT.java
index 2d594f5,0000000..ea3ba66
mode 100644,000000..100644
--- a/test/src/main/java/org/apache/accumulo/test/functional/NativeMapIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/functional/NativeMapIT.java
@@@ -1,613 -1,0 +1,616 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements.  See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.accumulo.test.functional;
 +
 +import static java.nio.charset.StandardCharsets.UTF_8;
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertFalse;
 +import static org.junit.Assert.assertNotNull;
 +import static org.junit.Assert.assertNull;
 +import static org.junit.Assert.assertTrue;
 +
 +import java.io.File;
 +import java.io.IOException;
 +import java.util.ArrayList;
 +import java.util.Collections;
 +import java.util.Comparator;
 +import java.util.Iterator;
 +import java.util.Map.Entry;
 +import java.util.NoSuchElementException;
 +import java.util.Random;
 +import java.util.TreeMap;
 +
 +import org.apache.accumulo.core.Constants;
 +import org.apache.accumulo.core.data.Key;
 +import org.apache.accumulo.core.data.Value;
 +import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
 +import org.apache.accumulo.core.util.Pair;
++import org.apache.accumulo.test.categories.SunnyDayTests;
 +import org.apache.accumulo.tserver.NativeMap;
 +import org.apache.hadoop.io.Text;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
++import org.junit.experimental.categories.Category;
 +
++@Category(SunnyDayTests.class)
 +public class NativeMapIT {
 +
 +  private Key nk(int r) {
 +    return new Key(new Text(String.format("r%09d", r)));
 +  }
 +
 +  private Key nk(int r, int cf, int cq, int cv, int ts, boolean deleted) {
 +    Key k = new Key(new Text(String.format("r%09d", r)), new Text(String.format("cf%09d", cf)), new Text(String.format("cq%09d", cq)), new Text(String.format(
 +        "cv%09d", cv)), ts);
 +
 +    k.setDeleted(deleted);
 +
 +    return k;
 +  }
 +
 +  private Value nv(int v) {
 +    return new Value(String.format("r%09d", v).getBytes(UTF_8));
 +  }
 +
 +  public static File nativeMapLocation() {
 +    File projectDir = new File(System.getProperty("user.dir")).getParentFile();
 +    File nativeMapDir = new File(projectDir, "server/native/target/accumulo-native-" + Constants.VERSION + "/accumulo-native-" + Constants.VERSION);
 +    return nativeMapDir;
 +  }
 +
 +  @BeforeClass
 +  public static void setUp() {
 +    NativeMap.loadNativeLib(Collections.singletonList(nativeMapLocation()));
 +  }
 +
 +  private void verifyIterator(int start, int end, int valueOffset, Iterator<Entry<Key,Value>> iter) {
 +    for (int i = start; i <= end; i++) {
 +      assertTrue(iter.hasNext());
 +      Entry<Key,Value> entry = iter.next();
 +      assertEquals(nk(i), entry.getKey());
 +      assertEquals(nv(i + valueOffset), entry.getValue());
 +    }
 +
 +    assertFalse(iter.hasNext());
 +  }
 +
 +  private void insertAndVerify(NativeMap nm, int start, int end, int valueOffset) {
 +    for (int i = start; i <= end; i++) {
 +      nm.put(nk(i), nv(i + valueOffset));
 +    }
 +
 +    for (int i = start; i <= end; i++) {
 +      Value v = nm.get(nk(i));
 +      assertNotNull(v);
 +      assertEquals(nv(i + valueOffset), v);
 +
 +      Iterator<Entry<Key,Value>> iter2 = nm.iterator(nk(i));
 +      assertTrue(iter2.hasNext());
 +      Entry<Key,Value> entry = iter2.next();
 +      assertEquals(nk(i), entry.getKey());
 +      assertEquals(nv(i + valueOffset), entry.getValue());
 +    }
 +
 +    assertNull(nm.get(nk(start - 1)));
 +
 +    assertNull(nm.get(nk(end + 1)));
 +
 +    Iterator<Entry<Key,Value>> iter = nm.iterator();
 +    verifyIterator(start, end, valueOffset, iter);
 +
 +    for (int i = start; i <= end; i++) {
 +      iter = nm.iterator(nk(i));
 +      verifyIterator(i, end, valueOffset, iter);
 +
 +      // lookup nonexistent key that falls after existing key
 +      iter = nm.iterator(nk(i, 1, 1, 1, 1, false));
 +      verifyIterator(i + 1, end, valueOffset, iter);
 +    }
 +
 +    assertEquals(end - start + 1, nm.size());
 +  }
 +
 +  private void insertAndVerifyExhaustive(NativeMap nm, int num, int run) {
 +    for (int i = 0; i < num; i++) {
 +      for (int j = 0; j < num; j++) {
 +        for (int k = 0; k < num; k++) {
 +          for (int l = 0; l < num; l++) {
 +            for (int ts = 0; ts < num; ts++) {
 +              Key key = nk(i, j, k, l, ts, true);
 +              Value value = new Value((i + "_" + j + "_" + k + "_" + l + "_" + ts + "_" + true + "_" + run).getBytes(UTF_8));
 +
 +              nm.put(key, value);
 +
 +              key = nk(i, j, k, l, ts, false);
 +              value = new Value((i + "_" + j + "_" + k + "_" + l + "_" + ts + "_" + false + "_" + run).getBytes(UTF_8));
 +
 +              nm.put(key, value);
 +            }
 +          }
 +        }
 +      }
 +    }
 +
 +    Iterator<Entry<Key,Value>> iter = nm.iterator();
 +
 +    for (int i = 0; i < num; i++) {
 +      for (int j = 0; j < num; j++) {
 +        for (int k = 0; k < num; k++) {
 +          for (int l = 0; l < num; l++) {
 +            for (int ts = num - 1; ts >= 0; ts--) {
 +              Key key = nk(i, j, k, l, ts, true);
 +              Value value = new Value((i + "_" + j + "_" + k + "_" + l + "_" + ts + "_" + true + "_" + run).getBytes(UTF_8));
 +
 +              assertTrue(iter.hasNext());
 +              Entry<Key,Value> entry = iter.next();
 +              assertEquals(key, entry.getKey());
 +              assertEquals(value, entry.getValue());
 +
 +              key = nk(i, j, k, l, ts, false);
 +              value = new Value((i + "_" + j + "_" + k + "_" + l + "_" + ts + "_" + false + "_" + run).getBytes(UTF_8));
 +
 +              assertTrue(iter.hasNext());
 +              entry = iter.next();
 +              assertEquals(key, entry.getKey());
 +              assertEquals(value, entry.getValue());
 +            }
 +          }
 +        }
 +      }
 +    }
 +
 +    assertFalse(iter.hasNext());
 +
 +    for (int i = 0; i < num; i++) {
 +      for (int j = 0; j < num; j++) {
 +        for (int k = 0; k < num; k++) {
 +          for (int l = 0; l < num; l++) {
 +            for (int ts = 0; ts < num; ts++) {
 +              Key key = nk(i, j, k, l, ts, true);
 +              Value value = new Value((i + "_" + j + "_" + k + "_" + l + "_" + ts + "_" + true + "_" + run).getBytes(UTF_8));
 +
 +              assertEquals(value, nm.get(key));
 +
 +              Iterator<Entry<Key,Value>> iter2 = nm.iterator(key);
 +              assertTrue(iter2.hasNext());
 +              Entry<Key,Value> entry = iter2.next();
 +              assertEquals(key, entry.getKey());
 +              assertEquals(value, entry.getValue());
 +
 +              key = nk(i, j, k, l, ts, false);
 +              value = new Value((i + "_" + j + "_" + k + "_" + l + "_" + ts + "_" + false + "_" + run).getBytes(UTF_8));
 +
 +              assertEquals(value, nm.get(key));
 +
 +              Iterator<Entry<Key,Value>> iter3 = nm.iterator(key);
 +              assertTrue(iter3.hasNext());
 +              Entry<Key,Value> entry2 = iter3.next();
 +              assertEquals(key, entry2.getKey());
 +              assertEquals(value, entry2.getValue());
 +            }
 +          }
 +        }
 +      }
 +    }
 +
 +    assertEquals(num * num * num * num * num * 2, nm.size());
 +  }
 +
 +  @Test
 +  public void test1() {
 +    NativeMap nm = new NativeMap();
 +    Iterator<Entry<Key,Value>> iter = nm.iterator();
 +    assertFalse(iter.hasNext());
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void test2() {
 +    NativeMap nm = new NativeMap();
 +
 +    insertAndVerify(nm, 1, 10, 0);
 +    insertAndVerify(nm, 1, 10, 1);
 +    insertAndVerify(nm, 1, 10, 2);
 +
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void test4() {
 +    NativeMap nm = new NativeMap();
 +
 +    insertAndVerifyExhaustive(nm, 3, 0);
 +    insertAndVerifyExhaustive(nm, 3, 1);
 +
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void test5() {
 +    NativeMap nm = new NativeMap();
 +
 +    insertAndVerify(nm, 1, 10, 0);
 +
 +    Iterator<Entry<Key,Value>> iter = nm.iterator();
 +    iter.next();
 +
 +    nm.delete();
 +
 +    try {
 +      nm.put(nk(1), nv(1));
 +      assertTrue(false);
 +    } catch (IllegalStateException e) {
 +
 +    }
 +
 +    try {
 +      nm.get(nk(1));
 +      assertTrue(false);
 +    } catch (IllegalStateException e) {
 +
 +    }
 +
 +    try {
 +      nm.iterator();
 +      assertTrue(false);
 +    } catch (IllegalStateException e) {
 +
 +    }
 +
 +    try {
 +      nm.iterator(nk(1));
 +      assertTrue(false);
 +    } catch (IllegalStateException e) {
 +
 +    }
 +
 +    try {
 +      nm.size();
 +      assertTrue(false);
 +    } catch (IllegalStateException e) {
 +
 +    }
 +
 +    try {
 +      iter.next();
 +      assertTrue(false);
 +    } catch (IllegalStateException e) {
 +
 +    }
 +
 +  }
 +
 +  @Test
 +  public void test7() {
 +    NativeMap nm = new NativeMap();
 +
 +    insertAndVerify(nm, 1, 10, 0);
 +
 +    nm.delete();
 +
 +    try {
 +      nm.delete();
 +      assertTrue(false);
 +    } catch (IllegalStateException e) {
 +
 +    }
 +  }
 +
 +  @Test
 +  public void test8() {
 +    // test verifies that native map sorts keys sharing some common prefix properly
 +
 +    NativeMap nm = new NativeMap();
 +
 +    TreeMap<Key,Value> tm = new TreeMap<>();
 +
 +    tm.put(new Key(new Text("fo")), new Value(new byte[] {'0'}));
 +    tm.put(new Key(new Text("foo")), new Value(new byte[] {'1'}));
 +    tm.put(new Key(new Text("foo1")), new Value(new byte[] {'2'}));
 +    tm.put(new Key(new Text("foo2")), new Value(new byte[] {'3'}));
 +
 +    for (Entry<Key,Value> entry : tm.entrySet()) {
 +      nm.put(entry.getKey(), entry.getValue());
 +    }
 +
 +    Iterator<Entry<Key,Value>> iter = nm.iterator();
 +
 +    for (Entry<Key,Value> entry : tm.entrySet()) {
 +      assertTrue(iter.hasNext());
 +      Entry<Key,Value> entry2 = iter.next();
 +
 +      assertEquals(entry.getKey(), entry2.getKey());
 +      assertEquals(entry.getValue(), entry2.getValue());
 +    }
 +
 +    assertFalse(iter.hasNext());
 +
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void test9() {
 +    NativeMap nm = new NativeMap();
 +
 +    Iterator<Entry<Key,Value>> iter = nm.iterator();
 +
 +    try {
 +      iter.next();
 +      assertTrue(false);
 +    } catch (NoSuchElementException e) {
 +
 +    }
 +
 +    insertAndVerify(nm, 1, 1, 0);
 +
 +    iter = nm.iterator();
 +    iter.next();
 +
 +    try {
 +      iter.next();
 +      assertTrue(false);
 +    } catch (NoSuchElementException e) {
 +
 +    }
 +
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void test10() {
 +    int start = 1;
 +    int end = 10000;
 +
 +    NativeMap nm = new NativeMap();
 +    for (int i = start; i <= end; i++) {
 +      nm.put(nk(i), nv(i));
 +    }
 +
 +    long mem1 = nm.getMemoryUsed();
 +
 +    for (int i = start; i <= end; i++) {
 +      nm.put(nk(i), nv(i));
 +    }
 +
 +    long mem2 = nm.getMemoryUsed();
 +
 +    if (mem1 != mem2) {
 +      throw new RuntimeException("Memory changed after inserting duplicate data " + mem1 + " " + mem2);
 +    }
 +
 +    for (int i = start; i <= end; i++) {
 +      nm.put(nk(i), nv(i));
 +    }
 +
 +    long mem3 = nm.getMemoryUsed();
 +
 +    if (mem1 != mem3) {
 +      throw new RuntimeException("Memory changed after inserting duplicate data " + mem1 + " " + mem3);
 +    }
 +
 +    byte bigrow[] = new byte[1000000];
 +    byte bigvalue[] = new byte[bigrow.length];
 +
 +    for (int i = 0; i < bigrow.length; i++) {
 +      bigrow[i] = (byte) (0xff & (i % 256));
 +      bigvalue[i] = bigrow[i];
 +    }
 +
 +    nm.put(new Key(new Text(bigrow)), new Value(bigvalue));
 +
 +    long mem4 = nm.getMemoryUsed();
 +
 +    Value val = nm.get(new Key(new Text(bigrow)));
 +    if (val == null || !val.equals(new Value(bigvalue))) {
 +      throw new RuntimeException("Did not get expected big value");
 +    }
 +
 +    nm.put(new Key(new Text(bigrow)), new Value(bigvalue));
 +
 +    long mem5 = nm.getMemoryUsed();
 +
 +    if (mem4 != mem5) {
 +      throw new RuntimeException("Memory changed after inserting duplicate data " + mem4 + " " + mem5);
 +    }
 +
 +    val = nm.get(new Key(new Text(bigrow)));
 +    if (val == null || !val.equals(new Value(bigvalue))) {
 +      throw new RuntimeException("Did not get expected big value");
 +    }
 +
 +    nm.delete();
 +  }
 +
 +  // random length random field
 +  private static byte[] rlrf(Random r, int maxLen) {
 +    int len = r.nextInt(maxLen);
 +
 +    byte f[] = new byte[len];
 +    r.nextBytes(f);
 +
 +    return f;
 +  }
 +
 +  @Test
 +  public void test11() {
 +    NativeMap nm = new NativeMap();
 +
 +    // insert things with varying field sizes and value sizes
 +
 +    // generate random data
 +    Random r = new Random(75);
 +
 +    ArrayList<Pair<Key,Value>> testData = new ArrayList<>();
 +
 +    for (int i = 0; i < 100000; i++) {
 +
 +      Key k = new Key(rlrf(r, 97), rlrf(r, 13), rlrf(r, 31), rlrf(r, 11), (r.nextLong() & 0x7fffffffffffffffl), false, false);
 +      Value v = new Value(rlrf(r, 511));
 +
 +      testData.add(new Pair<>(k, v));
 +    }
 +
 +    // insert unsorted data
 +    for (Pair<Key,Value> pair : testData) {
 +      nm.put(pair.getFirst(), pair.getSecond());
 +    }
 +
 +    for (int i = 0; i < 2; i++) {
 +
 +      // sort data
 +      Collections.sort(testData, new Comparator<Pair<Key,Value>>() {
 +        @Override
 +        public int compare(Pair<Key,Value> o1, Pair<Key,Value> o2) {
 +          return o1.getFirst().compareTo(o2.getFirst());
 +        }
 +      });
 +
 +      // verify
 +      Iterator<Entry<Key,Value>> iter1 = nm.iterator();
 +      Iterator<Pair<Key,Value>> iter2 = testData.iterator();
 +
 +      while (iter1.hasNext() && iter2.hasNext()) {
 +        Entry<Key,Value> e = iter1.next();
 +        Pair<Key,Value> p = iter2.next();
 +
 +        if (!e.getKey().equals(p.getFirst()))
 +          throw new RuntimeException("Keys not equal");
 +
 +        if (!e.getValue().equals(p.getSecond()))
 +          throw new RuntimeException("Values not equal");
 +      }
 +
 +      if (iter1.hasNext())
 +        throw new RuntimeException("Not all of native map consumed");
 +
 +      if (iter2.hasNext())
 +        throw new RuntimeException("Not all of test data consumed");
 +
 +      System.out.println("test 11 nm mem " + nm.getMemoryUsed());
 +
 +      // insert data again w/ different value
 +      Collections.shuffle(testData, r);
 +      // insert unsorted data
 +      for (Pair<Key,Value> pair : testData) {
 +        pair.getSecond().set(rlrf(r, 511));
 +        nm.put(pair.getFirst(), pair.getSecond());
 +      }
 +    }
 +
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void testBinary() {
 +    NativeMap nm = new NativeMap();
 +
 +    byte emptyBytes[] = new byte[0];
 +
 +    for (int i = 0; i < 256; i++) {
 +      for (int j = 0; j < 256; j++) {
 +        byte row[] = new byte[] {'r', (byte) (0xff & i), (byte) (0xff & j)};
 +        byte data[] = new byte[] {'v', (byte) (0xff & i), (byte) (0xff & j)};
 +
 +        Key k = new Key(row, emptyBytes, emptyBytes, emptyBytes, 1);
 +        Value v = new Value(data);
 +
 +        nm.put(k, v);
 +      }
 +    }
 +
 +    Iterator<Entry<Key,Value>> iter = nm.iterator();
 +    for (int i = 0; i < 256; i++) {
 +      for (int j = 0; j < 256; j++) {
 +        byte row[] = new byte[] {'r', (byte) (0xff & i), (byte) (0xff & j)};
 +        byte data[] = new byte[] {'v', (byte) (0xff & i), (byte) (0xff & j)};
 +
 +        Key k = new Key(row, emptyBytes, emptyBytes, emptyBytes, 1);
 +        Value v = new Value(data);
 +
 +        assertTrue(iter.hasNext());
 +        Entry<Key,Value> entry = iter.next();
 +
 +        assertEquals(k, entry.getKey());
 +        assertEquals(v, entry.getValue());
 +
 +      }
 +    }
 +
 +    assertFalse(iter.hasNext());
 +
 +    for (int i = 0; i < 256; i++) {
 +      for (int j = 0; j < 256; j++) {
 +        byte row[] = new byte[] {'r', (byte) (0xff & i), (byte) (0xff & j)};
 +        byte data[] = new byte[] {'v', (byte) (0xff & i), (byte) (0xff & j)};
 +
 +        Key k = new Key(row, emptyBytes, emptyBytes, emptyBytes, 1);
 +        Value v = new Value(data);
 +
 +        Value v2 = nm.get(k);
 +
 +        assertEquals(v, v2);
 +      }
 +    }
 +
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void testEmpty() {
 +    NativeMap nm = new NativeMap();
 +
 +    assertTrue(nm.size() == 0);
 +    assertTrue(nm.getMemoryUsed() == 0);
 +
 +    nm.delete();
 +  }
 +
 +  @Test
 +  public void testConcurrentIter() throws IOException {
 +    NativeMap nm = new NativeMap();
 +
 +    nm.put(nk(0), nv(0));
 +    nm.put(nk(1), nv(1));
 +    nm.put(nk(3), nv(3));
 +
 +    SortedKeyValueIterator<Key,Value> iter = nm.skvIterator();
 +
 +    // modify map after iter created
 +    nm.put(nk(2), nv(2));
 +
 +    assertTrue(iter.hasTop());
 +    assertEquals(iter.getTopKey(), nk(0));
 +    iter.next();
 +
 +    assertTrue(iter.hasTop());
 +    assertEquals(iter.getTopKey(), nk(1));
 +    iter.next();
 +
 +    assertTrue(iter.hasTop());
 +    assertEquals(iter.getTopKey(), nk(2));
 +    iter.next();
 +
 +    assertTrue(iter.hasTop());
 +    assertEquals(iter.getTopKey(), nk(3));
 +    iter.next();
 +
 +    assertFalse(iter.hasTop());
 +
 +    nm.delete();
 +  }
 +
 +}

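Condensed, the NativeMap lifecycle these tests depend on is: load the native library once per JVM, create a map, put/get/iterate, and always call delete() to release the off-heap memory (after which any further use throws IllegalStateException, as test5 and test7 verify). A minimal sketch using the same calls; the library path is a placeholder:

    import java.io.File;
    import java.nio.charset.StandardCharsets;
    import java.util.Collections;
    import java.util.Iterator;
    import java.util.Map.Entry;

    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.tserver.NativeMap;
    import org.apache.hadoop.io.Text;

    public class NativeMapSketch {
      public static void main(String[] args) {
        // The native library must be loaded before the first NativeMap is created.
        NativeMap.loadNativeLib(Collections.singletonList(new File("/path/to/accumulo-native")));

        NativeMap nm = new NativeMap();
        try {
          nm.put(new Key(new Text("row1")), new Value("value1".getBytes(StandardCharsets.UTF_8)));
          Value v = nm.get(new Key(new Text("row1")));
          System.out.println("lookup: " + v);

          // Iterates entries in sorted key order.
          Iterator<Entry<Key,Value>> iter = nm.iterator();
          while (iter.hasNext()) {
            Entry<Key,Value> e = iter.next();
            System.out.println(e.getKey() + " -> " + e.getValue());
          }
          System.out.println("entries=" + nm.size() + ", offHeapBytes=" + nm.getMemoryUsed());
        } finally {
          // Frees the off-heap map; any use of nm after this throws IllegalStateException.
          nm.delete();
        }
      }
    }
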
http://git-wip-us.apache.org/repos/asf/accumulo/blob/a4afd1bf/test/src/main/java/org/apache/accumulo/test/functional/PermissionsIT.java
----------------------------------------------------------------------
diff --cc test/src/main/java/org/apache/accumulo/test/functional/PermissionsIT.java
index c7fc709,0000000..7bf52ee
mode 100644,000000..100644
--- a/test/src/main/java/org/apache/accumulo/test/functional/PermissionsIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/functional/PermissionsIT.java
@@@ -1,710 -1,0 +1,710 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements.  See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.accumulo.test.functional;
 +
 +import static org.junit.Assert.assertFalse;
 +import static org.junit.Assert.assertTrue;
 +
 +import java.io.IOException;
 +import java.util.Arrays;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.Iterator;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +import java.util.Set;
 +
 +import org.apache.accumulo.cluster.ClusterUser;
 +import org.apache.accumulo.core.client.AccumuloException;
 +import org.apache.accumulo.core.client.AccumuloSecurityException;
 +import org.apache.accumulo.core.client.BatchWriter;
 +import org.apache.accumulo.core.client.BatchWriterConfig;
 +import org.apache.accumulo.core.client.ClientConfiguration;
 +import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
 +import org.apache.accumulo.core.client.Connector;
 +import org.apache.accumulo.core.client.MutationsRejectedException;
 +import org.apache.accumulo.core.client.Scanner;
 +import org.apache.accumulo.core.client.TableExistsException;
 +import org.apache.accumulo.core.client.TableNotFoundException;
 +import org.apache.accumulo.core.client.security.SecurityErrorCode;
 +import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 +import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 +import org.apache.accumulo.core.conf.Property;
 +import org.apache.accumulo.core.data.Key;
 +import org.apache.accumulo.core.data.Mutation;
 +import org.apache.accumulo.core.data.Value;
 +import org.apache.accumulo.core.metadata.MetadataTable;
 +import org.apache.accumulo.core.security.Authorizations;
 +import org.apache.accumulo.core.security.SystemPermission;
 +import org.apache.accumulo.core.security.TablePermission;
 +import org.apache.accumulo.harness.AccumuloClusterHarness;
- import org.apache.accumulo.test.categories.MiniClusterOnlyTest;
++import org.apache.accumulo.test.categories.MiniClusterOnlyTests;
 +import org.apache.hadoop.io.Text;
 +import org.junit.Assume;
 +import org.junit.Before;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +// This test verifies the default permissions so a clean instance must be used. A shared instance might
 +// not be representative of a fresh installation.
- @Category(MiniClusterOnlyTest.class)
++@Category(MiniClusterOnlyTests.class)
 +public class PermissionsIT extends AccumuloClusterHarness {
 +  private static final Logger log = LoggerFactory.getLogger(PermissionsIT.class);
 +
 +  @Override
 +  public int defaultTimeoutSeconds() {
 +    return 60;
 +  }
 +
 +  @Before
 +  public void limitToMini() throws Exception {
 +    Assume.assumeTrue(ClusterType.MINI == getClusterType());
 +    Connector c = getConnector();
 +    Set<String> users = c.securityOperations().listLocalUsers();
 +    ClusterUser user = getUser(0);
 +    if (users.contains(user.getPrincipal())) {
 +      c.securityOperations().dropLocalUser(user.getPrincipal());
 +    }
 +  }
 +
 +  private void loginAs(ClusterUser user) throws IOException {
 +    // Force a re-login as the provided user
 +    user.getToken();
 +  }
 +
 +  @Test
 +  public void systemPermissionsTest() throws Exception {
 +    ClusterUser testUser = getUser(0), rootUser = getAdminUser();
 +
 +    // verify that the test is being run by root
 +    Connector c = getConnector();
 +    verifyHasOnlyTheseSystemPermissions(c, c.whoami(), SystemPermission.values());
 +
 +    // create the test user
 +    String principal = testUser.getPrincipal();
 +    AuthenticationToken token = testUser.getToken();
 +    PasswordToken passwordToken = null;
 +    if (token instanceof PasswordToken) {
 +      passwordToken = (PasswordToken) token;
 +    }
 +    loginAs(rootUser);
 +    c.securityOperations().createLocalUser(principal, passwordToken);
 +    loginAs(testUser);
 +    Connector test_user_conn = c.getInstance().getConnector(principal, token);
 +    loginAs(rootUser);
 +    verifyHasNoSystemPermissions(c, principal, SystemPermission.values());
 +
 +    // test each permission
 +    for (SystemPermission perm : SystemPermission.values()) {
 +      log.debug("Verifying the " + perm + " permission");
 +
 +      // test permission before and after granting it
 +      String tableNamePrefix = getUniqueNames(1)[0];
 +      testMissingSystemPermission(tableNamePrefix, c, rootUser, test_user_conn, testUser, perm);
 +      loginAs(rootUser);
 +      c.securityOperations().grantSystemPermission(principal, perm);
 +      verifyHasOnlyTheseSystemPermissions(c, principal, perm);
 +      testGrantedSystemPermission(tableNamePrefix, c, rootUser, test_user_conn, testUser, perm);
 +      loginAs(rootUser);
 +      c.securityOperations().revokeSystemPermission(principal, perm);
 +      verifyHasNoSystemPermissions(c, principal, perm);
 +    }
 +  }
 +
 +  static Map<String,String> map(Iterable<Entry<String,String>> i) {
 +    Map<String,String> result = new HashMap<>();
 +    for (Entry<String,String> e : i) {
 +      result.put(e.getKey(), e.getValue());
 +    }
 +    return result;
 +  }
 +
 +  private void testMissingSystemPermission(String tableNamePrefix, Connector root_conn, ClusterUser rootUser, Connector test_user_conn, ClusterUser testUser,
 +      SystemPermission perm) throws Exception {
 +    String tableName, user, password = "password", namespace;
 +    boolean passwordBased = testUser.getPassword() != null;
 +    log.debug("Confirming that the lack of the " + perm + " permission properly restricts the user");
 +
 +    // test permission prior to granting it
 +    switch (perm) {
 +      case CREATE_TABLE:
 +        tableName = tableNamePrefix + "__CREATE_TABLE_WITHOUT_PERM_TEST__";
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.tableOperations().create(tableName);
 +          throw new IllegalStateException("Should NOT be able to create a table");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED || root_conn.tableOperations().list().contains(tableName))
 +            throw e;
 +        }
 +        break;
 +      case DROP_TABLE:
 +        tableName = tableNamePrefix + "__DROP_TABLE_WITHOUT_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.tableOperations().create(tableName);
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.tableOperations().delete(tableName);
 +          throw new IllegalStateException("Should NOT be able to delete a 
table");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED 
|| !root_conn.tableOperations().list().contains(tableName))
 +            throw e;
 +        }
 +        break;
 +      case ALTER_TABLE:
 +        tableName = tableNamePrefix + "__ALTER_TABLE_WITHOUT_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.tableOperations().create(tableName);
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.tableOperations().setProperty(tableName, Property.TABLE_BLOOM_ERRORRATE.getKey(), "003.14159%");
 +          throw new IllegalStateException("Should NOT be able to set a table property");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED
 +              || map(root_conn.tableOperations().getProperties(tableName)).get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +            throw e;
 +        }
 +        loginAs(rootUser);
 +        root_conn.tableOperations().setProperty(tableName, Property.TABLE_BLOOM_ERRORRATE.getKey(), "003.14159%");
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.tableOperations().removeProperty(tableName, Property.TABLE_BLOOM_ERRORRATE.getKey());
 +          throw new IllegalStateException("Should NOT be able to remove a table property");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED
 +              || !map(root_conn.tableOperations().getProperties(tableName)).get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +            throw e;
 +        }
 +        String table2 = tableName + "2";
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.tableOperations().rename(tableName, table2);
 +          throw new IllegalStateException("Should NOT be able to rename a table");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED || !root_conn.tableOperations().list().contains(tableName)
 +              || root_conn.tableOperations().list().contains(table2))
 +            throw e;
 +        }
 +        break;
 +      case CREATE_USER:
 +        user = "__CREATE_USER_WITHOUT_PERM_TEST__";
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.securityOperations().createLocalUser(user, (passwordBased ? new PasswordToken(password) : null));
 +          throw new IllegalStateException("Should NOT be able to create a user");
 +        } catch (AccumuloSecurityException e) {
 +          AuthenticationToken userToken = testUser.getToken();
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED
 +              || (userToken instanceof PasswordToken && root_conn.securityOperations().authenticateUser(user, userToken)))
 +            throw e;
 +        }
 +        break;
 +      case DROP_USER:
 +        user = "__DROP_USER_WITHOUT_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.securityOperations().createLocalUser(user, (passwordBased ? new PasswordToken(password) : null));
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.securityOperations().dropLocalUser(user);
 +          throw new IllegalStateException("Should NOT be able to delete a user");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED || !root_conn.securityOperations().listLocalUsers().contains(user)) {
 +            log.info("Failed to authenticate as " + user);
 +            throw e;
 +          }
 +        }
 +        break;
 +      case ALTER_USER:
 +        user = "__ALTER_USER_WITHOUT_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.securityOperations().createLocalUser(user, (passwordBased ? new PasswordToken(password) : null));
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.securityOperations().changeUserAuthorizations(user, new Authorizations("A", "B"));
 +          throw new IllegalStateException("Should NOT be able to alter a user");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED || !root_conn.securityOperations().getUserAuthorizations(user).isEmpty())
 +            throw e;
 +        }
 +        break;
 +      case SYSTEM:
 +        // test for system permission would go here
 +        break;
 +      case CREATE_NAMESPACE:
 +        namespace = "__CREATE_NAMESPACE_WITHOUT_PERM_TEST__";
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.namespaceOperations().create(namespace);
 +          throw new IllegalStateException("Should NOT be able to create a 
namespace");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED 
|| root_conn.namespaceOperations().list().contains(namespace))
 +            throw e;
 +        }
 +        break;
 +      case DROP_NAMESPACE:
 +        namespace = "__DROP_NAMESPACE_WITHOUT_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.namespaceOperations().create(namespace);
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.namespaceOperations().delete(namespace);
 +          throw new IllegalStateException("Should NOT be able to delete a 
namespace");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED 
|| !root_conn.namespaceOperations().list().contains(namespace))
 +            throw e;
 +        }
 +        break;
 +      case ALTER_NAMESPACE:
 +        namespace = "__ALTER_NAMESPACE_WITHOUT_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.namespaceOperations().create(namespace);
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.namespaceOperations().setProperty(namespace, Property.TABLE_BLOOM_ERRORRATE.getKey(), "003.14159%");
 +          throw new IllegalStateException("Should NOT be able to set a namespace property");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED
 +              || map(root_conn.namespaceOperations().getProperties(namespace)).get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +            throw e;
 +        }
 +        loginAs(rootUser);
 +        root_conn.namespaceOperations().setProperty(namespace, Property.TABLE_BLOOM_ERRORRATE.getKey(), "003.14159%");
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.namespaceOperations().removeProperty(namespace, Property.TABLE_BLOOM_ERRORRATE.getKey());
 +          throw new IllegalStateException("Should NOT be able to remove a namespace property");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED
 +              || !map(root_conn.namespaceOperations().getProperties(namespace)).get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +            throw e;
 +        }
 +        String namespace2 = namespace + "2";
 +        try {
 +          loginAs(testUser);
 +          test_user_conn.namespaceOperations().rename(namespace, namespace2);
 +          throw new IllegalStateException("Should NOT be able to rename a namespace");
 +        } catch (AccumuloSecurityException e) {
 +          loginAs(rootUser);
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED || !root_conn.namespaceOperations().list().contains(namespace)
 +              || root_conn.namespaceOperations().list().contains(namespace2))
 +            throw e;
 +        }
 +        break;
 +      case OBTAIN_DELEGATION_TOKEN:
 +        ClientConfiguration clientConf = cluster.getClientConfig();
 +        if (clientConf.getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
 +          // TODO Try to obtain a delegation token without the permission
 +        }
 +        break;
 +      case GRANT:
 +        loginAs(testUser);
 +        try {
 +          test_user_conn.securityOperations().grantSystemPermission(testUser.getPrincipal(), SystemPermission.GRANT);
 +          throw new IllegalStateException("Should NOT be able to grant System.GRANT to yourself");
 +        } catch (AccumuloSecurityException e) {
 +          // Expected
 +          loginAs(rootUser);
 +          assertFalse(root_conn.securityOperations().hasSystemPermission(testUser.getPrincipal(), SystemPermission.GRANT));
 +        }
 +        break;
 +      default:
 +        throw new IllegalArgumentException("Unrecognized System Permission: " 
+ perm);
 +    }
 +  }
 +
 +  private void testGrantedSystemPermission(String tableNamePrefix, Connector root_conn, ClusterUser rootUser, Connector test_user_conn, ClusterUser testUser,
 +      SystemPermission perm) throws Exception {
 +    String tableName, user, password = "password", namespace;
 +    boolean passwordBased = testUser.getPassword() != null;
 +    log.debug("Confirming that the presence of the " + perm + " permission properly permits the user");
 +
 +    // test permission after granting it
 +    switch (perm) {
 +      case CREATE_TABLE:
 +        tableName = tableNamePrefix + "__CREATE_TABLE_WITH_PERM_TEST__";
 +        loginAs(testUser);
 +        test_user_conn.tableOperations().create(tableName);
 +        loginAs(rootUser);
 +        if (!root_conn.tableOperations().list().contains(tableName))
 +          throw new IllegalStateException("Should be able to create a table");
 +        break;
 +      case DROP_TABLE:
 +        tableName = tableNamePrefix + "__DROP_TABLE_WITH_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.tableOperations().create(tableName);
 +        loginAs(testUser);
 +        test_user_conn.tableOperations().delete(tableName);
 +        loginAs(rootUser);
 +        if (root_conn.tableOperations().list().contains(tableName))
 +          throw new IllegalStateException("Should be able to delete a table");
 +        break;
 +      case ALTER_TABLE:
 +        tableName = tableNamePrefix + "__ALTER_TABLE_WITH_PERM_TEST__";
 +        String table2 = tableName + "2";
 +        loginAs(rootUser);
 +        root_conn.tableOperations().create(tableName);
 +        loginAs(testUser);
 +        test_user_conn.tableOperations().setProperty(tableName, Property.TABLE_BLOOM_ERRORRATE.getKey(), "003.14159%");
 +        loginAs(rootUser);
 +        Map<String,String> properties = map(root_conn.tableOperations().getProperties(tableName));
 +        if (!properties.get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +          throw new IllegalStateException("Should be able to set a table property");
 +        loginAs(testUser);
 +        test_user_conn.tableOperations().removeProperty(tableName, Property.TABLE_BLOOM_ERRORRATE.getKey());
 +        loginAs(rootUser);
 +        properties = map(root_conn.tableOperations().getProperties(tableName));
 +        if (properties.get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +          throw new IllegalStateException("Should be able to remove a table property");
 +        loginAs(testUser);
 +        test_user_conn.tableOperations().rename(tableName, table2);
 +        loginAs(rootUser);
 +        if (root_conn.tableOperations().list().contains(tableName) || !root_conn.tableOperations().list().contains(table2))
 +          throw new IllegalStateException("Should be able to rename a table");
 +        break;
 +      case CREATE_USER:
 +        user = "__CREATE_USER_WITH_PERM_TEST__";
 +        loginAs(testUser);
 +        test_user_conn.securityOperations().createLocalUser(user, (passwordBased ? new PasswordToken(password) : null));
 +        loginAs(rootUser);
 +        if (passwordBased && !root_conn.securityOperations().authenticateUser(user, new PasswordToken(password)))
 +          throw new IllegalStateException("Should be able to create a user");
 +        break;
 +      case DROP_USER:
 +        user = "__DROP_USER_WITH_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.securityOperations().createLocalUser(user, (passwordBased ? new PasswordToken(password) : null));
 +        loginAs(testUser);
 +        test_user_conn.securityOperations().dropLocalUser(user);
 +        loginAs(rootUser);
 +        if (passwordBased && root_conn.securityOperations().authenticateUser(user, new PasswordToken(password)))
 +          throw new IllegalStateException("Should be able to delete a user");
 +        break;
 +      case ALTER_USER:
 +        user = "__ALTER_USER_WITH_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.securityOperations().createLocalUser(user, (passwordBased ? new PasswordToken(password) : null));
 +        loginAs(testUser);
 +        test_user_conn.securityOperations().changeUserAuthorizations(user, new Authorizations("A", "B"));
 +        loginAs(rootUser);
 +        if (root_conn.securityOperations().getUserAuthorizations(user).isEmpty())
 +          throw new IllegalStateException("Should be able to alter a user");
 +        break;
 +      case SYSTEM:
 +        // test for system permission would go here
 +        break;
 +      case CREATE_NAMESPACE:
 +        namespace = "__CREATE_NAMESPACE_WITH_PERM_TEST__";
 +        loginAs(testUser);
 +        test_user_conn.namespaceOperations().create(namespace);
 +        loginAs(rootUser);
 +        if (!root_conn.namespaceOperations().list().contains(namespace))
 +          throw new IllegalStateException("Should be able to create a 
namespace");
 +        break;
 +      case DROP_NAMESPACE:
 +        namespace = "__DROP_NAMESPACE_WITH_PERM_TEST__";
 +        loginAs(rootUser);
 +        root_conn.namespaceOperations().create(namespace);
 +        loginAs(testUser);
 +        test_user_conn.namespaceOperations().delete(namespace);
 +        loginAs(rootUser);
 +        if (root_conn.namespaceOperations().list().contains(namespace))
 +          throw new IllegalStateException("Should be able to delete a 
namespace");
 +        break;
 +      case ALTER_NAMESPACE:
 +        namespace = "__ALTER_NAMESPACE_WITH_PERM_TEST__";
 +        String namespace2 = namespace + "2";
 +        loginAs(rootUser);
 +        root_conn.namespaceOperations().create(namespace);
 +        loginAs(testUser);
 +        test_user_conn.namespaceOperations().setProperty(namespace, Property.TABLE_BLOOM_ERRORRATE.getKey(), "003.14159%");
 +        loginAs(rootUser);
 +        Map<String,String> namespaceProperties = map(root_conn.namespaceOperations().getProperties(namespace));
 +        if (!namespaceProperties.get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +          throw new IllegalStateException("Should be able to set a namespace property");
 +        loginAs(testUser);
 +        test_user_conn.namespaceOperations().removeProperty(namespace, Property.TABLE_BLOOM_ERRORRATE.getKey());
 +        loginAs(rootUser);
 +        namespaceProperties = map(root_conn.namespaceOperations().getProperties(namespace));
 +        if (namespaceProperties.get(Property.TABLE_BLOOM_ERRORRATE.getKey()).equals("003.14159%"))
 +          throw new IllegalStateException("Should be able to remove a namespace property");
 +        loginAs(testUser);
 +        test_user_conn.namespaceOperations().rename(namespace, namespace2);
 +        loginAs(rootUser);
 +        if (root_conn.namespaceOperations().list().contains(namespace) || !root_conn.namespaceOperations().list().contains(namespace2))
 +          throw new IllegalStateException("Should be able to rename a namespace");
 +        break;
 +      case OBTAIN_DELEGATION_TOKEN:
 +        ClientConfiguration clientConf = cluster.getClientConfig();
 +        if (clientConf.getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
 +          // TODO Try to obtain a delegation token with the permission
 +        }
 +        break;
 +      case GRANT:
 +        loginAs(rootUser);
 +        root_conn.securityOperations().grantSystemPermission(testUser.getPrincipal(), SystemPermission.GRANT);
 +        loginAs(testUser);
 +        test_user_conn.securityOperations().grantSystemPermission(testUser.getPrincipal(), SystemPermission.CREATE_TABLE);
 +        loginAs(rootUser);
 +        assertTrue("Test user should have CREATE_TABLE",
 +            root_conn.securityOperations().hasSystemPermission(testUser.getPrincipal(), SystemPermission.CREATE_TABLE));
 +        assertTrue("Test user should have GRANT", root_conn.securityOperations().hasSystemPermission(testUser.getPrincipal(), SystemPermission.GRANT));
 +        root_conn.securityOperations().revokeSystemPermission(testUser.getPrincipal(), SystemPermission.CREATE_TABLE);
 +        break;
 +      default:
 +        throw new IllegalArgumentException("Unrecognized System Permission: " + perm);
 +    }
 +  }
 +
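 +  // Asserts that the user holds exactly the given system permissions and no others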
 +  private void verifyHasOnlyTheseSystemPermissions(Connector root_conn, String user, SystemPermission... perms) throws AccumuloException,
 +      AccumuloSecurityException {
 +    List<SystemPermission> permList = Arrays.asList(perms);
 +    for (SystemPermission p : SystemPermission.values()) {
 +      if (permList.contains(p)) {
 +        // should have these
 +        if (!root_conn.securityOperations().hasSystemPermission(user, p))
 +          throw new IllegalStateException(user + " SHOULD have system 
permission " + p);
 +      } else {
 +        // should not have these
 +        if (root_conn.securityOperations().hasSystemPermission(user, p))
 +          throw new IllegalStateException(user + " SHOULD NOT have system 
permission " + p);
 +      }
 +    }
 +  }
 +
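 +  // Asserts that the user holds none of the given system permissions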
 +  private void verifyHasNoSystemPermissions(Connector root_conn, String user, SystemPermission... perms) throws AccumuloException, AccumuloSecurityException {
 +    for (SystemPermission p : perms)
 +      if (root_conn.securityOperations().hasSystemPermission(user, p))
 +        throw new IllegalStateException(user + " SHOULD NOT have system permission " + p);
 +  }
 +
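 +  // Walks every TablePermission against a scratch table: denied before the grant, permitted after it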
 +  @Test
 +  public void tablePermissionTest() throws Exception {
 +    // create the test user
 +    ClusterUser testUser = getUser(0), rootUser = getAdminUser();
 +
 +    String principal = testUser.getPrincipal();
 +    AuthenticationToken token = testUser.getToken();
 +    PasswordToken passwordToken = null;
 +    if (token instanceof PasswordToken) {
 +      passwordToken = (PasswordToken) token;
 +    }
 +    loginAs(rootUser);
 +    Connector c = getConnector();
 +    c.securityOperations().createLocalUser(principal, passwordToken);
 +    loginAs(testUser);
 +    Connector test_user_conn = c.getInstance().getConnector(principal, token);
 +
 +    // check for read-only access to metadata table
 +    loginAs(rootUser);
 +    verifyHasOnlyTheseTablePermissions(c, c.whoami(), MetadataTable.NAME, TablePermission.READ, TablePermission.ALTER_TABLE);
 +    verifyHasOnlyTheseTablePermissions(c, principal, MetadataTable.NAME, TablePermission.READ);
 +    String tableName = getUniqueNames(1)[0] + "__TABLE_PERMISSION_TEST__";
 +
 +    // test each permission
 +    for (TablePermission perm : TablePermission.values()) {
 +      log.debug("Verifying the " + perm + " permission");
 +
 +      // test permission before and after granting it
 +      createTestTable(c, principal, tableName);
 +      loginAs(testUser);
 +      testMissingTablePermission(test_user_conn, testUser, perm, tableName);
 +      loginAs(rootUser);
 +      c.securityOperations().grantTablePermission(principal, tableName, perm);
 +      verifyHasOnlyTheseTablePermissions(c, principal, tableName, perm);
 +      loginAs(testUser);
 +      testGrantedTablePermission(test_user_conn, testUser, perm, tableName);
 +
 +      loginAs(rootUser);
 +      createTestTable(c, principal, tableName);
 +      c.securityOperations().revokeTablePermission(principal, tableName, perm);
 +      verifyHasNoTablePermissions(c, principal, tableName, perm);
 +    }
 +  }
 +
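 +  // (Re)creates the test table with a single seed entry; the creator gets all table permissions, the test user none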
 +  private void createTestTable(Connector c, String testUser, String tableName) throws Exception, MutationsRejectedException {
 +    if (!c.tableOperations().exists(tableName)) {
 +      // create the test table
 +      c.tableOperations().create(tableName);
 +      // put in some initial data
 +      BatchWriter writer = c.createBatchWriter(tableName, new BatchWriterConfig());
 +      Mutation m = new Mutation(new Text("row"));
 +      m.put(new Text("cf"), new Text("cq"), new Value("val".getBytes()));
 +      writer.addMutation(m);
 +      writer.close();
 +
 +      // verify proper permissions for creator and test user
 +      verifyHasOnlyTheseTablePermissions(c, c.whoami(), tableName, TablePermission.values());
 +      verifyHasNoTablePermissions(c, testUser, tableName, TablePermission.values());
 +
 +    }
 +  }
 +
 +  private void testMissingTablePermission(Connector test_user_conn, ClusterUser testUser, TablePermission perm, String tableName) throws Exception {
 +    Scanner scanner;
 +    BatchWriter writer;
 +    Mutation m;
 +    log.debug("Confirming that the lack of the " + perm + " permission 
properly restricts the user");
 +
 +    // test permission prior to granting it
 +    switch (perm) {
 +      case READ:
 +        try {
 +          scanner = test_user_conn.createScanner(tableName, Authorizations.EMPTY);
 +          int i = 0;
 +          for (Entry<Key,Value> entry : scanner)
 +            i += 1 + entry.getKey().getRowData().length();
 +          if (i != 0)
 +            throw new IllegalStateException("Should NOT be able to read from the table");
 +        } catch (RuntimeException e) {
 +          AccumuloSecurityException se = (AccumuloSecurityException) e.getCause();
 +          if (se.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED)
 +            throw se;
 +        }
 +        break;
 +      case WRITE:
 +        try {
 +          writer = test_user_conn.createBatchWriter(tableName, new BatchWriterConfig());
 +          m = new Mutation(new Text("row"));
 +          m.put(new Text("a"), new Text("b"), new Value("c".getBytes()));
 +          writer.addMutation(m);
 +          try {
 +            writer.close();
 +          } catch (MutationsRejectedException e1) {
 +            if (e1.getSecurityErrorCodes().size() > 0)
 +              throw new AccumuloSecurityException(test_user_conn.whoami(), org.apache.accumulo.core.client.impl.thrift.SecurityErrorCode.PERMISSION_DENIED, e1);
 +          }
 +          throw new IllegalStateException("Should NOT be able to write to a table");
 +        } catch (AccumuloSecurityException e) {
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED)
 +            throw e;
 +        }
 +        break;
 +      case BULK_IMPORT:
 +        // test for bulk import permission would go here
 +        break;
 +      case ALTER_TABLE:
 +        Map<String,Set<Text>> groups = new HashMap<>();
 +        groups.put("tgroup", new HashSet<>(Arrays.asList(new Text("t1"), new 
Text("t2"))));
 +        try {
 +          test_user_conn.tableOperations().setLocalityGroups(tableName, 
groups);
 +          throw new IllegalStateException("User should not be able to set 
locality groups");
 +        } catch (AccumuloSecurityException e) {
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED)
 +            throw e;
 +        }
 +        break;
 +      case DROP_TABLE:
 +        try {
 +          test_user_conn.tableOperations().delete(tableName);
 +          throw new IllegalStateException("User should not be able delete the 
table");
 +        } catch (AccumuloSecurityException e) {
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED)
 +            throw e;
 +        }
 +        break;
 +      case GRANT:
 +        try {
 +          test_user_conn.securityOperations().grantTablePermission(getAdminPrincipal(), tableName, TablePermission.GRANT);
 +          throw new IllegalStateException("User should not be able to grant permissions");
 +        } catch (AccumuloSecurityException e) {
 +          if (e.getSecurityErrorCode() != SecurityErrorCode.PERMISSION_DENIED)
 +            throw e;
 +        }
 +        break;
 +      default:
 +        throw new IllegalArgumentException("Unrecognized table Permission: " 
+ perm);
 +    }
 +  }
 +
 +  private void testGrantedTablePermission(Connector test_user_conn, ClusterUser normalUser, TablePermission perm, String tableName) throws AccumuloException,
 +      TableExistsException, AccumuloSecurityException, TableNotFoundException, MutationsRejectedException {
 +    Scanner scanner;
 +    BatchWriter writer;
 +    Mutation m;
 +    log.debug("Confirming that the presence of the " + perm + " permission 
properly permits the user");
 +
 +    // test permission after granting it
 +    switch (perm) {
 +      case READ:
 +        scanner = test_user_conn.createScanner(tableName, Authorizations.EMPTY);
 +        Iterator<Entry<Key,Value>> iter = scanner.iterator();
 +        while (iter.hasNext())
 +          iter.next();
 +        break;
 +      case WRITE:
 +        writer = test_user_conn.createBatchWriter(tableName, new BatchWriterConfig());
 +        m = new Mutation(new Text("row"));
 +        m.put(new Text("a"), new Text("b"), new Value("c".getBytes()));
 +        writer.addMutation(m);
 +        writer.close();
 +        break;
 +      case BULK_IMPORT:
 +        // test for bulk import permission would go here
 +        break;
 +      case ALTER_TABLE:
 +        Map<String,Set<Text>> groups = new HashMap<>();
 +        groups.put("tgroup", new HashSet<>(Arrays.asList(new Text("t1"), new 
Text("t2"))));
 +        break;
 +      case DROP_TABLE:
 +        test_user_conn.tableOperations().delete(tableName);
 +        break;
 +      case GRANT:
 +        test_user_conn.securityOperations().grantTablePermission(getAdminPrincipal(), tableName, TablePermission.GRANT);
 +        break;
 +      default:
 +        throw new IllegalArgumentException("Unrecognized table Permission: " 
+ perm);
 +    }
 +  }
 +
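 +  // Asserts that the user holds exactly the given permissions on the table and no others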
 +  private void verifyHasOnlyTheseTablePermissions(Connector root_conn, String user, String table, TablePermission... perms) throws AccumuloException,
 +      AccumuloSecurityException {
 +    List<TablePermission> permList = Arrays.asList(perms);
 +    for (TablePermission p : TablePermission.values()) {
 +      if (permList.contains(p)) {
 +        // should have these
 +        if (!root_conn.securityOperations().hasTablePermission(user, table, p))
 +          throw new IllegalStateException(user + " SHOULD have table permission " + p + " for table " + table);
 +      } else {
 +        // should not have these
 +        if (root_conn.securityOperations().hasTablePermission(user, table, p))
 +          throw new IllegalStateException(user + " SHOULD NOT have table 
permission " + p + " for table " + table);
 +      }
 +    }
 +  }
 +
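 +  // Asserts that the user holds none of the given permissions on the table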
 +  private void verifyHasNoTablePermissions(Connector root_conn, String user, String table, TablePermission... perms) throws AccumuloException,
 +      AccumuloSecurityException {
 +    for (TablePermission p : perms)
 +      if (root_conn.securityOperations().hasTablePermission(user, table, p))
 +        throw new IllegalStateException(user + " SHOULD NOT have table 
permission " + p + " for table " + table);
 +  }
 +}
