Updated Branches:
  refs/heads/master 7f5bd5f5a -> 6569b2c94
ACCUMULO-2298 chunking up ExamplesIT


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/be152d85
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/be152d85
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/be152d85

Branch: refs/heads/master
Commit: be152d8505e24591c3dac484787b2dfc9f6c0058
Parents: b178784
Author: John Vines <vi...@apache.org>
Authored: Fri Jan 31 15:17:08 2014 -0500
Committer: John Vines <vi...@apache.org>
Committed: Fri Jan 31 15:17:08 2014 -0500

----------------------------------------------------------------------
 .../accumulo/test/functional/AbstractMacIT.java |   2 +-
 .../accumulo/test/functional/ExamplesIT.java    | 205 +++++++++++++------
 2 files changed, 138 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/be152d85/test/src/test/java/org/apache/accumulo/test/functional/AbstractMacIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/functional/AbstractMacIT.java b/test/src/test/java/org/apache/accumulo/test/functional/AbstractMacIT.java
index ecfcf59..aab3edb 100644
--- a/test/src/test/java/org/apache/accumulo/test/functional/AbstractMacIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/functional/AbstractMacIT.java
@@ -65,7 +65,7 @@ public abstract class AbstractMacIT {
     return testDir;
   }
 
-  protected File createTestDir(String name) {
+  protected static File createTestDir(String name) {
     File baseDir = new File(System.getProperty("user.dir") + "/target/mini-tests");
     baseDir.mkdirs();
     if (name == null)

http://git-wip-us.apache.org/repos/asf/accumulo/blob/be152d85/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java b/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
index 559ae67..6ac0cd3 100644
--- a/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/functional/ExamplesIT.java
@@ -29,6 +29,8 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.accumulo.core.cli.BatchWriterOpts;
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchScanner;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
@@ -71,6 +73,7 @@ import org.apache.accumulo.examples.simple.shard.Index;
 import org.apache.accumulo.examples.simple.shard.Query;
 import org.apache.accumulo.examples.simple.shard.Reverse;
 import org.apache.accumulo.minicluster.MemoryUnit;
+import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
 import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl.LogWriter;
 import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
 import org.apache.accumulo.server.util.Admin;
@@ -79,37 +82,63 @@ import org.apache.accumulo.tracer.TraceServer;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
-public class ExamplesIT extends ConfigurableMacIT {
-
+public class ExamplesIT extends AbstractMacIT {
   BatchWriterOpts bwOpts = new BatchWriterOpts();
 
-  @Override
-  public void configure(MiniAccumuloConfigImpl cfg) {
+  static Connector c;
+  static String instance;
+  static String keepers;
+  static String user = "root";
+  static String passwd;
+  String visibility = "A|B";
+  static String auths = "A,B";
+  BatchWriterConfig bwc = new BatchWriterConfig();
+  BatchWriter bw;
+  IteratorSetting is;
+  static String dir;
+  static FileSystem fs;
+  private static MiniAccumuloClusterImpl cluster;
+
+  @BeforeClass
+  public static void before() throws Exception {
+    MiniAccumuloConfigImpl cfg = new MiniAccumuloConfigImpl(createTestDir(ExamplesIT.class.getName()), AbstractMacIT.ROOT_PASSWORD);
+    cfg.setNativeLibPaths(NativeMapIT.nativeMapLocation().getAbsolutePath());
     cfg.setDefaultMemory(cfg.getDefaultMemory() * 2, MemoryUnit.BYTE);
+
+    cfg.setProperty(Property.TSERV_NATIVEMAP_ENABLED, Boolean.TRUE.toString());
+    configureForEnvironment(cfg, ExamplesIT.class, createSharedTestDir(ExamplesIT.class.getName() + "-ssl"));
+    cluster = new MiniAccumuloClusterImpl(cfg);
+    cluster.start();
+
+    passwd = AbstractMacIT.ROOT_PASSWORD;
+    c = cluster.getConnector("root", ROOT_PASSWORD);
+    fs = FileSystem.get(CachedConfiguration.getInstance());
+    instance = c.getInstance().getInstanceName();
+    keepers = c.getInstance().getZooKeepers();
+    dir = cluster.getConfig().getDir().getAbsolutePath();
+
+    c.securityOperations().changeUserAuthorizations(user, new Authorizations(auths.split(",")));
   }
 
-  @Test(timeout = 15 * 60 * 1000)
-  public void test() throws Exception {
-    Connector c = getConnector();
-    String instance = c.getInstance().getInstanceName();
-    String keepers = c.getInstance().getZooKeepers();
-    String user = "root";
-    String passwd = ROOT_PASSWORD;
-    String visibility = "A|B";
-    String auths = "A,B";
-    BatchWriterConfig bwc = new BatchWriterConfig();
-    BatchWriter bw;
-    IteratorSetting is;
-    String dir = cluster.getConfig().getDir().getAbsolutePath();
-    FileSystem fs = FileSystem.get(CachedConfiguration.getInstance());
+  @Override
+  public Connector getConnector() throws AccumuloException, AccumuloSecurityException {
+    return cluster.getConnector("root", ROOT_PASSWORD);
+  }
+
+  @Override
+  public String rootPath() {
+    return cluster.getConfig().getDir().getAbsolutePath();
+  }
+  @Test(timeout = 15 * 1000)
+  public void testTrace() throws Exception {
     Process trace = exec(TraceServer.class);
     while (!c.tableOperations().exists("trace"))
       UtilWaitThread.sleep(500);
-
-    log.info("trace example");
     Process p = goodExec(TracingExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-C", "-D", "-c");
     for (LogWriter writer : cluster.getLogWriters()) {
       writer.flush();
     }
@@ -126,21 +155,28 @@ public class ExamplesIT extends ConfigurableMacIT {
     result = FunctionalTestUtils.readAll(cluster, TraceDumpExample.class, p);
     assertTrue(result.contains("myHost@myApp"));
     trace.destroy();
+  }
 
-    log.info("testing dirlist example (a little)");
-    c.securityOperations().changeUserAuthorizations(user, new Authorizations(auths.split(",")));
+  private Process exec(Class<TraceServer> class1) throws IOException {
+    return cluster.exec(class1);
+  }
+
+  @Test(timeout = 45 * 1000)
+  public void testDirList() throws Exception {
     goodExec(Ingest.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--dirTable", "dirTable", "--indexTable", "indexTable", "--dataTable",
         "dataTable", "--vis", visibility, "--chunkSize", 10000 + "", cluster.getConfig().getDir().getAbsolutePath());
-    p = goodExec(QueryUtil.class, "-i", instance, "-z", keepers, "-p", passwd, "-u", user, "-t", "indexTable", "--auths", auths, "--search", "--path",
+    Process p = goodExec(QueryUtil.class, "-i", instance, "-z", keepers, "-p", passwd, "-u", user, "-t", "indexTable", "--auths", auths, "--search", "--path",
         "accumulo-site.xml");
     for (LogWriter writer : cluster.getLogWriters()) {
       writer.flush();
     }
-    result = FunctionalTestUtils.readAll(cluster, QueryUtil.class, p);
+    String result = FunctionalTestUtils.readAll(cluster, QueryUtil.class, p);
     System.out.println("result " + result);
     assertTrue(result.contains("accumulo-site.xml"));
+  }
 
-    log.info("Testing ageoff filtering");
+  @Test(timeout = 5 * 1000)
+  public void testAgeoffFilter() throws Exception {
     c.tableOperations().create("filtertest");
     is = new IteratorSetting(10, AgeOffFilter.class);
     AgeOffFilter.setTTL(is, 1000L);
@@ -150,35 +186,39 @@ public class ExamplesIT extends ConfigurableMacIT {
     m.put("a", "b", "c");
     bw.addMutation(m);
     UtilWaitThread.sleep(1000);
-    count = 0;
+    int count = 0;
     for (@SuppressWarnings("unused")
     Entry<Key,Value> line : c.createScanner("filtertest", Authorizations.EMPTY))
       count++;
     assertEquals(0, count);
+  }
 
-    log.info("Testing bloom filters are fast for missing data");
+  @Test(timeout = 20 * 1000)
+  public void testBloomFilters() throws Exception {
     c.tableOperations().create("bloom_test");
     c.tableOperations().setProperty("bloom_test", Property.TABLE_BLOOM_ENABLED.getKey(), "true");
-    goodExec(RandomBatchWriter.class, "--seed", "7", "-i", instance, "-z", keepers, "-u", user, "-p", ROOT_PASSWORD, "--num", "100000", "--min", "0", "--max",
+    goodExec(RandomBatchWriter.class, "--seed", "7", "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--num", "100000", "--min", "0", "--max",
         "1000000000", "--size", "50", "--batchMemory", "2M", "--batchLatency", "60s", "--batchThreads", "3", "-t", "bloom_test");
     c.tableOperations().flush("bloom_test", null, null, true);
     long diff = 0, diff2 = 0;
     // try the speed test a couple times in case the system is loaded with other tests
     for (int i = 0; i < 2; i++) {
       long now = System.currentTimeMillis();
-      goodExec(RandomBatchScanner.class, "--seed", "7", "-i", instance, "-z", keepers, "-u", user, "-p", ROOT_PASSWORD, "--num", "10000", "--min", "0",
-          "--max", "1000000000", "--size", "50", "--scanThreads", "4", "-t", "bloom_test");
+      goodExec(RandomBatchScanner.class, "--seed", "7", "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--num", "10000", "--min", "0", "--max",
+          "1000000000", "--size", "50", "--scanThreads", "4", "-t", "bloom_test");
       diff = System.currentTimeMillis() - now;
       now = System.currentTimeMillis();
-      expectExec(1, RandomBatchScanner.class, "--seed", "8", "-i", instance, "-z", keepers, "-u", user, "-p", ROOT_PASSWORD, "--num", "10000", "--min", "0",
-          "--max", "1000000000", "--size", "50", "--scanThreads", "4", "-t", "bloom_test");
+      expectExec(1, RandomBatchScanner.class, "--seed", "8", "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--num", "10000", "--min", "0", "--max",
+          "1000000000", "--size", "50", "--scanThreads", "4", "-t", "bloom_test");
       diff2 = System.currentTimeMillis() - now;
       if (diff2 < diff)
         break;
     }
     assertTrue(diff2 < diff);
+  }
 
-    log.info("Creating a sharded index of the accumulo java files");
+  @Test(timeout = 2 * 60 * 1000)
+  public void testShardedIndex() throws Exception {
     c.tableOperations().create("shard");
     c.tableOperations().create("doc2term");
     bw = c.createBatchWriter("shard", bwc);
@@ -200,8 +240,10 @@ public class ExamplesIT extends ConfigurableMacIT {
     // run some queries
     goodExec(ContinuousQuery.class, "-i", instance, "-z", keepers, "--shardTable", "shard", "--doc2Term", "doc2Term", "-u", "root", "-p", passwd, "--terms",
         "5", "--count", "1000");
+  }
 
-    log.info("Testing MaxMutation constraint");
+  @Test(timeout = 2 * 1000)
+  public void testMaxMutationConstraint() throws Exception {
     c.tableOperations().create("test_ingest");
     c.tableOperations().addConstraint("test_ingest", MaxMutationSize.class.getName());
     TestIngest.Opts opts = new TestIngest.Opts();
@@ -212,25 +254,28 @@ public class ExamplesIT extends ConfigurableMacIT {
     } catch (MutationsRejectedException ex) {
       assertEquals(1, ex.getConstraintViolationSummaries().size());
     }
+  }
 
-    log.info("Starting bulk ingest example");
+  @Test(timeout = 20 * 1000)
+  public void testBulkIngest() throws Exception {
     goodExec(GenerateTestData.class, "--start-row", "0", "--count", "10000", "--output", dir + "/tmp/input/data");
     goodExec(SetupTable.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "bulkTable");
     goodExec(BulkIngestExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "bulkTable", "--inputDir", dir + "/tmp/input",
         "--workDir", dir + "/tmp");
+  }
 
-    log.info("Running TeraSortIngest example");
-    goodExec(TeraSortIngest.class, "--count", (1000 * 1000) + "", "-nk", "10", "-xk", "10", "-nv", "10", "-xv", "10", "-t", "sorted", "-i", instance, "-z",
+  @Test(timeout = 1 * 60 * 1000)
+  public void testTeraSortAndRead() throws Exception {
+    String sorted = "sorted";
+    goodExec(TeraSortIngest.class, "--count", (1000 * 1000) + "", "-nk", "10", "-xk", "10", "-nv", "10", "-xv", "10", "-t", sorted, "-i", instance, "-z",
         keepers, "-u", user, "-p", passwd, "--splits", "4");
-    log.info("Running Regex example");
-    goodExec(RegexExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "sorted", "--rowRegex", ".*999.*", "--output", dir
-        + "/tmp/nines");
-    log.info("Running RowHash example");
-    goodExec(RowHash.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "sorted", "--column", "c:");
-    log.info("Running TableToFile example");
-    goodExec(TableToFile.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "sorted", "--output", dir + "/tmp/tableFile");
-
-    log.info("Running word count example");
+    goodExec(RegexExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", sorted, "--rowRegex", ".*999.*", "--output", dir + "/tmp/nines");
+    goodExec(RowHash.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", sorted, "--column", "c:");
+    goodExec(TableToFile.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", sorted, "--output", dir + "/tmp/tableFile");
+  }
+
+  @Test(timeout = 10 * 1000)
+  public void testWordCount() throws Exception {
     c.tableOperations().create("wordCount");
     is = new IteratorSetting(10, SummingCombiner.class);
     SummingCombiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column(new Text("count"))));
@@ -238,45 +283,69 @@ public class ExamplesIT extends ConfigurableMacIT {
     c.tableOperations().attachIterator("wordCount", is);
     fs.copyFromLocalFile(new Path(new Path(System.getProperty("user.dir")).getParent(), "README"), new Path(dir + "/tmp/wc/README"));
     goodExec(WordCount.class, "-i", instance, "-u", user, "-p", passwd, "-z", keepers, "--input", dir + "/tmp/wc", "-t", "wordCount");
+  }
 
-    log.info("Inserting data with a batch writer");
-    goodExec(InsertWithBatchWriter.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "helloBatch");
-    log.info("Reading data");
-    goodExec(ReadData.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "helloBatch");
-    log.info("Running isolated scans");
+  @Test(timeout = 30 * 1000)
+  public void testInsertWithBatchWriterAndReadData() throws Exception {
+    String helloBatch = "helloBatch";
+    goodExec(InsertWithBatchWriter.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", helloBatch);
+    goodExec(ReadData.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", helloBatch);
+  }
+
+  @Test(timeout = 90 * 1000)
+  public void testIsolatedScansWithInterference() throws Exception {
     goodExec(InterferenceTest.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "itest1", "--iterations", "100000", "--isolated");
-    log.info("Running scans without isolation");
+  }
+
+  @Test(timeout = 75 * 1000)
+  public void testScansWithInterference() throws Exception {
     goodExec(InterferenceTest.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "itest2", "--iterations", "100000");
-    log.info("Performing some row operations");
+  }
+
+  @Test(timeout = 5 * 1000)
+  public void testRowOperations() throws Exception {
     goodExec(RowOperations.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd);
-    log.info("Using the batch writer");
+  }
+
+  @Test(timeout = 6 * 1000)
+  public void testBatchWriter() throws Exception {
     c.tableOperations().create("test");
     goodExec(SequentialBatchWriter.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "-t", "test", "--start", "0", "--num", "100000", "--size",
         "50", "--batchMemory", "10000000", "--batchLatency", "1000", "--batchThreads", "4", "--vis", visibility);
-    log.info("Reading and writing some data");
-    goodExec(ReadWriteExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--auths", auths, "--table", "test2", "--createtable", "-c",
+  }
+
+  @Test(timeout = 12 * 1000)
+  public void testReadWriteAndDelete() throws Exception {
+    String test2 = "test2";
+    goodExec(ReadWriteExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--auths", auths, "--table", test2, "--createtable", "-c",
        "--debug");
-    log.info("Deleting some data");
-    goodExec(ReadWriteExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--auths", auths, "--table", "test2", "-d", "--debug");
-    log.info("Writing some data with the batch writer");
-    c.tableOperations().create("test3");
-    goodExec(RandomBatchWriter.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "test3", "--num", "100000", "--min", "0", "--max",
+    goodExec(ReadWriteExample.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--auths", auths, "--table", test2, "-d", "--debug");
+
+  }
+
+  @Test(timeout = 20 * 1000)
+  public void testRandomBatchesAndFlush() throws Exception {
+    String test3 = "test3";
+    c.tableOperations().create(test3);
+    goodExec(RandomBatchWriter.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", test3, "--num", "100000", "--min", "0", "--max",
        "100000", "--size", "100", "--batchMemory", "1000000", "--batchLatency", "1000", "--batchThreads", "4", "--vis", visibility);
-    log.info("Reading some data with the batch scanner");
-    goodExec(RandomBatchScanner.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "test3", "--num", "10000", "--min", "0", "--max",
+    goodExec(RandomBatchScanner.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", test3, "--num", "10000", "--min", "0", "--max",
        "100000", "--size", "100", "--scanThreads", "4", "--auths", auths);
-    log.info("Running an example table operation (Flush)");
-    goodExec(Flush.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", "test3");
-    goodExec(Admin.class, "stopAll");
+    goodExec(Flush.class, "-i", instance, "-z", keepers, "-u", user, "-p", passwd, "--table", test3);
+  }
 
+  @AfterClass
+  public static void stop() throws Exception {
+    goodExec(Admin.class, "stopAll");
+    cleanUp(cluster);
   }
 
-  private Process goodExec(Class<?> theClass, String... args) throws InterruptedException, IOException {
+  private static Process goodExec(Class<?> theClass, String... args) throws InterruptedException, IOException {
     return expectExec(0, theClass, args);
   }
 
-  private Process expectExec(int exitCode, Class<?> theClass, String... args) throws InterruptedException, IOException {
+  private static Process expectExec(int exitCode, Class<?> theClass, String... args) throws InterruptedException, IOException {
     Process p = null;
     assertEquals(exitCode, (p = cluster.exec(theClass, Collections.singletonList(MapReduceIT.hadoopTmpDirArg), args)).waitFor());
     return p;