Merge branch '1.4.6-SNAPSHOT' into 1.5.2-SNAPSHOT
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/fd1ac998
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/fd1ac998
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/fd1ac998

Branch: refs/heads/master
Commit: fd1ac9981bba9ec4e82e658e1ec60eee35d60bda
Parents: c62d676 92c4171
Author: Christopher Tubbs <ctubb...@apache.org>
Authored: Wed Apr 23 10:50:35 2014 -0400
Committer: Christopher Tubbs <ctubb...@apache.org>
Committed: Wed Apr 23 10:50:35 2014 -0400

----------------------------------------------------------------------
 .../main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/fd1ac998/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
index 09a2d61,0000000..f26aaef
mode 100644,000000..100644
--- a/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
+++ b/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
@@@ -1,81 -1,0 +1,79 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License. You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.accumulo.core.file.rfile;
 +
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.List;
 +
 +import org.apache.accumulo.core.cli.Help;
 +import org.apache.accumulo.core.conf.DefaultConfiguration;
 +import org.apache.accumulo.core.file.FileSKVWriter;
- import org.apache.accumulo.core.file.rfile.RFile.Writer;
 +import org.apache.accumulo.core.file.rfile.bcfile.TFile;
 +import org.apache.accumulo.core.util.CachedConfiguration;
 +import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.fs.FileSystem;
 +import org.apache.hadoop.fs.Path;
 +
 +import com.beust.jcommander.IParameterValidator;
 +import com.beust.jcommander.Parameter;
 +import com.beust.jcommander.ParameterException;
 +
 +/**
 + * Create an empty RFile for use in recovering from data loss where Accumulo still refers internally to a path.
 + */
 +public class CreateEmpty {
 +
 +  public static class NamedLikeRFile implements IParameterValidator {
 +    @Override
 +    public void validate(String name, String value) throws ParameterException {
 +      if (!value.endsWith(".rf")) {
 +        throw new ParameterException("File must end with .rf and '" + value + "' does not.");
 +      }
 +    }
 +  }
 +
 +  public static class IsSupportedCompressionAlgorithm implements IParameterValidator {
 +    @Override
 +    public void validate(String name, String value) throws ParameterException {
 +      String[] algorithms = TFile.getSupportedCompressionAlgorithms();
 +      if (!((Arrays.asList(algorithms)).contains(value))) {
 +        throw new ParameterException("Compression codec must be one of " + Arrays.toString(TFile.getSupportedCompressionAlgorithms()));
 +      }
 +    }
 +  }
 +
 +  static class Opts extends Help {
 +    @Parameter(names = {"-c", "--codec"}, description = "the compression codec to use.", validateWith = IsSupportedCompressionAlgorithm.class)
 +    String codec = TFile.COMPRESSION_NONE;
 +    @Parameter(description = " <path> { <path> ... } Each path given is a URL. Relative paths are resolved according to the default filesystem defined in your Hadoop configuration, which is usually an HDFS instance.", required = true, validateWith = NamedLikeRFile.class)
 +    List<String> files = new ArrayList<String>();
 +  }
 +
 +  public static void main(String[] args) throws Exception {
 +    Configuration conf = CachedConfiguration.getInstance();
 +
 +    Opts opts = new Opts();
 +    opts.parseArgs(CreateEmpty.class.getName(), args);
 +
 +    for (String arg : opts.files) {
 +      Path path = new Path(arg);
 +      FileSKVWriter writer = (new RFileOperations()).openWriter(arg, path.getFileSystem(conf), conf, DefaultConfiguration.getDefaultConfiguration(), opts.codec);
 +      writer.close();
 +    }
 +  }
 +
 +}
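For reference, CreateEmpty is a command-line recovery tool and is normally run
through the Accumulo launcher script, passing the fully qualified class name
shown in opts.parseArgs() above. A minimal invocation sketch (the launcher
path and the HDFS URL below are illustrative assumptions, not taken from this
commit):

    $ ./bin/accumulo org.apache.accumulo.core.file.rfile.CreateEmpty \
          --codec none \
          hdfs://namenode:8020/accumulo/tables/1/default_tablet/empty.rf

Per the validators in the class, every path argument must end in .rf, and
--codec must name one of the algorithms reported by
TFile.getSupportedCompressionAlgorithms(); "none" is the default.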