This is an automated email from the ASF dual-hosted git repository.

busbey pushed a commit to branch 1.9
in repository https://gitbox.apache.org/repos/asf/accumulo.git


The following commit(s) were added to refs/heads/1.9 by this push:
     new 5de8d0f  TableName from baseSplit is ignored, when getting inputTableConfig within AbstractInputFormat.initialize() (#711)
5de8d0f is described below

commit 5de8d0f1e1aa674bfada825ddd9014367b3e2e27
Author: Denys Kuzmenko <denisk...@gmail.com>
AuthorDate: Mon Oct 22 18:06:38 2018 +0200

    TableName from baseSplit is ignored, when getting inputTableConfig within AbstractInputFormat.initialize() (#711)
---
 .../mapreduce/lib/impl/InputConfigurator.java      | 26 ++++++++++++++++++----
 1 file changed, 22 insertions(+), 4 deletions(-)

diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
index e99a0e6..ac7ae28 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
@@ -671,9 +671,26 @@ public class InputConfigurator extends ConfiguratorBase {
    */
   public static Map<String,InputTableConfig> getInputTableConfigs(Class<?> implementingClass,
       Configuration conf) {
+    return getInputTableConfigs(implementingClass, conf, getInputTableName(implementingClass, conf));
+  }
+
+  /**
+   * Returns all {@link InputTableConfig} objects associated with this job.
+   *
+   * @param implementingClass
+   *          the class whose name will be used as a prefix for the property configuration key
+   * @param conf
+   *          the Hadoop configuration object to configure
+   * @param tableName
+   *          the table name for which to retrieve the configuration
+   * @return all of the table query configs for the job
+   * @since 1.6.0
+   */
+  private static Map<String,InputTableConfig> getInputTableConfigs(Class<?> implementingClass,
+      Configuration conf, String tableName) {
     Map<String,InputTableConfig> configs = new HashMap<>();
     Map.Entry<String,InputTableConfig> defaultConfig = getDefaultInputTableConfig(implementingClass,
-        conf);
+        conf, tableName);
     if (defaultConfig != null)
       configs.put(defaultConfig.getKey(), defaultConfig.getValue());
     String configString = conf.get(enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS));
@@ -709,7 +726,7 @@ public class InputConfigurator extends ConfiguratorBase {
    */
   public static InputTableConfig getInputTableConfig(Class<?> implementingClass, Configuration conf,
       String tableName) {
-    Map<String,InputTableConfig> queryConfigs = getInputTableConfigs(implementingClass, conf);
+    Map<String,InputTableConfig> queryConfigs = getInputTableConfigs(implementingClass, conf, tableName);
     return queryConfigs.get(tableName);
   }
 
@@ -881,12 +898,13 @@ public class InputConfigurator extends ConfiguratorBase {
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
    *          the Hadoop instance for which to retrieve the configuration
+   * @param tableName
+   *          the table name for which to retrieve the configuration
    * @return the config object built from the single input table properties set on the job
    * @since 1.6.0
    */
   protected static Map.Entry<String,InputTableConfig> getDefaultInputTableConfig(
-      Class<?> implementingClass, Configuration conf) {
-    String tableName = getInputTableName(implementingClass, conf);
+      Class<?> implementingClass, Configuration conf, String tableName) {
     if (tableName != null) {
       InputTableConfig queryConfig = new InputTableConfig();
       List<IteratorSetting> itrs = getIterators(implementingClass, conf);
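
For context, here is a minimal, hypothetical sketch (not part of the patch) of the lookup path this change fixes. The class under test, the "table_from_split" value, and the empty Configuration are placeholders; in a real job the configuration is populated by the AccumuloInputFormat setters, and the table name comes from the RangeInputSplit handed to AbstractInputFormat.initialize().

import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.InputTableConfig;
import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator;
import org.apache.hadoop.conf.Configuration;

public class SplitTableConfigLookup {
  public static void main(String[] args) {
    // Placeholder: in a real job this Configuration is the one populated by the
    // AccumuloInputFormat / AccumuloMultiTableInputFormat setters.
    Configuration conf = new Configuration();

    // Placeholder for the table name carried by the input split
    // (RangeInputSplit.getTableName() inside AbstractInputFormat.initialize()).
    String splitTable = "table_from_split";

    // With this commit, the requested table name is passed through to
    // getDefaultInputTableConfig, so the single-table default config is keyed
    // by splitTable instead of by the job-level input table name.
    InputTableConfig tableConfig =
        InputConfigurator.getInputTableConfig(AccumuloInputFormat.class, conf, splitTable);

    System.out.println(tableConfig);
  }
}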
