This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 0d277e7c602db88dab2058781875d84d7d168f3b
Author: marius cornescu <marius_corne...@yahoo.com>
AuthorDate: Mon Sep 23 15:05:32 2019 +0200

    CAMEL-13998 - code review (refactor + codestyle)
---
 .../camel-hdfs/src/main/docs/hdfs-component.adoc   |  3 +-
 .../apache/camel/component/hdfs/HdfsComponent.java | 35 +++++++++++++--
 .../camel/component/hdfs/HdfsConfiguration.java    | 50 ++++++----------------
 .../apache/camel/component/hdfs/HdfsConsumer.java  |  4 +-
 .../apache/camel/component/hdfs/HdfsFileType.java  | 35 +++++++--------
 .../org/apache/camel/component/hdfs/HdfsInfo.java  |  8 ++--
 .../camel/component/hdfs/HdfsInfoFactory.java      |  4 +-
 .../apache/camel/component/hdfs/HdfsProducer.java  | 10 ++---
 .../kerberos/HdfsKerberosConfigurationFactory.java |  6 +--
 .../hdfs/kerberos/KerberosConfiguration.java       | 13 +++---
 .../HdfsKerberosConfigurationFactoryTest.java      |  4 +-
 .../hdfs/kerberos/KerberosConfigurationTest.java   |  8 ++--
 .../springboot/HdfsComponentConfiguration.java     | 13 ++++++
 13 files changed, 104 insertions(+), 89 deletions(-)

diff --git a/components/camel-hdfs/src/main/docs/hdfs-component.adoc b/components/camel-hdfs/src/main/docs/hdfs-component.adoc
index bd6549b..3e8268b 100644
--- a/components/camel-hdfs/src/main/docs/hdfs-component.adoc
+++ b/components/camel-hdfs/src/main/docs/hdfs-component.adoc
@@ -54,13 +54,14 @@ fileMode=Append to append each of the chunks together.
 
 
 // component options: START
-The HDFS component supports 1 options, which are listed below.
+The HDFS component supports 2 options, which are listed below.
 
 
 
 [width="100%",cols="2,5,^1,2",options="header"]
 |===
 | Name | Description | Default | Type
+| *jAASConfiguration* (common) | To use the given configuration for security with JAAS. |  | Configuration
 | *basicPropertyBinding* (advanced) | Whether the component should use basic property binding (Camel 2.x) or the newer property binding with additional capabilities | false | boolean
 |===
 // component options: END
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsComponent.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsComponent.java
index 814bb3c..1586ce7 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsComponent.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsComponent.java
@@ -16,6 +16,11 @@
  */
 package org.apache.camel.component.hdfs;
 
+import java.net.URL;
+import java.util.Map;
+
+import javax.security.auth.login.Configuration;
+
 import org.apache.camel.Endpoint;
 import org.apache.camel.spi.annotations.Component;
 import org.apache.camel.support.DefaultComponent;
@@ -23,9 +28,6 @@ import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.net.URL;
-import java.util.Map;
-
 @Component("hdfs")
 public class HdfsComponent extends DefaultComponent {
 
@@ -51,4 +53,31 @@ public class HdfsComponent extends DefaultComponent {
         }
     }
 
+    static Configuration getJAASConfiguration() {
+        Configuration auth = null;
+        try {
+            auth = Configuration.getConfiguration();
+            LOG.trace("Existing JAAS Configuration {}", auth);
+        } catch (SecurityException e) {
+            LOG.trace("Cannot load existing JAAS configuration", e);
+        }
+        return auth;
+    }
+
+    /**
+     * To use the given configuration for security with JAAS.
+     */
+    static void setJAASConfiguration(Configuration auth) {
+        if (auth != null) {
+            LOG.trace("Restoring existing JAAS Configuration {}", auth);
+            try {
+                Configuration.setConfiguration(auth);
+            } catch (SecurityException e) {
+                LOG.trace("Cannot restore JAAS Configuration. This exception is ignored.", e);
+            }
+        } else {
+            LOG.trace("No JAAS Configuration to restore");
+        }
+    }
+
 }
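
Note on the hunk above: the two JAAS helpers were previously instance methods on HdfsConfiguration and are now package-visible statics on HdfsComponent, so that HdfsConsumer, HdfsProducer and HdfsInfoFactory can share one implementation. What they implement is a plain save/restore of the JVM-wide JAAS Configuration around Hadoop calls, since the Hadoop client may replace it. A minimal sketch of that pattern using only JDK classes (doHadoopWork is an illustrative placeholder, not part of this patch):

    import javax.security.auth.login.Configuration;

    public class JaasSaveRestoreSketch {

        public static void main(String[] args) {
            // Remember the current JAAS Configuration; Hadoop may install its own,
            // which would otherwise break later authentication in the same JVM.
            Configuration auth = null;
            try {
                auth = Configuration.getConfiguration();
            } catch (SecurityException e) {
                // nothing configured yet, so there is nothing to restore later
            }
            try {
                doHadoopWork();
            } finally {
                if (auth != null) {
                    // Put back whatever was configured before the Hadoop interaction.
                    Configuration.setConfiguration(auth);
                }
            }
        }

        private static void doHadoopWork() {
            // illustrative stand-in for FileSystem.create/append or a consumer poll
        }
    }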
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConfiguration.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConfiguration.java
index f97fd89..d48161e 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConfiguration.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConfiguration.java
@@ -22,25 +22,20 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 import java.util.stream.Collectors;
 
+import static java.util.Objects.nonNull;
+
 import org.apache.camel.spi.Metadata;
 import org.apache.camel.spi.UriParam;
 import org.apache.camel.spi.UriParams;
 import org.apache.camel.spi.UriPath;
 import org.apache.camel.util.URISupport;
 import org.apache.hadoop.io.SequenceFile;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.security.auth.login.Configuration;
 
 @UriParams
 public class HdfsConfiguration {
 
-    private static final Logger LOG = LoggerFactory.getLogger(HdfsConfiguration.class);
-
     private URI uri;
     private boolean wantAppend;
     private List<HdfsProducer.SplitStrategy> splitStrategies;
@@ -194,7 +189,7 @@ public class HdfsConfiguration {
 
         splitStrategy = getString(hdfsSettings, "splitStrategy", kerberosNamedNodes);
 
-        if (Objects.nonNull(splitStrategy)) {
+        if (nonNull(splitStrategy)) {
             String[] strstrategies = splitStrategy.split(",");
             for (String strstrategy : strstrategies) {
                 String[] tokens = strstrategy.split(":");
@@ -214,34 +209,6 @@ public class HdfsConfiguration {
         return Arrays.stream(kerberosNamedNodes.split(",")).distinct().collect(Collectors.toList());
     }
 
-
-    Configuration getJAASConfiguration() {
-        Configuration auth = null;
-        try {
-            auth = Configuration.getConfiguration();
-            LOG.trace("Existing JAAS Configuration {}", auth);
-        } catch (SecurityException e) {
-            LOG.trace("Cannot load existing JAAS configuration", e);
-        }
-        return auth;
-    }
-
-    /**
-     * To use the given configuration for security with JAAS.
-     */
-    void setJAASConfiguration(Configuration auth) {
-        if (auth != null) {
-            LOG.trace("Restoring existing JAAS Configuration {}", auth);
-            try {
-                Configuration.setConfiguration(auth);
-            } catch (SecurityException e) {
-                LOG.trace("Cannot restore JAAS Configuration. This exception is ignored.", e);
-            }
-        } else {
-            LOG.trace("No JAAS Configuration to restore");
-        }
-    }
-
     public void checkConsumerOptions() {
     }
 
@@ -618,7 +585,14 @@ public class HdfsConfiguration {
     }
 
     public boolean isKerberosAuthentication() {
-        return Objects.nonNull(kerberosNamedNodes) && Objects.nonNull(kerberosConfigFileLocation) && Objects.nonNull(kerberosUsername) && Objects.nonNull(kerberosKeytabLocation)
-                && !kerberosNamedNodes.isEmpty() && !kerberosConfigFileLocation.isEmpty() && !kerberosUsername.isEmpty() && !kerberosKeytabLocation.isEmpty();
+        return nonNullKerberosProperties() && nonEmptyKerberosProperties();
+    }
+
+    private boolean nonNullKerberosProperties() {
+        return nonNull(kerberosNamedNodes) && nonNull(kerberosConfigFileLocation) && nonNull(kerberosUsername) && nonNull(kerberosKeytabLocation);
+    }
+
+    private boolean nonEmptyKerberosProperties() {
+        return !kerberosNamedNodes.isEmpty() && !kerberosConfigFileLocation.isEmpty() && !kerberosUsername.isEmpty() && !kerberosKeytabLocation.isEmpty();
     }
 }
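
Note on the isKerberosAuthentication() change above: it is a pure extraction of the long boolean expression into two named predicates. The order matters, because nonEmptyKerberosProperties() dereferences the same fields that nonNullKerberosProperties() guards, and the && short-circuits, so the emptiness checks only run once all four values are known to be non-null. The same idea in isolation, with illustrative field names (a sketch, not part of the patch):

    import static java.util.Objects.nonNull;

    class KerberosSettingsSketch {

        String namedNodes;
        String configFileLocation;
        String username;
        String keytabLocation;

        boolean isComplete() {
            // null checks must come first: the emptiness checks dereference the fields
            return allPresent() && allNonEmpty();
        }

        private boolean allPresent() {
            return nonNull(namedNodes) && nonNull(configFileLocation)
                    && nonNull(username) && nonNull(keytabLocation);
        }

        private boolean allNonEmpty() {
            return !namedNodes.isEmpty() && !configFileLocation.isEmpty()
                    && !username.isEmpty() && !keytabLocation.isEmpty();
        }
    }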
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java
index 6dbd1de..98e1c7d 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java
@@ -92,11 +92,11 @@ public final class HdfsConsumer extends ScheduledPollConsumer {
     @Override
     protected int poll() throws Exception {
         // need to remember auth as Hadoop will override that, which otherwise means the Auth is broken afterwards
-        Configuration auth = config.getJAASConfiguration();
+        Configuration auth = HdfsComponent.getJAASConfiguration();
         try {
             return doPoll();
         } finally {
-            config.setJAASConfiguration(auth);
+            HdfsComponent.setJAASConfiguration(auth);
         }
     }
 
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsFileType.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsFileType.java
index 71ec5e6..713e271 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsFileType.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsFileType.java
@@ -16,6 +16,19 @@
  */
 package org.apache.camel.component.hdfs;
 
+import java.io.ByteArrayOutputStream;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.nio.ByteBuffer;
+import java.nio.file.Files;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.camel.RuntimeCamelException;
 import org.apache.camel.TypeConverter;
 import org.apache.camel.util.IOHelper;
@@ -43,19 +56,6 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import java.io.ByteArrayOutputStream;
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.nio.ByteBuffer;
-import java.nio.file.Files;
-import java.util.HashMap;
-import java.util.Map;
-
 public enum HdfsFileType {
 
     NORMAL_FILE {
@@ -101,11 +101,9 @@ public enum HdfsFileType {
                 HdfsInfo hdfsInfo = HdfsInfoFactory.newHdfsInfo(hdfsPath, configuration);
                 if (!configuration.isAppend()) {
                     rout = hdfsInfo.getFileSystem().create(hdfsInfo.getPath(), configuration.isOverwrite(), configuration.getBufferSize(),
-                            configuration.getReplication(), configuration.getBlockSize(), () -> {
-                            });
+                            configuration.getReplication(), configuration.getBlockSize(), () -> { });
                 } else {
-                    rout = hdfsInfo.getFileSystem().append(hdfsInfo.getPath(), configuration.getBufferSize(), () -> {
-                    });
+                    rout = hdfsInfo.getFileSystem().append(hdfsInfo.getPath(), configuration.getBufferSize(), () -> { });
                 }
                 return rout;
             } catch (IOException ex) {
@@ -412,8 +410,7 @@ public enum HdfsFileType {
                 HdfsInfo hdfsInfo = HdfsInfoFactory.newHdfsInfo(hdfsPath, configuration);
                 Class<? extends WritableComparable> valueWritableClass = configuration.getValueType().getWritableClass();
                 rout = new ArrayFile.Writer(hdfsInfo.getConf(), hdfsInfo.getFileSystem(), hdfsPath, valueWritableClass,
-                        configuration.getCompressionType(), () -> {
-                        });
+                        configuration.getCompressionType(), () -> { });
                 return rout;
             } catch (IOException ex) {
                 throw new RuntimeCamelException(ex);
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfo.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfo.java
index 2ef0598..2d96101 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfo.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfo.java
@@ -16,15 +16,15 @@
  */
 package org.apache.camel.component.hdfs;
 
+import java.io.IOException;
+import java.net.URI;
+import java.util.List;
+
 import org.apache.camel.component.hdfs.kerberos.KerberosConfiguration;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import java.io.IOException;
-import java.net.URI;
-import java.util.List;
-
 public final class HdfsInfo {
 
     private Configuration configuration;
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfoFactory.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfoFactory.java
index aec5348..4862a70 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfoFactory.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsInfoFactory.java
@@ -27,11 +27,11 @@ public final class HdfsInfoFactory {
 
     public static HdfsInfo newHdfsInfo(String hdfsPath, HdfsConfiguration configuration) throws IOException {
         // need to remember auth as Hadoop will override that, which otherwise means the Auth is broken afterwards
-        Configuration auth = configuration.getJAASConfiguration();
+        Configuration auth = HdfsComponent.getJAASConfiguration();
         try {
             return new HdfsInfo(hdfsPath, configuration);
         } finally {
-            configuration.setJAASConfiguration(auth);
+            HdfsComponent.setJAASConfiguration(auth);
         }
     }
 
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java
index 999482e..6c21580 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java
@@ -99,7 +99,7 @@ public class HdfsProducer extends DefaultProducer {
     @Override
     protected void doStart() {
         // need to remember auth as Hadoop will override that, which otherwise means the Auth is broken afterwards
-        Configuration auth = config.getJAASConfiguration();
+        Configuration auth = HdfsComponent.getJAASConfiguration();
         try {
             super.doStart();
 
@@ -122,10 +122,10 @@ public class HdfsProducer extends DefaultProducer {
             }
         } catch (Exception e) {
             LOG.warn("Failed to start the HDFS producer. Caused by: [{}]", e.getMessage());
-            LOG.trace("", e);
+            LOG.debug("", e);
             throw new RuntimeException(e);
         } finally {
-            config.setJAASConfiguration(auth);
+            HdfsComponent.setJAASConfiguration(auth);
         }
     }
 
@@ -177,11 +177,11 @@ public class HdfsProducer extends DefaultProducer {
     @Override
     public void process(Exchange exchange) throws Exception {
         // need to remember auth as Hadoop will override that, which otherwise means the Auth is broken afterwards
-        Configuration auth = config.getJAASConfiguration();
+        Configuration auth = HdfsComponent.getJAASConfiguration();
         try {
             doProcess(exchange);
         } finally {
-            config.setJAASConfiguration(auth);
+            HdfsComponent.setJAASConfiguration(auth);
         }
     }
 
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactory.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactory.java
index 3887d8f..00c9526 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactory.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactory.java
@@ -16,14 +16,14 @@
  */
 package org.apache.camel.component.hdfs.kerberos;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.File;
 import java.io.FileNotFoundException;
 
 import static java.lang.String.format;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 public final class HdfsKerberosConfigurationFactory {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(HdfsKerberosConfigurationFactory.class);
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/KerberosConfiguration.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/KerberosConfiguration.java
index 0dfb4a9..77982e1 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/KerberosConfiguration.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/kerberos/KerberosConfiguration.java
@@ -16,12 +16,6 @@
  */
 package org.apache.camel.component.hdfs.kerberos;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider;
-import org.apache.hadoop.security.UserGroupInformation;
-
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -29,6 +23,13 @@ import java.util.List;
 import java.util.stream.Collectors;
 
 import static java.lang.String.format;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider;
+import org.apache.hadoop.security.UserGroupInformation;
+
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY;
diff --git a/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactoryTest.java b/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactoryTest.java
index 1695b2f..ba65149 100644
--- a/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactoryTest.java
+++ b/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/HdfsKerberosConfigurationFactoryTest.java
@@ -16,11 +16,11 @@
  */
 package org.apache.camel.component.hdfs.kerberos;
 
-import org.junit.Test;
-
 import java.io.FileNotFoundException;
 import java.io.IOException;
 
+import org.junit.Test;
+
 public class HdfsKerberosConfigurationFactoryTest {
 
     @Test(expected = FileNotFoundException.class)
diff --git a/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/KerberosConfigurationTest.java b/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/KerberosConfigurationTest.java
index e5e3f26..5288cd5 100644
--- a/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/KerberosConfigurationTest.java
+++ b/components/camel-hdfs/src/test/java/org/apache/camel/component/hdfs/kerberos/KerberosConfigurationTest.java
@@ -16,18 +16,18 @@
  */
 package org.apache.camel.component.hdfs.kerberos;
 
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.junit.Test;
-
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.junit.Test;
+
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertThat;
 
 public class KerberosConfigurationTest {
 
diff --git a/platforms/spring-boot/components-starter/camel-hdfs-starter/src/main/java/org/apache/camel/component/hdfs/springboot/HdfsComponentConfiguration.java b/platforms/spring-boot/components-starter/camel-hdfs-starter/src/main/java/org/apache/camel/component/hdfs/springboot/HdfsComponentConfiguration.java
index deb56f5..ea88324 100644
--- a/platforms/spring-boot/components-starter/camel-hdfs-starter/src/main/java/org/apache/camel/component/hdfs/springboot/HdfsComponentConfiguration.java
+++ b/platforms/spring-boot/components-starter/camel-hdfs-starter/src/main/java/org/apache/camel/component/hdfs/springboot/HdfsComponentConfiguration.java
@@ -37,11 +37,24 @@ public class HdfsComponentConfiguration
      */
     private Boolean enabled;
     /**
+     * To use the given configuration for security with JAAS. The option is a
+     * javax.security.auth.login.Configuration type.
+     */
+    private String jAASConfiguration;
+    /**
      * Whether the component should use basic property binding (Camel 2.x) or
      * the newer property binding with additional capabilities
      */
     private Boolean basicPropertyBinding = false;
 
+    public String getJAASConfiguration() {
+        return jAASConfiguration;
+    }
+
+    public void setJAASConfiguration(String jAASConfiguration) {
+        this.jAASConfiguration = jAASConfiguration;
+    }
+
     public Boolean getBasicPropertyBinding() {
         return basicPropertyBinding;
     }
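
Note on the Spring Boot starter hunk above: the new option surfaces there as a String property rather than a Configuration object; its javadoc ("The option is a javax.security.auth.login.Configuration type") follows the usual Camel starter convention of holding a reference to a registry bean of that type. A hypothetical wiring, where the bean name, property key and the empty Configuration are illustrative only and not verified against this commit:

    import javax.security.auth.login.AppConfigurationEntry;

    import org.springframework.context.annotation.Bean;

    @org.springframework.context.annotation.Configuration
    public class HdfsJaasBeans {

        // application.properties would then point the component at this bean, e.g.
        //   camel.component.hdfs.j-a-a-s-configuration=#hdfsJaas
        // (property key and "#bean" reference syntax follow the usual Camel starter
        // conventions; they are assumptions, not shown in this patch)
        @Bean("hdfsJaas")
        public javax.security.auth.login.Configuration hdfsJaas() {
            return new javax.security.auth.login.Configuration() {
                @Override
                public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
                    return new AppConfigurationEntry[0]; // placeholder, no login modules
                }
            };
        }
    }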
