Repository: camel
Updated Branches:
  refs/heads/master 4442b75b8 -> fe6d6b063


Refactoring.


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/fe6d6b06
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/fe6d6b06
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/fe6d6b06

Branch: refs/heads/master
Commit: fe6d6b063e8c2c01f4c6d43bb50f119a84317660
Parents: 4442b75
Author: Henryk Konsek <hekon...@gmail.com>
Authored: Fri Dec 11 11:51:04 2015 +0100
Committer: Henryk Konsek <hekon...@gmail.com>
Committed: Fri Dec 11 11:51:04 2015 +0100

----------------------------------------------------------------------
 .../org/apache/camel/component/spark/SparkEndpoint.java   | 10 ++++++++++
 .../apache/camel/component/spark/SparkProducerTest.java   |  8 ++++----
 2 files changed, 14 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/fe6d6b06/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
----------------------------------------------------------------------
diff --git a/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java b/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
index cb4e1a9..54e8f54 100644
--- a/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
+++ b/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
@@ -23,9 +23,18 @@ import org.apache.camel.impl.DefaultEndpoint;
 import org.apache.camel.spi.UriEndpoint;
 import org.apache.spark.api.java.AbstractJavaRDDLike;
 import org.apache.spark.sql.DataFrame;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.spi.LoggerFactoryBinder;
+
+import static org.slf4j.LoggerFactory.getLogger;
 
 public class SparkEndpoint extends DefaultEndpoint {
 
+    // Logger
+
+    private static final Logger LOG = getLogger(SparkEndpoint.class);
+
     // Endpoint collaborators
 
     private AbstractJavaRDDLike rdd;
@@ -53,6 +62,7 @@ public class SparkEndpoint extends DefaultEndpoint {
 
     @Override
     public Producer createProducer() throws Exception {
+        LOG.debug("Creating {} Spark producer.", endpointType);
         if (endpointType == EndpointType.rdd) {
             return new RddSparkProducer(this);
         } else if (endpointType == EndpointType.dataframe) {

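For context (not part of this commit): a minimal route sketch that would exercise the new DEBUG statement. Starting such a route makes Camel resolve the spark:rdd endpoint and call createProducer(), which now logs the endpoint type. The registry name "myRdd" and the route itself are illustrative assumptions.

    import org.apache.camel.builder.RouteBuilder;

    // Illustrative sketch only: "myRdd" is a hypothetical registry binding,
    // analogous to the "testFileRdd" binding used in SparkProducerTest below.
    public class SparkRddRouteSketch extends RouteBuilder {
        @Override
        public void configure() throws Exception {
            // Starting this route resolves the spark:rdd endpoint and calls
            // SparkEndpoint.createProducer(), which now emits the DEBUG log.
            from("direct:countLines")
                .to("spark:rdd?rdd=#myRdd");
        }
    }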
http://git-wip-us.apache.org/repos/asf/camel/blob/fe6d6b06/components/camel-spark/src/test/java/org/apache/camel/component/spark/SparkProducerTest.java
----------------------------------------------------------------------
diff --git a/components/camel-spark/src/test/java/org/apache/camel/component/spark/SparkProducerTest.java b/components/camel-spark/src/test/java/org/apache/camel/component/spark/SparkProducerTest.java
index 1533f8e..bf6f706 100644
--- a/components/camel-spark/src/test/java/org/apache/camel/component/spark/SparkProducerTest.java
+++ b/components/camel-spark/src/test/java/org/apache/camel/component/spark/SparkProducerTest.java
@@ -50,7 +50,7 @@ public class SparkProducerTest extends CamelTestSupport {
 
     static HiveContext hiveContext;
 
-    String sparkUri = "spark:rdd?rdd=#pomRdd";
+    String sparkUri = "spark:rdd?rdd=#testFileRdd";
 
     String sparkDataFrameUri = "spark:dataframe?dataFrame=#jsonCars";
 
@@ -69,7 +69,7 @@ public class SparkProducerTest extends CamelTestSupport {
     protected JndiRegistry createRegistry() throws Exception {
         JndiRegistry registry = super.createRegistry();
 
-        registry.bind("pomRdd", sparkContext.textFile("testrdd.txt"));
+        registry.bind("testFileRdd", sparkContext.textFile("testrdd.txt"));
 
         if (shouldRunHive) {
             registry.bind("hiveContext", hiveContext);
@@ -91,13 +91,13 @@ public class SparkProducerTest extends CamelTestSupport {
 
     @Test
     public void shouldExecuteRddCallback() {
-        long pomLinesCount = template.requestBodyAndHeader(sparkUri, null, SPARK_RDD_CALLBACK_HEADER, new org.apache.camel.component.spark.RddCallback() {
+        long linesCount = template.requestBodyAndHeader(sparkUri, null, SPARK_RDD_CALLBACK_HEADER, new org.apache.camel.component.spark.RddCallback() {
             @Override
             public Long onRdd(AbstractJavaRDDLike rdd, Object... payloads) {
                 return rdd.count();
             }
         }, Long.class);
-        Truth.assertThat(pomLinesCount).isEqualTo(19);
+        Truth.assertThat(linesCount).isEqualTo(19);
     }
 
     @Test

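A sketch of the renamed binding in use, mirroring the test above; it assumes the same collaborators the test already holds (registry, sparkContext, template) and the statically imported SPARK_RDD_CALLBACK_HEADER constant.

    // Bind the text-file RDD under the new, clearer name.
    registry.bind("testFileRdd", sparkContext.textFile("testrdd.txt"));

    // Send the RddCallback as a header; the producer invokes it with the
    // bound RDD and returns the line count of testrdd.txt.
    long linesCount = template.requestBodyAndHeader(
            "spark:rdd?rdd=#testFileRdd", null, SPARK_RDD_CALLBACK_HEADER,
            new org.apache.camel.component.spark.RddCallback() {
                @Override
                public Long onRdd(AbstractJavaRDDLike rdd, Object... payloads) {
                    return rdd.count();
                }
            }, Long.class);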