This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 14ac1a2f58d63e28d7bf2ca4f824d394f20d32dc
Author: Andrea Cosentino <anco...@gmail.com>
AuthorDate: Fri Jul 5 09:15:56 2019 +0200

    Fixed CS for Camel-Spark and regen
---
 .../annotations/AnnotatedRddCallbackProxy.java     |   4 +-
 .../endpoint/dsl/SparkEndpointBuilderFactory.java  | 248 ++++++++-------------
 2 files changed, 96 insertions(+), 156 deletions(-)

diff --git a/components/camel-spark/src/main/java/org/apache/camel/component/spark/annotations/AnnotatedRddCallbackProxy.java b/components/camel-spark/src/main/java/org/apache/camel/component/spark/annotations/AnnotatedRddCallbackProxy.java
index 3ac4732..eeefb25 100644
--- a/components/camel-spark/src/main/java/org/apache/camel/component/spark/annotations/AnnotatedRddCallbackProxy.java
+++ b/components/camel-spark/src/main/java/org/apache/camel/component/spark/annotations/AnnotatedRddCallbackProxy.java
@@ -27,8 +27,8 @@ import org.apache.camel.CamelContext;
 import org.apache.camel.component.spark.RddCallback;
 import org.apache.spark.api.java.JavaRDDLike;
 
-import static org.apache.camel.util.ObjectHelper.findMethodsWithAnnotation;
 import static org.apache.camel.support.ObjectHelper.invokeMethodSafe;
+import static org.apache.camel.util.ObjectHelper.findMethodsWithAnnotation;
 
 class AnnotatedRddCallbackProxy implements RddCallback {
 
@@ -76,4 +76,4 @@ class AnnotatedRddCallbackProxy implements RddCallback {
         }
     }
 
-}
\ No newline at end of file
+}
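
For context, AnnotatedRddCallbackProxy wraps a plain bean and relies on the two static imports reordered above: findMethodsWithAnnotation locates the @RddCallback-annotated method and invokeMethodSafe calls it with the RDD. A minimal sketch of such a bean, assuming camel-spark's @RddCallback annotation; the class and method names below are hypothetical:

    import org.apache.camel.component.spark.annotations.RddCallback;
    import org.apache.spark.api.java.JavaRDD;

    // Hypothetical callback bean: the proxy discovers countLines() via
    // findMethodsWithAnnotation(..., RddCallback.class) and invokes it
    // through invokeMethodSafe, passing in the RDD to compute against.
    public class LineCount {

        @RddCallback
        long countLines(JavaRDD<String> textFile) {
            return textFile.count(); // triggers the Spark job
        }
    }
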
diff --git a/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java b/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java
index 98f2a82..53617b1 100644
--- a/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java
+++ b/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java
@@ -22,8 +22,8 @@ import org.apache.camel.builder.EndpointProducerBuilder;
 import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
 
 /**
- * The spark-rest component is used for hosting REST services which has been
- * defined using Camel rest-dsl.
+ * The spark component can be used to send RDD or DataFrame jobs to Apache Spark
+ * cluster.
  * 
  * Generated by camel-package-maven-plugin - do not edit this file!
  */
@@ -32,166 +32,138 @@ public interface SparkEndpointBuilderFactory {
 
 
     /**
-     * Builder for endpoint for the Spark Rest component.
+     * Builder for endpoint for the Apache Spark component.
      */
-    public interface SparkEndpointBuilder extends EndpointConsumerBuilder {
+    public interface SparkEndpointBuilder extends EndpointProducerBuilder {
         default AdvancedSparkEndpointBuilder advanced() {
             return (AdvancedSparkEndpointBuilder) this;
         }
         /**
-         * get, post, put, patch, delete, head, trace, connect, or options.
-         * The option is a <code>java.lang.String</code> type.
-         * @group consumer
+         * Type of the endpoint (rdd, dataframe, hive).
+         * The option is a
+         * <code>org.apache.camel.component.spark.EndpointType</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder verb(String verb) {
-            setProperty("verb", verb);
+        default SparkEndpointBuilder endpointType(EndpointType endpointType) {
+            setProperty("endpointType", endpointType);
             return this;
         }
         /**
-         * The content path which support Spark syntax.
-         * The option is a <code>java.lang.String</code> type.
-         * @group consumer
+         * Type of the endpoint (rdd, dataframe, hive).
+         * The option will be converted to a
+         * <code>org.apache.camel.component.spark.EndpointType</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder path(String path) {
-            setProperty("path", path);
+        default SparkEndpointBuilder endpointType(String endpointType) {
+            setProperty("endpointType", endpointType);
             return this;
         }
         /**
-         * Accept type such as: 'text/xml', or 'application/json'. By default we
-         * accept all kinds of types.
-         * The option is a <code>java.lang.String</code> type.
-         * @group consumer
+         * Indicates if results should be collected or counted.
+         * The option is a <code>boolean</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder accept(String accept) {
-            setProperty("accept", accept);
+        default SparkEndpointBuilder collect(boolean collect) {
+            setProperty("collect", collect);
             return this;
         }
         /**
-         * Determines whether or not the raw input stream from Spark
-         * HttpRequest#getContent() is cached or not (Camel will read the stream
-         * into a in light-weight memory based Stream caching) cache. By default
-         * Camel will cache the Netty input stream to support reading it
-         * multiple times to ensure Camel can retrieve all data from the stream.
-         * However you can set this option to true when you for example need to
-         * access the raw stream, such as streaming it directly to a file or
-         * other persistent store. Mind that if you enable this option, then you
-         * cannot read the Netty stream multiple times out of the box, and you
-         * would need manually to reset the reader index on the Spark raw
-         * stream.
-         * The option is a <code>boolean</code> type.
-         * @group consumer
+         * Indicates if results should be collected or counted.
+         * The option will be converted to a <code>boolean</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder disableStreamCache(
-                boolean disableStreamCache) {
-            setProperty("disableStreamCache", disableStreamCache);
+        default SparkEndpointBuilder collect(String collect) {
+            setProperty("collect", collect);
             return this;
         }
         /**
-         * Determines whether or not the raw input stream from Spark
-         * HttpRequest#getContent() is cached or not (Camel will read the stream
-         * into a in light-weight memory based Stream caching) cache. By default
-         * Camel will cache the Netty input stream to support reading it
-         * multiple times to ensure Camel can retrieve all data from the stream.
-         * However you can set this option to true when you for example need to
-         * access the raw stream, such as streaming it directly to a file or
-         * other persistent store. Mind that if you enable this option, then you
-         * cannot read the Netty stream multiple times out of the box, and you
-         * would need manually to reset the reader index on the Spark raw
-         * stream.
-         * The option will be converted to a <code>boolean</code> type.
-         * @group consumer
+         * DataFrame to compute against.
+         * The option is a
+         * <code>org.apache.spark.sql.Dataset&lt;org.apache.spark.sql.Row&gt;</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder disableStreamCache(
-                String disableStreamCache) {
-            setProperty("disableStreamCache", disableStreamCache);
+        default SparkEndpointBuilder dataFrame(Object dataFrame) {
+            setProperty("dataFrame", dataFrame);
             return this;
         }
         /**
-         * If this option is enabled, then during binding from Spark to Camel
-         * Message then the headers will be mapped as well (eg added as header
-         * to the Camel Message as well). You can turn off this option to
-         * disable this. The headers can still be accessed from the
-         * org.apache.camel.component.sparkrest.SparkMessage message with the
-         * method getRequest() that returns the Spark HTTP request instance.
-         * The option is a <code>boolean</code> type.
-         * @group consumer
+         * DataFrame to compute against.
+         * The option will be converted to a
+         * <code>org.apache.spark.sql.Dataset&lt;org.apache.spark.sql.Row&gt;</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder mapHeaders(boolean mapHeaders) {
-            setProperty("mapHeaders", mapHeaders);
+        default SparkEndpointBuilder dataFrame(String dataFrame) {
+            setProperty("dataFrame", dataFrame);
             return this;
         }
         /**
-         * If this option is enabled, then during binding from Spark to Camel
-         * Message then the headers will be mapped as well (eg added as header
-         * to the Camel Message as well). You can turn off this option to
-         * disable this. The headers can still be accessed from the
-         * org.apache.camel.component.sparkrest.SparkMessage message with the
-         * method getRequest() that returns the Spark HTTP request instance.
-         * The option will be converted to a <code>boolean</code> type.
-         * @group consumer
+         * Function performing action against an DataFrame.
+         * The option is a
+         * <code>org.apache.camel.component.spark.DataFrameCallback</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder mapHeaders(String mapHeaders) {
-            setProperty("mapHeaders", mapHeaders);
+        default SparkEndpointBuilder dataFrameCallback(Object dataFrameCallback) {
+            setProperty("dataFrameCallback", dataFrameCallback);
             return this;
         }
         /**
-         * If enabled and an Exchange failed processing on the consumer side,
-         * and if the caused Exception was send back serialized in the response
-         * as a application/x-java-serialized-object content type. This is by
-         * default turned off. If you enable this then be aware that Java will
-         * deserialize the incoming data from the request to Java and that can
-         * be a potential security risk.
-         * The option is a <code>boolean</code> type.
-         * @group consumer
+         * Function performing action against an DataFrame.
+         * The option will be converted to a
+         * <code>org.apache.camel.component.spark.DataFrameCallback</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder transferException(boolean transferException) {
-            setProperty("transferException", transferException);
+        default SparkEndpointBuilder dataFrameCallback(String dataFrameCallback) {
+            setProperty("dataFrameCallback", dataFrameCallback);
             return this;
         }
         /**
-         * If enabled and an Exchange failed processing on the consumer side,
-         * and if the caused Exception was send back serialized in the response
-         * as a application/x-java-serialized-object content type. This is by
-         * default turned off. If you enable this then be aware that Java will
-         * deserialize the incoming data from the request to Java and that can
-         * be a potential security risk.
-         * The option will be converted to a <code>boolean</code> type.
-         * @group consumer
+         * RDD to compute against.
+         * The option is a <code>org.apache.spark.api.java.JavaRDDLike</code>
+         * type.
+         * @group producer
          */
-        default SparkEndpointBuilder transferException(String transferException) {
-            setProperty("transferException", transferException);
+        default SparkEndpointBuilder rdd(Object rdd) {
+            setProperty("rdd", rdd);
             return this;
         }
         /**
-         * If this option is enabled, then during binding from Spark to Camel
-         * Message then the header values will be URL decoded (eg %20 will be a
-         * space character.).
-         * The option is a <code>boolean</code> type.
-         * @group consumer
+         * RDD to compute against.
+         * The option will be converted to a
+         * <code>org.apache.spark.api.java.JavaRDDLike</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder urlDecodeHeaders(boolean urlDecodeHeaders) {
-            setProperty("urlDecodeHeaders", urlDecodeHeaders);
+        default SparkEndpointBuilder rdd(String rdd) {
+            setProperty("rdd", rdd);
             return this;
         }
         /**
-         * If this option is enabled, then during binding from Spark to Camel
-         * Message then the header values will be URL decoded (eg %20 will be a
-         * space character.).
-         * The option will be converted to a <code>boolean</code> type.
-         * @group consumer
+         * Function performing action against an RDD.
+         * The option is a
+         * <code>org.apache.camel.component.spark.RddCallback</code> type.
+         * @group producer
          */
-        default SparkEndpointBuilder urlDecodeHeaders(String urlDecodeHeaders) {
-            setProperty("urlDecodeHeaders", urlDecodeHeaders);
+        default SparkEndpointBuilder rddCallback(Object rddCallback) {
+            setProperty("rddCallback", rddCallback);
+            return this;
+        }
+        /**
+         * Function performing action against an RDD.
+         * The option will be converted to a
+         * <code>org.apache.camel.component.spark.RddCallback</code> type.
+         * @group producer
+         */
+        default SparkEndpointBuilder rddCallback(String rddCallback) {
+            setProperty("rddCallback", rddCallback);
             return this;
         }
     }
 
     /**
-     * Advanced builder for endpoint for the Spark Rest component.
+     * Advanced builder for endpoint for the Apache Spark component.
      */
     public interface AdvancedSparkEndpointBuilder
             extends
-                EndpointConsumerBuilder {
+                EndpointProducerBuilder {
         default SparkEndpointBuilder basic() {
             return (SparkEndpointBuilder) this;
         }
@@ -218,48 +190,6 @@ public interface SparkEndpointBuilderFactory {
             return this;
         }
         /**
-         * Whether or not the consumer should try to find a target consumer by
-         * matching the URI prefix if no exact match is found.
-         * The option is a <code>boolean</code> type.
-         * @group advanced
-         */
-        default AdvancedSparkEndpointBuilder matchOnUriPrefix(
-                boolean matchOnUriPrefix) {
-            setProperty("matchOnUriPrefix", matchOnUriPrefix);
-            return this;
-        }
-        /**
-         * Whether or not the consumer should try to find a target consumer by
-         * matching the URI prefix if no exact match is found.
-         * The option will be converted to a <code>boolean</code> type.
-         * @group advanced
-         */
-        default AdvancedSparkEndpointBuilder matchOnUriPrefix(
-                String matchOnUriPrefix) {
-            setProperty("matchOnUriPrefix", matchOnUriPrefix);
-            return this;
-        }
-        /**
-         * To use a custom SparkBinding to map to/from Camel message.
-         * The option is a
-         * <code>org.apache.camel.component.sparkrest.SparkBinding</code> type.
-         * @group advanced
-         */
-        default AdvancedSparkEndpointBuilder sparkBinding(Object sparkBinding) {
-            setProperty("sparkBinding", sparkBinding);
-            return this;
-        }
-        /**
-         * To use a custom SparkBinding to map to/from Camel message.
-         * The option will be converted to a
-         * <code>org.apache.camel.component.sparkrest.SparkBinding</code> type.
-         * @group advanced
-         */
-        default AdvancedSparkEndpointBuilder sparkBinding(String sparkBinding) {
-            setProperty("sparkBinding", sparkBinding);
-            return this;
-        }
-        /**
          * Sets whether synchronous processing should be strictly used, or Camel
          * is allowed to use asynchronous processing (if supported).
          * The option is a <code>boolean</code> type.
@@ -280,15 +210,25 @@ public interface SparkEndpointBuilderFactory {
             return this;
         }
     }
+
+    /**
+     * Proxy enum for <code>org.apache.camel.component.spark.EndpointType</code>
+     * enum.
+     */
+    enum EndpointType {
+        rdd,
+        dataframe,
+        hive;
+    }
     /**
-     * The spark-rest component is used for hosting REST services which has been
-     * defined using Camel rest-dsl.
-     * Maven coordinates: org.apache.camel:camel-spark-rest
+     * The spark component can be used to send RDD or DataFrame jobs to Apache
+     * Spark cluster.
+     * Maven coordinates: org.apache.camel:camel-spark
      */
     default SparkEndpointBuilder spark(String path) {
         class SparkEndpointBuilderImpl extends AbstractEndpointBuilder implements SparkEndpointBuilder, AdvancedSparkEndpointBuilder {
             public SparkEndpointBuilderImpl(String path) {
-                super("spark-rest", path);
+                super("spark", path);
             }
         }
         return new SparkEndpointBuilderImpl(path);

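The regenerated factory above switches the DSL from the Spark Rest consumer to the Apache Spark producer, so routes can now target rdd, dataframe, or hive jobs fluently; the path passed to spark(...) selects the EndpointType. A usage sketch, assuming Camel 3's endpoint-DSL EndpointRouteBuilder is available and that registry beans named myRdd and countLines exist (both names are hypothetical):

    import org.apache.camel.builder.endpoint.EndpointRouteBuilder;

    public class SparkJobRoute extends EndpointRouteBuilder {

        @Override
        public void configure() {
            // "#myRdd" and "#countLines" are hypothetical registry
            // references; the String-typed rdd(...) and rddCallback(...)
            // overloads above convert them to the target types.
            from("direct:sparkJob")
                .to(spark("rdd")
                        .rdd("#myRdd")
                        .rddCallback("#countLines"));
        }
    }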