Repository: camel
Updated Branches:
  refs/heads/camel-2.15.x 8bb7466c9 -> 3c45d3726


Add component documentation.


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/3c45d372
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/3c45d372
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/3c45d372

Branch: refs/heads/camel-2.15.x
Commit: 3c45d3726636b533247fb17a28e6440c34686773
Parents: 8bb7466
Author: Claus Ibsen <davscl...@apache.org>
Authored: Fri Apr 17 16:48:33 2015 +0200
Committer: Claus Ibsen <davscl...@apache.org>
Committed: Fri Apr 17 17:00:42 2015 +0200

----------------------------------------------------------------------
 .../component/openshift/OpenShiftComponent.java | 14 ++++++++
 .../component/openshift/OpenShiftEndpoint.java  | 35 ++++++++++++++++++--
 .../component/sparkrest/SparkComponent.java     | 14 ++++++++
 .../component/sparkrest/SparkConfiguration.java | 22 ++++++++++++
 .../component/sparkrest/SparkEndpoint.java      | 17 +++++++++-
 5 files changed, 98 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/3c45d372/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftComponent.java
----------------------------------------------------------------------
diff --git a/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftComponent.java b/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftComponent.java
index fd4747b..215742b 100644
--- a/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftComponent.java
+++ b/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftComponent.java
@@ -48,6 +48,9 @@ public class OpenShiftComponent extends UriEndpointComponent {
         return username;
     }
 
+    /**
+     * The username to login to the openshift server.
+     */
     public void setUsername(String username) {
         this.username = username;
     }
@@ -56,6 +59,9 @@ public class OpenShiftComponent extends UriEndpointComponent {
         return password;
     }
 
+    /**
+     * The password for login to the openshift server.
+     */
     public void setPassword(String password) {
         this.password = password;
     }
@@ -64,6 +70,9 @@ public class OpenShiftComponent extends UriEndpointComponent {
         return domain;
     }
 
+    /**
+     * Domain name. If not specified then the default domain is used.
+     */
     public void setDomain(String domain) {
         this.domain = domain;
     }
@@ -72,6 +81,11 @@ public class OpenShiftComponent extends UriEndpointComponent {
         return server;
     }
 
+    /**
+     * URL to the openshift server.
+     * If not specified then the default value from the local openshift configuration file ~/.openshift/express.conf is used.
+     * And if that fails as well then "openshift.redhat.com" is used.
+     */
     public void setServer(String server) {
         this.server = server;
     }

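For reference, the component-level options documented above can also be set programmatically. A minimal sketch; the DefaultCamelContext wiring and the credential/host values are assumptions for illustration only, not part of this commit:

import org.apache.camel.CamelContext;
import org.apache.camel.component.openshift.OpenShiftComponent;
import org.apache.camel.impl.DefaultCamelContext;

public class OpenShiftComponentSetup {
    public static void main(String[] args) throws Exception {
        CamelContext context = new DefaultCamelContext();

        // Component-level options documented in this commit
        OpenShiftComponent openshift = new OpenShiftComponent();
        openshift.setUsername("myuser");              // hypothetical username
        openshift.setPassword("secret");              // hypothetical password
        openshift.setDomain("mydomain");              // optional; the default domain is used when not set
        openshift.setServer("openshift.redhat.com");  // optional; ~/.openshift/express.conf is consulted when not set

        context.addComponent("openshift", openshift);
        context.start();
        context.stop();
    }
}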
http://git-wip-us.apache.org/repos/asf/camel/blob/3c45d372/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftEndpoint.java
----------------------------------------------------------------------
diff --git a/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftEndpoint.java b/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftEndpoint.java
index 28849c9..d73a7b2 100644
--- a/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftEndpoint.java
+++ b/components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftEndpoint.java
@@ -43,11 +43,11 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
     private String domain;
     @UriParam
     private String server;
-    @UriParam(enums = "list,start,stop,restart,state")
+    @UriParam(label = "producer", enums = "list,start,stop,restart,state")
     private String operation;
-    @UriParam
+    @UriParam(label = "producer")
     private String application;
-    @UriParam
+    @UriParam(label = "producer", enums = "pojo,json")
     private String mode;
 
     public OpenShiftEndpoint(String endpointUri, Component component) {
@@ -89,6 +89,9 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return username;
     }
 
+    /**
+     * The username to login to the openshift server.
+     */
     public void setUsername(String username) {
         this.username = username;
     }
@@ -97,6 +100,9 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return password;
     }
 
+    /**
+     * The password for login to the openshift server.
+     */
     public void setPassword(String password) {
         this.password = password;
     }
@@ -105,6 +111,9 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return clientId;
     }
 
+    /**
+     * The client id
+     */
     public void setClientId(String clientId) {
         this.clientId = clientId;
     }
@@ -113,6 +122,9 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return domain;
     }
 
+    /**
+     * Domain name. If not specified then the default domain is used.
+     */
     public void setDomain(String domain) {
         this.domain = domain;
     }
@@ -121,6 +133,11 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return server;
     }
 
+    /**
+     * URL to the openshift server.
+     * If not specified then the default value from the local openshift configuration file ~/.openshift/express.conf is used.
+     * And if that fails as well then "openshift.redhat.com" is used.
+     */
     public void setServer(String server) {
         this.server = server;
     }
@@ -129,6 +146,12 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return operation;
     }
 
+    /**
+     * The operation to perform which can be: list, start, stop, restart, and state.
+     * The list operation returns information about all the applications in json format.
+     * The state operation returns the state such as: started, stopped etc.
+     * The other operations do not return any value.
+     */
     public void setOperation(String operation) {
         this.operation = operation;
     }
@@ -141,6 +164,9 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return application;
     }
 
+    /**
+     * The application name to start, stop, restart, or get the state.
+     */
     public void setApplication(String application) {
         this.application = application;
     }
@@ -149,6 +175,9 @@ public class OpenShiftEndpoint extends ScheduledPollEndpoint {
         return mode;
     }
 
+    /**
+     * Whether to output the message body as a pojo or json. For pojo the message is a List<com.openshift.client.IApplication> type.
+     */
     public void setMode(String mode) {
         this.mode = mode;
     }

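The producer options documented above (operation, application, mode) are used on the endpoint URI. A minimal route sketch, assuming the openshift:clientId URI syntax; the client id, credentials and application name are illustrative values only:

import org.apache.camel.builder.RouteBuilder;

public class OpenShiftRoutes extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // list all applications, returned as json
        from("direct:list")
            .to("openshift:myClient?username=myuser&password=secret&operation=list&mode=json");

        // restart a named application; restart does not return a value
        from("direct:restart")
            .to("openshift:myClient?username=myuser&password=secret&operation=restart&application=myapp");
    }
}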
http://git-wip-us.apache.org/repos/asf/camel/blob/3c45d372/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkComponent.java
----------------------------------------------------------------------
diff --git a/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkComponent.java b/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkComponent.java
index b5b91a5..45cf59e 100644
--- a/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkComponent.java
+++ b/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkComponent.java
@@ -51,6 +51,11 @@ public class SparkComponent extends UriEndpointComponent implements RestConsumer
         return port;
     }
 
+    /**
+     * Port number.
+     * <p/>
+     * Will by default use 4567
+     */
     public void setPort(int port) {
         this.port = port;
     }
@@ -59,6 +64,9 @@ public class SparkComponent extends UriEndpointComponent implements RestConsumer
         return ipAddress;
     }
 
+    /**
+     * Set the IP address that Spark should listen on. If not called the default address is '0.0.0.0'.
+     */
     public void setIpAddress(String ipAddress) {
         this.ipAddress = ipAddress;
     }
@@ -67,6 +75,9 @@ public class SparkComponent extends UriEndpointComponent implements RestConsumer
         return sparkConfiguration;
     }
 
+    /**
+     * To use the shared SparkConfiguration
+     */
     public void setSparkConfiguration(SparkConfiguration sparkConfiguration) {
         this.sparkConfiguration = sparkConfiguration;
     }
@@ -75,6 +86,9 @@ public class SparkComponent extends UriEndpointComponent implements RestConsumer
         return sparkBinding;
     }
 
+    /**
+     * To use a custom SparkBinding to map to/from Camel message.
+     */
     public void setSparkBinding(SparkBinding sparkBinding) {
         this.sparkBinding = sparkBinding;
     }

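A minimal sketch of setting the Spark component options documented above before starting the context; the chosen port and address are assumptions for illustration only:

import org.apache.camel.CamelContext;
import org.apache.camel.component.sparkrest.SparkComponent;
import org.apache.camel.impl.DefaultCamelContext;

public class SparkComponentSetup {
    public static void main(String[] args) throws Exception {
        CamelContext context = new DefaultCamelContext();

        SparkComponent spark = new SparkComponent();
        spark.setPort(8080);             // defaults to 4567 when not set
        spark.setIpAddress("127.0.0.1"); // defaults to 0.0.0.0 when not set

        context.addComponent("spark-rest", spark);
        context.start();
        context.stop();
    }
}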
http://git-wip-us.apache.org/repos/asf/camel/blob/3c45d372/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkConfiguration.java
----------------------------------------------------------------------
diff --git a/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkConfiguration.java b/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkConfiguration.java
index cae1e7b..ac1e040 100644
--- a/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkConfiguration.java
+++ b/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkConfiguration.java
@@ -35,6 +35,12 @@ public class SparkConfiguration {
         return mapHeaders;
     }
 
+    /**
+     * If this option is enabled, then during binding from Spark to Camel Message the headers will be mapped as well
+     * (eg added as headers to the Camel Message). You can turn off this option to disable this.
+     * The headers can still be accessed from the org.apache.camel.component.sparkrest.SparkMessage message with the
+     * method getRequest() that returns the Spark HTTP request instance.
+     */
     public void setMapHeaders(boolean mapHeaders) {
         this.mapHeaders = mapHeaders;
     }
@@ -43,6 +49,15 @@ public class SparkConfiguration {
         return disableStreamCache;
     }
 
+    /**
+     * Determines whether or not the raw input stream from Spark HttpRequest#getContent() is cached
+     * (Camel will read the stream into a light-weight in-memory stream cache).
+     * By default Camel will cache the Netty input stream to support reading it multiple times to ensure Camel
+     * can retrieve all data from the stream. However you can set this option to true when you for example need
+     * to access the raw stream, such as streaming it directly to a file or other persistent store.
+     * Mind that if you enable this option, then you cannot read the Netty stream multiple times out of the box,
+     * and you would need to manually reset the reader index on the Spark raw stream.
+     */
     public void setDisableStreamCache(boolean disableStreamCache) {
         this.disableStreamCache = disableStreamCache;
     }
@@ -51,6 +66,9 @@ public class SparkConfiguration {
         return urlDecodeHeaders;
     }
 
+    /**
+     * If this option is enabled, then during binding from Spark to Camel Message the header values will be URL decoded (eg %20 will be a space character).
+     */
     public void setUrlDecodeHeaders(boolean urlDecodeHeaders) {
         this.urlDecodeHeaders = urlDecodeHeaders;
     }
@@ -59,6 +77,10 @@ public class SparkConfiguration {
         return transferException;
     }
 
+    /**
+     * If enabled, and an Exchange failed processing on the consumer side, then the caused Exception
+     * will be sent back serialized in the response as an application/x-java-serialized-object content type.
+     */
     public void setTransferException(boolean transferException) {
         this.transferException = transferException;
     }

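A minimal sketch showing the shared SparkConfiguration options documented above wired onto the component; the wiring and the chosen values are assumptions for illustration only:

import org.apache.camel.CamelContext;
import org.apache.camel.component.sparkrest.SparkComponent;
import org.apache.camel.component.sparkrest.SparkConfiguration;
import org.apache.camel.impl.DefaultCamelContext;

public class SparkConfigurationSetup {
    public static void main(String[] args) throws Exception {
        SparkConfiguration config = new SparkConfiguration();
        config.setMapHeaders(true);          // map Spark headers onto the Camel Message
        config.setUrlDecodeHeaders(true);    // URL decode mapped header values (eg %20 becomes a space)
        config.setDisableStreamCache(false); // keep the Netty stream cache so the body can be read multiple times
        config.setTransferException(true);   // send the caused Exception back serialized when processing fails

        SparkComponent spark = new SparkComponent();
        spark.setSparkConfiguration(config); // share the configuration across spark-rest endpoints

        CamelContext context = new DefaultCamelContext();
        context.addComponent("spark-rest", spark);
        context.start();
        context.stop();
    }
}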
http://git-wip-us.apache.org/repos/asf/camel/blob/3c45d372/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkEndpoint.java
----------------------------------------------------------------------
diff --git a/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkEndpoint.java b/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkEndpoint.java
index 1dbe524..0bc818d 100644
--- a/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkEndpoint.java
+++ b/components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkEndpoint.java
@@ -30,7 +30,7 @@ import spark.route.HttpMethod;
 
 @UriEndpoint(scheme = "spark-rest", title = "Spark Rest", syntax = "spark-rest:verb:path", consumerOnly = true, consumerClass = SparkConsumer.class, label = "rest")
 public class SparkEndpoint extends DefaultEndpoint {
-    @UriPath @Metadata(required = "true")
+    @UriPath(enums = "get,post,put,patch,delete,head,trace,connect,options") @Metadata(required = "true")
     private String verb;
     @UriPath @Metadata(required = "true")
     private String path;
@@ -49,6 +49,9 @@ public class SparkEndpoint extends DefaultEndpoint {
         return sparkConfiguration;
     }
 
+    /**
+     * To use the SparkConfiguration
+     */
     public void setSparkConfiguration(SparkConfiguration sparkConfiguration) {
         this.sparkConfiguration = sparkConfiguration;
     }
@@ -57,6 +60,9 @@ public class SparkEndpoint extends DefaultEndpoint {
         return sparkBinding;
     }
 
+    /**
+     * To use a custom SparkBinding to map to/from Camel message.
+     */
     public void setSparkBinding(SparkBinding sparkBinding) {
         this.sparkBinding = sparkBinding;
     }
@@ -65,6 +71,9 @@ public class SparkEndpoint extends DefaultEndpoint {
         return verb;
     }
 
+    /**
+     * get, post, put, patch, delete, head, trace, connect, or options.
+     */
     public void setVerb(String verb) {
         this.verb = verb;
     }
@@ -73,6 +82,9 @@ public class SparkEndpoint extends DefaultEndpoint {
         return path;
     }
 
+    /**
+     * The content path which supports Spark syntax.
+     */
     public void setPath(String path) {
         this.path = path;
     }
@@ -81,6 +93,9 @@ public class SparkEndpoint extends DefaultEndpoint {
         return accept;
     }
 
+    /**
+     * Accept type such as: 'text/xml', or 'application/json'. By default we accept all kinds of types.
+     */
     public void setAccept(String accept) {
         this.accept = accept;
     }

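A minimal consumer route sketch using the spark-rest:verb:path syntax and the endpoint options documented above; the hello path, the accept value and the constant response are assumptions for illustration only:

import org.apache.camel.builder.RouteBuilder;

public class SparkRestRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // expose GET /hello, only accepting application/json requests
        from("spark-rest:get:hello?accept=application/json")
            .transform().constant("{ \"message\": \"hi\" }");
    }
}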