This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit b45bb5dd8eeb5138a11d28c1e6ab497f3f427df7
Author: Christoph Deppisch <cdeppi...@redhat.com>
AuthorDate: Wed Nov 30 22:19:41 2022 +0100

    Add experimental Kamelets using data type converter API
---
 .github/workflows/yaks-tests.yaml                  |   6 +
 experimental/aws-ddb-sink.exp.kamelet.yaml         | 146 ++++++++++++++++++
 experimental/aws-s3-source.exp.kamelet.yaml        | 165 +++++++++++++++++++++
 .../test/aws-ddb-sink/amazonDDBClient.groovy       |  53 +++++++
 .../test/aws-ddb-sink/aws-ddb-sink-binding.yaml    |  33 +++--
 .../aws-ddb-sink/aws-ddb-sink-deleteItem.feature   |  65 ++++++++
 .../test/aws-ddb-sink/aws-ddb-sink-putItem.feature |  58 ++++++++
 .../aws-ddb-sink/aws-ddb-sink-updateItem.feature   |  68 +++++++++
 experimental/test/aws-ddb-sink/putItem.groovy      |  30 ++++
 experimental/test/aws-ddb-sink/verifyItems.groovy  |  18 +++
 .../test/aws-ddb-sink}/yaks-config.yaml            |  29 ++--
 experimental/test/aws-s3/amazonS3Client.groovy     |  36 +++++
 .../test}/aws-s3/aws-s3-cloudevents.feature        |   2 +
 .../test}/aws-s3/aws-s3-knative.feature            |   2 +
 .../test}/aws-s3/aws-s3-to-knative.yaml            |   2 +-
 .../test}/aws-s3/yaks-config.yaml                  |  10 +-
 test/aws-s3/yaks-config.yaml                       |   1 -
 17 files changed, 682 insertions(+), 42 deletions(-)

diff --git a/.github/workflows/yaks-tests.yaml 
b/.github/workflows/yaks-tests.yaml
index 73dcec77..7f168ca2 100644
--- a/.github/workflows/yaks-tests.yaml
+++ b/.github/workflows/yaks-tests.yaml
@@ -65,6 +65,7 @@ jobs:
 
         # Overwrite JitPack coordinates in the local Kamelets so the tests can 
use the utility classes in this PR
         find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i 
"s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g"
 {} +
+        find experimental -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i 
"s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g"
 {} +
     - name: Get Camel K CLI
       run: |
         curl --fail -L --silent 
https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz
 -o kamel.tar.gz
@@ -121,6 +122,11 @@ jobs:
         yaks run test/earthquake-source $YAKS_RUN_OPTIONS
         yaks run test/rest-openapi-sink $YAKS_RUN_OPTIONS
         yaks run test/kafka $YAKS_RUN_OPTIONS
+    - name: YAKS Tests on experimental Kamelets
+      run: |
+        echo "Running tests for experimental Kamelets"
+        yaks run experimental/test/aws-ddb-sink $YAKS_RUN_OPTIONS
+        yaks run experimental/test/aws-s3 $YAKS_RUN_OPTIONS
     - name: YAKS Report
       if: failure()
       run: |
diff --git a/experimental/aws-ddb-sink.exp.kamelet.yaml 
b/experimental/aws-ddb-sink.exp.kamelet.yaml
new file mode 100644
index 00000000..e19185fa
--- /dev/null
+++ b/experimental/aws-ddb-sink.exp.kamelet.yaml
@@ -0,0 +1,146 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+apiVersion: camel.apache.org/v1alpha1
+kind: Kamelet
+metadata:
+  name: aws-ddb-sink-experimental
+  annotations:
+    camel.apache.org/kamelet.support.level: "Experimental"
+    camel.apache.org/catalog.version: "main-SNAPSHOT"
+    camel.apache.org/kamelet.icon: 
"data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3Mk
 [...]
+    camel.apache.org/provider: "Apache Software Foundation"
+    camel.apache.org/kamelet.group: "AWS DynamoDB Streams"
+  labels:
+    camel.apache.org/kamelet.type: "sink"
+spec:
+  definition:
+    title: "AWS DynamoDB Sink"
+    description: |-
+      Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes 
an item on the specified AWS DynamoDB table.
+
+      The basic authentication method for the AWS DynamoDB service is to 
specify an access key and a secret key. These parameters are optional because 
the Kamelet provides a default credentials provider.
+
+      If you use the default credentials provider, the DynamoDB client loads 
the credentials through this provider and doesn't use the basic authentication 
method.
+
+      This Kamelet expects a JSON-formatted body and it must include the 
primary key values that define the DynamoDB item. The mapping between the JSON 
fields and table attribute values is done by key. For example, for  
'{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or updates an 
item in the specified AWS DynamoDB table and sets the values for the 'username' 
and 'city' attributes. 
+    required:
+      - table
+      - region
+    type: object
+    properties:
+      table:
+        title: Table
+        description: The name of the DynamoDB table.
+        type: string
+      accessKey:
+        title: Access Key
+        description: The access key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      secretKey:
+        title: Secret Key
+        description: The secret key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      region:
+        title: AWS Region
+        description: The AWS region to access.
+        type: string
+        enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", 
"ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", 
"af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", 
"ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", 
"sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", 
"ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", 
"cn-northwest-1", "us-isob-east-1", "aws-global", "a [...]
+      operation:
+        title: Operation
+        description: "The operation to perform. The options are PutItem, 
UpdateItem, or DeleteItem."
+        type: string
+        default: PutItem
+        example: PutItem
+      writeCapacity:
+        title: Write Capacity
+        description: The provisioned throughput to reserve for writing 
resources to your table.
+        type: integer
+        default: 1
+      useDefaultCredentialsProvider:
+        title: Default Credentials Provider
+        description: If true, the DynamoDB client loads credentials through a 
default credentials provider. If false, it uses the basic authentication method 
(access key and secret key).
+        type: boolean
+        x-descriptors:
+          - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      uriEndpointOverride:
+        title: Overwrite Endpoint URI
+        description: The overriding endpoint URI. To use this option, you must 
also select the `overrideEndpoint` option.
+        type: string
+      overrideEndpoint:
+        title: Endpoint Overwrite
+        description: Select this option to override the endpoint URI. To use 
this option, you must also provide a URI for the `uriEndpointOverride` option.
+        type: boolean
+        x-descriptors:
+          - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      inputFormat:
+        title: Input Type
+        description: Specify the input type for this Kamelet. The Kamelet will 
automatically apply conversion logic in order to transform message content to 
this data type.
+        type: string
+        default: json
+        example: json
+  types:
+    in:
+      mediaType: application/json
+  dependencies:
+  - github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT
+  - "camel:core"
+  - "camel:jackson"
+  - "camel:aws2-ddb"
+  - "camel:kamelet"
+  template:
+    beans:
+    - name: dataTypeRegistry
+      type: 
"#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+    - name: inputTypeProcessor
+      type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+      property:
+        - key: scheme
+          value: 'aws2-ddb'
+        - key: format
+          value: '{{inputFormat}}'
+        - key: registry
+          value: '#bean:{{dataTypeRegistry}}'
+    from:
+      uri: "kamelet:source"
+      steps:
+      - set-property:
+          name: operation
+          constant: "{{operation}}"
+      - process:
+          ref: "{{inputTypeProcessor}}"
+      - to:
+          uri: "aws2-ddb:{{table}}"
+          parameters:
+            secretKey: "{{?secretKey}}"
+            accessKey: "{{?accessKey}}"
+            region: "{{region}}"
+            operation: "{{operation}}"
+            writeCapacity: "{{?writeCapacity}}"
+            useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}"
+            uriEndpointOverride: "{{?uriEndpointOverride}}"
+            overrideEndpoint: "{{overrideEndpoint}}"
diff --git a/experimental/aws-s3-source.exp.kamelet.yaml 
b/experimental/aws-s3-source.exp.kamelet.yaml
new file mode 100644
index 00000000..7a8d8fe5
--- /dev/null
+++ b/experimental/aws-s3-source.exp.kamelet.yaml
@@ -0,0 +1,165 @@
+apiVersion: camel.apache.org/v1alpha1
+kind: Kamelet
+metadata:
+  name: aws-s3-source-experimental
+  annotations:
+    camel.apache.org/kamelet.support.level: "Experimental"
+    camel.apache.org/catalog.version: "main-SNAPSHOT"
+    camel.apache.org/kamelet.icon: 
"data:image/svg+xml;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHg9IjAiIHk9IjAiIHZpZXdCb3g9IjAgMCAyNDguMiAzMDAiIHhtbDpzcGFjZT0icHJlc2VydmUiPjxzdHlsZT4uc3QyOHtmaWxsOiM4YzMxMjN9LnN0Mjl7ZmlsbDojZTA1MjQzfTwvc3R5bGU+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik0yMCA1Mi4xTDAgNjJ2MTc1LjVsMjAgOS45LjEtLjFWNTIuMmwtLjEtLjEiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNyAyMjJMMjAgMjQ3LjVWNTIuMUwxMjcgNzd2MTQ1Ii8+PHBhdGggY2xhc3M9InN0MjgiIG
 [...]
+    camel.apache.org/provider: "Apache Software Foundation"
+    camel.apache.org/kamelet.group: "AWS S3"
+  labels:
+    camel.apache.org/kamelet.type: "source"
+spec:
+  definition:
+    title: "AWS S3 Source"
+    description: |-
+      Receive data from an Amazon S3 Bucket.
+
+      The basic authentication method for the S3 service is to specify an 
access key and a secret key. These parameters are optional because the Kamelet 
provides a default credentials provider.
+      
+      If you use the default credentials provider, the S3 client loads the 
credentials through this provider and doesn't use the basic authentication 
method.
+
+      Two headers will be duplicated with different names for clarity at sink 
level, CamelAwsS3Key will be duplicated into aws.s3.key and 
CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name
+    required:
+      - bucketNameOrArn
+      - region
+    type: object
+    properties:
+      bucketNameOrArn:
+        title: Bucket Name
+        description: The S3 Bucket name or Amazon Resource Name (ARN).
+        type: string
+      deleteAfterRead:
+        title: Auto-delete Objects
+        description: Specifies to delete objects after consuming them.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: true
+      accessKey:
+        title: Access Key
+        description: The access key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      secretKey:
+        title: Secret Key
+        description: The secret key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      region:
+        title: AWS Region
+        description: The AWS region to access.
+        type: string
+        enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", 
"ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", 
"af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", 
"ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", 
"sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", 
"ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", 
"cn-northwest-1", "us-isob-east-1", "aws-global", "a [...]
+      autoCreateBucket:
+        title: Autocreate Bucket
+        description: Specifies to automatically create the S3 bucket.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      includeBody:
+        title: Include Body
+        description: If true, the exchange is consumed and put into the body 
and closed. If false, the S3Object stream is put raw into the body and the 
headers are set with the S3 object metadata.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: true
+      prefix:
+        title: Prefix
+        description: The AWS S3 bucket prefix to consider while searching.
+        type: string
+        example: 'folder/'
+      ignoreBody:
+        title: Ignore Body
+        description: If true, the S3 Object body is ignored. Setting this to 
true overrides any behavior defined by the `includeBody` option. If false, the 
S3 object is put in the body.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      useDefaultCredentialsProvider:
+        title: Default Credentials Provider
+        description: If true, the S3 client loads credentials through a 
default credentials provider. If false, it uses the basic authentication method 
(access key and secret key).
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      uriEndpointOverride:
+        title: Overwrite Endpoint URI
+        description: The overriding endpoint URI. To use this option, you must 
also select the `overrideEndpoint` option.
+        type: string
+      overrideEndpoint:
+        title: Endpoint Overwrite
+        description: Select this option to override the endpoint URI. To use 
this option, you must also provide a URI for the `uriEndpointOverride` option.
+        type: boolean
+        x-descriptors:
+          - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      delay:
+        title: Delay
+        description: The number of milliseconds before the next poll of the 
selected bucket.
+        type: integer
+        default: 500
+      outputFormat:
+        title: Output Type
+        description: Choose the output type for this Kamelet. The Kamelet 
supports different output types and performs automatic message conversion 
according to this data type.
+        type: string
+        default: binary
+        example: binary
+  dependencies:
+    - "camel:core"
+    - "camel:aws2-s3"
+    - "github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT"
+    - "camel:kamelet"
+  template:
+    beans:
+      - name: dataTypeRegistry
+        type: 
"#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+      - name: outputTypeProcessor
+        type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+        property:
+          - key: scheme
+            value: 'aws2-s3'
+          - key: format
+            value: '{{outputFormat}}'
+          - key: registry
+            value: '#bean:{{dataTypeRegistry}}'
+      - name: renameHeaders
+        type: 
"#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
+        property:
+          - key: prefix
+            value: 'CamelAwsS3'
+          - key: renamingPrefix
+            value: 'aws.s3.'
+          - key: mode
+            value: 'filtering'
+          - key: selectedHeaders
+            value: 'CamelAwsS3Key,CamelAwsS3BucketName'
+    from:
+      uri: "aws2-s3:{{bucketNameOrArn}}"
+      parameters:
+        autoCreateBucket: "{{autoCreateBucket}}"
+        secretKey: "{{?secretKey}}"
+        accessKey: "{{?accessKey}}"
+        region: "{{region}}"
+        includeBody: "{{includeBody}}"
+        ignoreBody: "{{ignoreBody}}"
+        deleteAfterRead: "{{deleteAfterRead}}"
+        prefix: "{{?prefix}}"
+        useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}"
+        uriEndpointOverride: "{{?uriEndpointOverride}}"
+        overrideEndpoint: "{{overrideEndpoint}}"
+        delay: "{{delay}}"
+      steps:
+      - process:
+          ref: "{{renameHeaders}}"
+      - process:
+          ref: "{{outputTypeProcessor}}"
+      - to: "kamelet:sink"
diff --git a/experimental/test/aws-ddb-sink/amazonDDBClient.groovy 
b/experimental/test/aws-ddb-sink/amazonDDBClient.groovy
new file mode 100644
index 00000000..dc0b2a8b
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/amazonDDBClient.groovy
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
+import software.amazon.awssdk.regions.Region
+import software.amazon.awssdk.services.dynamodb.DynamoDbClient
+import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition
+import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement
+import software.amazon.awssdk.services.dynamodb.model.KeyType
+import software.amazon.awssdk.services.dynamodb.model.ProvisionedThroughput
+import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType
+
+DynamoDbClient amazonDDBClient = DynamoDbClient
+        .builder()
+        
.endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_DYNAMODB_URL}"))
+        .credentialsProvider(StaticCredentialsProvider.create(
+                AwsBasicCredentials.create(
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}",
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}")
+        ))
+        .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}"))
+        .build()
+
+amazonDDBClient.createTable(b -> {
+        b.tableName("${aws.ddb.tableName}")
+        b.keySchema(
+                
KeySchemaElement.builder().attributeName("id").keyType(KeyType.HASH).build(),
+        )
+        b.attributeDefinitions(
+                
AttributeDefinition.builder().attributeName("id").attributeType(ScalarAttributeType.N).build(),
+        )
+        b.provisionedThroughput(
+                ProvisionedThroughput.builder()
+                        .readCapacityUnits(1L)
+                        .writeCapacityUnits(1L).build())
+})
+
+return amazonDDBClient
diff --git a/test/aws-s3/aws-s3-to-knative.yaml 
b/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
similarity index 75%
copy from test/aws-s3/aws-s3-to-knative.yaml
copy to experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
index e99ee20f..6b4b2b02 100644
--- a/test/aws-s3/aws-s3-to-knative.yaml
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
@@ -18,30 +18,33 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: KameletBinding
 metadata:
-  name: aws-s3-to-knative
+  name: aws-ddb-sink-binding
 spec:
   source:
     ref:
       kind: Kamelet
       apiVersion: camel.apache.org/v1alpha1
-      name: aws-s3-source
+      name: timer-source
     properties:
-      bucketNameOrArn: ${aws.s3.bucketNameOrArn}
-      overrideEndpoint: true
-      outputFormat: ${aws.s3.output}
-      uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
-      accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
-      secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
-      region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
+      period: ${timer.source.period}
+      message: '${aws.ddb.json.data}'
   steps:
     - ref:
         kind: Kamelet
         apiVersion: camel.apache.org/v1alpha1
-        name: log-sink
-      properties:
-        showHeaders: true
+        name: log-action
+        properties:
+          showHeaders: true
   sink:
     ref:
-      kind: Broker
-      apiVersion: eventing.knative.dev/v1
-      name: default
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: aws-ddb-sink-experimental
+    properties:
+      table: ${aws.ddb.tableName}
+      operation: ${aws.ddb.operation}
+      overrideEndpoint: true
+      uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_DYNAMODB_URL}
+      accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
+      secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
+      region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature 
b/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature
new file mode 100644
index 00000000..6c54fdc3
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature
@@ -0,0 +1,65 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+@experimental
+Feature: AWS DDB Sink - DeleteItem
+
+  Background:
+    Given Kamelet aws-ddb-sink-experimental is available
+    Given Camel K resource polling configuration
+      | maxAttempts          | 200   |
+      | delayBetweenAttempts | 2000  |
+    Given variables
+      | timer.source.period  | 10000 |
+      | aws.ddb.operation    | DeleteItem |
+      | aws.ddb.tableName    | movies |
+      | aws.ddb.item.id      | 1 |
+      | aws.ddb.item.year    | 1985 |
+      | aws.ddb.item.title   | Back to the future |
+      | aws.ddb.json.data    | {"id": ${aws.ddb.item.id}} |
+
+  Scenario: Start LocalStack container
+    Given Enable service DYNAMODB
+    Given start LocalStack container
+    And log 'Started LocalStack container: 
${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-DDB client
+    Given New global Camel context
+    Given load to Camel registry amazonDDBClient.groovy
+
+  Scenario: Create item on AWS-DDB
+    Given run script putItem.groovy
+    Given variables
+      | aws.ddb.items     | [{year=AttributeValue(N=${aws.ddb.item.year}), 
id=AttributeValue(N=${aws.ddb.item.id}), 
title=AttributeValue(S=${aws.ddb.item.title})}] |
+    Then run script verifyItems.groovy
+
+  Scenario: Create AWS-DDB Kamelet sink binding
+    When load KameletBinding aws-ddb-sink-binding.yaml
+    And KameletBinding aws-ddb-sink-binding is available
+    And Camel K integration aws-ddb-sink-binding is running
+    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    Then sleep 10sec
+
+  Scenario: Verify Kamelet sink
+    Given variables
+      | aws.ddb.items     | [] |
+    Then run script verifyItems.groovy
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-ddb-sink-binding
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature 
b/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature
new file mode 100644
index 00000000..f117889b
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature
@@ -0,0 +1,58 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+@experimental
+Feature: AWS DDB Sink - PutItem
+
+  Background:
+    Given Kamelet aws-ddb-sink-experimental is available
+    Given Camel K resource polling configuration
+      | maxAttempts          | 200   |
+      | delayBetweenAttempts | 2000  |
+    Given variables
+      | timer.source.period  | 10000 |
+      | aws.ddb.operation    | PutItem |
+      | aws.ddb.tableName    | movies |
+      | aws.ddb.item.id      | 1 |
+      | aws.ddb.item.year    | 1977 |
+      | aws.ddb.item.title   | Star Wars IV |
+      | aws.ddb.json.data    | { "id":${aws.ddb.item.id}, 
"year":${aws.ddb.item.year}, "title":"${aws.ddb.item.title}" } |
+      | aws.ddb.items        | [{year=AttributeValue(N=${aws.ddb.item.year}), 
id=AttributeValue(N=${aws.ddb.item.id}), 
title=AttributeValue(S=${aws.ddb.item.title})}] |
+
+  Scenario: Start LocalStack container
+    Given Enable service DYNAMODB
+    Given start LocalStack container
+    And log 'Started LocalStack container: 
${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-DDB client
+    Given New global Camel context
+    Given load to Camel registry amazonDDBClient.groovy
+
+  Scenario: Create AWS-DDB Kamelet sink binding
+    When load KameletBinding aws-ddb-sink-binding.yaml
+    And KameletBinding aws-ddb-sink-binding is available
+    And Camel K integration aws-ddb-sink-binding is running
+    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    Then sleep 10sec
+
+  Scenario: Verify Kamelet sink
+    Then run script verifyItems.groovy
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-ddb-sink-binding
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature 
b/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature
new file mode 100644
index 00000000..215adbe2
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature
@@ -0,0 +1,68 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+@experimental
+Feature: AWS DDB Sink - UpdateItem
+
+  Background:
+    Given Kamelet aws-ddb-sink-experimental is available
+    Given Camel K resource polling configuration
+      | maxAttempts          | 200   |
+      | delayBetweenAttempts | 2000  |
+    Given variables
+      | timer.source.period    | 10000 |
+      | aws.ddb.operation      | UpdateItem |
+      | aws.ddb.tableName      | movies |
+      | aws.ddb.item.id        | 1 |
+      | aws.ddb.item.year      | 1933 |
+      | aws.ddb.item.title     | King Kong |
+      | aws.ddb.item.title.new | King Kong - Historical |
+      | aws.ddb.item.directors | ["Merian C. Cooper", "Ernest B. Schoedsack"] |
+      | aws.ddb.json.data      | { "key": {"id": ${aws.ddb.item.id}}, "item": 
{"title": "${aws.ddb.item.title.new}", "year": ${aws.ddb.item.year}, 
"directors": ${aws.ddb.item.directors}} } |
+
+  Scenario: Start LocalStack container
+    Given Enable service DYNAMODB
+    Given start LocalStack container
+    And log 'Started LocalStack container: 
${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-DDB client
+    Given New global Camel context
+    Given load to Camel registry amazonDDBClient.groovy
+
+  Scenario: Create item on AWS-DDB
+    Given run script putItem.groovy
+    Given variables
+      | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), 
id=AttributeValue(N=${aws.ddb.item.id}), 
title=AttributeValue(S=${aws.ddb.item.title})}] |
+    Then run script verifyItems.groovy
+
+  Scenario: Create AWS-DDB Kamelet sink binding
+    When load KameletBinding aws-ddb-sink-binding.yaml
+    And KameletBinding aws-ddb-sink-binding is available
+    And Camel K integration aws-ddb-sink-binding is running
+    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    Then sleep 10sec
+
+  Scenario: Verify Kamelet sink
+    Given variables
+      | aws.ddb.item.directors | [Ernest B. Schoedsack, Merian C. Cooper] |
+      | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), 
directors=AttributeValue(SS=${aws.ddb.item.directors}), 
id=AttributeValue(N=${aws.ddb.item.id}), 
title=AttributeValue(S=${aws.ddb.item.title.new})}] |
+    Then run script verifyItems.groovy
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-ddb-sink-binding
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/putItem.groovy 
b/experimental/test/aws-ddb-sink/putItem.groovy
new file mode 100644
index 00000000..fd482f90
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/putItem.groovy
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import software.amazon.awssdk.services.dynamodb.model.AttributeValue
+import software.amazon.awssdk.services.dynamodb.model.ReturnValue
+
+Map<String, AttributeValue> item = new HashMap<>()
+item.put("id", AttributeValue.builder().n("${aws.ddb.item.id}").build())
+item.put("year", AttributeValue.builder().n("${aws.ddb.item.year}").build())
+item.put("title", AttributeValue.builder().s("${aws.ddb.item.title}").build())
+
+amazonDDBClient.putItem(b -> {
+    b.tableName("${aws.ddb.tableName}")
+    b.item(item)
+    b.returnValues(ReturnValue.ALL_OLD)
+})
diff --git a/experimental/test/aws-ddb-sink/verifyItems.groovy b/experimental/test/aws-ddb-sink/verifyItems.groovy
new file mode 100644
index 00000000..b6e9d27c
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/verifyItems.groovy
@@ -0,0 +1,18 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+assert "${aws.ddb.items}".equals(amazonDDBClient.scan(b -> b.tableName("${aws.ddb.tableName}"))?.items()?.toString())
diff --git a/test/aws-s3/yaks-config.yaml b/experimental/test/aws-ddb-sink/yaks-config.yaml
similarity index 73%
copy from test/aws-s3/yaks-config.yaml
copy to experimental/test/aws-ddb-sink/yaks-config.yaml
index 6f1a0d0d..51cf3b52 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/experimental/test/aws-ddb-sink/yaks-config.yaml
@@ -28,38 +28,25 @@ config:
         value: false
       - name: YAKS_KAMELETS_AUTO_REMOVE_RESOURCES
         value: false
-      - name: YAKS_KUBERNETES_AUTO_REMOVE_RESOURCES
-        value: false
-      - name: YAKS_KNATIVE_AUTO_REMOVE_RESOURCES
-        value: false
       - name: YAKS_TESTCONTAINERS_AUTO_REMOVE_RESOURCES
         value: false
       - name: CITRUS_TYPE_CONVERTER
         value: camel
     resources:
-      - amazonS3Client.groovy
-      - aws-s3-credentials.properties
-      - aws-s3-to-log-uri-based.groovy
-      - aws-s3-to-log-secret-based.groovy
-      - aws-s3-uri-binding.yaml
-      - aws-s3-to-knative.yaml
-      - aws-s3-to-knative-channel.yaml
-      - ../utils/knative-channel-to-log.yaml
+      - putItem.groovy
+      - verifyItems.groovy
+      - amazonDDBClient.groovy
+      - aws-ddb-sink-binding.yaml
     cucumber:
       tags:
         - "not @ignored"
     settings:
-      loggers:
-        - name: Logger.Message_IN
-          level: DEBUG
-        - name: Logger.Message_OUT
-          level: DEBUG
       dependencies:
         - groupId: com.amazonaws
-          artifactId: aws-java-sdk-kinesis
+          artifactId: aws-java-sdk-dynamodb
           version: "@aws-java-sdk.version@"
         - groupId: org.apache.camel
-          artifactId: camel-aws2-s3
+          artifactId: camel-aws2-ddb
           version: "@camel.version@"
         - groupId: org.apache.camel
           artifactId: camel-jackson
@@ -69,3 +56,7 @@ config:
     failedOnly: true
     includes:
       - app=camel-k
+pre:
+  - name: Install experimental Kamelets
+    run: |
+      kubectl apply -f ../../aws-ddb-sink.exp.kamelet.yaml -n $YAKS_NAMESPACE
diff --git a/experimental/test/aws-s3/amazonS3Client.groovy b/experimental/test/aws-s3/amazonS3Client.groovy
new file mode 100644
index 00000000..5c3ff8a0
--- /dev/null
+++ b/experimental/test/aws-s3/amazonS3Client.groovy
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
+import software.amazon.awssdk.regions.Region
+import software.amazon.awssdk.services.s3.S3Client
+
+S3Client s3 = S3Client
+        .builder()
+        .endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}"))
+        .credentialsProvider(StaticCredentialsProvider.create(
+                AwsBasicCredentials.create(
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}",
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}")
+        ))
+        .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}"))
+        .build()
+
+s3.createBucket(b -> b.bucket("${aws.s3.bucketNameOrArn}"))
+
+return s3
diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/experimental/test/aws-s3/aws-s3-cloudevents.feature
similarity index 96%
rename from test/aws-s3/aws-s3-cloudevents.feature
rename to experimental/test/aws-s3/aws-s3-cloudevents.feature
index 5774b738..6f5513fc 100644
--- a/test/aws-s3/aws-s3-cloudevents.feature
+++ b/experimental/test/aws-s3/aws-s3-cloudevents.feature
@@ -1,7 +1,9 @@
 @knative
+@experimental
 Feature: AWS S3 Kamelet - cloud events data type
 
   Background:
+    Given Kamelet aws-s3-source-experimental is available
     Given Knative event consumer timeout is 20000 ms
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
diff --git a/test/aws-s3/aws-s3-knative.feature b/experimental/test/aws-s3/aws-s3-knative.feature
similarity index 96%
rename from test/aws-s3/aws-s3-knative.feature
rename to experimental/test/aws-s3/aws-s3-knative.feature
index dc358797..8a6512a9 100644
--- a/test/aws-s3/aws-s3-knative.feature
+++ b/experimental/test/aws-s3/aws-s3-knative.feature
@@ -1,7 +1,9 @@
 @knative
+@experimental
 Feature: AWS S3 Kamelet - Knative binding
 
   Background:
+    Given Kamelet aws-s3-source-experimental is available
     Given Knative event consumer timeout is 20000 ms
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
diff --git a/test/aws-s3/aws-s3-to-knative.yaml b/experimental/test/aws-s3/aws-s3-to-knative.yaml
similarity index 97%
rename from test/aws-s3/aws-s3-to-knative.yaml
rename to experimental/test/aws-s3/aws-s3-to-knative.yaml
index e99ee20f..afa1b572 100644
--- a/test/aws-s3/aws-s3-to-knative.yaml
+++ b/experimental/test/aws-s3/aws-s3-to-knative.yaml
@@ -24,7 +24,7 @@ spec:
     ref:
       kind: Kamelet
       apiVersion: camel.apache.org/v1alpha1
-      name: aws-s3-source
+      name: aws-s3-source-experimental
     properties:
       bucketNameOrArn: ${aws.s3.bucketNameOrArn}
       overrideEndpoint: true
diff --git a/test/aws-s3/yaks-config.yaml b/experimental/test/aws-s3/yaks-config.yaml
similarity index 90%
copy from test/aws-s3/yaks-config.yaml
copy to experimental/test/aws-s3/yaks-config.yaml
index 6f1a0d0d..6431eaf8 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/experimental/test/aws-s3/yaks-config.yaml
@@ -38,13 +38,7 @@ config:
         value: camel
     resources:
       - amazonS3Client.groovy
-      - aws-s3-credentials.properties
-      - aws-s3-to-log-uri-based.groovy
-      - aws-s3-to-log-secret-based.groovy
-      - aws-s3-uri-binding.yaml
       - aws-s3-to-knative.yaml
-      - aws-s3-to-knative-channel.yaml
-      - ../utils/knative-channel-to-log.yaml
     cucumber:
       tags:
         - "not @ignored"
@@ -69,3 +63,7 @@ config:
     failedOnly: true
     includes:
       - app=camel-k
+pre:
+  - name: Install experimental Kamelets
+    run: |
+      kubectl apply -f ../../aws-s3-source.exp.kamelet.yaml -n $YAKS_NAMESPACE
diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml
index 6f1a0d0d..a2831684 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/test/aws-s3/yaks-config.yaml
@@ -42,7 +42,6 @@ config:
       - aws-s3-to-log-uri-based.groovy
       - aws-s3-to-log-secret-based.groovy
       - aws-s3-uri-binding.yaml
-      - aws-s3-to-knative.yaml
       - aws-s3-to-knative-channel.yaml
       - ../utils/knative-channel-to-log.yaml
     cucumber:


Reply via email to