This is an automated email from the ASF dual-hosted git repository. gnodet pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/camel.git
commit 9e0d5c23e8b96051432af31730004bae671552d1 Author: Guillaume Nodet <gno...@gmail.com> AuthorDate: Thu Jul 9 14:07:56 2020 +0200 [CAMEL-11807] Upgrade camel-kafka to junit5 --- components/camel-kafka/pom.xml | 11 ++- .../component/kafka/BaseEmbeddedKafkaTest.java | 6 +- .../camel/component/kafka/KafkaComponentTest.java | 39 +++++----- .../kafka/KafkaConsumerBatchSizeTest.java | 10 +-- .../component/kafka/KafkaConsumerFullTest.java | 29 ++++--- .../kafka/KafkaConsumerLastRecordHeaderTest.java | 21 ++--- .../kafka/KafkaConsumerManualCommitTest.java | 17 ++-- .../kafka/KafkaConsumerRebalanceTest.java | 10 ++- .../camel/component/kafka/KafkaConsumerTest.java | 8 +- .../kafka/KafkaConsumerTopicIsPatternTest.java | 12 +-- .../camel/component/kafka/KafkaEndpointTest.java | 12 +-- .../component/kafka/KafkaProducerFullTest.java | 90 ++++++++++++---------- .../camel/component/kafka/KafkaProducerTest.java | 10 +-- .../serde/DefaultKafkaHeaderDeserializerTest.java | 2 +- .../serde/DefaultKafkaHeaderSerializerTest.java | 22 ++---- .../kafka/KafkaIdempotentRepositoryEagerTest.java | 2 +- .../KafkaIdempotentRepositoryNonEagerTest.java | 2 +- .../kafka/clients/consumer/KafkaConsumerTest.java | 10 ++- 18 files changed, 168 insertions(+), 145 deletions(-) diff --git a/components/camel-kafka/pom.xml b/components/camel-kafka/pom.xml index b271c76..e9afe60 100644 --- a/components/camel-kafka/pom.xml +++ b/components/camel-kafka/pom.xml @@ -51,7 +51,7 @@ <!-- test --> <dependency> <groupId>org.apache.camel</groupId> - <artifactId>camel-testcontainers</artifactId> + <artifactId>camel-testcontainers-junit5</artifactId> <scope>test</scope> </dependency> <dependency> @@ -62,12 +62,17 @@ </dependency> <dependency> <groupId>org.apache.camel</groupId> - <artifactId>camel-test</artifactId> + <artifactId>camel-test-junit5</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.junit.jupiter</groupId> + <artifactId>junit-jupiter-params</artifactId> <scope>test</scope> 
</dependency> <dependency> <groupId>org.mockito</groupId> - <artifactId>mockito-core</artifactId> + <artifactId>mockito-junit-jupiter</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/BaseEmbeddedKafkaTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/BaseEmbeddedKafkaTest.java index 3c1f2ef..c7f71ff 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/BaseEmbeddedKafkaTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/BaseEmbeddedKafkaTest.java @@ -19,11 +19,11 @@ package org.apache.camel.component.kafka; import java.util.Properties; import org.apache.camel.CamelContext; -import org.apache.camel.test.junit4.CamelTestSupport; +import org.apache.camel.test.junit5.CamelTestSupport; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.KafkaAdminClient; import org.apache.kafka.clients.producer.ProducerConfig; -import org.junit.BeforeClass; +import org.junit.jupiter.api.BeforeAll; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.KafkaContainer; @@ -45,7 +45,7 @@ public abstract class BaseEmbeddedKafkaTest extends CamelTestSupport { kafkaAdminClient = createAdminClient(); } - @BeforeClass + @BeforeAll public static void beforeClass() { LOG.info("### Embedded Kafka cluster broker list: " + kafkaBroker.getBootstrapServers()); } diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java index 376b680..695c2ea 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java @@ -23,12 +23,15 @@ import java.util.Properties; import 
org.apache.camel.support.jsse.KeyStoreParameters; import org.apache.camel.support.jsse.SSLContextParameters; import org.apache.camel.support.jsse.TrustManagersParameters; -import org.apache.camel.test.junit4.CamelTestSupport; +import org.apache.camel.test.junit5.CamelTestSupport; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.config.SaslConfigs; import org.apache.kafka.common.config.SslConfigs; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; public class KafkaComponentTest extends CamelTestSupport { @@ -103,27 +106,27 @@ public class KafkaComponentTest extends CamelTestSupport { assertEquals("mytopic", endpoint.getConfiguration().getTopic()); assertEquals("1", endpoint.getConfiguration().getRequestRequiredAcks()); - assertEquals(new Integer(1), endpoint.getConfiguration().getBufferMemorySize()); - assertEquals(new Integer(10), endpoint.getConfiguration().getProducerBatchSize()); - assertEquals(new Integer(12), endpoint.getConfiguration().getConnectionMaxIdleMs()); - assertEquals(new Integer(1), endpoint.getConfiguration().getMaxBlockMs()); - assertEquals(new Integer(1), endpoint.getConfiguration().getBufferMemorySize()); + assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getBufferMemorySize()); + assertEquals(Integer.valueOf(10), endpoint.getConfiguration().getProducerBatchSize()); + assertEquals(Integer.valueOf(12), endpoint.getConfiguration().getConnectionMaxIdleMs()); + assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getMaxBlockMs()); + assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getBufferMemorySize()); assertEquals("testing", endpoint.getConfiguration().getClientId()); assertEquals("none", endpoint.getConfiguration().getCompressionCodec()); - assertEquals(new Integer(1), 
endpoint.getConfiguration().getLingerMs()); - assertEquals(new Integer(100), endpoint.getConfiguration().getMaxRequestSize()); + assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getLingerMs()); + assertEquals(Integer.valueOf(100), endpoint.getConfiguration().getMaxRequestSize()); assertEquals(100, endpoint.getConfiguration().getRequestTimeoutMs().intValue()); - assertEquals(new Integer(1029), endpoint.getConfiguration().getMetadataMaxAgeMs()); - assertEquals(new Integer(23), endpoint.getConfiguration().getReceiveBufferBytes()); - assertEquals(new Integer(234), endpoint.getConfiguration().getReconnectBackoffMs()); - assertEquals(new Integer(234), endpoint.getConfiguration().getReconnectBackoffMaxMs()); - assertEquals(new Integer(0), endpoint.getConfiguration().getRetries()); + assertEquals(Integer.valueOf(1029), endpoint.getConfiguration().getMetadataMaxAgeMs()); + assertEquals(Integer.valueOf(23), endpoint.getConfiguration().getReceiveBufferBytes()); + assertEquals(Integer.valueOf(234), endpoint.getConfiguration().getReconnectBackoffMs()); + assertEquals(Integer.valueOf(234), endpoint.getConfiguration().getReconnectBackoffMaxMs()); + assertEquals(Integer.valueOf(0), endpoint.getConfiguration().getRetries()); assertEquals(3782, endpoint.getConfiguration().getRetryBackoffMs().intValue()); assertEquals(765, endpoint.getConfiguration().getSendBufferBytes().intValue()); - assertEquals(new Integer(1), endpoint.getConfiguration().getMaxInFlightRequest()); + assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getMaxInFlightRequest()); assertEquals("org.apache.camel.reporters.TestReport,org.apache.camel.reporters.SampleReport", endpoint.getConfiguration().getMetricReporters()); - assertEquals(new Integer(3), endpoint.getConfiguration().getNoOfMetricsSample()); - assertEquals(new Integer(12344), endpoint.getConfiguration().getMetricsSampleWindowMs()); + assertEquals(Integer.valueOf(3), endpoint.getConfiguration().getNoOfMetricsSample()); + 
assertEquals(Integer.valueOf(12344), endpoint.getConfiguration().getMetricsSampleWindowMs()); assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getSerializerClass()); assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getKeySerializerClass()); assertEquals("testing", endpoint.getConfiguration().getSslKeyPassword()); @@ -139,7 +142,7 @@ public class KafkaComponentTest extends CamelTestSupport { assertEquals("test", endpoint.getConfiguration().getSslProvider()); assertEquals("JKS", endpoint.getConfiguration().getSslTruststoreType()); assertEquals("/usr/bin/kinit", endpoint.getConfiguration().getKerberosInitCmd()); - assertEquals(new Integer(60000), endpoint.getConfiguration().getKerberosBeforeReloginMinTime()); + assertEquals(Integer.valueOf(60000), endpoint.getConfiguration().getKerberosBeforeReloginMinTime()); assertEquals(new Double(0.05), endpoint.getConfiguration().getKerberosRenewJitter()); assertEquals(new Double(0.8), endpoint.getConfiguration().getKerberosRenewWindowFactor()); assertEquals("MAC", endpoint.getConfiguration().getSslCipherSuites()); diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerBatchSizeTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerBatchSizeTest.java index 4588d8f..2ecaade 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerBatchSizeTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerBatchSizeTest.java @@ -24,9 +24,9 @@ import org.apache.camel.EndpointInject; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.kafka.clients.producer.ProducerRecord; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import 
org.junit.jupiter.api.Test; public class KafkaConsumerBatchSizeTest extends BaseEmbeddedKafkaTest { @@ -40,13 +40,13 @@ public class KafkaConsumerBatchSizeTest extends BaseEmbeddedKafkaTest { private org.apache.kafka.clients.producer.KafkaProducer<String, String> producer; - @Before + @BeforeEach public void before() { Properties props = getDefaultProperties(); producer = new org.apache.kafka.clients.producer.KafkaProducer<>(props); } - @After + @AfterEach public void after() { if (producer != null) { producer.close(); diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java index e26a324..be592f4 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java @@ -30,10 +30,15 @@ import org.apache.camel.component.kafka.serde.DefaultKafkaHeaderDeserializer; import org.apache.camel.component.mock.MockEndpoint; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.header.internals.RecordHeader; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import static org.apache.camel.test.junit5.TestSupport.assertIsInstanceOf; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class KafkaConsumerFullTest extends BaseEmbeddedKafkaTest { @@ -52,13 +57,13 @@ public class KafkaConsumerFullTest extends BaseEmbeddedKafkaTest { private org.apache.kafka.clients.producer.KafkaProducer<String, String> producer; - 
@Before + @BeforeEach public void before() { Properties props = getDefaultProperties(); producer = new org.apache.kafka.clients.producer.KafkaProducer<>(props); } - @After + @AfterEach public void after() { if (producer != null) { producer.close(); @@ -103,8 +108,8 @@ public class KafkaConsumerFullTest extends BaseEmbeddedKafkaTest { assertEquals(5, StreamSupport.stream(MockConsumerInterceptor.recordsCaptured.get(0).records(TOPIC).spliterator(), false).count()); Map<String, Object> headers = to.getExchanges().get(0).getIn().getHeaders(); - assertFalse("Should not receive skipped header", headers.containsKey(skippedHeaderKey)); - assertTrue("Should receive propagated header", headers.containsKey(propagatedHeaderKey)); + assertFalse(headers.containsKey(skippedHeaderKey), "Should not receive skipped header"); + assertTrue(headers.containsKey(propagatedHeaderKey), "Should receive propagated header"); } @Test @@ -120,12 +125,12 @@ public class KafkaConsumerFullTest extends BaseEmbeddedKafkaTest { to.assertIsSatisfied(3000); Map<String, Object> headers = to.getExchanges().get(0).getIn().getHeaders(); - assertTrue("Should receive KafkaEndpoint populated kafka.TOPIC header", headers.containsKey(KafkaConstants.TOPIC)); - assertEquals("Topic name received", TOPIC, headers.get(KafkaConstants.TOPIC)); + assertTrue(headers.containsKey(KafkaConstants.TOPIC), "Should receive KafkaEndpoint populated kafka.TOPIC header"); + assertEquals(TOPIC, headers.get(KafkaConstants.TOPIC), "Topic name received"); } @Test - @Ignore("Currently there is a bug in kafka which leads to an uninterruptable thread so a resub take too long (works manually)") + @Disabled("Currently there is a bug in kafka which leads to an uninterruptable thread so a resub take too long (works manually)") public void kafkaMessageIsConsumedByCamelSeekedToBeginning() throws Exception { to.expectedMessageCount(5); to.expectedBodiesReceivedInAnyOrder("message-0", "message-1", "message-2", "message-3", "message-4"); @@ -154,7 
+159,7 @@ public class KafkaConsumerFullTest extends BaseEmbeddedKafkaTest { } @Test - @Ignore("Currently there is a bug in kafka which leads to an uninterruptable thread so a resub take too long (works manually)") + @Disabled("Currently there is a bug in kafka which leads to an uninterruptable thread so a resub take too long (works manually)") public void kafkaMessageIsConsumedByCamelSeekedToEnd() throws Exception { to.expectedMessageCount(5); to.expectedBodiesReceivedInAnyOrder("message-0", "message-1", "message-2", "message-3", "message-4"); diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerLastRecordHeaderTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerLastRecordHeaderTest.java index a4eb580..574be9b 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerLastRecordHeaderTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerLastRecordHeaderTest.java @@ -25,9 +25,12 @@ import org.apache.camel.Exchange; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.kafka.clients.producer.ProducerRecord; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; public class KafkaConsumerLastRecordHeaderTest extends BaseEmbeddedKafkaTest { private static final String TOPIC = "last-record"; @@ -37,13 +40,13 @@ public class KafkaConsumerLastRecordHeaderTest extends BaseEmbeddedKafkaTest { private org.apache.kafka.clients.producer.KafkaProducer<String, String> producer; - @Before + @BeforeEach public void before() { Properties props = getDefaultProperties(); producer = new 
org.apache.kafka.clients.producer.KafkaProducer<>(props); } - @After + @AfterEach public void after() { if (producer != null) { producer.close(); @@ -71,12 +74,12 @@ public class KafkaConsumerLastRecordHeaderTest extends BaseEmbeddedKafkaTest { List<Exchange> exchanges = result.getExchanges(); for (int i = 0; i < exchanges.size(); i++) { Boolean header = exchanges.get(i).getIn().getHeader(KafkaConstants.LAST_RECORD_BEFORE_COMMIT, Boolean.class); - assertNotNull("Header not set for #" + i, header); - assertEquals("Header invalid for #" + i, header, i == exchanges.size() - 1); + assertNotNull(header, "Header not set for #" + i); + assertEquals(header, i == exchanges.size() - 1, "Header invalid for #" + i); // as long as the partitions count is 1 on topic: header = exchanges.get(i).getIn().getHeader(KafkaConstants.LAST_POLL_RECORD, Boolean.class); - assertNotNull("Last record header not set for #" + i, header); - assertEquals("Last record header invalid for #" + i, header, i == exchanges.size() - 1); + assertNotNull(header, "Last record header not set for #" + i); + assertEquals(header, i == exchanges.size() - 1, "Last record header invalid for #" + i); } } diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerManualCommitTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerManualCommitTest.java index 0e6524d..284d73f 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerManualCommitTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerManualCommitTest.java @@ -26,12 +26,15 @@ import org.apache.camel.EndpointInject; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.kafka.clients.producer.ProducerRecord; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; +import 
org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; -@Ignore +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +@Disabled public class KafkaConsumerManualCommitTest extends BaseEmbeddedKafkaTest { public static final String TOPIC = "test"; @@ -45,13 +48,13 @@ public class KafkaConsumerManualCommitTest extends BaseEmbeddedKafkaTest { private org.apache.kafka.clients.producer.KafkaProducer<String, String> producer; - @Before + @BeforeEach public void before() { Properties props = getDefaultProperties(); producer = new org.apache.kafka.clients.producer.KafkaProducer<>(props); } - @After + @AfterEach public void after() { if (producer != null) { producer.close(); diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerRebalanceTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerRebalanceTest.java index ca19ce0..0c6ac84 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerRebalanceTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerRebalanceTest.java @@ -25,8 +25,10 @@ import org.apache.camel.EndpointInject; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.spi.StateRepository; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertTrue; public class KafkaConsumerRebalanceTest extends BaseEmbeddedKafkaTest { private static final String TOPIC = "offset-rebalance"; @@ -47,10 +49,10 @@ public class KafkaConsumerRebalanceTest extends BaseEmbeddedKafkaTest { @Test public void offsetGetStateMustHaveBeenCalledTwice() throws Exception { 
boolean offsetGetStateCalled = messagesLatch.await(30000, TimeUnit.MILLISECONDS); - assertTrue("StateRepository.getState should have been called twice for topic " + TOPIC + ". Remaining count : " + messagesLatch.getCount(), offsetGetStateCalled); + assertTrue(offsetGetStateCalled, "StateRepository.getState should have been called twice for topic " + TOPIC + ". Remaining count : " + messagesLatch.getCount()); } - @After + @AfterEach public void after() { // clean all test topics kafkaAdminClient.deleteTopics(Collections.singletonList(TOPIC)); diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTest.java index 98be7e8..1c22fd1 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTest.java @@ -17,8 +17,9 @@ package org.apache.camel.component.kafka; import org.apache.camel.Processor; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -29,12 +30,13 @@ public class KafkaConsumerTest { private KafkaEndpoint endpoint = mock(KafkaEndpoint.class); private Processor processor = mock(Processor.class); - @Test(expected = IllegalArgumentException.class) + @Test public void consumerRequiresBootstrapServers() throws Exception { when(endpoint.getComponent()).thenReturn(component); when(endpoint.getConfiguration()).thenReturn(configuration); when(endpoint.getConfiguration().getGroupId()).thenReturn("groupOne"); - new KafkaConsumer(endpoint, processor); + assertThrows(IllegalArgumentException.class, + () -> new KafkaConsumer(endpoint, processor)); } @Test diff --git 
a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTopicIsPatternTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTopicIsPatternTest.java index 65325aa..61b0ce1 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTopicIsPatternTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerTopicIsPatternTest.java @@ -25,9 +25,11 @@ import org.apache.camel.EndpointInject; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.kafka.clients.producer.ProducerRecord; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; public class KafkaConsumerTopicIsPatternTest extends BaseEmbeddedKafkaTest { @@ -43,14 +45,14 @@ public class KafkaConsumerTopicIsPatternTest extends BaseEmbeddedKafkaTest { private org.apache.kafka.clients.producer.KafkaProducer<String, String> producer; - @Before + @BeforeEach public void before() { Properties props = getDefaultProperties(); producer = new org.apache.kafka.clients.producer.KafkaProducer<>(props); } - @After + @AfterEach public void after() { if (producer != null) { producer.close(); diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaEndpointTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaEndpointTest.java index e952fe8..7bfb0df 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaEndpointTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaEndpointTest.java @@ -20,18 +20,18 @@ import org.apache.camel.Exchange; import org.apache.camel.Message; import 
org.apache.camel.impl.DefaultCamelContext; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class KafkaEndpointTest { private KafkaEndpoint endpoint; @@ -42,7 +42,7 @@ public class KafkaEndpointTest { @Mock private KafkaComponent mockKafkaComponent; - @Before + @BeforeEach public void setup() { KafkaComponent kafka = new KafkaComponent(new DefaultCamelContext()); kafka.init(); diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java index a953c48..862846c 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java @@ -45,9 +45,15 @@ import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.header.Header; import org.apache.kafka.common.header.Headers; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import static org.apache.camel.test.junit5.TestSupport.assertIsInstanceOf; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; 
+import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { @@ -109,13 +115,13 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { @BindToRegistry("myHeaderSerializer") private MyKafkaHeadersSerializer serializer = new MyKafkaHeadersSerializer(); - @BeforeClass + @BeforeAll public static void before() { stringsConsumerConn = createStringKafkaConsumer("DemoConsumer"); bytesConsumerConn = createByteKafkaConsumer(GROUP_BYTES); } - @AfterClass + @AfterAll public static void after() { // clean all test topics final List<String> topics = new ArrayList<>(); @@ -162,16 +168,16 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { boolean allMessagesReceived = messagesLatch.await(200, TimeUnit.MILLISECONDS); - assertTrue("Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount(), allMessagesReceived); + assertTrue(allMessagesReceived, "Not all messages were published to the kafka topics. 
Not received: " + messagesLatch.getCount()); List<Exchange> exchangeList = mockEndpoint.getExchanges(); - assertEquals("Fifteen Exchanges are expected", exchangeList.size(), 15); + assertEquals(exchangeList.size(), 15, "Fifteen Exchanges are expected"); for (Exchange exchange : exchangeList) { @SuppressWarnings("unchecked") List<RecordMetadata> recordMetaData1 = (List<RecordMetadata>)(exchange.getIn().getHeader(KafkaConstants.KAFKA_RECORDMETA)); - assertEquals("One RecordMetadata is expected.", recordMetaData1.size(), 1); - assertTrue("Offset is positive", recordMetaData1.get(0).offset() >= 0); - assertTrue("Topic Name start with 'test'", recordMetaData1.get(0).topic().startsWith("test")); + assertEquals(recordMetaData1.size(), 1, "One RecordMetadata is expected."); + assertTrue(recordMetaData1.get(0).offset() >= 0, "Offset is positive"); + assertTrue(recordMetaData1.get(0).topic().startsWith("test"), "Topic Name start with 'test'"); } } @@ -190,16 +196,16 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { boolean allMessagesReceived = messagesLatch.await(200, TimeUnit.MILLISECONDS); - assertTrue("Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount(), allMessagesReceived); + assertTrue(allMessagesReceived, "Not all messages were published to the kafka topics. 
Not received: " + messagesLatch.getCount()); List<Exchange> exchangeList = mockEndpoint.getExchanges(); - assertEquals("Fifteen Exchanges are expected", exchangeList.size(), 15); + assertEquals(exchangeList.size(), 15, "Fifteen Exchanges are expected"); for (Exchange exchange : exchangeList) { @SuppressWarnings("unchecked") List<RecordMetadata> recordMetaData1 = (List<RecordMetadata>)(exchange.getIn().getHeader(KafkaConstants.KAFKA_RECORDMETA)); - assertEquals("One RecordMetadata is expected.", recordMetaData1.size(), 1); - assertTrue("Offset is positive", recordMetaData1.get(0).offset() >= 0); - assertTrue("Topic Name start with 'test'", recordMetaData1.get(0).topic().startsWith("test")); + assertEquals(1, recordMetaData1.size(), "One RecordMetadata is expected."); + assertTrue(recordMetaData1.get(0).offset() >= 0, "Offset is positive"); + assertTrue(recordMetaData1.get(0).topic().startsWith("test"), "Topic Name start with 'test'"); } } @@ -217,7 +223,7 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { boolean allMessagesReceived = messagesLatch.await(200, TimeUnit.MILLISECONDS); - assertTrue("Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount(), allMessagesReceived); + assertTrue(allMessagesReceived, "Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount()); assertEquals(messageInTopic + messageInOtherTopic, MockProducerInterceptor.recordsCaptured.size()); } @@ -245,24 +251,24 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { boolean allMessagesReceived = messagesLatch.await(200, TimeUnit.MILLISECONDS); - assertTrue("Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount(), allMessagesReceived); + assertTrue(allMessagesReceived, "Not all messages were published to the kafka topics. 
Not received: " + messagesLatch.getCount()); List<Exchange> exchangeList = mockEndpoint.getExchanges(); - assertEquals("Two Exchanges are expected", exchangeList.size(), 2); + assertEquals(2, exchangeList.size(), "Two Exchanges are expected"); Exchange e1 = exchangeList.get(0); @SuppressWarnings("unchecked") List<RecordMetadata> recordMetaData1 = (List<RecordMetadata>)(e1.getIn().getHeader(KafkaConstants.KAFKA_RECORDMETA)); - assertEquals("Ten RecordMetadata is expected.", recordMetaData1.size(), 10); + assertEquals(10, recordMetaData1.size(), "Ten RecordMetadata is expected."); for (RecordMetadata recordMeta : recordMetaData1) { - assertTrue("Offset is positive", recordMeta.offset() >= 0); - assertTrue("Topic Name start with 'test'", recordMeta.topic().startsWith("test")); + assertTrue(recordMeta.offset() >= 0, "Offset is positive"); + assertTrue(recordMeta.topic().startsWith("test"), "Topic Name start with 'test'"); } Exchange e2 = exchangeList.get(1); @SuppressWarnings("unchecked") List<RecordMetadata> recordMetaData2 = (List<RecordMetadata>)(e2.getIn().getHeader(KafkaConstants.KAFKA_RECORDMETA)); - assertEquals("Five RecordMetadata is expected.", recordMetaData2.size(), 5); + assertEquals(5, recordMetaData2.size(), "Five RecordMetadata is expected."); for (RecordMetadata recordMeta : recordMetaData2) { - assertTrue("Offset is positive", recordMeta.offset() >= 0); - assertTrue("Topic Name start with 'test'", recordMeta.topic().startsWith("test")); + assertTrue(recordMeta.offset() >= 0, "Offset is positive"); + assertTrue(recordMeta.topic().startsWith("test"), "Topic Name start with 'test'"); } } @@ -286,16 +292,16 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { boolean allMessagesReceived = messagesLatch.await(200, TimeUnit.MILLISECONDS); - assertTrue("Not all messages were published to the kafka topics. 
Not received: " + messagesLatch.getCount(), allMessagesReceived); + assertTrue(allMessagesReceived, "Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount()); List<Exchange> exchangeList = mockEndpoint.getExchanges(); - assertEquals("Fifteen Exchanges are expected", exchangeList.size(), 15); + assertEquals(15, exchangeList.size(), "Fifteen Exchanges are expected"); for (Exchange exchange : exchangeList) { @SuppressWarnings("unchecked") List<RecordMetadata> recordMetaData1 = (List<RecordMetadata>)(exchange.getIn().getHeader(KafkaConstants.KAFKA_RECORDMETA)); - assertEquals("One RecordMetadata is expected.", recordMetaData1.size(), 1); - assertTrue("Offset is positive", recordMetaData1.get(0).offset() >= 0); - assertTrue("Topic Name start with 'test'", recordMetaData1.get(0).topic().startsWith("test")); + assertEquals(1, recordMetaData1.size(), "One RecordMetadata is expected."); + assertTrue(recordMetaData1.get(0).offset() >= 0, "Offset is positive"); + assertTrue(recordMetaData1.get(0).topic().startsWith("test"), "Topic Name start with 'test'"); } } @@ -337,19 +343,19 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { List<ConsumerRecord<String, String>> records = pollForRecords(createStringKafkaConsumer("propagatedHeaderConsumer"), TOPIC_PROPAGATED_HEADERS, messagesLatch); boolean allMessagesReceived = messagesLatch.await(10_000, TimeUnit.MILLISECONDS); - assertTrue("Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount(), allMessagesReceived); + assertTrue(allMessagesReceived, "Not all messages were published to the kafka topics. 
Not received: " + messagesLatch.getCount()); ConsumerRecord<String, String> record = records.get(0); Headers headers = record.headers(); - assertNotNull("Kafka Headers should not be null.", headers); + assertNotNull(headers, "Kafka Headers should not be null."); // we have 6 headers - assertEquals("6 propagated header is expected.", 6, headers.toArray().length); - assertEquals("Propagated string value received", propagatedStringHeaderValue, new String(getHeaderValue(propagatedStringHeaderKey, headers))); - assertEquals("Propagated integer value received", propagatedIntegerHeaderValue, new Integer(ByteBuffer.wrap(getHeaderValue(propagatedIntegerHeaderKey, headers)).getInt())); - assertEquals("Propagated long value received", propagatedLongHeaderValue, new Long(ByteBuffer.wrap(getHeaderValue(propagatedLongHeaderKey, headers)).getLong())); - assertEquals("Propagated double value received", propagatedDoubleHeaderValue, new Double(ByteBuffer.wrap(getHeaderValue(propagatedDoubleHeaderKey, headers)).getDouble())); - assertArrayEquals("Propagated byte array value received", propagatedBytesHeaderValue, getHeaderValue(propagatedBytesHeaderKey, headers)); - assertEquals("Propagated boolean value received", propagatedBooleanHeaderValue, Boolean.valueOf(new String(getHeaderValue(propagatedBooleanHeaderKey, headers)))); + assertEquals(6, headers.toArray().length, "6 propagated header is expected."); + assertEquals(propagatedStringHeaderValue, new String(getHeaderValue(propagatedStringHeaderKey, headers)), "Propagated string value received"); + assertEquals(propagatedIntegerHeaderValue, Integer.valueOf(ByteBuffer.wrap(getHeaderValue(propagatedIntegerHeaderKey, headers)).getInt()), "Propagated integer value received"); + assertEquals(propagatedLongHeaderValue, Long.valueOf(ByteBuffer.wrap(getHeaderValue(propagatedLongHeaderKey, headers)).getLong()), "Propagated long value received"); + assertEquals(propagatedDoubleHeaderValue, 
Double.valueOf(ByteBuffer.wrap(getHeaderValue(propagatedDoubleHeaderKey, headers)).getDouble()), "Propagated double value received"); + assertArrayEquals(propagatedBytesHeaderValue, getHeaderValue(propagatedBytesHeaderKey, headers), "Propagated byte array value received"); + assertEquals(propagatedBooleanHeaderValue, Boolean.valueOf(new String(getHeaderValue(propagatedBooleanHeaderKey, headers))), "Propagated boolean value received"); } @Test @@ -366,13 +372,13 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { List<ConsumerRecord<String, String>> records = pollForRecords(createStringKafkaConsumer("noRecordSpecificHeadersConsumer"), TOPIC_NO_RECORD_SPECIFIC_HEADERS, messagesLatch); boolean allMessagesReceived = messagesLatch.await(10_000, TimeUnit.MILLISECONDS); - assertTrue("Not all messages were published to the kafka topics. Not received: " + messagesLatch.getCount(), allMessagesReceived); + assertTrue(allMessagesReceived, "Not all messages were published to the kafka topics. 
Not received: " + messagesLatch.getCount()); ConsumerRecord<String, String> record = records.get(0); Headers headers = record.headers(); - assertNotNull("Kafka Headers should not be null.", headers); + assertNotNull(headers, "Kafka Headers should not be null."); // we have 0 headers - assertEquals("0 propagated headers are expected", 0, headers.toArray().length); + assertEquals(0, headers.toArray().length, "0 propagated headers are expected"); } @Test @@ -389,7 +395,7 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest { private byte[] getHeaderValue(String headerKey, Headers headers) { Header foundHeader = StreamSupport.stream(headers.spliterator(), false).filter(header -> header.key().equals(headerKey)).findFirst().orElse(null); - assertNotNull("Header should be sent", foundHeader); + assertNotNull(foundHeader, "Header should be sent"); return foundHeader.value(); } diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java index 57ee16c..be5df93 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java @@ -43,7 +43,7 @@ import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.errors.ApiException; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; @@ -51,6 +51,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; 
import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isA; @@ -117,7 +118,7 @@ public class KafkaProducerTest { assertRecordMetadataExists(); } - @Test(expected = Exception.class) + @Test @SuppressWarnings({"unchecked"}) public void processSendsMessageWithException() throws Exception { endpoint.getConfiguration().setTopic("sometopic"); @@ -127,9 +128,8 @@ public class KafkaProducerTest { Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); - producer.process(exchange); - - assertRecordMetadataExists(); + assertThrows(Exception.class, + () -> producer.process(exchange)); } @Test diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderDeserializerTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderDeserializerTest.java index 334fcf5..b54959d 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderDeserializerTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderDeserializerTest.java @@ -17,7 +17,7 @@ package org.apache.camel.component.kafka.serde; import org.hamcrest.CoreMatchers; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertThat; diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderSerializerTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderSerializerTest.java index cda893e..9962302 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderSerializerTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/serde/DefaultKafkaHeaderSerializerTest.java @@ -19,34 +19,24 @@ package 
org.apache.camel.component.kafka.serde; import java.util.Arrays; import java.util.Collection; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import static org.junit.Assert.assertArrayEquals; -@RunWith(Parameterized.class) public class DefaultKafkaHeaderSerializerTest { private KafkaHeaderSerializer serializer = new DefaultKafkaHeaderSerializer(); - private Object value; - private byte[] expectedResult; - - public DefaultKafkaHeaderSerializerTest(Object value, byte[] expectedResult) { - this.value = value; - this.expectedResult = expectedResult; - } - - @Test - public void serialize() { + @ParameterizedTest + @MethodSource("primeNumbers") + public void serialize(Object value, byte[] expectedResult) { byte[] result = serializer.serialize("someKey", value); assertArrayEquals(expectedResult, result); } - @Parameterized.Parameters - public static Collection primeNumbers() { + public static Collection<Object[]> primeNumbers() { return Arrays.asList(new Object[][] {{Boolean.TRUE, "true".getBytes()}, // boolean {-12, new byte[] {-1, -1, -1, -12}}, // integer {19L, new byte[] {0, 0, 0, 0, 0, 0, 0, 19}}, // long diff --git a/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryEagerTest.java b/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryEagerTest.java index 3974db6..7c96898 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryEagerTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryEagerTest.java @@ -23,7 +23,7 @@ import org.apache.camel.RoutesBuilder; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.kafka.BaseEmbeddedKafkaTest; import 
org.apache.camel.component.mock.MockEndpoint; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; /** * Test for eager idempotentRepository usage. */ diff --git a/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryNonEagerTest.java b/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryNonEagerTest.java index 66e1f44..6633f39 100644 --- a/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryNonEagerTest.java +++ b/components/camel-kafka/src/test/java/org/apache/camel/processor/idempotent/kafka/KafkaIdempotentRepositoryNonEagerTest.java @@ -23,7 +23,7 @@ import org.apache.camel.RoutesBuilder; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.kafka.BaseEmbeddedKafkaTest; import org.apache.camel.component.mock.MockEndpoint; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; /** * Test for non-eager idempotentRepository usage. 
diff --git a/components/camel-kafka/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java b/components/camel-kafka/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java index 134787f..528346d 100644 --- a/components/camel-kafka/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java +++ b/components/camel-kafka/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java @@ -17,23 +17,23 @@ package org.apache.kafka.clients.consumer; import org.hamcrest.core.IsNot; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.runner.RunWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import static org.hamcrest.core.IsNull.nullValue; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class KafkaConsumerTest { @Mock private KafkaConsumer<Object, Object> kafkaConsumer; - @Before + @BeforeEach public void init() { when(kafkaConsumer.poll(1000)).thenReturn(ConsumerRecords.empty()); }