guozhangwang commented on a change in pull request #11624:
URL: https://github.com/apache/kafka/pull/11624#discussion_r778303747
##########
File path:
streams/src/test/java/org/apache/kafka/streams/integration/IQv2StoreIntegrationTest.java
##########
@@ -365,109 +395,200 @@ public void beforeTest() {
final Properties streamsConfig = streamsConfiguration(
cache,
log,
- storeToTest.name()
+ storeToTest.name(),
+ kind
);
final StreamsBuilder builder = new StreamsBuilder();
- if (supplier instanceof KeyValueBytesStoreSupplier) {
- final Materialized<Integer, Integer, KeyValueStore<Bytes, byte[]>>
materialized =
- Materialized.as((KeyValueBytesStoreSupplier) supplier);
+ if (Objects.equals(kind, "DSL") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValueDSLTopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValuePAPITopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
WindowBytesStoreSupplier) {
+ setUpWindowDSLTopology((WindowBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
WindowBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
SessionBytesStoreSupplier) {
+ setUpSessionDSLTopology((SessionBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
SessionBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else {
+ throw new AssertionError("Store supplier is an unrecognized
type.");
+ }
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ // Don't need to wait for running, since tests can use iqv2 to wait
until they
+ // get a valid response.
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ kafkaStreams =
+ IntegrationTestUtils.getStartedStreams(
+ streamsConfig,
+ builder,
+ true
+ );
+ }
- if (storeToTest.global()) {
- builder.globalTable(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- } else {
- builder.table(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- }
- } else if (supplier instanceof WindowBytesStoreSupplier) {
- final Materialized<Integer, Integer, WindowStore<Bytes, byte[]>>
materialized =
- Materialized.as((WindowBytesStoreSupplier) supplier);
+ private void setUpSessionDSLTopology(final SessionBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, SessionStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
+ } else {
+ materialized.withCachingDisabled();
Review comment:
Should this be `withLoggingDisabled()`? The `else` branch of the `log` check calls `withCachingDisabled()` a second time, so logging is never actually disabled.
##########
File path:
streams/src/test/java/org/apache/kafka/streams/integration/IQv2StoreIntegrationTest.java
##########
@@ -365,109 +395,200 @@ public void beforeTest() {
final Properties streamsConfig = streamsConfiguration(
cache,
log,
- storeToTest.name()
+ storeToTest.name(),
+ kind
);
final StreamsBuilder builder = new StreamsBuilder();
- if (supplier instanceof KeyValueBytesStoreSupplier) {
- final Materialized<Integer, Integer, KeyValueStore<Bytes, byte[]>>
materialized =
- Materialized.as((KeyValueBytesStoreSupplier) supplier);
+ if (Objects.equals(kind, "DSL") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValueDSLTopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValuePAPITopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
WindowBytesStoreSupplier) {
+ setUpWindowDSLTopology((WindowBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
WindowBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
SessionBytesStoreSupplier) {
+ setUpSessionDSLTopology((SessionBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
SessionBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else {
+ throw new AssertionError("Store supplier is an unrecognized
type.");
+ }
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ // Don't need to wait for running, since tests can use iqv2 to wait
until they
+ // get a valid response.
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ kafkaStreams =
+ IntegrationTestUtils.getStartedStreams(
+ streamsConfig,
+ builder,
+ true
+ );
+ }
- if (storeToTest.global()) {
- builder.globalTable(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- } else {
- builder.table(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- }
- } else if (supplier instanceof WindowBytesStoreSupplier) {
- final Materialized<Integer, Integer, WindowStore<Bytes, byte[]>>
materialized =
- Materialized.as((WindowBytesStoreSupplier) supplier);
+ private void setUpSessionDSLTopology(final SessionBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, SessionStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
+ } else {
+ materialized.withCachingDisabled();
+ }
- builder
- .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
- .groupByKey()
- .windowedBy(TimeWindows.ofSizeWithNoGrace(WINDOW_SIZE))
- .aggregate(
- () -> 0,
- (key, value, aggregate) -> aggregate + value,
- materialized
- );
- } else if (supplier instanceof SessionBytesStoreSupplier) {
- final Materialized<Integer, Integer, SessionStore<Bytes, byte[]>>
materialized =
- Materialized.as((SessionBytesStoreSupplier) supplier);
+ builder
+ .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
+ .groupByKey()
+ .windowedBy(SessionWindows.ofInactivityGapWithNoGrace(WINDOW_SIZE))
+ .aggregate(
+ () -> 0,
+ (key, value, aggregate) -> aggregate + value,
+ (aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
+ materialized
+ );
+ }
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ private void setUpWindowDSLTopology(final WindowBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, WindowStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
- builder
- .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
- .groupByKey()
-
.windowedBy(SessionWindows.ofInactivityGapWithNoGrace(WINDOW_SIZE))
- .aggregate(
- () -> 0,
- (key, value, aggregate) -> aggregate + value,
- (aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
- materialized
- );
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
} else {
- throw new AssertionError("Store supplier is an unrecognized
type.");
+ materialized.withCachingDisabled();
Review comment:
Should this be `withLoggingDisabled()`? The `else` branch of the `log` check calls `withCachingDisabled()` a second time, so logging is never actually disabled.
##########
File path:
streams/src/test/java/org/apache/kafka/streams/integration/IQv2StoreIntegrationTest.java
##########
@@ -365,109 +395,200 @@ public void beforeTest() {
final Properties streamsConfig = streamsConfiguration(
cache,
log,
- storeToTest.name()
+ storeToTest.name(),
+ kind
);
final StreamsBuilder builder = new StreamsBuilder();
- if (supplier instanceof KeyValueBytesStoreSupplier) {
- final Materialized<Integer, Integer, KeyValueStore<Bytes, byte[]>>
materialized =
- Materialized.as((KeyValueBytesStoreSupplier) supplier);
+ if (Objects.equals(kind, "DSL") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValueDSLTopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValuePAPITopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
WindowBytesStoreSupplier) {
+ setUpWindowDSLTopology((WindowBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
WindowBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
SessionBytesStoreSupplier) {
+ setUpSessionDSLTopology((SessionBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
SessionBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else {
+ throw new AssertionError("Store supplier is an unrecognized
type.");
+ }
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ // Don't need to wait for running, since tests can use iqv2 to wait
until they
+ // get a valid response.
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ kafkaStreams =
+ IntegrationTestUtils.getStartedStreams(
+ streamsConfig,
+ builder,
+ true
+ );
+ }
- if (storeToTest.global()) {
- builder.globalTable(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- } else {
- builder.table(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- }
- } else if (supplier instanceof WindowBytesStoreSupplier) {
- final Materialized<Integer, Integer, WindowStore<Bytes, byte[]>>
materialized =
- Materialized.as((WindowBytesStoreSupplier) supplier);
+ private void setUpSessionDSLTopology(final SessionBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, SessionStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
+ } else {
+ materialized.withCachingDisabled();
+ }
- builder
- .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
- .groupByKey()
- .windowedBy(TimeWindows.ofSizeWithNoGrace(WINDOW_SIZE))
- .aggregate(
- () -> 0,
- (key, value, aggregate) -> aggregate + value,
- materialized
- );
- } else if (supplier instanceof SessionBytesStoreSupplier) {
- final Materialized<Integer, Integer, SessionStore<Bytes, byte[]>>
materialized =
- Materialized.as((SessionBytesStoreSupplier) supplier);
+ builder
+ .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
+ .groupByKey()
+ .windowedBy(SessionWindows.ofInactivityGapWithNoGrace(WINDOW_SIZE))
+ .aggregate(
+ () -> 0,
+ (key, value, aggregate) -> aggregate + value,
+ (aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
+ materialized
+ );
+ }
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ private void setUpWindowDSLTopology(final WindowBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, WindowStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
- builder
- .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
- .groupByKey()
-
.windowedBy(SessionWindows.ofInactivityGapWithNoGrace(WINDOW_SIZE))
- .aggregate(
- () -> 0,
- (key, value, aggregate) -> aggregate + value,
- (aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
- materialized
- );
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
} else {
- throw new AssertionError("Store supplier is an unrecognized
type.");
+ materialized.withCachingDisabled();
}
- // Don't need to wait for running, since tests can use iqv2 to wait
until they
- // get a valid response.
+ builder
+ .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
+ .groupByKey()
+ .windowedBy(TimeWindows.ofSizeWithNoGrace(WINDOW_SIZE))
+ .aggregate(
+ () -> 0,
+ (key, value, aggregate) -> aggregate + value,
+ materialized
+ );
+ }
- kafkaStreams =
- IntegrationTestUtils.getStartedStreams(
- streamsConfig,
- builder,
- true
+ private void setUpKeyValueDSLTopology(final KeyValueBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, KeyValueStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
+
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
+
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
+ } else {
+ materialized.withCachingDisabled();
+ }
+
+ if (storeToTest.global()) {
+ builder.globalTable(
+ INPUT_TOPIC_NAME,
+ Consumed.with(Serdes.Integer(), Serdes.Integer()),
+ materialized
+ );
+ } else {
+ builder.table(
+ INPUT_TOPIC_NAME,
+ Consumed.with(Serdes.Integer(), Serdes.Integer()),
+ materialized
+ );
+ }
+ }
+
+ private void setUpKeyValuePAPITopology(final KeyValueBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final StoreBuilder<?> keyValueStoreStoreBuilder;
+ final ProcessorSupplier<Integer, Integer, Void, Void>
processorSupplier;
+ if (storeToTest.timestamped()) {
+ keyValueStoreStoreBuilder = Stores.timestampedKeyValueStoreBuilder(
+ supplier,
+ Serdes.Integer(),
+ Serdes.Integer()
+ );
+ processorSupplier = () -> new ContextualProcessor<Integer,
Integer, Void, Void>() {
+ @Override
+ public void process(final Record<Integer, Integer> record) {
+ final TimestampedKeyValueStore<Integer, Integer>
stateStore =
+
context().getStateStore(keyValueStoreStoreBuilder.name());
+ stateStore.put(
+ record.key(),
+ ValueAndTimestamp.make(
+ record.value(), record.timestamp()
+ )
+ );
+ }
+ };
+ } else {
+ keyValueStoreStoreBuilder = Stores.keyValueStoreBuilder(
+ supplier,
+ Serdes.Integer(),
+ Serdes.Integer()
+ );
+ processorSupplier =
+ () -> new ContextualProcessor<Integer, Integer, Void, Void>() {
+ @Override
+ public void process(final Record<Integer, Integer> record)
{
+ final KeyValueStore<Integer, Integer> stateStore =
+
context().getStateStore(keyValueStoreStoreBuilder.name());
+ stateStore.put(record.key(), record.value());
+ }
+ };
+ }
+ if (cache) {
+ keyValueStoreStoreBuilder.withCachingEnabled();
+ } else {
+ keyValueStoreStoreBuilder.withCachingDisabled();
+ }
+ if (log) {
+
keyValueStoreStoreBuilder.withLoggingEnabled(Collections.emptyMap());
+ } else {
+ keyValueStoreStoreBuilder.withCachingDisabled();
Review comment:
Ditto: the `else` branch of the `log` check calls `keyValueStoreStoreBuilder.withCachingDisabled()` where it should call `withLoggingDisabled()`.
##########
File path:
streams/src/test/java/org/apache/kafka/streams/integration/IQv2StoreIntegrationTest.java
##########
@@ -365,109 +395,200 @@ public void beforeTest() {
final Properties streamsConfig = streamsConfiguration(
cache,
log,
- storeToTest.name()
+ storeToTest.name(),
+ kind
);
final StreamsBuilder builder = new StreamsBuilder();
- if (supplier instanceof KeyValueBytesStoreSupplier) {
- final Materialized<Integer, Integer, KeyValueStore<Bytes, byte[]>>
materialized =
- Materialized.as((KeyValueBytesStoreSupplier) supplier);
+ if (Objects.equals(kind, "DSL") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValueDSLTopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
KeyValueBytesStoreSupplier) {
+ setUpKeyValuePAPITopology((KeyValueBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
WindowBytesStoreSupplier) {
+ setUpWindowDSLTopology((WindowBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
WindowBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else if (Objects.equals(kind, "DSL") && supplier instanceof
SessionBytesStoreSupplier) {
+ setUpSessionDSLTopology((SessionBytesStoreSupplier) supplier,
builder);
+ } else if (Objects.equals(kind, "PAPI") && supplier instanceof
SessionBytesStoreSupplier) {
+ throw new AssumptionViolatedException("Case not implemented yet");
+ } else {
+ throw new AssertionError("Store supplier is an unrecognized
type.");
+ }
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ // Don't need to wait for running, since tests can use iqv2 to wait
until they
+ // get a valid response.
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ kafkaStreams =
+ IntegrationTestUtils.getStartedStreams(
+ streamsConfig,
+ builder,
+ true
+ );
+ }
- if (storeToTest.global()) {
- builder.globalTable(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- } else {
- builder.table(
- INPUT_TOPIC_NAME,
- Consumed.with(Serdes.Integer(), Serdes.Integer()),
- materialized
- );
- }
- } else if (supplier instanceof WindowBytesStoreSupplier) {
- final Materialized<Integer, Integer, WindowStore<Bytes, byte[]>>
materialized =
- Materialized.as((WindowBytesStoreSupplier) supplier);
+ private void setUpSessionDSLTopology(final SessionBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, SessionStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
+ } else {
+ materialized.withCachingDisabled();
+ }
- builder
- .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
- .groupByKey()
- .windowedBy(TimeWindows.ofSizeWithNoGrace(WINDOW_SIZE))
- .aggregate(
- () -> 0,
- (key, value, aggregate) -> aggregate + value,
- materialized
- );
- } else if (supplier instanceof SessionBytesStoreSupplier) {
- final Materialized<Integer, Integer, SessionStore<Bytes, byte[]>>
materialized =
- Materialized.as((SessionBytesStoreSupplier) supplier);
+ builder
+ .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
+ .groupByKey()
+ .windowedBy(SessionWindows.ofInactivityGapWithNoGrace(WINDOW_SIZE))
+ .aggregate(
+ () -> 0,
+ (key, value, aggregate) -> aggregate + value,
+ (aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
+ materialized
+ );
+ }
- if (cache) {
- materialized.withCachingEnabled();
- } else {
- materialized.withCachingDisabled();
- }
+ private void setUpWindowDSLTopology(final WindowBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, WindowStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
- if (log) {
- materialized.withLoggingEnabled(Collections.emptyMap());
- } else {
- materialized.withCachingDisabled();
- }
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
- builder
- .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
- .groupByKey()
-
.windowedBy(SessionWindows.ofInactivityGapWithNoGrace(WINDOW_SIZE))
- .aggregate(
- () -> 0,
- (key, value, aggregate) -> aggregate + value,
- (aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
- materialized
- );
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
} else {
- throw new AssertionError("Store supplier is an unrecognized
type.");
+ materialized.withCachingDisabled();
}
- // Don't need to wait for running, since tests can use iqv2 to wait
until they
- // get a valid response.
+ builder
+ .stream(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(),
Serdes.Integer()))
+ .groupByKey()
+ .windowedBy(TimeWindows.ofSizeWithNoGrace(WINDOW_SIZE))
+ .aggregate(
+ () -> 0,
+ (key, value, aggregate) -> aggregate + value,
+ materialized
+ );
+ }
- kafkaStreams =
- IntegrationTestUtils.getStartedStreams(
- streamsConfig,
- builder,
- true
+ private void setUpKeyValueDSLTopology(final KeyValueBytesStoreSupplier
supplier,
+ final StreamsBuilder builder) {
+ final Materialized<Integer, Integer, KeyValueStore<Bytes, byte[]>>
materialized =
+ Materialized.as(supplier);
+
+ if (cache) {
+ materialized.withCachingEnabled();
+ } else {
+ materialized.withCachingDisabled();
+ }
+
+ if (log) {
+ materialized.withLoggingEnabled(Collections.emptyMap());
+ } else {
+ materialized.withCachingDisabled();
Review comment:
Ditto: the `else` branch of the `log` check calls `materialized.withCachingDisabled()` where it should call `withLoggingDisabled()`.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]