From 02934669ec11a50cafd78dbc95fa3e80bc26c656 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Thu, 20 Sep 2018 15:27:54 +0300 Subject: [PATCH 01/14] KAFKA-7277: Migration to Duration and Instant in public API. --- .../org/apache/kafka/streams/ApiUtils.java | 67 +++++++++++++++ .../apache/kafka/streams/KafkaStreams.java | 22 ++++- .../kafka/streams/kstream/JoinWindows.java | 82 +++++++++++++++---- .../kafka/streams/kstream/Materialized.java | 11 +-- .../kafka/streams/kstream/SessionWindows.java | 34 +++++--- .../kafka/streams/kstream/TimeWindows.java | 63 +++++++++++--- .../streams/kstream/UnlimitedWindows.java | 22 ++++- .../apache/kafka/streams/kstream/Window.java | 25 ++++++ .../streams/processor/ProcessorContext.java | 39 +++++++++ .../ForwardingDisabledProcessorContext.java | 8 +- .../internals/GlobalProcessorContextImpl.java | 9 ++ .../internals/ProcessorContextImpl.java | 7 ++ .../internals/StandbyContextImpl.java | 9 ++ .../streams/state/ReadOnlyWindowStore.java | 76 ++++++++++++++++- .../apache/kafka/streams/state/Stores.java | 74 ++++++++++++++--- .../state/internals/CachingWindowStore.java | 74 ++++++++++++----- .../ChangeLoggingWindowBytesStore.java | 26 +++++- .../CompositeReadOnlyWindowStore.java | 41 +++++++--- .../state/internals/MeteredWindowStore.java | 47 +++++++---- .../state/internals/RocksDBWindowStore.java | 17 ++++ .../processor/MockProcessorContext.java | 9 +- 21 files changed, 649 insertions(+), 113 deletions(-) create mode 100644 streams/src/main/java/org/apache/kafka/streams/ApiUtils.java diff --git a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java new file mode 100644 index 0000000000000..e44764fc98d96 --- /dev/null +++ b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.kafka.streams; + +import java.time.Duration; +import java.time.Instant; +import java.util.Objects; + +/** + */ +public final class ApiUtils { + private ApiUtils() { + } + + /** + * Validates that milliseconds from duration {@code d} can be retrieved. + * @param d Duration to check + * @param name Name of params for an error message. + */ + public static void validateMillisecondDuration(final Duration d, final String name) { + try { + Objects.requireNonNull(d); + + //noinspection ResultOfMethodCallIgnored + d.toMillis(); + } catch (final NullPointerException e) { + throw new IllegalArgumentException(name + " shouldn't be null.", e); + } catch (final ArithmeticException e) { + throw new IllegalArgumentException(name + " can't be converted to milliseconds. " + d + + " is negative or too big", e); + } + } + + /** + * Validates that milliseconds from instant {@code i} can be retrieved. + * @param i Instant to check + * @param name Name of params for an error message. 
+ */ + public static void validateMillisecondInstant(final Instant i, final String name) { + try { + Objects.requireNonNull(i); + + //noinspection ResultOfMethodCallIgnored + i.toEpochMilli(); + } catch (final NullPointerException e) { + throw new IllegalArgumentException(name + " shouldn't be null.", e); + } catch (final ArithmeticException e) { + throw new IllegalArgumentException(name + " can't be converted to milliseconds. " + i + + " is negative or too big", e); + } + } +} diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index 82323d9081de1..b84fb30197ad3 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams; +import java.time.Duration; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; @@ -826,9 +827,26 @@ public void close() { * @return {@code true} if all threads were successfully stopped—{@code false} if the timeout was reached * before all threads stopped * Note that this method must not be called in the {@code onChange} callback of {@link StateListener}. + * @deprecated Use {@link #close(Duration)} instead */ + @Deprecated public synchronized boolean close(final long timeout, final TimeUnit timeUnit) { - log.debug("Stopping Streams client with timeoutMillis = {} ms.", timeUnit.toMillis(timeout)); + return close(Duration.ofMillis(timeUnit.toMillis(timeout))); + } + + /** + * Shutdown this {@code KafkaStreams} by signaling all the threads to stop, and then wait up to the timeout for the + * threads to join. + * A {@code timeout} of 0 means to wait forever. 
+ * + * @param timeout how long to wait for the threads to shutdown + * @return {@code true} if all threads were successfully stopped—{@code false} if the timeout was reached + * before all threads stopped + * Note that this method must not be called in the {@code onChange} callback of {@link StateListener}. + * @throws IllegalArgumentException if {@code timeout} is negative or too big + */ + public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { + log.debug("Stopping Streams client with timeoutMillis = {} ms.", timeout.toMillis()); if (!setState(State.PENDING_SHUTDOWN)) { // if transition failed, it means it was either in PENDING_SHUTDOWN @@ -885,7 +903,7 @@ public void run() { shutdownThread.start(); } - if (waitOnState(State.NOT_RUNNING, timeUnit.toMillis(timeout))) { + if (waitOnState(State.NOT_RUNNING, timeout.toMillis())) { log.info("Streams client stopped completely"); return true; } else { diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java index 5e742e1a25e0d..3111f5e5f4d5d 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.processor.TimestampExtractor; import java.time.Duration; @@ -109,11 +110,27 @@ private JoinWindows(final long beforeMs, * the timestamp of the record from the primary stream. * * @param timeDifferenceMs join window interval in milliseconds - * @throws IllegalArgumentException if {@code timeDifferenceMs} is negative + * @throws IllegalArgumentException if {@code timeDifferenceMs} is negative or too big + * @deprecated Use {@link #of(Duration)} instead.
*/ + @Deprecated public static JoinWindows of(final long timeDifferenceMs) throws IllegalArgumentException { // This is a static factory method, so we initialize grace and retention to the defaults. - return new JoinWindows(timeDifferenceMs, timeDifferenceMs, null, DEFAULT_RETENTION_MS); + return of(Duration.ofMillis(timeDifferenceMs)); + } + + /** + * Specifies that records of the same key are joinable if their timestamps are within {@code timeDifference}, + * i.e., the timestamp of a record from the secondary stream is max {@code timeDifference} earlier or later than + * the timestamp of the record from the primary stream. + * + * @param timeDifference join window interval + * @throws IllegalArgumentException if {@code timeDifference} is negative or too big + */ + public static JoinWindows of(final Duration timeDifference) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); + + return new JoinWindows(timeDifference.toMillis(), timeDifference.toMillis(), null, DEFAULT_RETENTION_MS); } /** @@ -124,11 +141,30 @@ public static JoinWindows of(final long timeDifferenceMs) throws IllegalArgument * value (which would result in a negative window size). * * @param timeDifferenceMs relative window start time in milliseconds - * @throws IllegalArgumentException if the resulting window size is negative + * @throws IllegalArgumentException if the resulting window size is negative or too big + * @deprecated Use {@link #before(Duration)} instead. */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this + @Deprecated public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentException { - return new JoinWindows(timeDifferenceMs, afterMs, grace, maintainDurationMs, segments); + return before(Duration.ofMillis(timeDifferenceMs)); + } + + /** + * Changes the start window boundary to {@code timeDifference} but keep the end window boundary as is. 
* Thus, records of the same key are joinable if the timestamp of a record from the secondary stream is at most + * {@code timeDifference} earlier than the timestamp of the record from the primary stream. + * {@code timeDifference} can be negative but its absolute value must not be larger than current window "after" + * value (which would result in a negative window size). + * + * @param timeDifference relative window start time + * @throws IllegalArgumentException if the resulting window size is negative or too big + */ + @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this + public JoinWindows before(final Duration timeDifference) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); + + return new JoinWindows(timeDifference.toMillis(), afterMs, grace, maintainDurationMs, segments); } /** @@ -139,11 +175,30 @@ public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentExc * value (which would result in a negative window size). * * @param timeDifferenceMs relative window end time in milliseconds - * @throws IllegalArgumentException if the resulting window size is negative + * @throws IllegalArgumentException if the resulting window size is negative or too big + * @deprecated Use {@link #after(Duration)} instead */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this + @Deprecated public JoinWindows after(final long timeDifferenceMs) throws IllegalArgumentException { - return new JoinWindows(beforeMs, timeDifferenceMs, grace, maintainDurationMs, segments); + return after(Duration.ofMillis(timeDifferenceMs)); + } + + /** + * Changes the end window boundary to {@code timeDifference} but keep the start window boundary as is. + * Thus, records of the same key are joinable if the timestamp of a record from the secondary stream is at most + * {@code timeDifference} later than the timestamp of the record from the primary stream.
* {@code timeDifference} can be negative but its absolute value must not be larger than current window "before" + * value (which would result in a negative window size). + * + * @param timeDifference relative window end time + * @throws IllegalArgumentException if the resulting window size is negative or too big + */ + @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this + public JoinWindows after(final Duration timeDifference) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); + + return new JoinWindows(beforeMs, timeDifference.toMillis(), grace, maintainDurationMs, segments); } /** @@ -163,20 +218,19 @@ public long size() { } /** - * Reject late events that arrive more than {@code millisAfterWindowEnd} + * Reject late events that arrive more than {@code afterWindowEnd} * after the end of its window. * * Lateness is defined as (stream_time - record_timestamp). * - * @param millisAfterWindowEnd The grace period to admit late-arriving events to a window. + * @param afterWindowEnd The grace period to admit late-arriving events to a window.
* @return this updated builder */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this - public JoinWindows grace(final long millisAfterWindowEnd) { - if (millisAfterWindowEnd < 0) { - throw new IllegalArgumentException("Grace period must not be negative."); - } - return new JoinWindows(beforeMs, afterMs, Duration.ofMillis(millisAfterWindowEnd), maintainDurationMs, segments); + public JoinWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); + + return new JoinWindows(beforeMs, afterMs, afterWindowEnd, maintainDurationMs, segments); } @SuppressWarnings("deprecation") // continuing to support Windows#maintainMs/segmentInterval in fallback mode @@ -192,7 +246,7 @@ public long gracePeriodMs() { * @param durationMs the window retention time in milliseconds * @return itself * @throws IllegalArgumentException if {@code durationMs} is smaller than the window size - * @deprecated since 2.1. Use {@link JoinWindows#grace(long)} instead. + * @deprecated since 2.1. Use {@link JoinWindows#grace(Duration)} instead. 
*/ @SuppressWarnings("deprecation") @Override diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java index 15ec6ce877202..be15c446515e7 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java @@ -19,6 +19,7 @@ import org.apache.kafka.common.internals.Topic; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.processor.StateStore; import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier; import org.apache.kafka.streams.state.KeyValueStore; @@ -243,12 +244,12 @@ public Materialized withCachingDisabled() { * from window-start through window-end, and for the entire grace period. * * @return itself + * @throws IllegalArgumentException if retention is negative or too big */ - public Materialized withRetention(final long retentionMs) { - if (retentionMs < 0) { - throw new IllegalArgumentException("Retention must not be negative."); - } - retention = Duration.ofMillis(retentionMs); + public Materialized withRetention(final Duration retention) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(retention, "retention"); + + this.retention = retention; return this; } } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java index 9efb78c786650..dfb3dbd10f279 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.processor.TimestampExtractor; import 
org.apache.kafka.streams.state.SessionBytesStoreSupplier; @@ -87,13 +88,26 @@ private SessionWindows(final long gapMs, final long maintainDurationMs, final Du * @param inactivityGapMs the gap of inactivity between sessions in milliseconds * @return a new window specification with default maintain duration of 1 day * - * @throws IllegalArgumentException if {@code inactivityGapMs} is zero or negative + * @throws IllegalArgumentException if {@code inactivityGapMs} is zero or negative or too big + * @deprecated Use {@link #with(Duration)} instead. */ + @Deprecated public static SessionWindows with(final long inactivityGapMs) { - if (inactivityGapMs <= 0) { - throw new IllegalArgumentException("Gap time (inactivityGapMs) cannot be zero or negative."); - } - return new SessionWindows(inactivityGapMs, DEFAULT_RETENTION_MS, null); + return with(Duration.ofMillis(inactivityGapMs)); + } + + /** + * Create a new window specification with the specified inactivity gap. + * + * @param inactivityGap the gap of inactivity between sessions + * @return a new window specification with default maintain duration of 1 day + * + * @throws IllegalArgumentException if {@code inactivityGap} is zero or negative or too big + */ + public static SessionWindows with(final Duration inactivityGap) { + ApiUtils.validateMillisecondDuration(inactivityGap, "inactivityGap"); + + return new SessionWindows(inactivityGap.toMillis(), DEFAULT_RETENTION_MS, null); } /** @@ -124,18 +138,16 @@ public SessionWindows until(final long durationMs) throws IllegalArgumentExcepti * close times can lead to surprising results in which a too-late event is rejected and then * a subsequent event moves the window boundary forward. * - * @param millisAfterWindowEnd The grace period to admit late-arriving events to a window. + * @param afterWindowEnd The grace period to admit late-arriving events to a window.
* @return this updated builder */ - public SessionWindows grace(final long millisAfterWindowEnd) { - if (millisAfterWindowEnd < 0) { - throw new IllegalArgumentException("Grace period must not be negative."); - } + public SessionWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); return new SessionWindows( gapMs, maintainDurationMs, - Duration.ofMillis(millisAfterWindowEnd) + afterWindowEnd ); } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java index 6a58c2c09c4d7..32bef982ccc0d 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.kstream.internals.TimeWindow; import org.apache.kafka.streams.processor.TimestampExtractor; import org.apache.kafka.streams.state.WindowBytesStoreSupplier; @@ -99,14 +100,31 @@ private TimeWindows(final long sizeMs, * * @param sizeMs The size of the window in milliseconds * @return a new window definition with default maintain duration of 1 day - * @throws IllegalArgumentException if the specified window size is zero or negative + * @throws IllegalArgumentException if the specified window size is zero or negative or too big + * @deprecated Use {@link #of(Duration)} instead */ + @Deprecated public static TimeWindows of(final long sizeMs) throws IllegalArgumentException { - if (sizeMs <= 0) { - throw new IllegalArgumentException("Window size (sizeMs) must be larger than zero."); - } + return of(Duration.ofMillis(sizeMs)); + } + + /** + * Return a window definition with the given window size, and with the advance interval being equal to the window + * size. 
+ * The time interval represented by the N-th window is: {@code [N * size, N * size + size)}. + *

+ * This provides the semantics of tumbling windows, which are fixed-sized, gap-less, non-overlapping windows. + * Tumbling windows are a special case of hopping windows with {@code advance == size}. + * + * @param size The size of the window + * @return a new window definition with default maintain duration of 1 day + * @throws IllegalArgumentException if the specified window size is zero or negative or too big + */ + public static TimeWindows of(final Duration size) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(size, "size"); + // This is a static factory method, so we initialize grace and retention to the defaults. - return new TimeWindows(sizeMs, sizeMs, null, DEFAULT_RETENTION_MS); + return new TimeWindows(size.toMillis(), size.toMillis(), null, DEFAULT_RETENTION_MS); } /** @@ -120,9 +138,32 @@ public static TimeWindows of(final long sizeMs) throws IllegalArgumentException * {@code 0 < advanceMs ≤ sizeMs}. * @return a new window definition with default maintain duration of 1 day * @throws IllegalArgumentException if the advance interval is negative, zero, or larger-or-equal the window size + * @deprecated Use {@link #advanceBy(Duration)} instead */ @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows + @Deprecated public TimeWindows advanceBy(final long advanceMs) { + return advanceBy(Duration.ofMillis(advanceMs)); + } + + /** + * Return a window definition with the original size, but advance ("hop") the window by the given interval, which + * specifies by how much a window moves forward relative to the previous one. + * The time interval represented by the N-th window is: {@code [N * advance, N * advance + size)}. + *

+ * This provides the semantics of hopping windows, which are fixed-sized, overlapping windows. + * + * @param advance The advance interval ("hop") of the window, with the requirement that + * {@code 0 < advance.toMillis() ≤ sizeMs}. + * @return a new window definition with default maintain duration of 1 day + * @throws IllegalArgumentException if the advance interval is negative, zero, or larger-or-equal the window size + */ + @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows + public TimeWindows advanceBy(final Duration advance) { + ApiUtils.validateMillisecondDuration(advance, "advance"); + + long advanceMs = advance.toMillis(); + if (advanceMs <= 0 || advanceMs > sizeMs) { throw new IllegalArgumentException(String.format("AdvanceMs must lie within interval (0, %d].", sizeMs)); } @@ -152,15 +193,15 @@ public long size() { * * Lateness is defined as (stream_time - record_timestamp). * - * @param millisAfterWindowEnd The grace period to admit late-arriving events to a window. + * @param afterWindowEnd The grace period to admit late-arriving events to a window. 
* @return this updated builder + * @throws IllegalArgumentException if afterWindowEnd is negative or too big */ @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows - public TimeWindows grace(final long millisAfterWindowEnd) { - if (millisAfterWindowEnd < 0) { - throw new IllegalArgumentException("Grace period must not be negative."); - } - return new TimeWindows(sizeMs, advanceMs, Duration.ofMillis(millisAfterWindowEnd), maintainDurationMs, segments); + public TimeWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); + + return new TimeWindows(sizeMs, advanceMs, afterWindowEnd, maintainDurationMs, segments); } @SuppressWarnings("deprecation") // continuing to support Windows#maintainMs/segmentInterval in fallback mode diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java index 73aa9b1cd2abe..788096364a347 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.kstream; +import java.time.Instant; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.kstream.internals.UnlimitedWindow; import org.apache.kafka.streams.processor.TimestampExtractor; @@ -62,12 +64,24 @@ public static UnlimitedWindows of() { * @param startMs the window start time * @return a new unlimited window that starts at {@code startMs} * @throws IllegalArgumentException if the start time is negative + * @deprecated Use {@link #startOn(Instant)} instead */ + @Deprecated public UnlimitedWindows startOn(final long startMs) throws IllegalArgumentException { - if (startMs < 0) { - throw new IllegalArgumentException("Window start time (startMs) cannot be negative."); - } - 
return new UnlimitedWindows(startMs); + return startOn(Instant.ofEpochMilli(startMs)); + } + + /** + * Return a new unlimited window for the specified start timestamp. + * + * @param start the window start time + * @return a new unlimited window that starts at {@code start} + * @throws IllegalArgumentException if the start time is negative + */ + public UnlimitedWindows startOn(final Instant start) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(start, "start"); + + return new UnlimitedWindows(start.toEpochMilli()); } @Override diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/Window.java b/streams/src/main/java/org/apache/kafka/streams/kstream/Window.java index f6250683282c8..ac49174b995af 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/Window.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/Window.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream; +import java.time.Instant; import org.apache.kafka.streams.processor.TimestampExtractor; /** @@ -36,6 +37,9 @@ public abstract class Window { protected final long startMs; protected final long endMs; + private final Instant startTime; + private final Instant endTime; + /** * Create a new window for the given start and end time. @@ -53,6 +57,9 @@ public Window(final long startMs, final long endMs) throws IllegalArgumentExcept } this.startMs = startMs; this.endMs = endMs; + + this.startTime = Instant.ofEpochMilli(startMs); + this.endTime = Instant.ofEpochMilli(endMs); } /** @@ -69,6 +76,24 @@ public long end() { return endMs; } + /** + * Return the start time of this window. + * + * @return The start time of this window. + */ + public Instant startTime() { + return startTime; + } + + /** + * Return the end time of this window. + * + * @return The end time of this window. + */ + public Instant endTime() { + return endTime; + } + /** * Check if the given window overlaps with this window. 
* Should throw an {@link IllegalArgumentException} if the {@code other} window has a different type than {@code diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java b/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java index d21667fc6dd4e..bef6c9cd3842e 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor; +import java.time.Duration; import org.apache.kafka.common.annotation.InterfaceStability; import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.serialization.Serde; @@ -124,11 +125,49 @@ void register(final StateStore store, * @param type one of: {@link PunctuationType#STREAM_TIME}, {@link PunctuationType#WALL_CLOCK_TIME} * @param callback a function consuming timestamps representing the current stream or system time * @return a handle allowing cancellation of the punctuation schedule established by this method + * @deprecated Use {@link #schedule(Duration, PunctuationType, Punctuator)} instead */ + @Deprecated Cancellable schedule(final long intervalMs, final PunctuationType type, final Punctuator callback); + /** + * Schedules a periodic operation for processors. A processor may call this method during + * {@link Processor#init(ProcessorContext) initialization} or + * {@link Processor#process(Object, Object) processing} to + * schedule a periodic callback - called a punctuation - to {@link Punctuator#punctuate(long)}. + * The type parameter controls what notion of time is used for punctuation: + *

    + *
  • {@link PunctuationType#STREAM_TIME} - uses "stream time", which is advanced by the processing of messages + * in accordance with the timestamp as extracted by the {@link TimestampExtractor} in use. + * The first punctuation will be triggered by the first record that is processed. + * NOTE: Only advanced if messages arrive
  • + *
  • {@link PunctuationType#WALL_CLOCK_TIME} - uses system time (the wall-clock time), + * which is advanced at the polling interval ({@link org.apache.kafka.streams.StreamsConfig#POLL_MS_CONFIG}) + * independent of whether new messages arrive. + * The first punctuation will be triggered after interval has elapsed. + * NOTE: This is best effort only as its granularity is limited by how long an iteration of the + * processing loop takes to complete
  • + *
+ * + * Skipping punctuations: Punctuations will not be triggered more than once at any given timestamp. + * This means that "missed" punctuation will be skipped. + * It's possible to "miss" a punctuation if: + *
    + *
  • with {@link PunctuationType#STREAM_TIME}, when stream time advances more than interval
  • + *
  • with {@link PunctuationType#WALL_CLOCK_TIME}, on GC pause, too short interval, ...
  • + *
+ * + * @param interval the time interval between punctuations + * @param type one of: {@link PunctuationType#STREAM_TIME}, {@link PunctuationType#WALL_CLOCK_TIME} + * @param callback a function consuming timestamps representing the current stream or system time + * @return a handle allowing cancellation of the punctuation schedule established by this method + */ + Cancellable schedule(final Duration interval, + final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException; + /** * Forwards a key/value pair to all downstream processors. * Used the input record's timestamp as timestamp for the output record. diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java index 7e2610cc8fe05..ad6987d018a11 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor.internals; +import java.time.Duration; import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.streams.StreamsMetrics; @@ -88,7 +89,12 @@ public StateStore getStateStore(final String name) { public Cancellable schedule(final long intervalMs, final PunctuationType type, final Punctuator callback) { - return delegate.schedule(intervalMs, type, callback); + return schedule(Duration.ofMillis(intervalMs), type, callback); + } + + @Override + public Cancellable schedule(Duration interval, PunctuationType type, Punctuator callback) throws IllegalArgumentException { + return delegate.schedule(interval, type, callback); } @Override diff --git 
a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java index 56af6e50a697d..b894ca33927c3 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor.internals; +import java.time.Duration; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.processor.Cancellable; import org.apache.kafka.streams.processor.PunctuationType; @@ -96,6 +97,14 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin throw new UnsupportedOperationException("this should not happen: schedule() not supported in global processor context."); } + /** + * @throws UnsupportedOperationException on every invocation + */ + @Override + public Cancellable schedule(Duration interval, PunctuationType type, Punctuator callback) throws IllegalArgumentException { + throw new UnsupportedOperationException("this should not happen: schedule() not supported in global processor context."); + } + @Override public long streamTime() { throw new RuntimeException("Stream time is not implemented for the global processor context."); diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java index ee21379c79ff9..587ab8fb39673 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor.internals; +import java.time.Duration; import 
org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.processor.Cancellable; @@ -154,6 +155,12 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin return task.schedule(interval, type, callback); } + @Override + public Cancellable schedule(Duration interval, PunctuationType type, + Punctuator callback) throws IllegalArgumentException { + return schedule(interval.toMillis(), type, callback); + } + void setStreamTimeSupplier(final TimestampSupplier streamTimeSupplier) { this.streamTimeSupplier = streamTimeSupplier; } diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java index e8631aaf85a4f..776c11208045c 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor.internals; +import java.time.Duration; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.header.Headers; @@ -193,6 +194,14 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin throw new UnsupportedOperationException("this should not happen: schedule() not supported in standby tasks."); } + /** + * @throws UnsupportedOperationException on every invocation + */ + @Override + public Cancellable schedule(Duration interval, PunctuationType type, Punctuator callback) throws IllegalArgumentException { + throw new UnsupportedOperationException("this should not happen: schedule() not supported in standby tasks."); + } + /** * @throws UnsupportedOperationException on every invocation */ diff --git 
a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java index dea759f486c30..9bc2717b72a80 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; @@ -73,9 +75,49 @@ public interface ReadOnlyWindowStore { * @return an iterator over key-value pairs {@code } * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for key. + * @deprecated Use {@link #fetch(K, Instant, Duration)} instead */ + @Deprecated WindowStoreIterator fetch(K key, long timeFrom, long timeTo); + /** + * Get all the key-value pairs with the given key and the time range from all + * the existing windows. + * + * This iterator must be closed after use. + *

+ * The time range is inclusive and applies to the starting timestamp of the window. + * For example, if we have the following windows: + *

+ *

+     * +-------------------------------+
+     * |  key  | start time | end time |
+     * +-------+------------+----------+
+     * |   A   |     10     |    20    |
+     * +-------+------------+----------+
+     * |   A   |     15     |    25    |
+     * +-------+------------+----------+
+     * |   A   |     20     |    30    |
+     * +-------+------------+----------+
+     * |   A   |     25     |    35    |
+     * +-------+------------+----------+
+     * 
+ * And we call {@code store.fetch("A", 10, 20)} then the results will contain the first + * three windows from the table above, i.e., all those where 10 <= start time <= 20. + *

+ * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest + * available window to the newest/latest window. + * + * @param key the key to fetch + * @param from time range start (inclusive) + * @param duration duration of range + * @return an iterator over key-value pairs {@code } + * @throws InvalidStateStoreException if the store is not initialized + * @throws NullPointerException If {@code null} is used for key. + * @throws IllegalArgumentException if duration is negative or too big + */ + WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException; + /** * Get all the key-value pairs in the given key range and time range from all * the existing windows. @@ -91,7 +133,25 @@ public interface ReadOnlyWindowStore { * @throws NullPointerException If {@code null} is used for any key. */ KeyValueIterator, V> fetch(K from, K to, long timeFrom, long timeTo); - + + /** + * Get all the key-value pairs in the given key range and time range from all + * the existing windows. + * + * This iterator must be closed after use. + * + * @param from the first key in the range + * @param to the last key in the range + * @param fromTime time range start (inclusive) + * @param duration time range duration + * @return an iterator over windowed key-value pairs {@code , value>} + * @throws InvalidStateStoreException if the store is not initialized + * @throws NullPointerException If {@code null} is used for any key. + * @throws IllegalArgumentException if duration is negative or too big + */ + KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) + throws IllegalArgumentException; + /** * Gets all the key-value pairs in the existing windows. 
* @@ -108,6 +168,20 @@ public interface ReadOnlyWindowStore { * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException if {@code null} is used for any key + * @deprecated Use {@link #fetchAll(Instant, Duration)} instead */ + @Deprecated KeyValueIterator, V> fetchAll(long timeFrom, long timeTo); + + /** + * Gets all the key-value pairs that belong to the windows within the given time range. + * + * @param from the beginning of the time slot from which to search (inclusive) + * @param duration the duration of the time slot from which to search (inclusive) + * @return an iterator over windowed key-value pairs {@code , value>} + * @throws InvalidStateStoreException if the store is not initialized + * @throws NullPointerException if {@code null} is used for any key + * @throws IllegalArgumentException if duration is negative or too big + */ + KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws IllegalArgumentException; } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java index 6e965fb27fab5..c28304f6fdf07 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java @@ -16,11 +16,13 @@ */ package org.apache.kafka.streams.state; +import java.time.Duration; import org.apache.kafka.common.annotation.InterfaceStability; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.common.utils.Time; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.state.internals.InMemoryKeyValueStore; import org.apache.kafka.streams.state.internals.KeyValueStoreBuilder; import org.apache.kafka.streams.state.internals.MemoryNavigableLRUCache; @@
-186,13 +188,38 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, * @param windowSize size of the windows (cannot be negative) * @param retainDuplicates whether or not to retain duplicates. * @return an instance of {@link WindowBytesStoreSupplier} + * @deprecated Use {@link #persistentWindowStore(String, Duration, Duration, boolean)} instead */ + @Deprecated public static WindowBytesStoreSupplier persistentWindowStore(final String name, final long retentionPeriod, final long windowSize, final boolean retainDuplicates) { + return persistentWindowStore(name, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), + retainDuplicates); + } + + /** + * Create a persistent {@link WindowBytesStoreSupplier}. + * @param name name of the store (cannot be {@code null}) + * @param retentionPeriod length of time to retain data in the store (cannot be negative) + * Note that the retention period must be at least long enough to contain the + * windowed data's entire life cycle, from window-start through window-end, + * and for the entire grace period. + * @param windowSize size of the windows (cannot be negative) + * @param retainDuplicates whether or not to retain duplicates. + * @return an instance of {@link WindowBytesStoreSupplier} + */ + public static WindowBytesStoreSupplier persistentWindowStore(final String name, + final Duration retentionPeriod, + final Duration windowSize, + final boolean retainDuplicates) throws IllegalArgumentException { + Objects.requireNonNull(name, "name cannot be null"); + ApiUtils.validateMillisecondDuration(retentionPeriod, "retentionPeriod"); + ApiUtils.validateMillisecondDuration(windowSize, "windowSize"); + // we're arbitrarily defaulting to segments no smaller than one minute. 
- final long defaultSegmentInterval = Math.max(retentionPeriod / 2, 60_000L); + final long defaultSegmentInterval = Math.max(retentionPeriod.toMillis() / 2, 60_000L); return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, defaultSegmentInterval); } @@ -207,29 +234,54 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, * @param windowSize size of the windows (cannot be negative) * @param retainDuplicates whether or not to retain duplicates. * @return an instance of {@link WindowBytesStoreSupplier} + * @deprecated Use {@link #persistentWindowStore(String, Duration, Duration, boolean, long)} */ + @Deprecated public static WindowBytesStoreSupplier persistentWindowStore(final String name, final long retentionPeriod, final long windowSize, final boolean retainDuplicates, final long segmentInterval) { + return persistentWindowStore(name, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), + retainDuplicates, segmentInterval); + } + + /** + * Create a persistent {@link WindowBytesStoreSupplier}. + * @param name name of the store (cannot be {@code null}) + * @param retentionPeriod length of time to retain data in the store (cannot be negative) + * Note that the retention period must be at least long enough to contain the + * windowed data's entire life cycle, from window-start through window-end, + * and for the entire grace period. + * @param segmentInterval size of segments + * @param windowSize size of the windows + * @param retainDuplicates whether or not to retain duplicates. 
+ * @return an instance of {@link WindowBytesStoreSupplier} + */ + public static WindowBytesStoreSupplier persistentWindowStore(final String name, + final Duration retentionPeriod, + final Duration windowSize, + final boolean retainDuplicates, + final long segmentInterval) { Objects.requireNonNull(name, "name cannot be null"); - if (retentionPeriod < 0L) { - throw new IllegalArgumentException("retentionPeriod cannot be negative"); - } - if (windowSize < 0L) { - throw new IllegalArgumentException("windowSize cannot be negative"); - } + ApiUtils.validateMillisecondDuration(retentionPeriod, "retentionPeriod"); + ApiUtils.validateMillisecondDuration(windowSize, "windowSize"); + if (segmentInterval < 1L) { throw new IllegalArgumentException("segmentInterval cannot be zero or negative"); } - if (windowSize > retentionPeriod) { + + long retentionPeriodMs = retentionPeriod.toMillis(); + long windowSizeMs = windowSize.toMillis(); + + if (windowSizeMs > retentionPeriodMs) { throw new IllegalArgumentException("The retention period of the window store " - + name + " must be no smaller than its window size. Got size=[" - + windowSize + "], retention=[" + retentionPeriod + "]"); + + name + " must be no smaller than its window size. 
Got size=[" + + windowSize + "], retention=[" + retentionPeriod + "]"); } - return new RocksDbWindowBytesStoreSupplier(name, retentionPeriod, segmentInterval, windowSize, retainDuplicates); + return new RocksDbWindowBytesStoreSupplier(name, retentionPeriodMs, segmentInterval, + windowSizeMs, retainDuplicates); } /** diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java index 688e88962a2a1..aae9adc20b355 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; @@ -180,22 +182,31 @@ public byte[] fetch(final Bytes key, final long timestamp) { @Override public synchronized WindowStoreIterator fetch(final Bytes key, final long timeFrom, final long timeTo) { + return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public WindowStoreIterator fetch(Bytes key, Instant from, Duration duration) throws IllegalArgumentException { // since this function may not access the underlying inner store, we need to validate // if store is open outside as well. 
validateStoreOpen(); - final WindowStoreIterator underlyingIterator = underlying.fetch(key, timeFrom, timeTo); + final WindowStoreIterator underlyingIterator = underlying.fetch(key, from, duration); if (cache == null) { return underlyingIterator; } + + final long timeFrom = from.toEpochMilli(); + final long timeTo = from.toEpochMilli() + duration.toMillis(); + final Bytes cacheKeyFrom = cacheFunction.cacheKey(keySchema.lowerRangeFixedSize(key, timeFrom)); final Bytes cacheKeyTo = cacheFunction.cacheKey(keySchema.upperRangeFixedSize(key, timeTo)); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, cacheKeyFrom, cacheKeyTo); final HasNextCondition hasNextCondition = keySchema.hasNextCondition(key, - key, - timeFrom, - timeTo); + key, + timeFrom, + timeTo); final PeekingKeyValueIterator filteredCacheIterator = new FilteredCacheIterator( cacheIterator, hasNextCondition, cacheFunction ); @@ -204,12 +215,24 @@ public synchronized WindowStoreIterator fetch(final Bytes key, final lon } @Override - public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final long timeFrom, final long timeTo) { + public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final long timeFrom, + final long timeTo) { + return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public KeyValueIterator, byte[]> fetch(Bytes from, Bytes to, Instant fromTime, + Duration duration) throws IllegalArgumentException { + // since this function may not access the underlying inner store, we need to validate // if store is open outside as well. 
validateStoreOpen(); - final KeyValueIterator, byte[]> underlyingIterator = underlying.fetch(from, to, timeFrom, timeTo); + final long timeFrom = fromTime.toEpochMilli(); + final long timeTo = fromTime.toEpochMilli() + duration.toMillis(); + + final KeyValueIterator, byte[]> underlyingIterator = + underlying.fetch(from, to, fromTime, duration); if (cache == null) { return underlyingIterator; } @@ -218,10 +241,11 @@ public KeyValueIterator, byte[]> fetch(final Bytes from, final B final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, cacheKeyFrom, cacheKeyTo); final HasNextCondition hasNextCondition = keySchema.hasNextCondition(from, - to, - timeFrom, - timeTo); - final PeekingKeyValueIterator filteredCacheIterator = new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction); + to, + timeFrom, + timeTo); + final PeekingKeyValueIterator filteredCacheIterator = + new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction); return new MergedSortedCacheWindowStoreKeyValueIterator( filteredCacheIterator, @@ -231,7 +255,7 @@ public KeyValueIterator, byte[]> fetch(final Bytes from, final B cacheFunction ); } - + private V fetchPrevious(final Bytes key, final long timestamp) { final byte[] value = underlying.fetch(key, timestamp); if (value != null) { @@ -258,21 +282,29 @@ public KeyValueIterator, byte[]> all() { @Override public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { + return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public KeyValueIterator, byte[]> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { validateStoreOpen(); - - final KeyValueIterator, byte[]> underlyingIterator = underlying.fetchAll(timeFrom, timeTo); + + final long timeFrom = from.toEpochMilli(); + final long timeTo = from.toEpochMilli() + duration.toMillis(); + + final KeyValueIterator, byte[]> underlyingIterator = 
underlying.fetchAll(from, duration); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(name); - + final HasNextCondition hasNextCondition = keySchema.hasNextCondition(null, null, timeFrom, timeTo); final PeekingKeyValueIterator filteredCacheIterator = new FilteredCacheIterator(cacheIterator, - hasNextCondition, - cacheFunction); + hasNextCondition, + cacheFunction); return new MergedSortedCacheWindowStoreKeyValueIterator( - filteredCacheIterator, - underlyingIterator, - bytesSerdes, - windowSize, - cacheFunction + filteredCacheIterator, + underlyingIterator, + bytesSerdes, + windowSize, + cacheFunction ); } } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java index 785aacd25c1bd..e968b1acfc6af 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.kstream.Windowed; @@ -53,12 +55,23 @@ public byte[] fetch(final Bytes key, final long timestamp) { @Override public WindowStoreIterator fetch(final Bytes key, final long from, final long to) { - return bytesStore.fetch(key, from, to); + return fetch(key, Instant.ofEpochMilli(from), Duration.ofMillis(to - from)); + } + + @Override + public WindowStoreIterator fetch(Bytes key, Instant from, Duration duration) throws IllegalArgumentException { + return bytesStore.fetch(key, from, duration); } @Override public KeyValueIterator, byte[]> fetch(final Bytes keyFrom, final Bytes keyTo, final long from, final long to) { - return 
bytesStore.fetch(keyFrom, keyTo, from, to); + return fetch(keyFrom, keyTo, Instant.ofEpochMilli(from), Duration.ofMillis(to - from)); + } + + @Override + public KeyValueIterator, byte[]> fetch(Bytes from, Bytes to, Instant fromTime, + Duration duration) throws IllegalArgumentException { + return bytesStore.fetch(from, to, fromTime, duration); } @Override @@ -68,9 +81,14 @@ public KeyValueIterator, byte[]> all() { @Override public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { - return bytesStore.fetchAll(timeFrom, timeTo); + return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } - + + @Override + public KeyValueIterator, byte[]> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { + return bytesStore.fetchAll(from, duration); + } + @Override public void put(final Bytes key, final byte[] value) { put(key, value, context.timestamp()); diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java index 1b5d5e5611a9b..0ac8681b8c107 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.state.KeyValueIterator; @@ -65,11 +67,16 @@ public V fetch(final K key, final long time) { @Override public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { + return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public WindowStoreIterator fetch(K 
key, Instant from, Duration duration) throws IllegalArgumentException { Objects.requireNonNull(key, "key can't be null"); final List> stores = provider.stores(storeName, windowStoreType); for (final ReadOnlyWindowStore windowStore : stores) { try { - final WindowStoreIterator result = windowStore.fetch(key, timeFrom, timeTo); + final WindowStoreIterator result = windowStore.fetch(key, from, duration); if (!result.hasNext()) { result.close(); } else { @@ -77,8 +84,8 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } } catch (final InvalidStateStoreException e) { throw new InvalidStateStoreException( - "State store is not available anymore and may have been migrated to another instance; " + - "please re-discover its location from the state metadata."); + "State store is not available anymore and may have been migrated to another instance; " + + "please re-discover its location from the state metadata."); } } return KeyValueIterators.emptyWindowStoreIterator(); @@ -86,20 +93,25 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long @Override public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { + return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) throws IllegalArgumentException { Objects.requireNonNull(from, "from can't be null"); Objects.requireNonNull(to, "to can't be null"); final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @Override public KeyValueIterator, V> apply(final ReadOnlyWindowStore store) { - return store.fetch(from, to, timeFrom, timeTo); + return store.fetch(from, to, fromTime, duration); } }; return new DelegatingPeekingKeyValueIterator<>(storeName, - new CompositeKeyValueIterator<>( - provider.stores(storeName, 
windowStoreType).iterator(), - nextIteratorFunction)); + new CompositeKeyValueIterator<>( + provider.stores(storeName, windowStoreType).iterator(), + nextIteratorFunction)); } - + @Override public KeyValueIterator, V> all() { final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @@ -116,15 +128,20 @@ public KeyValueIterator, V> apply(final ReadOnlyWindowStore st @Override public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { + return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @Override public KeyValueIterator, V> apply(final ReadOnlyWindowStore store) { - return store.fetchAll(timeFrom, timeTo); + return store.fetchAll(from, duration); } }; return new DelegatingPeekingKeyValueIterator<>(storeName, - new CompositeKeyValueIterator<>( - provider.stores(storeName, windowStoreType).iterator(), - nextIteratorFunction)); + new CompositeKeyValueIterator<>( + provider.stores(storeName, windowStoreType).iterator(), + nextIteratorFunction)); } } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java index 5162eac8848d2..d68f3e23bf31e 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.common.metrics.Sensor; import org.apache.kafka.common.serialization.Serde; import 
org.apache.kafka.common.utils.Bytes; @@ -140,11 +142,16 @@ public V fetch(final K key, final long timestamp) { @Override public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { - return new MeteredWindowStoreIterator<>(inner.fetch(keyBytes(key), timeFrom, timeTo), - fetchTime, - metrics, - serdes, - time); + return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException { + return new MeteredWindowStoreIterator<>(inner.fetch(keyBytes(key), from, duration), + fetchTime, + metrics, + serdes, + time); } @Override @@ -154,20 +161,30 @@ public KeyValueIterator, V> all() { @Override public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { - return new MeteredWindowedKeyValueIterator<>(inner.fetchAll(timeFrom, timeTo), - fetchTime, - metrics, - serdes, - time); + return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { + return new MeteredWindowedKeyValueIterator<>(inner.fetchAll(from, duration), + fetchTime, + metrics, + serdes, + time); } @Override public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { - return new MeteredWindowedKeyValueIterator<>(inner.fetch(keyBytes(from), keyBytes(to), timeFrom, timeTo), - fetchTime, - metrics, - serdes, - time); + return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) throws IllegalArgumentException { + return new MeteredWindowedKeyValueIterator<>(inner.fetch(keyBytes(from), keyBytes(to), fromTime, duration), + fetchTime, + metrics, + serdes, + time); } @Override diff --git 
a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java index d7bb523b049ff..8003a0489ea01 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.kstream.Windowed; @@ -91,12 +93,22 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).valuesIterator(); } + @Override + public WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException { + return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + } + @Override public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(from)), Bytes.wrap(serdes.rawKey(to)), timeFrom, timeTo); return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); } + @Override + public KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) throws IllegalArgumentException { + return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + } + @Override public KeyValueIterator, V> all() { final KeyValueIterator bytesIterator = bytesStore.all(); @@ -109,6 +121,11 @@ public KeyValueIterator, V> fetchAll(final long timeFrom, final long return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); } + @Override + public KeyValueIterator, V> 
fetchAll(Instant from, Duration duration) throws IllegalArgumentException { + return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + } + private void maybeUpdateSeqnumForDups() { if (retainDuplicates) { seqnum = (seqnum + 1) & 0x7FFFFFFF; diff --git a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java index cba02573b59da..2d67c06e44599 100644 --- a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java +++ b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor; +import java.time.Duration; import org.apache.kafka.common.annotation.InterfaceStability; import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.metrics.Metrics; @@ -378,7 +379,13 @@ public StateStore getStateStore(final String name) { @Override public Cancellable schedule(final long intervalMs, final PunctuationType type, final Punctuator callback) { - final CapturedPunctuator capturedPunctuator = new CapturedPunctuator(intervalMs, type, callback); + return schedule(Duration.ofMillis(intervalMs), type, callback); + } + + @Override + public Cancellable schedule(Duration interval, PunctuationType type, + Punctuator callback) throws IllegalArgumentException { + final CapturedPunctuator capturedPunctuator = new CapturedPunctuator(interval.toMillis(), type, callback); punctuators.add(capturedPunctuator); From 97304615fcfec2b4e149fda80299d67da11b7678 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Sun, 23 Sep 2018 00:28:56 +0300 Subject: [PATCH 02/14] KAFKA-7277: Call in tests and examples are changed to Duration versions. 
--- .../examples/pageview/PageViewTypedDemo.java | 3 +- .../pageview/PageViewUntypedDemo.java | 3 +- .../examples/temperature/TemperatureDemo.java | 3 +- .../wordcount/WordCountProcessorDemo.java | 3 +- .../kafka/streams/kstream/Transformer.java | 5 +- .../streams/kstream/ValueTransformer.java | 5 +- .../kstream/ValueTransformerWithKey.java | 7 +- .../kafka/streams/processor/Cancellable.java | 4 +- .../kafka/streams/processor/Processor.java | 3 +- .../streams/processor/PunctuationType.java | 4 +- .../kafka/streams/processor/Punctuator.java | 4 +- .../internals/ProcessorContextImpl.java | 2 +- .../apache/kafka/streams/TopologyTest.java | 13 +-- .../AbstractResetIntegrationTest.java | 3 +- .../InternalTopicIntegrationTest.java | 5 +- ...StreamAggregationDedupIntegrationTest.java | 6 +- .../KStreamAggregationIntegrationTest.java | 14 +-- .../QueryableStateIntegrationTest.java | 5 +- .../RepartitionOptimizingIntegrationTest.java | 5 +- .../StreamStreamJoinIntegrationTest.java | 17 ++-- .../streams/kstream/JoinWindowsTest.java | 87 ++++++++++--------- .../streams/kstream/SessionWindowsTest.java | 41 ++++----- .../streams/kstream/TimeWindowsTest.java | 77 ++++++++-------- .../streams/kstream/UnlimitedWindowsTest.java | 17 ++-- .../internals/KGroupedStreamImplTest.java | 45 +++++----- .../kstream/internals/KStreamImplTest.java | 17 ++-- .../internals/KStreamKStreamJoinTest.java | 15 ++-- .../internals/KStreamKStreamLeftJoinTest.java | 5 +- ...amSessionWindowAggregateProcessorTest.java | 7 +- .../internals/KStreamTransformTest.java | 3 +- .../internals/KStreamWindowAggregateTest.java | 13 +-- .../internals/KStreamWindowReduceTest.java | 5 +- .../SessionWindowedKStreamImplTest.java | 5 +- .../kstream/internals/TimeWindowTest.java | 5 +- .../TimeWindowedKStreamImplTest.java | 5 +- .../internals/graph/StreamsGraphTest.java | 7 +- .../kafka/streams/perf/SimpleBenchmark.java | 11 +-- .../kafka/streams/perf/YahooBenchmark.java | 3 +- .../processor/internals/StandbyTaskTest.java 
| 9 +- .../processor/internals/StreamThreadTest.java | 5 +- .../StreamsPartitionAssignorTest.java | 3 +- .../kafka/streams/tests/SmokeTestClient.java | 9 +- .../org/apache/kafka/test/MockProcessor.java | 3 +- .../streams/scala/kstream/KStreamTest.scala | 4 +- .../kafka/streams/TopologyTestDriver.java | 4 +- .../streams/MockProcessorContextTest.java | 3 +- .../kafka/streams/TopologyTestDriverTest.java | 7 +- 47 files changed, 290 insertions(+), 239 deletions(-) diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java index 871d83606f17b..14866cb4191ef 100644 --- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java +++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java @@ -19,6 +19,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.errors.SerializationException; import org.apache.kafka.common.serialization.Deserializer; @@ -207,7 +208,7 @@ public static void main(final String[] args) { }) .map((user, viewRegion) -> new KeyValue<>(viewRegion.region, viewRegion)) .groupByKey(Serialized.with(Serdes.String(), new JSONSerde<>())) - .windowedBy(TimeWindows.of(TimeUnit.DAYS.toMillis(7)).advanceBy(TimeUnit.SECONDS.toMillis(1))) + .windowedBy(TimeWindows.of(Duration.ofDays(7)).advanceBy(Duration.ofSeconds(1))) .count() .toStream() .map((key, value) -> { diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewUntypedDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewUntypedDemo.java index e2b7c377682f4..25b10e8fa067c 100644 --- 
a/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewUntypedDemo.java +++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewUntypedDemo.java @@ -19,6 +19,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.Serde; @@ -87,7 +88,7 @@ public static void main(final String[] args) throws Exception { }) .map((user, viewRegion) -> new KeyValue<>(viewRegion.get("region").textValue(), viewRegion)) .groupByKey(Serialized.with(Serdes.String(), jsonSerde)) - .windowedBy(TimeWindows.of(7 * 24 * 60 * 60 * 1000L).advanceBy(1000)) + .windowedBy(TimeWindows.of(Duration.ofDays(7)).advanceBy(Duration.ofSeconds(1))) .count() .toStream() .map((key, value) -> { diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java index c1c7a427caf2c..b91b5a4d4ffc3 100644 --- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java +++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.examples.temperature; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; @@ -88,7 +89,7 @@ public static void main(final String[] args) { // to group and reduce them, a key is needed ("temp" has been chosen) .selectKey((key, value) -> "temp") .groupByKey() - 
.windowedBy(TimeWindows.of(TimeUnit.SECONDS.toMillis(TEMPERATURE_WINDOW_SIZE))) + .windowedBy(TimeWindows.of(Duration.ofSeconds(TEMPERATURE_WINDOW_SIZE))) .reduce((value1, value2) -> { if (Integer.parseInt(value1) > Integer.parseInt(value2)) return value1; diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java index 4fa38dcf5374f..6f902f8f275fb 100644 --- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java +++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.examples.wordcount; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.KafkaStreams; @@ -63,7 +64,7 @@ public Processor get() { @SuppressWarnings("unchecked") public void init(final ProcessorContext context) { this.context = context; - this.context.schedule(1000, PunctuationType.STREAM_TIME, timestamp -> { + this.context.schedule(Duration.ofMillis(1000), PunctuationType.STREAM_TIME, timestamp -> { try (final KeyValueIterator iter = kvStore.all()) { System.out.println("----------- " + timestamp + " ----------- "); diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/Transformer.java b/streams/src/main/java/org/apache/kafka/streams/kstream/Transformer.java index 0ab34699cf70a..bb602a2441a37 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/Transformer.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/Transformer.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream; +import java.time.Duration; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.processor.ProcessorContext; import 
org.apache.kafka.streams.processor.PunctuationType; @@ -29,7 +30,7 @@ * This is a stateful record-by-record operation, i.e, {@link #transform(Object, Object)} is invoked individually for * each record of a stream and can access and modify a state that is available beyond a single call of * {@link #transform(Object, Object)} (cf. {@link KeyValueMapper} for stateless record transformation). - * Additionally, this {@code Transformer} can {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) schedule} + * Additionally, this {@code Transformer} can {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} * a method to be {@link Punctuator#punctuate(long) called periodically} with the provided context. *

* Use {@link TransformerSupplier} to provide new instances of {@code Transformer} to Kafka Stream's runtime. @@ -55,7 +56,7 @@ public interface Transformer { * framework may later re-use the transformer by calling {@link #init(ProcessorContext)} again. *

* The provided {@link ProcessorContext context} can be used to access topology and record meta data, to - * {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) schedule} a method to be + * {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} a method to be * {@link Punctuator#punctuate(long) called periodically} and to access attached {@link StateStore}s. *

* Note, that {@link ProcessorContext} is updated in the background with the current record's meta data. diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformer.java b/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformer.java index b02311bf95890..987cae591bd13 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformer.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformer.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream; +import java.time.Duration; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.processor.ProcessorContext; @@ -29,7 +30,7 @@ * This is a stateful record-by-record operation, i.e, {@link #transform(Object)} is invoked individually for each * record of a stream and can access and modify a state that is available beyond a single call of * {@link #transform(Object)} (cf. {@link ValueMapper} for stateless value transformation). - * Additionally, this {@code ValueTransformer} can {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) schedule} + * Additionally, this {@code ValueTransformer} can {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} * a method to be {@link Punctuator#punctuate(long) called periodically} with the provided context. * If {@code ValueTransformer} is applied to a {@link KeyValue} pair record the record's key is preserved. *

@@ -54,7 +55,7 @@ public interface ValueTransformer { * framework may later re-use the transformer by calling {@link #init(ProcessorContext)} again. *

* The provided {@link ProcessorContext context} can be used to access topology and record meta data, to - * {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) schedule} a method to be + * {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} a method to be * {@link Punctuator#punctuate(long) called periodically} and to access attached {@link StateStore}s. *

* Note that {@link ProcessorContext} is updated in the background with the current record's meta data. diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformerWithKey.java b/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformerWithKey.java index 20c8692f13ea5..be37b0cfba61b 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformerWithKey.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/ValueTransformerWithKey.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream; +import java.time.Duration; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.processor.ProcessorContext; @@ -30,7 +31,7 @@ * record of a stream and can access and modify a state that is available beyond a single call of * {@link #transform(Object, Object)} (cf. {@link ValueMapper} for stateless value transformation). * Additionally, this {@code ValueTransformerWithKey} can - * {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) schedule} a method to be + * {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} a method to be * {@link Punctuator#punctuate(long) called periodically} with the provided context. * Note that the key is read-only and should not be modified, as this can lead to corrupt partitioning. * If {@code ValueTransformerWithKey} is applied to a {@link KeyValue} pair record the record's key is preserved. @@ -57,7 +58,7 @@ public interface ValueTransformerWithKey { * This is called once per instance when the topology gets initialized. *

* The provided {@link ProcessorContext context} can be used to access topology and record meta data, to - * {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) schedule} a method to be + * {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} a method to be * {@link Punctuator#punctuate(long) called periodically} and to access attached {@link StateStore}s. *

* Note that {@link ProcessorContext} is updated in the background with the current record's meta data. @@ -97,4 +98,4 @@ public interface ValueTransformerWithKey { * will result in an {@link StreamsException exception}. */ void close(); -} \ No newline at end of file +} diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/Cancellable.java b/streams/src/main/java/org/apache/kafka/streams/processor/Cancellable.java index 2e56b560c3cfb..2acb7625ec1d2 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/Cancellable.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/Cancellable.java @@ -16,8 +16,10 @@ */ package org.apache.kafka.streams.processor; +import java.time.Duration; + /** - * Cancellable interface returned in {@link ProcessorContext#schedule(long, PunctuationType, Punctuator)}. + * Cancellable interface returned in {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator)}. * * @see Punctuator */ diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/Processor.java b/streams/src/main/java/org/apache/kafka/streams/processor/Processor.java index e35337f73d0ab..f91f22f0baff8 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/Processor.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/Processor.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor; +import java.time.Duration; import org.apache.kafka.common.annotation.InterfaceStability; /** @@ -33,7 +34,7 @@ public interface Processor { * framework may later re-use the processor by calling {@link #init()} again. *

* The provided {@link ProcessorContext context} can be used to access topology and record meta data, to - * {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) schedule} a method to be + * {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} a method to be * {@link Punctuator#punctuate(long) called periodically} and to access attached {@link StateStore}s. * * @param context the context; may not be null diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/PunctuationType.java b/streams/src/main/java/org/apache/kafka/streams/processor/PunctuationType.java index bc0003da38cf1..32965e815dead 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/PunctuationType.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/PunctuationType.java @@ -16,8 +16,10 @@ */ package org.apache.kafka.streams.processor; +import java.time.Duration; + /** - * Controls what notion of time is used for punctuation scheduled via {@link ProcessorContext#schedule(long, PunctuationType, Punctuator)} schedule}: + * Controls what notion of time is used for punctuation scheduled via {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator)} schedule}: *

    *
  • STREAM_TIME - uses "stream time", which is advanced by the processing of messages * in accordance with the timestamp as extracted by the {@link TimestampExtractor} in use. diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/Punctuator.java b/streams/src/main/java/org/apache/kafka/streams/processor/Punctuator.java index 407270f60388f..1886dad827a35 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/Punctuator.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/Punctuator.java @@ -16,8 +16,10 @@ */ package org.apache.kafka.streams.processor; +import java.time.Duration; + /** - * A functional interface used as an argument to {@link ProcessorContext#schedule(long, PunctuationType, Punctuator)}. + * A functional interface used as an argument to {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator)}. * * @see Cancellable */ diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java index 587ab8fb39673..c4bf13c735de0 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java @@ -158,7 +158,7 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin @Override public Cancellable schedule(Duration interval, PunctuationType type, Punctuator callback) throws IllegalArgumentException { - return schedule(interval.toMillis(), type, callback); + return schedule(interval, type, callback); } void setStreamTimeSupplier(final TimestampSupplier streamTimeSupplier) { diff --git a/streams/src/test/java/org/apache/kafka/streams/TopologyTest.java b/streams/src/test/java/org/apache/kafka/streams/TopologyTest.java index 02840b4641cd6..289cdf0d6dc6c 100644 --- 
a/streams/src/test/java/org/apache/kafka/streams/TopologyTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/TopologyTest.java @@ -47,6 +47,7 @@ import java.util.Set; import java.util.regex.Pattern; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; @@ -737,7 +738,7 @@ public void timeWindowZeroArgCountShouldPreserveTopologyStructure() { final StreamsBuilder builder = new StreamsBuilder(); builder.stream("input-topic") .groupByKey() - .windowedBy(TimeWindows.of(1)) + .windowedBy(TimeWindows.of(ofMillis(1))) .count(); final TopologyDescription describe = builder.build().describe(); assertEquals( @@ -757,7 +758,7 @@ public void timeWindowNamedMaterializedCountShouldPreserveTopologyStructure() { final StreamsBuilder builder = new StreamsBuilder(); builder.stream("input-topic") .groupByKey() - .windowedBy(TimeWindows.of(1)) + .windowedBy(TimeWindows.of(ofMillis(1))) .count(Materialized.as("count-store")); final TopologyDescription describe = builder.build().describe(); assertEquals( @@ -777,7 +778,7 @@ public void timeWindowAnonymousMaterializedCountShouldPreserveTopologyStructure( final StreamsBuilder builder = new StreamsBuilder(); builder.stream("input-topic") .groupByKey() - .windowedBy(TimeWindows.of(1)) + .windowedBy(TimeWindows.of(ofMillis(1))) .count(Materialized.with(null, Serdes.Long())); final TopologyDescription describe = builder.build().describe(); assertEquals( @@ -797,7 +798,7 @@ public void sessionWindowZeroArgCountShouldPreserveTopologyStructure() { final StreamsBuilder builder = new StreamsBuilder(); builder.stream("input-topic") .groupByKey() - .windowedBy(SessionWindows.with(1)) + .windowedBy(SessionWindows.with(ofMillis(1))) .count(); final TopologyDescription describe = builder.build().describe(); assertEquals( @@ -817,7 +818,7 @@ public void 
sessionWindowNamedMaterializedCountShouldPreserveTopologyStructure() final StreamsBuilder builder = new StreamsBuilder(); builder.stream("input-topic") .groupByKey() - .windowedBy(SessionWindows.with(1)) + .windowedBy(SessionWindows.with(ofMillis(1))) .count(Materialized.as("count-store")); final TopologyDescription describe = builder.build().describe(); assertEquals( @@ -837,7 +838,7 @@ public void sessionWindowAnonymousMaterializedCountShouldPreserveTopologyStructu final StreamsBuilder builder = new StreamsBuilder(); builder.stream("input-topic") .groupByKey() - .windowedBy(SessionWindows.with(1)) + .windowedBy(SessionWindows.with(ofMillis(1))) .count(Materialized.with(null, Serdes.Long())); final TopologyDescription describe = builder.build().describe(); assertEquals( diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java index caa02db185d97..0bf78563a28ce 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java @@ -66,6 +66,7 @@ import kafka.tools.StreamsResetter; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -526,7 +527,7 @@ public KeyValue apply(final Long key, final String value) { input.through(INTERMEDIATE_USER_TOPIC) .groupByKey() - .windowedBy(TimeWindows.of(35).advanceBy(10)) + .windowedBy(TimeWindows.of(ofMillis(35)).advanceBy(ofMillis(10))) .count() .toStream() .map(new KeyValueMapper, Long, KeyValue>() { diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java index 153c5a1c5088a..bbd29aad072f4 100644 --- 
a/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java @@ -57,6 +57,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static java.time.Duration.ofMillis; import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.waitForCompletion; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -187,8 +188,8 @@ public void shouldCompactAndDeleteTopicsForWindowStoreChangelogs() throws Except textLines.flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"))) .groupBy(MockMapper.selectValueMapper()) - .windowedBy(TimeWindows.of(1000).grace(0L)) - .count(Materialized.>as("CountWindows").withRetention(2_000L)); + .windowedBy(TimeWindows.of(ofMillis(1000)).grace(ofMillis(0L))) + .count(Materialized.>as("CountWindows").withRetention(ofMillis(2_000L))); final KafkaStreams streams = new KafkaStreams(builder.build(), streamsProp); streams.start(); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationDedupIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationDedupIntegrationTest.java index b51511e19e98a..08aa245b61651 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationDedupIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationDedupIntegrationTest.java @@ -53,6 +53,8 @@ import java.util.List; import java.util.Properties; +import static java.time.Duration.ofMillis; + /** * Similar to KStreamAggregationIntegrationTest but with dedupping enabled * by virtue of having a large commit interval @@ -144,7 +146,7 @@ public void shouldReduceWindowed() throws Exception { produceMessages(secondBatchTimestamp); groupedStream - 
.windowedBy(TimeWindows.of(500L)) + .windowedBy(TimeWindows.of(ofMillis(500L))) .reduce(reducer, Materialized.as("reduce-time-windows")) .toStream((windowedKey, value) -> windowedKey.key() + "@" + windowedKey.window().start()) .to(outputTopic, Produced.with(Serdes.String(), Serdes.String())); @@ -179,7 +181,7 @@ public void shouldGroupByKey() throws Exception { produceMessages(timestamp); stream.groupByKey(Serialized.with(Serdes.Integer(), Serdes.String())) - .windowedBy(TimeWindows.of(500L)) + .windowedBy(TimeWindows.of(ofMillis(500L))) .count(Materialized.as("count-windows")) .toStream((windowedKey, value) -> windowedKey.key() + "@" + windowedKey.window().start()) .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long())); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java index ce6c352af9c38..cabb39dc074da 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.integration; +import java.time.Instant; import kafka.tools.ConsoleConsumer; import kafka.utils.MockTime; import org.apache.kafka.clients.consumer.ConsumerConfig; @@ -85,6 +86,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; @@ -202,7 +204,7 @@ public void shouldReduceWindowed() throws Exception { final Serde> windowedSerde = WindowedSerdes.timeWindowedSerdeFrom(String.class); groupedStream - .windowedBy(TimeWindows.of(500L)) + .windowedBy(TimeWindows.of(ofMillis(500L))) .reduce(reducer) .toStream() .to(outputTopic, 
Produced.with(windowedSerde, Serdes.String())); @@ -307,7 +309,7 @@ public void shouldAggregateWindowed() throws Exception { produceMessages(secondTimestamp); final Serde> windowedSerde = WindowedSerdes.timeWindowedSerdeFrom(String.class); - groupedStream.windowedBy(TimeWindows.of(500L)) + groupedStream.windowedBy(TimeWindows.of(ofMillis(500L))) .aggregate( initializer, aggregator, @@ -427,7 +429,7 @@ public void shouldGroupByKey() throws Exception { produceMessages(timestamp); stream.groupByKey(Serialized.with(Serdes.Integer(), Serdes.String())) - .windowedBy(TimeWindows.of(500L)) + .windowedBy(TimeWindows.of(ofMillis(500L))) .count() .toStream((windowedKey, value) -> windowedKey.key() + "@" + windowedKey.window().start()).to(outputTopic, Produced.with(Serdes.String(), Serdes.Long())); @@ -520,7 +522,7 @@ public void shouldCountSessionWindows() throws Exception { builder.stream(userSessionsStream, Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(SessionWindows.with(sessionGap)) + .windowedBy(SessionWindows.with(ofMillis(sessionGap))) .count() .toStream() .transform(() -> new Transformer, Long, KeyValue>() { @@ -618,7 +620,7 @@ public void shouldReduceSessionWindows() throws Exception { final String userSessionsStore = "UserSessionsStore"; builder.stream(userSessionsStream, Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(SessionWindows.with(sessionGap)) + .windowedBy(SessionWindows.with(ofMillis(sessionGap))) .reduce((value1, value2) -> value1 + ":" + value2, Materialized.as(userSessionsStore)) .toStream() .foreach((key, value) -> { @@ -705,7 +707,7 @@ public void shouldCountUnlimitedWindows() throws Exception { builder.stream(userSessionsStream, Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(UnlimitedWindows.of().startOn(startTime)) + 
.windowedBy(UnlimitedWindows.of().startOn(Instant.ofEpochMilli(startTime))) .count() .toStream() .transform(() -> new Transformer, Long, KeyValue>() { diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java index 97d1071aaa218..5c1e167aaadb9 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.integration; +import java.time.Duration; import kafka.utils.MockTime; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.KafkaProducer; @@ -256,7 +257,7 @@ public Iterable apply(final String value) { // Create a Windowed State Store that contains the word count for every 1 minute groupedByWord - .windowedBy(TimeWindows.of(WINDOW_SIZE)) + .windowedBy(TimeWindows.of(Duration.ofMillis(WINDOW_SIZE))) .count(Materialized.>as(windowStoreName + "-" + inputTopic)) .toStream(new KeyValueMapper, Long, String>() { @Override @@ -695,7 +696,7 @@ private void verifyCanQueryState(final int cacheSizeBytes) throws Exception { final String windowStoreName = "windowed-count"; s1.groupByKey() - .windowedBy(TimeWindows.of(WINDOW_SIZE)) + .windowedBy(TimeWindows.of(Duration.ofMillis(WINDOW_SIZE))) .count(Materialized.>as(windowStoreName)); kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration); kafkaStreams.start(); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java index 5eebf0411f3b0..ace6c6fab3b3e 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java +++ 
b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java @@ -60,6 +60,7 @@ import kafka.utils.MockTime; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; @@ -162,7 +163,7 @@ private void runIntegrationTest(final String optimizationConfig, mappedStream.filter((k, v) -> k.equals("A")) .join(countStream, (v1, v2) -> v1 + ":" + v2.toString(), - JoinWindows.of(5000), + JoinWindows.of(ofMillis(5000)), Joined.with(Serdes.String(), Serdes.String(), Serdes.Long())) .to(JOINED_TOPIC); @@ -432,4 +433,4 @@ public void process(final String key, final String value) { + " Sink: KSTREAM-SINK-0000000028 (topic: outputTopic_2)\n" + " <-- KTABLE-TOSTREAM-0000000027\n\n"; -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java index 38bc7a68c1f66..ff0f9f6dc89f9 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java @@ -32,6 +32,7 @@ import java.util.Collections; import java.util.List; +import static java.time.Duration.ofMillis; /** * Tests all available joins of Kafka Streams DSL. 
@@ -79,7 +80,7 @@ public void testInner() throws Exception { Arrays.asList("D-a", "D-b", "D-c", "D-d") ); - leftStream.join(rightStream, valueJoiner, JoinWindows.of(10000)).to(OUTPUT_TOPIC); + leftStream.join(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -109,7 +110,7 @@ public void testInnerRepartitioned() throws Exception { leftStream.map(MockMapper.noOpKeyValueMapper()) .join(rightStream.flatMap(MockMapper.noOpFlatKeyValueMapper()) .selectKey(MockMapper.selectKeyKeyValueMapper()), - valueJoiner, JoinWindows.of(10000)).to(OUTPUT_TOPIC); + valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -136,7 +137,7 @@ public void testLeft() throws Exception { Arrays.asList("D-a", "D-b", "D-c", "D-d") ); - leftStream.leftJoin(rightStream, valueJoiner, JoinWindows.of(10000)).to(OUTPUT_TOPIC); + leftStream.leftJoin(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -166,7 +167,7 @@ public void testLeftRepartitioned() throws Exception { leftStream.map(MockMapper.noOpKeyValueMapper()) .leftJoin(rightStream.flatMap(MockMapper.noOpFlatKeyValueMapper()) .selectKey(MockMapper.selectKeyKeyValueMapper()), - valueJoiner, JoinWindows.of(10000)).to(OUTPUT_TOPIC); + valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -193,7 +194,7 @@ public void testOuter() throws Exception { Arrays.asList("D-a", "D-b", "D-c", "D-d") ); - leftStream.outerJoin(rightStream, valueJoiner, JoinWindows.of(10000)).to(OUTPUT_TOPIC); + leftStream.outerJoin(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -223,7 +224,7 @@ public void testOuterRepartitioned() throws Exception { leftStream.map(MockMapper.noOpKeyValueMapper()) .outerJoin(rightStream.flatMap(MockMapper.noOpFlatKeyValueMapper()) .selectKey(MockMapper.selectKeyKeyValueMapper()), - valueJoiner, 
JoinWindows.of(10000)).to(OUTPUT_TOPIC); + valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -254,8 +255,8 @@ public void testMultiInner() throws Exception { "D-c-b", "D-c-c", "D-c-d", "D-d-a", "D-d-b", "D-d-c", "D-d-d") ); - leftStream.join(rightStream, valueJoiner, JoinWindows.of(10000)) - .join(rightStream, valueJoiner, JoinWindows.of(10000)).to(OUTPUT_TOPIC); + leftStream.join(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))) + .join(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); runTest(expectedResult); } diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/JoinWindowsTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/JoinWindowsTest.java index de635b441f428..1b3c1f1b7dae1 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/JoinWindowsTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/JoinWindowsTest.java @@ -18,6 +18,7 @@ import org.junit.Test; +import static java.time.Duration.ofMillis; import static org.apache.kafka.streams.EqualityCheck.verifyEquality; import static org.apache.kafka.streams.EqualityCheck.verifyInEquality; import static org.junit.Assert.assertEquals; @@ -31,29 +32,29 @@ public class JoinWindowsTest { @Test public void validWindows() { - JoinWindows.of(ANY_OTHER_SIZE) // [ -anyOtherSize ; anyOtherSize ] - .before(ANY_SIZE) // [ -anySize ; anyOtherSize ] - .before(0) // [ 0 ; anyOtherSize ] - .before(-ANY_SIZE) // [ anySize ; anyOtherSize ] - .before(-ANY_OTHER_SIZE); // [ anyOtherSize ; anyOtherSize ] - - JoinWindows.of(ANY_OTHER_SIZE) // [ -anyOtherSize ; anyOtherSize ] - .after(ANY_SIZE) // [ -anyOtherSize ; anySize ] - .after(0) // [ -anyOtherSize ; 0 ] - .after(-ANY_SIZE) // [ -anyOtherSize ; -anySize ] - .after(-ANY_OTHER_SIZE); // [ -anyOtherSize ; -anyOtherSize ] + JoinWindows.of(ofMillis(ANY_OTHER_SIZE)) // [ -anyOtherSize ; anyOtherSize ] + .before(ofMillis(ANY_SIZE)) // [ -anySize ; 
anyOtherSize ] + .before(ofMillis(0)) // [ 0 ; anyOtherSize ] + .before(ofMillis(-ANY_SIZE)) // [ anySize ; anyOtherSize ] + .before(ofMillis(-ANY_OTHER_SIZE)); // [ anyOtherSize ; anyOtherSize ] + + JoinWindows.of(ofMillis(ANY_OTHER_SIZE)) // [ -anyOtherSize ; anyOtherSize ] + .after(ofMillis(ANY_SIZE)) // [ -anyOtherSize ; anySize ] + .after(ofMillis(0)) // [ -anyOtherSize ; 0 ] + .after(ofMillis(-ANY_SIZE)) // [ -anyOtherSize ; -anySize ] + .after(ofMillis(-ANY_OTHER_SIZE)); // [ -anyOtherSize ; -anyOtherSize ] } @Test(expected = IllegalArgumentException.class) public void timeDifferenceMustNotBeNegative() { - JoinWindows.of(-1); + JoinWindows.of(ofMillis(-1)); } @Test public void endTimeShouldNotBeBeforeStart() { - final JoinWindows windowSpec = JoinWindows.of(ANY_SIZE); + final JoinWindows windowSpec = JoinWindows.of(ofMillis(ANY_SIZE)); try { - windowSpec.after(-ANY_SIZE - 1); + windowSpec.after(ofMillis(-ANY_SIZE - 1)); fail("window end time should not be before window start time"); } catch (final IllegalArgumentException e) { // expected @@ -62,9 +63,9 @@ public void endTimeShouldNotBeBeforeStart() { @Test public void startTimeShouldNotBeAfterEnd() { - final JoinWindows windowSpec = JoinWindows.of(ANY_SIZE); + final JoinWindows windowSpec = JoinWindows.of(ofMillis(ANY_SIZE)); try { - windowSpec.before(-ANY_SIZE - 1); + windowSpec.before(ofMillis(-ANY_SIZE - 1)); fail("window start time should not be after window end time"); } catch (final IllegalArgumentException e) { // expected @@ -74,7 +75,7 @@ public void startTimeShouldNotBeAfterEnd() { @Deprecated @Test public void untilShouldSetMaintainDuration() { - final JoinWindows windowSpec = JoinWindows.of(ANY_SIZE); + final JoinWindows windowSpec = JoinWindows.of(ofMillis(ANY_SIZE)); final long windowSize = windowSpec.size(); assertEquals(windowSize, windowSpec.until(windowSize).maintainMs()); } @@ -82,7 +83,7 @@ public void untilShouldSetMaintainDuration() { @Deprecated @Test public void 
retentionTimeMustNoBeSmallerThanWindowSize() { - final JoinWindows windowSpec = JoinWindows.of(ANY_SIZE); + final JoinWindows windowSpec = JoinWindows.of(ofMillis(ANY_SIZE)); final long windowSize = windowSpec.size(); try { windowSpec.until(windowSize - 1); @@ -94,10 +95,10 @@ public void retentionTimeMustNoBeSmallerThanWindowSize() { @Test public void gracePeriodShouldEnforceBoundaries() { - JoinWindows.of(3L).grace(0L); + JoinWindows.of(ofMillis(3L)).grace(ofMillis(0L)); try { - JoinWindows.of(3L).grace(-1L); + JoinWindows.of(ofMillis(3L)).grace(ofMillis(-1L)); fail("should not accept negatives"); } catch (final IllegalArgumentException e) { //expected @@ -106,58 +107,58 @@ public void gracePeriodShouldEnforceBoundaries() { @Test public void equalsAndHashcodeShouldBeValidForPositiveCases() { - verifyEquality(JoinWindows.of(3), JoinWindows.of(3)); + verifyEquality(JoinWindows.of(ofMillis(3)), JoinWindows.of(ofMillis(3))); - verifyEquality(JoinWindows.of(3).after(2), JoinWindows.of(3).after(2)); + verifyEquality(JoinWindows.of(ofMillis(3)).after(ofMillis(2)), JoinWindows.of(ofMillis(3)).after(ofMillis(2))); - verifyEquality(JoinWindows.of(3).before(2), JoinWindows.of(3).before(2)); + verifyEquality(JoinWindows.of(ofMillis(3)).before(ofMillis(2)), JoinWindows.of(ofMillis(3)).before(ofMillis(2))); - verifyEquality(JoinWindows.of(3).grace(2), JoinWindows.of(3).grace(2)); + verifyEquality(JoinWindows.of(ofMillis(3)).grace(ofMillis(2)), JoinWindows.of(ofMillis(3)).grace(ofMillis(2))); - verifyEquality(JoinWindows.of(3).until(60), JoinWindows.of(3).until(60)); + verifyEquality(JoinWindows.of(ofMillis(3)).until(60), JoinWindows.of(ofMillis(3)).until(60)); verifyEquality( - JoinWindows.of(3).before(1).after(2).grace(3).until(60), - JoinWindows.of(3).before(1).after(2).grace(3).until(60) + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60), + 
JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60) ); // JoinWindows is a little weird in that before and after set the same fields as of. verifyEquality( - JoinWindows.of(9).before(1).after(2).grace(3).until(60), - JoinWindows.of(3).before(1).after(2).grace(3).until(60) + JoinWindows.of(ofMillis(9)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60), + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60) ); } @Test public void equalsAndHashcodeShouldBeValidForNegativeCases() { - verifyInEquality(JoinWindows.of(9), JoinWindows.of(3)); + verifyInEquality(JoinWindows.of(ofMillis(9)), JoinWindows.of(ofMillis(3))); - verifyInEquality(JoinWindows.of(3).after(9), JoinWindows.of(3).after(2)); + verifyInEquality(JoinWindows.of(ofMillis(3)).after(ofMillis(9)), JoinWindows.of(ofMillis(3)).after(ofMillis(2))); - verifyInEquality(JoinWindows.of(3).before(9), JoinWindows.of(3).before(2)); + verifyInEquality(JoinWindows.of(ofMillis(3)).before(ofMillis(9)), JoinWindows.of(ofMillis(3)).before(ofMillis(2))); - verifyInEquality(JoinWindows.of(3).grace(9), JoinWindows.of(3).grace(2)); + verifyInEquality(JoinWindows.of(ofMillis(3)).grace(ofMillis(9)), JoinWindows.of(ofMillis(3)).grace(ofMillis(2))); - verifyInEquality(JoinWindows.of(3).until(90), JoinWindows.of(3).until(60)); + verifyInEquality(JoinWindows.of(ofMillis(3)).until(90), JoinWindows.of(ofMillis(3)).until(60)); verifyInEquality( - JoinWindows.of(3).before(9).after(2).grace(3).until(60), - JoinWindows.of(3).before(1).after(2).grace(3).until(60) + JoinWindows.of(ofMillis(3)).before(ofMillis(9)).after(ofMillis(2)).grace(ofMillis(3)).until(60), + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60) ); verifyInEquality( - JoinWindows.of(3).before(1).after(9).grace(3).until(60), - JoinWindows.of(3).before(1).after(2).grace(3).until(60) + 
JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(9)).grace(ofMillis(3)).until(60), + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60) ); verifyInEquality( - JoinWindows.of(3).before(1).after(2).grace(9).until(60), - JoinWindows.of(3).before(1).after(2).grace(3).until(60) + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(9)).until(60), + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60) ); verifyInEquality( - JoinWindows.of(3).before(1).after(2).grace(3).until(90), - JoinWindows.of(3).before(1).after(2).grace(3).until(60) + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(90), + JoinWindows.of(ofMillis(3)).before(ofMillis(1)).after(ofMillis(2)).grace(ofMillis(3)).until(60) ); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/SessionWindowsTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/SessionWindowsTest.java index 9f99be45cbe8e..6fc0caca96020 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/SessionWindowsTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/SessionWindowsTest.java @@ -18,6 +18,7 @@ import org.junit.Test; +import static java.time.Duration.ofMillis; import static org.apache.kafka.streams.EqualityCheck.verifyEquality; import static org.apache.kafka.streams.EqualityCheck.verifyInEquality; import static org.junit.Assert.assertEquals; @@ -28,23 +29,23 @@ public class SessionWindowsTest { @Test public void shouldSetWindowGap() { final long anyGap = 42L; - assertEquals(anyGap, SessionWindows.with(anyGap).inactivityGap()); + assertEquals(anyGap, SessionWindows.with(ofMillis(anyGap)).inactivityGap()); } @Deprecated @Test public void shouldSetWindowRetentionTime() { final long anyRetentionTime = 42L; - assertEquals(anyRetentionTime, 
SessionWindows.with(1).until(anyRetentionTime).maintainMs()); + assertEquals(anyRetentionTime, SessionWindows.with(ofMillis(1)).until(anyRetentionTime).maintainMs()); } @Test public void gracePeriodShouldEnforceBoundaries() { - SessionWindows.with(3L).grace(0L); + SessionWindows.with(ofMillis(3L)).grace(ofMillis(0)); try { - SessionWindows.with(3L).grace(-1L); + SessionWindows.with(ofMillis(3L)).grace(ofMillis(-1L)); fail("should not accept negatives"); } catch (final IllegalArgumentException e) { //expected @@ -53,25 +54,25 @@ public void gracePeriodShouldEnforceBoundaries() { @Test(expected = IllegalArgumentException.class) public void windowSizeMustNotBeNegative() { - SessionWindows.with(-1); + SessionWindows.with(ofMillis(-1)); } @Test(expected = IllegalArgumentException.class) public void windowSizeMustNotBeZero() { - SessionWindows.with(0); + SessionWindows.with(ofMillis(0)); } @SuppressWarnings("deprecation") // specifically testing deprecated apis @Test public void retentionTimeShouldBeGapIfGapIsLargerThanDefaultRetentionTime() { - final long windowGap = 2 * SessionWindows.with(1).maintainMs(); - assertEquals(windowGap, SessionWindows.with(windowGap).maintainMs()); + final long windowGap = 2 * SessionWindows.with(ofMillis(1)).maintainMs(); + assertEquals(windowGap, SessionWindows.with(ofMillis(windowGap)).maintainMs()); } @Deprecated @Test public void retentionTimeMustNotBeNegative() { - final SessionWindows windowSpec = SessionWindows.with(42); + final SessionWindows windowSpec = SessionWindows.with(ofMillis(42)); try { windowSpec.until(41); fail("should not accept retention time smaller than gap"); @@ -82,27 +83,27 @@ public void retentionTimeMustNotBeNegative() { @Test public void equalsAndHashcodeShouldBeValidForPositiveCases() { - verifyEquality(SessionWindows.with(1), SessionWindows.with(1)); + verifyEquality(SessionWindows.with(ofMillis(1)), SessionWindows.with(ofMillis(1))); - verifyEquality(SessionWindows.with(1).grace(6), 
SessionWindows.with(1).grace(6)); + verifyEquality(SessionWindows.with(ofMillis(1)).grace(ofMillis(6)), SessionWindows.with(ofMillis(1)).grace(ofMillis(6))); - verifyEquality(SessionWindows.with(1).until(7), SessionWindows.with(1).until(7)); + verifyEquality(SessionWindows.with(ofMillis(1)).until(7), SessionWindows.with(ofMillis(1)).until(7)); - verifyEquality(SessionWindows.with(1).grace(6).until(7), SessionWindows.with(1).grace(6).until(7)); + verifyEquality(SessionWindows.with(ofMillis(1)).grace(ofMillis(6)).until(7), SessionWindows.with(ofMillis(1)).grace(ofMillis(6)).until(7)); } @Test public void equalsAndHashcodeShouldBeValidForNegativeCases() { - verifyInEquality(SessionWindows.with(9), SessionWindows.with(1)); + verifyInEquality(SessionWindows.with(ofMillis(9)), SessionWindows.with(ofMillis(1))); - verifyInEquality(SessionWindows.with(1).grace(9), SessionWindows.with(1).grace(6)); + verifyInEquality(SessionWindows.with(ofMillis(1)).grace(ofMillis(9)), SessionWindows.with(ofMillis(1)).grace(ofMillis(6))); - verifyInEquality(SessionWindows.with(1).until(9), SessionWindows.with(1).until(7)); + verifyInEquality(SessionWindows.with(ofMillis(1)).until(9), SessionWindows.with(ofMillis(1)).until(7)); - verifyInEquality(SessionWindows.with(2).grace(6).until(7), SessionWindows.with(1).grace(6).until(7)); + verifyInEquality(SessionWindows.with(ofMillis(2)).grace(ofMillis(6)).until(7), SessionWindows.with(ofMillis(1)).grace(ofMillis(6)).until(7)); - verifyInEquality(SessionWindows.with(1).grace(0).until(7), SessionWindows.with(1).grace(6).until(7)); + verifyInEquality(SessionWindows.with(ofMillis(1)).grace(ofMillis(0)).until(7), SessionWindows.with(ofMillis(1)).grace(ofMillis(6)).until(7)); - verifyInEquality(SessionWindows.with(1).grace(6).until(70), SessionWindows.with(1).grace(6).until(7)); + verifyInEquality(SessionWindows.with(ofMillis(1)).grace(ofMillis(6)).until(70), SessionWindows.with(ofMillis(1)).grace(ofMillis(6)).until(7)); } -} \ No newline at end of file 
+} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/TimeWindowsTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/TimeWindowsTest.java index 9010bb2c27754..b87a99284979d 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/TimeWindowsTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/TimeWindowsTest.java @@ -21,6 +21,7 @@ import java.util.Map; +import static java.time.Duration.ofMillis; import static org.apache.kafka.streams.EqualityCheck.verifyEquality; import static org.apache.kafka.streams.EqualityCheck.verifyInEquality; import static org.junit.Assert.assertEquals; @@ -33,43 +34,43 @@ public class TimeWindowsTest { @Test public void shouldSetWindowSize() { - assertEquals(ANY_SIZE, TimeWindows.of(ANY_SIZE).sizeMs); + assertEquals(ANY_SIZE, TimeWindows.of(ofMillis(ANY_SIZE)).sizeMs); } @Test public void shouldSetWindowAdvance() { final long anyAdvance = 4; - assertEquals(anyAdvance, TimeWindows.of(ANY_SIZE).advanceBy(anyAdvance).advanceMs); + assertEquals(anyAdvance, TimeWindows.of(ofMillis(ANY_SIZE)).advanceBy(ofMillis(anyAdvance)).advanceMs); } @SuppressWarnings("deprecation") // specifically testing deprecated APIs @Test public void shouldSetWindowRetentionTime() { - assertEquals(ANY_SIZE, TimeWindows.of(ANY_SIZE).until(ANY_SIZE).maintainMs()); + assertEquals(ANY_SIZE, TimeWindows.of(ofMillis(ANY_SIZE)).until(ANY_SIZE).maintainMs()); } @SuppressWarnings("deprecation") // specifically testing deprecated APIs @Test public void shouldUseWindowSizeAsRentitionTimeIfWindowSizeIsLargerThanDefaultRetentionTime() { - final long windowSize = 2 * TimeWindows.of(1).maintainMs(); - assertEquals(windowSize, TimeWindows.of(windowSize).maintainMs()); + final long windowSize = 2 * TimeWindows.of(ofMillis(1)).maintainMs(); + assertEquals(windowSize, TimeWindows.of(ofMillis(windowSize)).maintainMs()); } @Test(expected = IllegalArgumentException.class) public void windowSizeMustNotBeZero() { - TimeWindows.of(0); 
+ TimeWindows.of(ofMillis(0)); } @Test(expected = IllegalArgumentException.class) public void windowSizeMustNotBeNegative() { - TimeWindows.of(-1); + TimeWindows.of(ofMillis(-1)); } @Test public void advanceIntervalMustNotBeZero() { - final TimeWindows windowSpec = TimeWindows.of(ANY_SIZE); + final TimeWindows windowSpec = TimeWindows.of(ofMillis(ANY_SIZE)); try { - windowSpec.advanceBy(0); + windowSpec.advanceBy(ofMillis(0)); fail("should not accept zero advance parameter"); } catch (final IllegalArgumentException e) { // expected @@ -78,9 +79,9 @@ public void advanceIntervalMustNotBeZero() { @Test public void advanceIntervalMustNotBeNegative() { - final TimeWindows windowSpec = TimeWindows.of(ANY_SIZE); + final TimeWindows windowSpec = TimeWindows.of(ofMillis(ANY_SIZE)); try { - windowSpec.advanceBy(-1); + windowSpec.advanceBy(ofMillis(-1)); fail("should not accept negative advance parameter"); } catch (final IllegalArgumentException e) { // expected @@ -90,9 +91,9 @@ public void advanceIntervalMustNotBeNegative() { @Deprecated @Test public void advanceIntervalMustNotBeLargerThanWindowSize() { - final TimeWindows windowSpec = TimeWindows.of(ANY_SIZE); + final TimeWindows windowSpec = TimeWindows.of(ofMillis(ANY_SIZE)); try { - windowSpec.advanceBy(ANY_SIZE + 1); + windowSpec.advanceBy(ofMillis(ANY_SIZE + 1)); fail("should not accept advance greater than window size"); } catch (final IllegalArgumentException e) { // expected @@ -102,7 +103,7 @@ public void advanceIntervalMustNotBeLargerThanWindowSize() { @Deprecated @Test public void retentionTimeMustNoBeSmallerThanWindowSize() { - final TimeWindows windowSpec = TimeWindows.of(ANY_SIZE); + final TimeWindows windowSpec = TimeWindows.of(ofMillis(ANY_SIZE)); try { windowSpec.until(ANY_SIZE - 1); fail("should not accept retention time smaller than window size"); @@ -113,10 +114,10 @@ public void retentionTimeMustNoBeSmallerThanWindowSize() { @Test public void gracePeriodShouldEnforceBoundaries() { - 
TimeWindows.of(3L).grace(0L); + TimeWindows.of(ofMillis(3L)).grace(ofMillis(0L)); try { - TimeWindows.of(3L).grace(-1L); + TimeWindows.of(ofMillis(3L)).grace(ofMillis(-1L)); fail("should not accept negatives"); } catch (final IllegalArgumentException e) { //expected @@ -125,7 +126,7 @@ public void gracePeriodShouldEnforceBoundaries() { @Test public void shouldComputeWindowsForHoppingWindows() { - final TimeWindows windows = TimeWindows.of(12L).advanceBy(5L); + final TimeWindows windows = TimeWindows.of(ofMillis(12L)).advanceBy(ofMillis(5L)); final Map matched = windows.windowsFor(21L); assertEquals(12L / 5L + 1, matched.size()); assertEquals(new TimeWindow(10L, 22L), matched.get(10L)); @@ -135,7 +136,7 @@ public void shouldComputeWindowsForHoppingWindows() { @Test public void shouldComputeWindowsForBarelyOverlappingHoppingWindows() { - final TimeWindows windows = TimeWindows.of(6L).advanceBy(5L); + final TimeWindows windows = TimeWindows.of(ofMillis(6L)).advanceBy(ofMillis(5L)); final Map matched = windows.windowsFor(7L); assertEquals(1, matched.size()); assertEquals(new TimeWindow(5L, 11L), matched.get(5L)); @@ -143,7 +144,7 @@ public void shouldComputeWindowsForBarelyOverlappingHoppingWindows() { @Test public void shouldComputeWindowsForTumblingWindows() { - final TimeWindows windows = TimeWindows.of(12L); + final TimeWindows windows = TimeWindows.of(ofMillis(12L)); final Map matched = windows.windowsFor(21L); assertEquals(1, matched.size()); assertEquals(new TimeWindow(12L, 24L), matched.get(12L)); @@ -152,49 +153,49 @@ public void shouldComputeWindowsForTumblingWindows() { @Test public void equalsAndHashcodeShouldBeValidForPositiveCases() { - verifyEquality(TimeWindows.of(3), TimeWindows.of(3)); + verifyEquality(TimeWindows.of(ofMillis(3)), TimeWindows.of(ofMillis(3))); - verifyEquality(TimeWindows.of(3).advanceBy(1), TimeWindows.of(3).advanceBy(1)); + verifyEquality(TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(1)), 
TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(1))); - verifyEquality(TimeWindows.of(3).grace(1), TimeWindows.of(3).grace(1)); + verifyEquality(TimeWindows.of(ofMillis(3)).grace(ofMillis(1)), TimeWindows.of(ofMillis(3)).grace(ofMillis(1))); - verifyEquality(TimeWindows.of(3).until(4), TimeWindows.of(3).until(4)); + verifyEquality(TimeWindows.of(ofMillis(3)).until(4), TimeWindows.of(ofMillis(3)).until(4)); verifyEquality( - TimeWindows.of(3).advanceBy(1).grace(1).until(4), - TimeWindows.of(3).advanceBy(1).grace(1).until(4) + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(1)).grace(ofMillis(1)).until(4), + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(1)).grace(ofMillis(1)).until(4) ); } @Test public void equalsAndHashcodeShouldBeValidForNegativeCases() { - verifyInEquality(TimeWindows.of(9), TimeWindows.of(3)); + verifyInEquality(TimeWindows.of(ofMillis(9)), TimeWindows.of(ofMillis(3))); - verifyInEquality(TimeWindows.of(3).advanceBy(2), TimeWindows.of(3).advanceBy(1)); + verifyInEquality(TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(2)), TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(1))); - verifyInEquality(TimeWindows.of(3).grace(2), TimeWindows.of(3).grace(1)); + verifyInEquality(TimeWindows.of(ofMillis(3)).grace(ofMillis(2)), TimeWindows.of(ofMillis(3)).grace(ofMillis(1))); - verifyInEquality(TimeWindows.of(3).until(9), TimeWindows.of(3).until(4)); + verifyInEquality(TimeWindows.of(ofMillis(3)).until(9), TimeWindows.of(ofMillis(3)).until(4)); verifyInEquality( - TimeWindows.of(4).advanceBy(2).grace(2).until(4), - TimeWindows.of(3).advanceBy(2).grace(2).until(4) + TimeWindows.of(ofMillis(4)).advanceBy(ofMillis(2)).grace(ofMillis(2)).until(4), + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(2)).grace(ofMillis(2)).until(4) ); verifyInEquality( - TimeWindows.of(3).advanceBy(1).grace(2).until(4), - TimeWindows.of(3).advanceBy(2).grace(2).until(4) + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(1)).grace(ofMillis(2)).until(4), + 
TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(2)).grace(ofMillis(2)).until(4) ); assertNotEquals( - TimeWindows.of(3).advanceBy(2).grace(1).until(4), - TimeWindows.of(3).advanceBy(2).grace(2).until(4) + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(2)).grace(ofMillis(1)).until(4), + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(2)).grace(ofMillis(2)).until(4) ); assertNotEquals( - TimeWindows.of(3).advanceBy(2).grace(2).until(9), - TimeWindows.of(3).advanceBy(2).grace(2).until(4) + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(2)).grace(ofMillis(2)).until(9), + TimeWindows.of(ofMillis(3)).advanceBy(ofMillis(2)).grace(ofMillis(2)).until(4) ); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/UnlimitedWindowsTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/UnlimitedWindowsTest.java index a1406547e26b6..023e83fdde13a 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/UnlimitedWindowsTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/UnlimitedWindowsTest.java @@ -21,6 +21,7 @@ import java.util.Map; +import static java.time.Instant.ofEpochMilli; import static org.apache.kafka.streams.EqualityCheck.verifyEquality; import static org.apache.kafka.streams.EqualityCheck.verifyInEquality; import static org.junit.Assert.assertEquals; @@ -33,12 +34,12 @@ public class UnlimitedWindowsTest { @Test public void shouldSetWindowStartTime() { - assertEquals(anyStartTime, UnlimitedWindows.of().startOn(anyStartTime).startMs); + assertEquals(anyStartTime, UnlimitedWindows.of().startOn(ofEpochMilli(anyStartTime)).startMs); } @Test(expected = IllegalArgumentException.class) public void startTimeMustNotBeNegative() { - UnlimitedWindows.of().startOn(-1); + UnlimitedWindows.of().startOn(ofEpochMilli(-1)); } @Test @@ -54,7 +55,7 @@ public void shouldThrowOnUntil() { @Test public void shouldIncludeRecordsThatHappenedOnWindowStart() { - final UnlimitedWindows w = 
UnlimitedWindows.of().startOn(anyStartTime); + final UnlimitedWindows w = UnlimitedWindows.of().startOn(ofEpochMilli(anyStartTime)); final Map matchedWindows = w.windowsFor(w.startMs); assertEquals(1, matchedWindows.size()); assertEquals(new UnlimitedWindow(anyStartTime), matchedWindows.get(anyStartTime)); @@ -62,7 +63,7 @@ public void shouldIncludeRecordsThatHappenedOnWindowStart() { @Test public void shouldIncludeRecordsThatHappenedAfterWindowStart() { - final UnlimitedWindows w = UnlimitedWindows.of().startOn(anyStartTime); + final UnlimitedWindows w = UnlimitedWindows.of().startOn(ofEpochMilli(anyStartTime)); final long timestamp = w.startMs + 1; final Map matchedWindows = w.windowsFor(timestamp); assertEquals(1, matchedWindows.size()); @@ -71,7 +72,7 @@ public void shouldIncludeRecordsThatHappenedAfterWindowStart() { @Test public void shouldExcludeRecordsThatHappenedBeforeWindowStart() { - final UnlimitedWindows w = UnlimitedWindows.of().startOn(anyStartTime); + final UnlimitedWindows w = UnlimitedWindows.of().startOn(ofEpochMilli(anyStartTime)); final long timestamp = w.startMs - 1; final Map matchedWindows = w.windowsFor(timestamp); assertTrue(matchedWindows.isEmpty()); @@ -81,13 +82,13 @@ public void shouldExcludeRecordsThatHappenedBeforeWindowStart() { public void equalsAndHashcodeShouldBeValidForPositiveCases() { verifyEquality(UnlimitedWindows.of(), UnlimitedWindows.of()); - verifyEquality(UnlimitedWindows.of().startOn(1), UnlimitedWindows.of().startOn(1)); + verifyEquality(UnlimitedWindows.of().startOn(ofEpochMilli(1)), UnlimitedWindows.of().startOn(ofEpochMilli(1))); } @Test public void equalsAndHashcodeShouldBeValidForNegativeCases() { - verifyInEquality(UnlimitedWindows.of().startOn(9), UnlimitedWindows.of().startOn(1)); + verifyInEquality(UnlimitedWindows.of().startOn(ofEpochMilli(9)), UnlimitedWindows.of().startOn(ofEpochMilli(1))); } -} \ No newline at end of file +} diff --git 
a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KGroupedStreamImplTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KGroupedStreamImplTest.java index a1f8b27f4d62e..18f36aa8bc580 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KGroupedStreamImplTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KGroupedStreamImplTest.java @@ -59,6 +59,7 @@ import java.util.Map; import java.util.Properties; +import static java.time.Duration.ofMillis; import static org.apache.kafka.test.StreamsTestUtils.getMetricByName; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItem; @@ -96,7 +97,7 @@ public void shouldNotHaveInvalidStoreNameOnReduce() { @Test(expected = NullPointerException.class) public void shouldNotHaveNullReducerWithWindowedReduce() { - groupedStream.windowedBy(TimeWindows.of(10)).reduce(null, Materialized.>as("store")); + groupedStream.windowedBy(TimeWindows.of(ofMillis(10))).reduce(null, Materialized.>as("store")); } @Test(expected = NullPointerException.class) @@ -106,7 +107,7 @@ public void shouldNotHaveNullWindowsWithWindowedReduce() { @Test(expected = InvalidTopicException.class) public void shouldNotHaveInvalidStoreNameWithWindowedReduce() { - groupedStream.windowedBy(TimeWindows.of(10)).reduce(MockReducer.STRING_ADDER, Materialized.>as(INVALID_STORE_NAME)); + groupedStream.windowedBy(TimeWindows.of(ofMillis(10))).reduce(MockReducer.STRING_ADDER, Materialized.>as(INVALID_STORE_NAME)); } @Test(expected = NullPointerException.class) @@ -126,12 +127,12 @@ public void shouldNotHaveInvalidStoreNameOnAggregate() { @Test(expected = NullPointerException.class) public void shouldNotHaveNullInitializerOnWindowedAggregate() { - groupedStream.windowedBy(TimeWindows.of(10)).aggregate(null, MockAggregator.TOSTRING_ADDER, Materialized.>as("store")); + groupedStream.windowedBy(TimeWindows.of(ofMillis(10))).aggregate(null, 
MockAggregator.TOSTRING_ADDER, Materialized.>as("store")); } @Test(expected = NullPointerException.class) public void shouldNotHaveNullAdderOnWindowedAggregate() { - groupedStream.windowedBy(TimeWindows.of(10)).aggregate(MockInitializer.STRING_INIT, null, Materialized.>as("store")); + groupedStream.windowedBy(TimeWindows.of(ofMillis(10))).aggregate(MockInitializer.STRING_INIT, null, Materialized.>as("store")); } @Test(expected = NullPointerException.class) @@ -141,7 +142,7 @@ public void shouldNotHaveNullWindowsOnWindowedAggregate() { @Test(expected = InvalidTopicException.class) public void shouldNotHaveInvalidStoreNameOnWindowedAggregate() { - groupedStream.windowedBy(TimeWindows.of(10)).aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.>as(INVALID_STORE_NAME)); + groupedStream.windowedBy(TimeWindows.of(ofMillis(10))).aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.>as(INVALID_STORE_NAME)); } private void doAggregateSessionWindows(final Map, Integer> results) { @@ -161,7 +162,7 @@ private void doAggregateSessionWindows(final Map, Integer> resu @Test public void shouldAggregateSessionWindows() { final Map, Integer> results = new HashMap<>(); - final KTable, Integer> table = groupedStream.windowedBy(SessionWindows.with(30)).aggregate(new Initializer() { + final KTable, Integer> table = groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).aggregate(new Initializer() { @Override public Integer apply() { return 0; @@ -191,7 +192,7 @@ public void apply(final Windowed key, final Integer value) { @Test public void shouldAggregateSessionWindowsWithInternalStoreName() { final Map, Integer> results = new HashMap<>(); - final KTable, Integer> table = groupedStream.windowedBy(SessionWindows.with(30)).aggregate(new Initializer() { + final KTable, Integer> table = groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).aggregate(new Initializer() { @Override public Integer apply() { return 0; @@ 
-234,7 +235,7 @@ private void doCountSessionWindows(final Map, Long> results) { @Test public void shouldCountSessionWindows() { final Map, Long> results = new HashMap<>(); - final KTable, Long> table = groupedStream.windowedBy(SessionWindows.with(30)) + final KTable, Long> table = groupedStream.windowedBy(SessionWindows.with(ofMillis(30))) .count(Materialized.>as("session-store")); table.toStream().foreach(new ForeachAction, Long>() { @Override @@ -249,7 +250,7 @@ public void apply(final Windowed key, final Long value) { @Test public void shouldCountSessionWindowsWithInternalStoreName() { final Map, Long> results = new HashMap<>(); - final KTable, Long> table = groupedStream.windowedBy(SessionWindows.with(30)).count(); + final KTable, Long> table = groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).count(); table.toStream().foreach(new ForeachAction, Long>() { @Override public void apply(final Windowed key, final Long value) { @@ -277,7 +278,7 @@ private void doReduceSessionWindows(final Map, String> results) @Test public void shouldReduceSessionWindows() { final Map, String> results = new HashMap<>(); - final KTable, String> table = groupedStream.windowedBy(SessionWindows.with(30)) + final KTable, String> table = groupedStream.windowedBy(SessionWindows.with(ofMillis(30))) .reduce(new Reducer() { @Override public String apply(final String value1, final String value2) { @@ -297,7 +298,7 @@ public void apply(final Windowed key, final String value) { @Test public void shouldReduceSessionWindowsWithInternalStoreName() { final Map, String> results = new HashMap<>(); - final KTable, String> table = groupedStream.windowedBy(SessionWindows.with(30)) + final KTable, String> table = groupedStream.windowedBy(SessionWindows.with(ofMillis(30))) .reduce(new Reducer() { @Override public String apply(final String value1, final String value2) { @@ -316,7 +317,7 @@ public void apply(final Windowed key, final String value) { @Test(expected = NullPointerException.class) 
public void shouldNotAcceptNullReducerWhenReducingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(30)).reduce(null, Materialized.>as("store")); + groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).reduce(null, Materialized.>as("store")); } @Test(expected = NullPointerException.class) @@ -326,17 +327,17 @@ public void shouldNotAcceptNullSessionWindowsReducingSessionWindows() { @Test(expected = InvalidTopicException.class) public void shouldNotAcceptInvalidStoreNameWhenReducingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(30)).reduce(MockReducer.STRING_ADDER, Materialized.>as(INVALID_STORE_NAME)); + groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).reduce(MockReducer.STRING_ADDER, Materialized.>as(INVALID_STORE_NAME)); } @Test(expected = NullPointerException.class) public void shouldNotAcceptNullStateStoreSupplierWhenReducingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(30)).reduce(null, Materialized.>as(null)); + groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).reduce(null, Materialized.>as(null)); } @Test(expected = NullPointerException.class) public void shouldNotAcceptNullInitializerWhenAggregatingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(30)).aggregate(null, MockAggregator.TOSTRING_ADDER, new Merger() { + groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).aggregate(null, MockAggregator.TOSTRING_ADDER, new Merger() { @Override public String apply(final String aggKey, final String aggOne, final String aggTwo) { return null; @@ -346,7 +347,7 @@ public String apply(final String aggKey, final String aggOne, final String aggTw @Test(expected = NullPointerException.class) public void shouldNotAcceptNullAggregatorWhenAggregatingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(30)).aggregate(MockInitializer.STRING_INIT, null, new Merger() { + 
groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).aggregate(MockInitializer.STRING_INIT, null, new Merger() { @Override public String apply(final String aggKey, final String aggOne, final String aggTwo) { return null; @@ -356,7 +357,7 @@ public String apply(final String aggKey, final String aggOne, final String aggTw @Test(expected = NullPointerException.class) public void shouldNotAcceptNullSessionMergerWhenAggregatingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(30)).aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, + groupedStream.windowedBy(SessionWindows.with(ofMillis(30))).aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, null, Materialized.>as("storeName")); } @@ -368,7 +369,7 @@ public void shouldNotAcceptNullSessionWindowsWhenAggregatingSessionWindows() { @Test public void shouldAcceptNullStoreNameWhenAggregatingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(10)) + groupedStream.windowedBy(SessionWindows.with(ofMillis(10))) .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, new Merger() { @Override public String apply(final String aggKey, final String aggOne, final String aggTwo) { @@ -379,7 +380,7 @@ public String apply(final String aggKey, final String aggOne, final String aggTw @Test(expected = InvalidTopicException.class) public void shouldNotAcceptInvalidStoreNameWhenAggregatingSessionWindows() { - groupedStream.windowedBy(SessionWindows.with(10)) + groupedStream.windowedBy(SessionWindows.with(ofMillis(10))) .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, new Merger() { @Override public String apply(final String aggKey, final String aggOne, final String aggTwo) { @@ -558,7 +559,7 @@ private void doCountWindowed(final List, Long>> result @Test public void shouldCountWindowed() { final List, Long>> results = new ArrayList<>(); - 
groupedStream.windowedBy(TimeWindows.of(500L)).count(Materialized.>as("aggregate-by-key-windowed")) + groupedStream.windowedBy(TimeWindows.of(ofMillis(500L))).count(Materialized.>as("aggregate-by-key-windowed")) .toStream() .foreach(new ForeachAction, Long>() { @Override @@ -573,7 +574,7 @@ public void apply(final Windowed key, final Long value) { @Test public void shouldCountWindowedWithInternalStoreName() { final List, Long>> results = new ArrayList<>(); - groupedStream.windowedBy(TimeWindows.of(500L)).count() + groupedStream.windowedBy(TimeWindows.of(ofMillis(500L))).count() .toStream() .foreach(new ForeachAction, Long>() { @Override @@ -584,4 +585,4 @@ public void apply(final Windowed key, final Long value) { doCountWindowed(results); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamImplTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamImplTest.java index bce7fc80a407e..ebfc07427475f 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamImplTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamImplTest.java @@ -57,6 +57,7 @@ import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.junit.Assert.assertEquals; @@ -154,14 +155,14 @@ public boolean test(final String key, final Integer value) { public Integer apply(final Integer value1, final Integer value2) { return value1 + value2; } - }, JoinWindows.of(anyWindowSize), joined); + }, JoinWindows.of(ofMillis(anyWindowSize)), joined); streams2[1].join(streams3[1], new ValueJoiner() { @Override public Integer apply(final Integer value1, final Integer value2) { return value1 + value2; } - }, JoinWindows.of(anyWindowSize), joined); + }, 
JoinWindows.of(ofMillis(anyWindowSize)), joined); stream4.to("topic-5"); @@ -260,7 +261,7 @@ public void shouldUseRecordMetadataTimestampExtractorWhenInternalRepartitioningT }); stream.join(kStream, valueJoiner, - JoinWindows.of(windowSize).until(3 * windowSize), + JoinWindows.of(ofMillis(windowSize)).until(3 * windowSize), Joined.with(Serdes.String(), Serdes.String(), Serdes.String())) @@ -295,7 +296,7 @@ public void shouldUseRecordMetadataTimestampExtractorWhenInternalRepartitioningT stream.join( kStream, valueJoiner, - JoinWindows.of(windowSize).grace(3L * windowSize), + JoinWindows.of(ofMillis(windowSize)).grace(ofMillis(3L * windowSize)), Joined.with(Serdes.String(), Serdes.String(), Serdes.String()) ) .to("output-topic", Produced.with(Serdes.String(), Serdes.String())); @@ -414,12 +415,12 @@ public void shouldNotAllowNullProcessSupplier() { @Test(expected = NullPointerException.class) public void shouldNotAllowNullOtherStreamOnJoin() { - testStream.join(null, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(10)); + testStream.join(null, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(ofMillis(10))); } @Test(expected = NullPointerException.class) public void shouldNotAllowNullValueJoinerOnJoin() { - testStream.join(testStream, null, JoinWindows.of(10)); + testStream.join(testStream, null, JoinWindows.of(ofMillis(10))); } @Test(expected = NullPointerException.class) @@ -532,12 +533,12 @@ public void shouldThrowNullPointerOnJoinWithTableWhenJoinedIsNull() { @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnJoinWithStreamWhenJoinedIsNull() { - testStream.join(testStream, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(10), null); + testStream.join(testStream, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(ofMillis(10)), null); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnOuterJoinJoinedIsNull() { - testStream.outerJoin(testStream, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(10), null); + 
testStream.outerJoin(testStream, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(ofMillis(10)), null); } @Test diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java index 971ee62d284bd..dac26ec58da8d 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java @@ -41,6 +41,7 @@ import java.util.Properties; import java.util.Set; +import static java.time.Duration.ofMillis; import static org.apache.kafka.test.StreamsTestUtils.getMetricByName; import static org.hamcrest.CoreMatchers.hasItem; import static org.junit.Assert.assertEquals; @@ -71,9 +72,9 @@ public Integer apply(final Integer value1, final Integer value2) { return value1 + value2; } }, - JoinWindows.of(100), + JoinWindows.of(ofMillis(100)), Joined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer()) - ); + ) final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) { @@ -101,7 +102,7 @@ public void testJoin() { joined = stream1.join( stream2, MockValueJoiner.TOSTRING_JOINER, - JoinWindows.of(100), + JoinWindows.of(ofMillis(100)), Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String())); joined.process(supplier); @@ -204,7 +205,7 @@ public void testOuterJoin() { joined = stream1.outerJoin( stream2, MockValueJoiner.TOSTRING_JOINER, - JoinWindows.of(100), + JoinWindows.of(ofMillis(100)), Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String())); joined.process(supplier); final Collection> copartitionGroups = TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups(); @@ -308,7 +309,7 @@ public void testWindowing() { joined = stream1.join( stream2, MockValueJoiner.TOSTRING_JOINER, - 
JoinWindows.of(100), + JoinWindows.of(ofMillis(100)), Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String())); joined.process(supplier); @@ -529,7 +530,7 @@ public void testAsymmetricWindowingAfter() { joined = stream1.join( stream2, MockValueJoiner.TOSTRING_JOINER, - JoinWindows.of(0).after(100), + JoinWindows.of(ofMillis(0)).after(ofMillis(100)), Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String())); @@ -640,7 +641,7 @@ public void testAsymmetricWindowingBefore() { joined = stream1.join( stream2, MockValueJoiner.TOSTRING_JOINER, - JoinWindows.of(0).before(100), + JoinWindows.of(ofMillis(0)).before(ofMillis(100)), Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String())); joined.process(supplier); diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamLeftJoinTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamLeftJoinTest.java index 856de3d85c8db..b019411e3ff84 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamLeftJoinTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamLeftJoinTest.java @@ -39,6 +39,7 @@ import java.util.Properties; import java.util.Set; +import static java.time.Duration.ofMillis; import static org.junit.Assert.assertEquals; public class KStreamKStreamLeftJoinTest { @@ -65,7 +66,7 @@ public void testLeftJoin() { joined = stream1.leftJoin(stream2, MockValueJoiner.TOSTRING_JOINER, - JoinWindows.of(100), + JoinWindows.of(ofMillis(100)), Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String())); joined.process(supplier); @@ -151,7 +152,7 @@ public void testWindowing() { joined = stream1.leftJoin(stream2, MockValueJoiner.TOSTRING_JOINER, - JoinWindows.of(100), + JoinWindows.of(ofMillis(100)), Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String())); joined.process(supplier); diff --git 
a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java index c7fd7cdc32a1a..8598b8027dd31 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java @@ -50,6 +50,7 @@ import java.util.Arrays; import java.util.List; +import static java.time.Duration.ofMillis; import static org.apache.kafka.common.utils.Utils.mkEntry; import static org.apache.kafka.common.utils.Utils.mkMap; import static org.apache.kafka.test.StreamsTestUtils.getMetricByName; @@ -69,7 +70,7 @@ public class KStreamSessionWindowAggregateProcessorTest { private final Merger sessionMerger = (aggKey, aggOne, aggTwo) -> aggOne + aggTwo; private final KStreamSessionWindowAggregate sessionAggregator = new KStreamSessionWindowAggregate<>( - SessionWindows.with(GAP_MS), + SessionWindows.with(ofMillis(GAP_MS)), STORE_NAME, initializer, aggregator, @@ -322,7 +323,7 @@ public void shouldLogAndMeterWhenSkippingLateRecord() { LogCaptureAppender.setClassLoggerToDebug(KStreamSessionWindowAggregate.class); final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); final Processor processor = new KStreamSessionWindowAggregate<>( - SessionWindows.with(10L).grace(10L), + SessionWindows.with(ofMillis(10L)).grace(ofMillis(10L)), STORE_NAME, initializer, aggregator, @@ -349,4 +350,4 @@ public void shouldLogAndMeterWhenSkippingLateRecord() { assertEquals(1.0, dropMetric.metricValue()); assertThat(appender.getMessages(), hasItem("Skipping record for expired window. 
key=[A] topic=[topic] partition=[-3] offset=[-2] timestamp=[0] window=[0,0) expiration=[10]")); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamTransformTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamTransformTest.java index a8ee681cb8c5a..74ad19cb2d5ad 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamTransformTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamTransformTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream.internals; +import java.time.Duration; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.kstream.Consumed; @@ -112,7 +113,7 @@ public Transformer> get() { @Override public void init(final ProcessorContext context) { - context.schedule(1, PunctuationType.WALL_CLOCK_TIME, new Punctuator() { + context.schedule(Duration.ofMillis(1), PunctuationType.WALL_CLOCK_TIME, new Punctuator() { @Override public void punctuate(final long timestamp) { context.forward(-1, (int) timestamp); diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowAggregateTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowAggregateTest.java index 5a295b8a50a83..8ae628472d3ef 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowAggregateTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowAggregateTest.java @@ -47,6 +47,7 @@ import java.util.List; import java.util.Properties; +import static java.time.Duration.ofMillis; import static org.apache.kafka.common.utils.Utils.mkEntry; import static org.apache.kafka.common.utils.Utils.mkMap; import static org.apache.kafka.test.StreamsTestUtils.getMetricByName; @@ -70,7 +71,7 @@ public void 
testAggBasic() { final KTable, String> table2 = builder .stream(topic1, Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(10).advanceBy(5)) + .windowedBy(TimeWindows.of(ofMillis(10)).advanceBy(ofMillis(5))) .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.>as("topic1-Canonized").withValueSerde(Serdes.String())); final MockProcessorSupplier, String> supplier = new MockProcessorSupplier<>(); @@ -128,7 +129,7 @@ public void testJoin() { final KTable, String> table1 = builder .stream(topic1, Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(10).advanceBy(5)) + .windowedBy(TimeWindows.of(ofMillis(10)).advanceBy(ofMillis(5))) .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.>as("topic1-Canonized").withValueSerde(Serdes.String())); final MockProcessorSupplier, String> supplier = new MockProcessorSupplier<>(); @@ -137,7 +138,7 @@ public void testJoin() { final KTable, String> table2 = builder .stream(topic2, Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(10).advanceBy(5)) + .windowedBy(TimeWindows.of(ofMillis(10)).advanceBy(ofMillis(5))) .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.>as("topic2-Canonized").withValueSerde(Serdes.String())); table2.toStream().process(supplier); @@ -232,7 +233,7 @@ public void shouldLogAndMeterWhenSkippingNullKey() { final KStream stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String())); stream1.groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(10).advanceBy(5)) + .windowedBy(TimeWindows.of(ofMillis(10)).advanceBy(ofMillis(5))) .aggregate( MockInitializer.STRING_INIT, 
MockAggregator.toStringInstance("+"), @@ -257,7 +258,7 @@ public void shouldLogAndMeterWhenSkippingExpiredWindow() { final KStream stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String())); stream1.groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(10).advanceBy(5).until(100)) + .windowedBy(TimeWindows.of(ofMillis(10)).advanceBy(ofMillis(5)).until(100)) .aggregate( () -> "", MockAggregator.toStringInstance("+"), @@ -316,7 +317,7 @@ public void shouldLogAndMeterWhenSkippingExpiredWindowByGrace() { final KStream stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String())); stream1.groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(10).advanceBy(5).grace(90L)) + .windowedBy(TimeWindows.of(ofMillis(10)).advanceBy(ofMillis(5)).grace(ofMillis(90L))) .aggregate( () -> "", MockAggregator.toStringInstance("+"), diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowReduceTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowReduceTest.java index 3746ae9a32226..913710fd46045 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowReduceTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamWindowReduceTest.java @@ -36,6 +36,7 @@ import java.util.Properties; +import static java.time.Duration.ofMillis; import static org.apache.kafka.common.utils.Utils.mkEntry; import static org.apache.kafka.common.utils.Utils.mkMap; import static org.apache.kafka.test.StreamsTestUtils.getMetricByName; @@ -58,7 +59,7 @@ public void shouldLogAndMeterOnNullKey() { builder .stream("TOPIC", Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(500L)) + .windowedBy(TimeWindows.of(ofMillis(500L))) .reduce((value1, value2) -> value1 + "+" + value2); @@ -80,7 
+81,7 @@ public void shouldLogAndMeterOnExpiredEvent() { builder .stream("TOPIC", Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(5L).until(100)) + .windowedBy(TimeWindows.of(ofMillis(5L)).until(100)) .reduce((value1, value2) -> value1 + "+" + value2) .toStream() .map((key, value) -> new KeyValue<>(key.toString(), value)) diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SessionWindowedKStreamImplTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SessionWindowedKStreamImplTest.java index 34a235ac482e7..9ee918c1c4b76 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SessionWindowedKStreamImplTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SessionWindowedKStreamImplTest.java @@ -47,6 +47,7 @@ import java.util.Map; import java.util.Properties; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -69,7 +70,7 @@ public String apply(final String aggKey, final String aggOne, final String aggTw public void before() { final KStream stream = builder.stream(TOPIC, Consumed.with(Serdes.String(), Serdes.String())); this.stream = stream.groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(SessionWindows.with(500)); + .windowedBy(SessionWindows.with(ofMillis(500))); } @Test @@ -260,4 +261,4 @@ private void processData(final TopologyTestDriver driver) { driver.pipeInput(recordFactory.create(TOPIC, "2", "1", 600)); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java index f260bee62ef68..f6e06e421c5c2 100644 --- 
a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java @@ -21,6 +21,7 @@ import java.util.Map; +import static java.time.Duration.ofMillis; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -124,7 +125,7 @@ public void cannotCompareTimeWindowWithDifferentWindowType() { @Test public void shouldReturnMatchedWindowsOrderedByTimestamp() { - final TimeWindows windows = TimeWindows.of(12L).advanceBy(5L); + final TimeWindows windows = TimeWindows.of(ofMillis(12L)).advanceBy(ofMillis(5L)); final Map matched = windows.windowsFor(21L); final Long[] expected = matched.keySet().toArray(new Long[matched.size()]); @@ -132,4 +133,4 @@ public void shouldReturnMatchedWindowsOrderedByTimestamp() { assertEquals(expected[1].longValue(), 15L); assertEquals(expected[2].longValue(), 20L); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java index 0e541c91acc57..0505cbc04197d 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java @@ -46,6 +46,7 @@ import java.util.Map; import java.util.Properties; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -61,7 +62,7 @@ public class TimeWindowedKStreamImplTest { public void before() { final KStream stream = builder.stream(TOPIC, Consumed.with(Serdes.String(), Serdes.String())); windowedStream = stream.groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(500L)); + 
.windowedBy(TimeWindows.of(ofMillis(500L))); } @Test @@ -244,4 +245,4 @@ private void processData(final TopologyTestDriver driver) { driver.pipeInput(recordFactory.create(TOPIC, "2", "1", 500L)); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/StreamsGraphTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/StreamsGraphTest.java index d65f27e2b19ba..c485310492211 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/StreamsGraphTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/StreamsGraphTest.java @@ -34,6 +34,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import static java.time.Duration.ofMillis; import static org.junit.Assert.assertEquals; public class StreamsGraphTest { @@ -53,7 +54,7 @@ public void shouldBeAbleToBuildTopologyIncrementally() { final ValueJoiner valueJoiner = (v, v2) -> v + v2; - final KStream joinedStream = stream.join(streamII, valueJoiner, JoinWindows.of(5000)); + final KStream joinedStream = stream.join(streamII, valueJoiner, JoinWindows.of(ofMillis(5000))); // build step one assertEquals(expectedJoinedTopology, builder.build().describe().toString()); @@ -102,7 +103,7 @@ private Topology getTopologyWithChangingValuesAfterChangingKey(final String opti final KStream mappedKeyStream = inputStream.selectKey((k, v) -> k + v); mappedKeyStream.mapValues(v -> v.toUpperCase(Locale.getDefault())).groupByKey().count().toStream().to("output"); - mappedKeyStream.flatMapValues(v -> Arrays.asList(v.split("\\s"))).groupByKey().windowedBy(TimeWindows.of(5000)).count().toStream().to("windowed-output"); + mappedKeyStream.flatMapValues(v -> Arrays.asList(v.split("\\s"))).groupByKey().windowedBy(TimeWindows.of(ofMillis(5000))).count().toStream().to("windowed-output"); return builder.build(properties); @@ -118,7 +119,7 @@ private Topology 
getTopologyWithThroughOperation(final String optimizeConfig) { final KStream mappedKeyStream = inputStream.selectKey((k, v) -> k + v).through("through-topic"); mappedKeyStream.groupByKey().count().toStream().to("output"); - mappedKeyStream.groupByKey().windowedBy(TimeWindows.of(5000)).count().toStream().to("windowed-output"); + mappedKeyStream.groupByKey().windowedBy(TimeWindows.of(ofMillis(5000))).count().toStream().to("windowed-output"); return builder.build(properties); diff --git a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java index 0fa058301ecb0..e8fc05afe0fed 100644 --- a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java +++ b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java @@ -54,7 +54,6 @@ import org.apache.kafka.streams.state.WindowStore; import java.io.IOException; -import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -63,6 +62,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static java.time.Duration.ofMillis; + /** * Class that provides support for a series of benchmarks. It is usually driven by * tests/kafkatest/benchmarks/streams/streams_simple_benchmark_test.py. 
@@ -332,7 +333,7 @@ private void consumeAndProduce(final String topic) { consumer.seekToBeginning(partitions); while (true) { - final ConsumerRecords records = consumer.poll(Duration.ofMillis(POLL_MS)); + final ConsumerRecords records = consumer.poll(ofMillis(POLL_MS)); if (records.isEmpty()) { if (processedRecords == numRecords) { break; @@ -370,7 +371,7 @@ private void consume(final String topic) { consumer.seekToBeginning(partitions); while (true) { - final ConsumerRecords records = consumer.poll(Duration.ofMillis(POLL_MS)); + final ConsumerRecords records = consumer.poll(ofMillis(POLL_MS)); if (records.isEmpty()) { if (processedRecords == numRecords) { break; @@ -550,7 +551,7 @@ private void countStreamsWindowed(final String sourceTopic) { input.peek(new CountDownAction(latch)) .groupByKey() - .windowedBy(TimeWindows.of(AGGREGATE_WINDOW_SIZE).advanceBy(AGGREGATE_WINDOW_ADVANCE)) + .windowedBy(TimeWindows.of(ofMillis(AGGREGATE_WINDOW_SIZE)).advanceBy(ofMillis(AGGREGATE_WINDOW_ADVANCE))) .count(); final KafkaStreams streams = createKafkaStreamsWithExceptionHandler(builder, props); @@ -593,7 +594,7 @@ private void streamStreamJoin(final String kStreamTopic1, final String kStreamTo final KStream input1 = builder.stream(kStreamTopic1); final KStream input2 = builder.stream(kStreamTopic2); - input1.leftJoin(input2, VALUE_JOINER, JoinWindows.of(STREAM_STREAM_JOIN_WINDOW)).foreach(new CountDownAction(latch)); + input1.leftJoin(input2, VALUE_JOINER, JoinWindows.of(ofMillis(STREAM_STREAM_JOIN_WINDOW))).foreach(new CountDownAction(latch)); final KafkaStreams streams = createKafkaStreamsWithExceptionHandler(builder, props); diff --git a/streams/src/test/java/org/apache/kafka/streams/perf/YahooBenchmark.java b/streams/src/test/java/org/apache/kafka/streams/perf/YahooBenchmark.java index 6e09ad1319ca3..1109f9d9be391 100644 --- a/streams/src/test/java/org/apache/kafka/streams/perf/YahooBenchmark.java +++ 
b/streams/src/test/java/org/apache/kafka/streams/perf/YahooBenchmark.java @@ -17,6 +17,7 @@ package org.apache.kafka.streams.perf; import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; @@ -334,7 +335,7 @@ public String apply(final String key, final String value) { // calculate windowed counts keyedByCampaign .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(TimeWindows.of(10 * 1000)) + .windowedBy(TimeWindows.of(Duration.ofMillis(10 * 1000))) .count(Materialized.>as("time-windows")); return new KafkaStreams(builder.build(), streamConfig); diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StandbyTaskTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StandbyTaskTest.java index 820191dfeb67a..167cd91bef589 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StandbyTaskTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StandbyTaskTest.java @@ -72,6 +72,7 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import static java.time.Duration.ofMillis; import static java.util.Collections.emptyList; import static java.util.Collections.emptySet; import static java.util.Collections.singletonList; @@ -205,7 +206,7 @@ public void testUpdate() throws IOException { } restoreStateConsumer.seekToBeginning(partition); - task.update(partition2, restoreStateConsumer.poll(Duration.ofMillis(100)).records(partition2)); + task.update(partition2, restoreStateConsumer.poll(ofMillis(100)).records(partition2)); final StandbyContextImpl context = (StandbyContextImpl) task.context(); final MockKeyValueStore store1 = (MockKeyValueStore) context.getStateMgr().getStore(storeName1); @@ -233,8 +234,8 @@ public void 
shouldRestoreToWindowedStores() throws IOException { builder .stream(Collections.singleton("topic"), new ConsumedInternal<>()) .groupByKey() - .windowedBy(TimeWindows.of(60_000).grace(0L)) - .count(Materialized.>as(storeName).withRetention(120_000L)); + .windowedBy(TimeWindows.of(ofMillis(60_000)).grace(ofMillis(0L))) + .count(Materialized.>as(storeName).withRetention(ofMillis(120_000L))); builder.buildAndOptimizeTopology(); @@ -478,7 +479,7 @@ public void shouldInitializeStateStoreWithoutException() throws IOException { @Test public void shouldInitializeWindowStoreWithoutException() throws IOException { final InternalStreamsBuilder builder = new InternalStreamsBuilder(new InternalTopologyBuilder()); - builder.stream(Collections.singleton("topic"), new ConsumedInternal<>()).groupByKey().windowedBy(TimeWindows.of(100)).count(); + builder.stream(Collections.singleton("topic"), new ConsumedInternal<>()).groupByKey().windowedBy(TimeWindows.of(ofMillis(100))).count(); initializeStandbyStores(builder); } diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamThreadTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamThreadTest.java index e691c54d8ebe0..b4de5ec005706 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamThreadTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamThreadTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor.internals; +import java.time.Duration; import org.apache.kafka.clients.admin.MockAdminClient; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; @@ -1040,13 +1041,13 @@ public Processor get() { return new Processor() { @Override public void init(final ProcessorContext context) { - context.schedule(100L, PunctuationType.STREAM_TIME, new Punctuator() { + context.schedule(Duration.ofMillis(100L), PunctuationType.STREAM_TIME, 
new Punctuator() { @Override public void punctuate(final long timestamp) { punctuatedStreamTime.add(timestamp); } }); - context.schedule(100L, PunctuationType.WALL_CLOCK_TIME, new Punctuator() { + context.schedule(Duration.ofMillis(100L), PunctuationType.WALL_CLOCK_TIME, new Punctuator() { @Override public void punctuate(final long timestamp) { punctuatedWallClockTime.add(timestamp); diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamsPartitionAssignorTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamsPartitionAssignorTest.java index 610296973a0e3..649aa191ee0f5 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamsPartitionAssignorTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamsPartitionAssignorTest.java @@ -60,6 +60,7 @@ import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertEquals; @@ -961,7 +962,7 @@ public Object apply(final Object value1, final Object value2) { return null; } }, - JoinWindows.of(0) + JoinWindows.of(ofMillis(0)) ); final UUID uuid = UUID.randomUUID(); diff --git a/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java b/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java index 51dc05c4b8d51..d2f673ae06bd0 100644 --- a/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java +++ b/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.tests; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.Serdes; @@ -129,7 +130,7 @@ public boolean test(final String 
key, final Integer value) { data.groupByKey(Serialized.with(stringSerde, intSerde)); groupedData - .windowedBy(TimeWindows.of(TimeUnit.DAYS.toMillis(1))) + .windowedBy(TimeWindows.of(Duration.ofDays(1))) .aggregate( new Initializer() { public Integer apply() { @@ -154,7 +155,7 @@ public Integer apply(final String aggKey, final Integer value, final Integer agg // max groupedData - .windowedBy(TimeWindows.of(TimeUnit.DAYS.toMillis(2))) + .windowedBy(TimeWindows.of(Duration.ofDays(2))) .aggregate( new Initializer() { public Integer apply() { @@ -179,7 +180,7 @@ public Integer apply(final String aggKey, final Integer value, final Integer agg // sum groupedData - .windowedBy(TimeWindows.of(TimeUnit.DAYS.toMillis(2))) + .windowedBy(TimeWindows.of(Duration.ofDays(2))) .aggregate( new Initializer() { public Long apply() { @@ -202,7 +203,7 @@ public Long apply(final String aggKey, final Integer value, final Long aggregate // cnt groupedData - .windowedBy(TimeWindows.of(TimeUnit.DAYS.toMillis(2))) + .windowedBy(TimeWindows.of(Duration.ofDays(2))) .count(Materialized.>as("uwin-cnt")) .toStream(new Unwindow()) .to("cnt", Produced.with(stringSerde, longSerde)); diff --git a/streams/src/test/java/org/apache/kafka/test/MockProcessor.java b/streams/src/test/java/org/apache/kafka/test/MockProcessor.java index c95f4086ef1b8..e13e144a3c825 100644 --- a/streams/src/test/java/org/apache/kafka/test/MockProcessor.java +++ b/streams/src/test/java/org/apache/kafka/test/MockProcessor.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.test; +import java.time.Duration; import org.apache.kafka.streams.processor.AbstractProcessor; import org.apache.kafka.streams.processor.Cancellable; import org.apache.kafka.streams.processor.ProcessorContext; @@ -55,7 +56,7 @@ public MockProcessor() { public void init(final ProcessorContext context) { super.init(context); if (scheduleInterval > 0L) { - scheduleCancellable = context.schedule(scheduleInterval, punctuationType, new Punctuator() { + 
scheduleCancellable = context.schedule(Duration.ofMillis(scheduleInterval), punctuationType, new Punctuator() { @Override public void punctuate(final long timestamp) { if (punctuationType == PunctuationType.STREAM_TIME) { diff --git a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala index f33975647ef12..e2a0cc6a51962 100644 --- a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala +++ b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala @@ -18,6 +18,8 @@ */ package org.apache.kafka.streams.scala.kstream +import java.time.Duration.ofMillis + import org.apache.kafka.streams.kstream.JoinWindows import org.apache.kafka.streams.scala.ImplicitConversions._ import org.apache.kafka.streams.scala.Serdes._ @@ -143,7 +145,7 @@ class KStreamTest extends FlatSpec with Matchers with TestDriver { val stream1 = builder.stream[String, String](sourceTopic1) val stream2 = builder.stream[String, String](sourceTopic2) - stream1.join(stream2)((a, b) => s"$a-$b", JoinWindows.of(1000)).to(sinkTopic) + stream1.join(stream2)((a, b) => s"$a-$b", JoinWindows.of(ofMillis(1000))).to(sinkTopic) val testDriver = createTestDriver(builder) diff --git a/streams/test-utils/src/main/java/org/apache/kafka/streams/TopologyTestDriver.java b/streams/test-utils/src/main/java/org/apache/kafka/streams/TopologyTestDriver.java index 05a128b18c23a..7d031ec74781b 100644 --- a/streams/test-utils/src/main/java/org/apache/kafka/streams/TopologyTestDriver.java +++ b/streams/test-utils/src/main/java/org/apache/kafka/streams/TopologyTestDriver.java @@ -149,7 +149,7 @@ * {@link ProducerRecord#equals(Object)} can simplify your code as you can ignore attributes you are not interested in. *

    * Note, that calling {@code pipeInput()} will also trigger {@link PunctuationType#STREAM_TIME event-time} base - * {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) punctuation} callbacks. + * {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) punctuation} callbacks. * However, you won't trigger {@link PunctuationType#WALL_CLOCK_TIME wall-clock} type punctuations that you must * trigger manually via {@link #advanceWallClockTime(long)}. *

    @@ -488,7 +488,7 @@ public void pipeInput(final List> records) { /** * Advances the internally mocked wall-clock time. * This might trigger a {@link PunctuationType#WALL_CLOCK_TIME wall-clock} type - * {@link ProcessorContext#schedule(long, PunctuationType, Punctuator) punctuations}. + * {@link ProcessorContext#schedule(Duration, PunctuationType, Punctuator) punctuations}. * * @param advanceMs the amount of time to advance wall-clock time in milliseconds */ diff --git a/streams/test-utils/src/test/java/org/apache/kafka/streams/MockProcessorContextTest.java b/streams/test-utils/src/test/java/org/apache/kafka/streams/MockProcessorContextTest.java index 878aa35748340..ba5286783b0f5 100644 --- a/streams/test-utils/src/test/java/org/apache/kafka/streams/MockProcessorContextTest.java +++ b/streams/test-utils/src/test/java/org/apache/kafka/streams/MockProcessorContextTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams; +import java.time.Duration; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.processor.AbstractProcessor; import org.apache.kafka.streams.processor.MockProcessorContext; @@ -345,7 +346,7 @@ public void shouldCapturePunctuator() { @Override public void init(final ProcessorContext context) { context.schedule( - 1000L, + Duration.ofSeconds(1L), PunctuationType.WALL_CLOCK_TIME, timestamp -> context.commit() ); diff --git a/streams/test-utils/src/test/java/org/apache/kafka/streams/TopologyTestDriverTest.java b/streams/test-utils/src/test/java/org/apache/kafka/streams/TopologyTestDriverTest.java index 497a6c374cd10..58f6e02cc9fdb 100644 --- a/streams/test-utils/src/test/java/org/apache/kafka/streams/TopologyTestDriverTest.java +++ b/streams/test-utils/src/test/java/org/apache/kafka/streams/TopologyTestDriverTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.producer.ProducerRecord; 
import org.apache.kafka.common.header.Header; @@ -231,7 +232,7 @@ public void init(final ProcessorContext context) { initialized = true; this.context = context; for (final Punctuation punctuation : punctuations) { - this.context.schedule(punctuation.intervalMs, punctuation.punctuationType, punctuation.callback); + this.context.schedule(Duration.ofMillis(punctuation.intervalMs), punctuation.punctuationType, punctuation.callback); } } @@ -863,8 +864,8 @@ private class CustomMaxAggregator implements Processor { @Override public void init(final ProcessorContext context) { this.context = context; - context.schedule(60000, PunctuationType.WALL_CLOCK_TIME, timestamp -> flushStore()); - context.schedule(10000, PunctuationType.STREAM_TIME, timestamp -> flushStore()); + context.schedule(Duration.ofMinutes(1), PunctuationType.WALL_CLOCK_TIME, timestamp -> flushStore()); + context.schedule(Duration.ofSeconds(10), PunctuationType.STREAM_TIME, timestamp -> flushStore()); store = (KeyValueStore) context.getStateStore("aggStore"); } From c7b3c9a5f318f2e91c84e79a8f2f9fd2f3365e9e Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Sun, 23 Sep 2018 09:53:56 +0300 Subject: [PATCH 03/14] KAFKA-7277: Call in tests and examples are changed to Duration versions. 
--- .../examples/pageview/PageViewTypedDemo.java | 1 - .../examples/temperature/TemperatureDemo.java | 1 - .../org/apache/kafka/streams/ApiUtils.java | 7 +++--- .../apache/kafka/streams/KafkaStreams.java | 2 +- .../kafka/streams/kstream/TimeWindows.java | 2 +- .../ForwardingDisabledProcessorContext.java | 2 +- .../internals/GlobalProcessorContextImpl.java | 2 +- .../internals/ProcessorContextImpl.java | 4 ++-- .../internals/StandbyContextImpl.java | 2 +- .../apache/kafka/streams/state/Stores.java | 6 ++--- .../state/internals/CachingWindowStore.java | 8 +++---- .../ChangeLoggingWindowBytesStore.java | 8 +++---- .../CompositeReadOnlyWindowStore.java | 6 ++--- .../state/internals/MeteredWindowStore.java | 6 ++--- .../state/internals/RocksDBWindowStore.java | 6 ++--- .../kafka/streams/KafkaStreamsTest.java | 3 ++- .../AbstractResetIntegrationTest.java | 3 ++- .../GlobalThreadShutDownOrderTest.java | 4 ++-- ...ableSourceTopicRestartIntegrationTest.java | 8 +++---- .../PurgeRepartitionTopicIntegrationTest.java | 4 ++-- .../QueryableStateIntegrationTest.java | 9 +++---- .../RepartitionOptimizingIntegrationTest.java | 4 ++-- ...ionWithMergeOptimizingIntegrationTest.java | 6 ++--- .../integration/RestoreIntegrationTest.java | 3 ++- .../internals/KStreamKStreamJoinTest.java | 2 +- .../kafka/streams/perf/SimpleBenchmark.java | 7 +++--- .../AbstractProcessorContextTest.java | 7 ++++++ .../InternalTopologyBuilderTest.java | 3 ++- .../kafka/streams/state/NoOpWindowStore.java | 20 +++++++++++++++- .../kafka/streams/state/StoresTest.java | 15 ++++++------ .../internals/CachingWindowStoreTest.java | 3 ++- .../internals/ReadOnlyWindowStoreStub.java | 24 +++++++++++++++++++ .../internals/RocksDBWindowStoreTest.java | 7 +++--- .../StreamThreadStateStoreProviderTest.java | 2 +- .../tests/BrokerCompatibilityTest.java | 3 +-- .../kafka/streams/tests/EosTestClient.java | 5 ++-- .../streams/tests/ShutdownDeadlockTest.java | 4 ++-- .../kafka/streams/tests/SmokeTestClient.java | 5 ++-- 
.../StreamsBrokerDownResilienceTest.java | 6 ++--- .../tests/StreamsStandByReplicaTest.java | 4 ++-- .../test/InternalMockProcessorContext.java | 7 ++++++ .../kafka/test/NoOpProcessorContext.java | 10 +++++++- .../scala/kstream/MaterializedTest.scala | 4 +++- .../processor/MockProcessorContext.java | 4 ++-- 44 files changed, 160 insertions(+), 89 deletions(-) diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java index 14866cb4191ef..f8af99f691138 100644 --- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java +++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/pageview/PageViewTypedDemo.java @@ -40,7 +40,6 @@ import java.util.Map; import java.util.Properties; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; /** * Demonstrates how to perform a join between a KStream and a KTable, i.e. 
an example of a stateful computation, diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java index b91b5a4d4ffc3..93480e40d6b12 100644 --- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java +++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/temperature/TemperatureDemo.java @@ -31,7 +31,6 @@ import java.util.Properties; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; /** * Demonstrates, using the high-level KStream DSL, how to implement an IoT demo application diff --git a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java index e44764fc98d96..56ba260986d95 100644 --- a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java +++ b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java @@ -1,12 +1,12 @@ /* * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with + * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.apache.kafka.streams; import java.time.Duration; diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index b84fb30197ad3..29d9681f365cf 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -814,7 +814,7 @@ public void run() { * This will block until all threads have stopped. */ public void close() { - close(DEFAULT_CLOSE_TIMEOUT, TimeUnit.SECONDS); + close(Duration.ofSeconds(DEFAULT_CLOSE_TIMEOUT)); } /** diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java index 32bef982ccc0d..4f97079c84ee1 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java @@ -162,7 +162,7 @@ public TimeWindows advanceBy(final long advanceMs) { public TimeWindows advanceBy(final Duration advance) { ApiUtils.validateMillisecondDuration(advance, "advance"); - long advanceMs = advance.toMillis(); + final long advanceMs = advance.toMillis(); if (advanceMs <= 0 || advanceMs > sizeMs) { throw new IllegalArgumentException(String.format("AdvanceMs must lie within interval (0, %d].", sizeMs)); diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java index ad6987d018a11..4990bcc36b419 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java @@ -93,7 +93,7 @@ public Cancellable schedule(final long intervalMs, } @Override - public Cancellable 
schedule(Duration interval, PunctuationType type, Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) throws IllegalArgumentException { return delegate.schedule(interval, type, callback); } diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java index b894ca33927c3..e405bbd222a48 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java @@ -101,7 +101,7 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin * @throws UnsupportedOperationException on every invocation */ @Override - public Cancellable schedule(Duration interval, PunctuationType type, Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) throws IllegalArgumentException { throw new UnsupportedOperationException("this should not happen: schedule() not supported in global processor context."); } diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java index c4bf13c735de0..1e8c3526b31cb 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java @@ -156,8 +156,8 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin } @Override - public Cancellable schedule(Duration interval, PunctuationType type, - Punctuator callback) throws 
IllegalArgumentException { + public Cancellable schedule(final Duration interval, final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { return schedule(interval, type, callback); } diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java index 776c11208045c..886d80308fd22 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java @@ -198,7 +198,7 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin * @throws UnsupportedOperationException on every invocation */ @Override - public Cancellable schedule(Duration interval, PunctuationType type, Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) throws IllegalArgumentException { throw new UnsupportedOperationException("this should not happen: schedule() not supported in standby tasks."); } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java index c28304f6fdf07..4817c252e20e8 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java @@ -155,7 +155,7 @@ public String metricsScope() { * careful to set it the same as the windowed keys you're actually storing. * @param retainDuplicates whether or not to retain duplicates. 
* @return an instance of {@link WindowBytesStoreSupplier} - * @deprecated since 2.1 Use {@link Stores#persistentWindowStore(String, long, long, boolean, long)} instead + * @deprecated since 2.1 Use {@link Stores#persistentWindowStore(String, Duration, Duration, boolean, long)} instead */ @Deprecated public static WindowBytesStoreSupplier persistentWindowStore(final String name, @@ -271,8 +271,8 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, throw new IllegalArgumentException("segmentInterval cannot be zero or negative"); } - long retentionPeriodMs = retentionPeriod.toMillis(); - long windowSizeMs = windowSize.toMillis(); + final long retentionPeriodMs = retentionPeriod.toMillis(); + final long windowSizeMs = windowSize.toMillis(); if (windowSizeMs > retentionPeriodMs) { throw new IllegalArgumentException("The retention period of the window store " diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java index aae9adc20b355..682f00da2ec24 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java @@ -186,7 +186,7 @@ public synchronized WindowStoreIterator fetch(final Bytes key, final lon } @Override - public WindowStoreIterator fetch(Bytes key, Instant from, Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final Bytes key, final Instant from, final Duration duration) throws IllegalArgumentException { // since this function may not access the underlying inner store, we need to validate // if store is open outside as well. 
validateStoreOpen(); @@ -221,8 +221,8 @@ public KeyValueIterator, byte[]> fetch(final Bytes from, final B } @Override - public KeyValueIterator, byte[]> fetch(Bytes from, Bytes to, Instant fromTime, - Duration duration) throws IllegalArgumentException { + public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final Instant fromTime, + final Duration duration) throws IllegalArgumentException { // since this function may not access the underlying inner store, we need to validate // if store is open outside as well. @@ -286,7 +286,7 @@ public KeyValueIterator, byte[]> fetchAll(final long timeFrom, f } @Override - public KeyValueIterator, byte[]> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { + public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { validateStoreOpen(); final long timeFrom = from.toEpochMilli(); diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java index e968b1acfc6af..463e5fb75512e 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java @@ -59,7 +59,7 @@ public WindowStoreIterator fetch(final Bytes key, final long from, final } @Override - public WindowStoreIterator fetch(Bytes key, Instant from, Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final Bytes key, final Instant from, final Duration duration) throws IllegalArgumentException { return bytesStore.fetch(key, from, duration); } @@ -69,8 +69,8 @@ public KeyValueIterator, byte[]> fetch(final Bytes keyFrom, fina } @Override - public KeyValueIterator, byte[]> fetch(Bytes from, Bytes to, Instant fromTime, - Duration duration) throws 
IllegalArgumentException { + public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final Instant fromTime, + final Duration duration) throws IllegalArgumentException { return bytesStore.fetch(from, to, fromTime, duration); } @@ -85,7 +85,7 @@ public KeyValueIterator, byte[]> fetchAll(final long timeFrom, f } @Override - public KeyValueIterator, byte[]> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { + public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { return bytesStore.fetchAll(from, duration); } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java index 0ac8681b8c107..360090d8fd91b 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java @@ -71,7 +71,7 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } @Override - public WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { Objects.requireNonNull(key, "key can't be null"); final List> stores = provider.stores(storeName, windowStoreType); for (final ReadOnlyWindowStore windowStore : stores) { @@ -97,7 +97,7 @@ public KeyValueIterator, V> fetch(final K from, final K to, final lo } @Override - public KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { Objects.requireNonNull(from, "from can't be 
null"); Objects.requireNonNull(to, "to can't be null"); final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @@ -132,7 +132,7 @@ public KeyValueIterator, V> fetchAll(final long timeFrom, final long } @Override - public KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @Override public KeyValueIterator, V> apply(final ReadOnlyWindowStore store) { diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java index d68f3e23bf31e..1ab57b6174f96 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java @@ -146,7 +146,7 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } @Override - public WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { return new MeteredWindowStoreIterator<>(inner.fetch(keyBytes(key), from, duration), fetchTime, metrics, @@ -165,7 +165,7 @@ public KeyValueIterator, V> fetchAll(final long timeFrom, final long } @Override - public KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { return new MeteredWindowedKeyValueIterator<>(inner.fetchAll(from, duration), fetchTime, 
metrics, @@ -179,7 +179,7 @@ public KeyValueIterator, V> fetch(final K from, final K to, final lo } @Override - public KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { return new MeteredWindowedKeyValueIterator<>(inner.fetch(keyBytes(from), keyBytes(to), fromTime, duration), fetchTime, metrics, diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java index 8003a0489ea01..554592888e8d1 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java @@ -94,7 +94,7 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } @Override - public WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @@ -105,7 +105,7 @@ public KeyValueIterator, V> fetch(final K from, final K to, final lo } @Override - public KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); } @@ -122,7 +122,7 @@ public KeyValueIterator, V> fetchAll(final long timeFrom, final long } @Override - public KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws 
IllegalArgumentException { + public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } diff --git a/streams/src/test/java/org/apache/kafka/streams/KafkaStreamsTest.java b/streams/src/test/java/org/apache/kafka/streams/KafkaStreamsTest.java index 5e07703ddd571..abc4cb90b7d62 100644 --- a/streams/src/test/java/org/apache/kafka/streams/KafkaStreamsTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/KafkaStreamsTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams; +import java.time.Duration; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.MockProducer; @@ -463,7 +464,7 @@ public void shouldReturnFalseOnCloseWhenThreadsHaventTerminated() throws Excepti System.currentTimeMillis()); assertTrue("Timed out waiting to receive single message", latch.await(30, TimeUnit.SECONDS)); - assertFalse(streams.close(10, TimeUnit.MILLISECONDS)); + assertFalse(streams.close(Duration.ofMillis(10))); } finally { // stop the thread so we don't interfere with other tests etc keepRunning.set(false); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java index 0bf78563a28ce..bb199b73d5711 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/AbstractResetIntegrationTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.integration; +import java.time.Duration; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.ConsumerGroupDescription; @@ -200,7 +201,7 @@ void prepareTest() 
throws Exception { void cleanupTest() throws Exception { if (streams != null) { - streams.close(30, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(30)); } IntegrationTestUtils.purgeLocalStreamsState(streamsConfig); } diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/GlobalThreadShutDownOrderTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/GlobalThreadShutDownOrderTest.java index cae0f575ee51b..a0ada62e03f9c 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/GlobalThreadShutDownOrderTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/GlobalThreadShutDownOrderTest.java @@ -17,6 +17,7 @@ package org.apache.kafka.streams.integration; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.serialization.LongSerializer; import org.apache.kafka.common.serialization.Serdes; @@ -52,7 +53,6 @@ import java.util.Arrays; import java.util.List; import java.util.Properties; -import java.util.concurrent.TimeUnit; import kafka.utils.MockTime; @@ -148,7 +148,7 @@ public boolean conditionMet() { } }, 10000L, "Has not processed record within 10 seconds"); - kafkaStreams.close(30, TimeUnit.SECONDS); + kafkaStreams.close(Duration.ofSeconds(30)); final List expectedRetrievedValues = Arrays.asList(1L, 2L, 3L, 4L); assertEquals(expectedRetrievedValues, retrievedValuesList); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/KTableSourceTopicRestartIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/KTableSourceTopicRestartIntegrationTest.java index dd78190931a5f..d5781dd9c47cc 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/KTableSourceTopicRestartIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/KTableSourceTopicRestartIntegrationTest.java @@ -18,6 +18,7 @@ package org.apache.kafka.streams.integration; +import 
java.time.Duration; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.serialization.Serdes; @@ -51,7 +52,6 @@ import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; @Category({IntegrationTest.class}) public class KTableSourceTopicRestartIntegrationTest { @@ -130,7 +130,7 @@ public void shouldRestoreAndProgressWhenTopicWrittenToDuringRestorationWithEosDi assertNumberValuesRead(readKeyValues, expectedResultsWithDataWrittenDuringRestoreMap, "Table did not get all values after restart"); } finally { - streamsOne.close(5, TimeUnit.SECONDS); + streamsOne.close(Duration.ofSeconds(5)); } } @@ -155,7 +155,7 @@ public void shouldRestoreAndProgressWhenTopicWrittenToDuringRestorationWithEosEn assertNumberValuesRead(readKeyValues, expectedResultsWithDataWrittenDuringRestoreMap, "Table did not get all values after restart"); } finally { - streamsOne.close(5, TimeUnit.SECONDS); + streamsOne.close(Duration.ofSeconds(5)); } } @@ -179,7 +179,7 @@ public void shouldRestoreAndProgressWhenTopicNotWrittenToDuringRestoration() thr assertNumberValuesRead(readKeyValues, expectedValues, "Table did not get all values after restart"); } finally { - streamsOne.close(5, TimeUnit.SECONDS); + streamsOne.close(Duration.ofSeconds(5)); } } diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/PurgeRepartitionTopicIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/PurgeRepartitionTopicIntegrationTest.java index 2269a5dd88a99..96d7d14d6e60c 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/PurgeRepartitionTopicIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/PurgeRepartitionTopicIntegrationTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.integration; +import java.time.Duration; import 
org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.Config; import org.apache.kafka.clients.producer.ProducerConfig; @@ -51,7 +52,6 @@ import java.util.Properties; import java.util.Set; import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; @Category({IntegrationTest.class}) public class PurgeRepartitionTopicIntegrationTest { @@ -173,7 +173,7 @@ public void setup() { @After public void shutdown() throws IOException { if (kafkaStreams != null) { - kafkaStreams.close(30, TimeUnit.SECONDS); + kafkaStreams.close(Duration.ofSeconds(30)); } } diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java index 5c1e167aaadb9..571680ffc7e62 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java @@ -17,6 +17,7 @@ package org.apache.kafka.streams.integration; import java.time.Duration; +import java.time.Instant; import kafka.utils.MockTime; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.KafkaProducer; @@ -221,7 +222,7 @@ public int compare(final KeyValue o1, @After public void shutdown() throws IOException { if (kafkaStreams != null) { - kafkaStreams.close(30, TimeUnit.SECONDS); + kafkaStreams.close(Duration.ofSeconds(30)); } IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration); } @@ -362,7 +363,7 @@ public boolean conditionMet() { final int index = metadata.hostInfo().port(); final KafkaStreams streamsWithKey = streamRunnables[index].getStream(); final ReadOnlyWindowStore store = streamsWithKey.store(storeName, QueryableStoreTypes.windowStore()); - return store != null && store.fetch(key, from, to) != null; + return store != null && store.fetch(key, 
Instant.ofEpochMilli(from), Duration.ofMillis(to - from)) != null; } catch (final IllegalStateException e) { // Kafka Streams instance may have closed but rebalance hasn't happened return false; @@ -1018,7 +1019,7 @@ private void waitUntilAtLeastNumRecordProcessed(final String topic, final int nu private Set> fetch(final ReadOnlyWindowStore store, final String key) { - final WindowStoreIterator fetch = store.fetch(key, 0, System.currentTimeMillis()); + final WindowStoreIterator fetch = store.fetch(key, Instant.ofEpochMilli(0), Duration.ofMillis(System.currentTimeMillis())); if (fetch.hasNext()) { final KeyValue next = fetch.next(); return Collections.singleton(KeyValue.pair(key, next.value)); @@ -1029,7 +1030,7 @@ private Set> fetch(final ReadOnlyWindowStore fetchMap(final ReadOnlyWindowStore store, final String key) { - final WindowStoreIterator fetch = store.fetch(key, 0, System.currentTimeMillis()); + final WindowStoreIterator fetch = store.fetch(key, Instant.ofEpochMilli(0), Duration.ofMillis(System.currentTimeMillis())); if (fetch.hasNext()) { final KeyValue next = fetch.next(); return Collections.singletonMap(key, next.value); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java index ace6c6fab3b3e..42ead03da36cc 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java @@ -18,6 +18,7 @@ package org.apache.kafka.streams.integration; +import java.time.Duration; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.Serdes; @@ -54,7 +55,6 @@ import java.util.List; import java.util.Locale; import java.util.Properties; 
-import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -213,7 +213,7 @@ private void runIntegrationTest(final String optimizationConfig, assertThat(3, equalTo(processorValueCollector.size())); assertThat(processorValueCollector, equalTo(expectedCollectedProcessorValues)); - streams.close(5, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(5)); } diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionWithMergeOptimizingIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionWithMergeOptimizingIntegrationTest.java index af1f5f19fd54c..242a966cb461d 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionWithMergeOptimizingIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionWithMergeOptimizingIntegrationTest.java @@ -18,6 +18,7 @@ package org.apache.kafka.streams.integration; +import java.time.Duration; import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.serialization.StringDeserializer; @@ -45,7 +46,6 @@ import java.util.Arrays; import java.util.List; import java.util.Properties; -import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -168,7 +168,7 @@ private void runIntegrationTest(final String optimizationConfig, final List> expectedStringCountKeyValues = Arrays.asList(KeyValue.pair("A", "6"), KeyValue.pair("B", "6"), KeyValue.pair("C", "6")); IntegrationTestUtils.waitUntilFinalKeyValueRecordsReceived(consumerConfig2, COUNT_STRING_TOPIC, expectedStringCountKeyValues); - streams.close(5, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(5)); } @@ -294,4 +294,4 @@ private List> getKeyValues() { + " Sink: KSTREAM-SINK-0000000019 (topic: outputTopic_1)\n" + " <-- KSTREAM-MAPVALUES-0000000018\n\n"; -} \ No newline at end of file +} 
diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/RestoreIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/RestoreIntegrationTest.java index 5eb4fc7e5b4c0..3fd82202a2d6f 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/RestoreIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/RestoreIntegrationTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.integration; +import java.time.Duration; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -113,7 +114,7 @@ private Properties props(final String applicationId) { @After public void shutdown() { if (kafkaStreams != null) { - kafkaStreams.close(30, TimeUnit.SECONDS); + kafkaStreams.close(Duration.ofSeconds(30)); } } diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java index dac26ec58da8d..f2e3cc9b39821 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java @@ -74,7 +74,7 @@ public Integer apply(final Integer value1, final Integer value2) { }, JoinWindows.of(ofMillis(100)), Joined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer()) - ) + ); final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) { diff --git a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java index e8fc05afe0fed..12eb1f74421b0 100644 --- a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java +++ 
b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.perf; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -471,8 +472,8 @@ private void processStreamWithWindowStore(final String topic) { final StoreBuilder> storeBuilder = Stores.windowStoreBuilder( Stores.persistentWindowStore( "store", - AGGREGATE_WINDOW_SIZE * 3, - AGGREGATE_WINDOW_SIZE, + Duration.ofMillis(AGGREGATE_WINDOW_SIZE * 3), + Duration.ofMillis(AGGREGATE_WINDOW_SIZE), false, 60_000L ), @@ -678,7 +679,7 @@ private KafkaStreams createKafkaStreamsWithExceptionHandler(final StreamsBuilder public void uncaughtException(final Thread t, final Throwable e) { System.out.println("FATAL: An unexpected exception is encountered on thread " + t + ": " + e); - streamsClient.close(30, TimeUnit.SECONDS); + streamsClient.close(Duration.ofSeconds(30)); } }); diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java index 070dba8efa06c..2f59bb5f560fa 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor.internals; +import java.time.Duration; import org.apache.kafka.common.header.Header; import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.header.internals.RecordHeader; @@ -202,6 +203,12 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin return null; } + @Override + public Cancellable schedule(final Duration interval, final PunctuationType type, + final 
Punctuator callback) throws IllegalArgumentException { + return null; + } + @Override public void forward(final K key, final V value) {} diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java index daf1f33c60979..2bd428b2d653c 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.processor.internals; +import java.time.Duration; import org.apache.kafka.common.config.TopicConfig; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; @@ -588,7 +589,7 @@ public void shouldAddInternalTopicConfigForWindowStores() { builder.addProcessor("processor", new MockProcessorSupplier(), "source"); builder.addStateStore( Stores.windowStoreBuilder( - Stores.persistentWindowStore("store1", 30_000L, 10_000L, false), + Stores.persistentWindowStore("store1", Duration.ofSeconds(30L), Duration.ofSeconds(10L), false), Serdes.String(), Serdes.String() ), diff --git a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java index 8b20b0bc542cd..960c928af4644 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.StateStore; @@ -92,11 +94,22 @@ public WindowStoreIterator fetch(final Object key, final long timeFrom, 
final lo return EMPTY_WINDOW_STORE_ITERATOR; } + @Override + public WindowStoreIterator fetch(final Object key, final Instant from, final Duration duration) throws IllegalArgumentException { + return EMPTY_WINDOW_STORE_ITERATOR; + } + @Override public WindowStoreIterator fetch(final Object from, final Object to, final long timeFrom, final long timeTo) { return EMPTY_WINDOW_STORE_ITERATOR; } - + + @Override + public KeyValueIterator fetch(final Object from, final Object to, final Instant fromTime, + final Duration duration) throws IllegalArgumentException { + return EMPTY_WINDOW_STORE_ITERATOR; + } + @Override public WindowStoreIterator all() { return EMPTY_WINDOW_STORE_ITERATOR; @@ -106,4 +119,9 @@ public WindowStoreIterator all() { public WindowStoreIterator fetchAll(final long timeFrom, final long timeTo) { return EMPTY_WINDOW_STORE_ITERATOR; } + + @Override + public KeyValueIterator fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + return EMPTY_WINDOW_STORE_ITERATOR; + } } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java b/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java index d0da15880cc19..8604bffcd29fb 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.state; +import java.time.Duration; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.state.internals.InMemoryKeyValueStore; import org.apache.kafka.streams.state.internals.MemoryNavigableLRUCache; @@ -54,12 +55,12 @@ public void shouldThrowIfILruMapStoreCapacityIsNegative() { @Test(expected = NullPointerException.class) public void shouldThrowIfIPersistentWindowStoreStoreNameIsNull() { - Stores.persistentWindowStore(null, 0L, 0L, false, 0L); + Stores.persistentWindowStore(null, Duration.ofMillis(0L), Duration.ofMillis(0L), 
false, 0L); } @Test(expected = IllegalArgumentException.class) public void shouldThrowIfIPersistentWindowStoreRetentionPeriodIsNegative() { - Stores.persistentWindowStore("anyName", -1L, 0L, false, 0L); + Stores.persistentWindowStore("anyName", Duration.ofMillis(-1L), Duration.ofMillis(0L), false, 0L); } @Deprecated @@ -70,12 +71,12 @@ public void shouldThrowIfIPersistentWindowStoreIfNumberOfSegmentsSmallerThanOne( @Test(expected = IllegalArgumentException.class) public void shouldThrowIfIPersistentWindowStoreIfWindowSizeIsNegative() { - Stores.persistentWindowStore("anyName", 0L, -1L, false); + Stores.persistentWindowStore("anyName", Duration.ofMillis(0L), Duration.ofMillis(-1L), false); } @Test(expected = IllegalArgumentException.class) public void shouldThrowIfIPersistentWindowStoreIfSegmentIntervalIsTooSmall() { - Stores.persistentWindowStore("anyName", 1L, 1L, false, -1L); + Stores.persistentWindowStore("anyName", Duration.ofMillis(1L), Duration.ofMillis(1L), false, -1L); } @Test(expected = NullPointerException.class) @@ -120,7 +121,7 @@ public void shouldCreateRocksDbStore() { @Test public void shouldCreateRocksDbWindowStore() { - assertThat(Stores.persistentWindowStore("store", 1L, 1L, false).get(), instanceOf(RocksDBWindowStore.class)); + assertThat(Stores.persistentWindowStore("store", Duration.ofMillis(1L), Duration.ofMillis(1L), false).get(), instanceOf(RocksDBWindowStore.class)); } @Test @@ -131,7 +132,7 @@ public void shouldCreateRocksDbSessionStore() { @Test public void shouldBuildWindowStore() { final WindowStore store = Stores.windowStoreBuilder( - Stores.persistentWindowStore("store", 3L, 3L, true), + Stores.persistentWindowStore("store", Duration.ofMillis(3L), Duration.ofMillis(3L), true), Serdes.String(), Serdes.String() ).build(); @@ -157,4 +158,4 @@ public void shouldBuildSessionStore() { ).build(); assertThat(store, not(nullValue())); } -} \ No newline at end of file +} diff --git 
a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java index 1c8dd7b8a5aeb..1bdf03d712e92 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.serialization.Serdes; @@ -101,7 +102,7 @@ public void shouldNotReturnDuplicatesInRanges() { final StreamsBuilder builder = new StreamsBuilder(); final StoreBuilder> storeBuilder = Stores.windowStoreBuilder( - Stores.persistentWindowStore("store-name", 3600000L, 60000L, false), + Stores.persistentWindowStore("store-name", Duration.ofHours(1L), Duration.ofMinutes(1L), false), Serdes.String(), Serdes.String()) .withCachingEnabled(); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java index 4818d0990f2ad..499dbeebd55af 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; @@ -60,9 +62,17 @@ public V fetch(final K key, final long time) { @Override public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { + return fetch(key, 
Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { if (!open) { throw new InvalidStateStoreException("Store is not open"); } + + final long timeFrom = from.toEpochMilli(); + final long timeTo = from.toEpochMilli() + duration.toMillis(); final List> results = new ArrayList<>(); for (long now = timeFrom; now <= timeTo; now++) { final Map kvMap = data.get(now); @@ -120,9 +130,16 @@ public void remove() { @Override public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { + return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override + public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { if (!open) { throw new InvalidStateStoreException("Store is not open"); } + final long timeFrom = from.toEpochMilli(); + final long timeTo = from.toEpochMilli() + duration.toMillis(); final List, V>> results = new ArrayList<>(); for (final long now : data.keySet()) { if (!(now >= timeFrom && now <= timeTo)) continue; @@ -166,9 +183,16 @@ public void remove() { @Override public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { + return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + } + + @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, + final Duration duration) throws IllegalArgumentException { if (!open) { throw new InvalidStateStoreException("Store is not open"); } + final long timeFrom = fromTime.toEpochMilli(); + final long timeTo = duration.toMillis(); final List, V>> results = new ArrayList<>(); for (long now = timeFrom; now <= timeTo; now++) { final NavigableMap kvMap = data.get(now); diff --git 
a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java index 201a31efd7e89..053dfb0d660e0 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.state.internals; +import java.time.Duration; import org.apache.kafka.clients.producer.MockProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.common.header.Headers; @@ -107,7 +108,7 @@ public void send(final String topic, private WindowStore createWindowStore(final ProcessorContext context, final boolean retainDuplicates) { final WindowStore store = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, retentionPeriod, windowSize, retainDuplicates, segmentInterval), + Stores.persistentWindowStore(windowName, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), retainDuplicates, segmentInterval), Serdes.Integer(), Serdes.String()).build(); @@ -770,7 +771,7 @@ public void shouldFetchAndIterateOverExactKeys() { final long retentionPeriod = 0x7a00000000000000L; final WindowStore windowStore = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, retentionPeriod, windowSize, true), + Stores.persistentWindowStore(windowName, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), true), Serdes.String(), Serdes.String()).build(); @@ -846,7 +847,7 @@ public void shouldNoNullPointerWhenSerdeDoesNotHandleNull() { @Test public void shouldFetchAndIterateOverExactBinaryKeys() { final WindowStore windowStore = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, 60_000L, 60_000L, true), + Stores.persistentWindowStore(windowName, Duration.ofMinutes(1L), Duration.ofMinutes(1L), true), Serdes.Bytes(), 
Serdes.String()).build(); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/StreamThreadStateStoreProviderTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/StreamThreadStateStoreProviderTest.java index 711cdc1d93c60..ca059b4c60be0 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/StreamThreadStateStoreProviderTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/StreamThreadStateStoreProviderTest.java @@ -78,7 +78,7 @@ public void before() { topology.addStateStore(Stores.keyValueStoreBuilder(Stores.inMemoryKeyValueStore("kv-store"), Serdes.String(), Serdes.String()), "the-processor"); topology.addStateStore( Stores.windowStoreBuilder( - Stores.persistentWindowStore("window-store", 10L, 2L, false), + Stores.persistentWindowStore("window-store", Duration.ofMillis(10L), Duration.ofMillis(2L), false), Serdes.String(), Serdes.String()), "the-processor" diff --git a/streams/src/test/java/org/apache/kafka/streams/tests/BrokerCompatibilityTest.java b/streams/src/test/java/org/apache/kafka/streams/tests/BrokerCompatibilityTest.java index 01333db6b12fb..6f801c9e93acf 100644 --- a/streams/src/test/java/org/apache/kafka/streams/tests/BrokerCompatibilityTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/tests/BrokerCompatibilityTest.java @@ -41,7 +41,6 @@ import java.util.Collections; import java.util.Locale; import java.util.Properties; -import java.util.concurrent.TimeUnit; public class BrokerCompatibilityTest { @@ -108,7 +107,7 @@ public void uncaughtException(final Thread t, final Throwable e) { System.err.println("FATAL: An unexpected exception " + cause); e.printStackTrace(System.err); System.err.flush(); - streams.close(30, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(30)); } }); System.out.println("start Kafka Streams"); diff --git a/streams/src/test/java/org/apache/kafka/streams/tests/EosTestClient.java 
b/streams/src/test/java/org/apache/kafka/streams/tests/EosTestClient.java index 1f8238ef51819..e292f16eda77a 100644 --- a/streams/src/test/java/org/apache/kafka/streams/tests/EosTestClient.java +++ b/streams/src/test/java/org/apache/kafka/streams/tests/EosTestClient.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.tests; +import java.time.Duration; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.KafkaStreams; @@ -56,7 +57,7 @@ public void start() { @Override public void run() { isRunning = false; - streams.close(TimeUnit.SECONDS.toMillis(300), TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(300)); // need to wait for callback to avoid race condition // -> make sure the callback printout to stdout is there as it is expected test output @@ -102,7 +103,7 @@ public void onChange(final KafkaStreams.State newState, final KafkaStreams.State streams.start(); } if (uncaughtException) { - streams.close(TimeUnit.SECONDS.toMillis(60), TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(60)); streams = null; } sleep(1000); diff --git a/streams/src/test/java/org/apache/kafka/streams/tests/ShutdownDeadlockTest.java b/streams/src/test/java/org/apache/kafka/streams/tests/ShutdownDeadlockTest.java index 0bbb8896485fb..a6a2ebb33781f 100644 --- a/streams/src/test/java/org/apache/kafka/streams/tests/ShutdownDeadlockTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/tests/ShutdownDeadlockTest.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.tests; +import java.time.Duration; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; @@ -30,7 +31,6 @@ import org.apache.kafka.streams.kstream.KStream; import java.util.Properties; -import java.util.concurrent.TimeUnit; public class ShutdownDeadlockTest { @@ -65,7 +65,7 @@ public void uncaughtException(final 
Thread t, final Throwable e) { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { - streams.close(5, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(5)); } })); diff --git a/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java b/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java index d2f673ae06bd0..ddff7a892e2e1 100644 --- a/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java +++ b/streams/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java @@ -41,7 +41,6 @@ import org.apache.kafka.streams.state.WindowStore; import java.util.Properties; -import java.util.concurrent.TimeUnit; public class SmokeTestClient extends SmokeTestUtil { @@ -83,7 +82,7 @@ public void run() { } public void close() { - streams.close(5, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(5)); // do not remove these printouts since they are needed for health scripts if (!uncaughtException) { System.out.println("SMOKE-TEST-CLIENT-CLOSED"); @@ -253,7 +252,7 @@ public Double apply(final Long value1, final Long value2) { @Override public void uncaughtException(final Thread t, final Throwable e) { System.out.println("FATAL: An unexpected exception is encountered on thread " + t + ": " + e); - streamsClient.close(30, TimeUnit.SECONDS); + streamsClient.close(Duration.ofSeconds(30)); } }); diff --git a/streams/src/test/java/org/apache/kafka/streams/tests/StreamsBrokerDownResilienceTest.java b/streams/src/test/java/org/apache/kafka/streams/tests/StreamsBrokerDownResilienceTest.java index eb196888a455e..25c642e5268db 100644 --- a/streams/src/test/java/org/apache/kafka/streams/tests/StreamsBrokerDownResilienceTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/tests/StreamsBrokerDownResilienceTest.java @@ -17,6 +17,7 @@ package org.apache.kafka.streams.tests; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import 
org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.Serde; @@ -33,7 +34,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Properties; -import java.util.concurrent.TimeUnit; public class StreamsBrokerDownResilienceTest { @@ -109,7 +109,7 @@ public void apply(final String key, final String value) { public void uncaughtException(final Thread t, final Throwable e) { System.err.println("FATAL: An unexpected exception " + e); System.err.flush(); - streams.close(30, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(30)); } }); System.out.println("Start Kafka Streams"); @@ -118,7 +118,7 @@ public void uncaughtException(final Thread t, final Throwable e) { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { - streams.close(30, TimeUnit.SECONDS); + streams.close(Duration.ofSeconds(30)); System.out.println("Complete shutdown of streams resilience test app now"); System.out.flush(); } diff --git a/streams/src/test/java/org/apache/kafka/streams/tests/StreamsStandByReplicaTest.java b/streams/src/test/java/org/apache/kafka/streams/tests/StreamsStandByReplicaTest.java index db160fe89ebad..66b20b283fb26 100644 --- a/streams/src/test/java/org/apache/kafka/streams/tests/StreamsStandByReplicaTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/tests/StreamsStandByReplicaTest.java @@ -17,6 +17,7 @@ package org.apache.kafka.streams.tests; +import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.Serde; @@ -38,7 +39,6 @@ import java.util.Map; import java.util.Properties; import java.util.Set; -import java.util.concurrent.TimeUnit; public class StreamsStandByReplicaTest { @@ -164,7 +164,7 @@ public void run() { } private static void shutdown(final KafkaStreams streams) { - streams.close(10, TimeUnit.SECONDS); + 
streams.close(Duration.ofSeconds(10)); } private static boolean confirmCorrectConfigs(final Properties properties) { diff --git a/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java b/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java index bedf8ebf8c497..12c7b81df269d 100644 --- a/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java +++ b/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.test; +import java.time.Duration; import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.header.internals.RecordHeaders; import org.apache.kafka.common.metrics.Metrics; @@ -207,6 +208,12 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin throw new UnsupportedOperationException("schedule() not supported."); } + @Override + public Cancellable schedule(final Duration interval, final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { + throw new UnsupportedOperationException("schedule() not supported."); + } + @Override public void commit() { } diff --git a/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java b/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java index ce9838919f0b8..e58567a698a98 100644 --- a/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java +++ b/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.test; +import java.time.Duration; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.processor.Cancellable; @@ -52,7 +53,14 @@ public StateStore getStateStore(final String name) { return null; } - @Override public Cancellable schedule(final long interval, final PunctuationType type, final Punctuator callback) { + @Override + public Cancellable 
schedule(final long interval, final PunctuationType type, final Punctuator callback) { + return null; + } + + @Override + public Cancellable schedule(final Duration interval, final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { return null; } diff --git a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala index 14fc5d4f89df3..9144493cde196 100644 --- a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala +++ b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala @@ -18,6 +18,8 @@ */ package org.apache.kafka.streams.scala.kstream +import java.time.Duration + import org.apache.kafka.streams.kstream.internals.MaterializedInternal import org.apache.kafka.streams.scala.Serdes._ import org.apache.kafka.streams.scala._ @@ -50,7 +52,7 @@ class MaterializedTest extends FlatSpec with Matchers { } "Create a Materialize with a window store supplier" should "create a Materialized with Serdes and a store supplier" in { - val storeSupplier = Stores.persistentWindowStore("store", 1, 1, true) + val storeSupplier = Stores.persistentWindowStore("store", Duration.ofMillis(1), Duration.ofMillis(1), true) val materialized: Materialized[String, Long, ByteArrayWindowStore] = Materialized.as[String, Long](storeSupplier) diff --git a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java index 2d67c06e44599..b26e3cd14a3a7 100644 --- a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java +++ b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java @@ -383,8 +383,8 @@ public Cancellable schedule(final long 
intervalMs, final PunctuationType type, f } @Override - public Cancellable schedule(Duration interval, PunctuationType type, - Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { final CapturedPunctuator capturedPunctuator = new CapturedPunctuator(interval.toMillis(), type, callback); punctuators.add(capturedPunctuator); From c4f2335542fbe2c4e21e47307949704e7d2ccafc Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Mon, 24 Sep 2018 21:15:22 +0300 Subject: [PATCH 04/14] KAFKA-7277: Tests fixed. All deprecated methods removed from tests, new version called instead. --- .../wordcount/WordCountProcessorDemo.java | 2 +- .../org/apache/kafka/streams/ApiUtils.java | 62 +++- .../apache/kafka/streams/KafkaStreams.java | 2 + .../kafka/streams/kstream/JoinWindows.java | 8 +- .../kafka/streams/kstream/Materialized.java | 1 - .../kafka/streams/kstream/SessionWindows.java | 4 +- .../kafka/streams/kstream/TimeWindows.java | 10 +- .../streams/kstream/UnlimitedWindows.java | 1 - .../kstream/internals/KStreamImpl.java | 5 +- .../kstream/internals/KStreamKStreamJoin.java | 5 +- .../internals/TimeWindowedKStreamImpl.java | 5 +- .../ForwardingDisabledProcessorContext.java | 5 +- .../internals/GlobalProcessorContextImpl.java | 1 + .../internals/ProcessorContextImpl.java | 3 +- .../internals/StandbyContextImpl.java | 1 + .../streams/state/ReadOnlyWindowStore.java | 5 +- .../streams/state/WindowStoreIterator.java | 4 +- .../state/internals/CachingWindowStore.java | 12 + .../ChangeLoggingWindowBytesStore.java | 2 + .../CompositeReadOnlyWindowStore.java | 10 + .../state/internals/MeteredWindowStore.java | 9 + .../state/internals/RocksDBWindowStore.java | 31 +- .../TimeWindowedKStreamImplTest.java | 8 +- .../kafka/streams/perf/SimpleBenchmark.java | 7 +- .../internals/CachingWindowStoreTest.java | 45 +-- 
.../ChangeLoggingWindowBytesStoreTest.java | 12 +- .../CompositeReadOnlyWindowStoreTest.java | 36 ++- .../internals/MeteredWindowStoreTest.java | 12 +- .../internals/RocksDBWindowStoreTest.java | 297 +++++++++--------- .../processor/MockProcessorContext.java | 1 + 30 files changed, 353 insertions(+), 253 deletions(-) diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java index 6f902f8f275fb..6b496415f6538 100644 --- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java +++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java @@ -64,7 +64,7 @@ public Processor get() { @SuppressWarnings("unchecked") public void init(final ProcessorContext context) { this.context = context; - this.context.schedule(Duration.ofMillis(1000), PunctuationType.STREAM_TIME, timestamp -> { + this.context.schedule(Duration.ofSeconds(1), PunctuationType.STREAM_TIME, timestamp -> { try (final KeyValueIterator iter = kvStore.all()) { System.out.println("----------- " + timestamp + " ----------- "); diff --git a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java index 56ba260986d95..f737a435d0105 100644 --- a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java +++ b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java @@ -27,35 +27,56 @@ private ApiUtils() { } /** - * Validates that milliseconds from duration {@code d} can be retrieved. + * Validates that milliseconds from duration {@code d} can be retrieved and is not negative. * @param d Duration to check * @param name Name of params for an error message. 
*/ public static void validateMillisecondDuration(final Duration d, final String name) { - try { - Objects.requireNonNull(d); + validateMillisecondDuration(d, name, false); + } - //noinspection ResultOfMethodCallIgnored - d.toMillis(); - } catch (final NullPointerException e) { - throw new IllegalArgumentException(name + " shouldn't be null.", e); - } catch (final ArithmeticException e) { - throw new IllegalArgumentException(name + " can't be converted to milliseconds. " + d + - " is negative or too big", e); - } + /** + * Validates that milliseconds from duration {@code d} can be retrieved and is not negative. + * @param d Duration to check + * @param name Name of params for an error message. + * @param canBeNegative If {@code true} duration can have negative value. + */ + public static void validateMillisecondDuration(final Duration d, final String name, final boolean canBeNegative) { + final long msec = toMillis(d, name); + + if (!canBeNegative && msec < 0) + throw new IllegalArgumentException(name + " cannot be negative."); } /** - * Validates that milliseconds from instant {@code i} can be retrieved. + * Validates that milliseconds from duration {@code d} can be retrieved and is positive. + * @param d Duration to check + * @param name Name of params for an error message. + */ + public static void validateMillisecondDurationPositive(final Duration d, final String name) { + final long msec = toMillis(d, name); + + if (msec <= 0) + throw new IllegalArgumentException(name + " should be larger than zero."); + } + + /** + * Validates that milliseconds from instant {@code i} can be retrieved and is not negative. * @param i Instant to check * @param name Name of params for an error message. 
*/ public static void validateMillisecondInstant(final Instant i, final String name) { + final long msec = toMillis(i, name); + + if (msec < 0) + throw new IllegalArgumentException(name + " cannot be negative."); + } + + private static long toMillis(final Instant i, final String name) { try { Objects.requireNonNull(i); - //noinspection ResultOfMethodCallIgnored - i.toEpochMilli(); + return i.toEpochMilli(); } catch (final NullPointerException e) { throw new IllegalArgumentException(name + " shouldn't be null.", e); } catch (final ArithmeticException e) { @@ -63,4 +84,17 @@ public static void validateMillisecondInstant(final Instant i, final String name " is negative or too big", e); } } + + private static long toMillis(final Duration d, final String name) { + try { + Objects.requireNonNull(d); + + return d.toMillis(); + } catch (final NullPointerException e) { + throw new IllegalArgumentException(name + " shouldn't be null.", e); + } catch (final ArithmeticException e) { + throw new IllegalArgumentException(name + " can't be converted to milliseconds. 
" + d + + " is negative or too big", e); + } + } } diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index 29d9681f365cf..75b30c9e870a2 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -848,6 +848,8 @@ public synchronized boolean close(final long timeout, final TimeUnit timeUnit) { public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { log.debug("Stopping Streams client with timeoutMillis = {} ms.", timeout.toMillis()); + ApiUtils.validateMillisecondDuration(timeout, "timeout"); + if (!setState(State.PENDING_SHUTDOWN)) { // if transition failed, it means it was either in PENDING_SHUTDOWN // or NOT_RUNNING already; just check that all threads have been stopped diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java index 3111f5e5f4d5d..785957c322020 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java @@ -129,7 +129,6 @@ public static JoinWindows of(final long timeDifferenceMs) throws IllegalArgument */ public static JoinWindows of(final Duration timeDifference) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); - return new JoinWindows(timeDifference.toMillis(), timeDifference.toMillis(), null, DEFAULT_RETENTION_MS); } @@ -162,8 +161,7 @@ public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentExc */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows before(final Duration timeDifference) throws IllegalArgumentException { - ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); - + 
ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference", true); return new JoinWindows(timeDifference.toMillis(), afterMs, grace, maintainDurationMs, segments); } @@ -196,8 +194,7 @@ public JoinWindows after(final long timeDifferenceMs) throws IllegalArgumentExce */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows after(final Duration timeDifference) throws IllegalArgumentException { - ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); - + ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference", true); return new JoinWindows(beforeMs, timeDifference.toMillis(), grace, maintainDurationMs, segments); } @@ -229,7 +226,6 @@ public long size() { @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); - return new JoinWindows(beforeMs, afterMs, afterWindowEnd, maintainDurationMs, segments); } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java index be15c446515e7..65ffd6fd33f46 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java @@ -248,7 +248,6 @@ public Materialized withCachingDisabled() { */ public Materialized withRetention(final Duration retention) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(retention, "retention"); - this.retention = retention; return this; } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java index dfb3dbd10f279..5994dc65fedea 100644 --- 
a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java @@ -105,8 +105,7 @@ public static SessionWindows with(final long inactivityGapMs) { * @throws IllegalArgumentException if {@code inactivityGap} is zero or negative or too big */ public static SessionWindows with(final Duration inactivityGap) { - ApiUtils.validateMillisecondDuration(inactivityGap, "inactivityGap"); - + ApiUtils.validateMillisecondDurationPositive(inactivityGap, "inactivityGap"); return new SessionWindows(inactivityGap.toMillis(), DEFAULT_RETENTION_MS, null); } @@ -143,7 +142,6 @@ public SessionWindows until(final long durationMs) throws IllegalArgumentExcepti */ public SessionWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); - return new SessionWindows( gapMs, maintainDurationMs, diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java index 4f97079c84ee1..3804181661775 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java @@ -121,8 +121,7 @@ public static TimeWindows of(final long sizeMs) throws IllegalArgumentException * @throws IllegalArgumentException if the specified window size is zero or negative or too big */ public static TimeWindows of(final Duration size) throws IllegalArgumentException { - ApiUtils.validateMillisecondDuration(size, "size"); - + ApiUtils.validateMillisecondDurationPositive(size, "size"); // This is a static factory method, so we initialize grace and retention to the defaults. 
return new TimeWindows(size.toMillis(), size.toMillis(), null, DEFAULT_RETENTION_MS); } @@ -160,11 +159,9 @@ public TimeWindows advanceBy(final long advanceMs) { */ @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows public TimeWindows advanceBy(final Duration advance) { - ApiUtils.validateMillisecondDuration(advance, "advance"); - + ApiUtils.validateMillisecondDurationPositive(advance, "advance"); final long advanceMs = advance.toMillis(); - - if (advanceMs <= 0 || advanceMs > sizeMs) { + if (advanceMs > sizeMs) { throw new IllegalArgumentException(String.format("AdvanceMs must lie within interval (0, %d].", sizeMs)); } return new TimeWindows(sizeMs, advanceMs, grace, maintainDurationMs, segments); @@ -200,7 +197,6 @@ public long size() { @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows public TimeWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); - return new TimeWindows(sizeMs, advanceMs, afterWindowEnd, maintainDurationMs, segments); } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java index 788096364a347..d956e5bee1369 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java @@ -80,7 +80,6 @@ public UnlimitedWindows startOn(final long startMs) throws IllegalArgumentExcept */ public UnlimitedWindows startOn(final Instant start) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(start, "start"); - return new UnlimitedWindows(start.toEpochMilli()); } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamImpl.java b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamImpl.java index 
42c20a52f379c..211b52ed2d08e 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamImpl.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream.internals; +import java.time.Duration; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.kstream.ForeachAction; @@ -830,8 +831,8 @@ private static StoreBuilder> joinWindowStoreBuilder(fin return Stores.windowStoreBuilder( Stores.persistentWindowStore( joinName + "-store", - windows.size() + windows.gracePeriodMs(), - windows.size(), + Duration.ofMillis(windows.size() + windows.gracePeriodMs()), + Duration.ofMillis(windows.size()), true ), keySerde, diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java index 4c6998ad85535..017d0d6282e17 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.kstream.internals; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.streams.kstream.ValueJoiner; import org.apache.kafka.streams.processor.AbstractProcessor; import org.apache.kafka.streams.processor.Processor; @@ -88,7 +90,8 @@ key, value, context().topic(), context().partition(), context().offset() final long timeFrom = Math.max(0L, context().timestamp() - joinBeforeMs); final long timeTo = Math.max(0L, context().timestamp() + joinAfterMs); - try (final WindowStoreIterator iter = otherWindow.fetch(key, timeFrom, timeTo)) { + try (final WindowStoreIterator iter = + otherWindow.fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom))) { while 
(iter.hasNext()) { needOuterJoin = false; context().forward(key, joiner.apply(value, iter.next().value)); diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImpl.java b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImpl.java index 5c5cfb2bed70a..2cec5c61b41f9 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImpl.java @@ -16,6 +16,7 @@ */ package org.apache.kafka.streams.kstream.internals; +import java.time.Duration; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; @@ -181,8 +182,8 @@ private StoreBuilder> materialize(final MaterializedInte supplier = Stores.persistentWindowStore( materialized.storeName(), - retentionPeriod, - windows.size(), + Duration.ofMillis(retentionPeriod), + Duration.ofMillis(windows.size()), false ); diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java index 4990bcc36b419..d5d0b932935b9 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java @@ -86,6 +86,7 @@ public StateStore getStateStore(final String name) { } @Override + @Deprecated public Cancellable schedule(final long intervalMs, final PunctuationType type, final Punctuator callback) { @@ -93,7 +94,9 @@ public Cancellable schedule(final long intervalMs, } @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) throws IllegalArgumentException { + 
public Cancellable schedule(final Duration interval, + final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { return delegate.schedule(interval, type, callback); } diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java index e405bbd222a48..dda91db491896 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java @@ -93,6 +93,7 @@ public void commit() { * @throws UnsupportedOperationException on every invocation */ @Override + @Deprecated public Cancellable schedule(final long interval, final PunctuationType type, final Punctuator callback) { throw new UnsupportedOperationException("this should not happen: schedule() not supported in global processor context."); } diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java index 1e8c3526b31cb..67d306825d5df 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java @@ -151,6 +151,7 @@ public void commit() { } @Override + @Deprecated public Cancellable schedule(final long interval, final PunctuationType type, final Punctuator callback) { return task.schedule(interval, type, callback); } @@ -158,7 +159,7 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin @Override public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) throws IllegalArgumentException { - return schedule(interval, type, callback); + return 
task.schedule(interval.toMillis(), type, callback); } void setStreamTimeSupplier(final TimestampSupplier streamTimeSupplier) { diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java index 886d80308fd22..90370cdc6050c 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java @@ -190,6 +190,7 @@ public void commit() { * @throws UnsupportedOperationException on every invocation */ @Override + @Deprecated public Cancellable schedule(final long interval, final PunctuationType type, final Punctuator callback) { throw new UnsupportedOperationException("this should not happen: schedule() not supported in standby tasks."); } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java index 286ddb1d01b32..9ff1227eca68c 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java @@ -63,7 +63,7 @@ public interface ReadOnlyWindowStore { * | A | 25 | 35 | * +-------------------------------- * - * And we call {@code store.fetch("A", 10, 20)} then the results will contain the first + * And we call {@code store.fetch("A", Instant.ofEpochMilli(10), Duration.ofMillis(20))} then the results will contain the first * three windows from the table above, i.e., all those where 10 <= start time <= 20. *

    * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest @@ -102,7 +102,7 @@ public interface ReadOnlyWindowStore { * | A | 25 | 35 | * +-------------------------------- * - * And we call {@code store.fetch("A", 10, 20)} then the results will contain the first + * And we call {@code store.fetch("A", ofEpochMilli(10), ofMillis(20))} then the results will contain the first * three windows from the table above, i.e., all those where 10 <= start time <= 20. *

    * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest @@ -132,6 +132,7 @@ public interface ReadOnlyWindowStore { * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for any key. */ + @Deprecated KeyValueIterator, V> fetch(K from, K to, long timeFrom, long timeTo); /** diff --git a/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java b/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java index c07130e0fe56a..0b73d7df6f1f7 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java @@ -16,12 +16,14 @@ */ package org.apache.kafka.streams.state; +import java.time.Duration; +import java.time.Instant; import org.apache.kafka.streams.KeyValue; import java.io.Closeable; /** - * Iterator interface of {@link KeyValue} with key typed {@link Long} used for {@link WindowStore#fetch(Object, long, long)}. + * Iterator interface of {@link KeyValue} with key typed {@link Long} used for {@link WindowStore#fetch(Object, Instant, Duration)}. * * Users must call its {@code close} method explicitly upon completeness to release resources, * or use try-with-resources statement (available since JDK7) for this {@link Closeable} class. 
diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java index 682f00da2ec24..3992d4c6e3ffe 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java @@ -21,6 +21,7 @@ import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.kstream.internals.CacheFlushListener; import org.apache.kafka.streams.processor.ProcessorContext; @@ -181,6 +182,7 @@ public byte[] fetch(final Bytes key, final long timestamp) { } @Override + @Deprecated public synchronized WindowStoreIterator fetch(final Bytes key, final long timeFrom, final long timeTo) { return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @@ -191,6 +193,9 @@ public WindowStoreIterator fetch(final Bytes key, final Instant from, fi // if store is open outside as well. validateStoreOpen(); + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + final WindowStoreIterator underlyingIterator = underlying.fetch(key, from, duration); if (cache == null) { return underlyingIterator; @@ -228,6 +233,9 @@ public KeyValueIterator, byte[]> fetch(final Bytes from, final B // if store is open outside as well. 
validateStoreOpen(); + ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + final long timeFrom = fromTime.toEpochMilli(); final long timeTo = fromTime.toEpochMilli() + duration.toMillis(); @@ -281,6 +289,7 @@ public KeyValueIterator, byte[]> all() { } @Override + @Deprecated public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @@ -289,6 +298,9 @@ public KeyValueIterator, byte[]> fetchAll(final long timeFrom, f public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { validateStoreOpen(); + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + final long timeFrom = from.toEpochMilli(); final long timeTo = from.toEpochMilli() + duration.toMillis(); diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java index 463e5fb75512e..904d23ca5a1e8 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java @@ -54,6 +54,7 @@ public byte[] fetch(final Bytes key, final long timestamp) { } @Override + @Deprecated public WindowStoreIterator fetch(final Bytes key, final long from, final long to) { return fetch(key, Instant.ofEpochMilli(from), Duration.ofMillis(to - from)); } @@ -80,6 +81,7 @@ public KeyValueIterator, byte[]> all() { } @Override + @Deprecated public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } diff --git 
a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java index 360090d8fd91b..637819fdb12f2 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java @@ -18,6 +18,7 @@ import java.time.Duration; import java.time.Instant; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.state.KeyValueIterator; @@ -66,6 +67,7 @@ public V fetch(final K key, final long time) { } @Override + @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @@ -73,6 +75,8 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long @Override public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { Objects.requireNonNull(key, "key can't be null"); + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); final List> stores = provider.stores(storeName, windowStoreType); for (final ReadOnlyWindowStore windowStore : stores) { try { @@ -100,6 +104,8 @@ public KeyValueIterator, V> fetch(final K from, final K to, final lo public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { Objects.requireNonNull(from, "from can't be null"); Objects.requireNonNull(to, "to can't be null"); + ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, "duration"); final 
NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @Override public KeyValueIterator, V> apply(final ReadOnlyWindowStore store) { @@ -127,12 +133,16 @@ public KeyValueIterator, V> apply(final ReadOnlyWindowStore st } @Override + @Deprecated public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @Override public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @Override public KeyValueIterator, V> apply(final ReadOnlyWindowStore store) { diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java index 1ab57b6174f96..622ec4d96f47c 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java @@ -22,6 +22,7 @@ import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.common.utils.Time; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.errors.ProcessorStateException; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.ProcessorContext; @@ -141,12 +142,15 @@ public V fetch(final K key, final long timestamp) { } @Override + @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); 
} @Override public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); return new MeteredWindowStoreIterator<>(inner.fetch(keyBytes(key), from, duration), fetchTime, metrics, @@ -160,12 +164,15 @@ public KeyValueIterator, V> all() { } @Override + @Deprecated public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @Override public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); return new MeteredWindowedKeyValueIterator<>(inner.fetchAll(from, duration), fetchTime, metrics, @@ -180,6 +187,8 @@ public KeyValueIterator, V> fetch(final K from, final K to, final lo @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, "duration"); return new MeteredWindowedKeyValueIterator<>(inner.fetch(keyBytes(from), keyBytes(to), fromTime, duration), fetchTime, metrics, diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java index 554592888e8d1..4b5074205dc17 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java @@ -20,6 +20,7 @@ import java.time.Instant; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; 
+import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.StateStore; @@ -88,25 +89,33 @@ public V fetch(final K key, final long timestamp) { } @Override + @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { - final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(key)), timeFrom, timeTo); - return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).valuesIterator(); + return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @Override public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { - return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(key)), + from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).valuesIterator(); } @Override + @Deprecated public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { - final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(from)), Bytes.wrap(serdes.rawKey(to)), timeFrom, timeTo); - return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); + return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { - return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + 
ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(from)), + Bytes.wrap(serdes.rawKey(to)), fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); } @Override @@ -116,14 +125,18 @@ public KeyValueIterator, V> all() { } @Override + @Deprecated public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { - final KeyValueIterator bytesIterator = bytesStore.fetchAll(timeFrom, timeTo); - return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); + return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); } @Override public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { - return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + final KeyValueIterator bytesIterator = + bytesStore.fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); } private void maybeUpdateSeqnumForDups() { diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java index 0505cbc04197d..6b2687d7444cf 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java @@ -47,6 +47,8 @@ import java.util.Properties; import static 
java.time.Duration.ofMillis; +import static java.time.Duration.ofSeconds; +import static java.time.Instant.ofEpochMilli; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -137,7 +139,7 @@ public void shouldMaterializeCount() { try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props, 0L)) { processData(driver); final WindowStore windowStore = driver.getWindowStore("count-store"); - final List, Long>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", 0, 1000)); + final List, Long>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofSeconds(1))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), 2L), @@ -156,7 +158,7 @@ public void shouldMaterializeReduced() { try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props, 0L)) { processData(driver); final WindowStore windowStore = driver.getWindowStore("reduced"); - final List, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", 0, 1000)); + final List, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofSeconds(1))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), "1+2"), @@ -176,7 +178,7 @@ public void shouldMaterializeAggregated() { try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props, 0L)) { processData(driver); final WindowStore windowStore = driver.getWindowStore("aggregated"); - final List, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", 0, 1000)); + final List, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofSeconds(1))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), "0+1+2"), KeyValue.pair(new Windowed<>("1", new TimeWindow(500, 1000)), "0+3"), diff --git 
a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java index 12eb1f74421b0..b65068aba1fd2 100644 --- a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java +++ b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java @@ -64,6 +64,7 @@ import java.util.concurrent.TimeUnit; import static java.time.Duration.ofMillis; +import static java.time.Instant.ofEpochMilli; /** * Class that provides support for a series of benchmarks. It is usually driven by @@ -472,8 +473,8 @@ private void processStreamWithWindowStore(final String topic) { final StoreBuilder> storeBuilder = Stores.windowStoreBuilder( Stores.persistentWindowStore( "store", - Duration.ofMillis(AGGREGATE_WINDOW_SIZE * 3), - Duration.ofMillis(AGGREGATE_WINDOW_SIZE), + ofMillis(AGGREGATE_WINDOW_SIZE * 3), + ofMillis(AGGREGATE_WINDOW_SIZE), false, 60_000L ), @@ -500,7 +501,7 @@ public void init(final ProcessorContext context) { @Override public void process(final Integer key, final byte[] value) { final long timestamp = context().timestamp(); - final KeyValueIterator, byte[]> iter = store.fetch(key - 10, key + 10, timestamp - 1000L, timestamp + 1000L); + final KeyValueIterator, byte[]> iter = store.fetch(key - 10, key + 10, ofEpochMilli(timestamp - 1000L), ofMillis(1000L)); while (iter.hasNext()) { iter.next(); } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java index 1bdf03d712e92..4fe2e010a0359 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import 
org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.serialization.Serdes; @@ -50,6 +49,10 @@ import java.util.Properties; import java.util.UUID; +import static java.time.Duration.ofHours; +import static java.time.Duration.ofMillis; +import static java.time.Duration.ofMinutes; +import static java.time.Instant.ofEpochMilli; import static org.apache.kafka.common.utils.Utils.mkList; import static org.apache.kafka.streams.state.internals.ThreadCacheTest.memoryCacheEntrySize; import static org.apache.kafka.test.StreamsTestUtils.toList; @@ -102,7 +105,7 @@ public void shouldNotReturnDuplicatesInRanges() { final StreamsBuilder builder = new StreamsBuilder(); final StoreBuilder> storeBuilder = Stores.windowStoreBuilder( - Stores.persistentWindowStore("store-name", Duration.ofHours(1L), Duration.ofMinutes(1L), false), + Stores.persistentWindowStore("store-name", ofHours(1L), ofMinutes(1L), false), Serdes.String(), Serdes.String()) .withCachingEnabled(); @@ -198,8 +201,8 @@ public void shouldPutFetchFromCache() { assertThat(cachingStore.fetch(bytesKey("c"), 10), equalTo(null)); assertThat(cachingStore.fetch(bytesKey("a"), 0), equalTo(null)); - final WindowStoreIterator a = cachingStore.fetch(bytesKey("a"), 10, 10); - final WindowStoreIterator b = cachingStore.fetch(bytesKey("b"), 10, 10); + final WindowStoreIterator a = cachingStore.fetch(bytesKey("a"), ofEpochMilli(10), ofMillis(0)); + final WindowStoreIterator b = cachingStore.fetch(bytesKey("b"), ofEpochMilli(10), ofMillis(0)); verifyKeyValue(a.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(b.next(), DEFAULT_TIMESTAMP, "b"); assertFalse(a.hasNext()); @@ -225,7 +228,7 @@ public void shouldPutFetchRangeFromCache() { cachingStore.put(bytesKey("a"), bytesValue("a")); cachingStore.put(bytesKey("b"), bytesValue("b")); - final KeyValueIterator, byte[]> iterator = cachingStore.fetch(bytesKey("a"), bytesKey("b"), 10, 10); + final KeyValueIterator, byte[]> 
iterator = cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(10), ofMillis(0)); verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "a"); verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "b"); assertFalse(iterator.hasNext()); @@ -259,21 +262,21 @@ public void shouldFetchAllWithinTimestampRange() { cachingStore.put(bytesKey(array[i]), bytesValue(array[i])); } - final KeyValueIterator, byte[]> iterator = cachingStore.fetchAll(0, 7); + final KeyValueIterator, byte[]> iterator = cachingStore.fetchAll(ofEpochMilli(0), ofMillis(7)); for (int i = 0; i < array.length; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); } assertFalse(iterator.hasNext()); - final KeyValueIterator, byte[]> iterator1 = cachingStore.fetchAll(2, 4); + final KeyValueIterator, byte[]> iterator1 = cachingStore.fetchAll(ofEpochMilli(2), ofMillis(2)); for (int i = 2; i <= 4; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator1.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); } assertFalse(iterator1.hasNext()); - final KeyValueIterator, byte[]> iterator2 = cachingStore.fetchAll(5, 7); + final KeyValueIterator, byte[]> iterator2 = cachingStore.fetchAll(ofEpochMilli(5), ofMillis(2)); for (int i = 5; i <= 7; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator2.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); @@ -337,7 +340,7 @@ public void shouldTakeValueFromCacheIfSameTimestampFlushedToRocks() { cachingStore.flush(); cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP); + final WindowStoreIterator fetch = 
cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(0)); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "b"); assertFalse(fetch.hasNext()); } @@ -347,7 +350,7 @@ public void shouldIterateAcrossWindows() { cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP); cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(WINDOW_SIZE)); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b"); assertFalse(fetch.hasNext()); @@ -358,7 +361,7 @@ public void shouldIterateCacheAndStore() { final Bytes key = Bytes.wrap("1".getBytes()); underlying.put(WindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes()); cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(WINDOW_SIZE)); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b"); assertFalse(fetch.hasNext()); @@ -371,7 +374,7 @@ public void shouldIterateCacheAndStoreKeyRange() { cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); final KeyValueIterator, byte[]> fetchRange = - cachingStore.fetch(key, bytesKey("2"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); + cachingStore.fetch(key, bytesKey("2"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(WINDOW_SIZE)); verifyWindowedKeyValue(fetchRange.next(), new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "a"); 
verifyWindowedKeyValue(fetchRange.next(), new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP + WINDOW_SIZE, DEFAULT_TIMESTAMP + WINDOW_SIZE + WINDOW_SIZE)), "b"); assertFalse(fetchRange.hasNext()); @@ -388,13 +391,13 @@ public void shouldClearNamespaceCacheOnClose() { @Test(expected = InvalidStateStoreException.class) public void shouldThrowIfTryingToFetchFromClosedCachingStore() { cachingStore.close(); - cachingStore.fetch(bytesKey("a"), 0, 10); + cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofMillis(10)); } @Test(expected = InvalidStateStoreException.class) public void shouldThrowIfTryingToFetchRangeFromClosedCachingStore() { cachingStore.close(); - cachingStore.fetch(bytesKey("a"), bytesKey("b"), 0, 10); + cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(0), ofMillis(10)); } @Test(expected = InvalidStateStoreException.class) @@ -416,7 +419,7 @@ public void shouldFetchAndIterateOverExactKeys() { KeyValue.pair(1L, bytesValue("0003")), KeyValue.pair(SEGMENT_INTERVAL, bytesValue("0005")) ); - final List> actual = toList(cachingStore.fetch(bytesKey("a"), 0, Long.MAX_VALUE)); + final List> actual = toList(cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))); verifyKeyValueList(expected, actual); } @@ -434,12 +437,12 @@ public void shouldFetchAndIterateOverKeyRange() { windowedPair("a", "0003", 1), windowedPair("a", "0005", SEGMENT_INTERVAL) ), - toList(cachingStore.fetch(bytesKey("a"), bytesKey("a"), 0, Long.MAX_VALUE)) + toList(cachingStore.fetch(bytesKey("a"), bytesKey("a"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))) ); verifyKeyValueList( mkList(windowedPair("aa", "0002", 0), windowedPair("aa", "0004", 1)), - toList(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), 0, Long.MAX_VALUE)) + toList(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))) ); verifyKeyValueList( @@ -450,7 +453,7 @@ public void shouldFetchAndIterateOverKeyRange() { windowedPair("aa", "0004", 1), 
windowedPair("a", "0005", SEGMENT_INTERVAL) ), - toList(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), 0, Long.MAX_VALUE)) + toList(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))) ); } @@ -466,17 +469,17 @@ public void shouldNotThrowNullPointerExceptionOnPutNullValue() { @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnFetchNullKey() { - cachingStore.fetch(null, 1L, 2L); + cachingStore.fetch(null, ofEpochMilli(1L), ofMillis(1L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullFromKey() { - cachingStore.fetch(null, bytesKey("anyTo"), 1L, 2L); + cachingStore.fetch(null, bytesKey("anyTo"), ofEpochMilli(1L), ofMillis(1L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullToKey() { - cachingStore.fetch(bytesKey("anyFrom"), null, 1L, 2L); + cachingStore.fetch(bytesKey("anyFrom"), null, ofEpochMilli(1L), ofMillis(1L)); } private static KeyValue, byte[]> windowedPair(final String key, final String value, final long timestamp) { diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java index ec81b93ae502e..65f49a3aac7d0 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java @@ -36,6 +36,8 @@ import java.util.HashMap; import java.util.Map; +import static java.time.Duration.ofMillis; +import static java.time.Instant.ofEpochMilli; import static org.junit.Assert.assertArrayEquals; @RunWith(EasyMockRunner.class) @@ -97,21 +99,21 @@ public void shouldLogPuts() { @Test public void shouldDelegateToUnderlyingStoreWhenFetching() { - 
EasyMock.expect(inner.fetch(bytesKey, 0, 10)).andReturn(KeyValueIterators.emptyWindowStoreIterator()); + EasyMock.expect(inner.fetch(bytesKey, ofEpochMilli(0), ofMillis(10))).andReturn(KeyValueIterators.emptyWindowStoreIterator()); init(); - store.fetch(bytesKey, 0, 10); + store.fetch(bytesKey, ofEpochMilli(0), ofMillis(10)); EasyMock.verify(inner); } @Test public void shouldDelegateToUnderlyingStoreWhenFetchingRange() { - EasyMock.expect(inner.fetch(bytesKey, bytesKey, 0, 1)).andReturn(KeyValueIterators., byte[]>emptyIterator()); + EasyMock.expect(inner.fetch(bytesKey, bytesKey, ofEpochMilli(0), ofMillis(1))).andReturn(KeyValueIterators., byte[]>emptyIterator()); init(); - store.fetch(bytesKey, bytesKey, 0, 1); + store.fetch(bytesKey, bytesKey, ofEpochMilli(0), ofMillis(1)); EasyMock.verify(inner); } @@ -131,4 +133,4 @@ public void shouldRetainDuplicatesWhenSet() { EasyMock.verify(inner); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java index 66f318aa10082..1692c9d74ed17 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java @@ -35,6 +35,8 @@ import java.util.List; import java.util.NoSuchElementException; +import static java.time.Duration.ofMillis; +import static java.time.Instant.ofEpochMilli; import static java.util.Arrays.asList; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsEqual.equalTo; @@ -77,7 +79,7 @@ public void shouldFetchValuesFromWindowStore() { underlyingWindowStore.put("my-key", "my-value", 0L); underlyingWindowStore.put("my-key", "my-later-value", 10L); - final WindowStoreIterator iterator = windowStore.fetch("my-key", 0L, 25L); + final 
WindowStoreIterator iterator = windowStore.fetch("my-key", ofEpochMilli(0L), ofMillis(25L)); final List> results = StreamsTestUtils.toList(iterator); assertEquals(asList(new KeyValue<>(0L, "my-value"), @@ -87,7 +89,7 @@ public void shouldFetchValuesFromWindowStore() { @Test public void shouldReturnEmptyIteratorIfNoData() { - final WindowStoreIterator iterator = windowStore.fetch("my-key", 0L, 25L); + final WindowStoreIterator iterator = windowStore.fetch("my-key", ofEpochMilli(0L), ofMillis(25L)); assertEquals(false, iterator.hasNext()); } @@ -100,10 +102,10 @@ public void shouldFindValueForKeyWhenMultiStores() { underlyingWindowStore.put("key-one", "value-one", 0L); secondUnderlying.put("key-two", "value-two", 10L); - final List> keyOneResults = StreamsTestUtils.toList(windowStore.fetch("key-one", 0L, - 1L)); - final List> keyTwoResults = StreamsTestUtils.toList(windowStore.fetch("key-two", 10L, - 11L)); + final List> keyOneResults = StreamsTestUtils.toList(windowStore.fetch("key-one", ofEpochMilli(0L), + ofMillis(1L))); + final List> keyTwoResults = StreamsTestUtils.toList(windowStore.fetch("key-two", ofEpochMilli(10L), + ofMillis(1L))); assertEquals(Collections.singletonList(KeyValue.pair(0L, "value-one")), keyOneResults); assertEquals(Collections.singletonList(KeyValue.pair(10L, "value-two")), keyTwoResults); @@ -114,14 +116,14 @@ public void shouldNotGetValuesFromOtherStores() { otherUnderlyingStore.put("some-key", "some-value", 0L); underlyingWindowStore.put("some-key", "my-value", 1L); - final List> results = StreamsTestUtils.toList(windowStore.fetch("some-key", 0L, 2L)); + final List> results = StreamsTestUtils.toList(windowStore.fetch("some-key", ofEpochMilli(0L), ofMillis(2L))); assertEquals(Collections.singletonList(new KeyValue<>(1L, "my-value")), results); } @Test(expected = InvalidStateStoreException.class) public void shouldThrowInvalidStateStoreExceptionOnRebalance() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(new 
StateStoreProviderStub(true), QueryableStoreTypes.windowStore(), "foo"); - store.fetch("key", 1, 10); + store.fetch("key", ofEpochMilli(1), ofMillis(10)); } @Test @@ -130,7 +132,7 @@ public void shouldThrowInvalidStateStoreExceptionIfFetchThrows() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(stubProviderOne, QueryableStoreTypes.windowStore(), "window-store"); try { - store.fetch("key", 1, 10); + store.fetch("key", ofEpochMilli(1), ofMillis(10)); Assert.fail("InvalidStateStoreException was expected"); } catch (final InvalidStateStoreException e) { Assert.assertEquals("State store is not available anymore and may have been migrated to another instance; " + @@ -142,7 +144,7 @@ public void shouldThrowInvalidStateStoreExceptionIfFetchThrows() { public void emptyIteratorAlwaysReturnsFalse() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); - final WindowStoreIterator windowStoreIterator = store.fetch("key", 1, 10); + final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofMillis(10)); Assert.assertFalse(windowStoreIterator.hasNext()); } @@ -151,7 +153,7 @@ public void emptyIteratorAlwaysReturnsFalse() { public void emptyIteratorPeekNextKeyShouldThrowNoSuchElementException() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); - final WindowStoreIterator windowStoreIterator = store.fetch("key", 1, 10); + final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofMillis(10)); windowStoreIteratorException.expect(NoSuchElementException.class); windowStoreIterator.peekNextKey(); @@ -161,7 +163,7 @@ public void emptyIteratorPeekNextKeyShouldThrowNoSuchElementException() { public void emptyIteratorNextShouldThrowNoSuchElementException() { final CompositeReadOnlyWindowStore store 
= new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); - final WindowStoreIterator windowStoreIterator = store.fetch("key", 1, 10); + final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofMillis(10)); windowStoreIteratorException.expect(NoSuchElementException.class); windowStoreIterator.next(); @@ -173,7 +175,7 @@ public void shouldFetchKeyRangeAcrossStores() { stubProviderTwo.addStore(storeName, secondUnderlying); underlyingWindowStore.put("a", "a", 0L); secondUnderlying.put("b", "b", 10L); - final List, String>> results = StreamsTestUtils.toList(windowStore.fetch("a", "b", 0, 10)); + final List, String>> results = StreamsTestUtils.toList(windowStore.fetch("a", "b", ofEpochMilli(0), ofMillis(10))); assertThat(results, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("a", new TimeWindow(0, WINDOW_SIZE)), "a"), KeyValue.pair(new Windowed<>("b", new TimeWindow(10, 10 + WINDOW_SIZE)), "b")))); @@ -212,7 +214,7 @@ public void shouldFetchAllAcrossStores() { stubProviderTwo.addStore(storeName, secondUnderlying); underlyingWindowStore.put("a", "a", 0L); secondUnderlying.put("b", "b", 10L); - final List, String>> results = StreamsTestUtils.toList(windowStore.fetchAll(0, 10)); + final List, String>> results = StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(0), ofMillis(10))); assertThat(results, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("a", new TimeWindow(0, WINDOW_SIZE)), "a"), KeyValue.pair(new Windowed<>("b", new TimeWindow(10, 10 + WINDOW_SIZE)), "b")))); @@ -220,17 +222,17 @@ public void shouldFetchAllAcrossStores() { @Test(expected = NullPointerException.class) public void shouldThrowNPEIfKeyIsNull() { - windowStore.fetch(null, 0, 0); + windowStore.fetch(null, ofEpochMilli(0), ofMillis(0)); } @Test(expected = NullPointerException.class) public void shouldThrowNPEIfFromKeyIsNull() { - windowStore.fetch(null, "a", 0, 0); + windowStore.fetch(null, "a", 
ofEpochMilli(0), ofMillis(0)); } @Test(expected = NullPointerException.class) public void shouldThrowNPEIfToKeyIsNull() { - windowStore.fetch("a", null, 0, 0); + windowStore.fetch("a", null, ofEpochMilli(0), ofMillis(0)); } } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java index 1ac6d94cfeec0..26c039fad44cc 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java @@ -40,6 +40,8 @@ import java.util.Map; +import static java.time.Duration.ofMillis; +import static java.time.Instant.ofEpochMilli; import static java.util.Collections.singletonMap; import static org.apache.kafka.test.StreamsTestUtils.getMetricByNameFilterByTags; import static org.junit.Assert.assertEquals; @@ -110,11 +112,11 @@ public void shouldRecordPutLatency() { @Test public void shouldRecordFetchLatency() { - EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), 1, 1)).andReturn(KeyValueIterators.emptyWindowStoreIterator()); + EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), ofEpochMilli(1), ofMillis(0))).andReturn(KeyValueIterators.emptyWindowStoreIterator()); EasyMock.replay(innerStoreMock); store.init(context, store); - store.fetch("a", 1, 1).close(); // recorded on close; + store.fetch("a", ofEpochMilli(1), ofMillis(0)).close(); // recorded on close; final Map metrics = context.metrics().metrics(); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "all")).metricValue()); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "mocked-store")).metricValue()); @@ -123,11 +125,11 @@ public void shouldRecordFetchLatency() { @Test public void 
shouldRecordFetchRangeLatency() { - EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), Bytes.wrap("b".getBytes()), 1, 1)).andReturn(KeyValueIterators., byte[]>emptyIterator()); + EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), Bytes.wrap("b".getBytes()), ofEpochMilli(1), ofMillis(0))).andReturn(KeyValueIterators., byte[]>emptyIterator()); EasyMock.replay(innerStoreMock); store.init(context, store); - store.fetch("a", "b", 1, 1).close(); // recorded on close; + store.fetch("a", "b", ofEpochMilli(1), ofMillis(0)).close(); // recorded on close; final Map metrics = context.metrics().metrics(); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "all")).metricValue()); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "mocked-store")).metricValue()); @@ -171,4 +173,4 @@ public void shouldNotExceptionIfFetchReturnsNull() { assertNull(store.fetch("a", 0)); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java index 053dfb0d660e0..22d855741b475 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import org.apache.kafka.clients.producer.MockProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.common.header.Headers; @@ -55,6 +54,10 @@ import java.util.Map; import java.util.Set; +import static java.time.Duration.ofMillis; +import static java.time.Duration.ofMinutes; +import static java.time.Duration.ofSeconds; +import static java.time.Instant.ofEpochMilli; 
import static java.util.Objects.requireNonNull; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -108,7 +111,7 @@ public void send(final String topic, private WindowStore createWindowStore(final ProcessorContext context, final boolean retainDuplicates) { final WindowStore store = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), retainDuplicates, segmentInterval), + Stores.persistentWindowStore(windowName, ofMillis(retentionPeriod), ofMillis(windowSize), retainDuplicates, segmentInterval), Serdes.Integer(), Serdes.String()).build(); @@ -143,7 +146,7 @@ public void shouldOnlyIterateOpenSegments() { setCurrentTime(currentTime); windowStore.put(1, "three"); - final WindowStoreIterator iterator = windowStore.fetch(1, 0, currentTime); + final WindowStoreIterator iterator = windowStore.fetch(1, ofEpochMilli(0), ofMillis(currentTime)); // roll to the next segment that will close the first currentTime = currentTime + segmentInterval; @@ -178,12 +181,12 @@ public void testRangeAndSinglePointFetch() { assertEquals("four", windowStore.fetch(4, startTime + 4L)); assertEquals("five", windowStore.fetch(5, startTime + 5L)); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime + 0L - windowSize, startTime + 0L + windowSize))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, startTime + 1L - windowSize, startTime + 1L + windowSize))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L - windowSize, startTime + 2L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + 3L - windowSize, startTime + 3L + windowSize))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + 4L - windowSize, startTime + 4L + windowSize))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + 5L - windowSize, startTime + 
5L + windowSize))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize)))); putSecondBatch(windowStore, startTime, context); @@ -194,21 +197,21 @@ public void testRangeAndSinglePointFetch() { assertEquals("two+5", windowStore.fetch(2, startTime + 7L)); assertEquals("two+6", windowStore.fetch(2, startTime + 8L)); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime - 2L - windowSize, startTime - 2L + windowSize))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime - 1L - windowSize, startTime - 1L + windowSize))); - assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, startTime + 1L - windowSize, startTime + 1L + windowSize))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, startTime + 2L - windowSize, startTime + 2L + windowSize))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, startTime + 3L - windowSize, startTime + 3L + windowSize))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, startTime + 4L - 
windowSize, startTime + 4L + windowSize))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 5L - windowSize, startTime + 5L + windowSize))); - assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 6L - windowSize, startTime + 6L + windowSize))); - assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 7L - windowSize, startTime + 7L + windowSize))); - assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 8L - windowSize, startTime + 8L + windowSize))); - assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 9L - windowSize, startTime + 9L + windowSize))); - assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, startTime + 10L - windowSize, startTime + 10L + windowSize))); - assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, startTime + 11L - windowSize, startTime + 11L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 12L - windowSize, startTime + 12L + windowSize))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 2L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize * 2)))); + 
assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L - windowSize), ofMillis(windowSize * 2)))); // Flush the store and verify all current entries were properly flushed ... 
windowStore.flush(); @@ -258,17 +261,17 @@ public void shouldFetchAllInTimeRange() { assertEquals( Utils.mkList(one, two, four), - StreamsTestUtils.toList(windowStore.fetchAll(startTime + 1, startTime + 4)) + StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 1), ofMillis(3))) ); assertEquals( Utils.mkList(zero, one, two), - StreamsTestUtils.toList(windowStore.fetchAll(startTime + 0, startTime + 3)) + StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 0), ofMillis(3))) ); assertEquals( Utils.mkList(one, two, four, five), - StreamsTestUtils.toList(windowStore.fetchAll(startTime + 1, startTime + 5)) + StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 1), ofMillis(4))) ); } @@ -287,36 +290,36 @@ public void testFetchRange() { assertEquals( Utils.mkList(zero, one), - StreamsTestUtils.toList(windowStore.fetch(0, 1, startTime + 0L - windowSize, startTime + 0L + windowSize)) + StreamsTestUtils.toList(windowStore.fetch(0, 1, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) ); assertEquals( Utils.mkList(one), - StreamsTestUtils.toList(windowStore.fetch(1, 1, startTime + 0L - windowSize, startTime + 0L + windowSize)) + StreamsTestUtils.toList(windowStore.fetch(1, 1, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) ); assertEquals( Utils.mkList(one, two), - StreamsTestUtils.toList(windowStore.fetch(1, 3, startTime + 0L - windowSize, startTime + 0L + windowSize)) + StreamsTestUtils.toList(windowStore.fetch(1, 3, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) ); assertEquals( Utils.mkList(zero, one, two), - StreamsTestUtils.toList(windowStore.fetch(0, 5, startTime + 0L - windowSize, startTime + 0L + windowSize)) + StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) ); assertEquals( Utils.mkList(zero, one, two, four, five), - StreamsTestUtils.toList(windowStore.fetch(0, 5, startTime + 0L - 
windowSize, startTime + 0L + windowSize + 5L)) + StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize + 5L))) ); assertEquals( Utils.mkList(two, four, five), - StreamsTestUtils.toList(windowStore.fetch(0, 5, startTime + 2L, startTime + 0L + windowSize + 5L)) + StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 2L), ofMillis(windowSize + 5L))) ); assertEquals( Utils.mkList(), - StreamsTestUtils.toList(windowStore.fetch(4, 5, startTime + 2L, startTime + windowSize)) + StreamsTestUtils.toList(windowStore.fetch(4, 5, ofEpochMilli(startTime + 2L), ofMillis(windowSize - 2L))) ); assertEquals( Utils.mkList(), - StreamsTestUtils.toList(windowStore.fetch(0, 3, startTime + 3L, startTime + windowSize + 5)) + StreamsTestUtils.toList(windowStore.fetch(0, 3, ofEpochMilli(startTime + 3L), ofMillis(windowSize + 2L))) ); } @@ -327,30 +330,30 @@ public void testPutAndFetchBefore() { putFirstBatch(windowStore, startTime, context); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime + 0L - windowSize, startTime + 0L))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, startTime + 1L - windowSize, startTime + 1L))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L - windowSize, startTime + 2L))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + 3L - windowSize, startTime + 3L))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + 4L - windowSize, startTime + 4L))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + 5L - windowSize, startTime + 5L))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two"), 
toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize)))); putSecondBatch(windowStore, startTime, context); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime - 1L - windowSize, startTime - 1L))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 0L - windowSize, startTime + 0L))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 1L - windowSize, startTime + 1L))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L - windowSize, startTime + 2L))); - assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, startTime + 3L - windowSize, startTime + 3L))); - assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, startTime + 4L - windowSize, startTime + 4L))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, startTime + 5L - windowSize, startTime + 5L))); - assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, startTime + 6L - windowSize, startTime + 6L))); - assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, startTime + 7L - windowSize, startTime + 7L))); - assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 8L - windowSize, startTime + 8L))); - assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 9L - windowSize, startTime + 9L))); - assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, 
startTime + 10L - windowSize, startTime + 10L))); - assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, startTime + 11L - windowSize, startTime + 11L))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 12L - windowSize, startTime + 12L))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 13L - windowSize, startTime + 13L))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L - windowSize), ofMillis(windowSize)))); + 
assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 13L - windowSize), ofMillis(windowSize)))); // Flush the store and verify all current entries were properly flushed ... windowStore.flush(); @@ -373,30 +376,30 @@ public void testPutAndFetchAfter() { putFirstBatch(windowStore, startTime, context); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime + 0L, startTime + 0L + windowSize))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, startTime + 1L, startTime + 1L + windowSize))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L, startTime + 2L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + 3L, startTime + 3L + windowSize))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + 4L, startTime + 4L + windowSize))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + 5L, startTime + 5L + windowSize))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L), ofMillis(windowSize)))); 
+ assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L), ofMillis(windowSize)))); putSecondBatch(windowStore, startTime, context); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime - 2L, startTime - 2L + windowSize))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime - 1L, startTime - 1L + windowSize))); - assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, startTime, startTime + windowSize))); - assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, startTime + 1L, startTime + 1L + windowSize))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, startTime + 2L, startTime + 2L + windowSize))); - assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, startTime + 3L, startTime + 3L + windowSize))); - assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, startTime + 4L, startTime + 4L + windowSize))); - assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 5L, startTime + 5L + windowSize))); - assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 6L, startTime + 6L + windowSize))); - assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, startTime + 7L, startTime + 7L + windowSize))); - assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, startTime + 8L, startTime + 8L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 9L, startTime + 9L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 10L, startTime + 10L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 11L, startTime + 11L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 12L, startTime + 12L + windowSize))); + 
assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 2L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L), ofMillis(windowSize)))); // Flush the store and verify all current entries were properly 
flushed ... windowStore.flush(); @@ -420,17 +423,17 @@ public void testPutSameKeyTimestamp() { setCurrentTime(startTime); windowStore.put(0, "zero"); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); windowStore.put(0, "zero"); windowStore.put(0, "zero+"); windowStore.put(0, "zero++"); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, startTime + 1L - windowSize, startTime + 1L + windowSize))); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, startTime + 2L - windowSize, startTime + 2L + windowSize))); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, startTime + 3L - windowSize, startTime + 3L + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, startTime + 4L - windowSize, startTime + 4L + windowSize))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize * 2)))); // Flush 
the store and verify all current entries were properly flushed ... windowStore.flush(); @@ -488,12 +491,12 @@ public void testRolling() { segmentDirs(baseDir) ); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, startTime + increment - windowSize, startTime + increment + windowSize))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + increment * 2 - windowSize, startTime + increment * 2 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + increment * 3 - windowSize, startTime + increment * 3 + windowSize))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + increment * 4 - windowSize, startTime + increment * 4 + windowSize))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + increment * 5 - windowSize, startTime + increment * 5 + windowSize))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); setCurrentTime(startTime + increment * 6); windowStore.put(6, "six"); @@ -507,13 +510,13 @@ public void testRolling() { ); - 
assertEquals(Utils.mkList(), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, startTime + increment - windowSize, startTime + increment + windowSize))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + increment * 2 - windowSize, startTime + increment * 2 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + increment * 3 - windowSize, startTime + increment * 3 + windowSize))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + increment * 4 - windowSize, startTime + increment * 4 + windowSize))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + increment * 5 - windowSize, startTime + increment * 5 + windowSize))); - assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, startTime + increment * 6 - windowSize, startTime + increment * 6 + windowSize))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); setCurrentTime(startTime + 
increment * 7); @@ -527,14 +530,14 @@ public void testRolling() { segmentDirs(baseDir) ); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, startTime + increment - windowSize, startTime + increment + windowSize))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + increment * 2 - windowSize, startTime + increment * 2 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + increment * 3 - windowSize, startTime + increment * 3 + windowSize))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + increment * 4 - windowSize, startTime + increment * 4 + windowSize))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + increment * 5 - windowSize, startTime + increment * 5 + windowSize))); - assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, startTime + increment * 6 - windowSize, startTime + increment * 6 + windowSize))); - assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, startTime + increment * 7 - windowSize, startTime + increment * 7 + windowSize))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, 
ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); setCurrentTime(startTime + increment * 8); windowStore.put(8, "eight"); @@ -548,15 +551,15 @@ public void testRolling() { ); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, startTime + increment - windowSize, startTime + increment + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + increment * 2 - windowSize, startTime + increment * 2 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + increment * 3 - windowSize, startTime + increment * 3 + windowSize))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + increment * 4 - windowSize, startTime + increment * 4 + windowSize))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + increment * 5 - windowSize, startTime + increment * 5 + windowSize))); - assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, startTime + increment * 6 - windowSize, startTime + increment * 6 + windowSize))); - assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, startTime + increment * 7 - windowSize, startTime + increment * 7 + windowSize))); - assertEquals(Utils.mkList("eight"), toList(windowStore.fetch(8, startTime + increment * 8 - windowSize, startTime + increment * 8 + windowSize))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - 
windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("eight"), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofMillis(windowSize * 2)))); // check segment directories windowStore.flush(); @@ -604,27 +607,27 @@ public void testRestore() throws IOException { Utils.delete(baseDir); windowStore = createWindowStore(context, false); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, startTime + increment - windowSize, startTime + increment + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + increment * 2 - windowSize, startTime + increment * 2 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + increment * 3 - windowSize, startTime + increment * 3 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(4, startTime + increment * 4 - windowSize, startTime + increment * 4 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(5, startTime + 
increment * 5 - windowSize, startTime + increment * 5 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(6, startTime + increment * 6 - windowSize, startTime + increment * 6 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(7, startTime + increment * 7 - windowSize, startTime + increment * 7 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(8, startTime + increment * 8 - windowSize, startTime + increment * 8 + windowSize))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofMillis(windowSize * 2)))); context.restore(windowName, changeLog); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, startTime - windowSize, startTime + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, startTime + increment - windowSize, 
startTime + increment + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + increment * 2 - windowSize, startTime + increment * 2 + windowSize))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + increment * 3 - windowSize, startTime + increment * 3 + windowSize))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + increment * 4 - windowSize, startTime + increment * 4 + windowSize))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + increment * 5 - windowSize, startTime + increment * 5 + windowSize))); - assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, startTime + increment * 6 - windowSize, startTime + increment * 6 + windowSize))); - assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, startTime + increment * 7 - windowSize, startTime + increment * 7 + windowSize))); - assertEquals(Utils.mkList("eight"), toList(windowStore.fetch(8, startTime + increment * 8 - windowSize, startTime + increment * 8 + windowSize))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + 
increment * 6 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("eight"), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofMillis(windowSize * 2)))); // check segment directories windowStore.flush(); @@ -663,7 +666,7 @@ public void testSegmentMaintenance() { WindowStoreIterator iter; int fetchedCount; - iter = windowStore.fetch(0, 0L, segmentInterval * 4); + iter = windowStore.fetch(0, ofEpochMilli(0L), ofMillis(segmentInterval * 4)); fetchedCount = 0; while (iter.hasNext()) { iter.next(); @@ -679,7 +682,7 @@ public void testSegmentMaintenance() { setCurrentTime(segmentInterval * 3); windowStore.put(0, "v"); - iter = windowStore.fetch(0, 0L, segmentInterval * 4); + iter = windowStore.fetch(0, ofEpochMilli(0L), ofMillis(segmentInterval * 4)); fetchedCount = 0; while (iter.hasNext()) { iter.next(); @@ -695,7 +698,7 @@ public void testSegmentMaintenance() { setCurrentTime(segmentInterval * 5); windowStore.put(0, "v"); - iter = windowStore.fetch(0, segmentInterval * 4, segmentInterval * 10); + iter = windowStore.fetch(0, ofEpochMilli(segmentInterval * 4), ofMillis(segmentInterval * 6)); fetchedCount = 0; while (iter.hasNext()) { iter.next(); @@ -737,7 +740,7 @@ public void testInitialLoading() { assertEquals(expected, actual); - try (final WindowStoreIterator iter = windowStore.fetch(0, 0L, 1000000L)) { + try (final WindowStoreIterator iter = windowStore.fetch(0, ofEpochMilli(0L), ofSeconds(1000L))) { while (iter.hasNext()) { iter.next(); } @@ -757,7 +760,7 @@ public void shouldCloseOpenIteratorsWhenStoreIsClosedAndNotThrowInvalidStateStor windowStore.put(1, "two", 2L); windowStore.put(1, "three", 3L); - final WindowStoreIterator iterator = windowStore.fetch(1, 1L, 3L); + final WindowStoreIterator iterator = windowStore.fetch(1, ofEpochMilli(1L), ofMillis(3L)); 
assertTrue(iterator.hasNext()); windowStore.close(); @@ -771,7 +774,7 @@ public void shouldFetchAndIterateOverExactKeys() { final long retentionPeriod = 0x7a00000000000000L; final WindowStore windowStore = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), true), + Stores.persistentWindowStore(windowName, ofMillis(retentionPeriod), ofMillis(windowSize), true), Serdes.String(), Serdes.String()).build(); @@ -785,16 +788,16 @@ public void shouldFetchAndIterateOverExactKeys() { final List expected = Utils.mkList("0001", "0003", "0005"); - assertThat(toList(windowStore.fetch("a", 0, Long.MAX_VALUE)), equalTo(expected)); + assertThat(toList(windowStore.fetch("a", ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expected)); - List, String>> list = StreamsTestUtils.toList(windowStore.fetch("a", "a", 0, Long.MAX_VALUE)); + List, String>> list = StreamsTestUtils.toList(windowStore.fetch("a", "a", ofEpochMilli(0), ofMillis(Long.MAX_VALUE))); assertThat(list, equalTo(Utils.mkList( windowedPair("a", "0001", 0, windowSize), windowedPair("a", "0003", 1, windowSize), windowedPair("a", "0005", 0x7a00000000000000L - 1, windowSize) ))); - list = StreamsTestUtils.toList(windowStore.fetch("aa", "aa", 0, Long.MAX_VALUE)); + list = StreamsTestUtils.toList(windowStore.fetch("aa", "aa", ofEpochMilli(0), ofMillis(Long.MAX_VALUE))); assertThat(list, equalTo(Utils.mkList( windowedPair("aa", "0002", 0, windowSize), windowedPair("aa", "0004", 1, windowSize) @@ -816,19 +819,19 @@ public void shouldNotThrowNullPointerExceptionOnPutNullValue() { @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnGetNullKey() { windowStore = createWindowStore(context, false); - windowStore.fetch(null, 1L, 2L); + windowStore.fetch(null, ofEpochMilli(1L), ofMillis(2L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullFromKey() { 
windowStore = createWindowStore(context, false); - windowStore.fetch(null, 2, 1L, 2L); + windowStore.fetch(null, 2, ofEpochMilli(1L), ofMillis(1L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullToKey() { windowStore = createWindowStore(context, false); - windowStore.fetch(1, null, 1L, 2L); + windowStore.fetch(1, null, ofEpochMilli(1L), ofMillis(1L)); } @Test @@ -847,7 +850,7 @@ public void shouldNoNullPointerWhenSerdeDoesNotHandleNull() { @Test public void shouldFetchAndIterateOverExactBinaryKeys() { final WindowStore windowStore = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, Duration.ofMinutes(1L), Duration.ofMinutes(1L), true), + Stores.persistentWindowStore(windowName, ofMinutes(1L), ofMinutes(1L), true), Serdes.Bytes(), Serdes.String()).build(); @@ -867,11 +870,11 @@ public void shouldFetchAndIterateOverExactBinaryKeys() { windowStore.put(key3, "9", 59999); final List expectedKey1 = Utils.mkList("1", "4", "7"); - assertThat(toList(windowStore.fetch(key1, 0, Long.MAX_VALUE)), equalTo(expectedKey1)); + assertThat(toList(windowStore.fetch(key1, ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expectedKey1)); final List expectedKey2 = Utils.mkList("2", "5", "8"); - assertThat(toList(windowStore.fetch(key2, 0, Long.MAX_VALUE)), equalTo(expectedKey2)); + assertThat(toList(windowStore.fetch(key2, ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expectedKey2)); final List expectedKey3 = Utils.mkList("3", "6", "9"); - assertThat(toList(windowStore.fetch(key3, 0, Long.MAX_VALUE)), equalTo(expectedKey3)); + assertThat(toList(windowStore.fetch(key3, ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expectedKey3)); } private void putFirstBatch(final WindowStore store, diff --git a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java index 
b26e3cd14a3a7..cd9671043d2a6 100644 --- a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java +++ b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java @@ -378,6 +378,7 @@ public StateStore getStateStore(final String name) { } @Override + @Deprecated public Cancellable schedule(final long intervalMs, final PunctuationType type, final Punctuator callback) { return schedule(Duration.ofMillis(intervalMs), type, callback); } From a6d47d4f013285a81e435c832a57c28da6f5e1a9 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Tue, 25 Sep 2018 20:05:33 +0300 Subject: [PATCH 05/14] KAFKA-7277: All new method executed via deprecated. --- .../org/apache/kafka/streams/ApiUtils.java | 73 ++++----------- .../apache/kafka/streams/KafkaStreams.java | 4 +- .../kafka/streams/kstream/JoinWindows.java | 20 ++-- .../kafka/streams/kstream/SessionWindows.java | 15 ++- .../kafka/streams/kstream/TimeWindows.java | 30 +++--- .../streams/kstream/UnlimitedWindows.java | 7 +- .../ForwardingDisabledProcessorContext.java | 2 +- .../internals/ProcessorContextImpl.java | 4 +- .../apache/kafka/streams/state/Stores.java | 41 +++++---- .../state/internals/CachingWindowStore.java | 92 ++++++++----------- .../ChangeLoggingWindowBytesStore.java | 19 ++-- .../CompositeReadOnlyWindowStore.java | 61 ++++++------ .../state/internals/MeteredWindowStore.java | 37 ++++---- .../state/internals/RocksDBWindowStore.java | 21 ++--- .../kafka/streams/state/NoOpWindowStore.java | 3 + .../ChangeLoggingWindowBytesStoreTest.java | 4 +- .../internals/MeteredWindowStoreTest.java | 4 +- .../internals/ReadOnlyWindowStoreStub.java | 42 ++++----- .../processor/MockProcessorContext.java | 16 ++-- 19 files changed, 239 insertions(+), 256 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java index f737a435d0105..c16f0e72de206 100644 --- 
a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java +++ b/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java @@ -27,73 +27,38 @@ private ApiUtils() { } /** - * Validates that milliseconds from duration {@code d} can be retrieved and is not negative. - * @param d Duration to check + * Validates that milliseconds from {@code duration} can be retrieved. + * @param duration Duration to check. * @param name Name of params for an error message. + * @return Milliseconds from {@code duration}. */ - public static void validateMillisecondDuration(final Duration d, final String name) { - validateMillisecondDuration(d, name, false); - } - - /** - * Validates that milliseconds from duration {@code d} can be retrieved and is not negative. - * @param d Duration to check - * @param name Name of params for an error message. - * @param canBeNegative If {@code true} duration can have negative value. - */ - public static void validateMillisecondDuration(final Duration d, final String name, final boolean canBeNegative) { - final long msec = toMillis(d, name); - - if (!canBeNegative && msec < 0) - throw new IllegalArgumentException(name + " cannot be negative."); - } - - /** - * Validates that milliseconds from duration {@code d} can be retrieved and is positive. - * @param d Duration to check - * @param name Name of params for an error message. - */ - public static void validateMillisecondDurationPositive(final Duration d, final String name) { - final long msec = toMillis(d, name); - - if (msec <= 0) - throw new IllegalArgumentException(name + " should be larger than zero."); - } - - /** - * Validates that milliseconds from instant {@code i} can be retrieved and is not negative. - * @param i Instant to check - * @param name Name of params for an error message. 
- */ - public static void validateMillisecondInstant(final Instant i, final String name) { - final long msec = toMillis(i, name); - - if (msec < 0) - throw new IllegalArgumentException(name + " should be positive."); - } - - private static long toMillis(final Instant i, final String name) { + public static long validateMillisecondDuration(final Duration duration, final String name) { try { - Objects.requireNonNull(i); + Objects.requireNonNull(duration); - return i.toEpochMilli(); + return duration.toMillis(); } catch (final NullPointerException e) { - throw new IllegalArgumentException(name + " shouldn't be null.", e); + throw new IllegalArgumentException("[" + Objects.toString(name) + "] shouldn't be null.", e); } catch (final ArithmeticException e) { - throw new IllegalArgumentException(name + " can't be converted to milliseconds. " + i + - " is negative or too big", e); + throw new IllegalArgumentException("[" + Objects.toString(name) + "] can't be converted to milliseconds. ", e); } } - private static long toMillis(final Duration d, final String name) { + /** + * Validates that milliseconds from {@code instant} can be retrieved. + * @param instant Instant to check. + * @param name Name of params for an error message. + * @return Milliseconds from {@code instant}. + */ + public static long validateMillisecondInstant(final Instant instant, final String name) { try { - Objects.requireNonNull(d); + Objects.requireNonNull(instant); - return d.toMillis(); + return instant.toEpochMilli(); } catch (final NullPointerException e) { - throw new IllegalArgumentException(name + " shouldn't be null.", e); + throw new IllegalArgumentException("[" + Objects.toString(name) + "] shouldn't be null.", e); } catch (final ArithmeticException e) { - throw new IllegalArgumentException(name + " can't be converted to milliseconds. " + d + + throw new IllegalArgumentException("[" + Objects.toString(name) + "] can't be converted to milliseconds. 
" + instant + " is negative or too big", e); } } diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index 75b30c9e870a2..6f55f8e09ca4c 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -846,10 +846,10 @@ public synchronized boolean close(final long timeout, final TimeUnit timeUnit) { * @throws IllegalArgumentException if {@param timeout} is negative or too big */ public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { - log.debug("Stopping Streams client with timeoutMillis = {} ms.", timeout.toMillis()); - ApiUtils.validateMillisecondDuration(timeout, "timeout"); + log.debug("Stopping Streams client with timeoutMillis = {} ms.", timeout.toMillis()); + if (!setState(State.PENDING_SHUTDOWN)) { // if transition failed, it means it was either in PENDING_SHUTDOWN // or NOT_RUNNING already; just check that all threads have been stopped diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java index 785957c322020..e2ab572290aff 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java @@ -116,7 +116,7 @@ private JoinWindows(final long beforeMs, @Deprecated public static JoinWindows of(final long timeDifferenceMs) throws IllegalArgumentException { // This is a static factory method, so we initialize grace and retention to the defaults. 
- return of(Duration.ofMillis(timeDifferenceMs)); + return new JoinWindows(timeDifferenceMs, timeDifferenceMs, null, DEFAULT_RETENTION_MS); } /** @@ -129,7 +129,7 @@ public static JoinWindows of(final long timeDifferenceMs) throws IllegalArgument */ public static JoinWindows of(final Duration timeDifference) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); - return new JoinWindows(timeDifference.toMillis(), timeDifference.toMillis(), null, DEFAULT_RETENTION_MS); + return of(timeDifference.toMillis()); } /** @@ -146,7 +146,7 @@ public static JoinWindows of(final Duration timeDifference) throws IllegalArgume @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this @Deprecated public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentException { - return before(Duration.ofMillis(timeDifferenceMs)); + return new JoinWindows(timeDifferenceMs, afterMs, grace, maintainDurationMs, segments); } /** @@ -161,8 +161,8 @@ public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentExc */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows before(final Duration timeDifference) throws IllegalArgumentException { - ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference", true); - return new JoinWindows(timeDifference.toMillis(), afterMs, grace, maintainDurationMs, segments); + ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); + return before(timeDifference.toMillis()); } /** @@ -179,7 +179,7 @@ public JoinWindows before(final Duration timeDifference) throws IllegalArgumentE @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this @Deprecated public JoinWindows after(final long timeDifferenceMs) throws IllegalArgumentException { - return after(Duration.ofMillis(timeDifferenceMs)); + return new JoinWindows(beforeMs, timeDifferenceMs, grace, 
maintainDurationMs, segments); } /** @@ -194,8 +194,8 @@ public JoinWindows after(final long timeDifferenceMs) throws IllegalArgumentExce */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows after(final Duration timeDifference) throws IllegalArgumentException { - ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference", true); - return new JoinWindows(beforeMs, timeDifference.toMillis(), grace, maintainDurationMs, segments); + ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); + return after(timeDifference.toMillis()); } /** @@ -226,6 +226,10 @@ public long size() { @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); + if (afterWindowEnd.toMillis() < 0) { + throw new IllegalArgumentException("Grace period must not be negative."); + } + return new JoinWindows(beforeMs, afterMs, afterWindowEnd, maintainDurationMs, segments); } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java index 5994dc65fedea..15c2ea41a36b8 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java @@ -88,12 +88,15 @@ private SessionWindows(final long gapMs, final long maintainDurationMs, final Du * @param inactivityGapMs the gap of inactivity between sessions in milliseconds * @return a new window specification with default maintain duration of 1 day * - * @throws IllegalArgumentException if {@code inactivityGapMs} is zero or negative or too big + * @throws IllegalArgumentException if {@code inactivityGapMs} is zero or negative * @deprecated User {@link #with(Duration)} instead. 
*/ @Deprecated public static SessionWindows with(final long inactivityGapMs) { - return with(Duration.ofMillis(inactivityGapMs)); + if (inactivityGapMs <= 0) { + throw new IllegalArgumentException("Gap time (inactivityGapMs) cannot be zero or negative."); + } + return new SessionWindows(inactivityGapMs, DEFAULT_RETENTION_MS, null); } /** @@ -105,8 +108,8 @@ public static SessionWindows with(final long inactivityGapMs) { * @throws IllegalArgumentException if {@code inactivityGap} is zero or negative or too big */ public static SessionWindows with(final Duration inactivityGap) { - ApiUtils.validateMillisecondDurationPositive(inactivityGap, "inactivityGap"); - return new SessionWindows(inactivityGap.toMillis(), DEFAULT_RETENTION_MS, null); + ApiUtils.validateMillisecondDuration(inactivityGap, "inactivityGap"); + return with(inactivityGap.toMillis()); } /** @@ -142,6 +145,10 @@ public SessionWindows until(final long durationMs) throws IllegalArgumentExcepti */ public SessionWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); + if (afterWindowEnd.toMillis() < 0) { + throw new IllegalArgumentException("Grace period must not be negative."); + } + return new SessionWindows( gapMs, maintainDurationMs, diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java index 3804181661775..60c0a2b1e0d1f 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java @@ -100,12 +100,16 @@ private TimeWindows(final long sizeMs, * * @param sizeMs The size of the window in milliseconds * @return a new window definition with default maintain duration of 1 day - * @throws IllegalArgumentException if the specified window size is zero or negative or too big + * @throws IllegalArgumentException if 
the specified window size is zero or negative * @deprecated Use {@link #of(Duration)} instead */ @Deprecated public static TimeWindows of(final long sizeMs) throws IllegalArgumentException { - return of(Duration.ofMillis(sizeMs)); + if (sizeMs <= 0) { + throw new IllegalArgumentException("Window size (sizeMs) must be larger than zero."); + } + // This is a static factory method, so we initialize grace and retention to the defaults. + return new TimeWindows(sizeMs, sizeMs, null, DEFAULT_RETENTION_MS); } /** @@ -121,9 +125,8 @@ public static TimeWindows of(final long sizeMs) throws IllegalArgumentException * @throws IllegalArgumentException if the specified window size is zero or negative or too big */ public static TimeWindows of(final Duration size) throws IllegalArgumentException { - ApiUtils.validateMillisecondDurationPositive(size, "size"); - // This is a static factory method, so we initialize grace and retention to the defaults. - return new TimeWindows(size.toMillis(), size.toMillis(), null, DEFAULT_RETENTION_MS); + ApiUtils.validateMillisecondDuration(size, "size"); + return of(size.toMillis()); } /** @@ -142,7 +145,10 @@ public static TimeWindows of(final Duration size) throws IllegalArgumentExceptio @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows @Deprecated public TimeWindows advanceBy(final long advanceMs) { - return advanceBy(Duration.ofMillis(advanceMs)); + if (advanceMs <= 0 || advanceMs > sizeMs) { + throw new IllegalArgumentException(String.format("AdvanceMs must lie within interval (0, %d].", sizeMs)); + } + return new TimeWindows(sizeMs, advanceMs, grace, maintainDurationMs, segments); } /** @@ -159,12 +165,8 @@ public TimeWindows advanceBy(final long advanceMs) { */ @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows public TimeWindows advanceBy(final Duration advance) { - ApiUtils.validateMillisecondDurationPositive(advance, "advance"); - final long advanceMs = 
advance.toMillis(); - if (advanceMs > sizeMs) { - throw new IllegalArgumentException(String.format("AdvanceMs must lie within interval (0, %d].", sizeMs)); - } - return new TimeWindows(sizeMs, advanceMs, grace, maintainDurationMs, segments); + ApiUtils.validateMillisecondDuration(advance, "advance"); + return advanceBy(advance.toMillis()); } @Override @@ -197,6 +199,10 @@ public long size() { @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows public TimeWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); + if (afterWindowEnd.toMillis() < 0) { + throw new IllegalArgumentException("Grace period must not be negative."); + } + return new TimeWindows(sizeMs, advanceMs, afterWindowEnd, maintainDurationMs, segments); } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java index d956e5bee1369..f87fd62ba9e2a 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java @@ -68,7 +68,10 @@ public static UnlimitedWindows of() { */ @Deprecated public UnlimitedWindows startOn(final long startMs) throws IllegalArgumentException { - return startOn(Instant.ofEpochMilli(startMs)); + if (startMs < 0) { + throw new IllegalArgumentException("Window start time (startMs) cannot be negative."); + } + return new UnlimitedWindows(startMs); } /** @@ -80,7 +83,7 @@ public UnlimitedWindows startOn(final long startMs) throws IllegalArgumentExcept */ public UnlimitedWindows startOn(final Instant start) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(start, "start"); - return new UnlimitedWindows(start.toEpochMilli()); + return startOn(start.toEpochMilli()); } @Override diff --git 
a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java index d5d0b932935b9..0ef70b770305e 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ForwardingDisabledProcessorContext.java @@ -90,7 +90,7 @@ public StateStore getStateStore(final String name) { public Cancellable schedule(final long intervalMs, final PunctuationType type, final Punctuator callback) { - return schedule(Duration.ofMillis(intervalMs), type, callback); + return delegate.schedule(intervalMs, type, callback); } @Override diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java index 67d306825d5df..2fe20b4a6c8cf 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java @@ -17,6 +17,7 @@ package org.apache.kafka.streams.processor.internals; import java.time.Duration; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.processor.Cancellable; @@ -159,7 +160,8 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin @Override public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) throws IllegalArgumentException { - return task.schedule(interval.toMillis(), type, callback); + ApiUtils.validateMillisecondDuration(interval, "interval"); + return schedule(interval.toMillis(), type, callback); } void 
setStreamTimeSupplier(final TimestampSupplier streamTimeSupplier) { diff --git a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java index 4817c252e20e8..84e2e4c6085e5 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java @@ -195,8 +195,9 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, final long retentionPeriod, final long windowSize, final boolean retainDuplicates) { - return persistentWindowStore(name, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), - retainDuplicates); + // we're arbitrarily defaulting to segments no smaller than one minute. + final long defaultSegmentInterval = Math.max(retentionPeriod / 2, 60_000L); + return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, defaultSegmentInterval); } /** @@ -218,9 +219,7 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, ApiUtils.validateMillisecondDuration(retentionPeriod, "retentionPeriod"); ApiUtils.validateMillisecondDuration(windowSize, "windowSize"); - // we're arbitrarily defaulting to segments no smaller than one minute. 
- final long defaultSegmentInterval = Math.max(retentionPeriod.toMillis() / 2, 60_000L); - return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, defaultSegmentInterval); + return persistentWindowStore(name, retentionPeriod.toMillis(), windowSize.toMillis(), retainDuplicates); } /** @@ -242,8 +241,23 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, final long windowSize, final boolean retainDuplicates, final long segmentInterval) { - return persistentWindowStore(name, Duration.ofMillis(retentionPeriod), Duration.ofMillis(windowSize), - retainDuplicates, segmentInterval); + Objects.requireNonNull(name, "name cannot be null"); + if (retentionPeriod < 0L) { + throw new IllegalArgumentException("retentionPeriod cannot be negative"); + } + if (windowSize < 0L) { + throw new IllegalArgumentException("windowSize cannot be negative"); + } + if (segmentInterval < 1L) { + throw new IllegalArgumentException("segmentInterval cannot be zero or negative"); + } + if (windowSize > retentionPeriod) { + throw new IllegalArgumentException("The retention period of the window store " + + name + " must be no smaller than its window size. 
Got size=[" + + windowSize + "], retention=[" + retentionPeriod + "]"); + } + + return new RocksDbWindowBytesStoreSupplier(name, retentionPeriod, segmentInterval, windowSize, retainDuplicates); } /** @@ -267,21 +281,10 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, ApiUtils.validateMillisecondDuration(retentionPeriod, "retentionPeriod"); ApiUtils.validateMillisecondDuration(windowSize, "windowSize"); - if (segmentInterval < 1L) { - throw new IllegalArgumentException("segmentInterval cannot be zero or negative"); - } - final long retentionPeriodMs = retentionPeriod.toMillis(); final long windowSizeMs = windowSize.toMillis(); - if (windowSizeMs > retentionPeriodMs) { - throw new IllegalArgumentException("The retention period of the window store " - + name + " must be no smaller than its window size. Got size=[" - + windowSize + "], retention=[" + retentionPeriod + "]"); - } - - return new RocksDbWindowBytesStoreSupplier(name, retentionPeriodMs, segmentInterval, - windowSizeMs, retainDuplicates); + return persistentWindowStore(name, retentionPeriodMs, windowSizeMs, retainDuplicates, segmentInterval); } /** diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java index 3992d4c6e3ffe..d54d5e3400287 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java @@ -184,34 +184,22 @@ public byte[] fetch(final Bytes key, final long timestamp) { @Override @Deprecated public synchronized WindowStoreIterator fetch(final Bytes key, final long timeFrom, final long timeTo) { - return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); - } - - @Override - public WindowStoreIterator fetch(final Bytes key, final Instant from, final Duration duration) throws 
IllegalArgumentException { // since this function may not access the underlying inner store, we need to validate // if store is open outside as well. validateStoreOpen(); - ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - - final WindowStoreIterator underlyingIterator = underlying.fetch(key, from, duration); + final WindowStoreIterator underlyingIterator = underlying.fetch(key, timeFrom, timeTo); if (cache == null) { return underlyingIterator; } - - final long timeFrom = from.toEpochMilli(); - final long timeTo = from.toEpochMilli() + duration.toMillis(); - final Bytes cacheKeyFrom = cacheFunction.cacheKey(keySchema.lowerRangeFixedSize(key, timeFrom)); final Bytes cacheKeyTo = cacheFunction.cacheKey(keySchema.upperRangeFixedSize(key, timeTo)); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, cacheKeyFrom, cacheKeyTo); final HasNextCondition hasNextCondition = keySchema.hasNextCondition(key, - key, - timeFrom, - timeTo); + key, + timeFrom, + timeTo); final PeekingKeyValueIterator filteredCacheIterator = new FilteredCacheIterator( cacheIterator, hasNextCondition, cacheFunction ); @@ -220,27 +208,19 @@ public WindowStoreIterator fetch(final Bytes key, final Instant from, fi } @Override - public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final long timeFrom, - final long timeTo) { - return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + public WindowStoreIterator fetch(final Bytes key, final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @Override - public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final Instant fromTime, - final Duration duration) throws 
IllegalArgumentException { - + public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final long timeFrom, final long timeTo) { // since this function may not access the underlying inner store, we need to validate // if store is open outside as well. validateStoreOpen(); - ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - - final long timeFrom = fromTime.toEpochMilli(); - final long timeTo = fromTime.toEpochMilli() + duration.toMillis(); - - final KeyValueIterator, byte[]> underlyingIterator = - underlying.fetch(from, to, fromTime, duration); + final KeyValueIterator, byte[]> underlyingIterator = underlying.fetch(from, to, timeFrom, timeTo); if (cache == null) { return underlyingIterator; } @@ -249,11 +229,10 @@ public KeyValueIterator, byte[]> fetch(final Bytes from, final B final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, cacheKeyFrom, cacheKeyTo); final HasNextCondition hasNextCondition = keySchema.hasNextCondition(from, - to, - timeFrom, - timeTo); - final PeekingKeyValueIterator filteredCacheIterator = - new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction); + to, + timeFrom, + timeTo); + final PeekingKeyValueIterator filteredCacheIterator = new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction); return new MergedSortedCacheWindowStoreKeyValueIterator( filteredCacheIterator, @@ -264,6 +243,15 @@ public KeyValueIterator, byte[]> fetch(final Bytes from, final B ); } + @Override + public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final Instant fromTime, + final Duration duration) throws IllegalArgumentException { + + ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + } + private V fetchPrevious(final Bytes key, 
final long timestamp) { final byte[] value = underlying.fetch(key, timestamp); if (value != null) { @@ -291,32 +279,28 @@ public KeyValueIterator, byte[]> all() { @Override @Deprecated public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { - return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); - } - - @Override - public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { validateStoreOpen(); - ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - - final long timeFrom = from.toEpochMilli(); - final long timeTo = from.toEpochMilli() + duration.toMillis(); - - final KeyValueIterator, byte[]> underlyingIterator = underlying.fetchAll(from, duration); + final KeyValueIterator, byte[]> underlyingIterator = underlying.fetchAll(timeFrom, timeTo); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(name); final HasNextCondition hasNextCondition = keySchema.hasNextCondition(null, null, timeFrom, timeTo); final PeekingKeyValueIterator filteredCacheIterator = new FilteredCacheIterator(cacheIterator, - hasNextCondition, - cacheFunction); + hasNextCondition, + cacheFunction); return new MergedSortedCacheWindowStoreKeyValueIterator( - filteredCacheIterator, - underlyingIterator, - bytesSerdes, - windowSize, - cacheFunction + filteredCacheIterator, + underlyingIterator, + bytesSerdes, + windowSize, + cacheFunction ); } + + @Override + public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + } } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java 
b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java index 904d23ca5a1e8..c9cbfb9d35540 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java @@ -20,6 +20,7 @@ import java.time.Instant; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.StateStore; @@ -56,23 +57,27 @@ public byte[] fetch(final Bytes key, final long timestamp) { @Override @Deprecated public WindowStoreIterator fetch(final Bytes key, final long from, final long to) { - return fetch(key, Instant.ofEpochMilli(from), Duration.ofMillis(to - from)); + return bytesStore.fetch(key, from, to); } @Override public WindowStoreIterator fetch(final Bytes key, final Instant from, final Duration duration) throws IllegalArgumentException { - return bytesStore.fetch(key, from, duration); + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @Override public KeyValueIterator, byte[]> fetch(final Bytes keyFrom, final Bytes keyTo, final long from, final long to) { - return fetch(keyFrom, keyTo, Instant.ofEpochMilli(from), Duration.ofMillis(to - from)); + return bytesStore.fetch(keyFrom, keyTo, from, to); } @Override public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { - return bytesStore.fetch(from, to, fromTime, duration); + ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, 
"duration"); + return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); } @Override @@ -83,12 +88,14 @@ public KeyValueIterator, byte[]> all() { @Override @Deprecated public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { - return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + return bytesStore.fetchAll(timeFrom, timeTo); } @Override public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { - return bytesStore.fetchAll(from, duration); + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @Override diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java index 637819fdb12f2..11061d065015b 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java @@ -69,18 +69,11 @@ public V fetch(final K key, final long time) { @Override @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { - return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); - } - - @Override - public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { Objects.requireNonNull(key, "key can't be null"); - ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); final List> stores = provider.stores(storeName, windowStoreType); for (final ReadOnlyWindowStore windowStore : stores) { try { - final 
WindowStoreIterator result = windowStore.fetch(key, from, duration); + final WindowStoreIterator result = windowStore.fetch(key, timeFrom, timeTo); if (!result.hasNext()) { result.close(); } else { @@ -88,34 +81,41 @@ public WindowStoreIterator fetch(final K key, final Instant from, final Durat } } catch (final InvalidStateStoreException e) { throw new InvalidStateStoreException( - "State store is not available anymore and may have been migrated to another instance; " + - "please re-discover its location from the state metadata."); + "State store is not available anymore and may have been migrated to another instance; " + + "please re-discover its location from the state metadata."); } } return KeyValueIterators.emptyWindowStoreIterator(); } @Override - public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { - return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @Override - public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { Objects.requireNonNull(from, "from can't be null"); Objects.requireNonNull(to, "to can't be null"); - ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @Override public KeyValueIterator, V> apply(final ReadOnlyWindowStore store) { - return 
store.fetch(from, to, fromTime, duration); + return store.fetch(from, to, timeFrom, timeTo); } }; return new DelegatingPeekingKeyValueIterator<>(storeName, - new CompositeKeyValueIterator<>( - provider.stores(storeName, windowStoreType).iterator(), - nextIteratorFunction)); + new CompositeKeyValueIterator<>( + provider.stores(storeName, windowStoreType).iterator(), + nextIteratorFunction)); + } + + @Override + public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); } @Override @@ -135,23 +135,22 @@ public KeyValueIterator, V> apply(final ReadOnlyWindowStore st @Override @Deprecated public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { - return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); - } - - @Override - public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { - ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - final NextIteratorFunction, V, ReadOnlyWindowStore> nextIteratorFunction = new NextIteratorFunction, V, ReadOnlyWindowStore>() { @Override public KeyValueIterator, V> apply(final ReadOnlyWindowStore store) { - return store.fetchAll(from, duration); + return store.fetchAll(timeFrom, timeTo); } }; return new DelegatingPeekingKeyValueIterator<>(storeName, - new CompositeKeyValueIterator<>( - provider.stores(storeName, windowStoreType).iterator(), - nextIteratorFunction)); + new CompositeKeyValueIterator<>( + provider.stores(storeName, windowStoreType).iterator(), + nextIteratorFunction)); + } + + @Override + public KeyValueIterator, V> fetchAll(final Instant from, final 
Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java index 622ec4d96f47c..843da21ab2e25 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java @@ -144,18 +144,18 @@ public V fetch(final K key, final long timestamp) { @Override @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { - return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + return new MeteredWindowStoreIterator<>(inner.fetch(keyBytes(key), timeFrom, timeTo), + fetchTime, + metrics, + serdes, + time); } @Override public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); ApiUtils.validateMillisecondDuration(duration, "duration"); - return new MeteredWindowStoreIterator<>(inner.fetch(keyBytes(key), from, duration), - fetchTime, - metrics, - serdes, - time); + return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @Override @@ -166,34 +166,35 @@ public KeyValueIterator, V> all() { @Override @Deprecated public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { - return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + return new MeteredWindowedKeyValueIterator<>(inner.fetchAll(timeFrom, timeTo), + fetchTime, + metrics, + serdes, + time); } @Override public KeyValueIterator, V> fetchAll(final Instant from, final 
Duration duration) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); ApiUtils.validateMillisecondDuration(duration, "duration"); - return new MeteredWindowedKeyValueIterator<>(inner.fetchAll(from, duration), - fetchTime, - metrics, - serdes, - time); + return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @Override + @Deprecated public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { - return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + return new MeteredWindowedKeyValueIterator<>(inner.fetch(keyBytes(from), keyBytes(to), timeFrom, timeTo), + fetchTime, + metrics, + serdes, + time); } @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); ApiUtils.validateMillisecondDuration(duration, "duration"); - return new MeteredWindowedKeyValueIterator<>(inner.fetch(keyBytes(from), keyBytes(to), fromTime, duration), - fetchTime, - metrics, - serdes, - time); + return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); } @Override diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java index 4b5074205dc17..204357e2cb109 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java @@ -91,31 +91,29 @@ public V fetch(final K key, final long timestamp) { @Override @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { - return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + final KeyValueIterator 
bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(key)), timeFrom, timeTo); + return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).valuesIterator(); } @Override public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); ApiUtils.validateMillisecondDuration(duration, "duration"); - final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(key)), - from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); - return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).valuesIterator(); + return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } @Override @Deprecated public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { - return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(from)), Bytes.wrap(serdes.rawKey(to)), timeFrom, timeTo); + return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); } @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); ApiUtils.validateMillisecondDuration(duration, "duration"); - final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(from)), - Bytes.wrap(serdes.rawKey(to)), fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); - return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); + return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); } @Override @@ -127,16 +125,15 @@ public KeyValueIterator, V> all() { @Override @Deprecated public 
KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { - return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + final KeyValueIterator bytesIterator = bytesStore.fetchAll(timeFrom, timeTo); + return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); } @Override public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); ApiUtils.validateMillisecondDuration(duration, "duration"); - final KeyValueIterator bytesIterator = - bytesStore.fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); - return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); + return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } private void maybeUpdateSeqnumForDups() { diff --git a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java index 960c928af4644..99aacd0d19bd4 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java @@ -90,6 +90,7 @@ public Object fetch(final Object key, final long time) { } @Override + @Deprecated public WindowStoreIterator fetch(final Object key, final long timeFrom, final long timeTo) { return EMPTY_WINDOW_STORE_ITERATOR; } @@ -100,6 +101,7 @@ public WindowStoreIterator fetch(final Object key, final Instant from, final Dur } @Override + @Deprecated public WindowStoreIterator fetch(final Object from, final Object to, final long timeFrom, final long timeTo) { return EMPTY_WINDOW_STORE_ITERATOR; } @@ -116,6 +118,7 @@ public WindowStoreIterator all() { } @Override + @Deprecated public WindowStoreIterator fetchAll(final long timeFrom, final long timeTo) { return EMPTY_WINDOW_STORE_ITERATOR; } 
diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java index 65f49a3aac7d0..e1204892a018c 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java @@ -99,7 +99,7 @@ public void shouldLogPuts() { @Test public void shouldDelegateToUnderlyingStoreWhenFetching() { - EasyMock.expect(inner.fetch(bytesKey, ofEpochMilli(0), ofMillis(10))).andReturn(KeyValueIterators.emptyWindowStoreIterator()); + EasyMock.expect(inner.fetch(bytesKey, 0, 10)).andReturn(KeyValueIterators.emptyWindowStoreIterator()); init(); @@ -109,7 +109,7 @@ public void shouldDelegateToUnderlyingStoreWhenFetching() { @Test public void shouldDelegateToUnderlyingStoreWhenFetchingRange() { - EasyMock.expect(inner.fetch(bytesKey, bytesKey, ofEpochMilli(0), ofMillis(1))).andReturn(KeyValueIterators., byte[]>emptyIterator()); + EasyMock.expect(inner.fetch(bytesKey, bytesKey, 0, 1)).andReturn(KeyValueIterators., byte[]>emptyIterator()); init(); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java index 26c039fad44cc..0fe69fa3ced9e 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java @@ -112,7 +112,7 @@ public void shouldRecordPutLatency() { @Test public void shouldRecordFetchLatency() { - EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), ofEpochMilli(1), ofMillis(0))).andReturn(KeyValueIterators.emptyWindowStoreIterator()); + 
EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), 1, 1)).andReturn(KeyValueIterators.emptyWindowStoreIterator()); EasyMock.replay(innerStoreMock); store.init(context, store); @@ -125,7 +125,7 @@ public void shouldRecordFetchLatency() { @Test public void shouldRecordFetchRangeLatency() { - EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), Bytes.wrap("b".getBytes()), ofEpochMilli(1), ofMillis(0))).andReturn(KeyValueIterators., byte[]>emptyIterator()); + EasyMock.expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), Bytes.wrap("b".getBytes()), 1, 1)).andReturn(KeyValueIterators., byte[]>emptyIterator()); EasyMock.replay(innerStoreMock); store.init(context, store); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java index 499dbeebd55af..d8608c050e40a 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java @@ -18,6 +18,7 @@ import java.time.Duration; import java.time.Instant; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; @@ -62,17 +63,9 @@ public V fetch(final K key, final long time) { @Override public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { - return fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); - } - - @Override - public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { if (!open) { throw new InvalidStateStoreException("Store is not open"); } - - final long timeFrom = from.toEpochMilli(); - final long timeTo = from.toEpochMilli() + duration.toMillis(); 
final List> results = new ArrayList<>(); for (long now = timeFrom; now <= timeTo; now++) { final Map kvMap = data.get(now); @@ -83,6 +76,13 @@ public WindowStoreIterator fetch(final K key, final Instant from, final Durat return new TheWindowStoreIterator<>(results.iterator()); } + @Override + public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + } + @Override public KeyValueIterator, V> all() { if (!open) { @@ -130,16 +130,9 @@ public void remove() { @Override public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { - return fetchAll(Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); - } - - @Override - public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { if (!open) { throw new InvalidStateStoreException("Store is not open"); } - final long timeFrom = from.toEpochMilli(); - final long timeTo = from.toEpochMilli() + duration.toMillis(); final List, V>> results = new ArrayList<>(); for (final long now : data.keySet()) { if (!(now >= timeFrom && now <= timeTo)) continue; @@ -182,17 +175,17 @@ public void remove() { } @Override - public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { - return fetch(from, to, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom)); + public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(from, "from"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); } - @Override public KeyValueIterator, V> fetch(final K from, 
final K to, final Instant fromTime, - final Duration duration) throws IllegalArgumentException { + @Override + public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { if (!open) { throw new InvalidStateStoreException("Store is not open"); } - final long timeFrom = fromTime.toEpochMilli(); - final long timeTo = duration.toMillis(); final List, V>> results = new ArrayList<>(); for (long now = timeFrom; now <= timeTo; now++) { final NavigableMap kvMap = data.get(now); @@ -233,6 +226,13 @@ public void remove() { }; } + @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, + final Duration duration) throws IllegalArgumentException { + ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); + ApiUtils.validateMillisecondDuration(duration, "duration"); + return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + } + public void put(final K key, final V value, final long timestamp) { if (!data.containsKey(timestamp)) { data.put(timestamp, new TreeMap()); diff --git a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java index cd9671043d2a6..3a192bbaca6a1 100644 --- a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java +++ b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java @@ -21,6 +21,7 @@ import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.serialization.Serde; +import org.apache.kafka.streams.ApiUtils; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.StreamsMetrics; @@ -380,13 +381,7 @@ public StateStore getStateStore(final String name) { @Override @Deprecated public Cancellable 
schedule(final long intervalMs, final PunctuationType type, final Punctuator callback) { - return schedule(Duration.ofMillis(intervalMs), type, callback); - } - - @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, - final Punctuator callback) throws IllegalArgumentException { - final CapturedPunctuator capturedPunctuator = new CapturedPunctuator(interval.toMillis(), type, callback); + final CapturedPunctuator capturedPunctuator = new CapturedPunctuator(intervalMs, type, callback); punctuators.add(capturedPunctuator); @@ -398,6 +393,13 @@ public void cancel() { }; } + @Override + public Cancellable schedule(final Duration interval, final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(interval, "interval"); + return schedule(interval.toMillis(), type, callback); + } + /** * Get the punctuators scheduled so far. The returned list is not affected by subsequent calls to {@code schedule(...)}. 
* From b44aed9630c80942e67bbcc629a44ffcec27a9d5 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Tue, 25 Sep 2018 20:23:49 +0300 Subject: [PATCH 06/14] KAFKA-7277: Code review imrpovements --- .../apache/kafka/streams/KafkaStreams.java | 39 +++++++++---------- .../kafka/streams/kstream/JoinWindows.java | 5 +-- .../kafka/streams/kstream/Materialized.java | 3 ++ .../InternalTopicIntegrationTest.java | 5 ++- .../KStreamAggregationIntegrationTest.java | 4 +- .../QueryableStateIntegrationTest.java | 17 ++++---- .../RepartitionOptimizingIntegrationTest.java | 4 +- .../StreamStreamJoinIntegrationTest.java | 18 ++++----- 8 files changed, 49 insertions(+), 46 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index 6f55f8e09ca4c..053fbfea93a56 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -814,7 +814,7 @@ public void run() { * This will block until all threads have stopped. */ public void close() { - close(Duration.ofSeconds(DEFAULT_CLOSE_TIMEOUT)); + close(DEFAULT_CLOSE_TIMEOUT, TimeUnit.SECONDS); } /** @@ -831,24 +831,7 @@ public void close() { */ @Deprecated public synchronized boolean close(final long timeout, final TimeUnit timeUnit) { - return close(Duration.ofMillis(timeUnit.toMillis(timeout))); - } - - /** - * Shutdown this {@code KafkaStreams} by signaling all the threads to stop, and then wait up to the timeout for the - * threads to join. - * A {@code timeout} of 0 means to wait forever. - * - * @param timeout how long to wait for the threads to shutdown - * @return {@code true} if all threads were successfully stopped—{@code false} if the timeout was reached - * before all threads stopped - * Note that this method must not be called in the {@code onChange} callback of {@link StateListener}. 
- * @throws IllegalArgumentException if {@param timeout} is negative or too big - */ - public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { - ApiUtils.validateMillisecondDuration(timeout, "timeout"); - - log.debug("Stopping Streams client with timeoutMillis = {} ms.", timeout.toMillis()); + log.debug("Stopping Streams client with timeoutMillis = {} ms.", timeUnit.toMillis(timeout)); if (!setState(State.PENDING_SHUTDOWN)) { // if transition failed, it means it was either in PENDING_SHUTDOWN @@ -905,7 +888,7 @@ public void run() { shutdownThread.start(); } - if (waitOnState(State.NOT_RUNNING, timeout.toMillis())) { + if (waitOnState(State.NOT_RUNNING, timeUnit.toMillis(timeout))) { log.info("Streams client stopped completely"); return true; } else { @@ -914,6 +897,22 @@ public void run() { } } + /** + * Shutdown this {@code KafkaStreams} by signaling all the threads to stop, and then wait up to the timeout for the + * threads to join. + * A {@code timeout} of 0 means to wait forever. + * + * @param timeout how long to wait for the threads to shutdown + * @return {@code true} if all threads were successfully stopped—{@code false} if the timeout was reached + * before all threads stopped + * Note that this method must not be called in the {@code onChange} callback of {@link StateListener}. + * @throws IllegalArgumentException if {@param timeout} is negative or too big + */ + public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { + ApiUtils.validateMillisecondDuration(timeout, "timeout"); + return close(timeout.toMillis(), TimeUnit.MILLISECONDS); + } + /** * Do a clean up of the local {@link StateStore} directory ({@link StreamsConfig#STATE_DIR_CONFIG}) by deleting all * data with regard to the {@link StreamsConfig#APPLICATION_ID_CONFIG application ID}. 
diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java index e2ab572290aff..735600c465c33 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java @@ -110,7 +110,7 @@ private JoinWindows(final long beforeMs, * the timestamp of the record from the primary stream. * * @param timeDifferenceMs join window interval in milliseconds - * @throws IllegalArgumentException if {@code timeDifferenceMs} is negative or too big + * @throws IllegalArgumentException if {@code timeDifferenceMs} is negative * @deprecated Use {@link #of(Duration)} instead. */ @Deprecated @@ -140,7 +140,7 @@ public static JoinWindows of(final Duration timeDifference) throws IllegalArgume * value (which would result in a negative window size). * * @param timeDifferenceMs relative window start time in milliseconds - * @throws IllegalArgumentException if the resulting window size is negative or too big + * @throws IllegalArgumentException if the resulting window size is negative * @deprecated Use {@link #before(Duration)} instead. 
*/ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this @@ -229,7 +229,6 @@ public JoinWindows grace(final Duration afterWindowEnd) throws IllegalArgumentEx if (afterWindowEnd.toMillis() < 0) { throw new IllegalArgumentException("Grace period must not be negative."); } - return new JoinWindows(beforeMs, afterMs, afterWindowEnd, maintainDurationMs, segments); } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java index 65ffd6fd33f46..a81ed7b697ffe 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java @@ -248,6 +248,9 @@ public Materialized withCachingDisabled() { */ public Materialized withRetention(final Duration retention) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(retention, "retention"); + if (retention.toMillis() < 0) { + throw new IllegalArgumentException("Retention must not be negative."); + } this.retention = retention; return this; } diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java index bbd29aad072f4..9bd8c65c2dbc1 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/InternalTopicIntegrationTest.java @@ -58,6 +58,7 @@ import java.util.concurrent.TimeUnit; import static java.time.Duration.ofMillis; +import static java.time.Duration.ofSeconds; import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.waitForCompletion; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -188,8 +189,8 @@ public void shouldCompactAndDeleteTopicsForWindowStoreChangelogs() throws 
Except textLines.flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"))) .groupBy(MockMapper.selectValueMapper()) - .windowedBy(TimeWindows.of(ofMillis(1000)).grace(ofMillis(0L))) - .count(Materialized.>as("CountWindows").withRetention(ofMillis(2_000L))); + .windowedBy(TimeWindows.of(ofSeconds(1L)).grace(ofMillis(0L))) + .count(Materialized.>as("CountWindows").withRetention(ofSeconds(2L))); final KafkaStreams streams = new KafkaStreams(builder.build(), streamsProp); streams.start(); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java index cabb39dc074da..7642f69b8e0fe 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/KStreamAggregationIntegrationTest.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.integration; -import java.time.Instant; import kafka.tools.ConsoleConsumer; import kafka.utils.MockTime; import org.apache.kafka.clients.consumer.ConsumerConfig; @@ -87,6 +86,7 @@ import java.util.concurrent.TimeUnit; import static java.time.Duration.ofMillis; +import static java.time.Instant.ofEpochMilli; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; @@ -707,7 +707,7 @@ public void shouldCountUnlimitedWindows() throws Exception { builder.stream(userSessionsStream, Consumed.with(Serdes.String(), Serdes.String())) .groupByKey(Serialized.with(Serdes.String(), Serdes.String())) - .windowedBy(UnlimitedWindows.of().startOn(Instant.ofEpochMilli(startTime))) + .windowedBy(UnlimitedWindows.of().startOn(ofEpochMilli(startTime))) .count() .toStream() .transform(() -> new Transformer, Long, KeyValue>() { diff --git 
a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java index 571680ffc7e62..08d7373ba385a 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java @@ -16,8 +16,6 @@ */ package org.apache.kafka.streams.integration; -import java.time.Duration; -import java.time.Instant; import kafka.utils.MockTime; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.KafkaProducer; @@ -88,6 +86,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import static java.time.Duration.ofMillis; +import static java.time.Duration.ofSeconds; +import static java.time.Instant.ofEpochMilli; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsEqual.equalTo; import static org.junit.Assert.assertEquals; @@ -222,7 +223,7 @@ public int compare(final KeyValue o1, @After public void shutdown() throws IOException { if (kafkaStreams != null) { - kafkaStreams.close(Duration.ofSeconds(30)); + kafkaStreams.close(ofSeconds(30)); } IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration); } @@ -258,7 +259,7 @@ public Iterable apply(final String value) { // Create a Windowed State Store that contains the word count for every 1 minute groupedByWord - .windowedBy(TimeWindows.of(Duration.ofMillis(WINDOW_SIZE))) + .windowedBy(TimeWindows.of(ofMillis(WINDOW_SIZE))) .count(Materialized.>as(windowStoreName + "-" + inputTopic)) .toStream(new KeyValueMapper, Long, String>() { @Override @@ -363,7 +364,7 @@ public boolean conditionMet() { final int index = metadata.hostInfo().port(); final KafkaStreams streamsWithKey = streamRunnables[index].getStream(); final ReadOnlyWindowStore store = 
streamsWithKey.store(storeName, QueryableStoreTypes.windowStore()); - return store != null && store.fetch(key, Instant.ofEpochMilli(from), Duration.ofMillis(to - from)) != null; + return store != null && store.fetch(key, ofEpochMilli(from), ofMillis(to - from)) != null; } catch (final IllegalStateException e) { // Kafka Streams instance may have closed but rebalance hasn't happened return false; @@ -697,7 +698,7 @@ private void verifyCanQueryState(final int cacheSizeBytes) throws Exception { final String windowStoreName = "windowed-count"; s1.groupByKey() - .windowedBy(TimeWindows.of(Duration.ofMillis(WINDOW_SIZE))) + .windowedBy(TimeWindows.of(ofMillis(WINDOW_SIZE))) .count(Materialized.>as(windowStoreName)); kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration); kafkaStreams.start(); @@ -1019,7 +1020,7 @@ private void waitUntilAtLeastNumRecordProcessed(final String topic, final int nu private Set> fetch(final ReadOnlyWindowStore store, final String key) { - final WindowStoreIterator fetch = store.fetch(key, Instant.ofEpochMilli(0), Duration.ofMillis(System.currentTimeMillis())); + final WindowStoreIterator fetch = store.fetch(key, ofEpochMilli(0), ofMillis(System.currentTimeMillis())); if (fetch.hasNext()) { final KeyValue next = fetch.next(); return Collections.singleton(KeyValue.pair(key, next.value)); @@ -1030,7 +1031,7 @@ private Set> fetch(final ReadOnlyWindowStore fetchMap(final ReadOnlyWindowStore store, final String key) { - final WindowStoreIterator fetch = store.fetch(key, Instant.ofEpochMilli(0), Duration.ofMillis(System.currentTimeMillis())); + final WindowStoreIterator fetch = store.fetch(key, ofEpochMilli(0), ofMillis(System.currentTimeMillis())); if (fetch.hasNext()) { final KeyValue next = fetch.next(); return Collections.singletonMap(key, next.value); diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java 
b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java index 42ead03da36cc..16cd0b8afadae 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/RepartitionOptimizingIntegrationTest.java @@ -18,7 +18,6 @@ package org.apache.kafka.streams.integration; -import java.time.Duration; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.Serdes; @@ -61,6 +60,7 @@ import kafka.utils.MockTime; import static java.time.Duration.ofMillis; +import static java.time.Duration.ofSeconds; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; @@ -213,7 +213,7 @@ private void runIntegrationTest(final String optimizationConfig, assertThat(3, equalTo(processorValueCollector.size())); assertThat(processorValueCollector, equalTo(expectedCollectedProcessorValues)); - streams.close(Duration.ofSeconds(5)); + streams.close(ofSeconds(5)); } diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java index ff0f9f6dc89f9..646185ebb93c6 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/StreamStreamJoinIntegrationTest.java @@ -32,7 +32,7 @@ import java.util.Collections; import java.util.List; -import static java.time.Duration.ofMillis; +import static java.time.Duration.ofSeconds; /** * Tests all available joins of Kafka Streams DSL. 
@@ -80,7 +80,7 @@ public void testInner() throws Exception { Arrays.asList("D-a", "D-b", "D-c", "D-d") ); - leftStream.join(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); + leftStream.join(rightStream, valueJoiner, JoinWindows.of(ofSeconds(10))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -110,7 +110,7 @@ public void testInnerRepartitioned() throws Exception { leftStream.map(MockMapper.noOpKeyValueMapper()) .join(rightStream.flatMap(MockMapper.noOpFlatKeyValueMapper()) .selectKey(MockMapper.selectKeyKeyValueMapper()), - valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); + valueJoiner, JoinWindows.of(ofSeconds(10))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -137,7 +137,7 @@ public void testLeft() throws Exception { Arrays.asList("D-a", "D-b", "D-c", "D-d") ); - leftStream.leftJoin(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); + leftStream.leftJoin(rightStream, valueJoiner, JoinWindows.of(ofSeconds(10))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -167,7 +167,7 @@ public void testLeftRepartitioned() throws Exception { leftStream.map(MockMapper.noOpKeyValueMapper()) .leftJoin(rightStream.flatMap(MockMapper.noOpFlatKeyValueMapper()) .selectKey(MockMapper.selectKeyKeyValueMapper()), - valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); + valueJoiner, JoinWindows.of(ofSeconds(10))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -194,7 +194,7 @@ public void testOuter() throws Exception { Arrays.asList("D-a", "D-b", "D-c", "D-d") ); - leftStream.outerJoin(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); + leftStream.outerJoin(rightStream, valueJoiner, JoinWindows.of(ofSeconds(10))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -224,7 +224,7 @@ public void testOuterRepartitioned() throws Exception { leftStream.map(MockMapper.noOpKeyValueMapper()) .outerJoin(rightStream.flatMap(MockMapper.noOpFlatKeyValueMapper()) 
.selectKey(MockMapper.selectKeyKeyValueMapper()), - valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); + valueJoiner, JoinWindows.of(ofSeconds(10))).to(OUTPUT_TOPIC); runTest(expectedResult); } @@ -255,8 +255,8 @@ public void testMultiInner() throws Exception { "D-c-b", "D-c-c", "D-c-d", "D-d-a", "D-d-b", "D-d-c", "D-d-d") ); - leftStream.join(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))) - .join(rightStream, valueJoiner, JoinWindows.of(ofMillis(10000))).to(OUTPUT_TOPIC); + leftStream.join(rightStream, valueJoiner, JoinWindows.of(ofSeconds(10))) + .join(rightStream, valueJoiner, JoinWindows.of(ofSeconds(10))).to(OUTPUT_TOPIC); runTest(expectedResult); } From 4e42ed48660b72315d1fecaf615bdb6fecac916a Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Wed, 26 Sep 2018 01:15:18 +0300 Subject: [PATCH 07/14] KAFKA-7277: Fixed compilation after merge with trunk. --- .../integration/SuppressionIntegrationTest.java | 4 ++-- .../kstream/internals/SuppressScenarioTest.java | 6 +++--- .../internals/graph/GraphGraceSearchUtilTest.java | 13 +++++++------ 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/SuppressionIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/SuppressionIntegrationTest.java index af91abaf2b1b7..11b94f928e338 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/SuppressionIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/SuppressionIntegrationTest.java @@ -356,7 +356,7 @@ public void shouldSupportFinalResultsForTimeWindows() throws InterruptedExceptio Consumed.with(STRING_SERDE, STRING_SERDE) ) .groupBy((String k1, String v1) -> k1, Serialized.with(STRING_SERDE, STRING_SERDE)) - .windowedBy(TimeWindows.of(scaledTime(2L)).grace(scaledTime(1L))) + .windowedBy(TimeWindows.of(ofMillis(scaledTime(2L))).grace(ofMillis(scaledTime(1L)))) 
.count(Materialized.>as("counts").withCachingDisabled().withLoggingDisabled()); valueCounts @@ -544,4 +544,4 @@ private String printRecords(final List> result) { resultStr.append("]"); return resultStr.toString(); } -} \ No newline at end of file +} diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressScenarioTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressScenarioTest.java index d98a15e093b9a..00b806020318b 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressScenarioTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressScenarioTest.java @@ -394,7 +394,7 @@ public void shouldSupportFinalResultsForTimeWindows() { final KTable, Long> valueCounts = builder .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE)) .groupBy((String k, String v) -> k, Serialized.with(STRING_SERDE, STRING_SERDE)) - .windowedBy(TimeWindows.of(2L).grace(1L)) + .windowedBy(TimeWindows.of(ofMillis(2L)).grace(ofMillis(1L))) .count(Materialized.>as("counts").withCachingDisabled()); valueCounts .suppress(untilWindowCloses(unbounded())) @@ -448,7 +448,7 @@ public void shouldSupportFinalResultsForTimeWindowsWithLargeJump() { final KTable, Long> valueCounts = builder .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE)) .groupBy((String k, String v) -> k, Serialized.with(STRING_SERDE, STRING_SERDE)) - .windowedBy(TimeWindows.of(2L).grace(2L)) + .windowedBy(TimeWindows.of(ofMillis(2L)).grace(ofMillis(2L))) .count(Materialized.>as("counts").withCachingDisabled().withKeySerde(STRING_SERDE)); valueCounts .suppress(untilWindowCloses(unbounded())) @@ -507,7 +507,7 @@ public void shouldSupportFinalResultsForSessionWindows() { final KTable, Long> valueCounts = builder .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE)) .groupBy((String k, String v) -> k, Serialized.with(STRING_SERDE, STRING_SERDE)) - .windowedBy(SessionWindows.with(5L).grace(5L)) + 
.windowedBy(SessionWindows.with(ofMillis(5L)).grace(ofMillis(5L))) .count(Materialized.>as("counts").withCachingDisabled()); valueCounts .suppress(untilWindowCloses(unbounded())) diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/GraphGraceSearchUtilTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/GraphGraceSearchUtilTest.java index 2b054230839ec..37265fae39917 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/GraphGraceSearchUtilTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/graph/GraphGraceSearchUtilTest.java @@ -26,6 +26,7 @@ import org.apache.kafka.streams.processor.ProcessorContext; import org.junit.Test; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -78,7 +79,7 @@ public void close() {} @Test public void shouldExtractGraceFromKStreamWindowAggregateNode() { - final TimeWindows windows = TimeWindows.of(10L).grace(1234L); + final TimeWindows windows = TimeWindows.of(ofMillis(10L)).grace(ofMillis(1234L)); final StatefulProcessorNode node = new StatefulProcessorNode<>( "asdf", new ProcessorParameters<>( @@ -101,7 +102,7 @@ public void shouldExtractGraceFromKStreamWindowAggregateNode() { @Test public void shouldExtractGraceFromKStreamSessionWindowAggregateNode() { - final SessionWindows windows = SessionWindows.with(10L).grace(1234L); + final SessionWindows windows = SessionWindows.with(ofMillis(10L)).grace(ofMillis(1234L)); final StatefulProcessorNode node = new StatefulProcessorNode<>( "asdf", @@ -126,7 +127,7 @@ public void shouldExtractGraceFromKStreamSessionWindowAggregateNode() { @Test public void shouldExtractGraceFromAncestorThroughStatefulParent() { - final SessionWindows windows = SessionWindows.with(10L).grace(1234L); + final SessionWindows windows = 
SessionWindows.with(ofMillis(10L)).grace(ofMillis(1234L)); final StatefulProcessorNode graceGrandparent = new StatefulProcessorNode<>( "asdf", new ProcessorParameters<>(new KStreamSessionWindowAggregate( @@ -167,7 +168,7 @@ public void close() {} @Test public void shouldExtractGraceFromAncestorThroughStatelessParent() { - final SessionWindows windows = SessionWindows.with(10L).grace(1234L); + final SessionWindows windows = SessionWindows.with(ofMillis(10L)).grace(ofMillis(1234L)); final StatefulProcessorNode graceGrandparent = new StatefulProcessorNode<>( "asdf", new ProcessorParameters<>( @@ -201,7 +202,7 @@ public void shouldUseMaxIfMultiParentsDoNotAgreeOnGrace() { "asdf", new ProcessorParameters<>( new KStreamSessionWindowAggregate( - SessionWindows.with(10L).grace(1234L), + SessionWindows.with(ofMillis(10L)).grace(ofMillis(1234L)), "asdf", null, null, @@ -218,7 +219,7 @@ public void shouldUseMaxIfMultiParentsDoNotAgreeOnGrace() { "asdf", new ProcessorParameters<>( new KStreamWindowAggregate( - TimeWindows.of(10L).grace(4321L), + TimeWindows.of(ofMillis(10L)).grace(ofMillis(4321L)), "asdf", null, null From e5b9e5f68dc083a754fb7931e957d76d92ea7be8 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Wed, 26 Sep 2018 07:25:37 +0300 Subject: [PATCH 08/14] KAFKA-7277: Code review fixes: * ApiUtils moved to internals package. * JavaDoc fixes. 
--- .../java/org/apache/kafka/streams/KafkaStreams.java | 3 ++- .../apache/kafka/streams/{ => internals}/ApiUtils.java | 4 +--- .../org/apache/kafka/streams/kstream/JoinWindows.java | 10 +++++----- .../org/apache/kafka/streams/kstream/Materialized.java | 4 ++-- .../apache/kafka/streams/kstream/SessionWindows.java | 4 ++-- .../org/apache/kafka/streams/kstream/TimeWindows.java | 6 +++--- .../apache/kafka/streams/kstream/UnlimitedWindows.java | 2 +- .../processor/internals/ProcessorContextImpl.java | 2 +- .../kafka/streams/state/ReadOnlyWindowStore.java | 6 +++--- .../java/org/apache/kafka/streams/state/Stores.java | 2 +- .../streams/state/internals/CachingWindowStore.java | 2 +- .../state/internals/ChangeLoggingWindowBytesStore.java | 2 +- .../state/internals/CompositeReadOnlyWindowStore.java | 2 +- .../streams/state/internals/MeteredWindowStore.java | 2 +- .../streams/state/internals/RocksDBWindowStore.java | 2 +- .../org/apache/kafka/streams/perf/SimpleBenchmark.java | 6 +++--- .../state/internals/ReadOnlyWindowStoreStub.java | 2 +- .../kafka/streams/scala/kstream/KStreamTest.scala | 4 ++-- .../kafka/streams/processor/MockProcessorContext.java | 2 +- 19 files changed, 33 insertions(+), 34 deletions(-) rename streams/src/main/java/org/apache/kafka/streams/{ => internals}/ApiUtils.java (98%) diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index 053fbfea93a56..1fb2591b50b31 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -37,6 +37,7 @@ import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.errors.ProcessorStateException; import org.apache.kafka.streams.errors.StreamsException; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KTable; 
import org.apache.kafka.streams.kstream.Produced; @@ -906,7 +907,7 @@ public void run() { * @return {@code true} if all threads were successfully stopped—{@code false} if the timeout was reached * before all threads stopped * Note that this method must not be called in the {@code onChange} callback of {@link StateListener}. - * @throws IllegalArgumentException if {@param timeout} is negative or too big + * @throws IllegalArgumentException if {@param timeout} is negative or can't be represented as {@code long milliseconds} */ public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(timeout, "timeout"); diff --git a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java b/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java similarity index 98% rename from streams/src/main/java/org/apache/kafka/streams/ApiUtils.java rename to streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java index c16f0e72de206..f7e9cc1d11b5b 100644 --- a/streams/src/main/java/org/apache/kafka/streams/ApiUtils.java +++ b/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java @@ -14,14 +14,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.kafka.streams; +package org.apache.kafka.streams.internals; import java.time.Duration; import java.time.Instant; import java.util.Objects; -/** - */ public final class ApiUtils { private ApiUtils() { } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java index 735600c465c33..af073a0b69589 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java @@ -16,7 +16,7 @@ */ package org.apache.kafka.streams.kstream; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.processor.TimestampExtractor; import java.time.Duration; @@ -125,7 +125,7 @@ public static JoinWindows of(final long timeDifferenceMs) throws IllegalArgument * the timestamp of the record from the primary stream. * * @param timeDifference join window interval - * @throws IllegalArgumentException if {@code timeDifference} is negative or too big + * @throws IllegalArgumentException if {@code timeDifference} is negative or can't be represented as {@code long milliseconds} */ public static JoinWindows of(final Duration timeDifference) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(timeDifference, "timeDifference"); @@ -157,7 +157,7 @@ public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentExc * value (which would result in a negative window size). 
* * @param timeDifference relative window start time - * @throws IllegalArgumentException if the resulting window size is negative or too big + * @throws IllegalArgumentException if the resulting window size is negative or can't be represented as {@code long milliseconds} */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows before(final Duration timeDifference) throws IllegalArgumentException { @@ -173,7 +173,7 @@ public JoinWindows before(final Duration timeDifference) throws IllegalArgumentE * value (which would result in a negative window size). * * @param timeDifferenceMs relative window end time in milliseconds - * @throws IllegalArgumentException if the resulting window size is negative or too big + * @throws IllegalArgumentException if the resulting window size is negative or can't be represented as {@code long milliseconds} * @deprecated Use {@link #after(Duration)} instead */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this @@ -190,7 +190,7 @@ public JoinWindows after(final long timeDifferenceMs) throws IllegalArgumentExce * value (which would result in a negative window size). 
* * @param timeDifference relative window end time - * @throws IllegalArgumentException if the resulting window size is negative or too big + * @throws IllegalArgumentException if the resulting window size is negative or can't be represented as {@code long milliseconds} */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows after(final Duration timeDifference) throws IllegalArgumentException { diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java index a81ed7b697ffe..a19412d7b13a8 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/Materialized.java @@ -19,7 +19,7 @@ import org.apache.kafka.common.internals.Topic; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.processor.StateStore; import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier; import org.apache.kafka.streams.state.KeyValueStore; @@ -244,7 +244,7 @@ public Materialized withCachingDisabled() { * from window-start through window-end, and for the entire grace period. 
* * @return itself - * @throws IllegalArgumentException if retention is negative or too big + * @throws IllegalArgumentException if retention is negative or can't be represented as {@code long milliseconds} */ public Materialized withRetention(final Duration retention) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(retention, "retention"); diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java index 15c2ea41a36b8..6ad77f4004851 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java @@ -16,7 +16,7 @@ */ package org.apache.kafka.streams.kstream; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.processor.TimestampExtractor; import org.apache.kafka.streams.state.SessionBytesStoreSupplier; @@ -105,7 +105,7 @@ public static SessionWindows with(final long inactivityGapMs) { * @param inactivityGap the gap of inactivity between sessions * @return a new window specification with default maintain duration of 1 day * - * @throws IllegalArgumentException if {@code inactivityGap} is zero or negative or too big + * @throws IllegalArgumentException if {@code inactivityGap} is zero or negative or can't be represented as {@code long milliseconds} */ public static SessionWindows with(final Duration inactivityGap) { ApiUtils.validateMillisecondDuration(inactivityGap, "inactivityGap"); diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java index 60c0a2b1e0d1f..ab8f3cac567dd 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java @@ -16,7 +16,7 @@ */ package 
org.apache.kafka.streams.kstream; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.kstream.internals.TimeWindow; import org.apache.kafka.streams.processor.TimestampExtractor; import org.apache.kafka.streams.state.WindowBytesStoreSupplier; @@ -122,7 +122,7 @@ public static TimeWindows of(final long sizeMs) throws IllegalArgumentException * * @param size The size of the window * @return a new window definition with default maintain duration of 1 day - * @throws IllegalArgumentException if the specified window size is zero or negative or too big + * @throws IllegalArgumentException if the specified window size is zero or negative or can't be represented as {@code long milliseconds} */ public static TimeWindows of(final Duration size) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(size, "size"); @@ -194,7 +194,7 @@ public long size() { * * @param afterWindowEnd The grace period to admit late-arriving events to a window. 
* @return this updated builder - * @throws IllegalArgumentException if afterWindowEnd is negative or too big + * @throws IllegalArgumentException if afterWindowEnd is negative or can't be represented as {@code long milliseconds} */ @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows public TimeWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java index f87fd62ba9e2a..a120ba136abd8 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java @@ -17,7 +17,7 @@ package org.apache.kafka.streams.kstream; import java.time.Instant; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.kstream.internals.UnlimitedWindow; import org.apache.kafka.streams.processor.TimestampExtractor; diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java index 2fe20b4a6c8cf..8b3bc153f7063 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java @@ -17,7 +17,7 @@ package org.apache.kafka.streams.processor.internals; import java.time.Duration; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.processor.Cancellable; diff --git a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java 
b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java index 9ff1227eca68c..f64659c5d91ff 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java @@ -114,7 +114,7 @@ public interface ReadOnlyWindowStore { * @return an iterator over key-value pairs {@code } * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for key. - * @throws IllegalArgumentException if duration is negative or too big + * @throws IllegalArgumentException if duration is negative or can't be represented as {@code long milliseconds} */ WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException; @@ -148,7 +148,7 @@ public interface ReadOnlyWindowStore { * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for any key. 
- * @throws IllegalArgumentException if duration is negative or too big + * @throws IllegalArgumentException if duration is negative or can't be represented as {@code long milliseconds} */ KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) throws IllegalArgumentException; @@ -182,7 +182,7 @@ KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException if {@code null} is used for any key - * @throws IllegalArgumentException if duration is negative or too big + * @throws IllegalArgumentException if duration is negative or can't be represented as {@code long milliseconds} */ KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws IllegalArgumentException; } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java index 84e2e4c6085e5..aa854257ea3f7 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java @@ -22,7 +22,7 @@ import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.common.utils.Time; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.state.internals.InMemoryKeyValueStore; import org.apache.kafka.streams.state.internals.KeyValueStoreBuilder; import org.apache.kafka.streams.state.internals.MemoryNavigableLRUCache; diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java index d54d5e3400287..4cd9503b18eba 100644 --- 
a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java @@ -21,7 +21,7 @@ import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.kstream.internals.CacheFlushListener; import org.apache.kafka.streams.processor.ProcessorContext; diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java index c9cbfb9d35540..75bff70cfa859 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java @@ -20,7 +20,7 @@ import java.time.Instant; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.StateStore; diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java index 11061d065015b..a52aa3366ab50 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java @@ -18,7 +18,7 @@ import java.time.Duration; import 
java.time.Instant; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.state.KeyValueIterator; diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java index 843da21ab2e25..a43e6e9542973 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java @@ -22,7 +22,7 @@ import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.common.utils.Time; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.errors.ProcessorStateException; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.ProcessorContext; diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java index 204357e2cb109..c6e64e3aac139 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java @@ -20,7 +20,7 @@ import java.time.Instant; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.StateStore; diff --git 
a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java index b65068aba1fd2..52e5be6d345a8 100644 --- a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java +++ b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.perf; -import java.time.Duration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -64,6 +63,7 @@ import java.util.concurrent.TimeUnit; import static java.time.Duration.ofMillis; +import static java.time.Duration.ofSeconds; import static java.time.Instant.ofEpochMilli; /** @@ -501,7 +501,7 @@ public void init(final ProcessorContext context) { @Override public void process(final Integer key, final byte[] value) { final long timestamp = context().timestamp(); - final KeyValueIterator, byte[]> iter = store.fetch(key - 10, key + 10, ofEpochMilli(timestamp - 1000L), ofMillis(1000L)); + final KeyValueIterator, byte[]> iter = store.fetch(key - 10, key + 10, ofEpochMilli(timestamp - 1000L), ofSeconds(1L)); while (iter.hasNext()) { iter.next(); } @@ -680,7 +680,7 @@ private KafkaStreams createKafkaStreamsWithExceptionHandler(final StreamsBuilder public void uncaughtException(final Thread t, final Throwable e) { System.out.println("FATAL: An unexpected exception is encountered on thread " + t + ": " + e); - streamsClient.close(Duration.ofSeconds(30)); + streamsClient.close(ofSeconds(30)); } }); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java index d8608c050e40a..ba6df179665dc 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java +++ 
b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java @@ -18,7 +18,7 @@ import java.time.Duration; import java.time.Instant; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; diff --git a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala index e2a0cc6a51962..8626be56aae1f 100644 --- a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala +++ b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KStreamTest.scala @@ -18,7 +18,7 @@ */ package org.apache.kafka.streams.scala.kstream -import java.time.Duration.ofMillis +import java.time.Duration.ofSeconds import org.apache.kafka.streams.kstream.JoinWindows import org.apache.kafka.streams.scala.ImplicitConversions._ @@ -145,7 +145,7 @@ class KStreamTest extends FlatSpec with Matchers with TestDriver { val stream1 = builder.stream[String, String](sourceTopic1) val stream2 = builder.stream[String, String](sourceTopic2) - stream1.join(stream2)((a, b) => s"$a-$b", JoinWindows.of(ofMillis(1000))).to(sinkTopic) + stream1.join(stream2)((a, b) => s"$a-$b", JoinWindows.of(ofSeconds(1))).to(sinkTopic) val testDriver = createTestDriver(builder) diff --git a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java index b6af5b7b0664a..50c4488aca4bc 100644 --- a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java +++ b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java @@ -21,7 
+21,7 @@ import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.serialization.Serde; -import org.apache.kafka.streams.ApiUtils; +import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.StreamsMetrics; From 527f6fe488b4eea81fc693b0b8212cd05b91662c Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Fri, 28 Sep 2018 01:44:18 +0300 Subject: [PATCH 09/14] KAFKA-7277: Code review fixes: * Typos fixed. * Error messages improved. * Wrong new method removed * fetch, fetchAll added to WindowStore --- .../apache/kafka/streams/KafkaStreams.java | 2 +- .../kafka/streams/internals/ApiUtils.java | 3 +- .../kafka/streams/kstream/JoinWindows.java | 8 +-- .../kstream/internals/KStreamKStreamJoin.java | 5 +- .../streams/processor/ProcessorContext.java | 4 +- .../streams/state/ReadOnlyWindowStore.java | 1 + .../apache/kafka/streams/state/Stores.java | 51 ++++++-------- .../kafka/streams/state/WindowStore.java | 66 +++++++++++++++++++ ...amSessionWindowAggregateProcessorTest.java | 2 +- .../kafka/streams/perf/SimpleBenchmark.java | 4 +- .../InternalTopologyBuilderTest.java | 6 +- .../kafka/streams/state/StoresTest.java | 23 +++---- .../internals/RocksDBWindowStoreTest.java | 2 +- .../scala/kstream/MaterializedTest.scala | 2 +- 14 files changed, 115 insertions(+), 64 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index 1fb2591b50b31..6e04349d5f41a 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -907,7 +907,7 @@ public void run() { * @return {@code true} if all threads were successfully stopped—{@code false} if the timeout was reached * before all threads stopped * Note that this method 
must not be called in the {@code onChange} callback of {@link StateListener}. - * @throws IllegalArgumentException if {@param timeout} is negative or can't be represented as {@code long milliseconds} + * @throws IllegalArgumentException if {@param timeout} can't be represented as {@code long milliseconds} */ public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(timeout, "timeout"); diff --git a/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java b/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java index f7e9cc1d11b5b..eb7d3e074dd41 100644 --- a/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java +++ b/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java @@ -56,8 +56,7 @@ public static long validateMillisecondInstant(final Instant instant, final Strin } catch (final NullPointerException e) { throw new IllegalArgumentException("[" + Objects.toString(name) + "] shouldn't be null.", e); } catch (final ArithmeticException e) { - throw new IllegalArgumentException("[" + Objects.toString(name) + "] can't be converted to milliseconds. " + instant + - " is negative or too big", e); + throw new IllegalArgumentException("[" + Objects.toString(name) + "] can't be converted to milliseconds. ", e); } } } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java index af073a0b69589..107a04963f555 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java @@ -136,7 +136,7 @@ public static JoinWindows of(final Duration timeDifference) throws IllegalArgume * Changes the start window boundary to {@code timeDifferenceMs} but keep the end window boundary as is. 
* Thus, records of the same key are joinable if the timestamp of a record from the secondary stream is at most * {@code timeDifferenceMs} earlier than the timestamp of the record from the primary stream. - * {@code timeDifferenceMs} can be negative but it's absolute value must not be larger than current window "after" + * {@code timeDifferenceMs} can be negative but its absolute value must not be larger than current window "after" * value (which would result in a negative window size). * * @param timeDifferenceMs relative window start time in milliseconds @@ -153,7 +153,7 @@ public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentExc * Changes the start window boundary to {@code timeDifference} but keep the end window boundary as is. * Thus, records of the same key are joinable if the timestamp of a record from the secondary stream is at most * {@code timeDifference} earlier than the timestamp of the record from the primary stream. - * {@code timeDifference} can be negative but it's absolute value must not be larger than current window "after" + * {@code timeDifference} can be negative but its absolute value must not be larger than current window "after" * value (which would result in a negative window size). * * @param timeDifference relative window start time @@ -169,7 +169,7 @@ public JoinWindows before(final Duration timeDifference) throws IllegalArgumentE * Changes the end window boundary to {@code timeDifferenceMs} but keep the start window boundary as is. * Thus, records of the same key are joinable if the timestamp of a record from the secondary stream is at most * {@code timeDifferenceMs} later than the timestamp of the record from the primary stream. - * {@code timeDifferenceMs} can be negative but it's absolute value must not be larger than current window "before" + * {@code timeDifferenceMs} can be negative but its absolute value must not be larger than current window "before" * value (which would result in a negative window size). 
* * @param timeDifferenceMs relative window end time in milliseconds @@ -186,7 +186,7 @@ public JoinWindows after(final long timeDifferenceMs) throws IllegalArgumentExce * Changes the end window boundary to {@code timeDifference} but keep the start window boundary as is. * Thus, records of the same key are joinable if the timestamp of a record from the secondary stream is at most * {@code timeDifference} later than the timestamp of the record from the primary stream. - * {@code timeDifference} can be negative but it's absolute value must not be larger than current window "before" + * {@code timeDifference} can be negative but its absolute value must not be larger than current window "before" * value (which would result in a negative window size). * * @param timeDifference relative window end time diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java index 017d0d6282e17..4c6998ad85535 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoin.java @@ -16,8 +16,6 @@ */ package org.apache.kafka.streams.kstream.internals; -import java.time.Duration; -import java.time.Instant; import org.apache.kafka.streams.kstream.ValueJoiner; import org.apache.kafka.streams.processor.AbstractProcessor; import org.apache.kafka.streams.processor.Processor; @@ -90,8 +88,7 @@ key, value, context().topic(), context().partition(), context().offset() final long timeFrom = Math.max(0L, context().timestamp() - joinBeforeMs); final long timeTo = Math.max(0L, context().timestamp() + joinAfterMs); - try (final WindowStoreIterator iter = - otherWindow.fetch(key, Instant.ofEpochMilli(timeFrom), Duration.ofMillis(timeTo - timeFrom))) { + try (final WindowStoreIterator iter = otherWindow.fetch(key, timeFrom, timeTo)) { while 
(iter.hasNext()) { needOuterJoin = false; context().forward(key, joiner.apply(value, iter.next().value)); diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java b/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java index bef6c9cd3842e..8f03764544fec 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java @@ -165,8 +165,8 @@ Cancellable schedule(final long intervalMs, * @return a handle allowing cancellation of the punctuation schedule established by this method */ Cancellable schedule(final Duration interval, - final PunctuationType type, - final Punctuator callback) throws IllegalArgumentException; + final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException; /** * Forwards a key/value pair to all downstream processors. diff --git a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java index f64659c5d91ff..332b230872032 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java @@ -131,6 +131,7 @@ public interface ReadOnlyWindowStore { * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for any key. 
+ * @deprecated Use {@link #fetch(Object, Object, Instant, Duration)} instead */ @Deprecated KeyValueIterator, V> fetch(K from, K to, long timeFrom, long timeTo); diff --git a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java index aa854257ea3f7..98edc71d0feec 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java @@ -155,7 +155,7 @@ public String metricsScope() { * careful to set it the same as the windowed keys you're actually storing. * @param retainDuplicates whether or not to retain duplicates. * @return an instance of {@link WindowBytesStoreSupplier} - * @deprecated since 2.1 Use {@link Stores#persistentWindowStore(String, Duration, Duration, boolean, long)} instead + * @deprecated since 2.1 Use {@link Stores#persistentWindowStore(String, long, long, boolean, long)} instead */ @Deprecated public static WindowBytesStoreSupplier persistentWindowStore(final String name, @@ -212,9 +212,9 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, * @return an instance of {@link WindowBytesStoreSupplier} */ public static WindowBytesStoreSupplier persistentWindowStore(final String name, - final Duration retentionPeriod, - final Duration windowSize, - final boolean retainDuplicates) throws IllegalArgumentException { + final Duration retentionPeriod, + final Duration windowSize, + final boolean retainDuplicates) throws IllegalArgumentException { Objects.requireNonNull(name, "name cannot be null"); ApiUtils.validateMillisecondDuration(retentionPeriod, "retentionPeriod"); ApiUtils.validateMillisecondDuration(windowSize, "windowSize"); @@ -233,7 +233,6 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, * @param windowSize size of the windows (cannot be negative) * @param retainDuplicates whether or not to retain duplicates. 
* @return an instance of {@link WindowBytesStoreSupplier} - * @deprecated Use {@link #persistentWindowStore(String, Duration, Duration, boolean, long)} */ @Deprecated public static WindowBytesStoreSupplier persistentWindowStore(final String name, @@ -260,33 +259,6 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, return new RocksDbWindowBytesStoreSupplier(name, retentionPeriod, segmentInterval, windowSize, retainDuplicates); } - /** - * Create a persistent {@link WindowBytesStoreSupplier}. - * @param name name of the store (cannot be {@code null}) - * @param retentionPeriod length of time to retain data in the store (cannot be negative) - * Note that the retention period must be at least long enough to contain the - * windowed data's entire life cycle, from window-start through window-end, - * and for the entire grace period. - * @param segmentInterval size of segments - * @param windowSize size of the windows - * @param retainDuplicates whether or not to retain duplicates. - * @return an instance of {@link WindowBytesStoreSupplier} - */ - public static WindowBytesStoreSupplier persistentWindowStore(final String name, - final Duration retentionPeriod, - final Duration windowSize, - final boolean retainDuplicates, - final long segmentInterval) { - Objects.requireNonNull(name, "name cannot be null"); - ApiUtils.validateMillisecondDuration(retentionPeriod, "retentionPeriod"); - ApiUtils.validateMillisecondDuration(windowSize, "windowSize"); - - final long retentionPeriodMs = retentionPeriod.toMillis(); - final long windowSizeMs = windowSize.toMillis(); - - return persistentWindowStore(name, retentionPeriodMs, windowSizeMs, retainDuplicates, segmentInterval); - } - /** * Create a persistent {@link SessionBytesStoreSupplier}. 
* @param name name of the store (cannot be {@code null}) @@ -305,6 +277,21 @@ public static SessionBytesStoreSupplier persistentSessionStore(final String name return new RocksDbSessionBytesStoreSupplier(name, retentionPeriod); } + /** + * Create a persistent {@link SessionBytesStoreSupplier}. + * @param name name of the store (cannot be {@code null}) + * @param retentionPeriod length of time to retain data in the store (cannot be negative) + * Note that the retention period must be at least long enough to contain the + * windowed data's entire life cycle, from window-start through window-end, + * and for the entire grace period. + * @return an instance of a {@link SessionBytesStoreSupplier} + */ + public static SessionBytesStoreSupplier persistentSessionStore(final String name, + final Duration retentionPeriod) { + ApiUtils.validateMillisecondDuration(retentionPeriod, "retentionPeriod"); + return persistentSessionStore(name, retentionPeriod.toMillis()); + } + /** * Creates a {@link StoreBuilder} that can be used to build a {@link WindowStore}. diff --git a/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java index 1685123ff9687..d07de539c41a1 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java @@ -16,6 +16,8 @@ */ package org.apache.kafka.streams.state; +import org.apache.kafka.streams.errors.InvalidStateStoreException; +import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.StateStore; /** @@ -48,4 +50,68 @@ public interface WindowStore extends StateStore, ReadOnlyWindowStore * @throws NullPointerException If null is used for key. */ void put(K key, V value, long windowStartTimestamp); + + /** + * Get all the key-value pairs with the given key and the time range from all + * the existing windows. 
+ * + * This iterator must be closed after use. + *

    + * The time range is inclusive and applies to the starting timestamp of the window. + * For example, if we have the following windows: + *

    + *

    +     * +-------------------------------+
    +     * |  key  | start time | end time |
    +     * +-------+------------+----------+
    +     * |   A   |     10     |    20    |
    +     * +-------+------------+----------+
    +     * |   A   |     15     |    25    |
    +     * +-------+------------+----------+
    +     * |   A   |     20     |    30    |
    +     * +-------+------------+----------+
    +     * |   A   |     25     |    35    |
    +     * +--------------------------------
    +     * 
    + * And we call {@code store.fetch("A", Instant.ofEpochMilli(10), Duration.ofMillis(20))} then the results will contain the first + * three windows from the table above, i.e., all those where 10 <= start time <= 20. + *

    + * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest + * available window to the newest/latest window. + * + * @param key the key to fetch + * @param timeFrom time range start (inclusive) + * @param timeTo time range end (inclusive) + * @return an iterator over key-value pairs {@code } + * @throws InvalidStateStoreException if the store is not initialized + * @throws NullPointerException If {@code null} is used for key. + */ + WindowStoreIterator fetch(K key, long timeFrom, long timeTo); + + /** + * Get all the key-value pairs in the given key range and time range from all + * the existing windows. + * + * This iterator must be closed after use. + * + * @param from the first key in the range + * @param to the last key in the range + * @param timeFrom time range start (inclusive) + * @param timeTo time range end (inclusive) + * @return an iterator over windowed key-value pairs {@code , value>} + * @throws InvalidStateStoreException if the store is not initialized + * @throws NullPointerException If {@code null} is used for any key. + */ + KeyValueIterator, V> fetch(K from, K to, long timeFrom, long timeTo); + + /** + * Gets all the key-value pairs that belong to the windows within the given time range. 
+ * + * @param timeFrom the beginning of the time slot from which to search (inclusive) + * @param timeTo the end of the time slot from which to search (inclusive) + * @return an iterator over windowed key-value pairs {@code , value>} + * @throws InvalidStateStoreException if the store is not initialized + * @throws NullPointerException if {@code null} is used for any key + */ + KeyValueIterator, V> fetchAll(long timeFrom, long timeTo); } diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java index 8598b8027dd31..419c861f118af 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregateProcessorTest.java @@ -107,7 +107,7 @@ public void forward(final K key, final V value) { } private void initStore(final boolean enableCaching) { - final StoreBuilder> storeBuilder = Stores.sessionStoreBuilder(Stores.persistentSessionStore(STORE_NAME, GAP_MS * 3), + final StoreBuilder> storeBuilder = Stores.sessionStoreBuilder(Stores.persistentSessionStore(STORE_NAME, ofMillis(GAP_MS * 3)), Serdes.String(), Serdes.Long()) .withLoggingDisabled(); diff --git a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java index 52e5be6d345a8..a82025c5999a4 100644 --- a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java +++ b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java @@ -473,8 +473,8 @@ private void processStreamWithWindowStore(final String topic) { final StoreBuilder> storeBuilder = Stores.windowStoreBuilder( Stores.persistentWindowStore( "store", - ofMillis(AGGREGATE_WINDOW_SIZE * 3), - 
ofMillis(AGGREGATE_WINDOW_SIZE), + AGGREGATE_WINDOW_SIZE * 3, + AGGREGATE_WINDOW_SIZE, false, 60_000L ), diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java index 2bd428b2d653c..8ddb0b5004076 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopologyBuilderTest.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.processor.internals; -import java.time.Duration; import org.apache.kafka.common.config.TopicConfig; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; @@ -48,6 +47,7 @@ import java.util.Set; import java.util.regex.Pattern; +import static java.time.Duration.ofSeconds; import static org.apache.kafka.common.utils.Utils.mkList; import static org.apache.kafka.common.utils.Utils.mkSet; import static org.hamcrest.core.IsInstanceOf.instanceOf; @@ -589,7 +589,7 @@ public void shouldAddInternalTopicConfigForWindowStores() { builder.addProcessor("processor", new MockProcessorSupplier(), "source"); builder.addStateStore( Stores.windowStoreBuilder( - Stores.persistentWindowStore("store1", Duration.ofSeconds(30L), Duration.ofSeconds(10L), false), + Stores.persistentWindowStore("store1", ofSeconds(30L), ofSeconds(10L), false), Serdes.String(), Serdes.String() ), @@ -597,7 +597,7 @@ public void shouldAddInternalTopicConfigForWindowStores() { ); builder.addStateStore( Stores.sessionStoreBuilder( - Stores.persistentSessionStore("store2", 30000), Serdes.String(), Serdes.String() + Stores.persistentSessionStore("store2", ofSeconds(30)), Serdes.String(), Serdes.String() ), "processor" ); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java 
b/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java index 8604bffcd29fb..b62364a4a2f3b 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/StoresTest.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state; -import java.time.Duration; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.state.internals.InMemoryKeyValueStore; import org.apache.kafka.streams.state.internals.MemoryNavigableLRUCache; @@ -25,6 +24,7 @@ import org.apache.kafka.streams.state.internals.RocksDBWindowStore; import org.junit.Test; +import static java.time.Duration.ofMillis; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; @@ -55,12 +55,12 @@ public void shouldThrowIfILruMapStoreCapacityIsNegative() { @Test(expected = NullPointerException.class) public void shouldThrowIfIPersistentWindowStoreStoreNameIsNull() { - Stores.persistentWindowStore(null, Duration.ofMillis(0L), Duration.ofMillis(0L), false, 0L); + Stores.persistentWindowStore(null, 0L, 0L, false, 0L); } @Test(expected = IllegalArgumentException.class) public void shouldThrowIfIPersistentWindowStoreRetentionPeriodIsNegative() { - Stores.persistentWindowStore("anyName", Duration.ofMillis(-1L), Duration.ofMillis(0L), false, 0L); + Stores.persistentWindowStore("anyName", -1L, 0L, false, 0L); } @Deprecated @@ -71,22 +71,23 @@ public void shouldThrowIfIPersistentWindowStoreIfNumberOfSegmentsSmallerThanOne( @Test(expected = IllegalArgumentException.class) public void shouldThrowIfIPersistentWindowStoreIfWindowSizeIsNegative() { - Stores.persistentWindowStore("anyName", Duration.ofMillis(0L), Duration.ofMillis(-1L), false); + Stores.persistentWindowStore("anyName", ofMillis(0L), ofMillis(-1L), false); } @Test(expected = IllegalArgumentException.class) public void 
shouldThrowIfIPersistentWindowStoreIfSegmentIntervalIsTooSmall() { - Stores.persistentWindowStore("anyName", Duration.ofMillis(1L), Duration.ofMillis(1L), false, -1L); + Stores.persistentWindowStore("anyName", 1L, 1L, false, -1L); } @Test(expected = NullPointerException.class) public void shouldThrowIfIPersistentSessionStoreStoreNameIsNull() { - Stores.persistentSessionStore(null, 0); + Stores.persistentSessionStore(null, ofMillis(0)); + } @Test(expected = IllegalArgumentException.class) public void shouldThrowIfIPersistentSessionStoreRetentionPeriodIsNegative() { - Stores.persistentSessionStore("anyName", -1); + Stores.persistentSessionStore("anyName", ofMillis(-1)); } @Test(expected = NullPointerException.class) @@ -121,18 +122,18 @@ public void shouldCreateRocksDbStore() { @Test public void shouldCreateRocksDbWindowStore() { - assertThat(Stores.persistentWindowStore("store", Duration.ofMillis(1L), Duration.ofMillis(1L), false).get(), instanceOf(RocksDBWindowStore.class)); + assertThat(Stores.persistentWindowStore("store", ofMillis(1L), ofMillis(1L), false).get(), instanceOf(RocksDBWindowStore.class)); } @Test public void shouldCreateRocksDbSessionStore() { - assertThat(Stores.persistentSessionStore("store", 1).get(), instanceOf(RocksDBSessionStore.class)); + assertThat(Stores.persistentSessionStore("store", ofMillis(1)).get(), instanceOf(RocksDBSessionStore.class)); } @Test public void shouldBuildWindowStore() { final WindowStore store = Stores.windowStoreBuilder( - Stores.persistentWindowStore("store", Duration.ofMillis(3L), Duration.ofMillis(3L), true), + Stores.persistentWindowStore("store", ofMillis(3L), ofMillis(3L), true), Serdes.String(), Serdes.String() ).build(); @@ -152,7 +153,7 @@ public void shouldBuildKeyValueStore() { @Test public void shouldBuildSessionStore() { final SessionStore store = Stores.sessionStoreBuilder( - Stores.persistentSessionStore("name", 10), + Stores.persistentSessionStore("name", ofMillis(10)), Serdes.String(), Serdes.String() 
).build(); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java index 22d855741b475..b19ccbf40a26e 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java @@ -111,7 +111,7 @@ public void send(final String topic, private WindowStore createWindowStore(final ProcessorContext context, final boolean retainDuplicates) { final WindowStore store = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, ofMillis(retentionPeriod), ofMillis(windowSize), retainDuplicates, segmentInterval), + Stores.persistentWindowStore(windowName, retentionPeriod, windowSize, retainDuplicates, segmentInterval), Serdes.Integer(), Serdes.String()).build(); diff --git a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala index 9144493cde196..5df2916316f44 100644 --- a/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala +++ b/streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/MaterializedTest.scala @@ -74,7 +74,7 @@ class MaterializedTest extends FlatSpec with Matchers { } "Create a Materialize with a session store supplier" should "create a Materialized with Serdes and a store supplier" in { - val storeSupplier = Stores.persistentSessionStore("store", 1) + val storeSupplier = Stores.persistentSessionStore("store", Duration.ofMillis(1)) val materialized: Materialized[String, Long, ByteArraySessionStore] = Materialized.as[String, Long](storeSupplier) From d63b7298cd21163cae909525642074b7938af2e1 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Sat, 29 Sep 2018 20:28:34 +0300 
Subject: [PATCH 10/14] KAFKA-7277: Code review fixes: * Unneeded @Deprecated removed --- .../kafka/streams/state/internals/CachingWindowStore.java | 2 -- .../streams/state/internals/ChangeLoggingWindowBytesStore.java | 2 -- .../kafka/streams/state/internals/MeteredWindowStore.java | 3 --- .../kafka/streams/state/internals/RocksDBWindowStore.java | 3 --- 4 files changed, 10 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java index 4cd9503b18eba..914d76b40631d 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java @@ -182,7 +182,6 @@ public byte[] fetch(final Bytes key, final long timestamp) { } @Override - @Deprecated public synchronized WindowStoreIterator fetch(final Bytes key, final long timeFrom, final long timeTo) { // since this function may not access the underlying inner store, we need to validate // if store is open outside as well. 
@@ -277,7 +276,6 @@ public KeyValueIterator, byte[]> all() { } @Override - @Deprecated public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { validateStoreOpen(); diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java index 75bff70cfa859..ee6d0a2c953ed 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java @@ -55,7 +55,6 @@ public byte[] fetch(final Bytes key, final long timestamp) { } @Override - @Deprecated public WindowStoreIterator fetch(final Bytes key, final long from, final long to) { return bytesStore.fetch(key, from, to); } @@ -86,7 +85,6 @@ public KeyValueIterator, byte[]> all() { } @Override - @Deprecated public KeyValueIterator, byte[]> fetchAll(final long timeFrom, final long timeTo) { return bytesStore.fetchAll(timeFrom, timeTo); } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java index a43e6e9542973..27bf0163ccfd1 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java @@ -142,7 +142,6 @@ public V fetch(final K key, final long timestamp) { } @Override - @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { return new MeteredWindowStoreIterator<>(inner.fetch(keyBytes(key), timeFrom, timeTo), fetchTime, @@ -164,7 +163,6 @@ public KeyValueIterator, V> all() { } @Override - @Deprecated public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { return new 
MeteredWindowedKeyValueIterator<>(inner.fetchAll(timeFrom, timeTo), fetchTime, @@ -181,7 +179,6 @@ public KeyValueIterator, V> fetchAll(final Instant from, final Durat } @Override - @Deprecated public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { return new MeteredWindowedKeyValueIterator<>(inner.fetch(keyBytes(from), keyBytes(to), timeFrom, timeTo), fetchTime, diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java index c6e64e3aac139..e57337cb62530 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java @@ -89,7 +89,6 @@ public V fetch(final K key, final long timestamp) { } @Override - @Deprecated public WindowStoreIterator fetch(final K key, final long timeFrom, final long timeTo) { final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(key)), timeFrom, timeTo); return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).valuesIterator(); @@ -103,7 +102,6 @@ public WindowStoreIterator fetch(final K key, final Instant from, final Durat } @Override - @Deprecated public KeyValueIterator, V> fetch(final K from, final K to, final long timeFrom, final long timeTo) { final KeyValueIterator bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(from)), Bytes.wrap(serdes.rawKey(to)), timeFrom, timeTo); return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).keyValueIterator(); @@ -123,7 +121,6 @@ public KeyValueIterator, V> all() { } @Override - @Deprecated public KeyValueIterator, V> fetchAll(final long timeFrom, final long timeTo) { final KeyValueIterator bytesIterator = bytesStore.fetchAll(timeFrom, timeTo); return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, 
windowSize).keyValueIterator(); From 5dbe795e5dd60acd51d42a4d317a6696bd5bdd70 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Mon, 1 Oct 2018 19:31:28 +0300 Subject: [PATCH 11/14] KAFKA-7277: Code review fixes: * Javadocs. * Code style. --- .../apache/kafka/streams/KafkaStreams.java | 4 ++-- .../kafka/streams/internals/ApiUtils.java | 14 +++++------- .../kafka/streams/kstream/JoinWindows.java | 7 +++--- .../kafka/streams/kstream/SessionWindows.java | 1 + .../kafka/streams/kstream/TimeWindows.java | 8 +++---- .../streams/kstream/UnlimitedWindows.java | 2 +- .../streams/processor/ProcessorContext.java | 18 +++++++-------- .../internals/GlobalProcessorContextImpl.java | 2 +- .../internals/ProcessorContextImpl.java | 5 +++-- .../internals/StandbyContextImpl.java | 2 +- .../streams/state/ReadOnlyWindowStore.java | 22 ++++++++----------- .../apache/kafka/streams/state/Stores.java | 2 ++ .../kafka/streams/state/WindowStore.java | 12 +++++----- .../streams/state/WindowStoreIterator.java | 4 +--- .../AbstractProcessorContextTest.java | 5 +++-- .../kafka/streams/state/NoOpWindowStore.java | 10 ++++----- .../internals/ReadOnlyWindowStoreStub.java | 6 +++-- .../test/InternalMockProcessorContext.java | 5 +++-- .../kafka/test/NoOpProcessorContext.java | 5 +++-- .../processor/MockProcessorContext.java | 5 +++-- 20 files changed, 68 insertions(+), 71 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java index 6e04349d5f41a..fc260eaa523c2 100644 --- a/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java +++ b/streams/src/main/java/org/apache/kafka/streams/KafkaStreams.java @@ -906,8 +906,8 @@ public void run() { * @param timeout how long to wait for the threads to shutdown * @return {@code true} if all threads were successfully stopped—{@code false} if the timeout was reached * before all threads stopped - * Note that this method must not be called in the {@code 
onChange} callback of {@link StateListener}. - * @throws IllegalArgumentException if {@param timeout} can't be represented as {@code long milliseconds} + * Note that this method must not be called in the {@link StateListener#onChange(State, State)} callback of {@link StateListener}. + * @throws IllegalArgumentException if {@code timeout} can't be represented as {@code long milliseconds} */ public synchronized boolean close(final Duration timeout) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(timeout, "timeout"); diff --git a/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java b/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java index eb7d3e074dd41..e888d7a120bb3 100644 --- a/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java +++ b/streams/src/main/java/org/apache/kafka/streams/internals/ApiUtils.java @@ -32,13 +32,12 @@ private ApiUtils() { */ public static long validateMillisecondDuration(final Duration duration, final String name) { try { - Objects.requireNonNull(duration); + if (duration == null) + throw new IllegalArgumentException("[" + Objects.toString(name) + "] shouldn't be null."); return duration.toMillis(); - } catch (final NullPointerException e) { - throw new IllegalArgumentException("[" + Objects.toString(name) + "] shouldn't be null.", e); } catch (final ArithmeticException e) { - throw new IllegalArgumentException("[" + Objects.toString(name) + "] can't be converted to milliseconds. ", e); + throw new IllegalArgumentException("[" + name + "] can't be converted to milliseconds. 
", e); } } @@ -50,13 +49,12 @@ public static long validateMillisecondDuration(final Duration duration, final St */ public static long validateMillisecondInstant(final Instant instant, final String name) { try { - Objects.requireNonNull(instant); + if (instant == null) + throw new IllegalArgumentException("[" + name + "] shouldn't be null."); return instant.toEpochMilli(); - } catch (final NullPointerException e) { - throw new IllegalArgumentException("[" + Objects.toString(name) + "] shouldn't be null.", e); } catch (final ArithmeticException e) { - throw new IllegalArgumentException("[" + Objects.toString(name) + "] can't be converted to milliseconds. ", e); + throw new IllegalArgumentException("[" + name + "] can't be converted to milliseconds. ", e); } } } diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java index 107a04963f555..62eade4298a32 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/JoinWindows.java @@ -157,7 +157,7 @@ public JoinWindows before(final long timeDifferenceMs) throws IllegalArgumentExc * value (which would result in a negative window size). * * @param timeDifference relative window start time - * @throws IllegalArgumentException if the resulting window size is negative or can't be represented as {@code long milliseconds} + * @throws IllegalArgumentException if the resulting window size is negative or {@code timeDifference} can't be represented as {@code long milliseconds} */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows before(final Duration timeDifference) throws IllegalArgumentException { @@ -173,7 +173,7 @@ public JoinWindows before(final Duration timeDifference) throws IllegalArgumentE * value (which would result in a negative window size). 
* * @param timeDifferenceMs relative window end time in milliseconds - * @throws IllegalArgumentException if the resulting window size is negative or can't be represented as {@code long milliseconds} + * @throws IllegalArgumentException if the resulting window size is negative * @deprecated Use {@link #after(Duration)} instead */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this @@ -190,7 +190,7 @@ public JoinWindows after(final long timeDifferenceMs) throws IllegalArgumentExce * value (which would result in a negative window size). * * @param timeDifference relative window end time - * @throws IllegalArgumentException if the resulting window size is negative or can't be represented as {@code long milliseconds} + * @throws IllegalArgumentException if the resulting window size is negative or {@code timeDifference} can't be represented as {@code long milliseconds} */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows after(final Duration timeDifference) throws IllegalArgumentException { @@ -222,6 +222,7 @@ public long size() { * * @param afterWindowEnd The grace period to admit late-arriving events to a window. 
* @return this updated builder + * @throws IllegalArgumentException if the {@code afterWindowEnd} is negative or can't be represented as {@code long milliseconds} */ @SuppressWarnings({"deprecation"}) // removing segments from Windows will fix this public JoinWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java index 6ad77f4004851..02c7cbfe79530 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindows.java @@ -142,6 +142,7 @@ public SessionWindows until(final long durationMs) throws IllegalArgumentExcepti * * @param afterWindowEnd The grace period to admit late-arriving events to a window. * @return this updated builder + * @throws IllegalArgumentException if the {@code afterWindowEnd} is negative or can't be represented as {@code long milliseconds} */ public SessionWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(afterWindowEnd, "afterWindowEnd"); diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java index ab8f3cac567dd..46485b146d425 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindows.java @@ -136,8 +136,7 @@ public static TimeWindows of(final Duration size) throws IllegalArgumentExceptio *

    * This provides the semantics of hopping windows, which are fixed-sized, overlapping windows. * - * @param advanceMs The advance interval ("hop") in milliseconds of the window, with the requirement that - * {@code 0 < advanceMs ≤ sizeMs}. + * @param advanceMs The advance interval ("hop") in milliseconds of the window, with the requirement that {@code 0 < advanceMs <= sizeMs}. * @return a new window definition with default maintain duration of 1 day * @throws IllegalArgumentException if the advance interval is negative, zero, or larger-or-equal the window size * @deprecated Use {@link #advanceBy(Duration)} instead @@ -158,8 +157,7 @@ public TimeWindows advanceBy(final long advanceMs) { *

    * This provides the semantics of hopping windows, which are fixed-sized, overlapping windows. * - * @param advance The advance interval ("hop") of the window, with the requirement that - * {@code 0 < advance.toMillis() ≤ sizeMs}. + * @param advance The advance interval ("hop") of the window, with the requirement that {@code 0 < advance.toMillis() <= sizeMs}. * @return a new window definition with default maintain duration of 1 day * @throws IllegalArgumentException if the advance interval is negative, zero, or larger-or-equal the window size */ @@ -194,7 +192,7 @@ public long size() { * * @param afterWindowEnd The grace period to admit late-arriving events to a window. * @return this updated builder - * @throws IllegalArgumentException if afterWindowEnd is negative or can't be represented as {@code long milliseconds} + * @throws IllegalArgumentException if {@code afterWindowEnd} is negative or can't be represented as {@code long milliseconds} */ @SuppressWarnings("deprecation") // will be fixed when we remove segments from Windows public TimeWindows grace(final Duration afterWindowEnd) throws IllegalArgumentException { diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java index a120ba136abd8..46d7270b332b9 100644 --- a/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java +++ b/streams/src/main/java/org/apache/kafka/streams/kstream/UnlimitedWindows.java @@ -79,7 +79,7 @@ public UnlimitedWindows startOn(final long startMs) throws IllegalArgumentExcept * * @param start the window start time * @return a new unlimited window that starts at {@code start} - * @throws IllegalArgumentException if the start time is negative + * @throws IllegalArgumentException if the start time is negative or can't be represented as {@code long milliseconds} */ public UnlimitedWindows startOn(final Instant start) throws IllegalArgumentException { 
ApiUtils.validateMillisecondInstant(start, "start"); diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java b/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java index 8f03764544fec..8ec06d57c4d61 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/ProcessorContext.java @@ -98,16 +98,15 @@ void register(final StateStore store, * Schedules a periodic operation for processors. A processor may call this method during * {@link Processor#init(ProcessorContext) initialization} or * {@link Processor#process(Object, Object) processing} to - * schedule a periodic callback - called a punctuation - to {@link Punctuator#punctuate(long)}. + * schedule a periodic callback — called a punctuation — to {@link Punctuator#punctuate(long)}. * The type parameter controls what notion of time is used for punctuation: *

      - *
    • {@link PunctuationType#STREAM_TIME} - uses "stream time", which is advanced by the processing of messages + *
    • {@link PunctuationType#STREAM_TIME} — uses "stream time", which is advanced by the processing of messages * in accordance with the timestamp as extracted by the {@link TimestampExtractor} in use. * The first punctuation will be triggered by the first record that is processed. * NOTE: Only advanced if messages arrive
    • - *
    • {@link PunctuationType#WALL_CLOCK_TIME} - uses system time (the wall-clock time), - * which is advanced at the polling interval ({@link org.apache.kafka.streams.StreamsConfig#POLL_MS_CONFIG}) - * independent of whether new messages arrive. + *
    • {@link PunctuationType#WALL_CLOCK_TIME} — uses system time (the wall-clock time), + * which is advanced independent of whether new messages arrive. * The first punctuation will be triggered after interval has elapsed. * NOTE: This is best effort only as its granularity is limited by how long an iteration of the * processing loop takes to complete
    • @@ -136,16 +135,15 @@ Cancellable schedule(final long intervalMs, * Schedules a periodic operation for processors. A processor may call this method during * {@link Processor#init(ProcessorContext) initialization} or * {@link Processor#process(Object, Object) processing} to - * schedule a periodic callback - called a punctuation - to {@link Punctuator#punctuate(long)}. + * schedule a periodic callback — called a punctuation — to {@link Punctuator#punctuate(long)}. * The type parameter controls what notion of time is used for punctuation: *
        - *
      • {@link PunctuationType#STREAM_TIME} - uses "stream time", which is advanced by the processing of messages + *
      • {@link PunctuationType#STREAM_TIME} — uses "stream time", which is advanced by the processing of messages * in accordance with the timestamp as extracted by the {@link TimestampExtractor} in use. * The first punctuation will be triggered by the first record that is processed. * NOTE: Only advanced if messages arrive
      • - *
      • {@link PunctuationType#WALL_CLOCK_TIME} - uses system time (the wall-clock time), - * which is advanced at the polling interval ({@link org.apache.kafka.streams.StreamsConfig#POLL_MS_CONFIG}) - * independent of whether new messages arrive. + *
      • {@link PunctuationType#WALL_CLOCK_TIME} — uses system time (the wall-clock time), + * which is advanced independent of whether new messages arrive. * The first punctuation will be triggered after interval has elapsed. * NOTE: This is best effort only as its granularity is limited by how long an iteration of the * processing loop takes to complete
      • diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java index dda91db491896..5c5b84f155d67 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/GlobalProcessorContextImpl.java @@ -102,7 +102,7 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin * @throws UnsupportedOperationException on every invocation */ @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) { throw new UnsupportedOperationException("this should not happen: schedule() not supported in global processor context."); } diff --git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java index 8b3bc153f7063..21e1c17a8985f 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorContextImpl.java @@ -158,8 +158,9 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin } @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, - final Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, + final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { ApiUtils.validateMillisecondDuration(interval, "interval"); return schedule(interval.toMillis(), type, callback); } diff 
--git a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java index 90370cdc6050c..6b835d97a3fb1 100644 --- a/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java +++ b/streams/src/main/java/org/apache/kafka/streams/processor/internals/StandbyContextImpl.java @@ -190,7 +190,7 @@ public void commit() { * @throws UnsupportedOperationException on every invocation */ @Override - @Deprecated + @SuppressWarnings("deprecation") public Cancellable schedule(final long interval, final PunctuationType type, final Punctuator callback) { throw new UnsupportedOperationException("this should not happen: schedule() not supported in standby tasks."); } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java index 332b230872032..b3f2318c4b563 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java @@ -42,9 +42,8 @@ public interface ReadOnlyWindowStore { V fetch(K key, long time); /** - * Get all the key-value pairs with the given key and the time range from all - * the existing windows. - * + * Get all the key-value pairs with the given key and the time range from all the existing windows. + *

        * This iterator must be closed after use. *

        * The time range is inclusive and applies to the starting timestamp of the window. @@ -81,9 +80,8 @@ public interface ReadOnlyWindowStore { WindowStoreIterator fetch(K key, long timeFrom, long timeTo); /** - * Get all the key-value pairs with the given key and the time range from all - * the existing windows. - * + * Get all the key-value pairs with the given key and the time range from all the existing windows. + *

        * This iterator must be closed after use. *

        * The time range is inclusive and applies to the starting timestamp of the window. @@ -102,7 +100,7 @@ public interface ReadOnlyWindowStore { * | A | 25 | 35 | * +-------------------------------- * - * And we call {@code store.fetch("A", ofEpochMilli(10), ofMillis(20))} then the results will contain the first + * And we call {@code store.fetch("A", Instant.ofEpochMilli(10), Duration.ofMillis(20))} then the results will contain the first * three windows from the table above, i.e., all those where 10 <= start time <= 20. *

        * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest @@ -119,9 +117,8 @@ public interface ReadOnlyWindowStore { WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException; /** - * Get all the key-value pairs in the given key range and time range from all - * the existing windows. - * + * Get all the key-value pairs in the given key range and time range from all the existing windows. + *

        * This iterator must be closed after use. * * @param from the first key in the range @@ -137,9 +134,8 @@ public interface ReadOnlyWindowStore { KeyValueIterator, V> fetch(K from, K to, long timeFrom, long timeTo); /** - * Get all the key-value pairs in the given key range and time range from all - * the existing windows. - * + * Get all the key-value pairs in the given key range and time range from all the existing windows. + *

        * This iterator must be closed after use. * * @param from the first key in the range diff --git a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java index 98edc71d0feec..30e51403714c5 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/Stores.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/Stores.java @@ -210,6 +210,7 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, * @param windowSize size of the windows (cannot be negative) * @param retainDuplicates whether or not to retain duplicates. * @return an instance of {@link WindowBytesStoreSupplier} + * @throws IllegalArgumentException if {@code retentionPeriod} or {@code windowSize} can't be represented as {@code long milliseconds} */ public static WindowBytesStoreSupplier persistentWindowStore(final String name, final Duration retentionPeriod, @@ -233,6 +234,7 @@ public static WindowBytesStoreSupplier persistentWindowStore(final String name, * @param windowSize size of the windows (cannot be negative) * @param retainDuplicates whether or not to retain duplicates. 
* @return an instance of {@link WindowBytesStoreSupplier} + * @deprecated Use {@link #persistentWindowStore(String, Duration, Duration, boolean)} instead */ @Deprecated public static WindowBytesStoreSupplier persistentWindowStore(final String name, diff --git a/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java index d07de539c41a1..ad74ae1e74d78 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/WindowStore.java @@ -52,9 +52,8 @@ public interface WindowStore extends StateStore, ReadOnlyWindowStore void put(K key, V value, long windowStartTimestamp); /** - * Get all the key-value pairs with the given key and the time range from all - * the existing windows. - * + * Get all the key-value pairs with the given key and the time range from all the existing windows. + *

        * This iterator must be closed after use. *

        * The time range is inclusive and applies to the starting timestamp of the window. @@ -73,7 +72,7 @@ public interface WindowStore extends StateStore, ReadOnlyWindowStore * | A | 25 | 35 | * +-------------------------------- * - * And we call {@code store.fetch("A", Instant.ofEpochMilli(10), Duration.ofMillis(20))} then the results will contain the first + * And we call {@code store.fetch("A", 10, 20)} then the results will contain the first * three windows from the table above, i.e., all those where 10 <= start time <= 20. *

        * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest @@ -89,9 +88,8 @@ public interface WindowStore extends StateStore, ReadOnlyWindowStore WindowStoreIterator fetch(K key, long timeFrom, long timeTo); /** - * Get all the key-value pairs in the given key range and time range from all - * the existing windows. - * + * Get all the key-value pairs in the given key range and time range from all the existing windows. + *

        * This iterator must be closed after use. * * @param from the first key in the range diff --git a/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java b/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java index 0b73d7df6f1f7..c07130e0fe56a 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java @@ -16,14 +16,12 @@ */ package org.apache.kafka.streams.state; -import java.time.Duration; -import java.time.Instant; import org.apache.kafka.streams.KeyValue; import java.io.Closeable; /** - * Iterator interface of {@link KeyValue} with key typed {@link Long} used for {@link WindowStore#fetch(Object, Instant, Duration)}. + * Iterator interface of {@link KeyValue} with key typed {@link Long} used for {@link WindowStore#fetch(Object, long, long)}. * * Users must call its {@code close} method explicitly upon completeness to release resources, * or use try-with-resources statement (available since JDK7) for this {@link Closeable} class. 
diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java index 2f59bb5f560fa..4ce9a9f4f177c 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/AbstractProcessorContextTest.java @@ -204,8 +204,9 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin } @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, - final Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, + final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { return null; } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java index 99aacd0d19bd4..d67c2c7d17e82 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java @@ -90,18 +90,18 @@ public Object fetch(final Object key, final long time) { } @Override - @Deprecated + @SuppressWarnings("deprecation") public WindowStoreIterator fetch(final Object key, final long timeFrom, final long timeTo) { return EMPTY_WINDOW_STORE_ITERATOR; } @Override - public WindowStoreIterator fetch(final Object key, final Instant from, final Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final Object key, final Instant from, final Duration duration) { return EMPTY_WINDOW_STORE_ITERATOR; } @Override - @Deprecated + @SuppressWarnings("deprecation") public WindowStoreIterator fetch(final Object from, final Object to, final long timeFrom, final long timeTo) { return 
EMPTY_WINDOW_STORE_ITERATOR; } @@ -118,13 +118,13 @@ public WindowStoreIterator all() { } @Override - @Deprecated + @SuppressWarnings("deprecation") public WindowStoreIterator fetchAll(final long timeFrom, final long timeTo) { return EMPTY_WINDOW_STORE_ITERATOR; } @Override - public KeyValueIterator fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator fetchAll(final Instant from, final Duration duration) { return EMPTY_WINDOW_STORE_ITERATOR; } } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java index ba6df179665dc..0c148421177fe 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java @@ -226,8 +226,10 @@ public void remove() { }; } - @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, - final Duration duration) throws IllegalArgumentException { + @Override public KeyValueIterator, V> fetch(final K from, + final K to, + final Instant fromTime, + final Duration duration) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); ApiUtils.validateMillisecondDuration(duration, "duration"); return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); diff --git a/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java b/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java index d486abe3c6aaf..2f356bff3a133 100644 --- a/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java +++ b/streams/src/test/java/org/apache/kafka/test/InternalMockProcessorContext.java @@ -211,8 +211,9 @@ public Cancellable schedule(final long interval, final 
PunctuationType type, fin } @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, - final Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, + final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { throw new UnsupportedOperationException("schedule() not supported."); } diff --git a/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java b/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java index e58567a698a98..36d049c58bad1 100644 --- a/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java +++ b/streams/src/test/java/org/apache/kafka/test/NoOpProcessorContext.java @@ -59,8 +59,9 @@ public Cancellable schedule(final long interval, final PunctuationType type, fin } @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, - final Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, + final PunctuationType type, + final Punctuator callback) throws IllegalArgumentException { return null; } diff --git a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java index fcfed6e0cfeaa..88a7fe79e2beb 100644 --- a/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java +++ b/streams/test-utils/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java @@ -390,8 +390,9 @@ public Cancellable schedule(final long intervalMs, final PunctuationType type, f } @Override - public Cancellable schedule(final Duration interval, final PunctuationType type, - final Punctuator callback) throws IllegalArgumentException { + public Cancellable schedule(final Duration interval, + final PunctuationType type, + final Punctuator callback) throws 
IllegalArgumentException { ApiUtils.validateMillisecondDuration(interval, "interval"); return schedule(interval.toMillis(), type, callback); } From 4e8b65f1d46a9778ec8342ce078ba8a56f933cb1 Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Thu, 4 Oct 2018 07:09:32 +0300 Subject: [PATCH 12/14] KAFKA-7277: Duration -> Instant in ReadOnlyWindowStore. --- .../streams/state/ReadOnlyWindowStore.java | 23 +++++------ .../state/internals/CachingWindowStore.java | 24 +++++------ .../ChangeLoggingWindowBytesStore.java | 23 ++++++----- .../CompositeReadOnlyWindowStore.java | 19 +++++---- .../state/internals/MeteredWindowStore.java | 19 +++++---- .../state/internals/RocksDBWindowStore.java | 19 +++++---- .../kafka/streams/state/NoOpWindowStore.java | 11 ++--- .../internals/CachingWindowStoreTest.java | 40 +++++++++---------- .../internals/ReadOnlyWindowStoreStub.java | 19 +++++---- 9 files changed, 96 insertions(+), 101 deletions(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java index b3f2318c4b563..08043388fb464 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/ReadOnlyWindowStore.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state; -import java.time.Duration; import java.time.Instant; import org.apache.kafka.streams.errors.InvalidStateStoreException; import org.apache.kafka.streams.kstream.Windowed; @@ -62,7 +61,7 @@ public interface ReadOnlyWindowStore { * | A | 25 | 35 | * +-------------------------------- * - * And we call {@code store.fetch("A", Instant.ofEpochMilli(10), Duration.ofMillis(20))} then the results will contain the first + * And we call {@code store.fetch("A", 10, 20)} then the results will contain the first * three windows from the table above, i.e., all those where 10 <= start time <= 20. *

        * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest @@ -74,7 +73,7 @@ public interface ReadOnlyWindowStore { * @return an iterator over key-value pairs {@code } * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for key. - * @deprecated Use {@link #fetch(K, Instant, Duration)} instead + * @deprecated Use {@link #fetch(K, Instant, Instant)} instead */ @Deprecated WindowStoreIterator fetch(K key, long timeFrom, long timeTo); @@ -100,7 +99,7 @@ public interface ReadOnlyWindowStore { * | A | 25 | 35 | * +-------------------------------- * - * And we call {@code store.fetch("A", Instant.ofEpochMilli(10), Duration.ofMillis(20))} then the results will contain the first + * And we call {@code store.fetch("A", Instant.ofEpochMilli(10), Instant.ofEpochMilli(20))} then the results will contain the first * three windows from the table above, i.e., all those where 10 <= start time <= 20. *

        * For each key, the iterator guarantees ordering of windows, starting from the oldest/earliest @@ -108,13 +107,13 @@ public interface ReadOnlyWindowStore { * * @param key the key to fetch * @param from time range start (inclusive) - * @param duration duration of range + * @param to time range end (inclusive) * @return an iterator over key-value pairs {@code } * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for key. * @throws IllegalArgumentException if duration is negative or can't be represented as {@code long milliseconds} */ - WindowStoreIterator fetch(K key, Instant from, Duration duration) throws IllegalArgumentException; + WindowStoreIterator fetch(K key, Instant from, Instant to) throws IllegalArgumentException; /** * Get all the key-value pairs in the given key range and time range from all the existing windows. @@ -128,7 +127,7 @@ public interface ReadOnlyWindowStore { * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for any key. - * @deprecated Use {@link #fetch(Object, Object, Instant, Duration)} instead + * @deprecated Use {@link #fetch(Object, Object, Instant, Instant)} instead */ @Deprecated KeyValueIterator, V> fetch(K from, K to, long timeFrom, long timeTo); @@ -141,13 +140,13 @@ KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration * @param from the first key in the range * @param to the last key in the range * @param fromTime time range start (inclusive) - * @param duration time range duration + * @param toTime time range end (inclusive) * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException If {@code null} is used for any key.
* @throws IllegalArgumentException if duration is negative or can't be represented as {@code long milliseconds} */ - KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration duration) + KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Instant toTime) throws IllegalArgumentException; /** @@ -166,7 +165,7 @@ KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException if {@code null} is used for any key - * @deprecated Use {@link #fetchAll(Instant, Duration)} instead + * @deprecated Use {@link #fetchAll(Instant, Instant)} instead */ @Deprecated KeyValueIterator, V> fetchAll(long timeFrom, long timeTo); @@ -175,11 +174,11 @@ KeyValueIterator, V> fetch(K from, K to, Instant fromTime, Duration * Gets all the key-value pairs that belong to the windows within in the given time range. * * @param from the beginning of the time slot from which to search (inclusive) - * @param duration the druation of the time slot from which to search (inclusive) + * @param to the end of the time slot from which to search (inclusive) * @return an iterator over windowed key-value pairs {@code , value>} * @throws InvalidStateStoreException if the store is not initialized * @throws NullPointerException if {@code null} is used for any key * @throws IllegalArgumentException if duration is negative or can't be represented as {@code long milliseconds} */ - KeyValueIterator, V> fetchAll(Instant from, Duration duration) throws IllegalArgumentException; + KeyValueIterator, V> fetchAll(Instant from, Instant to) throws IllegalArgumentException; } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java index 4b2fd069e7e68..f6b62b2b9351b 100644 --- 
a/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CachingWindowStore.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import java.time.Instant; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; @@ -207,10 +206,10 @@ public synchronized WindowStoreIterator fetch(final Bytes key, final lon } @Override - public WindowStoreIterator fetch(final Bytes key, final Instant from, final Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final Bytes key, final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetch(key, from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -243,12 +242,13 @@ public KeyValueIterator, byte[]> fetch(final Bytes from, final B } @Override - public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final Instant fromTime, - final Duration duration) throws IllegalArgumentException { - + public KeyValueIterator, byte[]> fetch(final Bytes from, + final Bytes to, + final Instant fromTime, + final Instant toTime) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(toTime, "toTime"); + return fetch(from, to, fromTime.toEpochMilli(), toTime.toEpochMilli()); } private V fetchPrevious(final Bytes key, final long timestamp) { @@ -296,9 +296,9 @@ public KeyValueIterator, byte[]> 
fetchAll(final long timeFrom, f } @Override - public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, byte[]> fetchAll(final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetchAll(from.toEpochMilli(), to.toEpochMilli()); } } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java index ee6d0a2c953ed..d4e47c6d18f96 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStore.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import java.time.Instant; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; @@ -60,10 +59,10 @@ public WindowStoreIterator fetch(final Bytes key, final long from, final } @Override - public WindowStoreIterator fetch(final Bytes key, final Instant from, final Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final Bytes key, final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetch(key, from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -72,11 +71,13 @@ public KeyValueIterator, 
byte[]> fetch(final Bytes keyFrom, fina } @Override - public KeyValueIterator, byte[]> fetch(final Bytes from, final Bytes to, final Instant fromTime, - final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, byte[]> fetch(final Bytes from, + final Bytes to, + final Instant fromTime, + final Instant toTime) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(toTime, "toTime"); + return fetch(from, to, fromTime.toEpochMilli(), toTime.toEpochMilli()); } @Override @@ -90,10 +91,10 @@ public KeyValueIterator, byte[]> fetchAll(final long timeFrom, f } @Override - public KeyValueIterator, byte[]> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, byte[]> fetchAll(final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetchAll(from.toEpochMilli(), to.toEpochMilli()); } @Override diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java index a52aa3366ab50..d95b44222e20b 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStore.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import java.time.Instant; import 
org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.errors.InvalidStateStoreException; @@ -89,10 +88,10 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } @Override - public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final K key, final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetch(key, from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -112,10 +111,10 @@ public KeyValueIterator, V> apply(final ReadOnlyWindowStore st } @Override - public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Instant toTime) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(toTime, "toTime"); + return fetch(from, to, fromTime.toEpochMilli(), toTime.toEpochMilli()); } @Override @@ -148,9 +147,9 @@ public KeyValueIterator, V> apply(final ReadOnlyWindowStore st } @Override - public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetchAll(final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return 
fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetchAll(from.toEpochMilli(), to.toEpochMilli()); } } diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java index 27bf0163ccfd1..e1b6cd1d52e4f 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredWindowStore.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import java.time.Instant; import org.apache.kafka.common.metrics.Sensor; import org.apache.kafka.common.serialization.Serde; @@ -151,10 +150,10 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } @Override - public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final K key, final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetch(key, from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -172,10 +171,10 @@ public KeyValueIterator, V> fetchAll(final long timeFrom, final long } @Override - public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetchAll(final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetchAll(from.toEpochMilli(), 
from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetchAll(from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -188,10 +187,10 @@ public KeyValueIterator, V> fetch(final K from, final K to, final lo } @Override - public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Instant toTime) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(toTime, "toTime"); + return fetch(from, to, fromTime.toEpochMilli(), toTime.toEpochMilli()); } @Override diff --git a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java index e57337cb62530..e8037bc816358 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBWindowStore.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import java.time.Instant; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; @@ -95,10 +94,10 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } @Override - public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final K key, final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - 
ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetch(key, from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -108,10 +107,10 @@ public KeyValueIterator, V> fetch(final K from, final K to, final lo } @Override - public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, final Instant toTime) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(toTime, "toTime"); + return fetch(from, to, fromTime.toEpochMilli(), toTime.toEpochMilli()); } @Override @@ -127,10 +126,10 @@ public KeyValueIterator, V> fetchAll(final long timeFrom, final long } @Override - public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetchAll(final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetchAll(from.toEpochMilli(), to.toEpochMilli()); } private void maybeUpdateSeqnumForDups() { diff --git a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java index d67c2c7d17e82..34d9050c74870 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java +++ 
b/streams/src/test/java/org/apache/kafka/streams/state/NoOpWindowStore.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state; -import java.time.Duration; import java.time.Instant; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.processor.ProcessorContext; @@ -96,7 +95,7 @@ public WindowStoreIterator fetch(final Object key, final long timeFrom, final lo } @Override - public WindowStoreIterator fetch(final Object key, final Instant from, final Duration duration) { + public WindowStoreIterator fetch(final Object key, final Instant from, final Instant to) { return EMPTY_WINDOW_STORE_ITERATOR; } @@ -107,8 +106,10 @@ public WindowStoreIterator fetch(final Object from, final Object to, f } @Override - public KeyValueIterator fetch(final Object from, final Object to, final Instant fromTime, - final Duration duration) throws IllegalArgumentException { + public KeyValueIterator fetch(final Object from, + final Object to, + final Instant fromTime, + final Instant toTime) throws IllegalArgumentException { return EMPTY_WINDOW_STORE_ITERATOR; } @@ -124,7 +125,7 @@ public WindowStoreIterator fetchAll(final long timeFrom, final long ti } @Override - public KeyValueIterator fetchAll(final Instant from, final Duration duration) { + public KeyValueIterator fetchAll(final Instant from, final Instant to) { return EMPTY_WINDOW_STORE_ITERATOR; } } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java index 4fe2e010a0359..e4d40a3dc60d0 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java @@ -50,9 +50,7 @@ import java.util.UUID; import static java.time.Duration.ofHours; -import static java.time.Duration.ofMillis; import static java.time.Duration.ofMinutes; -import 
static java.time.Instant.ofEpochMilli; import static org.apache.kafka.common.utils.Utils.mkList; import static org.apache.kafka.streams.state.internals.ThreadCacheTest.memoryCacheEntrySize; import static org.apache.kafka.test.StreamsTestUtils.toList; @@ -201,8 +199,8 @@ public void shouldPutFetchFromCache() { assertThat(cachingStore.fetch(bytesKey("c"), 10), equalTo(null)); assertThat(cachingStore.fetch(bytesKey("a"), 0), equalTo(null)); - final WindowStoreIterator a = cachingStore.fetch(bytesKey("a"), ofEpochMilli(10), ofMillis(0)); - final WindowStoreIterator b = cachingStore.fetch(bytesKey("b"), ofEpochMilli(10), ofMillis(0)); + final WindowStoreIterator a = cachingStore.fetch(bytesKey("a"), 10, 10); + final WindowStoreIterator b = cachingStore.fetch(bytesKey("b"), 10, 10); verifyKeyValue(a.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(b.next(), DEFAULT_TIMESTAMP, "b"); assertFalse(a.hasNext()); @@ -228,7 +226,7 @@ public void shouldPutFetchRangeFromCache() { cachingStore.put(bytesKey("a"), bytesValue("a")); cachingStore.put(bytesKey("b"), bytesValue("b")); - final KeyValueIterator, byte[]> iterator = cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(10), ofMillis(0)); + final KeyValueIterator, byte[]> iterator = cachingStore.fetch(bytesKey("a"), bytesKey("b"), 10, 10); verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "a"); verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "b"); assertFalse(iterator.hasNext()); @@ -262,21 +260,21 @@ public void shouldFetchAllWithinTimestampRange() { cachingStore.put(bytesKey(array[i]), bytesValue(array[i])); } - final KeyValueIterator, byte[]> iterator = cachingStore.fetchAll(ofEpochMilli(0), ofMillis(7)); + final KeyValueIterator, byte[]> iterator = cachingStore.fetchAll(0, 7); for (int i = 0; i < array.length; i++) { final String str = 
array[i]; verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); } assertFalse(iterator.hasNext()); - final KeyValueIterator, byte[]> iterator1 = cachingStore.fetchAll(ofEpochMilli(2), ofMillis(2)); + final KeyValueIterator, byte[]> iterator1 = cachingStore.fetchAll(2, 4); for (int i = 2; i <= 4; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator1.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); } assertFalse(iterator1.hasNext()); - final KeyValueIterator, byte[]> iterator2 = cachingStore.fetchAll(ofEpochMilli(5), ofMillis(2)); + final KeyValueIterator, byte[]> iterator2 = cachingStore.fetchAll(5, 7); for (int i = 5; i <= 7; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator2.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); @@ -340,7 +338,7 @@ public void shouldTakeValueFromCacheIfSameTimestampFlushedToRocks() { cachingStore.flush(); cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(0)); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "b"); assertFalse(fetch.hasNext()); } @@ -350,7 +348,7 @@ public void shouldIterateAcrossWindows() { cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP); cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(WINDOW_SIZE)); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b"); assertFalse(fetch.hasNext()); 
@@ -361,7 +359,7 @@ public void shouldIterateCacheAndStore() { final Bytes key = Bytes.wrap("1".getBytes()); underlying.put(WindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes()); cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(WINDOW_SIZE)); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b"); assertFalse(fetch.hasNext()); @@ -374,7 +372,7 @@ public void shouldIterateCacheAndStoreKeyRange() { cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); final KeyValueIterator, byte[]> fetchRange = - cachingStore.fetch(key, bytesKey("2"), ofEpochMilli(DEFAULT_TIMESTAMP), ofMillis(WINDOW_SIZE)); + cachingStore.fetch(key, bytesKey("2"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); verifyWindowedKeyValue(fetchRange.next(), new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "a"); verifyWindowedKeyValue(fetchRange.next(), new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP + WINDOW_SIZE, DEFAULT_TIMESTAMP + WINDOW_SIZE + WINDOW_SIZE)), "b"); assertFalse(fetchRange.hasNext()); @@ -391,13 +389,13 @@ public void shouldClearNamespaceCacheOnClose() { @Test(expected = InvalidStateStoreException.class) public void shouldThrowIfTryingToFetchFromClosedCachingStore() { cachingStore.close(); - cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofMillis(10)); + cachingStore.fetch(bytesKey("a"), 0, 10); } @Test(expected = InvalidStateStoreException.class) public void shouldThrowIfTryingToFetchRangeFromClosedCachingStore() { cachingStore.close(); - cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(0), ofMillis(10)); + cachingStore.fetch(bytesKey("a"), bytesKey("b"), 0, 
10); } @Test(expected = InvalidStateStoreException.class) @@ -419,7 +417,7 @@ public void shouldFetchAndIterateOverExactKeys() { KeyValue.pair(1L, bytesValue("0003")), KeyValue.pair(SEGMENT_INTERVAL, bytesValue("0005")) ); - final List> actual = toList(cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))); + final List> actual = toList(cachingStore.fetch(bytesKey("a"), 0, Long.MAX_VALUE)); verifyKeyValueList(expected, actual); } @@ -437,12 +435,12 @@ public void shouldFetchAndIterateOverKeyRange() { windowedPair("a", "0003", 1), windowedPair("a", "0005", SEGMENT_INTERVAL) ), - toList(cachingStore.fetch(bytesKey("a"), bytesKey("a"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))) + toList(cachingStore.fetch(bytesKey("a"), bytesKey("a"), 0, Long.MAX_VALUE)) ); verifyKeyValueList( mkList(windowedPair("aa", "0002", 0), windowedPair("aa", "0004", 1)), - toList(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))) + toList(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), 0, Long.MAX_VALUE)) ); verifyKeyValueList( @@ -453,7 +451,7 @@ public void shouldFetchAndIterateOverKeyRange() { windowedPair("aa", "0004", 1), windowedPair("a", "0005", SEGMENT_INTERVAL) ), - toList(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0), ofMillis(Long.MAX_VALUE))) + toList(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), 0, Long.MAX_VALUE)) ); } @@ -469,17 +467,17 @@ public void shouldNotThrowNullPointerExceptionOnPutNullValue() { @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnFetchNullKey() { - cachingStore.fetch(null, ofEpochMilli(1L), ofMillis(1L)); + cachingStore.fetch(null, 1L, 2L); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullFromKey() { - cachingStore.fetch(null, bytesKey("anyTo"), ofEpochMilli(1L), ofMillis(1L)); + cachingStore.fetch(null, bytesKey("anyTo"), 1L, 2L); } @Test(expected = 
NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullToKey() { - cachingStore.fetch(bytesKey("anyFrom"), null, ofEpochMilli(1L), ofMillis(1L)); + cachingStore.fetch(bytesKey("anyFrom"), null, 1L, 2L); } private static KeyValue, byte[]> windowedPair(final String key, final String value, final long timestamp) { diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java index 0c148421177fe..99abdc4746b71 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/ReadOnlyWindowStoreStub.java @@ -16,7 +16,6 @@ */ package org.apache.kafka.streams.state.internals; -import java.time.Duration; import java.time.Instant; import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.KeyValue; @@ -77,10 +76,10 @@ public WindowStoreIterator fetch(final K key, final long timeFrom, final long } @Override - public WindowStoreIterator fetch(final K key, final Instant from, final Duration duration) throws IllegalArgumentException { + public WindowStoreIterator fetch(final K key, final Instant from, final Instant to) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(key, from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetch(key, from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -175,10 +174,10 @@ public void remove() { } @Override - public KeyValueIterator, V> fetchAll(final Instant from, final Duration duration) throws IllegalArgumentException { + public KeyValueIterator, V> fetchAll(final Instant from, final Instant to) throws IllegalArgumentException { 
ApiUtils.validateMillisecondInstant(from, "from"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetchAll(from.toEpochMilli(), from.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(to, "to"); + return fetchAll(from.toEpochMilli(), to.toEpochMilli()); } @Override @@ -229,10 +228,10 @@ public void remove() { @Override public KeyValueIterator, V> fetch(final K from, final K to, final Instant fromTime, - final Duration duration) throws IllegalArgumentException { + final Instant toTime) throws IllegalArgumentException { ApiUtils.validateMillisecondInstant(fromTime, "fromTime"); - ApiUtils.validateMillisecondDuration(duration, "duration"); - return fetch(from, to, fromTime.toEpochMilli(), fromTime.toEpochMilli() + duration.toMillis()); + ApiUtils.validateMillisecondInstant(toTime, "toTime"); + return fetch(from, to, fromTime.toEpochMilli(), toTime.toEpochMilli()); } public void put(final K key, final V value, final long timestamp) { From a583259287f809189ec266e44a79652a3614338b Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Thu, 4 Oct 2018 07:37:48 +0300 Subject: [PATCH 13/14] KAFKA-7277: Duration -> Instant in ReadOnlyWindowStore. Tests passed. 
--- .../QueryableStateIntegrationTest.java | 6 +- .../internals/SuppressTopologyTest.java | 5 +- .../TimeWindowedKStreamImplTest.java | 7 +- .../kafka/streams/perf/SimpleBenchmark.java | 2 +- .../internals/CachingWindowStoreTest.java | 39 +-- .../ChangeLoggingWindowBytesStoreTest.java | 5 +- .../CompositeReadOnlyWindowStoreTest.java | 33 +- .../internals/MeteredWindowStoreTest.java | 5 +- .../internals/RocksDBWindowStoreTest.java | 294 +++++++++--------- 9 files changed, 196 insertions(+), 200 deletions(-) diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java index 08d7373ba385a..76eec71daf979 100644 --- a/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java @@ -364,7 +364,7 @@ public boolean conditionMet() { final int index = metadata.hostInfo().port(); final KafkaStreams streamsWithKey = streamRunnables[index].getStream(); final ReadOnlyWindowStore store = streamsWithKey.store(storeName, QueryableStoreTypes.windowStore()); - return store != null && store.fetch(key, ofEpochMilli(from), ofMillis(to - from)) != null; + return store != null && store.fetch(key, ofEpochMilli(from), ofEpochMilli(to)) != null; } catch (final IllegalStateException e) { // Kafka Streams instance may have closed but rebalance hasn't happened return false; @@ -1020,7 +1020,7 @@ private void waitUntilAtLeastNumRecordProcessed(final String topic, final int nu private Set> fetch(final ReadOnlyWindowStore store, final String key) { - final WindowStoreIterator fetch = store.fetch(key, ofEpochMilli(0), ofMillis(System.currentTimeMillis())); + final WindowStoreIterator fetch = store.fetch(key, ofEpochMilli(0), ofEpochMilli(System.currentTimeMillis())); if (fetch.hasNext()) { final KeyValue next = fetch.next(); 
return Collections.singleton(KeyValue.pair(key, next.value)); @@ -1031,7 +1031,7 @@ private Set> fetch(final ReadOnlyWindowStore fetchMap(final ReadOnlyWindowStore store, final String key) { - final WindowStoreIterator fetch = store.fetch(key, ofEpochMilli(0), ofMillis(System.currentTimeMillis())); + final WindowStoreIterator fetch = store.fetch(key, ofEpochMilli(0), ofEpochMilli(System.currentTimeMillis())); if (fetch.hasNext()) { final KeyValue next = fetch.next(); return Collections.singletonMap(key, next.value); diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressTopologyTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressTopologyTest.java index 70c25c7506001..97bdb1d39ec66 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressTopologyTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressTopologyTest.java @@ -32,6 +32,7 @@ import java.time.Duration; +import static java.time.Duration.ofMillis; import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.unbounded; import static org.apache.kafka.streams.kstream.Suppressed.untilTimeLimit; import static org.apache.kafka.streams.kstream.Suppressed.untilWindowCloses; @@ -146,7 +147,7 @@ public void shouldUseNumberingForAnonymousFinalSuppressionNode() { anonymousNodeBuilder .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE)) .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE)) - .windowedBy(SessionWindows.with(5L).grace(5L)) + .windowedBy(SessionWindows.with(ofMillis(5L)).grace(ofMillis(5L))) .count(Materialized.>as("counts").withCachingDisabled()) .suppress(untilWindowCloses(unbounded())) .toStream() @@ -164,7 +165,7 @@ public void shouldApplyNameToFinalSuppressionNode() { namedNodeBuilder .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE)) .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE)) - 
.windowedBy(SessionWindows.with(5L).grace(5L)) + .windowedBy(SessionWindows.with(ofMillis(5L)).grace(ofMillis(5L))) .count(Materialized.>as("counts").withCachingDisabled()) .suppress(untilWindowCloses(unbounded()).withName("myname")) .toStream() diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java index 6b2687d7444cf..f951ebb415504 100644 --- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowedKStreamImplTest.java @@ -47,7 +47,6 @@ import java.util.Properties; import static java.time.Duration.ofMillis; -import static java.time.Duration.ofSeconds; import static java.time.Instant.ofEpochMilli; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -139,7 +138,7 @@ public void shouldMaterializeCount() { try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props, 0L)) { processData(driver); final WindowStore windowStore = driver.getWindowStore("count-store"); - final List, Long>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofSeconds(1))); + final List, Long>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofEpochMilli(1000L))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), 2L), @@ -158,7 +157,7 @@ public void shouldMaterializeReduced() { try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props, 0L)) { processData(driver); final WindowStore windowStore = driver.getWindowStore("reduced"); - final List, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofSeconds(1))); + final List, String>> data = 
StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofEpochMilli(1000L))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), "1+2"), @@ -178,7 +177,7 @@ public void shouldMaterializeAggregated() { try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props, 0L)) { processData(driver); final WindowStore windowStore = driver.getWindowStore("aggregated"); - final List, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofSeconds(1))); + final List, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "2", ofEpochMilli(0), ofEpochMilli(1000L))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(0, 500)), "0+1+2"), KeyValue.pair(new Windowed<>("1", new TimeWindow(500, 1000)), "0+3"), diff --git a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java index a82025c5999a4..4008689d3efb5 100644 --- a/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java +++ b/streams/src/test/java/org/apache/kafka/streams/perf/SimpleBenchmark.java @@ -501,7 +501,7 @@ public void init(final ProcessorContext context) { @Override public void process(final Integer key, final byte[] value) { final long timestamp = context().timestamp(); - final KeyValueIterator, byte[]> iter = store.fetch(key - 10, key + 10, ofEpochMilli(timestamp - 1000L), ofSeconds(1L)); + final KeyValueIterator, byte[]> iter = store.fetch(key - 10, key + 10, ofEpochMilli(timestamp - 1000L), ofEpochMilli(timestamp)); while (iter.hasNext()) { iter.next(); } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java index e4d40a3dc60d0..71147319fb3ff 100644 --- 
a/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/CachingWindowStoreTest.java @@ -51,6 +51,7 @@ import static java.time.Duration.ofHours; import static java.time.Duration.ofMinutes; +import static java.time.Instant.ofEpochMilli; import static org.apache.kafka.common.utils.Utils.mkList; import static org.apache.kafka.streams.state.internals.ThreadCacheTest.memoryCacheEntrySize; import static org.apache.kafka.test.StreamsTestUtils.toList; @@ -199,8 +200,8 @@ public void shouldPutFetchFromCache() { assertThat(cachingStore.fetch(bytesKey("c"), 10), equalTo(null)); assertThat(cachingStore.fetch(bytesKey("a"), 0), equalTo(null)); - final WindowStoreIterator a = cachingStore.fetch(bytesKey("a"), 10, 10); - final WindowStoreIterator b = cachingStore.fetch(bytesKey("b"), 10, 10); + final WindowStoreIterator a = cachingStore.fetch(bytesKey("a"), ofEpochMilli(10), ofEpochMilli(10)); + final WindowStoreIterator b = cachingStore.fetch(bytesKey("b"), ofEpochMilli(10), ofEpochMilli(10)); verifyKeyValue(a.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(b.next(), DEFAULT_TIMESTAMP, "b"); assertFalse(a.hasNext()); @@ -226,7 +227,7 @@ public void shouldPutFetchRangeFromCache() { cachingStore.put(bytesKey("a"), bytesValue("a")); cachingStore.put(bytesKey("b"), bytesValue("b")); - final KeyValueIterator, byte[]> iterator = cachingStore.fetch(bytesKey("a"), bytesKey("b"), 10, 10); + final KeyValueIterator, byte[]> iterator = cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(10), ofEpochMilli(10)); verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "a"); verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "b"); assertFalse(iterator.hasNext()); @@ -260,21 +261,21 @@ public void 
shouldFetchAllWithinTimestampRange() { cachingStore.put(bytesKey(array[i]), bytesValue(array[i])); } - final KeyValueIterator, byte[]> iterator = cachingStore.fetchAll(0, 7); + final KeyValueIterator, byte[]> iterator = cachingStore.fetchAll(ofEpochMilli(0), ofEpochMilli(7)); for (int i = 0; i < array.length; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); } assertFalse(iterator.hasNext()); - final KeyValueIterator, byte[]> iterator1 = cachingStore.fetchAll(2, 4); + final KeyValueIterator, byte[]> iterator1 = cachingStore.fetchAll(ofEpochMilli(2), ofEpochMilli(4)); for (int i = 2; i <= 4; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator1.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); } assertFalse(iterator1.hasNext()); - final KeyValueIterator, byte[]> iterator2 = cachingStore.fetchAll(5, 7); + final KeyValueIterator, byte[]> iterator2 = cachingStore.fetchAll(ofEpochMilli(5), ofEpochMilli(7)); for (int i = 5; i <= 7; i++) { final String str = array[i]; verifyWindowedKeyValue(iterator2.next(), new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)), str); @@ -338,7 +339,7 @@ public void shouldTakeValueFromCacheIfSameTimestampFlushedToRocks() { cachingStore.flush(); cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP)); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "b"); assertFalse(fetch.hasNext()); } @@ -348,7 +349,7 @@ public void shouldIterateAcrossWindows() { cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP); cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); - final WindowStoreIterator fetch = 
cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE)); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b"); assertFalse(fetch.hasNext()); @@ -359,7 +360,7 @@ public void shouldIterateCacheAndStore() { final Bytes key = Bytes.wrap("1".getBytes()); underlying.put(WindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes()); cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); - final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); + final WindowStoreIterator fetch = cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE)); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a"); verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b"); assertFalse(fetch.hasNext()); @@ -372,7 +373,7 @@ public void shouldIterateCacheAndStoreKeyRange() { cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE); final KeyValueIterator, byte[]> fetchRange = - cachingStore.fetch(key, bytesKey("2"), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE); + cachingStore.fetch(key, bytesKey("2"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE)); verifyWindowedKeyValue(fetchRange.next(), new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)), "a"); verifyWindowedKeyValue(fetchRange.next(), new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP + WINDOW_SIZE, DEFAULT_TIMESTAMP + WINDOW_SIZE + WINDOW_SIZE)), "b"); assertFalse(fetchRange.hasNext()); @@ -389,13 +390,13 @@ public void shouldClearNamespaceCacheOnClose() { @Test(expected = InvalidStateStoreException.class) public void 
shouldThrowIfTryingToFetchFromClosedCachingStore() { cachingStore.close(); - cachingStore.fetch(bytesKey("a"), 0, 10); + cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(10)); } @Test(expected = InvalidStateStoreException.class) public void shouldThrowIfTryingToFetchRangeFromClosedCachingStore() { cachingStore.close(); - cachingStore.fetch(bytesKey("a"), bytesKey("b"), 0, 10); + cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(0), ofEpochMilli(10)); } @Test(expected = InvalidStateStoreException.class) @@ -417,7 +418,7 @@ public void shouldFetchAndIterateOverExactKeys() { KeyValue.pair(1L, bytesValue("0003")), KeyValue.pair(SEGMENT_INTERVAL, bytesValue("0005")) ); - final List> actual = toList(cachingStore.fetch(bytesKey("a"), 0, Long.MAX_VALUE)); + final List> actual = toList(cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))); verifyKeyValueList(expected, actual); } @@ -435,12 +436,12 @@ public void shouldFetchAndIterateOverKeyRange() { windowedPair("a", "0003", 1), windowedPair("a", "0005", SEGMENT_INTERVAL) ), - toList(cachingStore.fetch(bytesKey("a"), bytesKey("a"), 0, Long.MAX_VALUE)) + toList(cachingStore.fetch(bytesKey("a"), bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))) ); verifyKeyValueList( mkList(windowedPair("aa", "0002", 0), windowedPair("aa", "0004", 1)), - toList(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), 0, Long.MAX_VALUE)) + toList(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))) ); verifyKeyValueList( @@ -451,7 +452,7 @@ public void shouldFetchAndIterateOverKeyRange() { windowedPair("aa", "0004", 1), windowedPair("a", "0005", SEGMENT_INTERVAL) ), - toList(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), 0, Long.MAX_VALUE)) + toList(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))) ); } @@ -467,17 +468,17 @@ public void shouldNotThrowNullPointerExceptionOnPutNullValue() 
{ @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnFetchNullKey() { - cachingStore.fetch(null, 1L, 2L); + cachingStore.fetch(null, ofEpochMilli(1L), ofEpochMilli(2L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullFromKey() { - cachingStore.fetch(null, bytesKey("anyTo"), 1L, 2L); + cachingStore.fetch(null, bytesKey("anyTo"), ofEpochMilli(1L), ofEpochMilli(2L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullToKey() { - cachingStore.fetch(bytesKey("anyFrom"), null, 1L, 2L); + cachingStore.fetch(bytesKey("anyFrom"), null, ofEpochMilli(1L), ofEpochMilli(2L)); } private static KeyValue, byte[]> windowedPair(final String key, final String value, final long timestamp) { diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java index e1204892a018c..6d2d994c1c6e6 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/ChangeLoggingWindowBytesStoreTest.java @@ -36,7 +36,6 @@ import java.util.HashMap; import java.util.Map; -import static java.time.Duration.ofMillis; import static java.time.Instant.ofEpochMilli; import static org.junit.Assert.assertArrayEquals; @@ -103,7 +102,7 @@ public void shouldDelegateToUnderlyingStoreWhenFetching() { init(); - store.fetch(bytesKey, ofEpochMilli(0), ofMillis(10)); + store.fetch(bytesKey, ofEpochMilli(0), ofEpochMilli(10)); EasyMock.verify(inner); } @@ -113,7 +112,7 @@ public void shouldDelegateToUnderlyingStoreWhenFetchingRange() { init(); - store.fetch(bytesKey, bytesKey, ofEpochMilli(0), ofMillis(1)); + store.fetch(bytesKey, bytesKey, ofEpochMilli(0), ofEpochMilli(1)); EasyMock.verify(inner); } diff --git 
a/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java index 1692c9d74ed17..79afb7855c38e 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/CompositeReadOnlyWindowStoreTest.java @@ -35,7 +35,6 @@ import java.util.List; import java.util.NoSuchElementException; -import static java.time.Duration.ofMillis; import static java.time.Instant.ofEpochMilli; import static java.util.Arrays.asList; import static org.hamcrest.MatcherAssert.assertThat; @@ -79,7 +78,7 @@ public void shouldFetchValuesFromWindowStore() { underlyingWindowStore.put("my-key", "my-value", 0L); underlyingWindowStore.put("my-key", "my-later-value", 10L); - final WindowStoreIterator iterator = windowStore.fetch("my-key", ofEpochMilli(0L), ofMillis(25L)); + final WindowStoreIterator iterator = windowStore.fetch("my-key", ofEpochMilli(0L), ofEpochMilli(25L)); final List> results = StreamsTestUtils.toList(iterator); assertEquals(asList(new KeyValue<>(0L, "my-value"), @@ -89,7 +88,7 @@ public void shouldFetchValuesFromWindowStore() { @Test public void shouldReturnEmptyIteratorIfNoData() { - final WindowStoreIterator iterator = windowStore.fetch("my-key", ofEpochMilli(0L), ofMillis(25L)); + final WindowStoreIterator iterator = windowStore.fetch("my-key", ofEpochMilli(0L), ofEpochMilli(25L)); assertEquals(false, iterator.hasNext()); } @@ -103,9 +102,9 @@ public void shouldFindValueForKeyWhenMultiStores() { secondUnderlying.put("key-two", "value-two", 10L); final List> keyOneResults = StreamsTestUtils.toList(windowStore.fetch("key-one", ofEpochMilli(0L), - ofMillis(1L))); + ofEpochMilli(1L))); final List> keyTwoResults = StreamsTestUtils.toList(windowStore.fetch("key-two", ofEpochMilli(10L), - ofMillis(1L))); + ofEpochMilli(11L))); 
assertEquals(Collections.singletonList(KeyValue.pair(0L, "value-one")), keyOneResults); assertEquals(Collections.singletonList(KeyValue.pair(10L, "value-two")), keyTwoResults); @@ -116,14 +115,14 @@ public void shouldNotGetValuesFromOtherStores() { otherUnderlyingStore.put("some-key", "some-value", 0L); underlyingWindowStore.put("some-key", "my-value", 1L); - final List> results = StreamsTestUtils.toList(windowStore.fetch("some-key", ofEpochMilli(0L), ofMillis(2L))); + final List> results = StreamsTestUtils.toList(windowStore.fetch("some-key", ofEpochMilli(0L), ofEpochMilli(2L))); assertEquals(Collections.singletonList(new KeyValue<>(1L, "my-value")), results); } @Test(expected = InvalidStateStoreException.class) public void shouldThrowInvalidStateStoreExceptionOnRebalance() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(true), QueryableStoreTypes.windowStore(), "foo"); - store.fetch("key", ofEpochMilli(1), ofMillis(10)); + store.fetch("key", ofEpochMilli(1), ofEpochMilli(10)); } @Test @@ -132,7 +131,7 @@ public void shouldThrowInvalidStateStoreExceptionIfFetchThrows() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(stubProviderOne, QueryableStoreTypes.windowStore(), "window-store"); try { - store.fetch("key", ofEpochMilli(1), ofMillis(10)); + store.fetch("key", ofEpochMilli(1), ofEpochMilli(10)); Assert.fail("InvalidStateStoreException was expected"); } catch (final InvalidStateStoreException e) { Assert.assertEquals("State store is not available anymore and may have been migrated to another instance; " + @@ -144,7 +143,7 @@ public void shouldThrowInvalidStateStoreExceptionIfFetchThrows() { public void emptyIteratorAlwaysReturnsFalse() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); - final WindowStoreIterator windowStoreIterator = store.fetch("key", 
ofEpochMilli(1), ofMillis(10)); + final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofEpochMilli(10)); Assert.assertFalse(windowStoreIterator.hasNext()); } @@ -153,7 +152,7 @@ public void emptyIteratorAlwaysReturnsFalse() { public void emptyIteratorPeekNextKeyShouldThrowNoSuchElementException() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); - final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofMillis(10)); + final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofEpochMilli(10)); windowStoreIteratorException.expect(NoSuchElementException.class); windowStoreIterator.peekNextKey(); @@ -163,7 +162,7 @@ public void emptyIteratorPeekNextKeyShouldThrowNoSuchElementException() { public void emptyIteratorNextShouldThrowNoSuchElementException() { final CompositeReadOnlyWindowStore store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); - final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofMillis(10)); + final WindowStoreIterator windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofEpochMilli(10)); windowStoreIteratorException.expect(NoSuchElementException.class); windowStoreIterator.next(); @@ -175,7 +174,7 @@ public void shouldFetchKeyRangeAcrossStores() { stubProviderTwo.addStore(storeName, secondUnderlying); underlyingWindowStore.put("a", "a", 0L); secondUnderlying.put("b", "b", 10L); - final List, String>> results = StreamsTestUtils.toList(windowStore.fetch("a", "b", ofEpochMilli(0), ofMillis(10))); + final List, String>> results = StreamsTestUtils.toList(windowStore.fetch("a", "b", ofEpochMilli(0), ofEpochMilli(10))); assertThat(results, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("a", new TimeWindow(0, WINDOW_SIZE)), "a"), KeyValue.pair(new 
Windowed<>("b", new TimeWindow(10, 10 + WINDOW_SIZE)), "b")))); @@ -206,7 +205,7 @@ public void shouldGetAllAcrossStores() { KeyValue.pair(new Windowed<>("a", new TimeWindow(0, WINDOW_SIZE)), "a"), KeyValue.pair(new Windowed<>("b", new TimeWindow(10, 10 + WINDOW_SIZE)), "b")))); } - + @Test public void shouldFetchAllAcrossStores() { final ReadOnlyWindowStoreStub secondUnderlying = new @@ -214,7 +213,7 @@ public void shouldFetchAllAcrossStores() { stubProviderTwo.addStore(storeName, secondUnderlying); underlyingWindowStore.put("a", "a", 0L); secondUnderlying.put("b", "b", 10L); - final List, String>> results = StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(0), ofMillis(10))); + final List, String>> results = StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(0), ofEpochMilli(10))); assertThat(results, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("a", new TimeWindow(0, WINDOW_SIZE)), "a"), KeyValue.pair(new Windowed<>("b", new TimeWindow(10, 10 + WINDOW_SIZE)), "b")))); @@ -222,17 +221,17 @@ public void shouldFetchAllAcrossStores() { @Test(expected = NullPointerException.class) public void shouldThrowNPEIfKeyIsNull() { - windowStore.fetch(null, ofEpochMilli(0), ofMillis(0)); + windowStore.fetch(null, ofEpochMilli(0), ofEpochMilli(0)); } @Test(expected = NullPointerException.class) public void shouldThrowNPEIfFromKeyIsNull() { - windowStore.fetch(null, "a", ofEpochMilli(0), ofMillis(0)); + windowStore.fetch(null, "a", ofEpochMilli(0), ofEpochMilli(0)); } @Test(expected = NullPointerException.class) public void shouldThrowNPEIfToKeyIsNull() { - windowStore.fetch("a", null, ofEpochMilli(0), ofMillis(0)); + windowStore.fetch("a", null, ofEpochMilli(0), ofEpochMilli(0)); } } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java index 0fe69fa3ced9e..3a6a3b4987ffa 100644 --- 
a/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredWindowStoreTest.java @@ -40,7 +40,6 @@ import java.util.Map; -import static java.time.Duration.ofMillis; import static java.time.Instant.ofEpochMilli; import static java.util.Collections.singletonMap; import static org.apache.kafka.test.StreamsTestUtils.getMetricByNameFilterByTags; @@ -116,7 +115,7 @@ public void shouldRecordFetchLatency() { EasyMock.replay(innerStoreMock); store.init(context, store); - store.fetch("a", ofEpochMilli(1), ofMillis(0)).close(); // recorded on close; + store.fetch("a", ofEpochMilli(1), ofEpochMilli(1)).close(); // recorded on close; final Map metrics = context.metrics().metrics(); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "all")).metricValue()); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "mocked-store")).metricValue()); @@ -129,7 +128,7 @@ public void shouldRecordFetchRangeLatency() { EasyMock.replay(innerStoreMock); store.init(context, store); - store.fetch("a", "b", ofEpochMilli(1), ofMillis(0)).close(); // recorded on close; + store.fetch("a", "b", ofEpochMilli(1), ofEpochMilli(1)).close(); // recorded on close; final Map metrics = context.metrics().metrics(); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "all")).metricValue()); assertEquals(1.0, getMetricByNameFilterByTags(metrics, "fetch-total", "stream-scope-metrics", singletonMap("scope-id", "mocked-store")).metricValue()); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java index b19ccbf40a26e..08f019feffac3 100644 --- 
a/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBWindowStoreTest.java @@ -54,9 +54,6 @@ import java.util.Map; import java.util.Set; -import static java.time.Duration.ofMillis; -import static java.time.Duration.ofMinutes; -import static java.time.Duration.ofSeconds; import static java.time.Instant.ofEpochMilli; import static java.util.Objects.requireNonNull; import static org.hamcrest.CoreMatchers.equalTo; @@ -146,7 +143,7 @@ public void shouldOnlyIterateOpenSegments() { setCurrentTime(currentTime); windowStore.put(1, "three"); - final WindowStoreIterator iterator = windowStore.fetch(1, ofEpochMilli(0), ofMillis(currentTime)); + final WindowStoreIterator iterator = windowStore.fetch(1, ofEpochMilli(0), ofEpochMilli(currentTime)); // roll to the next segment that will close the first currentTime = currentTime + segmentInterval; @@ -181,12 +178,12 @@ public void testRangeAndSinglePointFetch() { assertEquals("four", windowStore.fetch(4, startTime + 4L)); assertEquals("five", windowStore.fetch(5, startTime + 5L)); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, 
ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L + windowSize)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L - windowSize), ofEpochMilli(startTime + 1L + windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofEpochMilli(startTime + 2L + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L - windowSize), ofEpochMilli(startTime + 3L + windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L - windowSize), ofEpochMilli(startTime + 4L + windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L - windowSize), ofEpochMilli(startTime + 5L + windowSize)))); putSecondBatch(windowStore, startTime, context); @@ -197,21 +194,21 @@ public void testRangeAndSinglePointFetch() { assertEquals("two+5", windowStore.fetch(2, startTime + 7L)); assertEquals("two+6", windowStore.fetch(2, startTime + 8L)); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 2L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize * 2)))); - 
assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 2L - windowSize), ofEpochMilli(startTime - 2L + windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L - windowSize), ofEpochMilli(startTime - 1L + windowSize)))); + assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2"), 
toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L - windowSize), ofEpochMilli(startTime + 1L + windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofEpochMilli(startTime + 2L + windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L - windowSize), ofEpochMilli(startTime + 3L + windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L - windowSize), ofEpochMilli(startTime + 4L + windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L - windowSize), ofEpochMilli(startTime + 5L + windowSize)))); + assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L - windowSize), ofEpochMilli(startTime + 6L + windowSize)))); + assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L - windowSize), ofEpochMilli(startTime + 7L + windowSize)))); + assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L - windowSize), ofEpochMilli(startTime + 8L + windowSize)))); + assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L - windowSize), ofEpochMilli(startTime + 9L + windowSize)))); + assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L - windowSize), ofEpochMilli(startTime + 10L + windowSize)))); + assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L - windowSize), ofEpochMilli(startTime + 11L + windowSize)))); + assertEquals(Utils.mkList(), 
toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L - windowSize), ofEpochMilli(startTime + 12L + windowSize)))); // Flush the store and verify all current entries were properly flushed ... windowStore.flush(); @@ -261,17 +258,17 @@ public void shouldFetchAllInTimeRange() { assertEquals( Utils.mkList(one, two, four), - StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 1), ofMillis(3))) + StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 1), ofEpochMilli(startTime + 4))) ); assertEquals( Utils.mkList(zero, one, two), - StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 0), ofMillis(3))) + StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 0), ofEpochMilli(startTime + 3))) ); assertEquals( Utils.mkList(one, two, four, five), - StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 1), ofMillis(4))) + StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(startTime + 1), ofEpochMilli(startTime + 5))) ); } @@ -290,36 +287,36 @@ public void testFetchRange() { assertEquals( Utils.mkList(zero, one), - StreamsTestUtils.toList(windowStore.fetch(0, 1, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) + StreamsTestUtils.toList(windowStore.fetch(0, 1, ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L + windowSize))) ); assertEquals( Utils.mkList(one), - StreamsTestUtils.toList(windowStore.fetch(1, 1, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) + StreamsTestUtils.toList(windowStore.fetch(1, 1, ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L + windowSize))) ); assertEquals( Utils.mkList(one, two), - StreamsTestUtils.toList(windowStore.fetch(1, 3, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) + StreamsTestUtils.toList(windowStore.fetch(1, 3, ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L + windowSize))) ); assertEquals( Utils.mkList(zero, one, 
two), - StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize * 2))) + StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L + windowSize))) ); assertEquals( Utils.mkList(zero, one, two, four, five), - StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize + 5L))) + StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L + windowSize + 5L))) ); assertEquals( Utils.mkList(two, four, five), - StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 2L), ofMillis(windowSize + 5L))) + StreamsTestUtils.toList(windowStore.fetch(0, 5, ofEpochMilli(startTime + 2L), ofEpochMilli(startTime + 0L + windowSize + 5L))) ); assertEquals( Utils.mkList(), - StreamsTestUtils.toList(windowStore.fetch(4, 5, ofEpochMilli(startTime + 2L), ofMillis(windowSize - 2L))) + StreamsTestUtils.toList(windowStore.fetch(4, 5, ofEpochMilli(startTime + 2L), ofEpochMilli(startTime + windowSize))) ); assertEquals( Utils.mkList(), - StreamsTestUtils.toList(windowStore.fetch(0, 3, ofEpochMilli(startTime + 3L), ofMillis(windowSize + 2L))) + StreamsTestUtils.toList(windowStore.fetch(0, 3, ofEpochMilli(startTime + 3L), ofEpochMilli(startTime + windowSize + 5))) ); } @@ -330,30 +327,30 @@ public void testPutAndFetchBefore() { putFirstBatch(windowStore, startTime, context); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L - 
windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L - windowSize), ofEpochMilli(startTime + 1L)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofEpochMilli(startTime + 2L)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L - windowSize), ofEpochMilli(startTime + 3L)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L - windowSize), ofEpochMilli(startTime + 4L)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L - windowSize), ofEpochMilli(startTime + 5L)))); putSecondBatch(windowStore, startTime, context); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 0L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize)))); - 
assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 13L - windowSize), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L - windowSize), ofEpochMilli(startTime - 1L)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 0L - windowSize), ofEpochMilli(startTime + 0L)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L - windowSize), ofEpochMilli(startTime + 1L)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L - windowSize), ofEpochMilli(startTime + 2L)))); + assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L - windowSize), 
ofEpochMilli(startTime + 3L)))); + assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L - windowSize), ofEpochMilli(startTime + 4L)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L - windowSize), ofEpochMilli(startTime + 5L)))); + assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L - windowSize), ofEpochMilli(startTime + 6L)))); + assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L - windowSize), ofEpochMilli(startTime + 7L)))); + assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L - windowSize), ofEpochMilli(startTime + 8L)))); + assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L - windowSize), ofEpochMilli(startTime + 9L)))); + assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L - windowSize), ofEpochMilli(startTime + 10L)))); + assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L - windowSize), ofEpochMilli(startTime + 11L)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L - windowSize), ofEpochMilli(startTime + 12L)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 13L - windowSize), ofEpochMilli(startTime + 13L)))); // Flush the store and verify all current entries were properly flushed ... 
windowStore.flush(); @@ -376,30 +373,30 @@ public void testPutAndFetchAfter() { putFirstBatch(windowStore, startTime, context); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L), ofMillis(windowSize)))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 0L), ofEpochMilli(startTime + 0L + windowSize)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + 1L), ofEpochMilli(startTime + 1L + windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L), ofEpochMilli(startTime + 2L + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + 3L), ofEpochMilli(startTime + 3L + windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + 4L), ofEpochMilli(startTime + 4L + windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + 5L), ofEpochMilli(startTime + 5L + windowSize)))); putSecondBatch(windowStore, startTime, context); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 2L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two", 
"two+1"), toList(windowStore.fetch(2, ofEpochMilli(startTime), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L), ofMillis(windowSize)))); - assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L), ofMillis(windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime - 2L), ofEpochMilli(startTime - 2L + windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime - 1L), ofEpochMilli(startTime - 1L + windowSize)))); + assertEquals(Utils.mkList("two", "two+1"), 
toList(windowStore.fetch(2, ofEpochMilli(startTime), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 1L), ofEpochMilli(startTime + 1L + windowSize)))); + assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 2L), ofEpochMilli(startTime + 2L + windowSize)))); + assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 3L), ofEpochMilli(startTime + 3L + windowSize)))); + assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 4L), ofEpochMilli(startTime + 4L + windowSize)))); + assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 5L), ofEpochMilli(startTime + 5L + windowSize)))); + assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 6L), ofEpochMilli(startTime + 6L + windowSize)))); + assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 7L), ofEpochMilli(startTime + 7L + windowSize)))); + assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, ofEpochMilli(startTime + 8L), ofEpochMilli(startTime + 8L + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 9L), ofEpochMilli(startTime + 9L + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 10L), ofEpochMilli(startTime + 10L + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 11L), ofEpochMilli(startTime + 11L + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + 12L), ofEpochMilli(startTime + 12L + windowSize)))); // Flush the store and verify all current entries were properly flushed 
... windowStore.flush(); @@ -423,17 +420,17 @@ public void testPutSameKeyTimestamp() { setCurrentTime(startTime); windowStore.put(0, "zero"); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); windowStore.put(0, "zero"); windowStore.put(0, "zero+"); windowStore.put(0, "zero++"); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 1L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 2L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 3L - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime + 4L - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 1L - windowSize), ofEpochMilli(startTime + 1L + windowSize)))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 2L - windowSize), ofEpochMilli(startTime + 2L + windowSize)))); + assertEquals(Utils.mkList("zero", "zero", "zero+", "zero++"), toList(windowStore.fetch(0, ofEpochMilli(startTime + 3L - windowSize), ofEpochMilli(startTime + 3L + 
windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime + 4L - windowSize), ofEpochMilli(startTime + 4L + windowSize)))); // Flush the store and verify all current entries were properly flushed ... windowStore.flush(); @@ -491,12 +488,12 @@ public void testRolling() { segmentDirs(baseDir) ); - assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofEpochMilli(startTime + increment + windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofEpochMilli(startTime + increment * 2 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofEpochMilli(startTime + increment * 3 + windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofEpochMilli(startTime + 
increment * 4 + windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofEpochMilli(startTime + increment * 5 + windowSize)))); setCurrentTime(startTime + increment * 6); windowStore.put(6, "six"); @@ -510,13 +507,13 @@ public void testRolling() { ); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofEpochMilli(startTime + increment + windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofEpochMilli(startTime + increment * 2 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofEpochMilli(startTime + increment * 3 + windowSize)))); + 
assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofEpochMilli(startTime + increment * 4 + windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofEpochMilli(startTime + increment * 5 + windowSize)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofEpochMilli(startTime + increment * 6 + windowSize)))); setCurrentTime(startTime + increment * 7); @@ -530,14 +527,14 @@ public void testRolling() { segmentDirs(baseDir) ); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, 
ofEpochMilli(startTime + increment - windowSize), ofEpochMilli(startTime + increment + windowSize)))); + assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofEpochMilli(startTime + increment * 2 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofEpochMilli(startTime + increment * 3 + windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofEpochMilli(startTime + increment * 4 + windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofEpochMilli(startTime + increment * 5 + windowSize)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofEpochMilli(startTime + increment * 6 + windowSize)))); + assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofEpochMilli(startTime + increment * 7 + windowSize)))); setCurrentTime(startTime + increment * 8); windowStore.put(8, "eight"); @@ -551,15 +548,15 @@ public void testRolling() { ); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); - 
assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("eight"), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofEpochMilli(startTime + increment + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofEpochMilli(startTime + increment * 2 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofEpochMilli(startTime + increment * 3 + windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofEpochMilli(startTime + increment * 4 + windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofEpochMilli(startTime + increment * 5 + windowSize)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofEpochMilli(startTime + increment * 6 + windowSize)))); + assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofEpochMilli(startTime + increment * 7 + windowSize)))); + assertEquals(Utils.mkList("eight"), 
toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofEpochMilli(startTime + increment * 8 + windowSize)))); // check segment directories windowStore.flush(); @@ -607,27 +604,27 @@ public void testRestore() throws IOException { Utils.delete(baseDir); windowStore = createWindowStore(context, false); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofEpochMilli(startTime + increment + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - 
windowSize), ofEpochMilli(startTime + increment * 2 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofEpochMilli(startTime + increment * 3 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofEpochMilli(startTime + increment * 4 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofEpochMilli(startTime + increment * 5 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofEpochMilli(startTime + increment * 6 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofEpochMilli(startTime + increment * 7 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofEpochMilli(startTime + increment * 8 + windowSize)))); context.restore(windowName, changeLog); - assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofMillis(windowSize * 2)))); - 
assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofMillis(windowSize * 2)))); - assertEquals(Utils.mkList("eight"), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofMillis(windowSize * 2)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(0, ofEpochMilli(startTime - windowSize), ofEpochMilli(startTime + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(1, ofEpochMilli(startTime + increment - windowSize), ofEpochMilli(startTime + increment + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(2, ofEpochMilli(startTime + increment * 2 - windowSize), ofEpochMilli(startTime + increment * 2 + windowSize)))); + assertEquals(Utils.mkList(), toList(windowStore.fetch(3, ofEpochMilli(startTime + increment * 3 - windowSize), ofEpochMilli(startTime + increment * 3 + windowSize)))); + assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, ofEpochMilli(startTime + increment * 4 - windowSize), ofEpochMilli(startTime + increment * 4 + windowSize)))); + assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, ofEpochMilli(startTime + increment * 5 - windowSize), ofEpochMilli(startTime + increment * 5 + windowSize)))); + assertEquals(Utils.mkList("six"), toList(windowStore.fetch(6, ofEpochMilli(startTime + increment * 6 - windowSize), ofEpochMilli(startTime + increment * 6 + windowSize)))); + assertEquals(Utils.mkList("seven"), toList(windowStore.fetch(7, ofEpochMilli(startTime + increment * 7 - windowSize), ofEpochMilli(startTime + increment * 7 + windowSize)))); + assertEquals(Utils.mkList("eight"), toList(windowStore.fetch(8, ofEpochMilli(startTime + increment * 8 - windowSize), ofEpochMilli(startTime + increment * 8 + windowSize)))); // check segment directories 
windowStore.flush(); @@ -666,7 +663,7 @@ public void testSegmentMaintenance() { WindowStoreIterator iter; int fetchedCount; - iter = windowStore.fetch(0, ofEpochMilli(0L), ofMillis(segmentInterval * 4)); + iter = windowStore.fetch(0, ofEpochMilli(0L), ofEpochMilli(segmentInterval * 4)); fetchedCount = 0; while (iter.hasNext()) { iter.next(); @@ -682,7 +679,7 @@ public void testSegmentMaintenance() { setCurrentTime(segmentInterval * 3); windowStore.put(0, "v"); - iter = windowStore.fetch(0, ofEpochMilli(0L), ofMillis(segmentInterval * 4)); + iter = windowStore.fetch(0, ofEpochMilli(0L), ofEpochMilli(segmentInterval * 4)); fetchedCount = 0; while (iter.hasNext()) { iter.next(); @@ -698,7 +695,7 @@ public void testSegmentMaintenance() { setCurrentTime(segmentInterval * 5); windowStore.put(0, "v"); - iter = windowStore.fetch(0, ofEpochMilli(segmentInterval * 4), ofMillis(segmentInterval * 6)); + iter = windowStore.fetch(0, ofEpochMilli(segmentInterval * 4), ofEpochMilli(segmentInterval * 10)); fetchedCount = 0; while (iter.hasNext()) { iter.next(); @@ -740,7 +737,7 @@ public void testInitialLoading() { assertEquals(expected, actual); - try (final WindowStoreIterator iter = windowStore.fetch(0, ofEpochMilli(0L), ofSeconds(1000L))) { + try (final WindowStoreIterator iter = windowStore.fetch(0, ofEpochMilli(0L), ofEpochMilli(1000000L))) { while (iter.hasNext()) { iter.next(); } @@ -760,7 +757,7 @@ public void shouldCloseOpenIteratorsWhenStoreIsClosedAndNotThrowInvalidStateStor windowStore.put(1, "two", 2L); windowStore.put(1, "three", 3L); - final WindowStoreIterator iterator = windowStore.fetch(1, ofEpochMilli(1L), ofMillis(3L)); + final WindowStoreIterator iterator = windowStore.fetch(1, ofEpochMilli(1L), ofEpochMilli(3L)); assertTrue(iterator.hasNext()); windowStore.close(); @@ -774,7 +771,7 @@ public void shouldFetchAndIterateOverExactKeys() { final long retentionPeriod = 0x7a00000000000000L; final WindowStore windowStore = Stores.windowStoreBuilder( - 
Stores.persistentWindowStore(windowName, ofMillis(retentionPeriod), ofMillis(windowSize), true), + Stores.persistentWindowStore(windowName, retentionPeriod, windowSize, true), Serdes.String(), Serdes.String()).build(); @@ -788,16 +785,17 @@ public void shouldFetchAndIterateOverExactKeys() { final List expected = Utils.mkList("0001", "0003", "0005"); - assertThat(toList(windowStore.fetch("a", ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expected)); + assertThat(toList(windowStore.fetch("a", ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))), equalTo(expected)); - List, String>> list = StreamsTestUtils.toList(windowStore.fetch("a", "a", ofEpochMilli(0), ofMillis(Long.MAX_VALUE))); + List, String>> list = + StreamsTestUtils.toList(windowStore.fetch("a", "a", ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))); assertThat(list, equalTo(Utils.mkList( windowedPair("a", "0001", 0, windowSize), windowedPair("a", "0003", 1, windowSize), windowedPair("a", "0005", 0x7a00000000000000L - 1, windowSize) ))); - list = StreamsTestUtils.toList(windowStore.fetch("aa", "aa", ofEpochMilli(0), ofMillis(Long.MAX_VALUE))); + list = StreamsTestUtils.toList(windowStore.fetch("aa", "aa", ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))); assertThat(list, equalTo(Utils.mkList( windowedPair("aa", "0002", 0, windowSize), windowedPair("aa", "0004", 1, windowSize) @@ -819,19 +817,19 @@ public void shouldNotThrowNullPointerExceptionOnPutNullValue() { @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnGetNullKey() { windowStore = createWindowStore(context, false); - windowStore.fetch(null, ofEpochMilli(1L), ofMillis(2L)); + windowStore.fetch(null, ofEpochMilli(1L), ofEpochMilli(2L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullFromKey() { windowStore = createWindowStore(context, false); - windowStore.fetch(null, 2, ofEpochMilli(1L), ofMillis(1L)); + windowStore.fetch(null, 2, ofEpochMilli(1L), 
ofEpochMilli(2L)); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerExceptionOnRangeNullToKey() { windowStore = createWindowStore(context, false); - windowStore.fetch(1, null, ofEpochMilli(1L), ofMillis(1L)); + windowStore.fetch(1, null, ofEpochMilli(1L), ofEpochMilli(2L)); } @Test @@ -850,7 +848,7 @@ public void shouldNoNullPointerWhenSerdeDoesNotHandleNull() { @Test public void shouldFetchAndIterateOverExactBinaryKeys() { final WindowStore windowStore = Stores.windowStoreBuilder( - Stores.persistentWindowStore(windowName, ofMinutes(1L), ofMinutes(1L), true), + Stores.persistentWindowStore(windowName, 60_000L, 60_000L, true), Serdes.Bytes(), Serdes.String()).build(); @@ -870,11 +868,11 @@ public void shouldFetchAndIterateOverExactBinaryKeys() { windowStore.put(key3, "9", 59999); final List expectedKey1 = Utils.mkList("1", "4", "7"); - assertThat(toList(windowStore.fetch(key1, ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expectedKey1)); + assertThat(toList(windowStore.fetch(key1, ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))), equalTo(expectedKey1)); final List expectedKey2 = Utils.mkList("2", "5", "8"); - assertThat(toList(windowStore.fetch(key2, ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expectedKey2)); + assertThat(toList(windowStore.fetch(key2, ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))), equalTo(expectedKey2)); final List expectedKey3 = Utils.mkList("3", "6", "9"); - assertThat(toList(windowStore.fetch(key3, ofEpochMilli(0), ofMillis(Long.MAX_VALUE))), equalTo(expectedKey3)); + assertThat(toList(windowStore.fetch(key3, ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))), equalTo(expectedKey3)); } private void putFirstBatch(final WindowStore store, From 5210f9fff117c695cb1f3024c94eff7f49599a6a Mon Sep 17 00:00:00 2001 From: Nikolay Izhikov Date: Thu, 4 Oct 2018 17:34:12 +0300 Subject: [PATCH 14/14] KAFKA-7277: Javadoc fixed. 
--- .../org/apache/kafka/streams/state/WindowStoreIterator.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java b/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java index c07130e0fe56a..14163514b0f9d 100644 --- a/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java +++ b/streams/src/main/java/org/apache/kafka/streams/state/WindowStoreIterator.java @@ -16,12 +16,14 @@ */ package org.apache.kafka.streams.state; +import java.time.Instant; import org.apache.kafka.streams.KeyValue; import java.io.Closeable; /** - * Iterator interface of {@link KeyValue} with key typed {@link Long} used for {@link WindowStore#fetch(Object, long, long)}. + * Iterator interface of {@link KeyValue} with key typed {@link Long} used for {@link WindowStore#fetch(Object, long, long)} + * and {@link WindowStore#fetch(Object, Instant, Instant)} * * Users must call its {@code close} method explicitly upon completeness to release resources, * or use try-with-resources statement (available since JDK7) for this {@link Closeable} class.