From 6f7b24817a81995e90cfc2cd77efadb41be8cddc Mon Sep 17 00:00:00 2001 From: Jiabao Sun Date: Thu, 7 Mar 2024 19:31:06 +0800 Subject: [PATCH] [FLINK-25544][streaming][JUnit5 Migration] The api package of module flink-stream-java (#24429) --- .../api/AggregationFunctionTest.java | 182 ++-- .../flink/streaming/api/DataStreamTest.java | 861 +++++++++--------- .../streaming/api/RestartStrategyTest.java | 59 +- .../streaming/api/SourceFunctionTest.java | 14 +- .../flink/streaming/api/TypeFillTest.java | 141 ++- .../api/checkpoint/ListCheckpointedTest.java | 22 +- .../api/datastream/CoGroupedStreamsTest.java | 23 +- .../DataStreamSinkDeprecatedTest.java | 23 +- .../api/datastream/DataStreamSinkTest.java | 23 +- .../api/datastream/DataStreamSourceTest.java | 14 +- .../api/datastream/JoinedStreamsTest.java | 27 +- ...ExecutionEnvironmentConfigurationTest.java | 33 +- .../AscendingTimestampExtractorTest.java | 55 +- ...dOutOfOrdernessTimestampExtractorTest.java | 44 +- .../functions/FromElementsFunctionTest.java | 351 +++---- .../functions/IngestionTimeExtractorTest.java | 19 +- .../api/functions/PrintSinkFunctionTest.java | 48 +- .../api/functions/PrintSinkTest.java | 26 +- .../functions/StatefulSequenceSourceTest.java | 175 +--- .../async/RichAsyncFunctionTest.java | 365 ++++---- .../sink/OutputFormatSinkFunctionTest.java | 6 +- .../functions/sink/SocketClientSinkTest.java | 148 ++- .../functions/sink/TransactionHolderTest.java | 11 +- .../sink/TwoPhaseCommitSinkFunctionTest.java | 127 ++- .../sink/filesystem/BucketAssignerTest.java | 19 +- .../filesystem/BucketStateSerializerTest.java | 111 ++- .../functions/sink/filesystem/BucketTest.java | 99 +- .../filesystem/BucketsRollingPolicyTest.java | 45 +- .../sink/filesystem/BucketsTest.java | 164 ++-- .../sink/filesystem/BulkWriterTest.java | 48 +- .../LocalStreamingFileSinkTest.java | 136 ++- .../functions/sink/filesystem/TestUtils.java | 12 +- .../ContinuousFileReaderOperatorTest.java | 62 +- .../source/FileMonitoringFunctionTest.java | 6 +- .../source/InputFormatSourceFunctionTest.java | 63 +- .../source/SocketTextStreamFunctionTest.java | 21 +- .../datagen/DataGeneratorSourceTest.java | 27 +- .../delta/extractor/ArrayFromTupleTest.java | 22 +- .../extractor/ConcatenatedExtractTest.java | 34 +- .../delta/extractor/FieldFromArrayTest.java | 27 +- .../delta/extractor/FieldFromTupleTest.java | 17 +- .../delta/extractor/FieldsFromArrayTest.java | 39 +- .../delta/extractor/FieldsFromTupleTest.java | 19 +- ...inkTransformationTranslatorITCaseBase.java | 80 +- .../SinkV1TransformationTranslatorITCase.java | 53 +- ...nsformationTranslatorDeprecatedITCase.java | 31 +- .../SinkV2TransformationTranslatorITCase.java | 31 +- .../api/graph/SlotAllocationTest.java | 150 ++- ...treamGraphGeneratorBatchExecutionTest.java | 213 ++--- ...phGeneratorExecutionModeDetectionTest.java | 185 ++-- .../api/graph/StreamGraphGeneratorTest.java | 279 +++--- .../graph/StreamingJobGraphGeneratorTest.java | 5 +- ...ratorWithGlobalStreamExchangeModeTest.java | 91 +- .../operators/AbstractStreamOperatorTest.java | 100 +- .../AbstractStreamOperatorV2Test.java | 15 +- ...bstractUdfStreamOperatorLifecycleTest.java | 64 +- .../BackendRestorerProcedureTest.java | 40 +- .../BatchGroupedReduceOperatorTest.java | 25 +- .../api/operators/InputSelectionTest.java | 198 ++-- .../InternalTimeServiceManagerImplTest.java | 23 +- .../InternalTimerServiceImplTest.java | 242 +++-- .../operators/KeyedProcessOperatorTest.java | 47 +- .../LegacyKeyedProcessOperatorTest.java | 38 +- 
.../api/operators/OperatorAttributesTest.java | 4 +- .../OperatorSnapshotFinalizerTest.java | 31 +- .../OperatorSnapshotFuturesTest.java | 13 +- .../api/operators/ProcessOperatorTest.java | 23 +- .../api/operators/SourceOperatorIdleTest.java | 29 +- ...ceOperatorSplitWatermarkAlignmentTest.java | 6 +- .../api/operators/SourceOperatorTest.java | 94 +- .../operators/StateDescriptorPassingTest.java | 38 +- .../StateInitializationContextImplTest.java | 51 +- ...ateSnapshotContextSynchronousImplTest.java | 49 +- .../api/operators/StreamFilterTest.java | 30 +- .../api/operators/StreamFlatMapTest.java | 30 +- .../StreamGroupedReduceOperatorTest.java | 30 +- .../api/operators/StreamMapTest.java | 30 +- .../StreamOperatorStateHandlerTest.java | 101 +- .../api/operators/StreamProjectTest.java | 6 +- .../api/operators/StreamSinkOperatorTest.java | 33 +- ...StreamSourceContextIdleDetectionTests.java | 67 +- .../StreamTaskStateInitializerImplTest.java | 79 +- .../StreamingRuntimeContextTest.java | 71 +- .../WrappingFunctionSnapshotRestoreTest.java | 23 +- .../async/AsyncWaitOperatorTest.java | 132 ++- .../queue/OrderedStreamElementQueueTest.java | 27 +- .../api/operators/async/queue/QueueUtil.java | 7 +- .../async/queue/StreamElementQueueTest.java | 77 +- .../UnorderedStreamElementQueueTest.java | 53 +- .../co/CoBroadcastWithKeyedOperatorTest.java | 100 +- .../CoBroadcastWithNonKeyedOperatorTest.java | 97 +- .../operators/co/CoProcessOperatorTest.java | 9 +- .../api/operators/co/CoStreamFlatMapTest.java | 35 +- .../api/operators/co/CoStreamMapTest.java | 34 +- .../co/IntervalJoinOperatorTest.java | 137 +-- .../co/KeyedCoProcessOperatorTest.java | 33 +- .../co/LegacyKeyedCoProcessOperatorTest.java | 30 +- .../collect/CollectResultBufferTest.java | 64 +- .../collect/CollectResultIteratorTest.java | 37 +- .../CollectSinkFunctionRandomITCase.java | 14 +- .../collect/CollectSinkFunctionTest.java | 7 +- ...bstractTestCoordinationRequestHandler.java | 11 +- .../utils/CollectSinkFunctionTestWrapper.java | 6 +- .../collect/utils/CollectTestUtils.java | 23 +- .../collect/utils/TestJobClient.java | 6 +- .../FixedLengthKeyAndValueSerializerTest.java | 4 +- .../sort/LargeSortingDataInputITCase.java | 26 +- .../sort/MultiInputSortingDataInputsTest.java | 104 +-- .../operators/sort/SortingDataInputTest.java | 69 +- ...riableLengthKeyAndValueSerializerTest.java | 4 +- ...BatchExecutionInternalTimeServiceTest.java | 126 +-- .../state/BatchExecutionStateBackendTest.java | 320 +++---- ...ExecutionStateBackendVerificationTest.java | 31 +- .../SourceOutputWithWatermarksTest.java | 25 +- .../source/WatermarkToDataOutputTest.java | 25 +- .../functions/InternalWindowFunctionTest.java | 24 +- .../streamtask/StreamIterationHeadTest.java | 12 +- .../deltafunction/CosineDistanceTest.java | 24 +- .../deltafunction/EuclideanDistanceTest.java | 24 +- .../streaming/util/BlockingSourceContext.java | 101 ++ .../org.junit.jupiter.api.extension.Extension | 16 + 121 files changed, 3979 insertions(+), 4411 deletions(-) create mode 100644 flink-streaming-java/src/test/java/org/apache/flink/streaming/util/BlockingSourceContext.java create mode 100644 flink-streaming-java/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/AggregationFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/AggregationFunctionTest.java index a108ca721b75b..e163e8a63d8ce 100644 --- 
a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/AggregationFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/AggregationFunctionTest.java @@ -36,19 +36,19 @@ import org.apache.flink.shaded.guava31.com.google.common.collect.ImmutableList; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.Serializable; import java.util.ArrayList; import java.util.List; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link AggregationFunction}. */ -public class AggregationFunctionTest { +class AggregationFunctionTest { @Test - public void groupSumIntegerTest() throws Exception { + void groupSumIntegerTest() throws Exception { // preparing expected outputs List> expectedGroupSumList = new ArrayList<>(); @@ -124,13 +124,13 @@ public void groupSumIntegerTest() throws Exception { keySelector, keyType); - assertEquals(expectedGroupSumList, groupedSumList); - assertEquals(expectedGroupMinList, groupedMinList); - assertEquals(expectedGroupMaxList, groupedMaxList); + assertThat(groupedSumList).isEqualTo(expectedGroupSumList); + assertThat(groupedMinList).isEqualTo(expectedGroupMinList); + assertThat(groupedMaxList).isEqualTo(expectedGroupMaxList); } @Test - public void pojoGroupSumIntegerTest() throws Exception { + void pojoGroupSumIntegerTest() throws Exception { // preparing expected outputs List expectedGroupSumList = new ArrayList<>(); @@ -204,13 +204,13 @@ public void pojoGroupSumIntegerTest() throws Exception { keySelector, keyType); - assertEquals(expectedGroupSumList, groupedSumList); - assertEquals(expectedGroupMinList, groupedMinList); - assertEquals(expectedGroupMaxList, groupedMaxList); + assertThat(groupedSumList).isEqualTo(expectedGroupSumList); + assertThat(groupedMinList).isEqualTo(expectedGroupMinList); + assertThat(groupedMaxList).isEqualTo(expectedGroupMaxList); } @Test - public void minMaxByTest() throws Exception { + void minMaxByTest() throws Exception { // Tuples are grouped on field 0, aggregated on field 1 // preparing expected outputs @@ -283,49 +283,49 @@ public void minMaxByTest() throws Exception { ReduceFunction> minByFunctionLast = new ComparableAggregator<>(1, typeInfo, AggregationType.MINBY, false, config); - assertEquals( - maxByFirstExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - maxByFunctionFirst, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByList(), - keySelector, - keyType)); - - assertEquals( - maxByLastExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - maxByFunctionLast, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByList(), - keySelector, - keyType)); - - assertEquals( - minByLastExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - minByFunctionLast, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByList(), - keySelector, - keyType)); - - assertEquals( - minByFirstExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - minByFunctionFirst, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByList(), - keySelector, - keyType)); + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + maxByFunctionFirst, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByList(), + keySelector, + keyType)) + 
.isEqualTo(maxByFirstExpected); + + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + maxByFunctionLast, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByList(), + keySelector, + keyType)) + .isEqualTo(maxByLastExpected); + + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + minByFunctionLast, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByList(), + keySelector, + keyType)) + .isEqualTo(minByLastExpected); + + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + minByFunctionFirst, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByList(), + keySelector, + keyType)) + .isEqualTo(minByFirstExpected); } @Test - public void pojoMinMaxByTest() throws Exception { + void pojoMinMaxByTest() throws Exception { // Pojos are grouped on field 0, aggregated on field 1 // preparing expected outputs @@ -397,45 +397,45 @@ public void pojoMinMaxByTest() throws Exception { ReduceFunction minByFunctionLast = new ComparableAggregator<>("f1", typeInfo, AggregationType.MINBY, false, config); - assertEquals( - maxByFirstExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - maxByFunctionFirst, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByPojoList(), - keySelector, - keyType)); - - assertEquals( - maxByLastExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - maxByFunctionLast, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByPojoList(), - keySelector, - keyType)); - - assertEquals( - minByLastExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - minByFunctionLast, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByPojoList(), - keySelector, - keyType)); - - assertEquals( - minByFirstExpected, - MockContext.createAndExecuteForKeyedStream( - new StreamGroupedReduceOperator<>( - minByFunctionFirst, - typeInfo.createSerializer(config.getSerializerConfig())), - getInputByPojoList(), - keySelector, - keyType)); + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + maxByFunctionFirst, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByPojoList(), + keySelector, + keyType)) + .isEqualTo(maxByFirstExpected); + + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + maxByFunctionLast, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByPojoList(), + keySelector, + keyType)) + .isEqualTo(maxByLastExpected); + + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + minByFunctionLast, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByPojoList(), + keySelector, + keyType)) + .isEqualTo(minByLastExpected); + + assertThat( + MockContext.createAndExecuteForKeyedStream( + new StreamGroupedReduceOperator<>( + minByFunctionFirst, + typeInfo.createSerializer(config.getSerializerConfig())), + getInputByPojoList(), + keySelector, + keyType)) + .isEqualTo(minByFirstExpected); } // ************************************************************************* diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/DataStreamTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/DataStreamTest.java index 
6b75b1d2ee70c..9245912d13b2c 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/DataStreamTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/DataStreamTest.java @@ -83,13 +83,8 @@ import org.apache.flink.streaming.runtime.partitioner.ShufflePartitioner; import org.apache.flink.streaming.runtime.partitioner.StreamPartitioner; import org.apache.flink.util.Collector; -import org.apache.flink.util.TestLogger; -import org.hamcrest.core.StringStartsWith; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import javax.annotation.Nullable; @@ -97,23 +92,17 @@ import java.time.Duration; import java.util.List; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.fail; /** Tests for {@link DataStream}. */ @SuppressWarnings("serial") -public class DataStreamTest extends TestLogger { - - @Rule public ExpectedException expectedException = ExpectedException.none(); +class DataStreamTest { /** Ensure that WatermarkStrategy is easy to use in the API, without superfluous generics. */ @Test - public void testErgonomicWatermarkStrategy() { + void testErgonomicWatermarkStrategy() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream input = env.fromData("bonjour"); @@ -135,7 +124,7 @@ public void testErgonomicWatermarkStrategy() { * @throws Exception */ @Test - public void testUnion() throws Exception { + void testUnion() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(4); @@ -250,15 +239,14 @@ public Long map(Long value) throws Exception { StreamGraph streamGraph = getStreamGraph(env); // verify self union - assertTrue(streamGraph.getStreamNode(selfUnion.getId()).getInEdges().size() == 2); + assertThat(streamGraph.getStreamNode(selfUnion.getId()).getInEdges()).hasSize(2); for (StreamEdge edge : streamGraph.getStreamNode(selfUnion.getId()).getInEdges()) { - assertTrue(edge.getPartitioner() instanceof ForwardPartitioner); + assertThat(edge.getPartitioner()).isInstanceOf(ForwardPartitioner.class); } // verify self union with different partitioners - assertTrue( - streamGraph.getStreamNode(selfUnionDifferentPartition.getId()).getInEdges().size() - == 2); + assertThat(streamGraph.getStreamNode(selfUnionDifferentPartition.getId()).getInEdges()) + .hasSize(2); boolean hasForward = false; boolean hasBroadcast = false; for (StreamEdge edge : @@ -270,33 +258,31 @@ public Long map(Long value) throws Exception { hasBroadcast = true; } } - assertTrue(hasForward && hasBroadcast); + assertThat(hasForward && hasBroadcast).isTrue(); // verify union of streams with differing parallelism - assertTrue( - streamGraph.getStreamNode(unionDifferingParallelism.getId()).getInEdges().size() - == 2); + assertThat(streamGraph.getStreamNode(unionDifferingParallelism.getId()).getInEdges()) + .hasSize(2); for (StreamEdge edge : streamGraph.getStreamNode(unionDifferingParallelism.getId()).getInEdges()) { if (edge.getSourceId() == input2.getId()) { - 
assertTrue(edge.getPartitioner() instanceof ForwardPartitioner); + assertThat(edge.getPartitioner()).isInstanceOf(ForwardPartitioner.class); } else if (edge.getSourceId() == input3.getId()) { - assertTrue(edge.getPartitioner() instanceof RebalancePartitioner); + assertThat(edge.getPartitioner()).isInstanceOf(RebalancePartitioner.class); } else { fail("Wrong input edge."); } } // verify union of streams with differing partitionings - assertTrue( - streamGraph.getStreamNode(unionDifferingPartitioning.getId()).getInEdges().size() - == 2); + assertThat(streamGraph.getStreamNode(unionDifferingPartitioning.getId()).getInEdges()) + .hasSize(2); for (StreamEdge edge : streamGraph.getStreamNode(unionDifferingPartitioning.getId()).getInEdges()) { if (edge.getSourceId() == input4.getId()) { - assertTrue(edge.getPartitioner() instanceof BroadcastPartitioner); + assertThat(edge.getPartitioner()).isInstanceOf(BroadcastPartitioner.class); } else if (edge.getSourceId() == input5.getId()) { - assertTrue(edge.getPartitioner() instanceof ForwardPartitioner); + assertThat(edge.getPartitioner()).isInstanceOf(ForwardPartitioner.class); } else { fail("Wrong input edge."); } @@ -309,7 +295,7 @@ public Long map(Long value) throws Exception { * @throws Exception */ @Test - public void testNaming() throws Exception { + void testNaming() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream dataStream1 = @@ -367,12 +353,12 @@ public Long reduce(Long value1, Long value2) throws Exception { // test functionality through the operator names in the execution plan String plan = env.getExecutionPlan(); - assertTrue(plan.contains("testSource1")); - assertTrue(plan.contains("testSource2")); - assertTrue(plan.contains("testMap")); - assertTrue(plan.contains("testMap")); - assertTrue(plan.contains("testCoFlatMap")); - assertTrue(plan.contains("testWindowReduce")); + assertThat(plan).contains("testSource1"); + assertThat(plan).contains("testSource2"); + assertThat(plan).contains("testMap"); + assertThat(plan).contains("testMap"); + assertThat(plan).contains("testCoFlatMap"); + assertThat(plan).contains("testWindowReduce"); } /** @@ -380,7 +366,7 @@ public Long reduce(Long value1, Long value2) throws Exception { * result in different and correct topologies. Does the some for the {@link ConnectedStreams}. 
*/ @Test - public void testPartitioning() { + void testPartitioning() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream> src1 = env.fromData(new Tuple2<>(0L, 0L)); @@ -398,15 +384,19 @@ public void testPartitioning() { int id3 = createDownStreamId(group3); int id4 = createDownStreamId(group4); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id1))); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id2))); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id3))); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id4))); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id1))) + .isTrue(); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id2))) + .isTrue(); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id3))) + .isTrue(); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), id4))) + .isTrue(); - assertTrue(isKeyed(group1)); - assertTrue(isKeyed(group2)); - assertTrue(isKeyed(group3)); - assertTrue(isKeyed(group4)); + assertThat(isKeyed(group1)).isTrue(); + assertThat(isKeyed(group2)).isTrue(); + assertThat(isKeyed(group3)).isTrue(); + assertThat(isKeyed(group4)).isTrue(); // Testing DataStream partitioning DataStream> partition1 = src1.keyBy(0); @@ -419,15 +409,19 @@ public void testPartitioning() { int pid3 = createDownStreamId(partition3); int pid4 = createDownStreamId(partition4); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid1))); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid2))); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid3))); - assertTrue(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid4))); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid1))) + .isTrue(); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid2))) + .isTrue(); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid3))) + .isTrue(); + assertThat(isPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), pid4))) + .isTrue(); - assertTrue(isKeyed(partition1)); - assertTrue(isKeyed(partition3)); - assertTrue(isKeyed(partition2)); - assertTrue(isKeyed(partition4)); + assertThat(isKeyed(partition1)).isTrue(); + assertThat(isKeyed(partition3)).isTrue(); + assertThat(isKeyed(partition2)).isTrue(); + assertThat(isKeyed(partition4)).isTrue(); // Testing DataStream custom partitioning Partitioner longPartitioner = @@ -448,16 +442,22 @@ public int partition(Long key, int numPartitions) { int cid2 = createDownStreamId(customPartition3); int cid3 = createDownStreamId(customPartition4); - assertTrue( - isCustomPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), cid1))); - assertTrue( - isCustomPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), cid2))); - assertTrue( - isCustomPartitioned(getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), cid3))); - - assertFalse(isKeyed(customPartition1)); - assertFalse(isKeyed(customPartition3)); - assertFalse(isKeyed(customPartition4)); + assertThat( + isCustomPartitioned( + getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), cid1))) + .isTrue(); + assertThat( + isCustomPartitioned( + 
getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), cid2))) + .isTrue(); + assertThat( + isCustomPartitioned( + getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), cid3))) + .isTrue(); + + assertThat(isKeyed(customPartition1)).isFalse(); + assertThat(isKeyed(customPartition3)).isFalse(); + assertThat(isKeyed(customPartition4)).isFalse(); // Testing ConnectedStreams grouping ConnectedStreams, Tuple2> connectedGroup1 = @@ -480,46 +480,66 @@ public int partition(Long key, int numPartitions) { connected.keyBy(new FirstSelector(), new FirstSelector()); Integer downStreamId5 = createDownStreamId(connectedGroup5); - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), downStreamId1))); - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src2.getId(), downStreamId1))); - - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), downStreamId2))); - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src2.getId(), downStreamId2))); - - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), downStreamId3))); - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src2.getId(), downStreamId3))); - - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), downStreamId4))); - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src2.getId(), downStreamId4))); - - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src1.getId(), downStreamId5))); - assertTrue( - isPartitioned( - getStreamGraph(env).getStreamEdgesOrThrow(src2.getId(), downStreamId5))); - - assertTrue(isKeyed(connectedGroup1)); - assertTrue(isKeyed(connectedGroup2)); - assertTrue(isKeyed(connectedGroup3)); - assertTrue(isKeyed(connectedGroup4)); - assertTrue(isKeyed(connectedGroup5)); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), downStreamId1))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), downStreamId1))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), downStreamId2))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), downStreamId2))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), downStreamId3))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), downStreamId3))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), downStreamId4))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), downStreamId4))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), downStreamId5))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), downStreamId5))) + .isTrue(); + + assertThat(isKeyed(connectedGroup1)).isTrue(); + assertThat(isKeyed(connectedGroup2)).isTrue(); + assertThat(isKeyed(connectedGroup3)).isTrue(); + assertThat(isKeyed(connectedGroup4)).isTrue(); + assertThat(isKeyed(connectedGroup5)).isTrue(); // Testing ConnectedStreams partitioning ConnectedStreams, Tuple2> connectedPartition1 = @@ -542,61 +562,71 @@ public int partition(Long key, int numPartitions) { 
connected.keyBy(new FirstSelector(), new FirstSelector()); Integer connectDownStreamId5 = createDownStreamId(connectedPartition5); - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId1))); - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId1))); - - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId2))); - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId2))); - - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId3))); - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId3))); - - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId4))); - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId4))); - - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId5))); - assertTrue( - isPartitioned( - getStreamGraph(env) - .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId5))); - - assertTrue(isKeyed(connectedPartition1)); - assertTrue(isKeyed(connectedPartition2)); - assertTrue(isKeyed(connectedPartition3)); - assertTrue(isKeyed(connectedPartition4)); - assertTrue(isKeyed(connectedPartition5)); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId1))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId1))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId2))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId2))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId3))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId3))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId4))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId4))) + .isTrue(); + + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src1.getId(), connectDownStreamId5))) + .isTrue(); + assertThat( + isPartitioned( + getStreamGraph(env) + .getStreamEdgesOrThrow(src2.getId(), connectDownStreamId5))) + .isTrue(); + + assertThat(isKeyed(connectedPartition1)).isTrue(); + assertThat(isKeyed(connectedPartition2)).isTrue(); + assertThat(isKeyed(connectedPartition3)).isTrue(); + assertThat(isKeyed(connectedPartition4)).isTrue(); + assertThat(isKeyed(connectedPartition5)).isTrue(); } /** Tests whether parallelism gets set. 
*/ @Test - public void testParallelism() { + void testParallelism() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource> src = env.fromData(new Tuple2<>(0L, 0L)); @@ -634,50 +664,52 @@ public Long reduce(Long value1, Long value2) throws Exception { public void invoke(Long value) throws Exception {} }); - assertEquals(1, getStreamGraph(env).getStreamNode(src.getId()).getParallelism()); - assertEquals(10, getStreamGraph(env).getStreamNode(map.getId()).getParallelism()); - assertEquals(1, getStreamGraph(env).getStreamNode(windowed.getId()).getParallelism()); - assertEquals( - 10, - getStreamGraph(env) - .getStreamNode(sink.getTransformation().getId()) - .getParallelism()); + assertThat(getStreamGraph(env).getStreamNode(src.getId()).getParallelism()).isOne(); + assertThat(getStreamGraph(env).getStreamNode(map.getId()).getParallelism()).isEqualTo(10); + assertThat(getStreamGraph(env).getStreamNode(windowed.getId()).getParallelism()).isOne(); + assertThat( + getStreamGraph(env) + .getStreamNode(sink.getTransformation().getId()) + .getParallelism()) + .isEqualTo(10); env.setParallelism(7); // Some parts, such as windowing rely on the fact that previous operators have a parallelism // set when instantiating the Discretizer. This would break if we dynamically changed // the parallelism of operations when changing the setting on the Execution Environment. - assertEquals(1, getStreamGraph(env).getStreamNode(src.getId()).getParallelism()); - assertEquals(10, getStreamGraph(env).getStreamNode(map.getId()).getParallelism()); - assertEquals(1, getStreamGraph(env).getStreamNode(windowed.getId()).getParallelism()); - assertEquals( - 10, - getStreamGraph(env) - .getStreamNode(sink.getTransformation().getId()) - .getParallelism()); + assertThat(getStreamGraph(env).getStreamNode(src.getId()).getParallelism()).isOne(); + assertThat(getStreamGraph(env).getStreamNode(map.getId()).getParallelism()).isEqualTo(10); + assertThat(getStreamGraph(env).getStreamNode(windowed.getId()).getParallelism()).isOne(); + assertThat( + getStreamGraph(env) + .getStreamNode(sink.getTransformation().getId()) + .getParallelism()) + .isEqualTo(10); DataStreamSource parallelSource = env.fromSequence(0, 0); parallelSource.sinkTo(new DiscardingSink()); - assertEquals(7, getStreamGraph(env).getStreamNode(parallelSource.getId()).getParallelism()); + assertThat(getStreamGraph(env).getStreamNode(parallelSource.getId()).getParallelism()) + .isEqualTo(7); parallelSource.setParallelism(3); - assertEquals(3, getStreamGraph(env).getStreamNode(parallelSource.getId()).getParallelism()); + assertThat(getStreamGraph(env).getStreamNode(parallelSource.getId()).getParallelism()) + .isEqualTo(3); map.setParallelism(2); - assertEquals(2, getStreamGraph(env).getStreamNode(map.getId()).getParallelism()); + assertThat(getStreamGraph(env).getStreamNode(map.getId()).getParallelism()).isEqualTo(2); sink.setParallelism(4); - assertEquals( - 4, - getStreamGraph(env) - .getStreamNode(sink.getTransformation().getId()) - .getParallelism()); + assertThat( + getStreamGraph(env) + .getStreamNode(sink.getTransformation().getId()) + .getParallelism()) + .isEqualTo(4); } /** Tests whether resources get set. 
*/ @Test - public void testResources() throws Exception { + void testResources() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); ResourceSpec minResource1 = ResourceSpec.newBuilder(1.0, 100).build(); @@ -769,62 +801,54 @@ public Long reduce(Long value1, Long value2) throws Exception { DataStreamSink sink = windowed.print(); sinkMethod.invoke(sink, minResource7, preferredResource7); - assertEquals( - minResource1, getStreamGraph(env).getStreamNode(source1.getId()).getMinResources()); - assertEquals( - preferredResource1, - getStreamGraph(env).getStreamNode(source1.getId()).getPreferredResources()); - - assertEquals( - minResource2, getStreamGraph(env).getStreamNode(map1.getId()).getMinResources()); - assertEquals( - preferredResource2, - getStreamGraph(env).getStreamNode(map1.getId()).getPreferredResources()); - - assertEquals( - minResource3, getStreamGraph(env).getStreamNode(source2.getId()).getMinResources()); - assertEquals( - preferredResource3, - getStreamGraph(env).getStreamNode(source2.getId()).getPreferredResources()); - - assertEquals( - minResource4, getStreamGraph(env).getStreamNode(map2.getId()).getMinResources()); - assertEquals( - preferredResource4, - getStreamGraph(env).getStreamNode(map2.getId()).getPreferredResources()); - - assertEquals( - minResource5, - getStreamGraph(env).getStreamNode(connected.getId()).getMinResources()); - assertEquals( - preferredResource5, - getStreamGraph(env).getStreamNode(connected.getId()).getPreferredResources()); - - assertEquals( - minResource6, - getStreamGraph(env).getStreamNode(windowed.getId()).getMinResources()); - assertEquals( - preferredResource6, - getStreamGraph(env).getStreamNode(windowed.getId()).getPreferredResources()); - - assertEquals( - minResource7, - getStreamGraph(env) - .getStreamNode(sink.getTransformation().getId()) - .getMinResources()); - assertEquals( - preferredResource7, - getStreamGraph(env) - .getStreamNode(sink.getTransformation().getId()) - .getPreferredResources()); + assertThat(getStreamGraph(env).getStreamNode(source1.getId()).getMinResources()) + .isEqualTo(minResource1); + assertThat(getStreamGraph(env).getStreamNode(source1.getId()).getPreferredResources()) + .isEqualTo(preferredResource1); + + assertThat(getStreamGraph(env).getStreamNode(map1.getId()).getMinResources()) + .isEqualTo(minResource2); + assertThat(getStreamGraph(env).getStreamNode(map1.getId()).getPreferredResources()) + .isEqualTo(preferredResource2); + + assertThat(getStreamGraph(env).getStreamNode(source2.getId()).getMinResources()) + .isEqualTo(minResource3); + assertThat(getStreamGraph(env).getStreamNode(source2.getId()).getPreferredResources()) + .isEqualTo(preferredResource3); + + assertThat(getStreamGraph(env).getStreamNode(map2.getId()).getMinResources()) + .isEqualTo(minResource4); + assertThat(getStreamGraph(env).getStreamNode(map2.getId()).getPreferredResources()) + .isEqualTo(preferredResource4); + + assertThat(getStreamGraph(env).getStreamNode(connected.getId()).getMinResources()) + .isEqualTo(minResource5); + assertThat(getStreamGraph(env).getStreamNode(connected.getId()).getPreferredResources()) + .isEqualTo(preferredResource5); + + assertThat(getStreamGraph(env).getStreamNode(windowed.getId()).getMinResources()) + .isEqualTo(minResource6); + assertThat(getStreamGraph(env).getStreamNode(windowed.getId()).getPreferredResources()) + .isEqualTo(preferredResource6); + + assertThat( + getStreamGraph(env) + .getStreamNode(sink.getTransformation().getId()) + 
.getMinResources()) + .isEqualTo(minResource7); + assertThat( + getStreamGraph(env) + .getStreamNode(sink.getTransformation().getId()) + .getPreferredResources()) + .isEqualTo(preferredResource7); } @Test - public void testTypeInfo() { + void testTypeInfo() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream src1 = env.fromSequence(0, 0); - assertEquals(TypeExtractor.getForClass(Long.class), src1.getType()); + assertThat(src1.getType()).isEqualTo(TypeExtractor.getForClass(Long.class)); DataStream> map = src1.map( @@ -835,7 +859,7 @@ public Tuple2 map(Long value) throws Exception { } }); - assertEquals(TypeExtractor.getForObject(new Tuple2<>(0, "")), map.getType()); + assertThat(map.getType()).isEqualTo(TypeExtractor.getForObject(new Tuple2<>(0, ""))); DataStream window = map.windowAll(GlobalWindows.create()) @@ -851,7 +875,7 @@ public void apply( throws Exception {} }); - assertEquals(TypeExtractor.getForClass(String.class), window.getType()); + assertThat(window.getType()).isEqualTo(TypeExtractor.getForClass(String.class)); DataStream flatten = window.windowAll(GlobalWindows.create()) @@ -881,7 +905,7 @@ public CustomPOJO merge(CustomPOJO a, CustomPOJO b) { } }); - assertEquals(TypeExtractor.getForClass(CustomPOJO.class), flatten.getType()); + assertThat(flatten.getType()).isEqualTo(TypeExtractor.getForClass(CustomPOJO.class)); } /** @@ -890,7 +914,7 @@ public CustomPOJO merge(CustomPOJO a, CustomPOJO b) { */ @Test @Deprecated - public void testKeyedStreamProcessTranslation() { + void testKeyedStreamProcessTranslation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource src = env.fromSequence(0, 0); @@ -916,8 +940,9 @@ public void onTimer(long timestamp, OnTimerContext ctx, Collector out) processed.sinkTo(new DiscardingSink()); - assertEquals(processFunction, getFunctionForDataStream(processed)); - assertTrue(getOperatorForDataStream(processed) instanceof LegacyKeyedProcessOperator); + assertThat(getFunctionForDataStream(processed)).isEqualTo(processFunction); + assertThat(getOperatorForDataStream(processed)) + .isInstanceOf(LegacyKeyedProcessOperator.class); } /** @@ -925,7 +950,7 @@ public void onTimer(long timestamp, OnTimerContext ctx, Collector out) * to an operator. */ @Test - public void testKeyedStreamKeyedProcessTranslation() { + void testKeyedStreamKeyedProcessTranslation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource src = env.fromSequence(0, 0); @@ -951,8 +976,8 @@ public void onTimer(long timestamp, OnTimerContext ctx, Collector out) processed.sinkTo(new DiscardingSink()); - assertEquals(keyedProcessFunction, getFunctionForDataStream(processed)); - assertTrue(getOperatorForDataStream(processed) instanceof KeyedProcessOperator); + assertThat(getFunctionForDataStream(processed)).isEqualTo(keyedProcessFunction); + assertThat(getOperatorForDataStream(processed)).isInstanceOf(KeyedProcessOperator.class); } /** @@ -960,7 +985,7 @@ public void onTimer(long timestamp, OnTimerContext ctx, Collector out) * operator. 
*/ @Test - public void testProcessTranslation() { + void testProcessTranslation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource src = env.fromSequence(0, 0); @@ -985,8 +1010,8 @@ public void onTimer(long timestamp, OnTimerContext ctx, Collector out) processed.sinkTo(new DiscardingSink()); - assertEquals(processFunction, getFunctionForDataStream(processed)); - assertTrue(getOperatorForDataStream(processed) instanceof ProcessOperator); + assertThat(getFunctionForDataStream(processed)).isEqualTo(processFunction); + assertThat(getOperatorForDataStream(processed)).isInstanceOf(ProcessOperator.class); } /** @@ -994,7 +1019,7 @@ public void onTimer(long timestamp, OnTimerContext ctx, Collector out) * KeyedBroadcastProcessFunction}. */ @Test - public void testFailedTranslationOnKeyed() { + void testFailedTranslationOnKeyed() { final MapStateDescriptor descriptor = new MapStateDescriptor<>( @@ -1028,27 +1053,32 @@ public long extractTimestamp( BroadcastStream broadcast = srcTwo.broadcast(descriptor); BroadcastConnectedStream bcStream = srcOne.connect(broadcast); - expectedException.expect(IllegalArgumentException.class); - bcStream.process( - new BroadcastProcessFunction() { - @Override - public void processBroadcastElement( - String value, Context ctx, Collector out) throws Exception { - // do nothing - } - - @Override - public void processElement( - Long value, ReadOnlyContext ctx, Collector out) - throws Exception { - // do nothing - } - }); + assertThatThrownBy( + () -> + bcStream.process( + new BroadcastProcessFunction() { + @Override + public void processBroadcastElement( + String value, + Context ctx, + Collector out) { + // do nothing + } + + @Override + public void processElement( + Long value, + ReadOnlyContext ctx, + Collector out) { + // do nothing + } + })) + .isInstanceOf(IllegalArgumentException.class); } /** Tests that with a non-keyed stream we have to provide a {@link BroadcastProcessFunction}. */ @Test - public void testFailedTranslationOnNonKeyed() { + void testFailedTranslationOnNonKeyed() { final MapStateDescriptor descriptor = new MapStateDescriptor<>( @@ -1081,27 +1111,33 @@ public long extractTimestamp( BroadcastStream broadcast = srcTwo.broadcast(descriptor); BroadcastConnectedStream bcStream = srcOne.connect(broadcast); - expectedException.expect(IllegalArgumentException.class); - bcStream.process( - new KeyedBroadcastProcessFunction() { - @Override - public void processBroadcastElement( - String value, Context ctx, Collector out) throws Exception { - // do nothing - } - - @Override - public void processElement( - Long value, ReadOnlyContext ctx, Collector out) - throws Exception { - // do nothing - } - }); + assertThatThrownBy( + () -> + bcStream.process( + new KeyedBroadcastProcessFunction< + String, Long, String, String>() { + @Override + public void processBroadcastElement( + String value, + Context ctx, + Collector out) { + // do nothing + } + + @Override + public void processElement( + Long value, + ReadOnlyContext ctx, + Collector out) { + // do nothing + } + })) + .isInstanceOf(IllegalArgumentException.class); } /** Tests that verifies window operator has different name and description. 
*/ @Test - public void testWindowOperatorDescription() { + void testWindowOperatorDescription() { // global window StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream dataStream1 = @@ -1118,13 +1154,13 @@ public Long reduce(Long value1, Long value2) throws Exception { } }); // name is simplified - assertEquals("GlobalWindows", dataStream1.getTransformation().getName()); + assertThat(dataStream1.getTransformation().getName()).isEqualTo("GlobalWindows"); // description contains detail of function: // TriggerWindow(GlobalWindows(), ReducingStateDescriptor{name=window-contents, // defaultValue=null, // serializer=org.apache.flink.api.common.typeutils.base.LongSerializer@6af9fcb2}, // PurgingTrigger(CountTrigger(10)), AllWindowedStream.reduce(AllWindowedStream.java:229)) - assertTrue(dataStream1.getTransformation().getDescription().contains("PurgingTrigger")); + assertThat(dataStream1.getTransformation().getDescription()).contains("PurgingTrigger"); // keyed window DataStream dataStream2 = @@ -1142,11 +1178,11 @@ public Long reduce(Long value1, Long value2) throws Exception { } }); // name is simplified - assertEquals("TumblingEventTimeWindows", dataStream2.getTransformation().getName()); + assertThat(dataStream2.getTransformation().getName()).isEqualTo("TumblingEventTimeWindows"); // description contains detail of function: // Window(TumblingEventTimeWindows(1000), PurgingTrigger, ReduceFunction$36, // PassThroughWindowFunction) - assertTrue(dataStream2.getTransformation().getDescription().contains("PurgingTrigger")); + assertThat(dataStream2.getTransformation().getDescription()).contains("PurgingTrigger"); } /** @@ -1155,7 +1191,7 @@ public Long reduce(Long value1, Long value2) throws Exception { * @throws Exception */ @Test - public void testUserDefinedDescription() { + void testUserDefinedDescription() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream dataStream1 = @@ -1219,12 +1255,13 @@ public Long reduce(Long value1, Long value2) throws Exception { // test functionality through the operator names in the execution plan String plan = env.getExecutionPlan(); - assertTrue(plan.contains("this is test source 1")); - assertTrue(plan.contains("this is test source 2")); - assertTrue(plan.contains("this is test map 1")); - assertTrue(plan.contains("this is test map 2")); - assertTrue(plan.contains("this is test co flat map")); - assertTrue(plan.contains("this is test window reduce")); + assertThat(plan) + .contains( + "this is test source 1", + "this is test map 1", + "this is test map 2", + "this is test co flat map", + "this is test window reduce"); } private abstract static class CustomWmEmitter @@ -1238,7 +1275,7 @@ public Watermark checkAndGetNextWatermark(T lastElement, long extractedTimestamp } @Test - public void operatorTest() { + void operatorTest() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource src = env.fromSequence(0, 0); @@ -1252,7 +1289,7 @@ public Integer map(Long value) throws Exception { }; DataStream map = src.map(mapFunction); map.sinkTo(new DiscardingSink()); - assertEquals(mapFunction, getFunctionForDataStream(map)); + assertThat(getFunctionForDataStream(map)).isEqualTo(mapFunction); FlatMapFunction flatMapFunction = new FlatMapFunction() { @@ -1263,7 +1300,7 @@ public void flatMap(Long value, Collector out) throws Exception {} }; DataStream flatMap = src.flatMap(flatMapFunction); flatMap.sinkTo(new DiscardingSink()); - 
assertEquals(flatMapFunction, getFunctionForDataStream(flatMap)); + assertThat(getFunctionForDataStream(flatMap)).isEqualTo(flatMapFunction); FilterFunction filterFunction = new FilterFunction() { @@ -1277,19 +1314,10 @@ public boolean filter(Integer value) throws Exception { unionFilter.sinkTo(new DiscardingSink()); - assertEquals(filterFunction, getFunctionForDataStream(unionFilter)); - - try { - getStreamGraph(env).getStreamEdgesOrThrow(map.getId(), unionFilter.getId()); - } catch (RuntimeException e) { - fail(e.getMessage()); - } + assertThat(getFunctionForDataStream(unionFilter)).isEqualTo(filterFunction); - try { - getStreamGraph(env).getStreamEdgesOrThrow(flatMap.getId(), unionFilter.getId()); - } catch (RuntimeException e) { - fail(e.getMessage()); - } + getStreamGraph(env).getStreamEdgesOrThrow(map.getId(), unionFilter.getId()); + getStreamGraph(env).getStreamEdgesOrThrow(flatMap.getId(), unionFilter.getId()); ConnectedStreams connect = map.connect(flatMap); CoMapFunction coMapper = @@ -1308,23 +1336,14 @@ public String map2(Integer value) { }; DataStream coMap = connect.map(coMapper); coMap.sinkTo(new DiscardingSink()); - assertEquals(coMapper, getFunctionForDataStream(coMap)); - - try { - getStreamGraph(env).getStreamEdgesOrThrow(map.getId(), coMap.getId()); - } catch (RuntimeException e) { - fail(e.getMessage()); - } + assertThat(getFunctionForDataStream(coMap)).isEqualTo(coMapper); - try { - getStreamGraph(env).getStreamEdgesOrThrow(flatMap.getId(), coMap.getId()); - } catch (RuntimeException e) { - fail(e.getMessage()); - } + getStreamGraph(env).getStreamEdgesOrThrow(map.getId(), coMap.getId()); + getStreamGraph(env).getStreamEdgesOrThrow(flatMap.getId(), coMap.getId()); } @Test - public void testKeyedConnectedStreamsType() { + void testKeyedConnectedStreamsType() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource stream1 = env.fromData(1, 2); @@ -1335,28 +1354,28 @@ public void testKeyedConnectedStreamsType() { KeyedStream firstKeyedInput = (KeyedStream) connectedStreams.getFirstInput(); KeyedStream secondKeyedInput = (KeyedStream) connectedStreams.getSecondInput(); - assertThat(firstKeyedInput.getKeyType(), equalTo(Types.INT)); - assertThat(secondKeyedInput.getKeyType(), equalTo(Types.INT)); + assertThat(firstKeyedInput.getKeyType()).isEqualTo(Types.INT); + assertThat(secondKeyedInput.getKeyType()).isEqualTo(Types.INT); } @Test - public void sinkKeyTest() { + void sinkKeyTest() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSink sink = env.fromSequence(1, 100).print(); - assertEquals( - 0, - getStreamGraph(env) - .getStreamNode(sink.getTransformation().getId()) - .getStatePartitioners() - .length); - assertTrue( - getStreamGraph(env) + assertThat( + getStreamGraph(env) + .getStreamNode(sink.getTransformation().getId()) + .getStatePartitioners() + .length) + .isZero(); + assertThat( + getStreamGraph(env) .getStreamNode(sink.getTransformation().getId()) .getInEdges() .get(0) - .getPartitioner() - instanceof ForwardPartitioner); + .getPartitioner()) + .isInstanceOf(ForwardPartitioner.class); KeySelector key1 = new KeySelector() { @@ -1371,32 +1390,34 @@ public Long getKey(Long value) throws Exception { DataStreamSink sink2 = env.fromSequence(1, 100).keyBy(key1).print(); - assertEquals( - 1, - getStreamGraph(env) - .getStreamNode(sink2.getTransformation().getId()) - .getStatePartitioners() - .length); - assertNotNull( - getStreamGraph(env) - 
.getStreamNode(sink2.getTransformation().getId()) - .getStateKeySerializer()); - assertNotNull( - getStreamGraph(env) - .getStreamNode(sink2.getTransformation().getId()) - .getStateKeySerializer()); - assertEquals( - key1, - getStreamGraph(env) - .getStreamNode(sink2.getTransformation().getId()) - .getStatePartitioners()[0]); - assertTrue( - getStreamGraph(env) + assertThat( + getStreamGraph(env) + .getStreamNode(sink2.getTransformation().getId()) + .getStatePartitioners() + .length) + .isOne(); + assertThat( + getStreamGraph(env) + .getStreamNode(sink2.getTransformation().getId()) + .getStateKeySerializer()) + .isNotNull(); + assertThat( + getStreamGraph(env) + .getStreamNode(sink2.getTransformation().getId()) + .getStateKeySerializer()) + .isNotNull(); + assertThat( + getStreamGraph(env) + .getStreamNode(sink2.getTransformation().getId()) + .getStatePartitioners()[0]) + .isEqualTo(key1); + assertThat( + getStreamGraph(env) .getStreamNode(sink2.getTransformation().getId()) .getInEdges() .get(0) - .getPartitioner() - instanceof KeyGroupStreamPartitioner); + .getPartitioner()) + .isInstanceOf(KeyGroupStreamPartitioner.class); KeySelector key2 = new KeySelector() { @@ -1411,28 +1432,28 @@ public Long getKey(Long value) throws Exception { DataStreamSink sink3 = env.fromSequence(1, 100).keyBy(key2).print(); - assertEquals( - 1, - getStreamGraph(env) - .getStreamNode(sink3.getTransformation().getId()) - .getStatePartitioners() - .length); - assertEquals( - key2, - getStreamGraph(env) - .getStreamNode(sink3.getTransformation().getId()) - .getStatePartitioners()[0]); - assertTrue( - getStreamGraph(env) + assertThat( + getStreamGraph(env) + .getStreamNode(sink3.getTransformation().getId()) + .getStatePartitioners() + .length) + .isOne(); + assertThat( + getStreamGraph(env) + .getStreamNode(sink3.getTransformation().getId()) + .getStatePartitioners()[0]) + .isEqualTo(key2); + assertThat( + getStreamGraph(env) .getStreamNode(sink3.getTransformation().getId()) .getInEdges() .get(0) - .getPartitioner() - instanceof KeyGroupStreamPartitioner); + .getPartitioner()) + .isInstanceOf(KeyGroupStreamPartitioner.class); } @Test - public void testChannelSelectors() { + void testChannelSelectors() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource src = env.fromSequence(0, 0); @@ -1444,7 +1465,7 @@ public void testChannelSelectors() { .getStreamEdges(src.getId(), broadcastSink.getTransformation().getId()) .get(0) .getPartitioner(); - assertTrue(broadcastPartitioner instanceof BroadcastPartitioner); + assertThat(broadcastPartitioner).isInstanceOf(BroadcastPartitioner.class); DataStream shuffle = src.shuffle(); DataStreamSink shuffleSink = shuffle.print(); @@ -1453,7 +1474,7 @@ public void testChannelSelectors() { .getStreamEdges(src.getId(), shuffleSink.getTransformation().getId()) .get(0) .getPartitioner(); - assertTrue(shufflePartitioner instanceof ShufflePartitioner); + assertThat(shufflePartitioner).isInstanceOf(ShufflePartitioner.class); DataStream forward = src.forward(); DataStreamSink forwardSink = forward.print(); @@ -1462,7 +1483,7 @@ public void testChannelSelectors() { .getStreamEdges(src.getId(), forwardSink.getTransformation().getId()) .get(0) .getPartitioner(); - assertTrue(forwardPartitioner instanceof ForwardPartitioner); + assertThat(forwardPartitioner).isInstanceOf(ForwardPartitioner.class); DataStream rebalance = src.rebalance(); DataStreamSink rebalanceSink = rebalance.print(); @@ -1471,7 +1492,7 @@ public void testChannelSelectors() { 
.getStreamEdges(src.getId(), rebalanceSink.getTransformation().getId()) .get(0) .getPartitioner(); - assertTrue(rebalancePartitioner instanceof RebalancePartitioner); + assertThat(rebalancePartitioner).isInstanceOf(RebalancePartitioner.class); DataStream global = src.global(); DataStreamSink globalSink = global.print(); @@ -1480,7 +1501,7 @@ public void testChannelSelectors() { .getStreamEdges(src.getId(), globalSink.getTransformation().getId()) .get(0) .getPartitioner(); - assertTrue(globalPartitioner instanceof GlobalPartitioner); + assertThat(globalPartitioner).isInstanceOf(GlobalPartitioner.class); } ///////////////////////////////////////////////////////////// @@ -1488,7 +1509,7 @@ public void testChannelSelectors() { ///////////////////////////////////////////////////////////// @Test - public void testPrimitiveArrayKeyRejection() { + void testPrimitiveArrayKeyRejection() { KeySelector, int[]> keySelector = new KeySelector, int[]>() { @@ -1507,7 +1528,7 @@ public int[] getKey(Tuple2 value) throws Exception { } @Test - public void testBasicArrayKeyRejection() { + void testBasicArrayKeyRejection() { KeySelector, Integer[]> keySelector = new KeySelector, Integer[]>() { @@ -1522,7 +1543,7 @@ public Integer[] getKey(Tuple2 value) throws Exception { } @Test - public void testObjectArrayKeyRejection() { + void testObjectArrayKeyRejection() { KeySelector, Object[]> keySelector = new KeySelector, Object[]>() { @@ -1551,19 +1572,17 @@ private void assertArrayKeyRejection( DataStream> input = env.fromData(new Tuple2<>(new Integer[] {1, 2}, "barfoo")); - Assert.assertEquals( - expectedKeyType, TypeExtractor.getKeySelectorTypes(keySelector, input.getType())); + assertThat(TypeExtractor.getKeySelectorTypes(keySelector, input.getType())) + .isEqualTo(expectedKeyType); // adjust the rule - expectedException.expect(InvalidProgramException.class); - expectedException.expectMessage( - new StringStartsWith("Type " + expectedKeyType + " cannot be used as key.")); - - input.keyBy(keySelector); + assertThatThrownBy(() -> input.keyBy(keySelector)) + .isInstanceOf(InvalidProgramException.class) + .hasMessageStartingWith("Type " + expectedKeyType + " cannot be used as key."); } @Test - public void testEnumKeyRejection() { + void testEnumKeyRejection() { KeySelector, TestEnum> keySelector = value -> value.f0; StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); @@ -1571,18 +1590,16 @@ public void testEnumKeyRejection() { DataStream> input = env.fromData(Tuple2.of(TestEnum.FOO, "Foo"), Tuple2.of(TestEnum.BAR, "Bar")); - expectedException.expect(InvalidProgramException.class); - expectedException.expectMessage( - new StringStartsWith( - "Type " + EnumTypeInfo.of(TestEnum.class) + " cannot be used as key.")); - - input.keyBy(keySelector); + assertThatThrownBy(() -> input.keyBy(keySelector)) + .isInstanceOf(InvalidProgramException.class) + .hasMessageStartingWith( + "Type " + EnumTypeInfo.of(TestEnum.class) + " cannot be used as key."); } //////////////// Composite Key Tests : POJOs //////////////// @Test - public void testPOJOWithNestedArrayNoHashCodeKeyRejection() { + void testPOJOWithNestedArrayNoHashCodeKeyRejection() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream input = env.fromData(new POJOWithHashCode(new int[] {1, 2})); @@ -1592,15 +1609,13 @@ public void testPOJOWithNestedArrayNoHashCodeKeyRejection() { PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO); // adjust the rule - 
expectedException.expect(InvalidProgramException.class); - expectedException.expectMessage( - new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key.")); - - input.keyBy("id"); + assertThatThrownBy(() -> input.keyBy("id")) + .isInstanceOf(InvalidProgramException.class) + .hasMessageStartingWith("Type " + expectedTypeInfo + " cannot be used as key."); } @Test - public void testPOJOWithNestedArrayAndHashCodeWorkAround() { + void testPOJOWithNestedArrayAndHashCodeWorkAround() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream input = env.fromData(new POJOWithHashCode(new int[] {1, 2})); @@ -1616,14 +1631,14 @@ public POJOWithHashCode getKey(POJOWithHashCode value) .addSink( new SinkFunction() { @Override - public void invoke(POJOWithHashCode value) throws Exception { - Assert.assertEquals(value.getId(), new int[] {1, 2}); + public void invoke(POJOWithHashCode value) { + assertThat(value.getId()).containsExactly(1, 2); } }); } @Test - public void testPOJOnoHashCodeKeyRejection() { + void testPOJOnoHashCodeKeyRejection() { KeySelector keySelector = new KeySelector() { @@ -1639,15 +1654,14 @@ public POJOWithoutHashCode getKey(POJOWithoutHashCode value) throws Exception { env.fromData(new POJOWithoutHashCode(new int[] {1, 2})); // adjust the rule - expectedException.expect(InvalidProgramException.class); - - input.keyBy(keySelector); + assertThatThrownBy(() -> input.keyBy(keySelector)) + .isInstanceOf(InvalidProgramException.class); } //////////////// Composite Key Tests : Tuples //////////////// @Test - public void testTupleNestedArrayKeyRejection() { + void testTupleNestedArrayKeyRejection() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream> input = @@ -1658,22 +1672,25 @@ public void testTupleNestedArrayKeyRejection() { BasicArrayTypeInfo.INT_ARRAY_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO); // adjust the rule - expectedException.expect(InvalidProgramException.class); - expectedException.expectMessage( - new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key.")); - - input.keyBy( - new KeySelector, Tuple2>() { - @Override - public Tuple2 getKey(Tuple2 value) - throws Exception { - return value; - } - }); + assertThatThrownBy( + () -> + input.keyBy( + new KeySelector< + Tuple2, + Tuple2>() { + @Override + public Tuple2 getKey( + Tuple2 value) + throws Exception { + return value; + } + })) + .isInstanceOf(InvalidProgramException.class) + .hasMessageStartingWith("Type " + expectedTypeInfo + " cannot be used as key."); } @Test - public void testPrimitiveKeyAcceptance() throws Exception { + void testPrimitiveKeyAcceptance() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); env.setMaxParallelism(1); @@ -1693,7 +1710,7 @@ public Object getKey(Integer value) throws Exception { new SinkFunction() { @Override public void invoke(Integer value) throws Exception { - Assert.assertEquals(10000L, (long) value); + assertThat(value).isEqualTo(10000); } }); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/RestartStrategyTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/RestartStrategyTest.java index e27065ed04b96..f2909276b364b 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/RestartStrategyTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/RestartStrategyTest.java @@ -22,20 +22,20 @@ import 
org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.graph.StreamGraph; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link RestartStrategies}. */ -public class RestartStrategyTest extends TestLogger { +class RestartStrategyTest { /** * Tests that in a streaming use case where checkpointing is enabled, there is no default * strategy set on the client side. */ @Test - public void testFallbackStrategyOnClientSideWhenCheckpointingEnabled() throws Exception { + void testFallbackStrategyOnClientSideWhenCheckpointingEnabled() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.enableCheckpointing(500); @@ -49,9 +49,9 @@ public void testFallbackStrategyOnClientSideWhenCheckpointingEnabled() throws Ex .deserializeValue(getClass().getClassLoader()) .getRestartStrategy(); - Assert.assertNotNull(restartStrategy); - Assert.assertTrue( - restartStrategy instanceof RestartStrategies.FallbackRestartStrategyConfiguration); + assertThat(restartStrategy) + .isNotNull() + .isInstanceOf(RestartStrategies.FallbackRestartStrategyConfiguration.class); } /** @@ -59,8 +59,7 @@ public void testFallbackStrategyOnClientSideWhenCheckpointingEnabled() throws Ex * execution retries is set to 0, restarting is deactivated. */ @Test - public void testNoRestartingWhenCheckpointingAndExplicitExecutionRetriesZero() - throws Exception { + void testNoRestartingWhenCheckpointingAndExplicitExecutionRetriesZero() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.enableCheckpointing(500); env.setNumberOfExecutionRetries(0); @@ -75,9 +74,9 @@ public void testNoRestartingWhenCheckpointingAndExplicitExecutionRetriesZero() .deserializeValue(getClass().getClassLoader()) .getRestartStrategy(); - Assert.assertNotNull(restartStrategy); - Assert.assertTrue( - restartStrategy instanceof RestartStrategies.NoRestartStrategyConfiguration); + assertThat(restartStrategy) + .isNotNull() + .isInstanceOf(RestartStrategies.NoRestartStrategyConfiguration.class); } /** @@ -85,8 +84,7 @@ public void testNoRestartingWhenCheckpointingAndExplicitExecutionRetriesZero() * execution retries is set to 42 and the delay to 1337, fixed delay restarting is used. 
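 *
 * <p>For reference, a minimal sketch of the AssertJ idiom the assertion below relies on; the
 * values mirror this test, while the {@code fixedDelayRestart} construction is only
 * illustrative:
 *
 * <pre>{@code
 * RestartStrategies.RestartStrategyConfiguration conf =
 *         RestartStrategies.fixedDelayRestart(42, 1337L);
 *
 * assertThat(conf)
 *         .isInstanceOfSatisfying(
 *                 RestartStrategies.FixedDelayRestartStrategyConfiguration.class,
 *                 fixedDelay -> assertThat(fixedDelay.getRestartAttempts()).isEqualTo(42));
 * }</pre>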
 */
     @Test
-    public void testFixedRestartingWhenCheckpointingAndExplicitExecutionRetriesNonZero()
-            throws Exception {
+    void testFixedRestartingWhenCheckpointingAndExplicitExecutionRetriesNonZero() throws Exception {
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
         env.enableCheckpointing(500);
         env.setNumberOfExecutionRetries(42);
@@ -102,18 +100,14 @@ public void testFixedRestartingWhenCheckpointingAndExplicitExecutionRetriesNonZe
                         .deserializeValue(getClass().getClassLoader())
                         .getRestartStrategy();

-        Assert.assertNotNull(restartStrategy);
-        Assert.assertTrue(
-                restartStrategy
-                        instanceof RestartStrategies.FixedDelayRestartStrategyConfiguration);
-        Assert.assertEquals(
-                42,
-                ((RestartStrategies.FixedDelayRestartStrategyConfiguration) restartStrategy)
-                        .getRestartAttempts());
-        Assert.assertEquals(
-                1337,
-                ((RestartStrategies.FixedDelayRestartStrategyConfiguration) restartStrategy)
-                        .getDurationBetweenAttempts()
-                        .toMillis());
+        assertThat(restartStrategy)
+                .isNotNull()
+                .isInstanceOfSatisfying(
+                        RestartStrategies.FixedDelayRestartStrategyConfiguration.class,
+                        strategy -> {
+                            assertThat(strategy.getRestartAttempts()).isEqualTo(42);
+                            assertThat(strategy.getDurationBetweenAttempts().toMillis())
+                                    .isEqualTo(1337);
+                        });
     }
 }
diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/SourceFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/SourceFunctionTest.java
index 754573076765c..61e62dad3422e 100644
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/SourceFunctionTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/SourceFunctionTest.java
@@ -23,35 +23,35 @@ import org.apache.flink.streaming.api.functions.source.SourceFunction;
 import org.apache.flink.streaming.util.SourceFunctionUtil;

-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 import java.util.Arrays;
 import java.util.List;

-import static org.junit.Assert.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;

 /** Tests for {@link SourceFunction}.
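 *
 * <p>Both tests below drive a {@link FromElementsFunction} through
 * {@code SourceFunctionUtil.runSourceFunction} and compare the emitted elements. A minimal
 * sketch of that round trip (the serializer is required because the function stores its
 * elements in serialized form):
 *
 * <pre>{@code
 * List<Integer> actual =
 *         SourceFunctionUtil.runSourceFunction(
 *                 new FromElementsFunction<>(IntSerializer.INSTANCE, 1, 2, 3));
 * assertThat(actual).containsExactly(1, 2, 3);
 * }</pre>
 *
 * <p>The tests additionally clone the function via {@code
 * CommonTestUtils.createCopySerializable} to exercise its Java serializability.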
 */
-public class SourceFunctionTest {
+class SourceFunctionTest {
     @Test
-    public void fromElementsTest() throws Exception {
+    void fromElementsTest() throws Exception {
         List<Integer> expectedList = Arrays.asList(1, 2, 3);
         List<Integer> actualList =
                 SourceFunctionUtil.runSourceFunction(
                         CommonTestUtils.createCopySerializable(
                                 new FromElementsFunction<Integer>(
                                         IntSerializer.INSTANCE, 1, 2, 3)));
-        assertEquals(expectedList, actualList);
+        assertThat(actualList).isEqualTo(expectedList);
     }

     @Test
-    public void fromCollectionTest() throws Exception {
+    void fromCollectionTest() throws Exception {
         List<Integer> expectedList = Arrays.asList(1, 2, 3);
         List<Integer> actualList =
                 SourceFunctionUtil.runSourceFunction(
                         CommonTestUtils.createCopySerializable(
                                 new FromElementsFunction<Integer>(
                                         IntSerializer.INSTANCE, Arrays.asList(1, 2, 3))));
-        assertEquals(expectedList, actualList);
+        assertThat(actualList).isEqualTo(expectedList);
     }
 }
diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/TypeFillTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/TypeFillTest.java
index a6d1cb1ced0e9..fbabad51e015c 100644
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/TypeFillTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/TypeFillTest.java
@@ -18,6 +18,7 @@ package org.apache.flink.streaming.api;

 import org.apache.flink.api.common.functions.FlatMapFunction;
+import org.apache.flink.api.common.functions.InvalidTypesException;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
 import org.apache.flink.api.common.typeinfo.TypeHint;
@@ -33,88 +34,80 @@ import org.apache.flink.streaming.api.windowing.time.Time;
 import org.apache.flink.util.Collector;

-import org.junit.Test;
+import org.junit.jupiter.api.Test;

-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;

 /**
  * Tests for handling missing type information either by calling {@code returns()} or having an
  * explicit type information parameter.
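 *
 * <p>A sketch of the two paths under test; {@code TestMap} is the helper defined in this file,
 * and the {@code Types.LONG} hint is one of several equivalent ways to supply the type:
 *
 * <pre>{@code
 * // Without a hint, the generic output type cannot be extracted.
 * assertThatThrownBy(() -> source.map(new TestMap<Long, Long>()).print())
 *         .isInstanceOf(InvalidTypesException.class);
 *
 * // Supplying the type via returns() makes the same pipeline valid.
 * source.map(new TestMap<Long, Long>()).returns(Types.LONG).print();
 * }</pre>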
*/ @SuppressWarnings("serial") -public class TypeFillTest { +class TypeFillTest { @Test - public void test() { + void test() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - try { - env.addSource(new TestSource()).print(); - fail(); - } catch (Exception ignored) { - } + assertThatThrownBy(() -> env.addSource(new TestSource()).print()) + .isInstanceOf(InvalidTypesException.class); DataStream source = env.fromSequence(1, 10); - try { - source.map(new TestMap()).print(); - fail(); - } catch (Exception ignored) { - } - - try { - source.flatMap(new TestFlatMap()).print(); - fail(); - } catch (Exception ignored) { - } - - try { - source.connect(source).map(new TestCoMap()).print(); - fail(); - } catch (Exception ignored) { - } - - try { - source.connect(source).flatMap(new TestCoFlatMap()).print(); - fail(); - } catch (Exception ignored) { - } - - try { - source.keyBy(new TestKeySelector()).print(); - fail(); - } catch (Exception ignored) { - } - - try { - source.connect(source) - .keyBy(new TestKeySelector(), new TestKeySelector<>()); - fail(); - } catch (Exception ignored) { - } - - try { - source.coGroup(source).where(new TestKeySelector<>()).equalTo(new TestKeySelector<>()); - fail(); - } catch (Exception ignored) { - } - - try { - source.join(source).where(new TestKeySelector<>()).equalTo(new TestKeySelector<>()); - fail(); - } catch (Exception ignored) { - } - - try { - source.keyBy((in) -> in) - .intervalJoin(source.keyBy((in) -> in)) - .between(Time.milliseconds(10L), Time.milliseconds(10L)) - .process(new TestProcessJoinFunction<>()) - .print(); - fail(); - } catch (Exception ignored) { - } + assertThatThrownBy(() -> source.map(new TestMap()).print()) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy(() -> source.flatMap(new TestFlatMap()).print()) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy( + () -> + source.connect(source) + .map(new TestCoMap()) + .print()) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy( + () -> + source.connect(source) + .flatMap(new TestCoFlatMap()) + .print()) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy(() -> source.keyBy(new TestKeySelector()).print()) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy( + () -> + source.connect(source) + .keyBy( + new TestKeySelector(), + new TestKeySelector<>())) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy( + () -> + source.coGroup(source) + .where(new TestKeySelector<>()) + .equalTo(new TestKeySelector<>())) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy( + () -> + source.join(source) + .where(new TestKeySelector<>()) + .equalTo(new TestKeySelector<>())) + .isInstanceOf(InvalidTypesException.class); + + assertThatThrownBy( + () -> + source.keyBy((in) -> in) + .intervalJoin(source.keyBy((in) -> in)) + .between(Time.milliseconds(10L), Time.milliseconds(10L)) + .process(new TestProcessJoinFunction<>()) + .print()) + .isInstanceOf(InvalidTypesException.class); env.addSource(new TestSource()).returns(Integer.class); source.map(new TestMap()).returns(Long.class).print(); @@ -145,9 +138,8 @@ public void test() { .between(Time.milliseconds(10L), Time.milliseconds(10L)) .process(new TestProcessJoinFunction<>(), Types.STRING); - assertEquals( - BasicTypeInfo.LONG_TYPE_INFO, - source.map(new TestMap()).returns(Long.class).getType()); + assertThat(source.map(new TestMap()).returns(Long.class).getType()) + 
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO); SingleOutputStreamOperator map = source.map( @@ -160,11 +152,8 @@ public String map(Long value) throws Exception { }); map.print(); - try { - map.returns(String.class); - fail(); - } catch (Exception ignored) { - } + assertThatThrownBy(() -> map.returns(String.class)) + .isInstanceOf(IllegalStateException.class); } private static class TestSource implements SourceFunction { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/checkpoint/ListCheckpointedTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/checkpoint/ListCheckpointedTest.java index 0a9e110d2a3ca..7371e2df78ee9 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/checkpoint/ListCheckpointedTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/checkpoint/ListCheckpointedTest.java @@ -23,31 +23,29 @@ import org.apache.flink.streaming.api.operators.StreamMap; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link ListCheckpointed}. */ -public class ListCheckpointedTest { +class ListCheckpointedTest { @Test - public void testUDFReturningNull() throws Exception { + void testUDFReturningNull() throws Exception { testUDF(new TestUserFunction(null)); } @Test - public void testUDFReturningEmpty() throws Exception { + void testUDFReturningEmpty() throws Exception { testUDF(new TestUserFunction(Collections.emptyList())); } @Test - public void testUDFReturningData() throws Exception { + void testUDFReturningData() throws Exception { testUDF(new TestUserFunction(Arrays.asList(1, 2, 3))); } @@ -57,13 +55,13 @@ private static void testUDF(TestUserFunction userFunction) throws Exception { createTestHarness(userFunction)) { testHarness.open(); snapshot = testHarness.snapshot(0L, 0L); - assertFalse(userFunction.isRestored()); + assertThat(userFunction.isRestored()).isFalse(); } try (AbstractStreamOperatorTestHarness testHarness = createTestHarness(userFunction)) { testHarness.initializeState(snapshot); testHarness.open(); - assertTrue(userFunction.isRestored()); + assertThat(userFunction.isRestored()).isTrue(); } } @@ -98,9 +96,9 @@ public List snapshotState(long checkpointId, long timestamp) throws Exc @Override public void restoreState(List state) throws Exception { if (null != expected) { - Assert.assertEquals(expected, state); + assertThat(state).isEqualTo(expected); } else { - assertTrue(state.isEmpty()); + assertThat(state).isEmpty(); } restored = true; } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/CoGroupedStreamsTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/CoGroupedStreamsTest.java index 5a8446d264aa9..3e964bf756501 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/CoGroupedStreamsTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/CoGroupedStreamsTest.java @@ -24,22 +24,23 @@ import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows; import org.apache.flink.streaming.api.windowing.windows.TimeWindow; -import org.junit.Assert; -import org.junit.Before; 
-import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.time.Duration; +import static org.assertj.core.api.Assertions.assertThat; + /** Unit test for {@link CoGroupedStreams}. */ -public class CoGroupedStreamsTest { +class CoGroupedStreamsTest { private DataStream dataStream1; private DataStream dataStream2; private KeySelector keySelector; private TumblingEventTimeWindows tsAssigner; private CoGroupFunction coGroupFunction; - @Before - public void setUp() { + @BeforeEach + void setUp() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); dataStream1 = env.fromData("a1", "a2", "a3"); dataStream2 = env.fromData("a1", "a2"); @@ -50,7 +51,7 @@ public void setUp() { } @Test - public void testDelegateToCoGrouped() { + void testDelegateToCoGrouped() { Duration lateness = Duration.ofMillis(42L); CoGroupedStreams.WithWindow withLateness = @@ -63,12 +64,12 @@ public void testDelegateToCoGrouped() { withLateness.apply(coGroupFunction, BasicTypeInfo.STRING_TYPE_INFO); - Assert.assertEquals( - lateness.toMillis(), withLateness.getWindowedStream().getAllowedLateness()); + assertThat(withLateness.getWindowedStream().getAllowedLateness()) + .isEqualTo(lateness.toMillis()); } @Test - public void testSetAllowedLateness() { + void testSetAllowedLateness() { Duration lateness = Duration.ofMillis(42L); CoGroupedStreams.WithWindow withLateness = @@ -79,6 +80,6 @@ public void testSetAllowedLateness() { .window(tsAssigner) .allowedLateness(lateness); - Assert.assertEquals(lateness, withLateness.getAllowedLatenessDuration().orElse(null)); + assertThat(withLateness.getAllowedLatenessDuration()).hasValue(lateness); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkDeprecatedTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkDeprecatedTest.java index 45140ff567caa..a873f3a0b4727 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkDeprecatedTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkDeprecatedTest.java @@ -22,9 +22,10 @@ import org.apache.flink.streaming.api.transformations.SinkTransformation; import org.apache.flink.streaming.runtime.operators.sink.deprecated.TestSinkV2; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * Unit test for {@link DataStreamSink}. @@ -33,21 +34,27 @@ * org.apache.flink.api.connector.sink2.TwoPhaseCommittingSink}. 
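 *
 * <p>The migration below also replaces {@code @Test(expected = ...)} with an assertion on the
 * throwing call itself, which pins the exception to one statement instead of accepting it from
 * anywhere in the test method. Sketch of the pattern, with a hypothetical {@code sink}:
 *
 * <pre>{@code
 * assertThatThrownBy(() -> sink.setUidHash("Test"))
 *         .isInstanceOf(UnsupportedOperationException.class);
 * }</pre>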
*/ @Deprecated -public class DataStreamSinkDeprecatedTest { +class DataStreamSinkDeprecatedTest { @Test - public void testGettingTransformationWithNewSinkAPI() { + void testGettingTransformationWithNewSinkAPI() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final Transformation transformation = env.fromData(1, 2) .sinkTo(TestSinkV2.newBuilder().build()) .getTransformation(); - assertTrue(transformation instanceof SinkTransformation); + assertThat(transformation).isInstanceOf(SinkTransformation.class); } - @Test(expected = UnsupportedOperationException.class) - public void throwExceptionWhenSetUidWithNewSinkAPI() { + @Test + void throwExceptionWhenSetUidWithNewSinkAPI() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.fromData(1, 2).sinkTo(TestSinkV2.newBuilder().build()).setUidHash("Test"); + + assertThatThrownBy( + () -> + env.fromData(1, 2) + .sinkTo(TestSinkV2.newBuilder().build()) + .setUidHash("Test")) + .isInstanceOf(UnsupportedOperationException.class); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkTest.java index d0d96f1b4d0b9..b4017222e5af8 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSinkTest.java @@ -22,26 +22,33 @@ import org.apache.flink.streaming.api.transformations.SinkTransformation; import org.apache.flink.streaming.runtime.operators.sink.TestSinkV2; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Unit test for {@link DataStreamSink}. 
*/ -public class DataStreamSinkTest { +class DataStreamSinkTest { @Test - public void testGettingTransformationWithNewSinkAPI() { + void testGettingTransformationWithNewSinkAPI() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final Transformation transformation = env.fromData(1, 2) .sinkTo(TestSinkV2.newBuilder().build()) .getTransformation(); - assertTrue(transformation instanceof SinkTransformation); + assertThat(transformation).isInstanceOf(SinkTransformation.class); } - @Test(expected = UnsupportedOperationException.class) - public void throwExceptionWhenSetUidWithNewSinkAPI() { + @Test + void throwExceptionWhenSetUidWithNewSinkAPI() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.fromData(1, 2).sinkTo(TestSinkV2.newBuilder().build()).setUidHash("Test"); + + assertThatThrownBy( + () -> + env.fromData(1, 2) + .sinkTo(TestSinkV2.newBuilder().build()) + .setUidHash("Test")) + .isInstanceOf(UnsupportedOperationException.class); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSourceTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSourceTest.java index 39781aace609c..ec6f423c7edcd 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSourceTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/DataStreamSourceTest.java @@ -22,26 +22,24 @@ import org.apache.flink.api.connector.source.mocks.MockSource; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Unit test for {@link DataStreamSource}. */ -public class DataStreamSourceTest { +class DataStreamSourceTest { /** Test constructor for new Sources (FLIP-27). 
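 *
 * <p>A sketch of the FLIP-27 entry point exercised here, as used in the test body below
 * ({@code MockSource} is the test double; any {@code Source} implementation is wired the same
 * way):
 *
 * <pre>{@code
 * DataStreamSource<Integer> stream =
 *         env.fromSource(
 *                 new MockSource(Boundedness.BOUNDED, 10),
 *                 WatermarkStrategy.noWatermarks(),
 *                 "TestingSource");
 * }</pre>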
*/ @Test - public void testConstructor() { + void testConstructor() { int expectParallelism = 100; - boolean expectIsParallel = true; StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); MockSource mockSource = new MockSource(Boundedness.BOUNDED, 10); DataStreamSource stream = env.fromSource(mockSource, WatermarkStrategy.noWatermarks(), "TestingSource"); stream.setParallelism(expectParallelism); - assertEquals(expectIsParallel, stream.isParallel()); - - assertEquals(expectParallelism, stream.getParallelism()); + assertThat(stream.isParallel()).isTrue(); + assertThat(stream.getParallelism()).isEqualTo(expectParallelism); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/JoinedStreamsTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/JoinedStreamsTest.java index 4a3898a265c3d..9dfb8a84634de 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/JoinedStreamsTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/datastream/JoinedStreamsTest.java @@ -24,22 +24,23 @@ import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows; import org.apache.flink.streaming.api.windowing.windows.TimeWindow; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.time.Duration; +import static org.assertj.core.api.Assertions.assertThat; + /** Unit test for {@link JoinedStreams}. */ -public class JoinedStreamsTest { +class JoinedStreamsTest { private DataStream dataStream1; private DataStream dataStream2; private KeySelector keySelector; private TumblingEventTimeWindows tsAssigner; private JoinFunction joinFunction; - @Before - public void setUp() { + @BeforeEach + void setUp() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); dataStream1 = env.fromData("a1", "a2", "a3"); dataStream2 = env.fromData("a1", "a2"); @@ -49,7 +50,7 @@ public void setUp() { } @Test - public void testDelegateToCoGrouped() { + void testDelegateToCoGrouped() { Duration lateness = Duration.ofMillis(42L); JoinedStreams.WithWindow withLateness = @@ -62,16 +63,12 @@ public void testDelegateToCoGrouped() { withLateness.apply(joinFunction, BasicTypeInfo.STRING_TYPE_INFO); - Assert.assertEquals( - lateness, - withLateness - .getCoGroupedWindowedStream() - .getAllowedLatenessDuration() - .orElse(null)); + assertThat(withLateness.getCoGroupedWindowedStream().getAllowedLatenessDuration()) + .hasValue(lateness); } @Test - public void testSetAllowedLateness() { + void testSetAllowedLateness() { Duration lateness = Duration.ofMillis(42L); JoinedStreams.WithWindow withLateness = @@ -82,6 +79,6 @@ public void testSetAllowedLateness() { .window(tsAssigner) .allowedLateness(lateness); - Assert.assertEquals(lateness, withLateness.getAllowedLatenessDuration().orElse(null)); + assertThat(withLateness.getAllowedLatenessDuration()).hasValue(lateness); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironmentConfigurationTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironmentConfigurationTest.java index ef54374740bcc..dc7bce84f1539 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironmentConfigurationTest.java +++ 
b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironmentConfigurationTest.java @@ -23,18 +23,19 @@ import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.ReadableConfig; import org.apache.flink.streaming.api.TimeCharacteristic; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameter; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameters; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; import java.util.Arrays; import java.util.Collection; import java.util.function.BiConsumer; import java.util.function.Function; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for configuring {@link StreamExecutionEnvironment} via {@link @@ -42,11 +43,11 @@ * * @see StreamExecutionEnvironmentComplexConfigurationTest */ -@RunWith(Parameterized.class) -public class StreamExecutionEnvironmentConfigurationTest { +@ExtendWith(ParameterizedTestExtension.class) +class StreamExecutionEnvironmentConfigurationTest { - @Parameterized.Parameters(name = "{0}") - public static Collection specs() { + @Parameters(name = "{0}") + private static Collection specs() { return Arrays.asList( TestSpec.testValue(TimeCharacteristic.IngestionTime) .whenSetFromFile("pipeline.time-characteristic", "IngestionTime") @@ -83,10 +84,10 @@ public static Collection specs() { .nonDefaultValue(100L)); } - @Parameterized.Parameter public TestSpec spec; + @Parameter private TestSpec spec; - @Test - public void testLoadingFromConfiguration() { + @TestTemplate + void testLoadingFromConfiguration() { StreamExecutionEnvironment configFromSetters = StreamExecutionEnvironment.getExecutionEnvironment(); StreamExecutionEnvironment configFromFile = @@ -100,8 +101,8 @@ public void testLoadingFromConfiguration() { spec.assertEqual(configFromFile, configFromSetters); } - @Test - public void testNotOverridingIfNotSet() { + @TestTemplate + void testNotOverridingIfNotSet() { StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment(); @@ -160,11 +161,11 @@ public void setNonDefaultValue(StreamExecutionEnvironment config) { public void assertEqual( StreamExecutionEnvironment configFromFile, StreamExecutionEnvironment configFromSetters) { - assertThat(getter.apply(configFromFile), equalTo(getter.apply(configFromSetters))); + assertThat(getter.apply(configFromFile)).isEqualTo(getter.apply(configFromSetters)); } public void assertEqualNonDefault(StreamExecutionEnvironment configFromFile) { - assertThat(getter.apply(configFromFile), equalTo(nonDefaultValue)); + assertThat(getter.apply(configFromFile)).isEqualTo(nonDefaultValue); } @Override diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/AscendingTimestampExtractorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/AscendingTimestampExtractorTest.java index 0a5e7b1578810..e97020165e988 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/AscendingTimestampExtractorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/AscendingTimestampExtractorTest.java @@ 
-20,30 +20,28 @@ import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for {@link AscendingTimestampExtractor}. */ -public class AscendingTimestampExtractorTest { +class AscendingTimestampExtractorTest { @Test - public void testWithFailingHandler() { + void testWithFailingHandler() { AscendingTimestampExtractor extractor = new LongExtractor() .withViolationHandler(new AscendingTimestampExtractor.FailingHandler()); runValidTests(extractor); - try { - runInvalidTest(extractor); - fail("should fail with an exception"); - } catch (Exception ignored) { - } + assertThatThrownBy(() -> runInvalidTest(extractor)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Ascending timestamps condition violated."); } @Test - public void testWithIgnoringHandler() { + void testWithIgnoringHandler() { AscendingTimestampExtractor extractor = new LongExtractor() .withViolationHandler(new AscendingTimestampExtractor.IgnoringHandler()); @@ -53,7 +51,7 @@ public void testWithIgnoringHandler() { } @Test - public void testWithLoggingHandler() { + void testWithLoggingHandler() { AscendingTimestampExtractor extractor = new LongExtractor() .withViolationHandler(new AscendingTimestampExtractor.LoggingHandler()); @@ -63,7 +61,7 @@ public void testWithLoggingHandler() { } @Test - public void testWithDefaultHandler() { + void testWithDefaultHandler() { AscendingTimestampExtractor extractor = new LongExtractor(); runValidTests(extractor); @@ -71,36 +69,37 @@ public void testWithDefaultHandler() { } @Test - public void testInitialAndFinalWatermark() { + void testInitialAndFinalWatermark() { AscendingTimestampExtractor extractor = new LongExtractor(); - assertEquals(Long.MIN_VALUE, extractor.getCurrentWatermark().getTimestamp()); + assertThat(extractor.getCurrentWatermark().getTimestamp()).isEqualTo(Long.MIN_VALUE); extractor.extractTimestamp(Long.MIN_VALUE, -1L); extractor.extractTimestamp(Long.MAX_VALUE, -1L); - assertEquals(Long.MAX_VALUE - 1, extractor.getCurrentWatermark().getTimestamp()); + assertThat(extractor.getCurrentWatermark().getTimestamp()).isEqualTo(Long.MAX_VALUE - 1); } // ------------------------------------------------------------------------ private void runValidTests(AscendingTimestampExtractor extractor) { - assertEquals(13L, extractor.extractTimestamp(13L, -1L)); - assertEquals(13L, extractor.extractTimestamp(13L, 0L)); - assertEquals(14L, extractor.extractTimestamp(14L, 0L)); - assertEquals(20L, extractor.extractTimestamp(20L, 0L)); - assertEquals(20L, extractor.extractTimestamp(20L, 0L)); - assertEquals(20L, extractor.extractTimestamp(20L, 0L)); - assertEquals(500L, extractor.extractTimestamp(500L, 0L)); - - assertEquals(Long.MAX_VALUE - 1, extractor.extractTimestamp(Long.MAX_VALUE - 1, 99999L)); + assertThat(extractor.extractTimestamp(13L, -1L)).isEqualTo(13L); + assertThat(extractor.extractTimestamp(13L, 0L)).isEqualTo(13L); + assertThat(extractor.extractTimestamp(14L, 0L)).isEqualTo(14L); + assertThat(extractor.extractTimestamp(20L, 0L)).isEqualTo(20L); + assertThat(extractor.extractTimestamp(20L, 0L)).isEqualTo(20L); + assertThat(extractor.extractTimestamp(20L, 0L)).isEqualTo(20L); + assertThat(extractor.extractTimestamp(500L, 0L)).isEqualTo(500L); + + 
assertThat(extractor.extractTimestamp(Long.MAX_VALUE - 1, 99999L)) + .isEqualTo(Long.MAX_VALUE - 1); } private void runInvalidTest(AscendingTimestampExtractor extractor) { - assertEquals(1000L, extractor.extractTimestamp(1000L, 100)); - assertEquals(1000L, extractor.extractTimestamp(1000L, 100)); + assertThat(extractor.extractTimestamp(1000L, 100)).isEqualTo(1000L); + assertThat(extractor.extractTimestamp(1000L, 100)).isEqualTo(1000L); // violation - assertEquals(999L, extractor.extractTimestamp(999L, 100)); + assertThat(extractor.extractTimestamp(999L, 100)).isEqualTo(999L); } // ------------------------------------------------------------------------ diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/BoundedOutOfOrdernessTimestampExtractorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/BoundedOutOfOrdernessTimestampExtractorTest.java index f08b7d2054b51..fad7fbb88dc9f 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/BoundedOutOfOrdernessTimestampExtractorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/BoundedOutOfOrdernessTimestampExtractorTest.java @@ -22,29 +22,30 @@ import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.api.windowing.time.Time; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link BoundedOutOfOrdernessTimestampExtractor}. */ -public class BoundedOutOfOrdernessTimestampExtractorTest { +class BoundedOutOfOrdernessTimestampExtractorTest { @Test - public void testInitializationAndRuntime() { + void testInitializationAndRuntime() { Time maxAllowedLateness = Time.milliseconds(10L); BoundedOutOfOrdernessTimestampExtractor extractor = new LongExtractor(maxAllowedLateness); - assertEquals(maxAllowedLateness.toMilliseconds(), extractor.getMaxOutOfOrdernessInMillis()); + assertThat(extractor.getMaxOutOfOrdernessInMillis()) + .isEqualTo(maxAllowedLateness.toMilliseconds()); runValidTests(extractor); } @Test - public void testInitialFinalAndWatermarkUnderflow() { + void testInitialFinalAndWatermarkUnderflow() { BoundedOutOfOrdernessTimestampExtractor extractor = new LongExtractor(Time.milliseconds(10L)); - assertEquals(Long.MIN_VALUE, extractor.getCurrentWatermark().getTimestamp()); + assertThat(extractor.getCurrentWatermark().getTimestamp()).isEqualTo(Long.MIN_VALUE); extractor.extractTimestamp(Long.MIN_VALUE, -1L); @@ -55,32 +56,33 @@ public void testInitialFinalAndWatermarkUnderflow() { // would lead to underflow. 
extractor.extractTimestamp(Long.MIN_VALUE + 2, -1); - assertEquals(Long.MIN_VALUE, extractor.getCurrentWatermark().getTimestamp()); + assertThat(extractor.getCurrentWatermark().getTimestamp()).isEqualTo(Long.MIN_VALUE); extractor.extractTimestamp(Long.MAX_VALUE, -1L); - assertEquals(Long.MAX_VALUE - 10, extractor.getCurrentWatermark().getTimestamp()); + assertThat(extractor.getCurrentWatermark().getTimestamp()).isEqualTo(Long.MAX_VALUE - 10); } // ------------------------------------------------------------------------ private void runValidTests(BoundedOutOfOrdernessTimestampExtractor extractor) { - assertEquals(Watermark.UNINITIALIZED, extractor.getCurrentWatermark()); + assertThat(extractor.getCurrentWatermark()).isEqualTo(Watermark.UNINITIALIZED); - assertEquals(13L, extractor.extractTimestamp(13L, 0L)); - assertEquals(13L, extractor.extractTimestamp(13L, 0L)); - assertEquals(14L, extractor.extractTimestamp(14L, 0L)); - assertEquals(20L, extractor.extractTimestamp(20L, 0L)); + assertThat(extractor.extractTimestamp(13L, 0L)).isEqualTo(13L); + assertThat(extractor.extractTimestamp(13L, 0L)).isEqualTo(13L); + assertThat(extractor.extractTimestamp(14L, 0L)).isEqualTo(14L); + assertThat(extractor.extractTimestamp(20L, 0L)).isEqualTo(20L); - assertEquals(new Watermark(10L), extractor.getCurrentWatermark()); + assertThat(extractor.getCurrentWatermark()).isEqualTo(new Watermark(10L)); - assertEquals(20L, extractor.extractTimestamp(20L, 0L)); - assertEquals(20L, extractor.extractTimestamp(20L, 0L)); - assertEquals(500L, extractor.extractTimestamp(500L, 0L)); + assertThat(extractor.extractTimestamp(20L, 0L)).isEqualTo(20L); + assertThat(extractor.extractTimestamp(20L, 0L)).isEqualTo(20L); + assertThat(extractor.extractTimestamp(500L, 0L)).isEqualTo(500L); - assertEquals(new Watermark(490L), extractor.getCurrentWatermark()); + assertThat(extractor.getCurrentWatermark()).isEqualTo(new Watermark(490L)); - assertEquals(Long.MAX_VALUE - 1, extractor.extractTimestamp(Long.MAX_VALUE - 1, 0L)); - assertEquals(new Watermark(Long.MAX_VALUE - 11), extractor.getCurrentWatermark()); + assertThat(extractor.extractTimestamp(Long.MAX_VALUE - 1, 0L)) + .isEqualTo(Long.MAX_VALUE - 1); + assertThat(extractor.getCurrentWatermark()).isEqualTo(new Watermark(Long.MAX_VALUE - 11)); } // ------------------------------------------------------------------------ diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/FromElementsFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/FromElementsFunctionTest.java index a47650d8606a3..7d2467fb1fa50 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/FromElementsFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/FromElementsFunctionTest.java @@ -29,42 +29,31 @@ import org.apache.flink.api.java.typeutils.ValueTypeInfo; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; +import org.apache.flink.core.testutils.CheckedThread; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; import org.apache.flink.streaming.api.functions.source.FromElementsFunction; import org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.streaming.api.operators.StreamSource; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; import org.apache.flink.types.Value; -import org.apache.flink.util.ExceptionUtils; import 
org.apache.flink.util.InstantiationUtil; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for the {@link org.apache.flink.streaming.api.functions.source.FromElementsFunction}. */ -public class FromElementsFunctionTest { +class FromElementsFunctionTest { private static final String[] STRING_ARRAY_DATA = {"Oh", "boy", "what", "a", "show", "!"}; private static final List STRING_LIST_DATA = Arrays.asList(STRING_ARRAY_DATA); - @Rule public final ExpectedException thrown = ExpectedException.none(); - private static List runSource(FromElementsFunction source) throws Exception { List result = new ArrayList<>(); FromElementsFunction clonedSource = InstantiationUtil.clone(source); @@ -73,54 +62,48 @@ private static List runSource(FromElementsFunction source) throws Exce } @Test - public void testStrings() { - try { - String[] data = {"Oh", "boy", "what", "a", "show", "!"}; - - FromElementsFunction source = - new FromElementsFunction( - BasicTypeInfo.STRING_TYPE_INFO.createSerializer( - new SerializerConfigImpl()), - data); - - List result = new ArrayList(); - source.run(new ListSourceContext(result)); - - assertEquals(Arrays.asList(data), result); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } + void testStrings() throws Exception { + String[] data = {"Oh", "boy", "what", "a", "show", "!"}; + + FromElementsFunction source = + new FromElementsFunction<>( + BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new SerializerConfigImpl()), + data); + + List result = new ArrayList<>(); + source.run(new ListSourceContext<>(result)); + + assertThat(result).containsExactly(data); } @Test - public void testNullElement() throws Exception { - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("contains a null element"); - - new FromElementsFunction<>("a", null, "b"); + void testNullElement() { + assertThatThrownBy(() -> new FromElementsFunction<>("a", null, "b")) + .hasMessageContaining("contains a null element") + .isInstanceOf(IllegalArgumentException.class); } @Test - public void testSetOutputTypeWithNoSerializer() throws Exception { + void testSetOutputTypeWithNoSerializer() throws Exception { FromElementsFunction source = new FromElementsFunction<>(STRING_ARRAY_DATA); - assertNull(source.getSerializer()); + assertThat(source.getSerializer()).isNull(); source.setOutputType(BasicTypeInfo.STRING_TYPE_INFO, new ExecutionConfig()); - assertNotNull(source.getSerializer()); - assertEquals( - BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new SerializerConfigImpl()), - source.getSerializer()); + assertThat(source.getSerializer()) + .isNotNull() + .isEqualTo( + BasicTypeInfo.STRING_TYPE_INFO.createSerializer( + new SerializerConfigImpl())); List result = runSource(source); - assertEquals(STRING_LIST_DATA, result); + 
assertThat(result).containsExactly(STRING_ARRAY_DATA); } @Test - public void testSetOutputTypeWithSameSerializer() throws Exception { + void testSetOutputTypeWithSameSerializer() throws Exception { FromElementsFunction source = new FromElementsFunction<>( BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new SerializerConfigImpl()), @@ -132,25 +115,29 @@ public void testSetOutputTypeWithSameSerializer() throws Exception { TypeSerializer newSerializer = source.getSerializer(); - assertEquals(existingSerializer, newSerializer); + assertThat(newSerializer).isEqualTo(existingSerializer); List result = runSource(source); - assertEquals(STRING_LIST_DATA, result); + assertThat(result).containsExactly(STRING_ARRAY_DATA); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testSetOutputTypeWithIncompatibleType() throws Exception { - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("not all subclasses of java.lang.Integer"); - + void testSetOutputTypeWithIncompatibleType() { FromElementsFunction source = new FromElementsFunction<>(STRING_LIST_DATA); - source.setOutputType((TypeInformation) BasicTypeInfo.INT_TYPE_INFO, new ExecutionConfig()); + + assertThatThrownBy( + () -> + source.setOutputType( + (TypeInformation) BasicTypeInfo.INT_TYPE_INFO, + new ExecutionConfig())) + .hasMessageContaining("not all subclasses of java.lang.Integer") + .isInstanceOf(IllegalArgumentException.class); } @Test - public void testSetOutputTypeWithExistingBrokenSerializer() throws Exception { + void testSetOutputTypeWithExistingBrokenSerializer() throws Exception { // the original serializer throws an exception TypeInformation info = new ValueTypeInfo<>(DeserializeTooMuchType.class); @@ -167,56 +154,53 @@ public void testSetOutputTypeWithExistingBrokenSerializer() throws Exception { TypeSerializer newSerializer = source.getSerializer(); - assertNotEquals(existingSerializer, newSerializer); + assertThat(newSerializer).isNotEqualTo(existingSerializer); List result = runSource(source); - assertThat(result, hasSize(1)); - assertThat(result.get(0), instanceOf(DeserializeTooMuchType.class)); + assertThat(result).hasSize(1).first().isInstanceOf(DeserializeTooMuchType.class); } @Test - public void testSetOutputTypeAfterTransferred() throws Exception { - thrown.expect(IllegalStateException.class); - thrown.expectMessage( - "The output type should've been specified before shipping the graph to the cluster"); - + void testSetOutputTypeAfterTransferred() throws Exception { FromElementsFunction source = InstantiationUtil.clone(new FromElementsFunction<>(STRING_LIST_DATA)); - source.setOutputType(BasicTypeInfo.STRING_TYPE_INFO, new ExecutionConfig()); + + assertThatThrownBy( + () -> + source.setOutputType( + BasicTypeInfo.STRING_TYPE_INFO, new ExecutionConfig())) + .hasMessageContaining( + "The output type should've been specified before shipping the graph to the cluster") + .isInstanceOf(IllegalStateException.class); } @Test - public void testNoSerializer() throws Exception { - thrown.expect(IllegalStateException.class); - thrown.expectMessage("serializer not configured"); - + void testNoSerializer() { FromElementsFunction source = new FromElementsFunction<>(STRING_LIST_DATA); - runSource(source); + + assertThatThrownBy(() -> runSource(source)) + .hasMessageContaining("serializer not configured") + .isInstanceOf(IllegalStateException.class); } @Test - public void testNonJavaSerializableType() { - try { - MyPojo[] data = {new MyPojo(1, 2), new MyPojo(3, 4), new MyPojo(5, 6)}; - - FromElementsFunction 
source = - new FromElementsFunction( - TypeExtractor.getForClass(MyPojo.class) - .createSerializer(new SerializerConfigImpl()), - data); - - List result = runSource(source); - - assertEquals(Arrays.asList(data), result); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } + void testNonJavaSerializableType() throws Exception { + MyPojo[] data = {new MyPojo(1, 2), new MyPojo(3, 4), new MyPojo(5, 6)}; + + FromElementsFunction source = + new FromElementsFunction<>( + TypeExtractor.getForClass(MyPojo.class) + .createSerializer(new SerializerConfigImpl()), + data); + + List result = runSource(source); + + assertThat(result).containsExactly(data); } @Test - public void testNonJavaSerializableTypeWithSetOutputType() throws Exception { + void testNonJavaSerializableTypeWithSetOutputType() throws Exception { MyPojo[] data = {new MyPojo(1, 2), new MyPojo(3, 4), new MyPojo(5, 6)}; FromElementsFunction source = new FromElementsFunction<>(data); @@ -225,137 +209,100 @@ public void testNonJavaSerializableTypeWithSetOutputType() throws Exception { List result = runSource(source); - assertEquals(Arrays.asList(data), result); + assertThat(result).containsExactly(data); + } + + @Test + void testSerializationError() { + TypeInformation info = + new ValueTypeInfo<>(SerializationErrorType.class); + + assertThatThrownBy( + () -> + new FromElementsFunction<>( + info.createSerializer(new SerializerConfigImpl()), + new SerializationErrorType())) + .isInstanceOf(IOException.class) + .hasMessageContaining("test exception"); } @Test - public void testSerializationError() { - try { - TypeInformation info = - new ValueTypeInfo(SerializationErrorType.class); + void testDeSerializationError() throws Exception { + TypeInformation info = + new ValueTypeInfo<>(DeserializeTooMuchType.class); - try { - new FromElementsFunction( + FromElementsFunction source = + new FromElementsFunction<>( info.createSerializer(new SerializerConfigImpl()), - new SerializationErrorType()); + new DeserializeTooMuchType()); - fail("should fail with an exception"); - } catch (IOException e) { - assertTrue(ExceptionUtils.stringifyException(e).contains("test exception")); - } - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } + assertThatThrownBy(() -> source.run(new ListSourceContext<>(new ArrayList<>()))) + .hasMessageContaining("user-defined serialization") + .isInstanceOf(IOException.class); } @Test - public void testDeSerializationError() { - try { - TypeInformation info = - new ValueTypeInfo(DeserializeTooMuchType.class); - - FromElementsFunction source = - new FromElementsFunction( - info.createSerializer(new SerializerConfigImpl()), - new DeserializeTooMuchType()); - - try { - source.run( - new ListSourceContext( - new ArrayList())); - fail("should fail with an exception"); - } catch (IOException e) { - assertTrue( - ExceptionUtils.stringifyException(e) - .contains("user-defined serialization")); - } - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } - } + void testCheckpointAndRestore() throws Exception { + final int numElements = 10000; - @Test - public void testCheckpointAndRestore() { - try { - final int numElements = 10000; + List data = new ArrayList(numElements); + List result = new ArrayList(numElements); - List data = new ArrayList(numElements); - List result = new ArrayList(numElements); + for (int i = 0; i < numElements; i++) { + data.add(i); + } - for (int i = 0; i < numElements; i++) { - data.add(i); - } + final FromElementsFunction source = + new 
FromElementsFunction<>(IntSerializer.INSTANCE, data); + StreamSource> src = new StreamSource<>(source); + AbstractStreamOperatorTestHarness testHarness = + new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0); + testHarness.open(); + + final SourceFunction.SourceContext ctx = + new ListSourceContext(result, 2L); + + // run the source asynchronously + CheckedThread runner = + new CheckedThread() { + @Override + public void go() throws Exception { + source.run(ctx); + } + }; + runner.start(); + + // wait for a bit + Thread.sleep(1000); + + // make a checkpoint + List checkpointData = new ArrayList<>(numElements); + OperatorSubtaskState handles = null; + synchronized (ctx.getCheckpointLock()) { + handles = testHarness.snapshot(566, System.currentTimeMillis()); + checkpointData.addAll(result); + } - final FromElementsFunction source = - new FromElementsFunction<>(IntSerializer.INSTANCE, data); - StreamSource> src = new StreamSource<>(source); - AbstractStreamOperatorTestHarness testHarness = - new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0); - testHarness.open(); - - final SourceFunction.SourceContext ctx = - new ListSourceContext(result, 2L); - - final Throwable[] error = new Throwable[1]; - - // run the source asynchronously - Thread runner = - new Thread() { - @Override - public void run() { - try { - source.run(ctx); - } catch (Throwable t) { - error[0] = t; - } - } - }; - runner.start(); - - // wait for a bit - Thread.sleep(1000); - - // make a checkpoint - List checkpointData = new ArrayList<>(numElements); - OperatorSubtaskState handles = null; - synchronized (ctx.getCheckpointLock()) { - handles = testHarness.snapshot(566, System.currentTimeMillis()); - checkpointData.addAll(result); - } + // cancel the source + source.cancel(); + runner.sync(); - // cancel the source - source.cancel(); - runner.join(); + final FromElementsFunction sourceCopy = + new FromElementsFunction<>(IntSerializer.INSTANCE, data); + StreamSource> srcCopy = + new StreamSource<>(sourceCopy); + AbstractStreamOperatorTestHarness testHarnessCopy = + new AbstractStreamOperatorTestHarness<>(srcCopy, 1, 1, 0); + testHarnessCopy.setup(); + testHarnessCopy.initializeState(handles); + testHarnessCopy.open(); - // check for errors - if (error[0] != null) { - System.err.println("Error in asynchronous source runner"); - error[0].printStackTrace(); - fail("Error in asynchronous source runner"); - } + // recovery run + SourceFunction.SourceContext newCtx = new ListSourceContext<>(checkpointData); - final FromElementsFunction sourceCopy = - new FromElementsFunction<>(IntSerializer.INSTANCE, data); - StreamSource> srcCopy = - new StreamSource<>(sourceCopy); - AbstractStreamOperatorTestHarness testHarnessCopy = - new AbstractStreamOperatorTestHarness<>(srcCopy, 1, 1, 0); - testHarnessCopy.setup(); - testHarnessCopy.initializeState(handles); - testHarnessCopy.open(); - - // recovery run - SourceFunction.SourceContext newCtx = new ListSourceContext<>(checkpointData); - - sourceCopy.run(newCtx); - - assertEquals(data, checkpointData); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } + sourceCopy.run(newCtx); + + assertThat(checkpointData).isEqualTo(data); } // ------------------------------------------------------------------------ diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/IngestionTimeExtractorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/IngestionTimeExtractorTest.java index a892d07f93d8a..81936cd272bc5 100644 
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/IngestionTimeExtractorTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/IngestionTimeExtractorTest.java
@@ -20,16 +20,15 @@ import org.apache.flink.streaming.api.watermark.Watermark;

-import org.junit.Test;
+import org.junit.jupiter.api.Test;

-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.assertj.core.api.Assertions.assertThat;

 /** Tests for {@link IngestionTimeExtractor}. */
-public class IngestionTimeExtractorTest {
+class IngestionTimeExtractorTest {

     @Test
-    public void testMonotonousTimestamps() {
+    void testMonotonousTimestamps() {
         AssignerWithPeriodicWatermarks<String> assigner = new IngestionTimeExtractor<>();

         long maxRecordSoFar = 0L;
@@ -38,22 +37,22 @@ public void testMonotonousTimestamps() {
         for (int i = 0; i < 1343; i++) {
             if (i % 7 == 1) {
                 Watermark mark = assigner.getCurrentWatermark();
-                assertNotNull(mark);
+                assertThat(mark).isNotNull();

                 // increasing watermarks
-                assertTrue(mark.getTimestamp() >= maxWatermarkSoFar);
+                assertThat(mark.getTimestamp()).isGreaterThanOrEqualTo(maxWatermarkSoFar);
                 maxWatermarkSoFar = mark.getTimestamp();

                 // tight watermarks
-                assertTrue(mark.getTimestamp() >= maxRecordSoFar - 1);
+                assertThat(mark.getTimestamp()).isGreaterThanOrEqualTo(maxRecordSoFar - 1);
             } else {
                 long next = assigner.extractTimestamp("a", Long.MIN_VALUE);

                 // increasing timestamps
-                assertTrue(next >= maxRecordSoFar);
+                assertThat(next).isGreaterThanOrEqualTo(maxRecordSoFar);

                 // timestamps are never below or at the watermark
-                assertTrue(next > maxWatermarkSoFar);
+                assertThat(next).isGreaterThan(maxWatermarkSoFar);

                 maxRecordSoFar = next;
             }
diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkFunctionTest.java
index 22351c9d0b1c5..a869b32ccf3ea 100644
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkFunctionTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkFunctionTest.java
@@ -22,17 +22,17 @@ import org.apache.flink.streaming.api.functions.sink.SinkContextUtil;
 import org.apache.flink.streaming.util.MockStreamingRuntimeContext;

-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;

 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;

-import static org.junit.Assert.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;

 /** Tests for the {@link PrintSinkFunction}.
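 *
 * <p>These tests capture console output by swapping the process-wide streams, so they should
 * not run in parallel with other tests that write to stdout. A minimal sketch of the capture
 * pattern used here (field names match this class):
 *
 * <pre>{@code
 * ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream();
 * System.setOut(new PrintStream(arrayOutputStream));
 * try {
 *     System.out.print("hello world!");
 *     assertThat(arrayOutputStream).hasToString("hello world!");
 * } finally {
 *     System.setOut(originalSystemOut);
 * }
 * }</pre>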
*/ -public class PrintSinkFunctionTest { +class PrintSinkFunctionTest { private final PrintStream originalSystemOut = System.out; private final PrintStream originalSystemErr = System.err; @@ -42,14 +42,14 @@ public class PrintSinkFunctionTest { private final String line = System.lineSeparator(); - @Before - public void setUp() { + @BeforeEach + void setUp() { System.setOut(new PrintStream(arrayOutputStream)); System.setErr(new PrintStream(arrayErrorStream)); } - @After - public void tearDown() { + @AfterEach + void tearDown() { if (System.out != originalSystemOut) { System.out.close(); } @@ -61,7 +61,7 @@ public void tearDown() { } @Test - public void testPrintSinkStdOut() throws Exception { + void testPrintSinkStdOut() throws Exception { PrintSinkFunction printSink = new PrintSinkFunction<>(); printSink.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0)); @@ -69,60 +69,60 @@ public void testPrintSinkStdOut() throws Exception { printSink.invoke("hello world!", SinkContextUtil.forTimestamp(0)); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("hello world!" + line); printSink.close(); } @Test - public void testPrintSinkStdErr() throws Exception { + void testPrintSinkStdErr() throws Exception { PrintSinkFunction printSink = new PrintSinkFunction<>(true); printSink.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0)); printSink.open(DefaultOpenContext.INSTANCE); printSink.invoke("hello world!", SinkContextUtil.forTimestamp(0)); - assertEquals("Print to System.err", printSink.toString()); - assertEquals("hello world!" + line, arrayErrorStream.toString()); + assertThat(printSink).hasToString("Print to System.err"); + assertThat(arrayErrorStream).hasToString("hello world!" + line); printSink.close(); } @Test - public void testPrintSinkWithPrefix() throws Exception { + void testPrintSinkWithPrefix() throws Exception { PrintSinkFunction printSink = new PrintSinkFunction<>(); printSink.setRuntimeContext(new MockStreamingRuntimeContext(false, 2, 1)); printSink.open(DefaultOpenContext.INSTANCE); printSink.invoke("hello world!", SinkContextUtil.forTimestamp(0)); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("2> hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("2> hello world!" + line); printSink.close(); } @Test - public void testPrintSinkWithIdentifierAndPrefix() throws Exception { + void testPrintSinkWithIdentifierAndPrefix() throws Exception { PrintSinkFunction printSink = new PrintSinkFunction<>("mySink", false); printSink.setRuntimeContext(new MockStreamingRuntimeContext(false, 2, 1)); printSink.open(DefaultOpenContext.INSTANCE); printSink.invoke("hello world!", SinkContextUtil.forTimestamp(0)); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("mySink:2> hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("mySink:2> hello world!" 
+ line); printSink.close(); } @Test - public void testPrintSinkWithIdentifierButNoPrefix() throws Exception { + void testPrintSinkWithIdentifierButNoPrefix() throws Exception { PrintSinkFunction printSink = new PrintSinkFunction<>("mySink", false); printSink.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0)); printSink.open(DefaultOpenContext.INSTANCE); printSink.invoke("hello world!", SinkContextUtil.forTimestamp(0)); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("mySink> hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("mySink> hello world!" + line); printSink.close(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkTest.java index 7ecaa123f33a9..c73ea0b55bdb0 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/PrintSinkTest.java @@ -46,7 +46,7 @@ import java.io.PrintStream; import java.util.OptionalLong; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for the {@link PrintSink}. */ class PrintSinkTest { @@ -84,8 +84,8 @@ void testPrintSinkStdOut() throws Exception { try (SinkWriter writer = printSink.createWriter(new MockInitContext(1))) { writer.write("hello world!", new MockContext()); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("hello world!" + line); } } @@ -96,8 +96,8 @@ void testPrintSinkStdErr() throws Exception { try (SinkWriter writer = printSink.createWriter(new MockInitContext(1))) { writer.write("hello world!", new MockContext()); - assertEquals("Print to System.err", printSink.toString()); - assertEquals("hello world!" + line, arrayErrorStream.toString()); + assertThat(printSink).hasToString("Print to System.err"); + assertThat(arrayErrorStream).hasToString("hello world!" + line); } } @@ -108,8 +108,8 @@ void testPrintSinkStdErrWithIdentifier() throws Exception { try (SinkWriter writer = printSink.createWriter(new MockInitContext(1))) { writer.write("hello world!", new MockContext()); - assertEquals("Print to System.err", printSink.toString()); - assertEquals("mySink> hello world!" + line, arrayErrorStream.toString()); + assertThat(printSink).hasToString("Print to System.err"); + assertThat(arrayErrorStream).hasToString("mySink> hello world!" + line); } } @@ -120,8 +120,8 @@ void testPrintSinkWithPrefix() throws Exception { try (SinkWriter writer = printSink.createWriter(new MockInitContext(2))) { writer.write("hello world!", new MockContext()); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("1> hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("1> hello world!" 
+ line); } } @@ -132,8 +132,8 @@ void testPrintSinkWithIdentifierAndPrefix() throws Exception { try (SinkWriter writer = printSink.createWriter(new MockInitContext(2))) { writer.write("hello world!", new MockContext()); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("mySink:1> hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("mySink:1> hello world!" + line); } } @@ -144,8 +144,8 @@ void testPrintSinkWithIdentifierButNoPrefix() throws Exception { try (SinkWriter writer = printSink.createWriter(new MockInitContext(1))) { writer.write("hello world!", new MockContext()); - assertEquals("Print to System.out", printSink.toString()); - assertEquals("mySink> hello world!" + line, arrayOutputStream.toString()); + assertThat(printSink).hasToString("Print to System.out"); + assertThat(arrayOutputStream).hasToString("mySink> hello world!" + line); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/StatefulSequenceSourceTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/StatefulSequenceSourceTest.java index ca0571a8f908a..83ad164a4feca 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/StatefulSequenceSourceTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/StatefulSequenceSourceTest.java @@ -18,29 +18,29 @@ package org.apache.flink.streaming.api.functions; +import org.apache.flink.core.testutils.CheckedThread; import org.apache.flink.core.testutils.OneShotLatch; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; -import org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.streaming.api.functions.source.StatefulSequenceSource; import org.apache.flink.streaming.api.operators.StreamSource; -import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; +import org.apache.flink.streaming.util.BlockingSourceContext; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import static org.assertj.core.api.Assertions.assertThat; + /** Tests for {@link StatefulSequenceSource}. 
 */
-public class StatefulSequenceSourceTest {
+class StatefulSequenceSourceTest {
 
     @Test
-    public void testCheckpointRestore() throws Exception {
+    void testCheckpointRestore() throws Exception {
        final int initElement = 0;
        final int maxElement = 100;
        final int maxParallelsim = 2;
@@ -70,45 +70,25 @@ public void testCheckpointRestore() throws Exception {
                 new AbstractStreamOperatorTestHarness<>(src2, maxParallelsim, 2, 1);
         testHarness2.open();
 
-        final Throwable[] error = new Throwable[3];
-
         // run the source asynchronously
-        Thread runner1 =
-                new Thread() {
+        CheckedThread runner1 =
+                new CheckedThread() {
                     @Override
-                    public void run() {
-                        try {
-                            source1.run(
-                                    new BlockingSourceContext<>(
-                                            "1",
-                                            latchToTrigger1,
-                                            latchToWait1,
-                                            outputCollector,
-                                            21));
-                        } catch (Throwable t) {
-                            error[0] = t;
-                        }
+                    public void go() throws Exception {
+                        source1.run(
+                                new BlockingSourceContext<>(
+                                        "1", latchToTrigger1, latchToWait1, outputCollector, 21));
                     }
                 };
 
         // run the source asynchronously
-        Thread runner2 =
-                new Thread() {
+        CheckedThread runner2 =
+                new CheckedThread() {
                     @Override
-                    public void run() {
-                        try {
-                            source2.run(
-                                    new BlockingSourceContext<>(
-                                            "2",
-                                            latchToTrigger2,
-                                            latchToWait2,
-                                            outputCollector,
-                                            32));
-                        } catch (Throwable t) {
-                            t.printStackTrace();
-                            error[1] = t;
-                        }
+                    public void go() throws Exception {
+                        source2.run(
+                                new BlockingSourceContext<>(
+                                        "2", latchToTrigger2, latchToWait2, outputCollector, 32));
                     }
                 };
 
@@ -145,28 +125,19 @@ public void run() {
         latchToWait3.trigger();
 
         // run the source asynchronously
-        Thread runner3 =
-                new Thread() {
+        CheckedThread runner3 =
+                new CheckedThread() {
                     @Override
-                    public void run() {
-                        try {
-                            source3.run(
-                                    new BlockingSourceContext<>(
-                                            "3",
-                                            latchToTrigger3,
-                                            latchToWait3,
-                                            outputCollector,
-                                            3));
-                        } catch (Throwable t) {
-                            t.printStackTrace();
-                            error[2] = t;
-                        }
+                    public void go() throws Exception {
+                        source3.run(
+                                new BlockingSourceContext<>(
+                                        "3", latchToTrigger3, latchToWait3, outputCollector, 3));
                     }
                 };
         runner3.start();
-        runner3.join();
+        runner3.sync();
 
-        Assert.assertEquals(3, outputCollector.size()); // we have 3 tasks.
+        assertThat(outputCollector).hasSize(3); // we have 3 tasks.
 
         // test for at-most-once
         Set<Long> dedupRes = new HashSet<>(Math.abs(maxElement - initElement) + 1);
@@ -175,101 +146,25 @@ public void run() {
             List<Long> elements = outputCollector.get(key);
 
             // this tests the correctness of the latches in the test
-            Assert.assertTrue(elements.size() > 0);
+            assertThat(elements).isNotEmpty();
 
             for (Long elem : elements) {
-                if (!dedupRes.add(elem)) {
-                    Assert.fail("Duplicate entry: " + elem);
-                }
+                assertThat(dedupRes.add(elem)).as("Duplicate entry: " + elem).isTrue();
 
-                if (!expectedOutput.contains(elem)) {
-                    Assert.fail("Unexpected element: " + elem);
-                }
+                assertThat(expectedOutput.contains(elem))
+                        .as("Unexpected element: " + elem)
+                        .isTrue();
             }
         }
 
         // test for exactly-once
-        Assert.assertEquals(Math.abs(initElement - maxElement) + 1, dedupRes.size());
+        assertThat(dedupRes).hasSize(Math.abs(initElement - maxElement) + 1);
 
         latchToWait1.trigger();
         latchToWait2.trigger();
 
         // wait for everybody to finish.
-        runner1.join();
-        runner2.join();
-    }
-
-    /** Test SourceContext.
*/ - public static class BlockingSourceContext implements SourceFunction.SourceContext { - - private final String name; - - private final Object lock; - private final OneShotLatch latchToTrigger; - private final OneShotLatch latchToWait; - private final ConcurrentHashMap> collector; - - private final int threshold; - private int counter = 0; - - private final List localOutput; - - public BlockingSourceContext( - String name, - OneShotLatch latchToTrigger, - OneShotLatch latchToWait, - ConcurrentHashMap> output, - int elemToFire) { - this.name = name; - this.lock = new Object(); - this.latchToTrigger = latchToTrigger; - this.latchToWait = latchToWait; - this.collector = output; - this.threshold = elemToFire; - - this.localOutput = new ArrayList<>(); - List prev = collector.put(name, localOutput); - if (prev != null) { - Assert.fail(); - } - } - - @Override - public void collectWithTimestamp(T element, long timestamp) { - collect(element); - } - - @Override - public void collect(T element) { - localOutput.add(element); - if (++counter == threshold) { - latchToTrigger.trigger(); - try { - if (!latchToWait.isTriggered()) { - latchToWait.await(); - } - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - } - - @Override - public void emitWatermark(Watermark mark) { - throw new UnsupportedOperationException(); - } - - @Override - public void markAsTemporarilyIdle() { - throw new UnsupportedOperationException(); - } - - @Override - public Object getCheckpointLock() { - return lock; - } - - @Override - public void close() {} + runner1.sync(); + runner2.sync(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/async/RichAsyncFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/async/RichAsyncFunctionTest.java index 180fe4570d01e..076cb1d66c00a 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/async/RichAsyncFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/async/RichAsyncFunctionTest.java @@ -21,7 +21,6 @@ import org.apache.flink.api.common.TaskInfo; import org.apache.flink.api.common.accumulators.Accumulator; import org.apache.flink.api.common.functions.AggregateFunction; -import org.apache.flink.api.common.functions.BroadcastVariableInitializer; import org.apache.flink.api.common.functions.IterationRuntimeContext; import org.apache.flink.api.common.functions.ReduceFunction; import org.apache.flink.api.common.functions.RuntimeContext; @@ -33,25 +32,25 @@ import org.apache.flink.metrics.groups.OperatorMetricGroup; import org.apache.flink.metrics.groups.UnregisteredMetricsGroup; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.HashMap; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** Test cases for {@link RichAsyncFunction}. */ -public class RichAsyncFunctionTest { +class RichAsyncFunctionTest { /** * Test the set of iteration runtime context methods in the context of a {@link * RichAsyncFunction}. 
*/ @Test - public void testIterationRuntimeContext() throws Exception { + void testIterationRuntimeContext() { RichAsyncFunction function = new RichAsyncFunction() { private static final long serialVersionUID = -2023923961609455894L; @@ -71,27 +70,18 @@ public void asyncInvoke(Integer input, ResultFuture resultFuture) IterationRuntimeContext iterationRuntimeContext = function.getIterationRuntimeContext(); - assertEquals(superstepNumber, iterationRuntimeContext.getSuperstepNumber()); - - try { - iterationRuntimeContext.getIterationAggregator("foobar"); - fail("Expected getIterationAggregator to fail with unsupported operation exception"); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - iterationRuntimeContext.getPreviousIterationAggregate("foobar"); - fail( - "Expected getPreviousIterationAggregator to fail with unsupported operation exception"); - } catch (UnsupportedOperationException e) { - // expected - } + assertThat(iterationRuntimeContext.getSuperstepNumber()).isEqualTo(superstepNumber); + + assertThatThrownBy(() -> iterationRuntimeContext.getIterationAggregator("foobar")) + .isInstanceOf(UnsupportedOperationException.class); + + assertThatThrownBy(() -> iterationRuntimeContext.getPreviousIterationAggregate("foobar")) + .isInstanceOf(UnsupportedOperationException.class); } /** Test the set of runtime context methods in the context of a {@link RichAsyncFunction}. */ @Test - public void testRuntimeContext() throws Exception { + void testRuntimeContext() { RichAsyncFunction function = new RichAsyncFunction() { private static final long serialVersionUID = 1707630162838967972L; @@ -132,190 +122,149 @@ public void asyncInvoke(Integer input, ResultFuture resultFuture) RuntimeContext runtimeContext = function.getRuntimeContext(); - assertEquals(taskName, runtimeContext.getTaskInfo().getTaskName()); - assertEquals(metricGroup, runtimeContext.getMetricGroup()); - assertEquals( - numberOfParallelSubtasks, - runtimeContext.getTaskInfo().getNumberOfParallelSubtasks()); - assertEquals(indexOfSubtask, runtimeContext.getTaskInfo().getIndexOfThisSubtask()); - assertEquals(attemptNumber, runtimeContext.getTaskInfo().getAttemptNumber()); - assertEquals(taskNameWithSubtask, runtimeContext.getTaskInfo().getTaskNameWithSubtasks()); - assertEquals(globalJobParameters, runtimeContext.getGlobalJobParameters()); - assertEquals(isObjectReused, runtimeContext.isObjectReuseEnabled()); - assertEquals(userCodeClassLoader, runtimeContext.getUserCodeClassLoader()); - - try { - runtimeContext.getDistributedCache(); - fail("Expected getDistributedCached to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getState(new ValueStateDescriptor<>("foobar", Integer.class, 42)); - fail("Expected getState to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getListState(new ListStateDescriptor<>("foobar", Integer.class)); - fail("Expected getListState to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getReducingState( - new ReducingStateDescriptor<>( - "foobar", - new ReduceFunction() { - private static final long serialVersionUID = 2136425961884441050L; - - @Override - public Integer reduce(Integer value1, Integer value2) - throws Exception { - return value1; - } - }, - Integer.class)); - fail("Expected getReducingState to fail with 
unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getAggregatingState( - new AggregatingStateDescriptor<>( - "foobar", - new AggregateFunction() { - - @Override - public Integer createAccumulator() { - return null; - } - - @Override - public Integer add(Integer value, Integer accumulator) { - return null; - } - - @Override - public Integer getResult(Integer accumulator) { - return null; - } - - @Override - public Integer merge(Integer a, Integer b) { - return null; - } - }, - Integer.class)); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getMapState( - new MapStateDescriptor<>("foobar", Integer.class, String.class)); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.addAccumulator( - "foobar", - new Accumulator() { - private static final long serialVersionUID = -4673320336846482358L; - - @Override - public void add(Integer value) { - // no op - } - - @Override - public Integer getLocalValue() { - return null; - } - - @Override - public void resetLocal() {} - - @Override - public void merge(Accumulator other) {} - - @Override - public Accumulator clone() { - return null; - } - }); - fail("Expected addAccumulator to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getAccumulator("foobar"); - fail("Expected getAccumulator to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getIntCounter("foobar"); - fail("Expected getIntCounter to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getLongCounter("foobar"); - fail("Expected getLongCounter to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getDoubleCounter("foobar"); - fail("Expected getDoubleCounter to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getHistogram("foobar"); - fail("Expected getHistogram to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.hasBroadcastVariable("foobar"); - fail("Expected hasBroadcastVariable to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getBroadcastVariable("foobar"); - fail("Expected getBroadcastVariable to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } - - try { - runtimeContext.getBroadcastVariableWithInitializer( - "foobar", - new BroadcastVariableInitializer() { - @Override - public Object initializeBroadcastVariable(Iterable data) { - return null; - } - }); - fail( - "Expected getBroadcastVariableWithInitializer to fail with unsupported operation exception."); - } catch (UnsupportedOperationException e) { - // expected - } + assertThat(runtimeContext.getTaskInfo().getTaskName()).isEqualTo(taskName); + assertThat(runtimeContext.getMetricGroup()).isEqualTo(metricGroup); + assertThat(runtimeContext.getTaskInfo().getNumberOfParallelSubtasks()) + .isEqualTo(numberOfParallelSubtasks); + assertThat(runtimeContext.getTaskInfo().getIndexOfThisSubtask()).isEqualTo(indexOfSubtask); + 
assertThat(runtimeContext.getTaskInfo().getAttemptNumber()).isEqualTo(attemptNumber);
+        assertThat(runtimeContext.getTaskInfo().getTaskNameWithSubtasks())
+                .isEqualTo(taskNameWithSubtask);
+        assertThat(runtimeContext.getGlobalJobParameters()).isEqualTo(globalJobParameters);
+        assertThat(runtimeContext.isObjectReuseEnabled()).isEqualTo(isObjectReused);
+        assertThat(runtimeContext.getUserCodeClassLoader()).isEqualTo(userCodeClassLoader);
+
+        assertThatThrownBy(runtimeContext::getDistributedCache)
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(
+                        () ->
+                                runtimeContext.getState(
+                                        new ValueStateDescriptor<>("foobar", Integer.class, 42)))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(
+                        () ->
+                                runtimeContext.getListState(
+                                        new ListStateDescriptor<>("foobar", Integer.class)))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(
+                        () ->
+                                runtimeContext.getReducingState(
+                                        new ReducingStateDescriptor<>(
+                                                "foobar",
+                                                new ReduceFunction<Integer>() {
+                                                    private static final long serialVersionUID =
+                                                            2136425961884441050L;
+
+                                                    @Override
+                                                    public Integer reduce(
+                                                            Integer value1, Integer value2) {
+                                                        return value1;
+                                                    }
+                                                },
+                                                Integer.class)))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(
+                        () ->
+                                runtimeContext.getAggregatingState(
+                                        new AggregatingStateDescriptor<>(
+                                                "foobar",
+                                                new AggregateFunction<Integer, Integer, Integer>() {
+
+                                                    @Override
+                                                    public Integer createAccumulator() {
+                                                        return null;
+                                                    }
+
+                                                    @Override
+                                                    public Integer add(
+                                                            Integer value, Integer accumulator) {
+                                                        return null;
+                                                    }
+
+                                                    @Override
+                                                    public Integer getResult(Integer accumulator) {
+                                                        return null;
+                                                    }
+
+                                                    @Override
+                                                    public Integer merge(Integer a, Integer b) {
+                                                        return null;
+                                                    }
+                                                },
+                                                Integer.class)))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(
+                        () ->
+                                runtimeContext.getMapState(
+                                        new MapStateDescriptor<>(
+                                                "foobar", Integer.class, String.class)))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(
+                        () ->
+                                runtimeContext.addAccumulator(
+                                        "foobar",
+                                        new Accumulator<Integer, Integer>() {
+                                            private static final long serialVersionUID =
+                                                    -4673320336846482358L;
+
+                                            @Override
+                                            public void add(Integer value) {
+                                                // no op
+                                            }
+
+                                            @Override
+                                            public Integer getLocalValue() {
+                                                return null;
+                                            }
+
+                                            @Override
+                                            public void resetLocal() {}
+
+                                            @Override
+                                            public void merge(
+                                                    Accumulator<Integer, Integer> other) {}
+
+                                            @Override
+                                            public Accumulator<Integer, Integer> clone() {
+                                                return null;
+                                            }
+                                        }))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(() -> runtimeContext.getAccumulator("foobar"))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(() -> runtimeContext.getIntCounter("foobar"))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(() -> runtimeContext.getLongCounter("foobar"))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(() -> runtimeContext.getDoubleCounter("foobar"))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(() -> runtimeContext.getHistogram("foobar"))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(() -> runtimeContext.hasBroadcastVariable("foobar"))
+                .isInstanceOf(UnsupportedOperationException.class);
+
+        assertThatThrownBy(() -> runtimeContext.getBroadcastVariable("foobar"))
+
.isInstanceOf(UnsupportedOperationException.class); + + assertThatThrownBy( + () -> + runtimeContext.getBroadcastVariableWithInitializer( + "foobar", data -> null)) + .isInstanceOf(UnsupportedOperationException.class); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/OutputFormatSinkFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/OutputFormatSinkFunctionTest.java index 43406ff988565..83726339f75fe 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/OutputFormatSinkFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/OutputFormatSinkFunctionTest.java @@ -22,14 +22,14 @@ import org.apache.flink.api.common.io.OutputFormat; import org.apache.flink.api.common.io.RichOutputFormat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; /** Tests for {@link OutputFormatSinkFunction}. */ -public class OutputFormatSinkFunctionTest { +class OutputFormatSinkFunctionTest { @Test - public void setRuntimeContext() throws Exception { + void setRuntimeContext() { RuntimeContext mockRuntimeContext = Mockito.mock(RuntimeContext.class); // Make sure setRuntimeContext of the rich output format is called diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/SocketClientSinkTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/SocketClientSinkTest.java index a5a52e272fa7e..c41f2c138cd67 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/SocketClientSinkTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/SocketClientSinkTest.java @@ -21,12 +21,12 @@ import org.apache.flink.api.common.functions.DefaultOpenContext; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.ConfigConstants; +import org.apache.flink.core.testutils.CheckedThread; import org.apache.flink.util.NetUtils; -import org.apache.flink.util.TestLogger; import org.apache.commons.io.IOUtils; import org.junit.AssumptionViolatedException; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.BufferedReader; import java.io.IOException; @@ -39,15 +39,12 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import java.util.concurrent.atomic.AtomicReference; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for the {@link org.apache.flink.streaming.api.functions.sink.SocketClientSink}. 
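
The RichAsyncFunctionTest hunks above illustrate the patch's standard replacement for the JUnit 4 try / fail / catch idiom: AssertJ's assertThatThrownBy takes the throwing call as a lambda and fails the test if nothing is thrown or if the wrong type is thrown. A compact, self-contained sketch of the equivalence (op() and its message are illustrative only):

    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    public class ThrownAssertionSketch {

        private static void op() {
            throw new UnsupportedOperationException("state is not supported");
        }

        public static void main(String[] args) {
            // JUnit 4 idiom:
            //   try { op(); fail("expected exception"); }
            //   catch (UnsupportedOperationException expected) { /* ok */ }
            // AssertJ idiom, as used throughout this patch:
            assertThatThrownBy(ThrownAssertionSketch::op)
                    .isInstanceOf(UnsupportedOperationException.class)
                    .hasMessageContaining("not supported");
        }
    }
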
*/ -@SuppressWarnings("serial") -public class SocketClientSinkTest extends TestLogger { +class SocketClientSinkTest { private static final String TEST_MESSAGE = "testSocketSinkInvoke"; @@ -65,25 +62,19 @@ public byte[] serialize(String element) { }; @Test - public void testSocketSink() throws Exception { + void testSocketSink() throws Exception { final ServerSocket server = new ServerSocket(0); final int port = server.getLocalPort(); - final AtomicReference error = new AtomicReference(); - - Thread sinkRunner = - new Thread("Test sink runner") { + CheckedThread sinkRunner = + new CheckedThread("Test sink runner") { @Override - public void run() { - try { - SocketClientSink simpleSink = - new SocketClientSink<>(host, port, simpleSchema, 0); - simpleSink.open(DefaultOpenContext.INSTANCE); - simpleSink.invoke(TEST_MESSAGE + '\n', SinkContextUtil.forTimestamp(0)); - simpleSink.close(); - } catch (Throwable t) { - error.set(t); - } + public void go() throws Exception { + SocketClientSink simpleSink = + new SocketClientSink<>(host, port, simpleSchema, 0); + simpleSink.open(DefaultOpenContext.INSTANCE); + simpleSink.invoke(TEST_MESSAGE + '\n', SinkContextUtil.forTimestamp(0)); + simpleSink.close(); } }; @@ -94,20 +85,14 @@ public void run() { String value = rdr.readLine(); - sinkRunner.join(); + sinkRunner.sync(); server.close(); - if (error.get() != null) { - Throwable t = error.get(); - t.printStackTrace(); - fail("Error in spawned thread: " + t.getMessage()); - } - - assertEquals(TEST_MESSAGE, value); + assertThat(value).isEqualTo(TEST_MESSAGE); } @Test - public void testSinkAutoFlush() throws Exception { + void testSinkAutoFlush() throws Exception { final ServerSocket server = new ServerSocket(0); final int port = server.getLocalPort(); @@ -115,19 +100,13 @@ public void testSinkAutoFlush() throws Exception { new SocketClientSink<>(host, port, simpleSchema, 0, true); simpleSink.open(DefaultOpenContext.INSTANCE); - final AtomicReference error = new AtomicReference(); - - Thread sinkRunner = - new Thread("Test sink runner") { + CheckedThread sinkRunner = + new CheckedThread("Test sink runner") { @Override - public void run() { - try { - // need two messages here: send a fin to cancel the client - // state:FIN_WAIT_2 while the server is CLOSE_WAIT - simpleSink.invoke(TEST_MESSAGE + '\n', SinkContextUtil.forTimestamp(0)); - } catch (Throwable t) { - error.set(t); - } + public void go() throws Exception { + // need two messages here: send a fin to cancel the client + // state:FIN_WAIT_2 while the server is CLOSE_WAIT + simpleSink.invoke(TEST_MESSAGE + '\n', SinkContextUtil.forTimestamp(0)); } }; @@ -137,38 +116,26 @@ public void run() { BufferedReader rdr = new BufferedReader(new InputStreamReader(sk.getInputStream())); String value = rdr.readLine(); - sinkRunner.join(); + sinkRunner.sync(); simpleSink.close(); server.close(); - if (error.get() != null) { - Throwable t = error.get(); - t.printStackTrace(); - fail("Error in spawned thread: " + t.getMessage()); - } - - assertEquals(TEST_MESSAGE, value); + assertThat(value).isEqualTo(TEST_MESSAGE); } @Test - public void testSocketSinkNoRetry() throws Exception { + void testSocketSinkNoRetry() throws Exception { final ServerSocket server = new ServerSocket(0); final int port = server.getLocalPort(); try { - final AtomicReference error = new AtomicReference(); - - Thread serverRunner = - new Thread("Test server runner") { + CheckedThread serverRunner = + new CheckedThread("Test server runner") { @Override - public void run() { - try { - Socket sk = 
NetUtils.acceptWithoutTimeout(server);
-                            sk.close();
-                        } catch (Throwable t) {
-                            error.set(t);
-                        }
+                    public void go() throws Exception {
+                        Socket sk = NetUtils.acceptWithoutTimeout(server);
+                        sk.close();
                     }
                 };
             serverRunner.start();
@@ -178,36 +145,33 @@ public void run() {
             simpleSink.open(DefaultOpenContext.INSTANCE);
 
             // wait socket server to close
-            serverRunner.join();
-            if (error.get() != null) {
-                Throwable t = error.get();
-                t.printStackTrace();
-                fail("Error in server thread: " + t.getMessage());
-            }
-
-            try {
-                // socket should be closed, so this should trigger a re-try
-                // need two messages here: send a fin to cancel the client state:FIN_WAIT_2 while
-                // the server is CLOSE_WAIT
-                while (true) { // we have to do this more often as the server side closed is not
-                    // guaranteed to be noticed immediately
-                    simpleSink.invoke(TEST_MESSAGE + '\n', SinkContextUtil.forTimestamp(0));
-                }
-            } catch (IOException e) {
-                // check whether throw a exception that reconnect failed.
-                assertTrue("Wrong exception", e.getMessage().contains(EXCEPTION_MESSGAE));
-            } catch (Exception e) {
-                fail("wrong exception: " + e.getClass().getName() + " - " + e.getMessage());
-            }
-
-            assertEquals(0, simpleSink.getCurrentNumberOfRetries());
+            serverRunner.sync();
+
+            assertThatThrownBy(
+                            () -> {
+                                // the socket should be closed, so this should trigger a re-try;
+                                // we need two messages here: send a fin to cancel the client
+                                // state FIN_WAIT_2 while the server is CLOSE_WAIT
+                                while (true) {
+                                    // loop, as the server-side close is not guaranteed to be
+                                    // noticed immediately
+                                    simpleSink.invoke(
+                                            TEST_MESSAGE + '\n', SinkContextUtil.forTimestamp(0));
+                                }
+                            })
+                    // check that an exception is thrown because the reconnect failed
+                    .isInstanceOf(IOException.class)
+                    .hasMessageContaining(EXCEPTION_MESSGAE);
+
+            assertThat(simpleSink.getCurrentNumberOfRetries()).isZero();
         } finally {
             IOUtils.closeQuietly(server);
         }
     }
 
     @Test
-    public void testRetry() throws Exception {
+    void testRetry() throws Exception {
         final ServerSocket[] serverSocket = new ServerSocket[1];
         final ExecutorService[] executor = new ExecutorService[1];
 
@@ -229,7 +193,7 @@ public Void call() throws Exception {
                                             new InputStreamReader(socket.getInputStream()));
                             String value = reader.readLine();
-                            assertEquals("0", value);
+                            assertThat(value).isEqualTo("0");
 
                             socket.close();
                             return null;
@@ -255,10 +219,10 @@ public Void call() throws Exception {
         // Shutdown the server socket
         serverSocket[0].close();
-        assertTrue(serverSocket[0].isClosed());
+        assertThat(serverSocket[0].isClosed()).isTrue();
 
         // No retries expected at this point
-        assertEquals(0, sink.getCurrentNumberOfRetries());
+        assertThat(sink.getCurrentNumberOfRetries()).isZero();
 
         final CountDownLatch retryLatch = new CountDownLatch(1);
         final CountDownLatch again = new CountDownLatch(1);
@@ -305,7 +269,7 @@ public Void call() throws Exception {
 
         // Wait for the reconnect
         String value = reader.readLine();
-        assertEquals("1", value);
+        assertThat(value).isEqualTo("1");
 
         // OK the sink re-connected.
:) } finally { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TransactionHolderTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TransactionHolderTest.java index e0469de65b7e3..7d0a7640a6f1e 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TransactionHolderTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TransactionHolderTest.java @@ -19,23 +19,22 @@ import org.apache.flink.streaming.api.functions.sink.TwoPhaseCommitSinkFunction.TransactionHolder; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.time.Clock; import java.time.Instant; import java.time.ZoneOffset; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; +import static org.assertj.core.api.Assertions.assertThat; /** Unit tests {@link TransactionHolder}. */ -public class TransactionHolderTest { +class TransactionHolderTest { @Test - public void testElapsedTime() { + void testElapsedTime() { final long elapsedTime = new TransactionHolder<>(new Object(), 0) .elapsedTime(Clock.fixed(Instant.ofEpochMilli(1000), ZoneOffset.UTC)); - assertThat(elapsedTime, equalTo(1000L)); + assertThat(elapsedTime).isEqualTo(1000L); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TwoPhaseCommitSinkFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TwoPhaseCommitSinkFunctionTest.java index d2a5f8e0a5293..9361edbf44097 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TwoPhaseCommitSinkFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/TwoPhaseCommitSinkFunctionTest.java @@ -25,15 +25,14 @@ import org.apache.flink.streaming.api.operators.StreamSink; import org.apache.flink.streaming.util.ContentDump; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; -import org.apache.flink.testutils.logging.TestLoggerResource; +import org.apache.flink.testutils.logging.LoggerAuditingExtension; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import org.slf4j.event.Level; -import java.io.IOException; import java.time.Clock; import java.time.Instant; import java.time.ZoneId; @@ -45,14 +44,11 @@ import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.hasItem; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for {@link TwoPhaseCommitSinkFunction}. 
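
The following file trades a JUnit 4 @Rule (TestLoggerResource) for a JUnit 5 extension registered via @RegisterExtension. A rough sketch of the usage, under the assumption that LoggerAuditingExtension captures messages logged under the logger of the class handed to it, which is what the migrated test relies on (the logger call and message below are illustrative):

    import org.apache.flink.testutils.logging.LoggerAuditingExtension;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.RegisterExtension;
    import org.slf4j.LoggerFactory;
    import org.slf4j.event.Level;

    import static org.assertj.core.api.Assertions.assertThat;

    class LoggerAuditingSketchTest {

        // records WARN-level messages for the duration of each test
        @RegisterExtension
        final LoggerAuditingExtension warnLogs =
                new LoggerAuditingExtension(LoggerAuditingSketchTest.class, Level.WARN);

        @Test
        void capturesWarning() {
            LoggerFactory.getLogger(LoggerAuditingSketchTest.class).warn("transaction timeout");
            assertThat(warnLogs.getMessages()).anyMatch(m -> m.contains("timeout"));
        }
    }
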
*/ -public class TwoPhaseCommitSinkFunctionTest { +class TwoPhaseCommitSinkFunctionTest { private ContentDumpSinkFunction sinkFunction; @@ -66,12 +62,12 @@ public class TwoPhaseCommitSinkFunctionTest { private SettableClock clock; - @Rule - public final TestLoggerResource testLoggerResource = - new TestLoggerResource(TwoPhaseCommitSinkFunction.class, Level.WARN); + @RegisterExtension + private LoggerAuditingExtension testLoggerResource = + new LoggerAuditingExtension(TwoPhaseCommitSinkFunction.class, Level.WARN); - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() throws Exception { targetDirectory = new ContentDump(); tmpDirectory = new ContentDump(); clock = new SettableClock(); @@ -79,8 +75,8 @@ public void setUp() throws Exception { setUpTestHarness(); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { closeTestHarness(); } @@ -101,7 +97,7 @@ private void closeTestHarness() throws Exception { * completes first. See FLINK-10377 and FLINK-14979 for more details. */ @Test - public void testSubsumedNotificationOfPreviousCheckpoint() throws Exception { + void testSubsumedNotificationOfPreviousCheckpoint() throws Exception { harness.open(); harness.processElement("42", 0); harness.snapshot(0, 1); @@ -113,11 +109,11 @@ public void testSubsumedNotificationOfPreviousCheckpoint() throws Exception { harness.notifyOfCompletedCheckpoint(1); assertExactlyOnce(Arrays.asList("42", "43", "44")); - assertEquals(1, tmpDirectory.listFiles().size()); // one for currentTransaction + assertThat(tmpDirectory.listFiles()).hasSize(1); // one for currentTransaction } @Test - public void testNoTransactionAfterSinkFunctionFinish() throws Exception { + void testNoTransactionAfterSinkFunctionFinish() throws Exception { harness.open(); harness.processElement("42", 0); harness.snapshot(0, 1); @@ -134,23 +130,18 @@ public void testNoTransactionAfterSinkFunctionFinish() throws Exception { // make sure the previous empty transaction will not be pre-committed harness.snapshot(3, 6); - try { - harness.processElement("45", 7); - fail( - "TwoPhaseCommitSinkFunctionTest should not process any more input data after finish!"); - } catch (NullPointerException e) { - // expected and do nothing here - } + assertThatThrownBy(() -> harness.processElement("45", 7)) + .isInstanceOf(NullPointerException.class); // Checkpoint2 has not complete assertExactlyOnce(Arrays.asList("42", "43")); // transaction for checkpoint2 - assertEquals(1, tmpDirectory.listFiles().size()); + assertThat(tmpDirectory.listFiles()).hasSize(1); } @Test - public void testRecoverFromStateAfterFinished() throws Exception { + void testRecoverFromStateAfterFinished() throws Exception { harness.open(); harness.processElement("42", 0); sinkFunction.finish(); @@ -162,11 +153,11 @@ public void testRecoverFromStateAfterFinished() throws Exception { harness.initializeState(operatorSubtaskState); harness.open(); - assertEquals(0, sinkFunction.abortedTransactions.size()); + assertThat(sinkFunction.abortedTransactions).isEmpty(); } @Test - public void testNotifyOfCompletedCheckpoint() throws Exception { + void testNotifyOfCompletedCheckpoint() throws Exception { harness.open(); harness.processElement("42", 0); harness.snapshot(0, 1); @@ -177,15 +168,12 @@ public void testNotifyOfCompletedCheckpoint() throws Exception { harness.notifyOfCompletedCheckpoint(1); assertExactlyOnce(Arrays.asList("42", "43")); - assertEquals( - 2, - tmpDirectory - .listFiles() - .size()); // one for checkpointId 2 and 
second for the currentTransaction + // one for checkpointId 2 and second for the currentTransaction + assertThat(tmpDirectory.listFiles()).hasSize(2); } @Test - public void testFailBeforeNotify() throws Exception { + void testFailBeforeNotify() throws Exception { harness.open(); harness.processElement("42", 0); harness.snapshot(0, 1); @@ -193,16 +181,14 @@ public void testFailBeforeNotify() throws Exception { OperatorSubtaskState snapshot = harness.snapshot(1, 3); tmpDirectory.setWritable(false); - try { - harness.processElement("44", 4); - harness.snapshot(2, 5); - fail("something should fail"); - } catch (Exception ex) { - if (!(ex.getCause() instanceof ContentDump.NotWritableException)) { - throw ex; - } - // ignore - } + + assertThatThrownBy( + () -> { + harness.processElement("44", 4); + harness.snapshot(2, 5); + }) + .hasCauseInstanceOf(ContentDump.NotWritableException.class); + closeTestHarness(); tmpDirectory.setWritable(true); @@ -213,11 +199,11 @@ public void testFailBeforeNotify() throws Exception { assertExactlyOnce(Arrays.asList("42", "43")); closeTestHarness(); - assertEquals(0, tmpDirectory.listFiles().size()); + assertThat(tmpDirectory.listFiles()).isEmpty(); } @Test - public void testIgnoreCommitExceptionDuringRecovery() throws Exception { + void testIgnoreCommitExceptionDuringRecovery() throws Exception { clock.setEpochMilli(0); harness.open(); @@ -235,12 +221,9 @@ public void testIgnoreCommitExceptionDuringRecovery() throws Exception { sinkFunction.setTransactionTimeout(transactionTimeout); sinkFunction.ignoreFailuresAfterTransactionTimeout(); - try { - harness.initializeState(snapshot); - fail("Expected exception not thrown"); - } catch (RuntimeException e) { - assertEquals("Expected exception", e.getMessage()); - } + assertThatThrownBy(() -> harness.initializeState(snapshot)) + .isInstanceOf(RuntimeException.class) + .hasMessage("Expected exception"); clock.setEpochMilli(transactionTimeout + 1); harness.initializeState(snapshot); @@ -249,7 +232,7 @@ public void testIgnoreCommitExceptionDuringRecovery() throws Exception { } @Test - public void testLogTimeoutAlmostReachedWarningDuringCommit() throws Exception { + void testLogTimeoutAlmostReachedWarningDuringCommit() throws Exception { clock.setEpochMilli(0); final long transactionTimeout = 1000; @@ -263,16 +246,15 @@ public void testLogTimeoutAlmostReachedWarningDuringCommit() throws Exception { clock.setEpochMilli(elapsedTime); harness.notifyOfCompletedCheckpoint(1); - assertThat( - testLoggerResource.getMessages(), - hasItem( - containsString( - "has been open for 502 ms. " - + "This is close to or even exceeding the transaction timeout of 1000 ms."))); + assertThat(testLoggerResource.getMessages()) + .anyMatch( + item -> + item.contains( + "has been open for 502 ms. This is close to or even exceeding the transaction timeout of 1000 ms.")); } @Test - public void testLogTimeoutAlmostReachedWarningDuringRecovery() throws Exception { + void testLogTimeoutAlmostReachedWarningDuringRecovery() throws Exception { clock.setEpochMilli(0); final long transactionTimeout = 1000; @@ -296,22 +278,21 @@ public void testLogTimeoutAlmostReachedWarningDuringRecovery() throws Exception closeTestHarness(); - assertThat( - testLoggerResource.getMessages(), - hasItem( - containsString( - "has been open for 502 ms. " - + "This is close to or even exceeding the transaction timeout of 1000 ms."))); + assertThat(testLoggerResource.getMessages()) + .anyMatch( + item -> + item.contains( + "has been open for 502 ms. 
This is close to or even exceeding the transaction timeout of 1000 ms.")); } - private void assertExactlyOnce(List expectedValues) throws IOException { + private void assertExactlyOnce(List expectedValues) { ArrayList actualValues = new ArrayList<>(); for (String name : targetDirectory.listFiles()) { actualValues.addAll(targetDirectory.read(name)); } Collections.sort(actualValues); Collections.sort(expectedValues); - assertEquals(expectedValues, actualValues); + assertThat(actualValues).isEqualTo(expectedValues); } private class ContentDumpSinkFunction diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketAssignerTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketAssignerTest.java index 31ddb8cdabebb..4c0930d589922 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketAssignerTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketAssignerTest.java @@ -24,22 +24,23 @@ import org.apache.flink.core.fs.Path; import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.BasePathBucketAssigner; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy; +import org.apache.flink.testutils.junit.utils.TempDirUtils; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; +import static org.assertj.core.api.Assertions.assertThat; + /** Integration tests for {@link BucketAssigner bucket assigners}. */ -public class BucketAssignerTest { +class BucketAssignerTest { - @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder(); + @TempDir private static java.nio.file.Path tempDir; @Test - public void testAssembleBucketPath() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testAssembleBucketPath() throws Exception { + final File outDir = TempDirUtils.newFolder(tempDir); final Path basePath = new Path(outDir.toURI()); final long time = 1000L; @@ -60,6 +61,6 @@ public void testAssembleBucketPath() throws Exception { Bucket bucket = buckets.onElement("abc", new TestUtils.MockSinkContext(time, time, time)); - Assert.assertEquals(new Path(basePath.toUri()), bucket.getBucketPath()); + assertThat(bucket.getBucketPath()).isEqualTo(new Path(basePath.toUri())); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketStateSerializerTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketStateSerializerTest.java index c456ba7af327d..e15b92a5da7d9 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketStateSerializerTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketStateSerializerTest.java @@ -26,15 +26,14 @@ import org.apache.flink.core.io.SimpleVersionedSerializer; import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.SimpleVersionedStringSerializer; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameter; +import 
org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameters; import org.apache.flink.util.FileUtils; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; import java.io.IOException; import java.nio.file.Files; @@ -46,12 +45,7 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.hamcrest.CoreMatchers.hasItem; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.startsWith; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.iterableWithSize; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for the {@link BucketStateSerializer} that verify we can still read snapshots written using @@ -59,17 +53,17 @@ * control (including the current version). The tests verify that the current version of the * serializer can still read data from all previous versions. */ -@RunWith(Parameterized.class) -public class BucketStateSerializerTest { +@ExtendWith(ParameterizedTestExtension.class) +class BucketStateSerializerTest { private static final int CURRENT_VERSION = 2; - @Parameterized.Parameters(name = "Previous Version = {0}") - public static Collection previousVersions() { + @Parameters(name = "Previous Version = {0}") + private static Collection previousVersions() { return Arrays.asList(1, 2); } - @Parameterized.Parameter public Integer previousVersion; + @Parameter private Integer previousVersion; private static final String IN_PROGRESS_CONTENT = "writing"; private static final String PENDING_CONTENT = "wrote"; @@ -79,20 +73,18 @@ public static Collection previousVersions() { private static final java.nio.file.Path BASE_PATH = Paths.get("src/test/resources/").resolve("bucket-state-migration-test"); - @ClassRule public static TemporaryFolder tempFolder = new TemporaryFolder(); - private final BucketStateGenerator generator = new BucketStateGenerator( BUCKET_ID, IN_PROGRESS_CONTENT, PENDING_CONTENT, BASE_PATH, CURRENT_VERSION); - @Test - @Ignore - public void prepareDeserializationEmpty() throws IOException { + @TestTemplate + @Disabled + void prepareDeserializationEmpty() throws IOException { generator.prepareDeserializationEmpty(); } - @Test - public void testSerializationEmpty() throws IOException { + @TestTemplate + void testSerializationEmpty() throws IOException { final String scenarioName = "empty"; final BucketStatePathResolver pathResolver = @@ -104,19 +96,19 @@ public void testSerializationEmpty() throws IOException { final Bucket bucket = restoreBucket(0, recoveredState); - Assert.assertEquals(testBucketPath, bucket.getBucketPath()); - Assert.assertNull(bucket.getInProgressPart()); - Assert.assertTrue(bucket.getPendingFileRecoverablesPerCheckpoint().isEmpty()); + assertThat(bucket.getBucketPath()).isEqualTo(testBucketPath); + assertThat(bucket.getInProgressPart()).isNull(); + assertThat(bucket.getPendingFileRecoverablesPerCheckpoint()).isEmpty(); } - @Test - @Ignore - public void prepareDeserializationOnlyInProgress() throws IOException { + @TestTemplate + @Disabled + void prepareDeserializationOnlyInProgress() throws IOException { 
generator.prepareDeserializationOnlyInProgress();
     }
 
-    @Test
-    public void testSerializationOnlyInProgress() throws IOException {
+    @TestTemplate
+    void testSerializationOnlyInProgress() throws IOException {
         final String scenarioName = "only-in-progress";
         final BucketStatePathResolver pathResolver =
@@ -130,44 +122,43 @@ public void testSerializationOnlyInProgress() throws IOException {
 
         final Bucket<String, String> bucket = restoreBucket(0, recoveredState);
 
-        Assert.assertEquals(testBucketPath, bucket.getBucketPath());
+        assertThat(bucket.getBucketPath()).isEqualTo(testBucketPath);
 
         // check restore the correct in progress file writer
-        Assert.assertEquals(8, bucket.getInProgressPart().getSize());
+        assertThat(bucket.getInProgressPart().getSize()).isEqualTo(8);
 
         long numFiles =
                 Files.list(Paths.get(testBucketPath.toString()))
                         .map(
                                 file -> {
-                                    assertThat(
-                                            file.getFileName().toString(),
-                                            startsWith(".part-0-0.inprogress"));
+                                    assertThat(file.getFileName().toString())
+                                            .startsWith(".part-0-0.inprogress");
                                     return 1;
                                 })
                         .count();
-        assertThat(numFiles, is(1L));
+        assertThat(numFiles).isOne();
     }
 
-    @Test
-    @Ignore
-    public void prepareDeserializationFull() throws IOException {
+    @TestTemplate
+    @Disabled
+    void prepareDeserializationFull() throws IOException {
         generator.prepareDeserializationFull();
     }
 
-    @Test
-    public void testSerializationFull() throws IOException {
+    @TestTemplate
+    void testSerializationFull() throws IOException {
         testDeserializationFull(true, "full");
     }
 
-    @Test
-    @Ignore
+    @TestTemplate
+    @Disabled
     public void prepareDeserializationNullInProgress() throws IOException {
         generator.prepareDeserializationNullInProgress();
     }
 
-    @Test
-    public void testSerializationNullInProgress() throws IOException {
+    @TestTemplate
+    void testSerializationNullInProgress() throws IOException {
         testDeserializationFull(false, "full-no-in-progress");
     }
 
@@ -189,7 +180,7 @@ private void testDeserializationFull(final boolean withInProgress, final String
         final Map<Long, List<InProgressFileWriter.PendingFileRecoverable>> pendingFileRecoverables =
                 recoveredState.getPendingFileRecoverablesPerCheckpoint();
-        Assert.assertEquals(5L, pendingFileRecoverables.size());
+        assertThat(pendingFileRecoverables).hasSize(5);
 
         final Set<String> beforeRestorePaths =
                 Files.list(outputPath.resolve(BUCKET_ID))
 
         // before restoring, all files have "inprogress" in their names
         for (int i = 0; i < noOfPendingCheckpoints; i++) {
             final String part = ".part-0-" + i + ".inprogress";
-            assertThat(beforeRestorePaths, hasItem(startsWith(part)));
+            assertThat(beforeRestorePaths).anyMatch(item -> item.startsWith(part));
         }
 
         // recover and commit
         final Bucket<String, String> bucket =
                 restoreBucket(noOfPendingCheckpoints + 1, recoveredState);
-        Assert.assertEquals(testBucketPath, bucket.getBucketPath());
-        Assert.assertEquals(0, bucket.getPendingFileRecoverablesForCurrentCheckpoint().size());
+        assertThat(bucket.getBucketPath()).isEqualTo(testBucketPath);
+        assertThat(bucket.getPendingFileRecoverablesForCurrentCheckpoint()).isEmpty();
 
         final Set<String> afterRestorePaths =
                 Files.list(outputPath.resolve(BUCKET_ID))
 
         // there is no "inprogress" in the file names of the committed files.
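
Stepping back from the hunk for a moment: the BucketStateSerializerTest migration above maps JUnit 4's Parameterized runner onto Flink's ParameterizedTestExtension. @Parameters still names a static supplier of parameter values, @Parameter still marks the injected field, and each test becomes a @TestTemplate that runs once per parameter. A condensed sketch with a throwaway parameter list (the class and values are illustrative):

    import org.apache.flink.testutils.junit.extensions.parameterized.Parameter;
    import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension;
    import org.apache.flink.testutils.junit.extensions.parameterized.Parameters;

    import org.junit.jupiter.api.TestTemplate;
    import org.junit.jupiter.api.extension.ExtendWith;

    import java.util.Arrays;
    import java.util.Collection;

    import static org.assertj.core.api.Assertions.assertThat;

    @ExtendWith(ParameterizedTestExtension.class)
    class ParameterizedSketchTest {

        @Parameters(name = "version = {0}")
        private static Collection<Integer> versions() {
            return Arrays.asList(1, 2);
        }

        // injected once per element of versions()
        @Parameter private Integer version;

        @TestTemplate
        void versionIsSupported() {
            assertThat(version).isBetween(1, 2);
        }
    }

The testDeserializationFull hunk, including the loop the preceding comment describes, resumes below.
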
for (int i = 0; i < noOfPendingCheckpoints; i++) { final String part = "part-0-" + i; - assertThat(afterRestorePaths, hasItem(part)); + assertThat(afterRestorePaths).contains(part); afterRestorePaths.remove(part); } if (withInProgress) { // only the in-progress must be left - assertThat(afterRestorePaths, iterableWithSize(1)); + assertThat(afterRestorePaths).hasSize(1); // verify that the in-progress file is still there - assertThat( - afterRestorePaths, - hasItem(startsWith(".part-0-" + noOfPendingCheckpoints + ".inprogress"))); + assertThat(afterRestorePaths) + .anyMatch( + item -> + item.startsWith( + ".part-0-" + + noOfPendingCheckpoints + + ".inprogress")); } else { - assertThat(afterRestorePaths, empty()); + assertThat(afterRestorePaths).isEmpty(); } } finally { FileUtils.deleteDirectory(pathResolver.getResourcePath(scenarioName).toFile()); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketTest.java index 245e895878b41..372cfca368527 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketTest.java @@ -31,13 +31,12 @@ import org.apache.flink.streaming.api.functions.sink.filesystem.utils.NoOpRecoverable; import org.apache.flink.streaming.api.functions.sink.filesystem.utils.NoOpRecoverableFsDataOutputStream; import org.apache.flink.streaming.api.functions.sink.filesystem.utils.NoOpRecoverableWriter; +import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.io.IOException; @@ -47,17 +46,17 @@ import java.util.Map; import static org.apache.flink.util.Preconditions.checkArgument; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.HamcrestCondition.matching; /** Tests for the {@code Bucket}. */ -public class BucketTest { +class BucketTest { - @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder(); + @TempDir private static java.nio.file.Path tempFolder; @Test - public void shouldNotCleanupResumablesThatArePartOfTheAckedCheckpoint() throws IOException { - final File outDir = TEMP_FOLDER.newFolder(); + void shouldNotCleanupResumablesThatArePartOfTheAckedCheckpoint() throws IOException { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final TestRecoverableWriter recoverableWriter = getRecoverableWriter(path); @@ -67,17 +66,16 @@ public void shouldNotCleanupResumablesThatArePartOfTheAckedCheckpoint() throws I bucketUnderTest.write("test-element", 0L); final BucketState state = bucketUnderTest.onReceptionOfCheckpoint(0L); - assertThat(state, hasActiveInProgressFile()); + assertThat(state).is(matching(hasActiveInProgressFile())); bucketUnderTest.onSuccessfulCompletionOfCheckpoint(0L); - assertThat( - recoverableWriter, - hasCalledDiscard(0)); // it did not discard as this is still valid. 
+ assertThat(recoverableWriter) + .is(matching(hasCalledDiscard(0))); // it did not discard as this is still valid. } @Test - public void shouldCleanupOutdatedResumablesOnCheckpointAck() throws IOException { - final File outDir = TEMP_FOLDER.newFolder(); + void shouldCleanupOutdatedResumablesOnCheckpointAck() throws IOException { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final TestRecoverableWriter recoverableWriter = getRecoverableWriter(path); @@ -87,7 +85,7 @@ public void shouldCleanupOutdatedResumablesOnCheckpointAck() throws IOException bucketUnderTest.write("test-element", 0L); final BucketState state = bucketUnderTest.onReceptionOfCheckpoint(0L); - assertThat(state, hasActiveInProgressFile()); + assertThat(state).is(matching(hasActiveInProgressFile())); bucketUnderTest.onSuccessfulCompletionOfCheckpoint(0L); @@ -95,12 +93,13 @@ public void shouldCleanupOutdatedResumablesOnCheckpointAck() throws IOException bucketUnderTest.onReceptionOfCheckpoint(2L); bucketUnderTest.onSuccessfulCompletionOfCheckpoint(2L); - assertThat(recoverableWriter, hasCalledDiscard(2)); // that is for checkpoints 0 and 1 + assertThat(recoverableWriter) + .is(matching(hasCalledDiscard(2))); // that is for checkpoints 0 and 1 } @Test - public void shouldNotCallCleanupWithoutInProgressPartFiles() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void shouldNotCallCleanupWithoutInProgressPartFiles() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final TestRecoverableWriter recoverableWriter = getRecoverableWriter(path); @@ -108,18 +107,19 @@ public void shouldNotCallCleanupWithoutInProgressPartFiles() throws Exception { createBucket(recoverableWriter, path, 0, 0, OutputFileConfig.builder().build()); final BucketState state = bucketUnderTest.onReceptionOfCheckpoint(0L); - assertThat(state, hasNoActiveInProgressFile()); + assertThat(state).is(matching(hasNoActiveInProgressFile())); bucketUnderTest.onReceptionOfCheckpoint(1L); bucketUnderTest.onReceptionOfCheckpoint(2L); bucketUnderTest.onSuccessfulCompletionOfCheckpoint(2L); - assertThat(recoverableWriter, hasCalledDiscard(0)); // we have no in-progress file. + assertThat(recoverableWriter) + .is(matching(hasCalledDiscard(0))); // we have no in-progress file. 
} @Test - public void shouldCleanupOutdatedResumablesAfterResumed() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void shouldCleanupOutdatedResumablesAfterResumed() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final TestRecoverableWriter recoverableWriter = getRecoverableWriter(path); @@ -128,55 +128,55 @@ public void shouldCleanupOutdatedResumablesAfterResumed() throws Exception { bucketUnderTest.write("test-element", 0L); final BucketState state0 = bucketUnderTest.onReceptionOfCheckpoint(0L); - assertThat(state0, hasActiveInProgressFile()); + assertThat(state0).is(matching(hasActiveInProgressFile())); bucketUnderTest.onSuccessfulCompletionOfCheckpoint(0L); - assertThat(recoverableWriter, hasCalledDiscard(0)); + assertThat(recoverableWriter).is(matching(hasCalledDiscard(0))); - final File newOutDir = TEMP_FOLDER.newFolder(); + final File newOutDir = TempDirUtils.newFolder(tempFolder); final Path newPath = new Path(newOutDir.toURI()); final TestRecoverableWriter newRecoverableWriter = getRecoverableWriter(newPath); final Bucket bucketAfterResume = restoreBucket( newRecoverableWriter, 0, 0, state0, OutputFileConfig.builder().build()); final BucketState state1 = bucketAfterResume.onReceptionOfCheckpoint(1L); - assertThat(state1, hasActiveInProgressFile()); + assertThat(state1).is(matching(hasActiveInProgressFile())); bucketAfterResume.onSuccessfulCompletionOfCheckpoint(1L); - assertThat(newRecoverableWriter, hasCalledDiscard(1)); + assertThat(newRecoverableWriter).is(matching(hasCalledDiscard(1))); } // --------------------------- Checking Restore --------------------------- @Test - public void inProgressFileShouldBeCommittedIfWriterDoesNotSupportResume() throws IOException { + void inProgressFileShouldBeCommittedIfWriterDoesNotSupportResume() throws IOException { final StubNonResumableWriter nonResumableWriter = new StubNonResumableWriter(); final Bucket bucket = getRestoredBucketWithOnlyInProgressPart(nonResumableWriter); - Assert.assertThat(nonResumableWriter, hasMethodCallCountersEqualTo(1, 0, 1)); - Assert.assertThat(bucket, hasNullInProgressFile(true)); + assertThat(nonResumableWriter).is(matching(hasMethodCallCountersEqualTo(1, 0, 1))); + assertThat(bucket).is(matching(hasNullInProgressFile(true))); } @Test - public void inProgressFileShouldBeRestoredIfWriterSupportsResume() throws IOException { + void inProgressFileShouldBeRestoredIfWriterSupportsResume() throws IOException { final StubResumableWriter resumableWriter = new StubResumableWriter(); final Bucket bucket = getRestoredBucketWithOnlyInProgressPart(resumableWriter); - Assert.assertThat(resumableWriter, hasMethodCallCountersEqualTo(1, 1, 0)); - Assert.assertThat(bucket, hasNullInProgressFile(false)); + assertThat(resumableWriter).is(matching(hasMethodCallCountersEqualTo(1, 1, 0))); + assertThat(bucket).is(matching(hasNullInProgressFile(false))); } @Test - public void pendingFilesShouldBeRestored() throws IOException { + void pendingFilesShouldBeRestored() throws IOException { final int expectedRecoverForCommitCounter = 10; final StubNonResumableWriter writer = new StubNonResumableWriter(); final Bucket bucket = getRestoredBucketWithOnlyPendingParts(writer, expectedRecoverForCommitCounter); - Assert.assertThat( - writer, hasMethodCallCountersEqualTo(0, 0, expectedRecoverForCommitCounter)); - Assert.assertThat(bucket, hasNullInProgressFile(true)); + assertThat(writer) + .is(matching(hasMethodCallCountersEqualTo(0, 0, 
expectedRecoverForCommitCounter))); + assertThat(bucket).is(matching(hasNullInProgressFile(true))); } // ------------------------------- Matchers -------------------------------- @@ -432,21 +432,12 @@ private static Bucket restoreBucket( outputFileConfig); } - private static TestRecoverableWriter getRecoverableWriter(Path path) { - try { - final FileSystem fs = FileSystem.get(path.toUri()); - if (!(fs instanceof LocalFileSystem)) { - fail( - "Expected Local FS but got a " - + fs.getClass().getName() - + " for path: " - + path); - } - return new TestRecoverableWriter((LocalFileSystem) fs); - } catch (IOException e) { - fail(); - } - return null; + private static TestRecoverableWriter getRecoverableWriter(Path path) throws IOException { + final FileSystem fs = FileSystem.get(path.toUri()); + assertThat(fs) + .as("Expected Local FS but got a " + fs.getClass().getName() + " for path: " + path) + .isInstanceOf(LocalFileSystem.class); + return new TestRecoverableWriter((LocalFileSystem) fs); } private Bucket getRestoredBucketWithOnlyInProgressPart( diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsRollingPolicyTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsRollingPolicyTest.java index 8eefba09a602a..6e3765f9dc95d 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsRollingPolicyTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsRollingPolicyTest.java @@ -24,25 +24,26 @@ import org.apache.flink.core.fs.Path; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy; +import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.apache.flink.util.Preconditions; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.io.IOException; import java.time.Duration; +import static org.assertj.core.api.Assertions.assertThat; + /** Tests for different {@link RollingPolicy rolling policies}. 
*/ -public class BucketsRollingPolicyTest { +class BucketsRollingPolicyTest { - @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder(); + @TempDir private static java.nio.file.Path tempDir; @Test - public void testDefaultRollingPolicy() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testDefaultRollingPolicy() throws Exception { + final File outDir = TempDirUtils.newFolder(tempDir); final Path path = new Path(outDir.toURI()); final RollingPolicy originalRollingPolicy = @@ -87,7 +88,7 @@ public void testDefaultRollingPolicy() throws Exception { } @Test - public void testDefaultRollingPolicyDeprecatedCreate() throws Exception { + void testDefaultRollingPolicyDeprecatedCreate() { DefaultRollingPolicy policy = DefaultRollingPolicy.builder() .withInactivityInterval(Duration.ofMillis(10)) @@ -95,14 +96,14 @@ public void testDefaultRollingPolicyDeprecatedCreate() throws Exception { .withRolloverInterval(Duration.ofMillis(30)) .build(); - Assert.assertEquals(10, policy.getInactivityInterval()); - Assert.assertEquals(20, policy.getMaxPartSize()); - Assert.assertEquals(30, policy.getRolloverInterval()); + assertThat(policy.getInactivityInterval()).isEqualTo(10); + assertThat(policy.getMaxPartSize()).isEqualTo(20); + assertThat(policy.getRolloverInterval()).isEqualTo(30); } @Test - public void testRollOnCheckpointPolicy() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testRollOnCheckpointPolicy() throws Exception { + final File outDir = TempDirUtils.newFolder(tempDir); final Path path = new Path(outDir.toURI()); final MethodCallCountingPolicyWrapper rollingPolicy = @@ -131,8 +132,8 @@ public void testRollOnCheckpointPolicy() throws Exception { } @Test - public void testCustomRollingPolicy() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testCustomRollingPolicy() throws Exception { + final File outDir = TempDirUtils.newFolder(tempDir); final Path path = new Path(outDir.toURI()); final MethodCallCountingPolicyWrapper rollingPolicy = @@ -289,12 +290,12 @@ void verifyCallCounters( final long onEventRolls, final long onProcessingTimeCalls, final long onProcessingTimeRolls) { - Assert.assertEquals(onCheckpointCalls, onCheckpointCallCounter); - Assert.assertEquals(onCheckpointRolls, onCheckpointRollCounter); - Assert.assertEquals(onEventCalls, onEventCallCounter); - Assert.assertEquals(onEventRolls, onEventRollCounter); - Assert.assertEquals(onProcessingTimeCalls, onProcessingTimeCallCounter); - Assert.assertEquals(onProcessingTimeRolls, onProcessingTimeRollCounter); + assertThat(onCheckpointCallCounter).isEqualTo(onCheckpointCalls); + assertThat(onCheckpointRollCounter).isEqualTo(onCheckpointRolls); + assertThat(onEventCallCounter).isEqualTo(onEventCalls); + assertThat(onEventRollCounter).isEqualTo(onEventRolls); + assertThat(onProcessingTimeCallCounter).isEqualTo(onProcessingTimeCalls); + assertThat(onProcessingTimeRollCounter).isEqualTo(onProcessingTimeRolls); } } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsTest.java index 98dd89abedeb7..30e14518b59aa 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsTest.java @@ -29,34 +29,32 @@ import 
org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.SimpleVersionedStringSerializer; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy; +import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import static org.hamcrest.MatcherAssert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.HamcrestCondition.matching; /** Tests for {@link Buckets}. */ -public class BucketsTest { +class BucketsTest { - @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder(); + @TempDir private static java.nio.file.Path tempFolder; @Test - public void testSnapshotAndRestore() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testSnapshotAndRestore() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final RollingPolicy onCheckpointRollingPolicy = @@ -70,19 +68,16 @@ public void testSnapshotAndRestore() throws Exception { buckets.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L)); buckets.snapshotState(0L, bucketStateContainer, partCounterContainer); - assertThat( - buckets.getActiveBuckets().get("test1"), - hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test1")); + assertThat(buckets.getActiveBuckets().get("test1")) + .is(matching(hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test1"))); buckets.onElement("test2", new TestUtils.MockSinkContext(null, 1L, 2L)); buckets.snapshotState(1L, bucketStateContainer, partCounterContainer); - assertThat( - buckets.getActiveBuckets().get("test1"), - hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test1")); - assertThat( - buckets.getActiveBuckets().get("test2"), - hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test2")); + assertThat(buckets.getActiveBuckets().get("test1")) + .is(matching(hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test1"))); + assertThat(buckets.getActiveBuckets().get("test2")) + .is(matching(hasSinglePartFileToBeCommittedOnCheckpointAck(path, "test2"))); Buckets restoredBuckets = restoreBuckets( @@ -96,7 +91,7 @@ public void testSnapshotAndRestore() throws Exception { restoredBuckets.getActiveBuckets(); // because we commit pending files for previous checkpoints upon recovery - Assert.assertTrue(activeBuckets.isEmpty()); + assertThat(activeBuckets).isEmpty(); } private static TypeSafeMatcher> @@ -123,8 +118,8 @@ public void describeTo(Description description) { } @Test - public void testMergeAtScaleInAndMaxCounterAtRecovery() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testMergeAtScaleInAndMaxCounterAtRecovery() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final RollingPolicy onCheckpointRP = @@ -144,10 +139,10 @@ public void testMergeAtScaleInAndMaxCounterAtRecovery() 
throws Exception { bucketsOne.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L)); bucketsOne.snapshotState(0L, bucketStateContainerOne, partCounterContainerOne); - Assert.assertEquals(1L, bucketsOne.getMaxPartCounter()); + assertThat(bucketsOne.getMaxPartCounter()).isOne(); // make sure we have one in-progress file here - Assert.assertNotNull(bucketsOne.getActiveBuckets().get("test1").getInProgressPart()); + assertThat(bucketsOne.getActiveBuckets().get("test1").getInProgressPart()).isNotNull(); // add a couple of in-progress files so that the part counter increases. bucketsTwo.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L)); @@ -157,17 +152,16 @@ public void testMergeAtScaleInAndMaxCounterAtRecovery() throws Exception { bucketsTwo.snapshotState(0L, bucketStateContainerTwo, partCounterContainerTwo); - Assert.assertEquals(2L, bucketsTwo.getMaxPartCounter()); + assertThat(bucketsTwo.getMaxPartCounter()).isEqualTo(2L); // make sure we have one in-progress file here and a pending - Assert.assertEquals( - 1L, - bucketsTwo - .getActiveBuckets() - .get("test1") - .getPendingFileRecoverablesPerCheckpoint() - .size()); - Assert.assertNotNull(bucketsTwo.getActiveBuckets().get("test1").getInProgressPart()); + assertThat( + bucketsTwo + .getActiveBuckets() + .get("test1") + .getPendingFileRecoverablesPerCheckpoint()) + .hasSize(1); + assertThat(bucketsTwo.getActiveBuckets().get("test1").getInProgressPart()).isNotNull(); final ListState mergedBucketStateContainer = new MockListState<>(); final ListState mergedPartCounterContainer = new MockListState<>(); @@ -187,30 +181,29 @@ public void testMergeAtScaleInAndMaxCounterAtRecovery() throws Exception { mergedPartCounterContainer); // we get the maximum of the previous tasks - Assert.assertEquals(2L, restoredBuckets.getMaxPartCounter()); + assertThat(restoredBuckets.getMaxPartCounter()).isEqualTo(2L); final Map> activeBuckets = restoredBuckets.getActiveBuckets(); - Assert.assertEquals(1L, activeBuckets.size()); - Assert.assertTrue(activeBuckets.keySet().contains("test1")); + assertThat(activeBuckets).hasSize(1).containsKey("test1"); final Bucket bucket = activeBuckets.get("test1"); - Assert.assertEquals("test1", bucket.getBucketId()); - Assert.assertEquals(new Path(path, "test1"), bucket.getBucketPath()); + assertThat(bucket.getBucketId()).isEqualTo("test1"); + assertThat(bucket.getBucketPath()).isEqualTo(new Path(path, "test1")); - Assert.assertNotNull(bucket.getInProgressPart()); // the restored part file + assertThat(bucket.getInProgressPart()).isNotNull(); // the restored part file // this is due to the Bucket#merge(). The in progress file of one // of the previous tasks is put in the list of pending files. 
- Assert.assertEquals(1L, bucket.getPendingFileRecoverablesForCurrentCheckpoint().size()); + assertThat(bucket.getPendingFileRecoverablesForCurrentCheckpoint()).hasSize(1); // we commit the pending for previous checkpoints - Assert.assertTrue(bucket.getPendingFileRecoverablesPerCheckpoint().isEmpty()); + assertThat(bucket.getPendingFileRecoverablesPerCheckpoint()).isEmpty(); } @Test - public void testOnProcessingTime() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testOnProcessingTime() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final OnProcessingTimePolicy rollOnProcessingTimeCountingPolicy = @@ -225,26 +218,24 @@ public void testOnProcessingTime() throws Exception { // now it should roll buckets.onProcessingTime(7L); - Assert.assertEquals( - 1L, rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()); + assertThat(rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()).isOne(); final Map> activeBuckets = buckets.getActiveBuckets(); - Assert.assertEquals(1L, activeBuckets.size()); - Assert.assertTrue(activeBuckets.keySet().contains("test")); + assertThat(activeBuckets).hasSize(1).containsKey("test"); final Bucket bucket = activeBuckets.get("test"); - Assert.assertEquals("test", bucket.getBucketId()); - Assert.assertEquals(new Path(path, "test"), bucket.getBucketPath()); - Assert.assertEquals("test", bucket.getBucketId()); + assertThat(bucket.getBucketId()).isEqualTo("test"); + assertThat(bucket.getBucketPath()).isEqualTo(new Path(path, "test")); + assertThat(bucket.getBucketId()).isEqualTo("test"); - Assert.assertNull(bucket.getInProgressPart()); - Assert.assertEquals(1L, bucket.getPendingFileRecoverablesForCurrentCheckpoint().size()); - Assert.assertTrue(bucket.getPendingFileRecoverablesPerCheckpoint().isEmpty()); + assertThat(bucket.getInProgressPart()).isNull(); + assertThat(bucket.getPendingFileRecoverablesForCurrentCheckpoint()).hasSize(1); + assertThat(bucket.getPendingFileRecoverablesPerCheckpoint()).isEmpty(); } @Test - public void testBucketIsRemovedWhenNotActive() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testBucketIsRemovedWhenNotActive() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final OnProcessingTimePolicy rollOnProcessingTimeCountingPolicy = @@ -259,18 +250,17 @@ public void testBucketIsRemovedWhenNotActive() throws Exception { // now it should roll buckets.onProcessingTime(7L); - Assert.assertEquals( - 1L, rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()); + assertThat(rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()).isOne(); buckets.snapshotState(0L, new MockListState<>(), new MockListState<>()); buckets.commitUpToCheckpoint(0L); - Assert.assertTrue(buckets.getActiveBuckets().isEmpty()); + assertThat(buckets.getActiveBuckets()).isEmpty(); } @Test - public void testPartCounterAfterBucketResurrection() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testPartCounterAfterBucketResurrection() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final OnProcessingTimePolicy rollOnProcessingTimeCountingPolicy = @@ -282,21 +272,20 @@ public void testPartCounterAfterBucketResurrection() throws Exception { // it takes the current processing time of the context for the creation time, and for the // last modification 
time. buckets.onElement("test", new TestUtils.MockSinkContext(1L, 2L, 3L)); - Assert.assertEquals(1L, buckets.getActiveBuckets().get("test").getPartCounter()); + assertThat(buckets.getActiveBuckets().get("test").getPartCounter()).isOne(); // now it should roll buckets.onProcessingTime(7L); - Assert.assertEquals( - 1L, rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()); - Assert.assertEquals(1L, buckets.getActiveBuckets().get("test").getPartCounter()); + assertThat(rollOnProcessingTimeCountingPolicy.getOnProcessingTimeRollCounter()).isOne(); + assertThat(buckets.getActiveBuckets().get("test").getPartCounter()).isOne(); buckets.snapshotState(0L, new MockListState<>(), new MockListState<>()); buckets.commitUpToCheckpoint(0L); - Assert.assertTrue(buckets.getActiveBuckets().isEmpty()); + assertThat(buckets.getActiveBuckets()).isEmpty(); buckets.onElement("test", new TestUtils.MockSinkContext(2L, 3L, 4L)); - Assert.assertEquals(2L, buckets.getActiveBuckets().get("test").getPartCounter()); + assertThat(buckets.getActiveBuckets().get("test").getPartCounter()).isEqualTo(2L); } private static class OnProcessingTimePolicy @@ -338,18 +327,18 @@ public boolean shouldRollOnProcessingTime( } @Test - public void testContextPassingNormalExecution() throws Exception { + void testContextPassingNormalExecution() throws Exception { testCorrectTimestampPassingInContext(1L, 2L, 3L); } @Test - public void testContextPassingNullTimestamp() throws Exception { + void testContextPassingNullTimestamp() throws Exception { testCorrectTimestampPassingInContext(null, 2L, 3L); } private void testCorrectTimestampPassingInContext( Long timestamp, long watermark, long processingTime) throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + final File outDir = TempDirUtils.newFolder(tempFolder); final Path path = new Path(outDir.toURI()); final Buckets buckets = @@ -391,9 +380,9 @@ public String getBucketId(String element, BucketAssigner.Context context) { final long watermark = context.currentWatermark(); final long processingTime = context.currentProcessingTime(); - Assert.assertEquals(expectedTimestamp, elementTimestamp); - Assert.assertEquals(expectedProcessingTime, processingTime); - Assert.assertEquals(expectedWatermark, watermark); + assertThat(elementTimestamp).isEqualTo(expectedTimestamp); + assertThat(processingTime).isEqualTo(expectedProcessingTime); + assertThat(watermark).isEqualTo(expectedWatermark); return element; } @@ -405,8 +394,8 @@ public SimpleVersionedSerializer getSerializer() { } @Test - public void testBucketLifeCycleListenerOnCreatingAndInactive() throws Exception { - File outDir = TEMP_FOLDER.newFolder(); + void testBucketLifeCycleListenerOnCreatingAndInactive() throws Exception { + File outDir = TempDirUtils.newFolder(tempFolder); Path path = new Path(outDir.toURI()); OnProcessingTimePolicy rollOnProcessingTimeCountingPolicy = new OnProcessingTimePolicy<>(2L); @@ -441,12 +430,12 @@ public void testBucketLifeCycleListenerOnCreatingAndInactive() throws Exception new Tuple2<>(RecordBucketLifeCycleListener.EventType.CREATED, "test2"), new Tuple2<>(RecordBucketLifeCycleListener.EventType.INACTIVE, "test1"), new Tuple2<>(RecordBucketLifeCycleListener.EventType.INACTIVE, "test2")); - Assert.assertEquals(expectedEvents, bucketLifeCycleListener.getEvents()); + assertThat(bucketLifeCycleListener.getEvents()).isEqualTo(expectedEvents); } @Test - public void testBucketLifeCycleListenerOnRestoring() throws Exception { - File outDir = TEMP_FOLDER.newFolder(); + void 
testBucketLifeCycleListenerOnRestoring() throws Exception { + File outDir = TempDirUtils.newFolder(tempFolder); Path path = new Path(outDir.toURI()); OnProcessingTimePolicy rollOnProcessingTimeCountingPolicy = new OnProcessingTimePolicy<>(2L); @@ -482,15 +471,13 @@ public void testBucketLifeCycleListenerOnRestoring() throws Exception { partCounterContainer, OutputFileConfig.builder().build()); - Assert.assertEquals( - new HashSet<>(Collections.singletonList("test2")), - buckets.getActiveBuckets().keySet()); + assertThat(buckets.getActiveBuckets().keySet()).containsOnly("test2"); List> expectedEvents = Arrays.asList( new Tuple2<>(RecordBucketLifeCycleListener.EventType.CREATED, "test1"), new Tuple2<>(RecordBucketLifeCycleListener.EventType.CREATED, "test2"), new Tuple2<>(RecordBucketLifeCycleListener.EventType.INACTIVE, "test1")); - Assert.assertEquals(expectedEvents, bucketLifeCycleListener.getEvents()); + assertThat(bucketLifeCycleListener.getEvents()).isEqualTo(expectedEvents); } private static class RecordBucketLifeCycleListener @@ -518,8 +505,8 @@ public List> getEvents() { } @Test - public void testFileLifeCycleListener() throws Exception { - File outDir = TEMP_FOLDER.newFolder(); + void testFileLifeCycleListener() throws Exception { + File outDir = TempDirUtils.newFolder(tempFolder); Path path = new Path(outDir.toURI()); OnProcessingTimePolicy rollOnProcessingTimeCountingPolicy = @@ -545,11 +532,10 @@ public void testFileLifeCycleListener() throws Exception { buckets.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 5L)); buckets.onElement("test2", new TestUtils.MockSinkContext(null, 1L, 6L)); - Assert.assertEquals(2, fileLifeCycleListener.files.size()); - Assert.assertEquals( - Arrays.asList("part-0-0", "part-0-1"), fileLifeCycleListener.files.get("test1")); - Assert.assertEquals( - Collections.singletonList("part-0-1"), fileLifeCycleListener.files.get("test2")); + assertThat(fileLifeCycleListener.files).hasSize(2); + assertThat(fileLifeCycleListener.files.get("test1")) + .containsExactly("part-0-0", "part-0-1"); + assertThat(fileLifeCycleListener.files.get("test2")).containsExactly("part-0-1"); } private static class TestFileLifeCycleListener implements FileLifeCycleListener { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BulkWriterTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BulkWriterTest.java index 0d39d027dc1dc..93ab7d061fa78 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BulkWriterTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BulkWriterTest.java @@ -23,13 +23,11 @@ import org.apache.flink.core.fs.FSDataOutputStream; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; +import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.apache.flink.util.Preconditions; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.io.IOException; @@ -37,14 +35,16 @@ import java.nio.charset.StandardCharsets; import java.util.Map; +import static org.assertj.core.api.Assertions.assertThat; + /** Tests for the {@link StreamingFileSink} with 
{@link BulkWriter}. */ -public class BulkWriterTest extends TestLogger { +public class BulkWriterTest { - @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder(); + @TempDir private static java.nio.file.Path tempFolder; @Test - public void testCustomBulkWriter() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testCustomBulkWriter() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); // we set the max bucket size to small so that we can know when it rolls try (OneInputStreamOperatorTestHarness, Object> testHarness = @@ -62,8 +62,8 @@ public void testCustomBulkWriter() throws Exception { } @Test - public void testCustomBulkWriterWithBucketAssigner() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testCustomBulkWriterWithBucketAssigner() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); // we set the max bucket size to small so that we can know when it rolls try (OneInputStreamOperatorTestHarness, Object> testHarness = @@ -86,8 +86,8 @@ public void testCustomBulkWriterWithBucketAssigner() throws Exception { } @Test - public void testCustomBulkWriterWithPartConfig() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testCustomBulkWriterWithPartConfig() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); // we set the max bucket size to small so that we can know when it rolls try (OneInputStreamOperatorTestHarness, Object> testHarness = @@ -142,15 +142,15 @@ private void testPartFilesWithStringBucketer( for (Map.Entry fileContents : contents.entrySet()) { if (fileContents.getKey().getName().contains(partFileName1)) { fileCounter++; - Assert.assertEquals("test1@1\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\n"); } else if (fileContents.getKey().getName().contains(partFileName2)) { fileCounter++; - Assert.assertEquals("test1@2\ntest1@3\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@2\ntest1@3\n"); } // check bucket name - Assert.assertEquals("test1", fileContents.getKey().getParentFile().getName()); + assertThat(fileContents.getKey().getParentFile().getName()).isEqualTo("test1"); } - Assert.assertEquals(2L, fileCounter); + assertThat(fileCounter).isEqualTo(2L); // we acknowledge the latest checkpoint, so everything should be published. 
testHarness.notifyOfCompletedCheckpoint(2L); @@ -190,19 +190,19 @@ private void testPartFilesWithIntegerBucketer( for (Map.Entry fileContents : contents.entrySet()) { if (fileContents.getKey().getName().contains(partFileName1)) { fileCounter++; - Assert.assertEquals("test1@1\n", fileContents.getValue()); - Assert.assertEquals("1", fileContents.getKey().getParentFile().getName()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\n"); + assertThat(fileContents.getKey().getParentFile().getName()).isEqualTo("1"); } else if (fileContents.getKey().getName().contains(partFileName2)) { fileCounter++; - Assert.assertEquals("test1@2\n", fileContents.getValue()); - Assert.assertEquals("2", fileContents.getKey().getParentFile().getName()); + assertThat(fileContents.getValue()).isEqualTo("test1@2\n"); + assertThat(fileContents.getKey().getParentFile().getName()).isEqualTo("2"); } else if (fileContents.getKey().getName().contains(partFileName3)) { fileCounter++; - Assert.assertEquals("test1@3\n", fileContents.getValue()); - Assert.assertEquals("3", fileContents.getKey().getParentFile().getName()); + assertThat(fileContents.getValue()).isEqualTo("test1@3\n"); + assertThat(fileContents.getKey().getParentFile().getName()).isEqualTo("3"); } } - Assert.assertEquals(3L, fileCounter); + assertThat(fileCounter).isEqualTo(3L); // we acknowledge the latest checkpoint, so everything should be published. testHarness.notifyOfCompletedCheckpoint(2L); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/LocalStreamingFileSinkTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/LocalStreamingFileSinkTest.java index b8853e0ae0313..c524deca4f530 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/LocalStreamingFileSinkTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/LocalStreamingFileSinkTest.java @@ -27,25 +27,25 @@ import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; -import org.apache.flink.util.TestLogger; +import org.apache.flink.testutils.junit.utils.TempDirUtils; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.time.Duration; import java.util.Map; +import static org.assertj.core.api.Assertions.assertThat; + /** Tests for the {@link StreamingFileSink}. 
*/ -public class LocalStreamingFileSinkTest extends TestLogger { +class LocalStreamingFileSinkTest { - @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder(); + @TempDir private static java.nio.file.Path tempFolder; @Test - public void testClosingWithoutInput() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testClosingWithoutInput() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); try (OneInputStreamOperatorTestHarness, Object> testHarness = TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 124L); ) { @@ -55,8 +55,8 @@ public void testClosingWithoutInput() throws Exception { } @Test - public void testClosingWithoutInitializingStateShouldNotFail() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testClosingWithoutInitializingStateShouldNotFail() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); try (OneInputStreamOperatorTestHarness, Object> testHarness = TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 124L)) { @@ -65,8 +65,8 @@ public void testClosingWithoutInitializingStateShouldNotFail() throws Exception } @Test - public void testTruncateAfterRecoveryAndOverwrite() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testTruncateAfterRecoveryAndOverwrite() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); OperatorSubtaskState snapshot; // we set the max bucket size to small so that we can know when it rolls @@ -94,13 +94,13 @@ public void testTruncateAfterRecoveryAndOverwrite() throws Exception { for (Map.Entry fileContents : contents.entrySet()) { if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) { fileCounter++; - Assert.assertEquals("test1@1\ntest1@2\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@2\n"); } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) { fileCounter++; - Assert.assertEquals("test1@3\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@3\n"); } } - Assert.assertEquals(2L, fileCounter); + assertThat(fileCounter).isEqualTo(2L); } try (OneInputStreamOperatorTestHarness, Object> testHarness = @@ -121,14 +121,14 @@ public void testTruncateAfterRecoveryAndOverwrite() throws Exception { if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) { // truncated fileCounter++; - Assert.assertEquals("test1@1\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\n"); } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) { // ignored for now as we do not clean up. This will be overwritten. 
fileCounter++; - Assert.assertEquals("test1@3\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@3\n"); } } - Assert.assertEquals(2L, fileCounter); + assertThat(fileCounter).isEqualTo(2L); // the first closes part-0-0 and the second will open part-0-1 testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 4), 4L)); @@ -138,14 +138,14 @@ public void testTruncateAfterRecoveryAndOverwrite() throws Exception { TestUtils.getFileContentByPath(outDir).entrySet()) { if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) { fileCounter++; - Assert.assertEquals("test1@1\ntest1@4\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@4\n"); } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) { // ignored for now as we do not clean up. This will be overwritten. fileCounter++; - Assert.assertEquals("test1@3\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@3\n"); } } - Assert.assertEquals(2L, fileCounter); + assertThat(fileCounter).isEqualTo(2L); testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 5), 5L)); TestUtils.checkLocalFs( @@ -163,7 +163,7 @@ public void testTruncateAfterRecoveryAndOverwrite() throws Exception { TestUtils.getFileContentByPath(outDir).entrySet()) { if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) { fileCounter++; - Assert.assertEquals("test1@1\ntest1@4\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@4\n"); } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) { if (fileContents.getValue().equals("test1@5\ntest1@6\n") || fileContents.getValue().equals("test1@3\n")) { @@ -171,7 +171,7 @@ public void testTruncateAfterRecoveryAndOverwrite() throws Exception { } } } - Assert.assertEquals(3L, fileCounter); + assertThat(fileCounter).isEqualTo(3L); // this will publish part-0-0 testHarness.notifyOfCompletedCheckpoint(2L); @@ -182,7 +182,7 @@ public void testTruncateAfterRecoveryAndOverwrite() throws Exception { TestUtils.getFileContentByPath(outDir).entrySet()) { if (fileContents.getKey().getName().equals("part-0-0")) { fileCounter++; - Assert.assertEquals("test1@1\ntest1@4\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@4\n"); } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) { if (fileContents.getValue().equals("test1@5\ntest1@6\n") || fileContents.getValue().equals("test1@3\n")) { @@ -190,13 +190,13 @@ public void testTruncateAfterRecoveryAndOverwrite() throws Exception { } } } - Assert.assertEquals(3L, fileCounter); + assertThat(fileCounter).isEqualTo(3L); } } @Test - public void testCommitStagedFilesInCorrectOrder() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testCommitStagedFilesInCorrectOrder() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); // we set the max bucket size to small so that we can know when it rolls try (OneInputStreamOperatorTestHarness, Object> testHarness = @@ -247,19 +247,19 @@ public void testCommitStagedFilesInCorrectOrder() throws Exception { TestUtils.getFileContentByPath(outDir).entrySet()) { if (fileContents.getKey().getName().equals("part-0-0")) { fileCounter++; - Assert.assertEquals("test1@1\ntest1@2\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@2\n"); } else if 
(fileContents.getKey().getName().contains(".part-0-1.inprogress")) { fileCounter++; - Assert.assertEquals("test1@3\ntest1@4\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@3\ntest1@4\n"); } else if (fileContents.getKey().getName().contains(".part-0-2.inprogress")) { fileCounter++; - Assert.assertEquals("test1@5\ntest1@6\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@5\ntest1@6\n"); } else if (fileContents.getKey().getName().contains(".part-0-3.inprogress")) { fileCounter++; - Assert.assertEquals("test1@7\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@7\n"); } } - Assert.assertEquals(4L, fileCounter); + assertThat(fileCounter).isEqualTo(4L); testHarness.notifyOfCompletedCheckpoint( 3L); // all the pending for checkpoint 2 and 3 are committed @@ -270,25 +270,25 @@ public void testCommitStagedFilesInCorrectOrder() throws Exception { TestUtils.getFileContentByPath(outDir).entrySet()) { if (fileContents.getKey().getName().equals("part-0-0")) { fileCounter++; - Assert.assertEquals("test1@1\ntest1@2\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@2\n"); } else if (fileContents.getKey().getName().equals("part-0-1")) { fileCounter++; - Assert.assertEquals("test1@3\ntest1@4\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@3\ntest1@4\n"); } else if (fileContents.getKey().getName().equals("part-0-2")) { fileCounter++; - Assert.assertEquals("test1@5\ntest1@6\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@5\ntest1@6\n"); } else if (fileContents.getKey().getName().equals("part-0-3")) { fileCounter++; - Assert.assertEquals("test1@7\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@7\n"); } } - Assert.assertEquals(4L, fileCounter); + assertThat(fileCounter).isEqualTo(4L); } } @Test - public void testInactivityPeriodWithLateNotify() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testInactivityPeriodWithLateNotify() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); // we set a big bucket size so that it does not close by size, but by timers. 
try (OneInputStreamOperatorTestHarness, Object> testHarness = @@ -312,8 +312,8 @@ public void testInactivityPeriodWithLateNotify() throws Exception { bucketCounter++; } } - Assert.assertEquals( - 2L, bucketCounter); // verifies that we have 2 buckets, "test1" and "test2" + assertThat(bucketCounter) + .isEqualTo(2L); // verifies that we have 2 buckets, "test1" and "test2" testHarness.setProcessingTime(101L); // put them in pending TestUtils.checkLocalFs(outDir, 2, 0); @@ -339,19 +339,19 @@ public void testInactivityPeriodWithLateNotify() throws Exception { TestUtils.getFileContentByPath(outDir).entrySet()) { if (fileContents.getKey().getParentFile().getName().equals("test1")) { bucketCounter++; - Assert.assertEquals("part-0-0", fileContents.getKey().getName()); - Assert.assertEquals("test1@1\n", fileContents.getValue()); + assertThat(fileContents.getKey().getName()).isEqualTo("part-0-0"); + assertThat(fileContents.getValue()).isEqualTo("test1@1\n"); } else if (fileContents.getKey().getParentFile().getName().equals("test2")) { bucketCounter++; - Assert.assertEquals("part-0-1", fileContents.getKey().getName()); - Assert.assertEquals("test2@1\n", fileContents.getValue()); + assertThat(fileContents.getKey().getName()).isEqualTo("part-0-1"); + assertThat(fileContents.getValue()).isEqualTo("test2@1\n"); } else if (fileContents.getKey().getParentFile().getName().equals("test3")) { bucketCounter++; } else if (fileContents.getKey().getParentFile().getName().equals("test4")) { bucketCounter++; } } - Assert.assertEquals(4L, bucketCounter); + assertThat(bucketCounter).isEqualTo(4L); testHarness.notifyOfCompletedCheckpoint( 1L); // put the pending for 1 to the "committed" state @@ -362,27 +362,27 @@ public void testInactivityPeriodWithLateNotify() throws Exception { TestUtils.getFileContentByPath(outDir).entrySet()) { if (fileContents.getKey().getParentFile().getName().equals("test1")) { bucketCounter++; - Assert.assertEquals("test1@1\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test1@1\n"); } else if (fileContents.getKey().getParentFile().getName().equals("test2")) { bucketCounter++; - Assert.assertEquals("test2@1\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test2@1\n"); } else if (fileContents.getKey().getParentFile().getName().equals("test3")) { bucketCounter++; - Assert.assertEquals("part-0-2", fileContents.getKey().getName()); - Assert.assertEquals("test3@1\n", fileContents.getValue()); + assertThat(fileContents.getKey().getName()).isEqualTo("part-0-2"); + assertThat(fileContents.getValue()).isEqualTo("test3@1\n"); } else if (fileContents.getKey().getParentFile().getName().equals("test4")) { bucketCounter++; - Assert.assertEquals("part-0-3", fileContents.getKey().getName()); - Assert.assertEquals("test4@1\n", fileContents.getValue()); + assertThat(fileContents.getKey().getName()).isEqualTo("part-0-3"); + assertThat(fileContents.getValue()).isEqualTo("test4@1\n"); } } - Assert.assertEquals(4L, bucketCounter); + assertThat(bucketCounter).isEqualTo(4L); } } @Test - public void testClosingOnSnapshot() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testClosingOnSnapshot() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); try (OneInputStreamOperatorTestHarness, Object> testHarness = TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 2L)) { @@ -419,8 +419,8 @@ public void testClosingOnSnapshot() throws Exception { } @Test - public void testClosingWithCustomizedBucketer() throws 
Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testClosingWithCustomizedBucketer() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); final long partMaxSize = 2L; final long inactivityInterval = 100L; final RollingPolicy, Integer> rollingPolicy = @@ -477,15 +477,15 @@ public void testClosingWithCustomizedBucketer() throws Exception { for (Map.Entry fileContents : contents.entrySet()) { Integer bucketId = Integer.parseInt(fileContents.getKey().getParentFile().getName()); - Assert.assertTrue(bucketId >= 1 && bucketId <= 4); - Assert.assertEquals( - String.format("test%d@%d\n", bucketId, bucketId), fileContents.getValue()); + assertThat(bucketId).isBetween(1, 4); + assertThat(fileContents.getValue()) + .isEqualTo(String.format("test%d@%d\n", bucketId, bucketId)); } } @Test - public void testScalingDownAndMergingOfStates() throws Exception { - final File outDir = TEMP_FOLDER.newFolder(); + void testScalingDownAndMergingOfStates() throws Exception { + final File outDir = TempDirUtils.newFolder(tempFolder); OperatorSubtaskState mergedSnapshot; @@ -524,7 +524,7 @@ public void testScalingDownAndMergingOfStates() throws Exception { counter++; } } - Assert.assertEquals(3L, counter); + assertThat(counter).isEqualTo(3L); // intentionally we snapshot them in the reverse order so that the states are shuffled mergedSnapshot = @@ -560,17 +560,15 @@ public void testScalingDownAndMergingOfStates() throws Exception { // consumed in the initialize state. if (filename.contains("-0.inprogress") || filename.endsWith("-0")) { counter++; - Assert.assertTrue( - fileContents.getValue().equals("test1@1\n") - || fileContents.getValue().equals("test1@0\n")); + assertThat(fileContents.getValue()).isIn("test1@1\n", "test1@0\n"); } } else if (parentFilename.equals("test2") && filename.contains(".part-1-1.inprogress")) { counter++; - Assert.assertEquals("test2@1\n", fileContents.getValue()); + assertThat(fileContents.getValue()).isEqualTo("test2@1\n"); } } - Assert.assertEquals(3L, counter); + assertThat(counter).isEqualTo(3L); } } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/TestUtils.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/TestUtils.java index aba2801d63e35..0a45d7f4f7d16 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/TestUtils.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/TestUtils.java @@ -32,7 +32,6 @@ import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.apache.commons.io.FileUtils; -import org.junit.Assert; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -52,6 +51,7 @@ import java.util.Map; import static org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy.build; +import static org.assertj.core.api.Assertions.assertThat; /** Utilities for the {@link StreamingFileSink} tests. 
*/ public class TestUtils { @@ -245,8 +245,8 @@ static void checkLocalFs(File outDir, int expectedInProgress, int expectedComple } } - Assert.assertEquals(expectedInProgress, inProgress); - Assert.assertEquals(expectedCompleted, finished); + assertThat(inProgress).isEqualTo(expectedInProgress); + assertThat(finished).isEqualTo(expectedCompleted); } static Map getFileContentByPath(File directory) throws IOException { @@ -324,9 +324,9 @@ public byte[] serialize(Integer value) { return bytes; } - public Integer deserialize(int version, byte[] serialized) throws IOException { - Assert.assertEquals(1L, (long) version); - Assert.assertEquals(4L, serialized.length); + public Integer deserialize(int version, byte[] serialized) { + assertThat(version).isOne(); + assertThat(serialized.length).isEqualTo(4); return ByteBuffer.wrap(serialized).order(ByteOrder.LITTLE_ENDIAN).getInt(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/ContinuousFileReaderOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/ContinuousFileReaderOperatorTest.java index b7c1dc9dd1bff..f8779fe9df9d2 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/ContinuousFileReaderOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/ContinuousFileReaderOperatorTest.java @@ -29,43 +29,59 @@ import org.apache.flink.streaming.runtime.tasks.mailbox.Mail; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** {@link ContinuousFileReaderOperator} test. 
*/ -public class ContinuousFileReaderOperatorTest { +class ContinuousFileReaderOperatorTest { - @Test(expected = ExpectedTestException.class) - public void testExceptionRethrownFromClose() throws Exception { + @Test + void testExceptionRethrownFromClose() throws Exception { OneInputStreamOperatorTestHarness harness = createHarness(failingFormat()); harness.getExecutionConfig().setAutoWatermarkInterval(10); harness.setTimeCharacteristic(TimeCharacteristic.IngestionTime); - try (OneInputStreamOperatorTestHarness tester = - harness) { - tester.open(); - } + + assertThatThrownBy( + () -> { + try (OneInputStreamOperatorTestHarness< + TimestampedFileInputSplit, String> + tester = harness) { + tester.open(); + } + }) + .isInstanceOf(ExpectedTestException.class); } - @Test(expected = ExpectedTestException.class) - public void testExceptionRethrownFromProcessElement() throws Exception { + @Test + void testExceptionRethrownFromProcessElement() throws Exception { OneInputStreamOperatorTestHarness harness = createHarness(failingFormat()); harness.getExecutionConfig().setAutoWatermarkInterval(10); harness.setTimeCharacteristic(TimeCharacteristic.IngestionTime); - try (OneInputStreamOperatorTestHarness tester = - harness) { - tester.open(); - tester.processElement( - new StreamRecord<>( - new TimestampedFileInputSplit( - 0L, 1, new Path(), 0L, 0L, new String[] {}))); - for (Mail m : harness.getTaskMailbox().drain()) { - m.run(); - } - fail("should throw from processElement"); - } + + assertThatThrownBy( + () -> { + try (OneInputStreamOperatorTestHarness< + TimestampedFileInputSplit, String> + tester = harness) { + tester.open(); + tester.processElement( + new StreamRecord<>( + new TimestampedFileInputSplit( + 0L, + 1, + new Path(), + 0L, + 0L, + new String[] {}))); + for (Mail m : harness.getTaskMailbox().drain()) { + m.run(); + } + } + }) + .isInstanceOf(ExpectedTestException.class); } private FileInputFormat failingFormat() { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/FileMonitoringFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/FileMonitoringFunctionTest.java index 9781b0f33286b..72e0ccc765a86 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/FileMonitoringFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/FileMonitoringFunctionTest.java @@ -21,13 +21,13 @@ import org.apache.flink.api.java.tuple.Tuple3; import org.apache.flink.streaming.api.watermark.Watermark; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Tests for the {@link org.apache.flink.streaming.api.functions.source.FileMonitoringFunction}. 
*/ -public class FileMonitoringFunctionTest { +class FileMonitoringFunctionTest { @Test - public void testForEmptyLocation() throws Exception { + void testForEmptyLocation() throws Exception { final FileMonitoringFunction fileMonitoringFunction = new FileMonitoringFunction( "?non-existing-path", 1L, FileMonitoringFunction.WatchType.ONLY_NEW_FILES); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/InputFormatSourceFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/InputFormatSourceFunctionTest.java index 5d149c5f2b785..3880969d04b33 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/InputFormatSourceFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/InputFormatSourceFunctionTest.java @@ -39,22 +39,23 @@ import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; import org.apache.flink.streaming.api.watermark.Watermark; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.Collections; +import static org.assertj.core.api.Assertions.assertThat; + /** Tests for {@link InputFormatSourceFunction}. */ -public class InputFormatSourceFunctionTest { +class InputFormatSourceFunctionTest { @Test - public void testNormalOp() throws Exception { + void testNormalOp() throws Exception { testFormatLifecycle(false); } @Test - public void testCancelation() throws Exception { + void testCancelation() throws Exception { testFormatLifecycle(true); } @@ -75,24 +76,24 @@ private void testFormatLifecycle(final boolean midCancel) throws Exception { reader.setRuntimeContext(new MockRuntimeContext(format, noOfSplits, environment)); - Assert.assertTrue(!format.isConfigured); - Assert.assertTrue(!format.isInputFormatOpen); - Assert.assertTrue(!format.isSplitOpen); + assertThat(format.isConfigured).isFalse(); + assertThat(format.isInputFormatOpen).isFalse(); + assertThat(format.isSplitOpen).isFalse(); reader.open(DefaultOpenContext.INSTANCE); - Assert.assertTrue(format.isConfigured); + assertThat(format.isConfigured).isTrue(); TestSourceContext ctx = new TestSourceContext(reader, format, midCancel, cancelAt); reader.run(ctx); int splitsSeen = ctx.getSplitsSeen(); - Assert.assertTrue(midCancel ? splitsSeen == cancelAt : splitsSeen == noOfSplits); + assertThat(midCancel ? 
splitsSeen == cancelAt : splitsSeen == noOfSplits).isTrue(); // we have exhausted the splits so the // format and splits should be closed by now - Assert.assertTrue(!format.isSplitOpen); - Assert.assertTrue(!format.isInputFormatOpen); + assertThat(format.isSplitOpen).isFalse(); + assertThat(format.isInputFormatOpen).isFalse(); } } @@ -113,21 +114,21 @@ private static class LifeCycleTestInputFormat extends RichInputFormat pos) { - assertEquals(expectedData[pos], element); + assertThat(element).isEqualTo(expectedData[pos]); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/datagen/DataGeneratorSourceTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/datagen/DataGeneratorSourceTest.java index 3462063a6f0e4..c853206498b85 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/datagen/DataGeneratorSourceTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/datagen/DataGeneratorSourceTest.java @@ -20,14 +20,13 @@ import org.apache.flink.core.testutils.OneShotLatch; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; -import org.apache.flink.streaming.api.functions.StatefulSequenceSourceTest.BlockingSourceContext; import org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.streaming.api.operators.StreamSource; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; +import org.apache.flink.streaming.util.BlockingSourceContext; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.HashSet; @@ -37,11 +36,13 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Supplier; +import static org.assertj.core.api.Assertions.assertThat; + /** Tests for {@link DataGeneratorSource}. */ public class DataGeneratorSourceTest { @Test - public void testRandomGenerator() throws Exception { + void testRandomGenerator() throws Exception { long min = 10; long max = 20; DataGeneratorSource source = @@ -87,12 +88,12 @@ public void close() {} }); for (Long l : results) { - Assert.assertTrue(l >= min && l <= max); + assertThat(l).isBetween(min, max); } } @Test - public void testSequenceCheckpointRestore() throws Exception { + void testSequenceCheckpointRestore() throws Exception { final int initElement = 0; final int maxElement = 100; final Set expectedOutput = new HashSet<>(); @@ -214,7 +215,7 @@ public static void innerTestDataGenCheckpointRestore( runner3.start(); runner3.join(); - Assert.assertEquals(3, outputCollector.size()); // we have 3 tasks. + assertThat(outputCollector).hasSize(3); // we have 3 tasks. 
// test for at-most-once Set dedupRes = new HashSet<>(expectedOutput.size()); @@ -223,21 +224,17 @@ public static void innerTestDataGenCheckpointRestore( List elements = outputCollector.get(key); // this tests the correctness of the latches in the test - Assert.assertTrue(elements.size() > 0); + assertThat(elements).isNotEmpty(); for (T elem : elements) { - if (!dedupRes.add(elem)) { - Assert.fail("Duplicate entry: " + elem); - } + assertThat(dedupRes.add(elem)).as("Duplicate entry: " + elem).isTrue(); - if (!expectedOutput.contains(elem)) { - Assert.fail("Unexpected element: " + elem); - } + assertThat(expectedOutput).as("Unexpected element: " + elem).contains(elem); } } // test for exactly-once - Assert.assertEquals(expectedOutput.size(), dedupRes.size()); + assertThat(dedupRes).hasSameSizeAs(expectedOutput); latchToWait1.trigger(); latchToWait2.trigger(); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ArrayFromTupleTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ArrayFromTupleTest.java index 673426a9be8c6..f4e5e7c5c6d62 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ArrayFromTupleTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ArrayFromTupleTest.java @@ -44,18 +44,18 @@ import org.apache.flink.api.java.tuple.Tuple8; import org.apache.flink.api.java.tuple.Tuple9; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link Tuple} to {@code Array}. 
*/ -public class ArrayFromTupleTest { +class ArrayFromTupleTest { private String[] testStrings; - @Before - public void init() { + @BeforeEach + void init() { testStrings = new String[Tuple.MAX_ARITY]; for (int i = 0; i < Tuple.MAX_ARITY; i++) { testStrings[i] = Integer.toString(i); @@ -63,8 +63,7 @@ public void init() { } @Test - public void testConvertFromTupleToArray() - throws InstantiationException, IllegalAccessException { + void testConvertFromTupleToArray() throws InstantiationException, IllegalAccessException { for (int i = 0; i < Tuple.MAX_ARITY; i++) { Tuple currentTuple = (Tuple) CLASSES[i].newInstance(); String[] currentArray = new String[i + 1]; @@ -77,7 +76,7 @@ public void testConvertFromTupleToArray() } @Test - public void testUserSpecifiedOrder() throws InstantiationException, IllegalAccessException { + void testUserSpecifiedOrder() throws InstantiationException, IllegalAccessException { Tuple currentTuple = (Tuple) CLASSES[Tuple.MAX_ARITY - 1].newInstance(); for (int i = 0; i < Tuple.MAX_ARITY; i++) { currentTuple.setField(testStrings[i], i); @@ -131,10 +130,7 @@ public void testUserSpecifiedOrder() throws InstantiationException, IllegalAcces } private void arrayEqualityCheck(Object[] array1, Object[] array2) { - assertEquals("The result arrays must have the same length", array1.length, array2.length); - for (int i = 0; i < array1.length; i++) { - assertEquals("Unequal fields at position " + i, array1[i], array2[i]); - } + assertThat(array1).isEqualTo(array2); } private static final Class[] CLASSES = diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ConcatenatedExtractTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ConcatenatedExtractTest.java index 2fc9be4b69b90..dac243e40fbcc 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ConcatenatedExtractTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/ConcatenatedExtractTest.java @@ -19,13 +19,13 @@ import org.apache.flink.api.java.tuple.Tuple2; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link ConcatenatedExtract}. 
*/ -public class ConcatenatedExtractTest { +class ConcatenatedExtractTest { private String[] testStringArray1 = {"1", "2", "3"}; private int[] testIntArray1 = {1, 2, 3}; @@ -38,34 +38,32 @@ public class ConcatenatedExtractTest { private Tuple2, Tuple2[]> testData; @SuppressWarnings("unchecked") - @Before - public void setupData() { + @BeforeEach + void setupData() { testTuple2Array = new Tuple2[2]; - testTuple2Array[0] = new Tuple2(testStringArray1, testIntArray2); - testTuple2Array[1] = new Tuple2(testStringArray2, testIntArray1); + testTuple2Array[0] = new Tuple2<>(testStringArray1, testIntArray2); + testTuple2Array[1] = new Tuple2<>(testStringArray2, testIntArray1); - testTuple2 = new Tuple2(testStringArray3, testIntArray3); + testTuple2 = new Tuple2<>(testStringArray3, testIntArray3); - testData = - new Tuple2, Tuple2[]>( - testTuple2, testTuple2Array); + testData = new Tuple2<>(testTuple2, testTuple2Array); } @SuppressWarnings({"rawtypes", "unchecked"}) @Test - public void test1() { + void test1() { Extractor ext = new ConcatenatedExtract(new FieldFromTuple(0), new FieldFromTuple(1)) .add(new FieldsFromArray(Integer.class, 2, 1, 0)); int[] expected = {testIntArray3[2], testIntArray3[1], testIntArray3[0]}; - assertEquals(new Integer(expected[0]), ((Integer[]) ext.extract(testData))[0]); - assertEquals(new Integer(expected[1]), ((Integer[]) ext.extract(testData))[1]); - assertEquals(new Integer(expected[2]), ((Integer[]) ext.extract(testData))[2]); + assertThat(((Integer[]) ext.extract(testData))[0]).isEqualTo(expected[0]); + assertThat(((Integer[]) ext.extract(testData))[1]).isEqualTo(expected[1]); + assertThat(((Integer[]) ext.extract(testData))[2]).isEqualTo(expected[2]); } @SuppressWarnings({"unchecked", "rawtypes"}) @Test - public void test2() { + void test2() { Extractor ext = new ConcatenatedExtract( new FieldFromTuple(1), // Tuple2[] @@ -76,6 +74,6 @@ public void test2() { .add(new FieldFromArray(1)); // String String expected2 = testStringArray2[1]; - assertEquals(expected2, ext.extract(testData)); + assertThat(ext.extract(testData)).isEqualTo(expected2); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromArrayTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromArrayTest.java index a3bca1fe1d05f..ae6261fad3986 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromArrayTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromArrayTest.java @@ -17,41 +17,38 @@ package org.apache.flink.streaming.api.functions.windowing.delta.extractor; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link FieldFromArray}. 
*/ -public class FieldFromArrayTest { +class FieldFromArrayTest { String[] testStringArray = {"0", "1", "2", "3", "4"}; Integer[] testIntegerArray = {10, 11, 12, 13, 14}; int[] testIntArray = {20, 21, 22, 23, 24}; @Test - public void testStringArray() { + void testStringArray() { for (int i = 0; i < this.testStringArray.length; i++) { - assertEquals( - this.testStringArray[i], - new FieldFromArray(i).extract(testStringArray)); + assertThat(new FieldFromArray(i).extract(testStringArray)) + .isEqualTo(testStringArray[i]); } } @Test - public void testIntegerArray() { + void testIntegerArray() { for (int i = 0; i < this.testIntegerArray.length; i++) { - assertEquals( - this.testIntegerArray[i], - new FieldFromArray(i).extract(testIntegerArray)); + assertThat(new FieldFromArray(i).extract(testIntegerArray)) + .isEqualTo(testIntegerArray[i]); } } @Test - public void testIntArray() { + void testIntArray() { for (int i = 0; i < this.testIntArray.length; i++) { - assertEquals( - new Integer(this.testIntArray[i]), - new FieldFromArray(i).extract(testIntArray)); + assertThat(new FieldFromArray(i).extract(testIntArray)) + .isEqualTo(new Integer(testIntArray[i])); } } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromTupleTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromTupleTest.java index c38fe7a1d07d0..01e5b7b6111bf 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromTupleTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldFromTupleTest.java @@ -44,18 +44,18 @@ import org.apache.flink.api.java.tuple.Tuple8; import org.apache.flink.api.java.tuple.Tuple9; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link FieldFromTuple}. 
*/ -public class FieldFromTupleTest { +class FieldFromTupleTest { private String[] testStrings; - @Before - public void init() { + @BeforeEach + void init() { testStrings = new String[Tuple.MAX_ARITY]; for (int i = 0; i < Tuple.MAX_ARITY; i++) { testStrings[i] = Integer.toString(i); @@ -63,7 +63,7 @@ public void init() { } @Test - public void testSingleFieldExtraction() throws InstantiationException, IllegalAccessException { + void testSingleFieldExtraction() throws InstantiationException, IllegalAccessException { // extract single fields for (int i = 0; i < Tuple.MAX_ARITY; i++) { Tuple current = (Tuple) CLASSES[i].newInstance(); @@ -71,7 +71,8 @@ public void testSingleFieldExtraction() throws InstantiationException, IllegalAc current.setField(testStrings[j], j); } for (int j = 0; j < i; j++) { - assertEquals(testStrings[j], new FieldFromTuple(j).extract(current)); + assertThat(new FieldFromTuple(j).extract(current)) + .isEqualTo(testStrings[j]); } } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromArrayTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromArrayTest.java index 0df8b7f42933c..8fb7336835a4b 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromArrayTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromArrayTest.java @@ -17,24 +17,24 @@ package org.apache.flink.streaming.api.functions.windowing.delta.extractor; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link FieldsFromArray}. 
*/ -public class FieldsFromArrayTest { +class FieldsFromArrayTest { String[] testStringArray = {"0", "1", "2", "3", "4"}; Integer[] testIntegerArray = {10, 11, 12, 13, 14}; int[] testIntArray = {20, 21, 22, 23, 24}; @Test - public void testStringArray() { + void testStringArray() { // check single field extraction for (int i = 0; i < testStringArray.length; i++) { String[] tmp = {testStringArray[i]}; arrayEqualityCheck( - tmp, new FieldsFromArray(String.class, i).extract(testStringArray)); + tmp, new FieldsFromArray<>(String.class, i).extract(testStringArray)); } // check reverse order @@ -44,22 +44,21 @@ public void testStringArray() { } arrayEqualityCheck( reverseOrder, - new FieldsFromArray(String.class, 4, 3, 2, 1, 0).extract(testStringArray)); + new FieldsFromArray<>(String.class, 4, 3, 2, 1, 0).extract(testStringArray)); // check picking fields and reorder String[] crazyOrder = {testStringArray[4], testStringArray[1], testStringArray[2]}; arrayEqualityCheck( - crazyOrder, - new FieldsFromArray(String.class, 4, 1, 2).extract(testStringArray)); + crazyOrder, new FieldsFromArray<>(String.class, 4, 1, 2).extract(testStringArray)); } @Test - public void testIntegerArray() { + void testIntegerArray() { // check single field extraction for (int i = 0; i < testIntegerArray.length; i++) { Integer[] tmp = {testIntegerArray[i]}; arrayEqualityCheck( - tmp, new FieldsFromArray(Integer.class, i).extract(testIntegerArray)); + tmp, new FieldsFromArray<>(Integer.class, i).extract(testIntegerArray)); } // check reverse order @@ -69,22 +68,20 @@ public void testIntegerArray() { } arrayEqualityCheck( reverseOrder, - new FieldsFromArray(Integer.class, 4, 3, 2, 1, 0) - .extract(testIntegerArray)); + new FieldsFromArray<>(Integer.class, 4, 3, 2, 1, 0).extract(testIntegerArray)); // check picking fields and reorder Integer[] crazyOrder = {testIntegerArray[4], testIntegerArray[1], testIntegerArray[2]}; arrayEqualityCheck( crazyOrder, - new FieldsFromArray(Integer.class, 4, 1, 2).extract(testIntegerArray)); + new FieldsFromArray<>(Integer.class, 4, 1, 2).extract(testIntegerArray)); } @Test - public void testIntArray() { + void testIntArray() { for (int i = 0; i < testIntArray.length; i++) { Integer[] tmp = {testIntArray[i]}; - arrayEqualityCheck( - tmp, new FieldsFromArray(Integer.class, i).extract(testIntArray)); + arrayEqualityCheck(tmp, new FieldsFromArray<>(Integer.class, i).extract(testIntArray)); } // check reverse order @@ -94,19 +91,15 @@ public void testIntArray() { } arrayEqualityCheck( reverseOrder, - new FieldsFromArray(Integer.class, 4, 3, 2, 1, 0).extract(testIntArray)); + new FieldsFromArray<>(Integer.class, 4, 3, 2, 1, 0).extract(testIntArray)); // check picking fields and reorder Integer[] crazyOrder = {testIntArray[4], testIntArray[1], testIntArray[2]}; arrayEqualityCheck( - crazyOrder, - new FieldsFromArray(Integer.class, 4, 1, 2).extract(testIntArray)); + crazyOrder, new FieldsFromArray<>(Integer.class, 4, 1, 2).extract(testIntArray)); } private void arrayEqualityCheck(Object[] array1, Object[] array2) { - assertEquals("The result arrays must have the same length", array1.length, array2.length); - for (int i = 0; i < array1.length; i++) { - assertEquals("Unequal fields at position " + i, array1[i], array2[i]); - } + assertThat(array1).isEqualTo(array2); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromTupleTest.java 
b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromTupleTest.java index d99c6326c709f..868a8c05f4391 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromTupleTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/windowing/delta/extractor/FieldsFromTupleTest.java @@ -44,18 +44,18 @@ import org.apache.flink.api.java.tuple.Tuple8; import org.apache.flink.api.java.tuple.Tuple9; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link FieldsFromTuple}. */ -public class FieldsFromTupleTest { +class FieldsFromTupleTest { private double[] testDouble; - @Before - public void init() { + @BeforeEach + void init() { testDouble = new double[Tuple.MAX_ARITY]; for (int i = 0; i < Tuple.MAX_ARITY; i++) { testDouble[i] = i; @@ -63,7 +63,7 @@ public void init() { } @Test - public void testUserSpecifiedOrder() throws InstantiationException, IllegalAccessException { + void testUserSpecifiedOrder() throws InstantiationException, IllegalAccessException { Tuple currentTuple = (Tuple) CLASSES[Tuple.MAX_ARITY - 1].newInstance(); for (int i = 0; i < Tuple.MAX_ARITY; i++) { currentTuple.setField(testDouble[i], i); @@ -117,10 +117,7 @@ public void testUserSpecifiedOrder() throws InstantiationException, IllegalAcces } private void arrayEqualityCheck(double[] array1, double[] array2) { - assertEquals("The result arrays must have the same length", array1.length, array2.length); - for (int i = 0; i < array1.length; i++) { - assertEquals("Unequal fields at position " + i, array1[i], array2[i], 0d); - } + assertThat(array1).isEqualTo(array2); } private static final Class[] CLASSES = diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkTransformationTranslatorITCaseBase.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkTransformationTranslatorITCaseBase.java index 3c93b178b518e..62157911abe7c 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkTransformationTranslatorITCaseBase.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkTransformationTranslatorITCaseBase.java @@ -31,36 +31,34 @@ import org.apache.flink.streaming.api.operators.StreamOperatorFactory; import org.apache.flink.streaming.runtime.operators.sink.CommitterOperatorFactory; import org.apache.flink.streaming.runtime.operators.sink.SinkWriterOperatorFactory; -import org.apache.flink.util.TestLogger; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameter; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameters; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; import java.util.Arrays; import java.util.Collection; import java.util.function.Predicate; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * Tests for {@link org.apache.flink.streaming.api.transformations.SinkTransformation}. * *
<p>
ATTENTION: This test is extremely brittle. Do NOT remove, add or re-order test cases. */ -@RunWith(Parameterized.class) -public abstract class SinkTransformationTranslatorITCaseBase extends TestLogger { +@ExtendWith(ParameterizedTestExtension.class) +abstract class SinkTransformationTranslatorITCaseBase { - @Parameterized.Parameters(name = "Execution Mode: {0}") - public static Collection data() { + @Parameters(name = "Execution Mode: {0}") + private static Collection data() { return Arrays.asList(RuntimeExecutionMode.STREAMING, RuntimeExecutionMode.BATCH); } - @Parameterized.Parameter() public RuntimeExecutionMode runtimeExecutionMode; + @Parameter protected RuntimeExecutionMode runtimeExecutionMode; static final String NAME = "FileSink"; static final String SLOT_SHARE_GROUP = "FileGroup"; @@ -73,14 +71,14 @@ public static Collection data() { abstract DataStreamSink sinkTo(DataStream stream, SinkT sink); - @Test - public void generateWriterTopology() { + @TestTemplate + void generateWriterTopology() { final StreamGraph streamGraph = buildGraph(simpleSink(), runtimeExecutionMode); final StreamNode sourceNode = findNodeName(streamGraph, node -> node.contains("Source")); final StreamNode writerNode = findWriter(streamGraph); - assertThat(streamGraph.getStreamNodes().size(), equalTo(2)); + assertThat(streamGraph.getStreamNodes()).hasSize(2); validateTopology( sourceNode, @@ -91,8 +89,8 @@ public void generateWriterTopology() { -1); } - @Test - public void generateWriterCommitterTopology() { + @TestTemplate + void generateWriterCommitterTopology() { final StreamGraph streamGraph = buildGraph(sinkWithCommitter(), runtimeExecutionMode); @@ -110,7 +108,7 @@ public void generateWriterCommitterTopology() { final StreamNode committerNode = findNodeName(streamGraph, name -> name.contains("Committer")); - assertThat(streamGraph.getStreamNodes().size(), equalTo(3)); + assertThat(streamGraph.getStreamNodes()).hasSize(3); validateTopology( writerNode, @@ -136,8 +134,8 @@ StreamNode findGlobalCommitter(StreamGraph streamGraph) { return findNodeName(streamGraph, name -> name.contains("Global Committer")); } - @Test(expected = IllegalStateException.class) - public void throwExceptionWithoutSettingUid() { + @TestTemplate + void throwExceptionWithoutSettingUid() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final Configuration config = new Configuration(); @@ -146,11 +144,11 @@ public void throwExceptionWithoutSettingUid() { // disable auto generating uid env.getConfig().disableAutoGeneratedUIDs(); sinkTo(env.fromElements(1, 2), simpleSink()); - env.getStreamGraph(); + assertThatThrownBy(env::getStreamGraph).isInstanceOf(IllegalStateException.class); } - @Test - public void disableOperatorChain() { + @TestTemplate + void disableOperatorChain() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStreamSource src = env.fromElements(1, 2); @@ -161,9 +159,10 @@ public void disableOperatorChain() { final StreamNode writer = findWriter(streamGraph); final StreamNode committer = findCommitter(streamGraph); - assertThat(writer.getOperatorFactory().getChainingStrategy(), is(ChainingStrategy.NEVER)); - assertThat( - committer.getOperatorFactory().getChainingStrategy(), is(ChainingStrategy.NEVER)); + assertThat(writer.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.NEVER); + assertThat(committer.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.NEVER); } void validateTopology( @@ 
-176,23 +175,24 @@ void validateTopology( // verify src node final StreamEdge srcOutEdge = src.getOutEdges().get(0); - assertThat(srcOutEdge.getTargetId(), equalTo(dest.getId())); - assertThat(src.getTypeSerializerOut(), instanceOf(srcOutTypeInfo)); + assertThat(srcOutEdge.getTargetId()).isEqualTo(dest.getId()); + assertThat(src.getTypeSerializerOut()).isInstanceOf(srcOutTypeInfo); // verify dest node input final StreamEdge destInputEdge = dest.getInEdges().get(0); - assertThat(destInputEdge.getSourceId(), equalTo(src.getId())); - assertThat(dest.getTypeSerializersIn()[0], instanceOf(srcOutTypeInfo)); + assertThat(destInputEdge.getTargetId()).isEqualTo(dest.getId()); + assertThat(dest.getTypeSerializersIn()[0]).isInstanceOf(srcOutTypeInfo); // make sure 2 sink operators have different names/uid - assertThat(dest.getOperatorName(), not(equalTo(src.getOperatorName()))); - assertThat(dest.getTransformationUID(), not(equalTo(src.getTransformationUID()))); - - assertThat(dest.getOperatorFactory(), instanceOf(operatorFactoryClass)); - assertThat(dest.getParallelism(), equalTo(expectedParallelism)); - assertThat(dest.getMaxParallelism(), equalTo(expectedMaxParallelism)); - assertThat(dest.getOperatorFactory().getChainingStrategy(), is(ChainingStrategy.ALWAYS)); - assertThat(dest.getSlotSharingGroup(), equalTo(SLOT_SHARE_GROUP)); + assertThat(dest.getOperatorName()).isNotEqualTo(src.getOperatorName()); + assertThat(dest.getTransformationUID()).isNotEqualTo(src.getTransformationUID()); + + assertThat(dest.getOperatorFactory()).isInstanceOf(operatorFactoryClass); + assertThat(dest.getParallelism()).isEqualTo(expectedParallelism); + assertThat(dest.getMaxParallelism()).isEqualTo(expectedMaxParallelism); + assertThat(dest.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.ALWAYS); + assertThat(dest.getSlotSharingGroup()).isEqualTo(SLOT_SHARE_GROUP); } StreamGraph buildGraph(SinkT sink, RuntimeExecutionMode runtimeExecutionMode) { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV1TransformationTranslatorITCase.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV1TransformationTranslatorITCase.java index d1cd770b7fb3a..8f4ddf773898c 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV1TransformationTranslatorITCase.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV1TransformationTranslatorITCase.java @@ -31,22 +31,20 @@ import org.apache.flink.streaming.runtime.operators.sink.CommitterOperatorFactory; import org.apache.flink.streaming.runtime.operators.sink.SinkWriterOperatorFactory; import org.apache.flink.streaming.runtime.operators.sink.TestSink; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link org.apache.flink.streaming.api.transformations.SinkTransformation}. * *
<p>
ATTENTION: This test is extremely brittle. Do NOT remove, add or re-order test cases. */ -@RunWith(Parameterized.class) -public class SinkV1TransformationTranslatorITCase +@ExtendWith(ParameterizedTestExtension.class) +class SinkV1TransformationTranslatorITCase extends SinkTransformationTranslatorITCaseBase> { @Override @@ -64,8 +62,8 @@ DataStreamSink sinkTo(DataStream stream, Sink src = env.fromData(1, 2); final String writerHash = "f6b178ce445dc3ffaa06bad27a51fead"; @@ -185,17 +184,17 @@ public void testSettingOperatorUidHash() { final StreamGraph streamGraph = env.getStreamGraph(); - assertEquals(findWriter(streamGraph).getUserHash(), writerHash); - assertEquals(findCommitter(streamGraph).getUserHash(), committerHash); - assertEquals(findGlobalCommitter(streamGraph).getUserHash(), globalCommitterHash); + assertThat(findWriter(streamGraph).getUserHash()).isEqualTo(writerHash); + assertThat(findCommitter(streamGraph).getUserHash()).isEqualTo(committerHash); + assertThat(findGlobalCommitter(streamGraph).getUserHash()).isEqualTo(globalCommitterHash); } /** * When ever you need to change something in this test case please think about possible state * upgrade problems introduced by your changes. */ - @Test - public void testSettingOperatorUids() { + @TestTemplate + void testSettingOperatorUids() { final String sinkUid = "f6b178ce445dc3ffaa06bad27a51fead"; final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStreamSource src = env.fromData(1, 2); @@ -204,12 +203,10 @@ public void testSettingOperatorUids() { .uid(sinkUid); final StreamGraph streamGraph = env.getStreamGraph(); - assertEquals(findWriter(streamGraph).getTransformationUID(), sinkUid); - assertEquals( - findCommitter(streamGraph).getTransformationUID(), - String.format("Sink Committer: %s", sinkUid)); - assertEquals( - findGlobalCommitter(streamGraph).getTransformationUID(), - String.format("Sink %s Global Committer", sinkUid)); + assertThat(findWriter(streamGraph).getTransformationUID()).isEqualTo(sinkUid); + assertThat(findCommitter(streamGraph).getTransformationUID()) + .isEqualTo(String.format("Sink Committer: %s", sinkUid)); + assertThat(findGlobalCommitter(streamGraph).getTransformationUID()) + .isEqualTo(String.format("Sink %s Global Committer", sinkUid)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorDeprecatedITCase.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorDeprecatedITCase.java index 9456b4f45a09c..20cd837dc27f7 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorDeprecatedITCase.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorDeprecatedITCase.java @@ -25,12 +25,12 @@ import org.apache.flink.streaming.api.datastream.DataStreamSource; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.runtime.operators.sink.deprecated.TestSinkV2; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link 
org.apache.flink.streaming.api.transformations.SinkTransformation}. @@ -41,8 +41,8 @@ * org.apache.flink.api.connector.sink2.TwoPhaseCommittingSink}. */ @Deprecated -@RunWith(Parameterized.class) -public class SinkV2TransformationTranslatorDeprecatedITCase +@ExtendWith(ParameterizedTestExtension.class) +class SinkV2TransformationTranslatorDeprecatedITCase extends SinkTransformationTranslatorITCaseBase> { @Override @@ -60,8 +60,8 @@ DataStreamSink sinkTo(DataStream stream, Sink sink) { return stream.sinkTo(sink); } - @Test - public void testSettingOperatorUidHash() { + @TestTemplate + void testSettingOperatorUidHash() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStreamSource src = env.fromElements(1, 2); final String writerHash = "f6b178ce445dc3ffaa06bad27a51fead"; @@ -75,25 +75,24 @@ public void testSettingOperatorUidHash() { final StreamGraph streamGraph = env.getStreamGraph(); - assertEquals(findWriter(streamGraph).getUserHash(), writerHash); - assertEquals(findCommitter(streamGraph).getUserHash(), committerHash); + assertThat(findWriter(streamGraph).getUserHash()).isEqualTo(writerHash); + assertThat(findCommitter(streamGraph).getUserHash()).isEqualTo(committerHash); } /** * When ever you need to change something in this test case please think about possible state * upgrade problems introduced by your changes. */ - @Test - public void testSettingOperatorUids() { + @TestTemplate + void testSettingOperatorUids() { final String sinkUid = "f6b178ce445dc3ffaa06bad27a51fead"; final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStreamSource src = env.fromElements(1, 2); src.sinkTo(sinkWithCommitter()).name(NAME).uid(sinkUid); final StreamGraph streamGraph = env.getStreamGraph(); - assertEquals(findWriter(streamGraph).getTransformationUID(), sinkUid); - assertEquals( - findCommitter(streamGraph).getTransformationUID(), - String.format("Sink Committer: %s", sinkUid)); + assertThat(findWriter(streamGraph).getTransformationUID()).isEqualTo(sinkUid); + assertThat(findCommitter(streamGraph).getTransformationUID()) + .isEqualTo(String.format("Sink Committer: %s", sinkUid)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorITCase.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorITCase.java index 97b23ababaca0..6b09b83961dec 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorITCase.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SinkV2TransformationTranslatorITCase.java @@ -25,20 +25,20 @@ import org.apache.flink.streaming.api.datastream.DataStreamSource; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.runtime.operators.sink.TestSinkV2; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link org.apache.flink.streaming.api.transformations.SinkTransformation}. * *
<p>
ATTENTION: This test is extremely brittle. Do NOT remove, add or re-order test cases. */ -@RunWith(Parameterized.class) -public class SinkV2TransformationTranslatorITCase +@ExtendWith(ParameterizedTestExtension.class) +class SinkV2TransformationTranslatorITCase extends SinkTransformationTranslatorITCaseBase> { @Override @@ -56,8 +56,8 @@ DataStreamSink sinkTo(DataStream stream, Sink sink) { return stream.sinkTo(sink); } - @Test - public void testSettingOperatorUidHash() { + @TestTemplate + void testSettingOperatorUidHash() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStreamSource src = env.fromElements(1, 2); final String writerHash = "f6b178ce445dc3ffaa06bad27a51fead"; @@ -71,25 +71,24 @@ public void testSettingOperatorUidHash() { final StreamGraph streamGraph = env.getStreamGraph(); - assertEquals(findWriter(streamGraph).getUserHash(), writerHash); - assertEquals(findCommitter(streamGraph).getUserHash(), committerHash); + assertThat(findWriter(streamGraph).getUserHash()).isEqualTo(writerHash); + assertThat(findCommitter(streamGraph).getUserHash()).isEqualTo(committerHash); } /** * When ever you need to change something in this test case please think about possible state * upgrade problems introduced by your changes. */ - @Test - public void testSettingOperatorUids() { + @TestTemplate + void testSettingOperatorUids() { final String sinkUid = "f6b178ce445dc3ffaa06bad27a51fead"; final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStreamSource src = env.fromElements(1, 2); src.sinkTo(sinkWithCommitter()).name(NAME).uid(sinkUid); final StreamGraph streamGraph = env.getStreamGraph(); - assertEquals(findWriter(streamGraph).getTransformationUID(), sinkUid); - assertEquals( - findCommitter(streamGraph).getTransformationUID(), - String.format("Sink Committer: %s", sinkUid)); + assertThat(findWriter(streamGraph).getTransformationUID()).isEqualTo(sinkUid); + assertThat(findCommitter(streamGraph).getTransformationUID()) + .isEqualTo(String.format("Sink Committer: %s", sinkUid)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SlotAllocationTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SlotAllocationTest.java index 84a20f50e7c42..7fdd7e2708378 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SlotAllocationTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/SlotAllocationTest.java @@ -23,14 +23,12 @@ import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.co.CoMapFunction; -import org.apache.flink.util.TestLogger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.List; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * This verifies that slot sharing groups are correctly forwarded from user job to JobGraph. @@ -39,43 +37,37 @@ * sharing groups. 
*/ @SuppressWarnings("serial") -public class SlotAllocationTest extends TestLogger { +class SlotAllocationTest { + + private static final FilterFunction DUMMY_FILTER = value -> false; @Test - public void testTwoPipelines() { + void testTwoPipelines() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - FilterFunction dummyFilter = - new FilterFunction() { - @Override - public boolean filter(Long value) { - return false; - } - }; - env.fromSequence(1, 10) - .filter(dummyFilter) + .filter(DUMMY_FILTER) .slotSharingGroup("isolated") - .filter(dummyFilter) + .filter(DUMMY_FILTER) .slotSharingGroup("default") .disableChaining() - .filter(dummyFilter) + .filter(DUMMY_FILTER) .slotSharingGroup("group 1") - .filter(dummyFilter) + .filter(DUMMY_FILTER) .startNewChain() .print() .disableChaining(); // verify that a second pipeline does not inherit the groups from the first pipeline env.fromSequence(1, 10) - .filter(dummyFilter) + .filter(DUMMY_FILTER) .slotSharingGroup("isolated-2") - .filter(dummyFilter) + .filter(DUMMY_FILTER) .slotSharingGroup("default") .disableChaining() - .filter(dummyFilter) + .filter(DUMMY_FILTER) .slotSharingGroup("group 2") - .filter(dummyFilter) + .filter(DUMMY_FILTER) .startNewChain() .print() .disableChaining(); @@ -84,105 +76,87 @@ public boolean filter(Long value) { List vertices = jobGraph.getVerticesSortedTopologicallyFromSources(); - assertEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(3).getSlotSharingGroup()); - assertNotEquals( - vertices.get(0).getSlotSharingGroup(), vertices.get(2).getSlotSharingGroup()); - assertNotEquals( - vertices.get(3).getSlotSharingGroup(), vertices.get(4).getSlotSharingGroup()); - assertEquals(vertices.get(4).getSlotSharingGroup(), vertices.get(5).getSlotSharingGroup()); - assertEquals(vertices.get(5).getSlotSharingGroup(), vertices.get(6).getSlotSharingGroup()); + assertThat(vertices.get(0).getSlotSharingGroup()) + .isEqualTo(vertices.get(3).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(2).getSlotSharingGroup()); + assertThat(vertices.get(3).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(4).getSlotSharingGroup()); + assertThat(vertices.get(4).getSlotSharingGroup()) + .isEqualTo(vertices.get(5).getSlotSharingGroup()); + assertThat(vertices.get(5).getSlotSharingGroup()) + .isEqualTo(vertices.get(6).getSlotSharingGroup()); int pipelineStart = 6; - assertEquals( - vertices.get(1).getSlotSharingGroup(), - vertices.get(pipelineStart + 2).getSlotSharingGroup()); - assertNotEquals( - vertices.get(1).getSlotSharingGroup(), - vertices.get(pipelineStart + 1).getSlotSharingGroup()); - assertNotEquals( - vertices.get(pipelineStart + 2).getSlotSharingGroup(), - vertices.get(pipelineStart + 3).getSlotSharingGroup()); - assertEquals( - vertices.get(pipelineStart + 3).getSlotSharingGroup(), - vertices.get(pipelineStart + 4).getSlotSharingGroup()); - assertEquals( - vertices.get(pipelineStart + 4).getSlotSharingGroup(), - vertices.get(pipelineStart + 5).getSlotSharingGroup()); + assertThat(vertices.get(1).getSlotSharingGroup()) + .isEqualTo(vertices.get(pipelineStart + 2).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(pipelineStart + 1).getSlotSharingGroup()); + assertThat(vertices.get(pipelineStart + 2).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(pipelineStart + 3).getSlotSharingGroup()); + assertThat(vertices.get(pipelineStart + 3).getSlotSharingGroup()) + .isEqualTo(vertices.get(pipelineStart + 4).getSlotSharingGroup()); + assertThat(vertices.get(pipelineStart + 
4).getSlotSharingGroup()) + .isEqualTo(vertices.get(pipelineStart + 5).getSlotSharingGroup()); } @Test - public void testUnion() { + void testUnion() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - FilterFunction dummyFilter = - new FilterFunction() { - @Override - public boolean filter(Long value) { - return false; - } - }; - DataStream src1 = env.fromSequence(1, 10); DataStream src2 = env.fromSequence(1, 10).slotSharingGroup("src-1"); // this should not inherit group "src-1" - src1.union(src2).filter(dummyFilter); + src1.union(src2).filter(DUMMY_FILTER); DataStream src3 = env.fromSequence(1, 10).slotSharingGroup("group-1"); DataStream src4 = env.fromSequence(1, 10).slotSharingGroup("group-1"); // this should inherit "group-1" now - src3.union(src4).filter(dummyFilter); + src3.union(src4).filter(DUMMY_FILTER); JobGraph jobGraph = env.getStreamGraph().getJobGraph(); List vertices = jobGraph.getVerticesSortedTopologicallyFromSources(); // first pipeline - assertEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(4).getSlotSharingGroup()); - assertNotEquals( - vertices.get(0).getSlotSharingGroup(), vertices.get(1).getSlotSharingGroup()); - assertNotEquals( - vertices.get(1).getSlotSharingGroup(), vertices.get(4).getSlotSharingGroup()); + assertThat(vertices.get(0).getSlotSharingGroup()) + .isEqualTo(vertices.get(4).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(1).getSlotSharingGroup()); + assertThat(vertices.get(1).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(4).getSlotSharingGroup()); // second pipeline - assertEquals(vertices.get(2).getSlotSharingGroup(), vertices.get(3).getSlotSharingGroup()); - assertEquals(vertices.get(2).getSlotSharingGroup(), vertices.get(5).getSlotSharingGroup()); - assertEquals(vertices.get(3).getSlotSharingGroup(), vertices.get(5).getSlotSharingGroup()); + assertThat(vertices.get(2).getSlotSharingGroup()) + .isEqualTo(vertices.get(3).getSlotSharingGroup()) + .isEqualTo(vertices.get(5).getSlotSharingGroup()); + assertThat(vertices.get(3).getSlotSharingGroup()) + .isEqualTo(vertices.get(5).getSlotSharingGroup()); } @Test - public void testInheritOverride() { + void testInheritOverride() { // verify that we can explicitly disable inheritance of the input slot sharing groups StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - FilterFunction dummyFilter = - new FilterFunction() { - @Override - public boolean filter(Long value) { - return false; - } - }; - DataStream src1 = env.fromSequence(1, 10).slotSharingGroup("group-1"); DataStream src2 = env.fromSequence(1, 10).slotSharingGroup("group-1"); // this should not inherit group but be in "default" - src1.union(src2).filter(dummyFilter).slotSharingGroup("default"); + src1.union(src2).filter(DUMMY_FILTER).slotSharingGroup("default"); JobGraph jobGraph = env.getStreamGraph().getJobGraph(); List vertices = jobGraph.getVerticesSortedTopologicallyFromSources(); - assertEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(1).getSlotSharingGroup()); - assertNotEquals( - vertices.get(0).getSlotSharingGroup(), vertices.get(2).getSlotSharingGroup()); - assertNotEquals( - vertices.get(1).getSlotSharingGroup(), vertices.get(2).getSlotSharingGroup()); + assertThat(vertices.get(0).getSlotSharingGroup()) + .isEqualTo(vertices.get(1).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(2).getSlotSharingGroup()); + assertThat(vertices.get(1).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(2).getSlotSharingGroup()); } @Test - public 
void testCoOperation() { + void testCoOperation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); CoMapFunction dummyCoMap = @@ -215,15 +189,17 @@ public Long map2(Long value) throws Exception { List vertices = jobGraph.getVerticesSortedTopologicallyFromSources(); // first pipeline - assertEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(4).getSlotSharingGroup()); - assertNotEquals( - vertices.get(0).getSlotSharingGroup(), vertices.get(1).getSlotSharingGroup()); - assertNotEquals( - vertices.get(1).getSlotSharingGroup(), vertices.get(4).getSlotSharingGroup()); + assertThat(vertices.get(0).getSlotSharingGroup()) + .isEqualTo(vertices.get(4).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(1).getSlotSharingGroup()); + assertThat(vertices.get(1).getSlotSharingGroup()) + .isNotEqualTo(vertices.get(4).getSlotSharingGroup()); // second pipeline - assertEquals(vertices.get(2).getSlotSharingGroup(), vertices.get(3).getSlotSharingGroup()); - assertEquals(vertices.get(2).getSlotSharingGroup(), vertices.get(5).getSlotSharingGroup()); - assertEquals(vertices.get(3).getSlotSharingGroup(), vertices.get(5).getSlotSharingGroup()); + assertThat(vertices.get(2).getSlotSharingGroup()) + .isEqualTo(vertices.get(3).getSlotSharingGroup()) + .isEqualTo(vertices.get(5).getSlotSharingGroup()); + assertThat(vertices.get(3).getSlotSharingGroup()) + .isEqualTo(vertices.get(5).getSlotSharingGroup()); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorBatchExecutionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorBatchExecutionTest.java index ab64dcc740587..5dc5fc4de1491 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorBatchExecutionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorBatchExecutionTest.java @@ -60,11 +60,8 @@ import org.apache.flink.streaming.api.transformations.SourceTransformation; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.util.Collector; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Collections; @@ -72,24 +69,17 @@ import java.util.List; import java.util.Map; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * Tests for generating correct properties for sorting inputs in {@link RuntimeExecutionMode#BATCH} * runtime mode. 
*/ -public class StreamGraphGeneratorBatchExecutionTest extends TestLogger { - - @Rule public ExpectedException expectedException = ExpectedException.none(); +class StreamGraphGeneratorBatchExecutionTest { @Test - public void testShuffleMode() { + void testShuffleMode() { testGlobalStreamExchangeMode( RuntimeExecutionMode.AUTOMATIC, BatchShuffleMode.ALL_EXCHANGES_BLOCKING, @@ -117,18 +107,18 @@ public void testShuffleMode() { } @Test - public void testBatchJobType() { + void testBatchJobType() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSink sink = addDummyPipeline(env); StreamGraph graph = getStreamGraphInBatchMode(sink); - assertThat(graph.getJobType(), is(JobType.BATCH)); + assertThat(graph.getJobType()).isEqualTo(JobType.BATCH); } @Test - public void testManagedMemoryWeights() { + void testManagedMemoryWeights() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); SingleOutputStreamOperator process = @@ -142,16 +132,14 @@ public void testManagedMemoryWeights() { expectedOperatorWeights.put( ManagedMemoryUseCase.OPERATOR, ExecutionOptions.SORTED_INPUTS_MEMORY.defaultValue().getMebiBytes()); - assertThat( - processNode.getManagedMemoryOperatorScopeUseCaseWeights(), - equalTo(expectedOperatorWeights)); - assertThat( - processNode.getManagedMemorySlotScopeUseCases(), - equalTo(Collections.singleton(ManagedMemoryUseCase.STATE_BACKEND))); + assertThat(processNode.getManagedMemoryOperatorScopeUseCaseWeights()) + .isEqualTo(expectedOperatorWeights); + assertThat(processNode.getManagedMemorySlotScopeUseCases()) + .containsOnly(ManagedMemoryUseCase.STATE_BACKEND); } @Test - public void testCustomManagedMemoryWeights() { + void testCustomManagedMemoryWeights() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); SingleOutputStreamOperator process = env.fromData(1, 2).keyBy(Integer::intValue).process(DUMMY_PROCESS_FUNCTION); @@ -165,16 +153,14 @@ public void testCustomManagedMemoryWeights() { final Map expectedOperatorWeights = new HashMap<>(); expectedOperatorWeights.put(ManagedMemoryUseCase.OPERATOR, 42); - assertThat( - processNode.getManagedMemoryOperatorScopeUseCaseWeights(), - equalTo(expectedOperatorWeights)); - assertThat( - processNode.getManagedMemorySlotScopeUseCases(), - equalTo(Collections.singleton(ManagedMemoryUseCase.STATE_BACKEND))); + assertThat(processNode.getManagedMemoryOperatorScopeUseCaseWeights()) + .isEqualTo(expectedOperatorWeights); + assertThat(processNode.getManagedMemorySlotScopeUseCases()) + .containsOnly(ManagedMemoryUseCase.STATE_BACKEND); } @Test - public void testOneInputTransformation() { + void testOneInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); SingleOutputStreamOperator process = @@ -184,19 +170,17 @@ public void testOneInputTransformation() { StreamGraph graph = getStreamGraphInBatchMode(sink); StreamNode processNode = graph.getStreamNode(process.getId()); - assertThat( - processNode.getInputRequirements().get(0), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - processNode.getOperatorFactory().getChainingStrategy(), - equalTo(ChainingStrategy.HEAD)); - assertThat(graph.getStateBackend(), instanceOf(BatchExecutionStateBackend.class)); + assertThat(processNode.getInputRequirements().get(0)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(processNode.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.HEAD); + 
assertThat(graph.getStateBackend()).isInstanceOf(BatchExecutionStateBackend.class); // the provider is passed as a lambda therefore we cannot assert the class of the provider - assertThat(graph.getTimerServiceProvider(), notNullValue()); + assertThat(graph.getTimerServiceProvider()).isNotNull(); } @Test - public void testDisablingStateBackendOneInputTransformation() { + void testDisablingStateBackendOneInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); SingleOutputStreamOperator process = @@ -209,18 +193,16 @@ public void testDisablingStateBackendOneInputTransformation() { StreamGraph graph = getStreamGraphInBatchMode(sink, configuration); StreamNode processNode = graph.getStreamNode(process.getId()); - assertThat( - processNode.getInputRequirements().get(0), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - processNode.getOperatorFactory().getChainingStrategy(), - equalTo(ChainingStrategy.HEAD)); - assertThat(graph.getStateBackend(), nullValue()); - assertThat(graph.getTimerServiceProvider(), nullValue()); + assertThat(processNode.getInputRequirements().get(0)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(processNode.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.HEAD); + assertThat(graph.getStateBackend()).isNull(); + assertThat(graph.getTimerServiceProvider()).isNull(); } @Test - public void testDisablingSortingInputsOneInputTransformation() { + void testDisablingSortingInputsOneInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); SingleOutputStreamOperator process = @@ -234,13 +216,13 @@ public void testDisablingSortingInputsOneInputTransformation() { StreamGraph graph = getStreamGraphInBatchMode(sink, configuration); StreamNode processNode = graph.getStreamNode(process.getId()); - assertThat(processNode.getInputRequirements().get(0), nullValue()); - assertThat(graph.getStateBackend(), nullValue()); - assertThat(graph.getTimerServiceProvider(), nullValue()); + assertThat(processNode.getInputRequirements().get(0)).isNull(); + assertThat(graph.getStateBackend()).isNull(); + assertThat(graph.getTimerServiceProvider()).isNull(); } @Test - public void testDisablingSortingInputsWithoutBatchStateBackendOneInputTransformation() { + void testDisablingSortingInputsWithoutBatchStateBackendOneInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); SingleOutputStreamOperator process = @@ -250,14 +232,14 @@ public void testDisablingSortingInputsWithoutBatchStateBackendOneInputTransforma Configuration configuration = new Configuration(); configuration.set(ExecutionOptions.SORT_INPUTS, false); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage( - "Batch state backend requires the sorted inputs to be enabled!"); - getStreamGraphInBatchMode(sink, configuration); + assertThatThrownBy(() -> getStreamGraphInBatchMode(sink, configuration)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "Batch state backend requires the sorted inputs to be enabled!"); } @Test - public void testTwoInputTransformation() { + void testTwoInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource elements1 = env.fromData(1, 2); @@ -272,22 +254,19 @@ public void testTwoInputTransformation() { StreamGraph graph = getStreamGraphInBatchMode(sink); StreamNode processNode 
= graph.getStreamNode(process.getId()); - assertThat( - processNode.getInputRequirements().get(0), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - processNode.getInputRequirements().get(1), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - processNode.getOperatorFactory().getChainingStrategy(), - equalTo(ChainingStrategy.HEAD)); - assertThat(graph.getStateBackend(), instanceOf(BatchExecutionStateBackend.class)); + assertThat(processNode.getInputRequirements().get(0)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(processNode.getInputRequirements().get(1)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(processNode.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.HEAD); + assertThat(graph.getStateBackend()).isInstanceOf(BatchExecutionStateBackend.class); // the provider is passed as a lambda therefore we cannot assert the class of the provider - assertThat(graph.getTimerServiceProvider(), notNullValue()); + assertThat(graph.getTimerServiceProvider()).isNotNull(); } @Test - public void testDisablingStateBackendTwoInputTransformation() { + void testDisablingStateBackendTwoInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource elements1 = env.fromData(1, 2); @@ -305,21 +284,18 @@ public void testDisablingStateBackendTwoInputTransformation() { StreamGraph graph = getStreamGraphInBatchMode(sink, configuration); StreamNode processNode = graph.getStreamNode(process.getId()); - assertThat( - processNode.getInputRequirements().get(0), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - processNode.getInputRequirements().get(1), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - processNode.getOperatorFactory().getChainingStrategy(), - equalTo(ChainingStrategy.HEAD)); - assertThat(graph.getStateBackend(), nullValue()); - assertThat(graph.getTimerServiceProvider(), nullValue()); + assertThat(processNode.getInputRequirements().get(0)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(processNode.getInputRequirements().get(1)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(processNode.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.HEAD); + assertThat(graph.getStateBackend()).isNull(); + assertThat(graph.getTimerServiceProvider()).isNull(); } @Test - public void testDisablingSortingInputsTwoInputTransformation() { + void testDisablingSortingInputsTwoInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource elements1 = env.fromData(1, 2); @@ -338,14 +314,14 @@ public void testDisablingSortingInputsTwoInputTransformation() { StreamGraph graph = getStreamGraphInBatchMode(sink, configuration); StreamNode processNode = graph.getStreamNode(process.getId()); - assertThat(processNode.getInputRequirements().get(0), nullValue()); - assertThat(processNode.getInputRequirements().get(1), nullValue()); - assertThat(graph.getStateBackend(), nullValue()); - assertThat(graph.getTimerServiceProvider(), nullValue()); + assertThat(processNode.getInputRequirements().get(0)).isNull(); + assertThat(processNode.getInputRequirements().get(1)).isNull(); + assertThat(graph.getStateBackend()).isNull(); + assertThat(graph.getTimerServiceProvider()).isNull(); } @Test - public void testDisablingSortingInputsWithoutBatchStateBackendTwoInputTransformation() { + void 
testDisablingSortingInputsWithoutBatchStateBackendTwoInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource elements1 = env.fromData(1, 2); @@ -360,14 +336,14 @@ public void testDisablingSortingInputsWithoutBatchStateBackendTwoInputTransforma Configuration configuration = new Configuration(); configuration.set(ExecutionOptions.SORT_INPUTS, false); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage( - "Batch state backend requires the sorted inputs to be enabled!"); - getStreamGraphInBatchMode(sink, configuration); + assertThatThrownBy(() -> getStreamGraphInBatchMode(sink, configuration)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "Batch state backend requires the sorted inputs to be enabled!"); } @Test - public void testInputSelectableTwoInputTransformation() { + void testInputSelectableTwoInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource elements1 = env.fromData(1, 2); @@ -388,14 +364,14 @@ public void testInputSelectableTwoInputTransformation() { DataStreamSink sink = selectableOperator.sinkTo(new DiscardingSink<>()); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage( - "Batch state backend and sorting inputs are not supported in graphs with an InputSelectable operator."); - getStreamGraphInBatchMode(sink); + assertThatThrownBy(() -> getStreamGraphInBatchMode(sink)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "Batch state backend and sorting inputs are not supported in graphs with an InputSelectable operator."); } @Test - public void testMultiInputTransformation() { + void testMultiInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource elements1 = env.fromData(1, 2); @@ -423,22 +399,19 @@ public void testMultiInputTransformation() { StreamGraph graph = getStreamGraphInBatchMode(sink); StreamNode operatorNode = graph.getStreamNode(multipleInputTransformation.getId()); - assertThat( - operatorNode.getInputRequirements().get(0), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - operatorNode.getInputRequirements().get(1), - equalTo(StreamConfig.InputRequirement.SORTED)); - assertThat( - operatorNode.getOperatorFactory().getChainingStrategy(), - equalTo(ChainingStrategy.HEAD)); - assertThat(graph.getStateBackend(), instanceOf(BatchExecutionStateBackend.class)); + assertThat(operatorNode.getInputRequirements().get(0)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(operatorNode.getInputRequirements().get(1)) + .isEqualTo(StreamConfig.InputRequirement.SORTED); + assertThat(operatorNode.getOperatorFactory().getChainingStrategy()) + .isEqualTo(ChainingStrategy.HEAD); + assertThat(graph.getStateBackend()).isInstanceOf(BatchExecutionStateBackend.class); // the provider is passed as a lambda therefore we cannot assert the class of the provider - assertThat(graph.getTimerServiceProvider(), notNullValue()); + assertThat(graph.getTimerServiceProvider()).isNotNull(); } @Test - public void testInputSelectableMultiInputTransformation() { + void testInputSelectableMultiInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource elements1 = env.fromData(1, 2); @@ -462,14 +435,14 @@ public void testInputSelectableMultiInputTransformation() { 
.transform(multipleInputTransformation) .sinkTo(new DiscardingSink<>()); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage( - "Batch state backend and sorting inputs are not supported in graphs with an InputSelectable operator."); - getStreamGraphInBatchMode(sink); + assertThatThrownBy(() -> getStreamGraphInBatchMode(sink)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "Batch state backend and sorting inputs are not supported in graphs with an InputSelectable operator."); } @Test - public void testFeedbackThrowsExceptionInBatch() { + void testFeedbackThrowsExceptionInBatch() { final SourceTransformation bounded = new SourceTransformation<>( "Bounded Source", @@ -485,7 +458,7 @@ public void testFeedbackThrowsExceptionInBatch() { } @Test - public void testCoFeedbackThrowsExceptionInBatch() { + void testCoFeedbackThrowsExceptionInBatch() { final CoFeedbackTransformation coFeedbackTransformation = new CoFeedbackTransformation<>(2, TypeInformation.of(Integer.TYPE), 5L); testNoSupportForIterationsInBatchHelper(coFeedbackTransformation); @@ -506,9 +479,9 @@ private void testNoSupportForIterationsInBatchHelper( new CheckpointConfig(), configuration); - expectedException.expect(UnsupportedOperationException.class); - expectedException.expectMessage("Iterations are not supported in BATCH execution mode."); - streamGraphGenerator.generate(); + assertThatThrownBy(streamGraphGenerator::generate) + .isInstanceOf(UnsupportedOperationException.class) + .hasMessageContaining("Iterations are not supported in BATCH execution mode."); } private void testGlobalStreamExchangeMode( @@ -532,7 +505,7 @@ private void testGlobalStreamExchangeMode( StreamGraph graph = streamGraphGenerator.generate(); - assertEquals(expectedStreamExchangeMode, graph.getGlobalStreamExchangeMode()); + assertThat(graph.getGlobalStreamExchangeMode()).isEqualTo(expectedStreamExchangeMode); } private DataStreamSink addDummyPipeline(StreamExecutionEnvironment env) { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java index c95f22738db69..ff06b12d46235 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorExecutionModeDetectionTest.java @@ -34,34 +34,27 @@ import org.apache.flink.streaming.api.operators.SimpleOperatorFactory; import org.apache.flink.streaming.api.transformations.SourceTransformation; import org.apache.flink.streaming.api.transformations.TwoInputTransformation; -import org.apache.flink.util.TestLogger; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static 
org.assertj.core.api.HamcrestCondition.matching; /** * Tests for the detection of the {@link RuntimeExecutionMode runtime execution mode} during stream * graph translation. */ -public class StreamGraphGeneratorExecutionModeDetectionTest extends TestLogger { - - @Rule public ExpectedException thrown = ExpectedException.none(); +class StreamGraphGeneratorExecutionModeDetectionTest { @Test - public void testExecutionModePropagationFromEnvWithDefaultAndBoundedSource() { + void testExecutionModePropagationFromEnvWithDefaultAndBoundedSource() { final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment(); environment.enableCheckpointing(100); @@ -73,17 +66,18 @@ public void testExecutionModePropagationFromEnvWithDefaultAndBoundedSource() { "bounded-source") .print(); - assertThat( - environment.getStreamGraph(), - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, - JobType.STREAMING, - true, - true)); + assertThat(environment.getStreamGraph()) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, + JobType.STREAMING, + true, + true))); } @Test - public void testExecutionModePropagationFromEnvWithDefaultAndUnboundedSource() { + void testExecutionModePropagationFromEnvWithDefaultAndUnboundedSource() { final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment(); @@ -94,17 +88,18 @@ public void testExecutionModePropagationFromEnvWithDefaultAndUnboundedSource() { "unbounded-source") .print(); - assertThat( - environment.getStreamGraph(), - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, - JobType.STREAMING, - false, - true)); + assertThat(environment.getStreamGraph()) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, + JobType.STREAMING, + false, + true))); } @Test - public void testExecutionModePropagationFromEnvWithAutomaticAndBoundedSource() { + void testExecutionModePropagationFromEnvWithAutomaticAndBoundedSource() { final Configuration config = new Configuration(); config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.AUTOMATIC); @@ -120,18 +115,22 @@ public void testExecutionModePropagationFromEnvWithAutomaticAndBoundedSource() { "bounded-source") .print(); - assertTrue(environment.isChainingEnabled()); - assertThat(environment.getCheckpointInterval(), is(equalTo(100L))); + assertThat(environment.isChainingEnabled()).isTrue(); + assertThat(environment.getCheckpointInterval()).isEqualTo(100L); final StreamGraph streamGraph = environment.getStreamGraph(); - assertThat( - streamGraph, - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, JobType.BATCH, false, false)); + assertThat(streamGraph) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, + JobType.BATCH, + false, + false))); } @Test - public void testExecutionModePropagationFromEnvWithBatchAndUnboundedSource() { + void testExecutionModePropagationFromEnvWithBatchAndUnboundedSource() { final Configuration config = new Configuration(); config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH); @@ -146,20 +145,20 @@ public void testExecutionModePropagationFromEnvWithBatchAndUnboundedSource() { "unbounded-source") .print(); - thrown.expect(IllegalStateException.class); - thrown.expectMessage("combination is not allowed"); - environment.getStreamGraph(); + assertThatThrownBy(environment::getStreamGraph) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("combination is not allowed"); } @Test - 
public void testDetectionThroughTransitivePredecessors() { + void testDetectionThroughTransitivePredecessors() { final SourceTransformation bounded = getSourceTransformation("Bounded Source", Boundedness.BOUNDED); - assertEquals(Boundedness.BOUNDED, bounded.getBoundedness()); + assertThat(bounded.getBoundedness()).isEqualTo(Boundedness.BOUNDED); final SourceTransformation unbounded = getSourceTransformation("Unbounded Source", Boundedness.CONTINUOUS_UNBOUNDED); - assertEquals(Boundedness.CONTINUOUS_UNBOUNDED, unbounded.getBoundedness()); + assertThat(unbounded.getBoundedness()).isEqualTo(Boundedness.CONTINUOUS_UNBOUNDED); final TwoInputTransformation resultTransform = new TwoInputTransformation<>( @@ -174,85 +173,97 @@ public void testDetectionThroughTransitivePredecessors() { final StreamGraph graph = generateStreamGraph(RuntimeExecutionMode.AUTOMATIC, resultTransform); - assertThat( - graph, - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, - JobType.STREAMING, - false, - true)); + assertThat(graph) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, + JobType.STREAMING, + false, + true))); } @Test - public void testBoundedDetection() { + void testBoundedDetection() { final SourceTransformation bounded = getSourceTransformation("Bounded Source", Boundedness.BOUNDED); - assertEquals(Boundedness.BOUNDED, bounded.getBoundedness()); + assertThat(bounded.getBoundedness()).isEqualTo(Boundedness.BOUNDED); final StreamGraph graph = generateStreamGraph(RuntimeExecutionMode.AUTOMATIC, bounded); - assertThat( - graph, - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, JobType.BATCH, false, false)); + assertThat(graph) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, + JobType.BATCH, + false, + false))); } @Test - public void testUnboundedDetection() { + void testUnboundedDetection() { final SourceTransformation unbounded = getSourceTransformation("Unbounded Source", Boundedness.CONTINUOUS_UNBOUNDED); - assertEquals(Boundedness.CONTINUOUS_UNBOUNDED, unbounded.getBoundedness()); + assertThat(unbounded.getBoundedness()).isEqualTo(Boundedness.CONTINUOUS_UNBOUNDED); final StreamGraph graph = generateStreamGraph(RuntimeExecutionMode.AUTOMATIC, unbounded); - assertThat( - graph, - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, - JobType.STREAMING, - false, - true)); + assertThat(graph) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, + JobType.STREAMING, + false, + true))); } @Test - public void testMixedDetection() { + void testMixedDetection() { final SourceTransformation unbounded = getSourceTransformation("Unbounded Source", Boundedness.CONTINUOUS_UNBOUNDED); - assertEquals(Boundedness.CONTINUOUS_UNBOUNDED, unbounded.getBoundedness()); + assertThat(unbounded.getBoundedness()).isEqualTo(Boundedness.CONTINUOUS_UNBOUNDED); final SourceTransformation bounded = getSourceTransformation("Bounded Source", Boundedness.BOUNDED); - assertEquals(Boundedness.BOUNDED, bounded.getBoundedness()); + assertThat(bounded.getBoundedness()).isEqualTo(Boundedness.BOUNDED); final StreamGraph graph = generateStreamGraph(RuntimeExecutionMode.AUTOMATIC, unbounded); - assertThat( - graph, - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, - JobType.STREAMING, - false, - true)); + assertThat(graph) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, + JobType.STREAMING, + false, + true))); } @Test - public void 
testExplicitOverridesDetectedMode() { + void testExplicitOverridesDetectedMode() { final SourceTransformation bounded = getSourceTransformation("Bounded Source", Boundedness.BOUNDED); - assertEquals(Boundedness.BOUNDED, bounded.getBoundedness()); + assertThat(bounded.getBoundedness()).isEqualTo(Boundedness.BOUNDED); final StreamGraph graph = generateStreamGraph(RuntimeExecutionMode.AUTOMATIC, bounded); - assertThat( - graph, - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, JobType.BATCH, false, false)); + assertThat(graph) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, + JobType.BATCH, + false, + false))); final StreamGraph streamingGraph = generateStreamGraph(RuntimeExecutionMode.STREAMING, bounded); - assertThat( - streamingGraph, - hasProperties( - GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, - JobType.STREAMING, - false, - true)); + assertThat(streamingGraph) + .is( + matching( + hasProperties( + GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, + JobType.STREAMING, + false, + true))); } private StreamGraph generateStreamGraph( diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorTest.java index c6f6979ef1f3a..7cf4a072a429a 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamGraphGeneratorTest.java @@ -78,9 +78,7 @@ import org.apache.flink.util.AbstractID; import org.apache.flink.util.Collector; import org.apache.flink.util.OutputTag; -import org.apache.flink.util.TestLogger; -import org.assertj.core.api.Assertions; import org.hamcrest.Description; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; @@ -98,11 +96,10 @@ import java.util.Set; import java.util.stream.Collectors; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.hamcrest.MatcherAssert.assertThat; +import static org.assertj.core.api.HamcrestCondition.matching; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.iterableWithSize; /** * Tests for {@link StreamGraphGenerator}. This only tests correct translation of split/select, @@ -110,10 +107,10 @@ * tests. 
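* <p>For reference, a minimal sketch of the exception-assertion idiom that replaces the
* removed JUnit 4 {@code ExpectedException} rule throughout this class (the expression
* under test is illustrative; message checks, where present, use {@code hasMessageContaining}):
* <pre>{@code
* assertThatThrownBy(env::getStreamGraph)
*         .isInstanceOf(IllegalArgumentException.class);
* }</pre>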
*/ @SuppressWarnings("serial") -public class StreamGraphGeneratorTest extends TestLogger { +class StreamGraphGeneratorTest { @Test - public void generatorForwardsSavepointRestoreSettings() { + void generatorForwardsSavepointRestoreSettings() { StreamGraphGenerator streamGraphGenerator = new StreamGraphGenerator( Collections.emptyList(), new ExecutionConfig(), new CheckpointConfig()); @@ -121,11 +118,11 @@ public void generatorForwardsSavepointRestoreSettings() { streamGraphGenerator.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("hello")); StreamGraph streamGraph = streamGraphGenerator.generate(); - assertThat(streamGraph.getSavepointRestoreSettings().getRestorePath(), is("hello")); + assertThat(streamGraph.getSavepointRestoreSettings().getRestorePath()).isEqualTo("hello"); } @Test - public void testBufferTimeout() { + void testBufferTimeout() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setBufferTimeout(77); // set timeout to some recognizable number @@ -147,20 +144,19 @@ public void testBufferTimeout() { for (StreamNode node : sg.getStreamNodes()) { switch (node.getOperatorName()) { case "A": - Assertions.assertThat(77L).isEqualTo(node.getBufferTimeout()); + assertThat(77L).isEqualTo(node.getBufferTimeout()); break; case "B": - Assertions.assertThat(node.getBufferTimeout()).isEqualTo(0L); + assertThat(node.getBufferTimeout()).isEqualTo(0L); break; case "C": - Assertions.assertThat(node.getBufferTimeout()).isEqualTo(12L); + assertThat(node.getBufferTimeout()).isEqualTo(12L); break; case "D": - Assertions.assertThat(node.getBufferTimeout()).isEqualTo(77L); + assertThat(node.getBufferTimeout()).isEqualTo(77L); break; default: - Assertions.assertThat(node.getOperatorFactory()) - .isInstanceOf(SourceOperatorFactory.class); + assertThat(node.getOperatorFactory()).isInstanceOf(SourceOperatorFactory.class); } } } @@ -172,7 +168,7 @@ public void testBufferTimeout() { * StreamGraph when they are intermixed. 
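* <p>A minimal sketch of the AssertJ style used below for partitioner checks
* ({@code mapId} is an illustrative placeholder, not a field of this test):
* <pre>{@code
* StreamGraph graph = env.getStreamGraph();
* assertThat(graph.getStreamNode(mapId).getInEdges().get(0).getPartitioner())
*         .isInstanceOf(RebalancePartitioner.class);
* }</pre>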
*/ @Test - public void testVirtualTransformations() throws Exception { + void testVirtualTransformations() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); @@ -206,21 +202,13 @@ public void testVirtualTransformations() throws Exception { StreamGraph graph = env.getStreamGraph(); // rebalanceMap - Assertions.assertThat( - graph.getStreamNode(rebalanceMap.getId()) - .getInEdges() - .get(0) - .getPartitioner()) + assertThat(graph.getStreamNode(rebalanceMap.getId()).getInEdges().get(0).getPartitioner()) .isInstanceOf(RebalancePartitioner.class); // verify that only last partitioning takes precedence - Assertions.assertThat( - graph.getStreamNode(broadcastMap.getId()) - .getInEdges() - .get(0) - .getPartitioner()) + assertThat(graph.getStreamNode(broadcastMap.getId()).getInEdges().get(0).getPartitioner()) .isInstanceOf(BroadcastPartitioner.class); - Assertions.assertThat( + assertThat( graph.getSourceVertex( graph.getStreamNode(broadcastMap.getId()) .getInEdges() @@ -229,19 +217,19 @@ public void testVirtualTransformations() throws Exception { .isEqualTo(rebalanceMap.getId()); // verify that partitioning in unions is preserved - Assertions.assertThat( + assertThat( graph.getStreamNode(broadcastOperator.getId()) .getOutEdges() .get(0) .getPartitioner()) .isInstanceOf(BroadcastPartitioner.class); - Assertions.assertThat( + assertThat( graph.getStreamNode(globalOperator.getId()) .getOutEdges() .get(0) .getPartitioner()) .isInstanceOf(GlobalPartitioner.class); - Assertions.assertThat( + assertThat( graph.getStreamNode(shuffleOperator.getId()) .getOutEdges() .get(0) @@ -250,7 +238,7 @@ public void testVirtualTransformations() throws Exception { } @Test - public void testOutputTypeConfigurationWithUdfStreamOperator() throws Exception { + void testOutputTypeConfigurationWithUdfStreamOperator() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); OutputTypeConfigurableFunction function = new OutputTypeConfigurableFunction<>(); @@ -264,8 +252,8 @@ public void testOutputTypeConfigurationWithUdfStreamOperator() throws Exception env.getStreamGraph(); - Assertions.assertThat(udfOperator).isInstanceOf(AbstractUdfStreamOperator.class); - Assertions.assertThat(function.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO); + assertThat(udfOperator).isInstanceOf(AbstractUdfStreamOperator.class); + assertThat(function.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO); } /** @@ -273,7 +261,7 @@ public void testOutputTypeConfigurationWithUdfStreamOperator() throws Exception * output type. In this test case the output type must be BasicTypeInfo.INT_TYPE_INFO. 
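* <p>Schematically, the check boils down to the following sketch (the operator instance
* is created in the test body; translation is what triggers {@code setOutputType}):
* <pre>{@code
* env.getStreamGraph(); // stream graph translation configures the operator's output type
* assertThat(operator.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
* }</pre>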
*/ @Test - public void testOutputTypeConfigurationWithOneInputTransformation() throws Exception { + void testOutputTypeConfigurationWithOneInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream source = env.fromData(1, 10); @@ -291,12 +279,12 @@ public void testOutputTypeConfigurationWithOneInputTransformation() throws Excep env.getStreamGraph(); - Assertions.assertThat(outputTypeConfigurableOperation.getTypeInformation()) + assertThat(outputTypeConfigurableOperation.getTypeInformation()) .isEqualTo(BasicTypeInfo.INT_TYPE_INFO); } @Test - public void testOutputTypeConfigurationWithTwoInputTransformation() throws Exception { + void testOutputTypeConfigurationWithTwoInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream source1 = env.fromData(1, 10); @@ -317,12 +305,12 @@ public void testOutputTypeConfigurationWithTwoInputTransformation() throws Excep env.getStreamGraph(); - Assertions.assertThat(outputTypeConfigurableOperation.getTypeInformation()) + assertThat(outputTypeConfigurableOperation.getTypeInformation()) .isEqualTo(BasicTypeInfo.INT_TYPE_INFO); } @Test - public void testMultipleInputTransformation() throws Exception { + void testMultipleInputTransformation() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream source1 = env.fromData(1, 10); @@ -343,22 +331,19 @@ public void testMultipleInputTransformation() throws Exception { .addInput(source3.getTransformation())); StreamGraph streamGraph = env.getStreamGraph(); - Assertions.assertThat(streamGraph.getStreamNodes().size()).isEqualTo(4); - - Assertions.assertThat(streamGraph.getStreamEdges(source1.getId(), transform.getId()).size()) - .isEqualTo(1); - Assertions.assertThat(streamGraph.getStreamEdges(source2.getId(), transform.getId()).size()) - .isEqualTo(1); - Assertions.assertThat(streamGraph.getStreamEdges(source3.getId(), transform.getId()).size()) - .isEqualTo(1); - Assertions.assertThat(streamGraph.getStreamEdges(source1.getId()).size()).isEqualTo(1); - Assertions.assertThat(streamGraph.getStreamEdges(source2.getId()).size()).isEqualTo(1); - Assertions.assertThat(streamGraph.getStreamEdges(source3.getId()).size()).isEqualTo(1); - Assertions.assertThat(streamGraph.getStreamEdges(transform.getId()).size()).isEqualTo(0); + assertThat(streamGraph.getStreamNodes().size()).isEqualTo(4); + + assertThat(streamGraph.getStreamEdges(source1.getId(), transform.getId())).hasSize(1); + assertThat(streamGraph.getStreamEdges(source2.getId(), transform.getId())).hasSize(1); + assertThat(streamGraph.getStreamEdges(source3.getId(), transform.getId())).hasSize(1); + assertThat(streamGraph.getStreamEdges(source1.getId())).hasSize(1); + assertThat(streamGraph.getStreamEdges(source2.getId())).hasSize(1); + assertThat(streamGraph.getStreamEdges(source3.getId())).hasSize(1); + assertThat(streamGraph.getStreamEdges(transform.getId())).hasSize(0); } @Test - public void testUnalignedCheckpointDisabledOnPointwise() { + void testUnalignedCheckpointDisabledOnPointwise() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(42); @@ -388,24 +373,29 @@ public void processBroadcastElement( DataStream map4 = map3.rescale().map(l -> l).setParallelism(1337); StreamGraph streamGraph = env.getStreamGraph(); - Assertions.assertThat(streamGraph.getStreamNodes().size()).isEqualTo(7); + 
assertThat(streamGraph.getStreamNodes().size()).isEqualTo(7); // forward - assertThat(edge(streamGraph, source1, map1), supportsUnalignedCheckpoints(false)); + assertThat(edge(streamGraph, source1, map1)) + .is(matching(supportsUnalignedCheckpoints(false))); // shuffle - assertThat(edge(streamGraph, source2, map2), supportsUnalignedCheckpoints(true)); + assertThat(edge(streamGraph, source2, map2)) + .is(matching(supportsUnalignedCheckpoints(true))); // broadcast, but other channel is forwarded - assertThat(edge(streamGraph, map1, joined), supportsUnalignedCheckpoints(false)); + assertThat(edge(streamGraph, map1, joined)) + .is(matching(supportsUnalignedCheckpoints(false))); // forward - assertThat(edge(streamGraph, map2, joined), supportsUnalignedCheckpoints(false)); + assertThat(edge(streamGraph, map2, joined)) + .is(matching(supportsUnalignedCheckpoints(false))); // shuffle - assertThat(edge(streamGraph, joined, map3), supportsUnalignedCheckpoints(true)); + assertThat(edge(streamGraph, joined, map3)) + .is(matching(supportsUnalignedCheckpoints(true))); // rescale - assertThat(edge(streamGraph, map3, map4), supportsUnalignedCheckpoints(false)); + assertThat(edge(streamGraph, map3, map4)).is(matching(supportsUnalignedCheckpoints(false))); } @Test - public void testUnalignedCheckpointDisabledOnBroadcast() { + void testUnalignedCheckpointDisabledOnBroadcast() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(42); @@ -432,20 +422,23 @@ public void processBroadcastElement( }); StreamGraph streamGraph = env.getStreamGraph(); - Assertions.assertThat(streamGraph.getStreamNodes().size()).isEqualTo(4); + assertThat(streamGraph.getStreamNodes().size()).isEqualTo(4); // single broadcast - assertThat(edge(streamGraph, source1, map1), supportsUnalignedCheckpoints(false)); + assertThat(edge(streamGraph, source1, map1)) + .is(matching(supportsUnalignedCheckpoints(false))); // keyed, connected with broadcast - assertThat(edge(streamGraph, source2, joined), supportsUnalignedCheckpoints(false)); + assertThat(edge(streamGraph, source2, joined)) + .is(matching(supportsUnalignedCheckpoints(false))); // broadcast, connected with keyed - assertThat(edge(streamGraph, map1, joined), supportsUnalignedCheckpoints(false)); + assertThat(edge(streamGraph, map1, joined)) + .is(matching(supportsUnalignedCheckpoints(false))); } private static StreamEdge edge( StreamGraph streamGraph, DataStream op1, DataStream op2) { List streamEdges = streamGraph.getStreamEdges(op1.getId(), op2.getId()); - assertThat(streamEdges, iterableWithSize(1)); + assertThat(streamEdges).hasSize(1); return streamEdges.get(0); } @@ -466,7 +459,7 @@ protected Boolean featureValueOf(StreamEdge actual) { * maximum parallelism. */ @Test - public void testSetupOfKeyGroupPartitioner() { + void testSetupOfKeyGroupPartitioner() { int maxParallelism = 42; StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.getConfig().setMaxParallelism(maxParallelism); @@ -487,7 +480,7 @@ public void testSetupOfKeyGroupPartitioner() { /** Tests that the global and operator-wide max parallelism setting is respected. 
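* <p>A hedged sketch of the pattern (identifiers illustrative): the operator-wide
* setting should win over the global default.
* <pre>{@code
* env.getConfig().setMaxParallelism(42);   // global default
* keyedResult.setMaxParallelism(17);       // per-operator override
* assertThat(graph.getStreamNode(keyedResult.getId()).getMaxParallelism()).isEqualTo(17);
* }</pre>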
*/ @Test - public void testMaxParallelismForwarding() { + void testMaxParallelismForwarding() { int globalMaxParallelism = 42; int keyedResult2MaxParallelism = 17; @@ -511,9 +504,8 @@ public void testMaxParallelismForwarding() { StreamNode keyedResult1Node = graph.getStreamNode(keyedResult1.getId()); StreamNode keyedResult2Node = graph.getStreamNode(keyedResult2.getId()); - Assertions.assertThat(keyedResult1Node.getMaxParallelism()).isEqualTo(globalMaxParallelism); - Assertions.assertThat(keyedResult2Node.getMaxParallelism()) - .isEqualTo(keyedResult2MaxParallelism); + assertThat(keyedResult1Node.getMaxParallelism()).isEqualTo(globalMaxParallelism); + assertThat(keyedResult2Node.getMaxParallelism()).isEqualTo(keyedResult2MaxParallelism); } /** @@ -521,7 +513,7 @@ public void testMaxParallelismForwarding() { * specified. */ @Test - public void testAutoMaxParallelism() { + void testAutoMaxParallelism() { int globalParallelism = 42; int mapParallelism = 17; int maxParallelism = 21; @@ -558,13 +550,13 @@ public void testAutoMaxParallelism() { StreamNode keyedResult3Node = graph.getStreamNode(keyedResult3.getId()); StreamNode keyedResult4Node = graph.getStreamNode(keyedResult4.getId()); - Assertions.assertThat(keyedResult3Node.getMaxParallelism()).isEqualTo(maxParallelism); - Assertions.assertThat(keyedResult4Node.getMaxParallelism()).isEqualTo(maxParallelism); + assertThat(keyedResult3Node.getMaxParallelism()).isEqualTo(maxParallelism); + assertThat(keyedResult4Node.getMaxParallelism()).isEqualTo(maxParallelism); } /** Tests that the max parallelism is properly set for connected streams. */ @Test - public void testMaxParallelismWithConnectedKeyedStream() { + void testMaxParallelismWithConnectedKeyedStream() { int maxParallelism = 42; StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); @@ -596,7 +588,7 @@ public void testMaxParallelismWithConnectedKeyedStream() { * sort by its id. */ @Test - public void testSinkIdComparison() { + void testSinkIdComparison() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream source = env.fromData(1, 2, 3); for (int i = 0; i < 32; i++) { @@ -616,7 +608,7 @@ public void invoke(Integer value, Context ctx) throws Exception {} /** Test iteration job, check slot sharing group and co-location group. 
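* <p>Where a custom Hamcrest {@code Matcher} such as {@code equalsResourceSpec} is kept,
* it is bridged into AssertJ via {@code HamcrestCondition}, roughly:
* <pre>{@code
* import static org.assertj.core.api.HamcrestCondition.matching;
*
* assertThat(iterationResources).is(matching(equalsResourceSpec(resources)));
* }</pre>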
*/ @Test - public void testIteration() { + void testIteration() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream source = env.fromData(1, 2, 3).name("source"); @@ -632,19 +624,19 @@ public void testIteration() { StreamGraph streamGraph = env.getStreamGraph(); for (Tuple2 iterationPair : streamGraph.getIterationSourceSinkPairs()) { - Assertions.assertThat(iterationPair.f0.getCoLocationGroup()).isNotNull(); - Assertions.assertThat(iterationPair.f1.getCoLocationGroup()) + assertThat(iterationPair.f0.getCoLocationGroup()).isNotNull(); + assertThat(iterationPair.f1.getCoLocationGroup()) .isEqualTo(iterationPair.f0.getCoLocationGroup()); - Assertions.assertThat(iterationPair.f0.getSlotSharingGroup()) + assertThat(iterationPair.f0.getSlotSharingGroup()) .isEqualTo(StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP); - Assertions.assertThat(iterationPair.f1.getSlotSharingGroup()) + assertThat(iterationPair.f1.getSlotSharingGroup()) .isEqualTo(iterationPair.f0.getSlotSharingGroup()); final ResourceSpec sourceMinResources = iterationPair.f0.getMinResources(); final ResourceSpec sinkMinResources = iterationPair.f1.getMinResources(); final ResourceSpec iterationResources = sourceMinResources.merge(sinkMinResources); - assertThat(iterationResources, equalsResourceSpec(resources)); + assertThat(iterationResources).is(matching(equalsResourceSpec(resources))); } } @@ -654,7 +646,7 @@ private Matcher equalsResourceSpec(ResourceSpec resources) { /** Test slot sharing is enabled. */ @Test - public void testEnableSlotSharing() { + void testEnableSlotSharing() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream sourceDataStream = env.fromData(1, 2, 3); DataStream mapDataStream = sourceDataStream.map(x -> x + 1); @@ -671,13 +663,13 @@ public void testEnableSlotSharing() { Collection streamNodes = streamGraph.getStreamNodes(); for (StreamNode streamNode : streamNodes) { - Assertions.assertThat(streamNode.getSlotSharingGroup()) + assertThat(streamNode.getSlotSharingGroup()) .isEqualTo(StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP); } } @Test - public void testSetManagedMemoryWeight() { + void testSetManagedMemoryWeight() { final int weight = 123; final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStream source = env.fromData(1, 2, 3).name("source"); @@ -689,18 +681,18 @@ public void testSetManagedMemoryWeight() { for (StreamNode streamNode : streamGraph.getStreamNodes()) { if (streamNode.getOperatorName().contains("source")) { assertThat( - streamNode - .getManagedMemoryOperatorScopeUseCaseWeights() - .get(ManagedMemoryUseCase.OPERATOR), - is(weight)); + streamNode + .getManagedMemoryOperatorScopeUseCaseWeights() + .get(ManagedMemoryUseCase.OPERATOR)) + .isEqualTo(weight); } else { - assertThat(streamNode.getManagedMemoryOperatorScopeUseCaseWeights().size(), is(0)); + assertThat(streamNode.getManagedMemoryOperatorScopeUseCaseWeights()).isEmpty(); } } } @Test - public void testSetSlotSharingResource() { + void testSetSlotSharingResource() { final String slotSharingGroup1 = "a"; final String slotSharingGroup2 = "b"; final ResourceProfile resourceProfile1 = ResourceProfile.fromResources(1, 10); @@ -730,22 +722,18 @@ public void testSetSlotSharingResource() { .setSlotSharingGroupResource(slotSharingGroupResource) .generate(); + assertThat(streamGraph.getSlotSharingGroupResource(slotSharingGroup1)) + .hasValue(resourceProfile1); + 
assertThat(streamGraph.getSlotSharingGroupResource(slotSharingGroup2)) + .hasValue(resourceProfile2); assertThat( - streamGraph.getSlotSharingGroupResource(slotSharingGroup1).get(), - equalTo(resourceProfile1)); - assertThat( - streamGraph.getSlotSharingGroupResource(slotSharingGroup2).get(), - equalTo(resourceProfile2)); - assertThat( - streamGraph - .getSlotSharingGroupResource( - StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP) - .get(), - equalTo(resourceProfile3)); + streamGraph.getSlotSharingGroupResource( + StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP)) + .hasValue(resourceProfile3); } @Test - public void testSettingSavepointRestoreSettings() { + void testSettingSavepointRestoreSettings() { Configuration config = new Configuration(); config.set(StateRecoveryOptions.SAVEPOINT_PATH, "/tmp/savepoint"); @@ -759,13 +747,12 @@ public void testSettingSavepointRestoreSettings() { SavepointRestoreSettings savepointRestoreSettings = streamGraph.getSavepointRestoreSettings(); - assertThat( - savepointRestoreSettings, - equalTo(SavepointRestoreSettings.forPath("/tmp/savepoint"))); + assertThat(savepointRestoreSettings) + .isEqualTo(SavepointRestoreSettings.forPath("/tmp/savepoint")); } @Test - public void testSettingSavepointRestoreSettingsSetterOverrides() { + void testSettingSavepointRestoreSettingsSetterOverrides() { Configuration config = new Configuration(); config.set(StateRecoveryOptions.SAVEPOINT_PATH, "/tmp/savepoint"); @@ -780,13 +767,12 @@ public void testSettingSavepointRestoreSettingsSetterOverrides() { SavepointRestoreSettings savepointRestoreSettings = streamGraph.getSavepointRestoreSettings(); - assertThat( - savepointRestoreSettings, - equalTo(SavepointRestoreSettings.forPath("/tmp/savepoint1"))); + assertThat(savepointRestoreSettings) + .isEqualTo(SavepointRestoreSettings.forPath("/tmp/savepoint1")); } @Test - public void testConfigureSlotSharingGroupResource() { + void testConfigureSlotSharingGroupResource() { final SlotSharingGroup ssg1 = SlotSharingGroup.newBuilder("ssg1").setCpuCores(1).setTaskHeapMemoryMB(100).build(); final SlotSharingGroup ssg2 = @@ -810,22 +796,18 @@ public void testConfigureSlotSharingGroupResource() { .slotSharingGroup(ssg1); final StreamGraph streamGraph = env.getStreamGraph(); + assertThat(streamGraph.getSlotSharingGroupResource("ssg1")) + .hasValue(ResourceProfile.fromResources(1, 100)); + assertThat(streamGraph.getSlotSharingGroupResource("ssg2")) + .hasValue(ResourceProfile.fromResources(2, 200)); assertThat( - streamGraph.getSlotSharingGroupResource("ssg1").get(), - is(ResourceProfile.fromResources(1, 100))); - assertThat( - streamGraph.getSlotSharingGroupResource("ssg2").get(), - is(ResourceProfile.fromResources(2, 200))); - assertThat( - streamGraph - .getSlotSharingGroupResource( - StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP) - .get(), - is(ResourceProfile.fromResources(3, 300))); + streamGraph.getSlotSharingGroupResource( + StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP)) + .hasValue(ResourceProfile.fromResources(3, 300)); } @Test - public void testConflictSlotSharingGroup() { + void testConflictSlotSharingGroup() { final SlotSharingGroup ssg = SlotSharingGroup.newBuilder("ssg").setCpuCores(1).setTaskHeapMemoryMB(100).build(); final SlotSharingGroup ssgConflict = @@ -838,12 +820,11 @@ public void testConflictSlotSharingGroup() { .sinkTo(new DiscardingSink<>()) .slotSharingGroup(ssgConflict); - Assertions.assertThatThrownBy(env::getStreamGraph) - .isInstanceOf(IllegalArgumentException.class); + 
assertThatThrownBy(env::getStreamGraph).isInstanceOf(IllegalArgumentException.class); } @Test - public void testTrackTransformationsByIdentity() { + void testTrackTransformationsByIdentity() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final Transformation noopTransformation = env.fromSequence(1, 2).getTransformation(); @@ -860,7 +841,7 @@ public void testTrackTransformationsByIdentity() { } @Test - public void testResetBatchExchangeModeInStreamingExecution() { + void testResetBatchExchangeModeInStreamingExecution() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream sourceDataStream = env.fromData(1, 2, 3); @@ -879,11 +860,11 @@ public void testResetBatchExchangeModeInStreamingExecution() { .map(StreamNode::getId) .sorted(Integer::compare) .collect(Collectors.toList()); - Assertions.assertThat(streamGraph.getStreamEdges(nodeIds.get(0), nodeIds.get(1))) + assertThat(streamGraph.getStreamEdges(nodeIds.get(0), nodeIds.get(1))) .hasSize(1) .satisfies( e -> - Assertions.assertThat(e.get(0).getExchangeMode()) + assertThat(e.get(0).getExchangeMode()) .isEqualTo(StreamExchangeMode.UNDEFINED)); } @@ -892,7 +873,7 @@ public void testResetBatchExchangeModeInStreamingExecution() { */ @Deprecated @Test - public void testAutoParallelismForExpandedTransformationsDeprecated() { + void testAutoParallelismForExpandedTransformationsDeprecated() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(2); @@ -908,13 +889,13 @@ public void testAutoParallelismForExpandedTransformationsDeprecated() { .forEach( node -> { if (!node.getOperatorName().startsWith("Source")) { - Assertions.assertThat(node.getParallelism()).isEqualTo(2); + assertThat(node.getParallelism()).isEqualTo(2); } }); } @Test - public void testAutoParallelismForExpandedTransformations() { + void testAutoParallelismForExpandedTransformations() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(2); @@ -930,13 +911,13 @@ public void testAutoParallelismForExpandedTransformations() { .forEach( node -> { if (!node.getOperatorName().startsWith("Source")) { - Assertions.assertThat(node.getParallelism()).isEqualTo(2); + assertThat(node.getParallelism()).isEqualTo(2); } }); } @Test - public void testCacheTransformation() { + void testCacheTransformation() { final TestingStreamExecutionEnvironment env = new TestingStreamExecutionEnvironment(); env.setRuntimeMode(RuntimeExecutionMode.BATCH); @@ -947,8 +928,7 @@ public void testCacheTransformation() { .reduce(Integer::sum) .setParallelism(upstreamParallelism) .cache(); - Assertions.assertThat(cachedStream.getTransformation()) - .isInstanceOf(CacheTransformation.class); + assertThat(cachedStream.getTransformation()).isInstanceOf(CacheTransformation.class); CacheTransformation cacheTransformation = (CacheTransformation) cachedStream.getTransformation(); @@ -964,7 +944,7 @@ public void testCacheTransformation() { } @Test - public void testCacheSideOutput() { + void testCacheSideOutput() { final TestingStreamExecutionEnvironment env = new TestingStreamExecutionEnvironment(); env.setRuntimeMode(RuntimeExecutionMode.BATCH); @@ -973,8 +953,7 @@ public void testCacheSideOutput() { env.fromData(1, 2, 3).map(i -> i).setParallelism(upstreamParallelism); final DataStream sideOutputCache = stream.getSideOutput(new OutputTag("1") {}).cache(); - 
Assertions.assertThat(sideOutputCache.getTransformation()) - .isInstanceOf(CacheTransformation.class); + assertThat(sideOutputCache.getTransformation()).isInstanceOf(CacheTransformation.class); final CacheTransformation cacheTransformation = (CacheTransformation) sideOutputCache.getTransformation(); sideOutputCache.print(); @@ -993,7 +972,7 @@ private void verifyCacheProduceNode( CacheTransformation cacheTransformation, StreamGraph streamGraph, String expectedTagId) { - Assertions.assertThat(streamGraph.getStreamNodes()) + assertThat(streamGraph.getStreamNodes()) .anyMatch( node -> { if (!CacheTransformationTranslator.CACHE_PRODUCER_OPERATOR_NAME.equals( @@ -1001,22 +980,17 @@ private void verifyCacheProduceNode( return false; } - Assertions.assertThat(node.getParallelism()) - .isEqualTo(upstreamParallelism); - Assertions.assertThat(node.getInEdges().size()).isEqualTo(1); + assertThat(node.getParallelism()).isEqualTo(upstreamParallelism); + assertThat(node.getInEdges().size()).isEqualTo(1); final StreamEdge inEdge = node.getInEdges().get(0); - Assertions.assertThat(inEdge.getPartitioner()) + assertThat(inEdge.getPartitioner()) .isInstanceOf(ForwardPartitioner.class); if (expectedTagId != null) { - Assertions.assertThat(inEdge.getOutputTag().getId()) - .isEqualTo(expectedTagId); + assertThat(inEdge.getOutputTag().getId()).isEqualTo(expectedTagId); } - Assertions.assertThat(inEdge.getIntermediateDatasetIdToProduce()) - .isNotNull(); - Assertions.assertThat( - new AbstractID( - inEdge.getIntermediateDatasetIdToProduce())) + assertThat(inEdge.getIntermediateDatasetIdToProduce()).isNotNull(); + assertThat(new AbstractID(inEdge.getIntermediateDatasetIdToProduce())) .isEqualTo(cacheTransformation.getDatasetId()); return true; }); @@ -1026,7 +1000,7 @@ private void verifyCacheConsumeNode( StreamExecutionEnvironment env, int upstreamParallelism, CacheTransformation cacheTransformation) { - Assertions.assertThat(env.getStreamGraph().getStreamNodes()) + assertThat(env.getStreamGraph().getStreamNodes()) .anyMatch( node -> { if (!CacheTransformationTranslator.CACHE_CONSUMER_OPERATOR_NAME.equals( @@ -1034,9 +1008,8 @@ private void verifyCacheConsumeNode( return false; } - Assertions.assertThat(node.getParallelism()) - .isEqualTo(upstreamParallelism); - Assertions.assertThat(new AbstractID(node.getConsumeClusterDatasetId())) + assertThat(node.getParallelism()).isEqualTo(upstreamParallelism); + assertThat(new AbstractID(node.getConsumeClusterDatasetId())) .isEqualTo(cacheTransformation.getDatasetId()); return true; }); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorTest.java index 4aa9ffa31f2c0..86e77a4254a36 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorTest.java @@ -130,14 +130,12 @@ import org.apache.flink.util.AbstractID; import org.apache.flink.util.Collector; import org.apache.flink.util.SerializedValue; -import org.apache.flink.util.TestLoggerExtension; import org.apache.flink.shaded.guava31.com.google.common.collect.Iterables; import org.assertj.core.api.Assertions; import org.assertj.core.data.Offset; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import 
org.junit.jupiter.params.provider.ValueSource; @@ -166,7 +164,6 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for {@link StreamingJobGraphGenerator}. */ -@ExtendWith(TestLoggerExtension.class) @SuppressWarnings("serial") class StreamingJobGraphGeneratorTest { @@ -2123,7 +2120,7 @@ void testStreamConfigSerializationException() { } @Test - public void testCoordinatedSerializationException() { + void testCoordinatedSerializationException() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource source = env.fromData(1, 2, 3); env.addOperator( diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest.java index d9b476ccf9614..8fbc434af2a85 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest.java @@ -26,32 +26,28 @@ import org.apache.flink.streaming.api.transformations.StreamExchangeMode; import org.apache.flink.streaming.runtime.partitioner.ForwardPartitioner; import org.apache.flink.streaming.runtime.partitioner.RescalePartitioner; -import org.apache.flink.util.TestLogger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.List; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link StreamingJobGraphGenerator} on different {@link GlobalStreamExchangeMode} * settings. 
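* <p>Each exchange mode is verified by asserting the partition type of the produced
* data sets, e.g. (sketch based on the assertions below):
* <pre>{@code
* JobVertex sourceVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(0);
* assertThat(sourceVertex.getProducedDataSets().get(0).getResultType())
*         .isEqualTo(ResultPartitionType.BLOCKING);
* }</pre>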
*/ -public class StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest extends TestLogger { +class StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest { @Test - public void testDefaultGlobalExchangeModeIsAllEdgesPipelined() { + void testDefaultGlobalExchangeModeIsAllEdgesPipelined() { final StreamGraph streamGraph = createStreamGraph(); - assertThat( - streamGraph.getGlobalStreamExchangeMode(), - is(GlobalStreamExchangeMode.ALL_EDGES_PIPELINED)); + assertThat(streamGraph.getGlobalStreamExchangeMode()) + .isEqualTo(GlobalStreamExchangeMode.ALL_EDGES_PIPELINED); } @Test - public void testAllEdgesBlockingMode() { + void testAllEdgesBlockingMode() { final StreamGraph streamGraph = createStreamGraph(GlobalStreamExchangeMode.ALL_EDGES_BLOCKING); final JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph); @@ -61,19 +57,16 @@ public void testAllEdgesBlockingMode() { final JobVertex map1Vertex = verticesSorted.get(1); final JobVertex map2Vertex = verticesSorted.get(2); - assertEquals( - ResultPartitionType.BLOCKING, - sourceVertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.BLOCKING, - map1Vertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.BLOCKING, - map2Vertex.getProducedDataSets().get(0).getResultType()); + assertThat(sourceVertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.BLOCKING); + assertThat(map1Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.BLOCKING); + assertThat(map2Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.BLOCKING); } @Test - public void testAllEdgesPipelinedMode() { + void testAllEdgesPipelinedMode() { final StreamGraph streamGraph = createStreamGraph(); streamGraph.setGlobalStreamExchangeMode(GlobalStreamExchangeMode.ALL_EDGES_PIPELINED); final JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph); @@ -83,19 +76,16 @@ public void testAllEdgesPipelinedMode() { final JobVertex map1Vertex = verticesSorted.get(1); final JobVertex map2Vertex = verticesSorted.get(2); - assertEquals( - ResultPartitionType.PIPELINED_BOUNDED, - sourceVertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.PIPELINED_BOUNDED, - map1Vertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.PIPELINED_BOUNDED, - map2Vertex.getProducedDataSets().get(0).getResultType()); + assertThat(sourceVertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); + assertThat(map1Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); + assertThat(map2Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); } @Test - public void testForwardEdgesPipelinedMode() { + void testForwardEdgesPipelinedMode() { final StreamGraph streamGraph = createStreamGraph(GlobalStreamExchangeMode.FORWARD_EDGES_PIPELINED); final JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph); @@ -105,19 +95,16 @@ public void testForwardEdgesPipelinedMode() { final JobVertex map1Vertex = verticesSorted.get(1); final JobVertex map2Vertex = verticesSorted.get(2); - assertEquals( - ResultPartitionType.PIPELINED_BOUNDED, - sourceVertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.BLOCKING, - 
map1Vertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.BLOCKING, - map2Vertex.getProducedDataSets().get(0).getResultType()); + assertThat(sourceVertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); + assertThat(map1Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.BLOCKING); + assertThat(map2Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.BLOCKING); } @Test - public void testPointwiseEdgesPipelinedMode() { + void testPointwiseEdgesPipelinedMode() { final StreamGraph streamGraph = createStreamGraph(GlobalStreamExchangeMode.POINTWISE_EDGES_PIPELINED); final JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph); @@ -127,19 +114,16 @@ public void testPointwiseEdgesPipelinedMode() { final JobVertex map1Vertex = verticesSorted.get(1); final JobVertex map2Vertex = verticesSorted.get(2); - assertEquals( - ResultPartitionType.PIPELINED_BOUNDED, - sourceVertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.PIPELINED_BOUNDED, - map1Vertex.getProducedDataSets().get(0).getResultType()); - assertEquals( - ResultPartitionType.BLOCKING, - map2Vertex.getProducedDataSets().get(0).getResultType()); + assertThat(sourceVertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); + assertThat(map1Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); + assertThat(map2Vertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.BLOCKING); } @Test - public void testGlobalExchangeModeDoesNotOverrideSpecifiedExchangeMode() { + void testGlobalExchangeModeDoesNotOverrideSpecifiedExchangeMode() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); final DataStream source = env.fromData(1, 2, 3).setParallelism(1); final DataStream forward = @@ -158,9 +142,8 @@ public void testGlobalExchangeModeDoesNotOverrideSpecifiedExchangeMode() { final List verticesSorted = jobGraph.getVerticesSortedTopologicallyFromSources(); final JobVertex sourceVertex = verticesSorted.get(0); - assertEquals( - ResultPartitionType.PIPELINED_BOUNDED, - sourceVertex.getProducedDataSets().get(0).getResultType()); + assertThat(sourceVertex.getProducedDataSets().get(0).getResultType()) + .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED); } /** diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java index afc52d609afa1..54eebf1d4ecdb 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java @@ -48,7 +48,7 @@ import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -60,10 +60,8 @@ import java.util.Random; import java.util.concurrent.ConcurrentLinkedQueue; -import static junit.framework.TestCase.assertTrue; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.empty; 
+import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.HamcrestCondition.matching; /** * Tests for the facilities provided by {@link AbstractStreamOperator}. This mostly tests timers and @@ -101,7 +99,7 @@ protected KeyedOneInputStreamOperatorTestHarness create } @Test - public void testStateDoesNotInterfere() throws Exception { + void testStateDoesNotInterfere() throws Exception { try (KeyedOneInputStreamOperatorTestHarness, String> testHarness = createTestHarness()) { testHarness.open(); @@ -112,9 +110,8 @@ public void testStateDoesNotInterfere() throws Exception { testHarness.processElement(new Tuple2<>(1, "EMIT_STATE"), 0); testHarness.processElement(new Tuple2<>(0, "EMIT_STATE"), 0); - assertThat( - extractResult(testHarness), - contains("ON_ELEMENT:1:CIAO", "ON_ELEMENT:0:HELLO")); + assertThat(extractResult(testHarness)) + .contains("ON_ELEMENT:1:CIAO", "ON_ELEMENT:0:HELLO"); } } @@ -123,7 +120,7 @@ public void testStateDoesNotInterfere() throws Exception { * was set. */ @Test - public void testEventTimeTimersDontInterfere() throws Exception { + void testEventTimeTimersDontInterfere() throws Exception { try (KeyedOneInputStreamOperatorTestHarness, String> testHarness = createTestHarness()) { testHarness.open(); @@ -139,11 +136,11 @@ public void testEventTimeTimersDontInterfere() throws Exception { testHarness.processWatermark(10L); - assertThat(extractResult(testHarness), contains("ON_EVENT_TIME:HELLO")); + assertThat(extractResult(testHarness)).contains("ON_EVENT_TIME:HELLO"); testHarness.processWatermark(20L); - assertThat(extractResult(testHarness), contains("ON_EVENT_TIME:CIAO")); + assertThat(extractResult(testHarness)).contains("ON_EVENT_TIME:CIAO"); } } @@ -152,7 +149,7 @@ public void testEventTimeTimersDontInterfere() throws Exception { * timer was set. */ @Test - public void testProcessingTimeTimersDontInterfere() throws Exception { + void testProcessingTimeTimersDontInterfere() throws Exception { try (KeyedOneInputStreamOperatorTestHarness, String> testHarness = createTestHarness()) { testHarness.open(); @@ -168,17 +165,17 @@ public void testProcessingTimeTimersDontInterfere() throws Exception { testHarness.setProcessingTime(10L); - assertThat(extractResult(testHarness), contains("ON_PROC_TIME:HELLO")); + assertThat(extractResult(testHarness)).contains("ON_PROC_TIME:HELLO"); testHarness.setProcessingTime(20L); - assertThat(extractResult(testHarness), contains("ON_PROC_TIME:CIAO")); + assertThat(extractResult(testHarness)).contains("ON_PROC_TIME:CIAO"); } } /** Verify that a low-level timer is set for processing-time timers in case of restore. */ @Test - public void testEnsureProcessingTimeTimerRegisteredOnRestore() throws Exception { + void testEnsureProcessingTimeTimerRegisteredOnRestore() throws Exception { OperatorSubtaskState snapshot; try (KeyedOneInputStreamOperatorTestHarness, String> testHarness = createTestHarness()) { @@ -206,17 +203,17 @@ public void testEnsureProcessingTimeTimerRegisteredOnRestore() throws Exception testHarness1.setProcessingTime(10L); - assertThat(extractResult(testHarness1), contains("ON_PROC_TIME:HELLO")); + assertThat(extractResult(testHarness1)).contains("ON_PROC_TIME:HELLO"); testHarness1.setProcessingTime(20L); - assertThat(extractResult(testHarness1), contains("ON_PROC_TIME:CIAO")); + assertThat(extractResult(testHarness1)).contains("ON_PROC_TIME:CIAO"); } } /** Verify that timers for the different time domains don't clash. 
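* <p>A semantic note on the assertions below: Hamcrest's {@code contains(...)} matched
* the whole iterable in order, whereas AssertJ's {@code .contains(...)} only checks
* membership; the strict equivalent, where needed, would be:
* <pre>{@code
* assertThat(extractResult(testHarness)).containsExactly("ON_EVENT_TIME:HELLO");
* }</pre>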
*/ @Test - public void testProcessingTimeAndEventTimeDontInterfere() throws Exception { + void testProcessingTimeAndEventTimeDontInterfere() throws Exception { try (KeyedOneInputStreamOperatorTestHarness, String> testHarness = createTestHarness()) { testHarness.open(); @@ -231,11 +228,11 @@ public void testProcessingTimeAndEventTimeDontInterfere() throws Exception { testHarness.processWatermark(20L); - assertThat(extractResult(testHarness), contains("ON_EVENT_TIME:HELLO")); + assertThat(extractResult(testHarness)).contains("ON_EVENT_TIME:HELLO"); testHarness.setProcessingTime(10L); - assertThat(extractResult(testHarness), contains("ON_PROC_TIME:HELLO")); + assertThat(extractResult(testHarness)).contains("ON_PROC_TIME:HELLO"); } } @@ -244,7 +241,7 @@ public void testProcessingTimeAndEventTimeDontInterfere() throws Exception { * assigned to operator subtasks when restoring. */ @Test - public void testStateAndTimerStateShufflingScalingUp() throws Exception { + void testStateAndTimerStateShufflingScalingUp() throws Exception { final int maxParallelism = 10; // first get two keys that will fall into different key-group ranges that go @@ -277,7 +274,7 @@ public void testStateAndTimerStateShufflingScalingUp() throws Exception { testHarness.processElement(new Tuple2<>(key1, "SET_STATE:HELLO"), 0); testHarness.processElement(new Tuple2<>(key2, "SET_STATE:CIAO"), 0); - assertTrue(extractResult(testHarness).isEmpty()); + assertThat(extractResult(testHarness)).isEmpty(); snapshot = testHarness.snapshot(0, 0); } @@ -295,27 +292,27 @@ public void testStateAndTimerStateShufflingScalingUp() throws Exception { testHarness1.processWatermark(10L); - assertThat(extractResult(testHarness1), contains("ON_EVENT_TIME:HELLO")); + assertThat(extractResult(testHarness1)).contains("ON_EVENT_TIME:HELLO"); - assertTrue(extractResult(testHarness1).isEmpty()); + assertThat(extractResult(testHarness1)).isEmpty(); // this should not trigger anything, the trigger for WM=20 should sit in the // other operator subtask testHarness1.processWatermark(20L); - assertTrue(extractResult(testHarness1).isEmpty()); + assertThat(extractResult(testHarness1)).isEmpty(); testHarness1.setProcessingTime(10L); - assertThat(extractResult(testHarness1), contains("ON_PROC_TIME:HELLO")); + assertThat(extractResult(testHarness1)).contains("ON_PROC_TIME:HELLO"); - assertTrue(extractResult(testHarness1).isEmpty()); + assertThat(extractResult(testHarness1)).isEmpty(); // this should not trigger anything, the trigger for TIME=20 should sit in the // other operator subtask testHarness1.setProcessingTime(20L); - assertTrue(extractResult(testHarness1).isEmpty()); + assertThat(extractResult(testHarness1)).isEmpty(); } // now, for the second operator @@ -332,27 +329,27 @@ public void testStateAndTimerStateShufflingScalingUp() throws Exception { testHarness2.processWatermark(10L); // nothing should happen because this timer is in the other subtask - assertTrue(extractResult(testHarness2).isEmpty()); + assertThat(extractResult(testHarness2)).isEmpty(); testHarness2.processWatermark(20L); - assertThat(extractResult(testHarness2), contains("ON_EVENT_TIME:CIAO")); + assertThat(extractResult(testHarness2)).contains("ON_EVENT_TIME:CIAO"); testHarness2.setProcessingTime(10L); // nothing should happen because this timer is in the other subtask - assertTrue(extractResult(testHarness2).isEmpty()); + assertThat(extractResult(testHarness2)).isEmpty(); testHarness2.setProcessingTime(20L); - assertThat(extractResult(testHarness2), contains("ON_PROC_TIME:CIAO")); + 
assertThat(extractResult(testHarness2)).contains("ON_PROC_TIME:CIAO"); - assertTrue(extractResult(testHarness2).isEmpty()); + assertThat(extractResult(testHarness2)).isEmpty(); } } @Test - public void testStateAndTimerStateShufflingScalingDown() throws Exception { + void testStateAndTimerStateShufflingScalingDown() throws Exception { final int maxParallelism = 10; // first get two keys that will fall into different key-group ranges that go @@ -416,25 +413,25 @@ public void testStateAndTimerStateShufflingScalingDown() throws Exception { testHarness3.open(); testHarness3.processWatermark(30L); - assertThat(extractResult(testHarness3), contains("ON_EVENT_TIME:HELLO")); - assertTrue(extractResult(testHarness3).isEmpty()); + assertThat(extractResult(testHarness3)).contains("ON_EVENT_TIME:HELLO"); + assertThat(extractResult(testHarness3)).isEmpty(); testHarness3.processWatermark(40L); - assertThat(extractResult(testHarness3), contains("ON_EVENT_TIME:CIAO")); - assertTrue(extractResult(testHarness3).isEmpty()); + assertThat(extractResult(testHarness3)).contains("ON_EVENT_TIME:CIAO"); + assertThat(extractResult(testHarness3)).isEmpty(); testHarness3.setProcessingTime(30L); - assertThat(extractResult(testHarness3), contains("ON_PROC_TIME:HELLO")); - assertTrue(extractResult(testHarness3).isEmpty()); + assertThat(extractResult(testHarness3)).contains("ON_PROC_TIME:HELLO"); + assertThat(extractResult(testHarness3)).isEmpty(); testHarness3.setProcessingTime(40L); - assertThat(extractResult(testHarness3), contains("ON_PROC_TIME:CIAO")); - assertTrue(extractResult(testHarness3).isEmpty()); + assertThat(extractResult(testHarness3)).contains("ON_PROC_TIME:CIAO"); + assertThat(extractResult(testHarness3)).isEmpty(); } } @Test - public void testCustomRawKeyedStateSnapshotAndRestore() throws Exception { + void testCustomRawKeyedStateSnapshotAndRestore() throws Exception { // setup: 10 key groups, all assigned to single subtask final int maxParallelism = 10; final int numSubtasks = 1; @@ -473,13 +470,12 @@ public void testCustomRawKeyedStateSnapshotAndRestore() throws Exception { testHarness.open(); } - assertThat( - testOperator.restoredRawKeyedState, - hasRestoredKeyGroupsWith(testSnapshotData, keyGroupsToWrite)); + assertThat(testOperator.restoredRawKeyedState) + .is(matching(hasRestoredKeyGroupsWith(testSnapshotData, keyGroupsToWrite))); } @Test - public void testIdleWatermarkHandling() throws Exception { + void testIdleWatermarkHandling() throws Exception { final WatermarkTestingOperator testOperator = new WatermarkTestingOperator(); ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); @@ -496,7 +492,7 @@ public void testIdleWatermarkHandling() throws Exception { testHarness.processElement1(3L, 3L); testHarness.processElement1(4L, 4L); testHarness.processWatermark1(new Watermark(1L)); - assertThat(testHarness.getOutput(), empty()); + assertThat(testHarness.getOutput()).isEmpty(); testHarness.processWatermarkStatus2(WatermarkStatus.IDLE); expectedOutput.add(new StreamRecord<>(1L)); @@ -519,7 +515,7 @@ public void testIdleWatermarkHandling() throws Exception { } @Test - public void testIdlenessForwarding() throws Exception { + void testIdlenessForwarding() throws Exception { final WatermarkTestingOperator testOperator = new WatermarkTestingOperator(); ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); KeySelector dummyKeySelector = l -> 0; @@ -541,7 +537,7 @@ public void testIdlenessForwarding() throws Exception { } @Test - public void 
testTwoInputsRecordAttributesForwarding() throws Exception { + void testTwoInputsRecordAttributesForwarding() throws Exception { final WatermarkTestingOperator testOperator = new WatermarkTestingOperator(); ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); KeySelector dummyKeySelector = l -> 0; @@ -575,7 +571,7 @@ public void testTwoInputsRecordAttributesForwarding() throws Exception { } @Test - public void testOneInputRecordAttributesForwarding() throws Exception { + void testOneInputRecordAttributesForwarding() throws Exception { ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); try (KeyedOneInputStreamOperatorTestHarness, String> testHarness = createTestHarness()) { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorV2Test.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorV2Test.java index c6f7a3d28ea99..40c6a716891f4 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorV2Test.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorV2Test.java @@ -34,18 +34,17 @@ import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness; import org.apache.flink.streaming.util.TestHarnessUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.ConcurrentLinkedQueue; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.empty; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for the facilities provided by {@link AbstractStreamOperatorV2}. 
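One mapping in the assertion rewrites above is worth flagging: Hamcrest's contains(...) asserts the complete iterable, in order, while AssertJ's contains(...) only asserts membership, so the like-for-like AssertJ call would be containsExactly(...). Because extractResult(...) drains the output queue and most checks are followed by an isEmpty() assertion, the effective coverage stays close, but each individual assertion becomes slightly weaker. A minimal illustration (class and values invented):

    import static org.assertj.core.api.Assertions.assertThat;

    import java.util.Arrays;
    import java.util.List;

    class ContainsSemanticsSketch {
        void sketch() {
            List<String> result = Arrays.asList("ON_EVENT_TIME:HELLO");

            // AssertJ contains(...): passes if the element is present, even among others.
            assertThat(result).contains("ON_EVENT_TIME:HELLO");

            // Strict counterpart of Hamcrest contains(...): exactly these elements, in order.
            assertThat(result).containsExactly("ON_EVENT_TIME:HELLO");
        }
    }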
*/ -public class AbstractStreamOperatorV2Test extends AbstractStreamOperatorTest { +class AbstractStreamOperatorV2Test extends AbstractStreamOperatorTest { @Override protected KeyedOneInputStreamOperatorTestHarness, String> createTestHarness(int maxParalelism, int numSubtasks, int subtaskIndex) @@ -73,7 +72,7 @@ public Class getStreamOperatorClass(ClassLoader classL } @Test - public void testIdleWatermarkHandling() throws Exception { + void testIdleWatermarkHandling() throws Exception { ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); KeySelector dummyKeySelector = l -> 0; try (KeyedMultiInputStreamOperatorTestHarness testHarness = @@ -88,7 +87,7 @@ public void testIdleWatermarkHandling() throws Exception { testHarness.processElement(0, new StreamRecord<>(3L, 3L)); testHarness.processElement(0, new StreamRecord<>(4L, 4L)); testHarness.processWatermark(0, new Watermark(1L)); - assertThat(testHarness.getOutput(), empty()); + assertThat(testHarness.getOutput()).isEmpty(); testHarness.processWatermarkStatus(1, WatermarkStatus.IDLE); TestHarnessUtil.assertOutputEquals( @@ -114,7 +113,7 @@ public void testIdleWatermarkHandling() throws Exception { } @Test - public void testIdlenessForwarding() throws Exception { + void testIdlenessForwarding() throws Exception { ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); try (KeyedMultiInputStreamOperatorTestHarness testHarness = new KeyedMultiInputStreamOperatorTestHarness<>( @@ -134,7 +133,7 @@ public void testIdlenessForwarding() throws Exception { } @Test - public void testRecordAttributesForwarding() throws Exception { + void testRecordAttributesForwarding() throws Exception { ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); try (KeyedMultiInputStreamOperatorTestHarness testHarness = new KeyedMultiInputStreamOperatorTestHarness<>( diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractUdfStreamOperatorLifecycleTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractUdfStreamOperatorLifecycleTest.java index 6833271f728bd..e91f7e9fc03f7 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractUdfStreamOperatorLifecycleTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractUdfStreamOperatorLifecycleTest.java @@ -42,11 +42,10 @@ import org.apache.flink.streaming.runtime.tasks.StreamTask; import org.apache.flink.streaming.runtime.tasks.StreamTaskTest; import org.apache.flink.testutils.TestingUtils; -import org.apache.flink.testutils.executor.TestExecutorResource; +import org.apache.flink.testutils.executor.TestExecutorExtension; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import java.io.Serializable; import java.lang.reflect.Method; @@ -56,13 +55,15 @@ import java.util.List; import java.util.concurrent.ScheduledExecutorService; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.fail; /** This test secures the lifecycle of AbstractUdfStreamOperator, including it's UDF handling. 
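The @ClassRule swap in the next hunk follows the standard JUnit 5 pattern: a static @RegisterExtension field keeps the once-per-class lifecycle that TestExecutorResource provided. A minimal sketch, assuming TestExecutorExtension exposes its executor via getExecutor(), which is not shown in this patch:

    import java.util.concurrent.ScheduledExecutorService;

    import org.apache.flink.testutils.TestingUtils;
    import org.apache.flink.testutils.executor.TestExecutorExtension;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.RegisterExtension;

    class ExecutorExtensionSketch {

        // A static field gives class-level setup/teardown, matching @ClassRule
        // semantics; a non-static field would be re-created per test instead.
        @RegisterExtension
        static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_EXTENSION =
                TestingUtils.defaultExecutorExtension();

        @Test
        void usesSharedExecutor() {
            // getExecutor() is an assumption; the tests in this patch only hand the
            // extension through to harness utilities.
            ScheduledExecutorService executor = EXECUTOR_EXTENSION.getExecutor();
            executor.submit(() -> {});
        }
    }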
*/ -public class AbstractUdfStreamOperatorLifecycleTest { - @ClassRule - public static final TestExecutorResource EXECUTOR_RESOURCE = - TestingUtils.defaultExecutorResource(); +class AbstractUdfStreamOperatorLifecycleTest { + + @RegisterExtension + private static final TestExecutorExtension EXECUTOR_RESOURCE = + TestingUtils.defaultExecutorExtension(); private static final List EXPECTED_CALL_ORDER_FULL = Arrays.asList( @@ -120,20 +121,20 @@ public class AbstractUdfStreamOperatorLifecycleTest { Collections.synchronizedList(new ArrayList(EXPECTED_CALL_ORDER_FULL.size())); @Test - public void testAllMethodsRegisteredInTest() { + void testAllMethodsRegisteredInTest() { List methodsWithSignatureString = new ArrayList<>(); for (Method method : StreamOperator.class.getMethods()) { methodsWithSignatureString.add( method.getName() + Arrays.toString(method.getParameterTypes())); } Collections.sort(methodsWithSignatureString); - Assert.assertEquals( - "It seems like new methods have been introduced to " - + StreamOperator.class - + ". Please register them with this test and ensure to document their position in the lifecycle " - + "(if applicable).", - ALL_METHODS_STREAM_OPERATOR, - methodsWithSignatureString.toString()); + assertThat(methodsWithSignatureString) + .as( + "It seems like new methods have been introduced to " + + StreamOperator.class + + ". Please register them with this test and ensure to document their position in the lifecycle " + + "(if applicable).") + .hasToString(ALL_METHODS_STREAM_OPERATOR); methodsWithSignatureString = new ArrayList<>(); for (Method method : RichFunction.class.getMethods()) { @@ -141,17 +142,17 @@ public void testAllMethodsRegisteredInTest() { method.getName() + Arrays.toString(method.getParameterTypes())); } Collections.sort(methodsWithSignatureString); - Assert.assertEquals( - "It seems like new methods have been introduced to " - + RichFunction.class - + ". Please register them with this test and ensure to document their position in the lifecycle " - + "(if applicable).", - ALL_METHODS_RICH_FUNCTION, - methodsWithSignatureString.toString()); + assertThat(methodsWithSignatureString) + .as( + "It seems like new methods have been introduced to " + + RichFunction.class + + ". 
Please register them with this test and ensure to document their position in the lifecycle " + + "(if applicable).") + .hasToString(ALL_METHODS_RICH_FUNCTION); } @Test - public void testLifeCycleFull() throws Exception { + void testLifeCycleFull() throws Exception { ACTUAL_ORDER_TRACKING.clear(); Configuration taskManagerConfig = new Configuration(); @@ -177,13 +178,13 @@ public void testLifeCycleFull() throws Exception { // wait for clean termination task.getExecutingThread().join(); - assertEquals(ExecutionState.FINISHED, task.getExecutionState()); - assertEquals(EXPECTED_CALL_ORDER_FULL, ACTUAL_ORDER_TRACKING); + assertThat(task.getExecutionState()).isEqualTo(ExecutionState.FINISHED); + assertThat(ACTUAL_ORDER_TRACKING).isEqualTo(EXPECTED_CALL_ORDER_FULL); } } @Test - public void testLifeCycleCancel() throws Exception { + void testLifeCycleCancel() throws Exception { ACTUAL_ORDER_TRACKING.clear(); Configuration taskManagerConfig = new Configuration(); @@ -210,8 +211,8 @@ public void testLifeCycleCancel() throws Exception { // wait for clean termination task.getExecutingThread().join(); - assertEquals(ExecutionState.CANCELED, task.getExecutionState()); - assertEquals(EXPECTED_CALL_ORDER_CANCEL_RUNNING, ACTUAL_ORDER_TRACKING); + assertThat(task.getExecutionState()).isEqualTo(ExecutionState.CANCELED); + assertThat(ACTUAL_ORDER_TRACKING).isEqualTo(EXPECTED_CALL_ORDER_CANCEL_RUNNING); } } @@ -303,8 +304,7 @@ public void run() { LifecycleTrackingStreamSource.runFinish.trigger(); } } catch (Exception e) { - e.printStackTrace(); - Assert.fail(); + fail(e); } } }; diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BackendRestorerProcedureTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BackendRestorerProcedureTest.java index 1e0fe54821274..b6e36e2c81141 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BackendRestorerProcedureTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BackendRestorerProcedureTest.java @@ -34,11 +34,9 @@ import org.apache.flink.runtime.state.memory.MemCheckpointStreamFactory; import org.apache.flink.runtime.util.BlockingFSDataInputStream; import org.apache.flink.util.FlinkException; -import org.apache.flink.util.TestLogger; import org.apache.flink.util.function.FunctionWithException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Collection; @@ -48,6 +46,8 @@ import java.util.concurrent.RunnableFuture; import java.util.concurrent.atomic.AtomicReference; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; @@ -55,7 +55,7 @@ import static org.mockito.Mockito.when; /** Tests for {@link BackendRestorerProcedure}. */ -public class BackendRestorerProcedureTest extends TestLogger { +class BackendRestorerProcedureTest { private final FunctionWithException< Collection, OperatorStateBackend, Exception> @@ -74,7 +74,7 @@ public class BackendRestorerProcedureTest extends TestLogger { * attempts if there are more options. 
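The restore test below also relies on AssertJ's value-specific shorthands, which this patch substitutes for assertEquals on counts and for hasNext() checks on iterators. A quick reference (values illustrative):

    import static org.assertj.core.api.Assertions.assertThat;

    import java.util.Collections;
    import java.util.Iterator;

    class ShorthandSketch {
        void sketch() {
            assertThat(0).isZero();       // replaces assertEquals(0, value)
            assertThat(1).isOne();        // replaces assertEquals(1, value)

            Iterator<Integer> it = Collections.emptyIterator();
            assertThat(it).isExhausted(); // replaces assertFalse(it.hasNext())
        }
    }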
*/ @Test - public void testRestoreProcedureOrderAndFailure() throws Exception { + void testRestoreProcedureOrderAndFailure() throws Exception { CloseableRegistry closeableRegistry = new CloseableRegistry(); CheckpointStreamFactory checkpointStreamFactory = new MemCheckpointStreamFactory(1024); @@ -124,7 +124,7 @@ public void testRestoreProcedureOrderAndFailure() throws Exception { OperatorStateBackend restoredBackend = restorerProcedure.createAndRestore( sortedRestoreOptions, StateObject.StateObjectSizeStatsCollector.create()); - Assert.assertNotNull(restoredBackend); + assertThat(restoredBackend).isNotNull(); try { verify(firstFailHandle).openInputStream(); @@ -134,11 +134,11 @@ public void testRestoreProcedureOrderAndFailure() throws Exception { ListState listState = restoredBackend.getListState(stateDescriptor); Iterator stateIterator = listState.get().iterator(); - Assert.assertEquals(0, (int) stateIterator.next()); - Assert.assertEquals(1, (int) stateIterator.next()); - Assert.assertEquals(2, (int) stateIterator.next()); - Assert.assertEquals(3, (int) stateIterator.next()); - Assert.assertFalse(stateIterator.hasNext()); + assertThat(stateIterator.next()).isZero(); + assertThat(stateIterator.next()).isOne(); + assertThat(stateIterator.next()).isEqualTo(2); + assertThat(stateIterator.next()).isEqualTo(3); + assertThat(stateIterator).isExhausted(); } finally { restoredBackend.close(); @@ -148,7 +148,7 @@ public void testRestoreProcedureOrderAndFailure() throws Exception { /** Tests if there is an exception if all restore attempts are exhausted and failed. */ @Test - public void testExceptionThrownIfAllRestoresFailed() throws Exception { + void testExceptionThrownIfAllRestoresFailed() throws Exception { CloseableRegistry closeableRegistry = new CloseableRegistry(); @@ -166,12 +166,12 @@ public void testExceptionThrownIfAllRestoresFailed() throws Exception { new BackendRestorerProcedure<>( backendSupplier, closeableRegistry, "test op state backend"); - try { - restorerProcedure.createAndRestore( - sortedRestoreOptions, StateObject.StateObjectSizeStatsCollector.create()); - Assert.fail(); - } catch (Exception ignore) { - } + assertThatThrownBy( + () -> + restorerProcedure.createAndRestore( + sortedRestoreOptions, + StateObject.StateObjectSizeStatsCollector.create())) + .isInstanceOf(FlinkException.class); verify(firstFailHandle).openInputStream(); verify(secondFailHandle).openInputStream(); @@ -180,7 +180,7 @@ public void testExceptionThrownIfAllRestoresFailed() throws Exception { /** Test that the restore can be stopped via the provided closeable registry. 
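The assertThatThrownBy rewrite in testExceptionThrownIfAllRestoresFailed above does more than remove boilerplate: the old try/fail/catch accepted any Exception, while the new form pins the expected FlinkException. A self-contained sketch of the pattern (method name invented):

    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    import org.apache.flink.util.FlinkException;

    class ThrownBySketch {

        void restoreFromAllHandles() throws FlinkException {
            throw new FlinkException("all restore attempts failed");
        }

        void sketch() {
            // JUnit 4 pattern removed by this patch:
            //   try { restoreFromAllHandles(); Assert.fail(); } catch (Exception ignore) {}
            assertThatThrownBy(this::restoreFromAllHandles)
                    .isInstanceOf(FlinkException.class);
        }
    }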
*/ @Test - public void testCanBeCanceledViaRegistry() throws Exception { + void testCanBeCanceledViaRegistry() throws Exception { CloseableRegistry closeableRegistry = new CloseableRegistry(); OneShotLatch waitForBlock = new OneShotLatch(); OneShotLatch unblock = new OneShotLatch(); @@ -217,6 +217,6 @@ public void testCanBeCanceledViaRegistry() throws Exception { restoreThread.join(); Exception exception = exceptionReference.get(); - Assert.assertTrue(exception instanceof FlinkException); + assertThat(exception).isInstanceOf(FlinkException.class); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BatchGroupedReduceOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BatchGroupedReduceOperatorTest.java index b7188ec0e3455..0430340b15075 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BatchGroupedReduceOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/BatchGroupedReduceOperatorTest.java @@ -24,25 +24,18 @@ import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.ArrayDeque; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.empty; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** Tests {@link BatchGroupedReduceOperator}. */ -public class BatchGroupedReduceOperatorTest extends TestLogger { - - @Rule public ExpectedException expectedException = ExpectedException.none(); +class BatchGroupedReduceOperatorTest { @Test - public void noIncrementalResults() throws Exception { + void noIncrementalResults() throws Exception { KeyedOneInputStreamOperatorTestHarness testHarness = createTestHarness(); @@ -51,11 +44,11 @@ public void noIncrementalResults() throws Exception { testHarness.processElement(new StreamRecord<>("ciao")); testHarness.processElement(new StreamRecord<>("ciao")); - assertThat(testHarness.getOutput(), empty()); + assertThat(testHarness.getOutput()).isEmpty(); } @Test - public void resultsOnMaxWatermark() throws Exception { + void resultsOnMaxWatermark() throws Exception { KeyedOneInputStreamOperatorTestHarness testHarness = createTestHarness(); @@ -72,11 +65,11 @@ public void resultsOnMaxWatermark() throws Exception { expectedOutput.add(new StreamRecord<>("ciaociaociao", Long.MAX_VALUE)); expectedOutput.add(new Watermark(Long.MAX_VALUE)); - assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); + assertThat(testHarness.getOutput()).contains(expectedOutput.toArray()); } @Test - public void resultForSingleInput() throws Exception { + void resultForSingleInput() throws Exception { KeyedOneInputStreamOperatorTestHarness testHarness = createTestHarness(); @@ -90,7 +83,7 @@ public void resultForSingleInput() throws Exception { expectedOutput.add(new StreamRecord<>("ciao", Long.MAX_VALUE)); expectedOutput.add(new Watermark(Long.MAX_VALUE)); - assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); + assertThat(testHarness.getOutput()).contains(expectedOutput.toArray()); } private KeyedOneInputStreamOperatorTestHarness createTestHarness() diff --git 
a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InputSelectionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InputSelectionTest.java index 95dfb387013db..4f4850ea99539 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InputSelectionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InputSelectionTest.java @@ -19,143 +19,149 @@ import org.apache.flink.streaming.api.operators.InputSelection.Builder; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for {@link InputSelection}. */ -public class InputSelectionTest { +class InputSelectionTest { @Test - public void testIsInputSelected() { - assertFalse(new Builder().build().isInputSelected(1)); - assertFalse(new Builder().select(2).build().isInputSelected(1)); + void testIsInputSelected() { + assertThat(new Builder().build().isInputSelected(1)).isFalse(); + assertThat(new Builder().select(2).build().isInputSelected(1)).isFalse(); - assertTrue(new Builder().select(1).build().isInputSelected(1)); - assertTrue(new Builder().select(1).select(2).build().isInputSelected(1)); - assertTrue(new Builder().select(-1).build().isInputSelected(1)); + assertThat(new Builder().select(1).build().isInputSelected(1)).isTrue(); + assertThat(new Builder().select(1).select(2).build().isInputSelected(1)).isTrue(); + assertThat(new Builder().select(-1).build().isInputSelected(1)).isTrue(); - assertTrue(new Builder().select(64).build().isInputSelected(64)); + assertThat(new Builder().select(64).build().isInputSelected(64)).isTrue(); } @Test - public void testInputSelectionNormalization() { - assertTrue(InputSelection.ALL.areAllInputsSelected()); + void testInputSelectionNormalization() { + assertThat(InputSelection.ALL.areAllInputsSelected()).isTrue(); - assertFalse(new Builder().select(1).select(2).build().areAllInputsSelected()); - assertTrue(new Builder().select(1).select(2).build(2).areAllInputsSelected()); + assertThat(new Builder().select(1).select(2).build().areAllInputsSelected()).isFalse(); + assertThat(new Builder().select(1).select(2).build(2).areAllInputsSelected()).isTrue(); - assertFalse(new Builder().select(1).select(2).select(3).build().areAllInputsSelected()); - assertTrue(new Builder().select(1).select(2).select(3).build(3).areAllInputsSelected()); + assertThat(new Builder().select(1).select(2).select(3).build().areAllInputsSelected()) + .isFalse(); + assertThat(new Builder().select(1).select(2).select(3).build(3).areAllInputsSelected()) + .isTrue(); - assertFalse(new Builder().select(1).select(3).build().areAllInputsSelected()); - assertFalse(new Builder().select(1).select(3).build(3).areAllInputsSelected()); + assertThat(new Builder().select(1).select(3).build().areAllInputsSelected()).isFalse(); + assertThat(new Builder().select(1).select(3).build(3).areAllInputsSelected()).isFalse(); - assertFalse(InputSelection.FIRST.areAllInputsSelected()); - assertFalse(InputSelection.SECOND.areAllInputsSelected()); + assertThat(InputSelection.FIRST.areAllInputsSelected()).isFalse(); + assertThat(InputSelection.SECOND.areAllInputsSelected()).isFalse(); } - @Test(expected = IllegalArgumentException.class) - public void 
testInputSelectionNormalizationOverflow() { - new Builder().select(3).build(2); + @Test + void testInputSelectionNormalizationOverflow() { + assertThatThrownBy(() -> new Builder().select(3).build(2)) + .isInstanceOf(IllegalArgumentException.class); } @Test - public void testFairSelectNextIndexOutOf2() { - assertEquals(1, InputSelection.ALL.fairSelectNextIndexOutOf2(3, 0)); - assertEquals(0, new Builder().select(1).select(2).build().fairSelectNextIndexOutOf2(3, 1)); - - assertEquals(1, InputSelection.ALL.fairSelectNextIndexOutOf2(2, 0)); - assertEquals(1, InputSelection.ALL.fairSelectNextIndexOutOf2(2, 1)); - assertEquals(0, InputSelection.ALL.fairSelectNextIndexOutOf2(1, 0)); - assertEquals(0, InputSelection.ALL.fairSelectNextIndexOutOf2(1, 1)); - assertEquals( - InputSelection.NONE_AVAILABLE, InputSelection.ALL.fairSelectNextIndexOutOf2(0, 0)); - assertEquals( - InputSelection.NONE_AVAILABLE, InputSelection.ALL.fairSelectNextIndexOutOf2(0, 1)); - - assertEquals(0, InputSelection.FIRST.fairSelectNextIndexOutOf2(1, 0)); - assertEquals(0, InputSelection.FIRST.fairSelectNextIndexOutOf2(3, 0)); - assertEquals( - InputSelection.NONE_AVAILABLE, - InputSelection.FIRST.fairSelectNextIndexOutOf2(2, 0)); - assertEquals( - InputSelection.NONE_AVAILABLE, - InputSelection.FIRST.fairSelectNextIndexOutOf2(0, 0)); - - assertEquals(1, InputSelection.SECOND.fairSelectNextIndexOutOf2(2, 1)); - assertEquals(1, InputSelection.SECOND.fairSelectNextIndexOutOf2(3, 1)); - assertEquals( - InputSelection.NONE_AVAILABLE, - InputSelection.SECOND.fairSelectNextIndexOutOf2(1, 1)); - assertEquals( - InputSelection.NONE_AVAILABLE, - InputSelection.SECOND.fairSelectNextIndexOutOf2(0, 1)); + void testFairSelectNextIndexOutOf2() { + assertThat(InputSelection.ALL.fairSelectNextIndexOutOf2(3, 0)).isOne(); + assertThat(new Builder().select(1).select(2).build().fairSelectNextIndexOutOf2(3, 1)) + .isZero(); + + assertThat(InputSelection.ALL.fairSelectNextIndexOutOf2(2, 0)).isOne(); + assertThat(InputSelection.ALL.fairSelectNextIndexOutOf2(2, 1)).isOne(); + assertThat(InputSelection.ALL.fairSelectNextIndexOutOf2(1, 0)).isZero(); + assertThat(InputSelection.ALL.fairSelectNextIndexOutOf2(1, 1)).isZero(); + assertThat(InputSelection.ALL.fairSelectNextIndexOutOf2(0, 0)) + .isEqualTo(InputSelection.NONE_AVAILABLE); + assertThat(InputSelection.ALL.fairSelectNextIndexOutOf2(0, 1)) + .isEqualTo(InputSelection.NONE_AVAILABLE); + + assertThat(InputSelection.FIRST.fairSelectNextIndexOutOf2(1, 0)).isZero(); + assertThat(InputSelection.FIRST.fairSelectNextIndexOutOf2(3, 0)).isZero(); + assertThat(InputSelection.FIRST.fairSelectNextIndexOutOf2(2, 0)) + .isEqualTo(InputSelection.NONE_AVAILABLE); + assertThat(InputSelection.FIRST.fairSelectNextIndexOutOf2(0, 0)) + .isEqualTo(InputSelection.NONE_AVAILABLE); + + assertThat(InputSelection.SECOND.fairSelectNextIndexOutOf2(2, 1)).isOne(); + assertThat(InputSelection.SECOND.fairSelectNextIndexOutOf2(3, 1)).isOne(); + assertThat(InputSelection.SECOND.fairSelectNextIndexOutOf2(1, 1)) + .isEqualTo(InputSelection.NONE_AVAILABLE); + assertThat(InputSelection.SECOND.fairSelectNextIndexOutOf2(0, 1)) + .isEqualTo(InputSelection.NONE_AVAILABLE); } @Test - public void testFairSelectNextIndexWithAllInputsSelected() { - assertEquals(1, InputSelection.ALL.fairSelectNextIndex(7, 0)); - assertEquals(2, InputSelection.ALL.fairSelectNextIndex(7, 1)); - assertEquals(0, InputSelection.ALL.fairSelectNextIndex(7, 2)); - assertEquals(1, InputSelection.ALL.fairSelectNextIndex(7, 0)); - 
assertEquals(InputSelection.NONE_AVAILABLE, InputSelection.ALL.fairSelectNextIndex(0, 2)); - - assertEquals(11, InputSelection.ALL.fairSelectNextIndex(-1, 10)); - assertEquals(0, InputSelection.ALL.fairSelectNextIndex(-1, 63)); - assertEquals(0, InputSelection.ALL.fairSelectNextIndex(-1, 158)); + void testFairSelectNextIndexWithAllInputsSelected() { + assertThat(InputSelection.ALL.fairSelectNextIndex(7, 0)).isOne(); + assertThat(InputSelection.ALL.fairSelectNextIndex(7, 1)).isEqualTo(2); + assertThat(InputSelection.ALL.fairSelectNextIndex(7, 2)).isZero(); + assertThat(InputSelection.ALL.fairSelectNextIndex(7, 0)).isOne(); + assertThat(InputSelection.ALL.fairSelectNextIndex(0, 2)) + .isEqualTo(InputSelection.NONE_AVAILABLE); + + assertThat(InputSelection.ALL.fairSelectNextIndex(-1, 10)).isEqualTo(11); + assertThat(InputSelection.ALL.fairSelectNextIndex(-1, 63)).isZero(); + assertThat(InputSelection.ALL.fairSelectNextIndex(-1, 158)).isZero(); } @Test - public void testFairSelectNextIndexWithSomeInputsSelected() { + void testFairSelectNextIndexWithSomeInputsSelected() { // combination of selection and availability is supposed to be 3, 5, 8: InputSelection selection = new Builder().select(2).select(3).select(4).select(5).select(8).build(); int availableInputs = (int) new Builder().select(3).select(5).select(6).select(8).build().getInputMask(); - assertEquals(2, selection.fairSelectNextIndex(availableInputs, 0)); - assertEquals(2, selection.fairSelectNextIndex(availableInputs, 1)); - assertEquals(4, selection.fairSelectNextIndex(availableInputs, 2)); - assertEquals(4, selection.fairSelectNextIndex(availableInputs, 3)); - assertEquals(7, selection.fairSelectNextIndex(availableInputs, 4)); - assertEquals(7, selection.fairSelectNextIndex(availableInputs, 5)); - assertEquals(7, selection.fairSelectNextIndex(availableInputs, 6)); - assertEquals(2, selection.fairSelectNextIndex(availableInputs, 7)); - assertEquals(2, selection.fairSelectNextIndex(availableInputs, 8)); - assertEquals(2, selection.fairSelectNextIndex(availableInputs, 158)); - assertEquals(InputSelection.NONE_AVAILABLE, selection.fairSelectNextIndex(0, 5)); - - assertEquals( - InputSelection.NONE_AVAILABLE, new Builder().build().fairSelectNextIndex(-1, 5)); + assertThat(selection.fairSelectNextIndex(availableInputs, 0)).isEqualTo(2); + assertThat(selection.fairSelectNextIndex(availableInputs, 1)).isEqualTo(2); + assertThat(selection.fairSelectNextIndex(availableInputs, 2)).isEqualTo(4); + assertThat(selection.fairSelectNextIndex(availableInputs, 3)).isEqualTo(4); + assertThat(selection.fairSelectNextIndex(availableInputs, 4)).isEqualTo(7); + assertThat(selection.fairSelectNextIndex(availableInputs, 5)).isEqualTo(7); + assertThat(selection.fairSelectNextIndex(availableInputs, 6)).isEqualTo(7); + assertThat(selection.fairSelectNextIndex(availableInputs, 7)).isEqualTo(2); + assertThat(selection.fairSelectNextIndex(availableInputs, 8)).isEqualTo(2); + assertThat(selection.fairSelectNextIndex(availableInputs, 158)).isEqualTo(2); + assertThat(selection.fairSelectNextIndex(0, 5)).isEqualTo(InputSelection.NONE_AVAILABLE); + + assertThat(new Builder().build().fairSelectNextIndex(-1, 5)) + .isEqualTo(InputSelection.NONE_AVAILABLE); } - @Test(expected = UnsupportedOperationException.class) - public void testUnsupportedFairSelectNextIndexOutOf2() { - InputSelection.ALL.fairSelectNextIndexOutOf2(7, 0); + @Test + void testUnsupportedFairSelectNextIndexOutOf2() { + assertThatThrownBy(() -> InputSelection.ALL.fairSelectNextIndexOutOf2(7, 0)) + 
.isInstanceOf(UnsupportedOperationException.class); } /** Tests for {@link Builder}. */ - public static class BuilderTest { + static class BuilderTest { @Test - public void testSelect() { - assertEquals(1L, new Builder().select(1).build().getInputMask()); - assertEquals(7L, new Builder().select(1).select(2).select(3).build().getInputMask()); - - assertEquals(0x8000_0000_0000_0000L, new Builder().select(64).build().getInputMask()); - assertEquals(0xffff_ffff_ffff_ffffL, new Builder().select(-1).build().getInputMask()); + void testSelect() { + assertThat(new Builder().select(1).build().getInputMask()).isOne(); + assertThat(new Builder().select(1).select(2).select(3).build().getInputMask()) + .isEqualTo(7L); + + assertThat(new Builder().select(64).build().getInputMask()) + .isEqualTo(0x8000_0000_0000_0000L); + assertThat(new Builder().select(-1).build().getInputMask()) + .isEqualTo(0xffff_ffff_ffff_ffffL); } - @Test(expected = IllegalArgumentException.class) - public void testIllegalInputId1() { - new Builder().select(-2); + @Test + void testIllegalInputId1() { + assertThatThrownBy(() -> new Builder().select(-2)) + .isInstanceOf(IllegalArgumentException.class); } - @Test(expected = IllegalArgumentException.class) - public void testIllegalInputId2() { - new Builder().select(65); + @Test + void testIllegalInputId2() { + assertThatThrownBy(() -> new Builder().select(65)) + .isInstanceOf(IllegalArgumentException.class); } } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimeServiceManagerImplTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimeServiceManagerImplTest.java index 6e7f5d4deccd1..fe72e74104056 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimeServiceManagerImplTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimeServiceManagerImplTest.java @@ -18,25 +18,22 @@ package org.apache.flink.streaming.api.operators; -import org.apache.flink.util.TestLogger; +import org.junit.jupiter.api.Test; -import org.junit.Assert; -import org.junit.Test; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link InternalTimeServiceManagerImpl}. */ -public class InternalTimeServiceManagerImplTest extends TestLogger { +class InternalTimeServiceManagerImplTest { /** This test fixes some constants, because changing them can harm backwards compatibility. 
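The prefixes pinned by the fixConstants() test below are written into the names of timer state held in checkpoints, so renaming them would break restore from existing savepoints; the test exists to make such a rename fail loudly. The same guard works for any serialized identifier (condensed sketch, mirroring the constant asserted here):

    import static org.assertj.core.api.Assertions.assertThat;

    class ConstantPinningSketch {

        // Ends up in checkpointed state; must stay stable across releases.
        static final String TIMER_STATE_PREFIX = "_timer_state";

        void fixConstants() {
            // A rename now fails this assertion instead of silently breaking
            // restore from old savepoints.
            assertThat(TIMER_STATE_PREFIX).isEqualTo("_timer_state");
        }
    }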
*/ @Test - public void fixConstants() { + void fixConstants() { String expectedTimerStatePrefix = "_timer_state"; - Assert.assertEquals( - expectedTimerStatePrefix, InternalTimeServiceManagerImpl.TIMER_STATE_PREFIX); - Assert.assertEquals( - expectedTimerStatePrefix + "/processing_", - InternalTimeServiceManagerImpl.PROCESSING_TIMER_PREFIX); - Assert.assertEquals( - expectedTimerStatePrefix + "/event_", - InternalTimeServiceManagerImpl.EVENT_TIMER_PREFIX); + assertThat(InternalTimeServiceManagerImpl.TIMER_STATE_PREFIX) + .isEqualTo(expectedTimerStatePrefix); + assertThat(InternalTimeServiceManagerImpl.PROCESSING_TIMER_PREFIX) + .isEqualTo(expectedTimerStatePrefix + "/processing_"); + assertThat(InternalTimeServiceManagerImpl.EVENT_TIMER_PREFIX) + .isEqualTo(expectedTimerStatePrefix + "/event_"); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimerServiceImplTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimerServiceImplTest.java index 915b273298265..5caa0aedae4dd 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimerServiceImplTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/InternalTimerServiceImplTest.java @@ -32,11 +32,11 @@ import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService; import org.apache.flink.streaming.runtime.tasks.StreamTaskCancellationContext; import org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameters; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -51,9 +51,7 @@ import java.util.Random; import java.util.Set; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.eq; @@ -63,8 +61,8 @@ import static org.mockito.Mockito.verify; /** Tests for {@link InternalTimerServiceImpl}. 
*/ -@RunWith(Parameterized.class) -public class InternalTimerServiceImplTest { +@ExtendWith(ParameterizedTestExtension.class) +class InternalTimerServiceImplTest { private final int maxParallelism; private final KeyGroupRange testKeyGroupRange; @@ -73,13 +71,13 @@ private static InternalTimer anyInternalTimer() { return any(); } - public InternalTimerServiceImplTest(int startKeyGroup, int endKeyGroup, int maxParallelism) { + InternalTimerServiceImplTest(int startKeyGroup, int endKeyGroup, int maxParallelism) { this.testKeyGroupRange = new KeyGroupRange(startKeyGroup, endKeyGroup); this.maxParallelism = maxParallelism; } - @Test - public void testKeyGroupStartIndexSetting() { + @TestTemplate + void testKeyGroupStartIndexSetting() { int startKeyGroupIdx = 7; int endKeyGroupIdx = 21; @@ -98,11 +96,11 @@ public void testKeyGroupStartIndexSetting() { StringSerializer.INSTANCE, createQueueFactory()); - Assert.assertEquals(startKeyGroupIdx, service.getLocalKeyGroupRangeStartIdx()); + assertThat(service.getLocalKeyGroupRangeStartIdx()).isEqualTo(startKeyGroupIdx); } - @Test - public void testTimerAssignmentToKeyGroups() { + @TestTemplate + void testTimerAssignmentToKeyGroups() { int totalNoOfTimers = 100; int totalNoOfKeyGroups = 100; @@ -167,11 +165,11 @@ public void testTimerAssignmentToKeyGroups() { processingTimeTimers.get(i); if (expected == null) { - Assert.assertTrue(actualEvent.isEmpty()); - Assert.assertTrue(actualProcessing.isEmpty()); + assertThat(actualEvent).isEmpty(); + assertThat(actualProcessing).isEmpty(); } else { - Assert.assertEquals(expected, actualEvent); - Assert.assertEquals(expected, actualProcessing); + assertThat(actualEvent).isEqualTo(expected); + assertThat(actualProcessing).isEqualTo(expected); } } } @@ -180,8 +178,8 @@ public void testTimerAssignmentToKeyGroups() { * Verify that we only ever have one processing-time task registered at the {@link * ProcessingTimeService}. 
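The parameterization change running through this file swaps JUnit 4's @RunWith(Parameterized.class) for Flink's ParameterizedTestExtension: each @Test becomes a @TestTemplate, and the @Parameters factory (now allowed to be private) still feeds the constructor. A condensed sketch with invented parameters:

    import java.util.Arrays;
    import java.util.Collection;

    import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension;
    import org.apache.flink.testutils.junit.extensions.parameterized.Parameters;

    import org.junit.jupiter.api.TestTemplate;
    import org.junit.jupiter.api.extension.ExtendWith;

    import static org.assertj.core.api.Assertions.assertThat;

    @ExtendWith(ParameterizedTestExtension.class)
    class KeyRangeSketchTest {

        private final int startKeyGroup;
        private final int endKeyGroup;

        KeyRangeSketchTest(int startKeyGroup, int endKeyGroup) {
            this.startKeyGroup = startKeyGroup;
            this.endKeyGroup = endKeyGroup;
        }

        @Parameters(name = "start = {0}, end = {1}")
        private static Collection<Object[]> keyRanges() {
            return Arrays.asList(new Object[][] {{0, 9}, {10, 19}});
        }

        @TestTemplate
        void rangeIsWellFormed() {
            assertThat(endKeyGroup).isGreaterThanOrEqualTo(startKeyGroup);
        }
    }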
*/ - @Test - public void testOnlySetsOnePhysicalProcessingTimeTimer() throws Exception { + @TestTemplate + void testOnlySetsOnePhysicalProcessingTimeTimer() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -207,40 +205,40 @@ public void testOnlySetsOnePhysicalProcessingTimeTimer() throws Exception { timerService.registerProcessingTimeTimer("hello", 10); timerService.registerProcessingTimeTimer("hello", 20); - assertEquals(5, timerService.numProcessingTimeTimers()); - assertEquals(2, timerService.numProcessingTimeTimers("hello")); - assertEquals(3, timerService.numProcessingTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(5); + assertThat(timerService.numProcessingTimeTimers("hello")).isEqualTo(2); + assertThat(timerService.numProcessingTimeTimers("ciao")).isEqualTo(3); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(10L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(10L); processingTimeService.setCurrentTime(10); - assertEquals(3, timerService.numProcessingTimeTimers()); - assertEquals(1, timerService.numProcessingTimeTimers("hello")); - assertEquals(2, timerService.numProcessingTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(3); + assertThat(timerService.numProcessingTimeTimers("hello")).isOne(); + assertThat(timerService.numProcessingTimeTimers("ciao")).isEqualTo(2); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(20L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(20L); processingTimeService.setCurrentTime(20); - assertEquals(1, timerService.numProcessingTimeTimers()); - assertEquals(0, timerService.numProcessingTimeTimers("hello")); - assertEquals(1, timerService.numProcessingTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isOne(); + assertThat(timerService.numProcessingTimeTimers("hello")).isZero(); + assertThat(timerService.numProcessingTimeTimers("ciao")).isOne(); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(30L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(30L); processingTimeService.setCurrentTime(30); - assertEquals(0, timerService.numProcessingTimeTimers()); + assertThat(timerService.numProcessingTimeTimers()).isZero(); - assertEquals(0, processingTimeService.getNumActiveTimers()); + assertThat(processingTimeService.getNumActiveTimers()).isZero(); timerService.registerProcessingTimeTimer("ciao", 40); - assertEquals(1, processingTimeService.getNumActiveTimers()); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); } /** @@ -248,8 +246,8 @@ public void testOnlySetsOnePhysicalProcessingTimeTimer() throws Exception { * removes the one physical timer and creates one for the earlier timestamp {@link * ProcessingTimeService}. 
*/ - @Test - public void testRegisterEarlierProcessingTimerMovesPhysicalProcessingTimer() throws Exception { + @TestTemplate + void testRegisterEarlierProcessingTimerMovesPhysicalProcessingTimer() { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -271,22 +269,22 @@ public void testRegisterEarlierProcessingTimerMovesPhysicalProcessingTimer() thr timerService.registerProcessingTimeTimer("ciao", 20); - assertEquals(1, timerService.numProcessingTimeTimers()); + assertThat(timerService.numProcessingTimeTimers()).isOne(); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(20L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(20L); timerService.registerProcessingTimeTimer("ciao", 10); - assertEquals(2, timerService.numProcessingTimeTimers()); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(2L); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(10L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(10L); } /** */ - @Test - public void testRegisteringProcessingTimeTimerInOnProcessingTimeDoesNotLeakPhysicalTimers() + @TestTemplate + void testRegisteringProcessingTimeTimerInOnProcessingTimeDoesNotLeakPhysicalTimers() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -309,10 +307,10 @@ public void testRegisteringProcessingTimeTimerInOnProcessingTimeDoesNotLeakPhysi timerService.registerProcessingTimeTimer("ciao", 10); - assertEquals(1, timerService.numProcessingTimeTimers()); + assertThat(timerService.numProcessingTimeTimers()).isOne(); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(10L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(10L); doAnswer( new Answer() { @@ -327,8 +325,8 @@ public Object answer(InvocationOnMock invocation) throws Exception { processingTimeService.setCurrentTime(10); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(20L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(20L); doAnswer( new Answer() { @@ -343,14 +341,14 @@ public Object answer(InvocationOnMock invocation) throws Exception { processingTimeService.setCurrentTime(20); - assertEquals(1, timerService.numProcessingTimeTimers()); + assertThat(timerService.numProcessingTimeTimers()).isOne(); - assertEquals(1, processingTimeService.getNumActiveTimers()); - assertThat(processingTimeService.getActiveTimerTimestamps(), containsInAnyOrder(30L)); + assertThat(processingTimeService.getNumActiveTimers()).isOne(); + assertThat(processingTimeService.getActiveTimerTimestamps()).contains(30L); } - @Test - public void testCurrentProcessingTime() throws Exception { + @TestTemplate + void testCurrentProcessingTime() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -366,14 +364,14 @@ public void testCurrentProcessingTime() throws 
Exception { createQueueFactory()); processingTimeService.setCurrentTime(17L); - assertEquals(17, timerService.currentProcessingTime()); + assertThat(timerService.currentProcessingTime()).isEqualTo(17L); processingTimeService.setCurrentTime(42); - assertEquals(42, timerService.currentProcessingTime()); + assertThat(timerService.currentProcessingTime()).isEqualTo(42L); } - @Test - public void testCurrentEventTime() throws Exception { + @TestTemplate + void testCurrentEventTime() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -389,15 +387,15 @@ public void testCurrentEventTime() throws Exception { createQueueFactory()); timerService.advanceWatermark(17); - assertEquals(17, timerService.currentWatermark()); + assertThat(timerService.currentWatermark()).isEqualTo(17); timerService.advanceWatermark(42); - assertEquals(42, timerService.currentWatermark()); + assertThat(timerService.currentWatermark()).isEqualTo(42); } /** This also verifies that we don't have leakage between keys/namespaces. */ - @Test - public void testSetAndFireEventTimeTimers() throws Exception { + @TestTemplate + void testSetAndFireEventTimeTimers() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -428,9 +426,9 @@ public void testSetAndFireEventTimeTimers() throws Exception { timerService.registerEventTimeTimer("ciao", 10); timerService.registerEventTimeTimer("hello", 10); - assertEquals(4, timerService.numEventTimeTimers()); - assertEquals(2, timerService.numEventTimeTimers("hello")); - assertEquals(2, timerService.numEventTimeTimers("ciao")); + assertThat(timerService.numEventTimeTimers()).isEqualTo(4); + assertThat(timerService.numEventTimeTimers("hello")).isEqualTo(2); + assertThat(timerService.numEventTimeTimers("ciao")).isEqualTo(2); timerService.advanceWatermark(10); @@ -444,12 +442,12 @@ public void testSetAndFireEventTimeTimers() throws Exception { verify(mockTriggerable, times(1)) .onEventTime(eq(new TimerHeapInternalTimer<>(10, key2, "hello"))); - assertEquals(0, timerService.numEventTimeTimers()); + assertThat(timerService.numEventTimeTimers()).isZero(); } /** This also verifies that we don't have leakage between keys/namespaces. 
*/ - @Test - public void testSetAndFireProcessingTimeTimers() throws Exception { + @TestTemplate + void testSetAndFireProcessingTimeTimers() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -480,9 +478,9 @@ public void testSetAndFireProcessingTimeTimers() throws Exception { timerService.registerProcessingTimeTimer("ciao", 10); timerService.registerProcessingTimeTimer("hello", 10); - assertEquals(4, timerService.numProcessingTimeTimers()); - assertEquals(2, timerService.numProcessingTimeTimers("hello")); - assertEquals(2, timerService.numProcessingTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(4L); + assertThat(timerService.numProcessingTimeTimers("hello")).isEqualTo(2); + assertThat(timerService.numProcessingTimeTimers("ciao")).isEqualTo(2); processingTimeService.setCurrentTime(10); @@ -496,7 +494,7 @@ public void testSetAndFireProcessingTimeTimers() throws Exception { verify(mockTriggerable, times(1)) .onProcessingTime(eq(new TimerHeapInternalTimer<>(10, key2, "hello"))); - assertEquals(0, timerService.numProcessingTimeTimers()); + assertThat(timerService.numProcessingTimeTimers()).isZero(); } /** @@ -504,8 +502,8 @@ public void testSetAndFireProcessingTimeTimers() throws Exception { * *
<p>
This also verifies that deleted timers don't fire. */ - @Test - public void testDeleteEventTimeTimers() throws Exception { + @TestTemplate + void testDeleteEventTimeTimers() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -536,9 +534,9 @@ public void testDeleteEventTimeTimers() throws Exception { timerService.registerEventTimeTimer("ciao", 10); timerService.registerEventTimeTimer("hello", 10); - assertEquals(4, timerService.numEventTimeTimers()); - assertEquals(2, timerService.numEventTimeTimers("hello")); - assertEquals(2, timerService.numEventTimeTimers("ciao")); + assertThat(timerService.numEventTimeTimers()).isEqualTo(4); + assertThat(timerService.numEventTimeTimers("hello")).isEqualTo(2); + assertThat(timerService.numEventTimeTimers("ciao")).isEqualTo(2); keyContext.setCurrentKey(key1); timerService.deleteEventTimeTimer("hello", 10); @@ -546,9 +544,9 @@ public void testDeleteEventTimeTimers() throws Exception { keyContext.setCurrentKey(key2); timerService.deleteEventTimeTimer("ciao", 10); - assertEquals(2, timerService.numEventTimeTimers()); - assertEquals(1, timerService.numEventTimeTimers("hello")); - assertEquals(1, timerService.numEventTimeTimers("ciao")); + assertThat(timerService.numEventTimeTimers()).isEqualTo(2); + assertThat(timerService.numEventTimeTimers("hello")).isOne(); + assertThat(timerService.numEventTimeTimers("ciao")).isOne(); timerService.advanceWatermark(10); @@ -562,7 +560,7 @@ public void testDeleteEventTimeTimers() throws Exception { verify(mockTriggerable, times(1)) .onEventTime(eq(new TimerHeapInternalTimer<>(10, key2, "hello"))); - assertEquals(0, timerService.numEventTimeTimers()); + assertThat(timerService.numEventTimeTimers()).isZero(); } /** @@ -570,8 +568,8 @@ public void testDeleteEventTimeTimers() throws Exception { * *
<p>
This also verifies that deleted timers don't fire. */ - @Test - public void testDeleteProcessingTimeTimers() throws Exception { + @TestTemplate + void testDeleteProcessingTimeTimers() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -602,9 +600,9 @@ public void testDeleteProcessingTimeTimers() throws Exception { timerService.registerProcessingTimeTimer("ciao", 10); timerService.registerProcessingTimeTimer("hello", 10); - assertEquals(4, timerService.numProcessingTimeTimers()); - assertEquals(2, timerService.numProcessingTimeTimers("hello")); - assertEquals(2, timerService.numProcessingTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(4); + assertThat(timerService.numProcessingTimeTimers("hello")).isEqualTo(2); + assertThat(timerService.numProcessingTimeTimers("ciao")).isEqualTo(2); keyContext.setCurrentKey(key1); timerService.deleteProcessingTimeTimer("hello", 10); @@ -612,9 +610,9 @@ public void testDeleteProcessingTimeTimers() throws Exception { keyContext.setCurrentKey(key2); timerService.deleteProcessingTimeTimer("ciao", 10); - assertEquals(2, timerService.numProcessingTimeTimers()); - assertEquals(1, timerService.numProcessingTimeTimers("hello")); - assertEquals(1, timerService.numProcessingTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(2); + assertThat(timerService.numProcessingTimeTimers("hello")).isOne(); + assertThat(timerService.numProcessingTimeTimers("ciao")).isOne(); processingTimeService.setCurrentTime(10); @@ -628,14 +626,14 @@ public void testDeleteProcessingTimeTimers() throws Exception { verify(mockTriggerable, times(1)) .onProcessingTime(eq(new TimerHeapInternalTimer<>(10, key2, "hello"))); - assertEquals(0, timerService.numEventTimeTimers()); + assertThat(timerService.numEventTimeTimers()).isZero(); } /** * This also verifies that we iterate over all timers and set the key context on each element. */ - @Test - public void testForEachEventTimeTimers() throws Exception { + @TestTemplate + void testForEachEventTimeTimers() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -673,14 +671,14 @@ public void testForEachEventTimeTimers() throws Exception { results.add(Tuple3.of((Integer) keyContext.getCurrentKey(), namespace, timer)); }); - Assert.assertEquals(timers, results); + assertThat(results).isEqualTo(timers); } /** * This also verifies that we iterate over all timers and set the key context on each element. */ - @Test - public void testForEachProcessingTimeTimers() throws Exception { + @TestTemplate + void testForEachProcessingTimeTimers() throws Exception { @SuppressWarnings("unchecked") Triggerable mockTriggerable = mock(Triggerable.class); @@ -718,11 +716,11 @@ public void testForEachProcessingTimeTimers() throws Exception { results.add(Tuple3.of((Integer) keyContext.getCurrentKey(), namespace, timer)); }); - Assert.assertEquals(timers, results); + assertThat(results).isEqualTo(timers); } - @Test - public void testSnapshotAndRestore() throws Exception { + @TestTemplate + void testSnapshotAndRestore() throws Exception { testSnapshotAndRestore(InternalTimerServiceSerializationProxy.VERSION); } @@ -730,8 +728,8 @@ public void testSnapshotAndRestore() throws Exception { * This test checks whether timers are assigned to correct key groups and whether * snapshot/restore respects key groups. 
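For context on what "respects key groups" means here: each key hashes to one of maxParallelism key groups, each subtask owns a contiguous range of groups, and timers must be snapshotted and restored per group so they follow their keys on rescaling. A sketch using Flink's assignment utility (values illustrative; the utility calls are not shown in this patch, so treat the exact signatures as an assumption):

    import org.apache.flink.runtime.state.KeyGroupRangeAssignment;

    class KeyGroupSketch {
        void sketch() {
            int maxParallelism = 10;
            int parallelism = 2;

            // Key -> key group -> subtask index; a timer registered for "ciao" must be
            // restored by whichever subtask owns this key group after rescaling.
            int keyGroup = KeyGroupRangeAssignment.assignToKeyGroup("ciao", maxParallelism);
            int subtaskIndex =
                    KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
                            maxParallelism, parallelism, keyGroup);
        }
    }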
*/ - @Test - public void testSnapshotAndRebalancingRestore() throws Exception { + @TestTemplate + void testSnapshotAndRebalancingRestore() throws Exception { testSnapshotAndRebalancingRestore(InternalTimerServiceSerializationProxy.VERSION); } @@ -766,12 +764,12 @@ private void testSnapshotAndRestore(int snapshotVersion) throws Exception { timerService.registerEventTimeTimer("ciao", 10); timerService.registerProcessingTimeTimer("hello", 10); - assertEquals(2, timerService.numProcessingTimeTimers()); - assertEquals(1, timerService.numProcessingTimeTimers("hello")); - assertEquals(1, timerService.numProcessingTimeTimers("ciao")); - assertEquals(2, timerService.numEventTimeTimers()); - assertEquals(1, timerService.numEventTimeTimers("hello")); - assertEquals(1, timerService.numEventTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(2); + assertThat(timerService.numProcessingTimeTimers("hello")).isOne(); + assertThat(timerService.numProcessingTimeTimers("ciao")).isOne(); + assertThat(timerService.numEventTimeTimers()).isEqualTo(2); + assertThat(timerService.numEventTimeTimers("hello")).isOne(); + assertThat(timerService.numEventTimeTimers("ciao")).isOne(); Map snapshot = new HashMap<>(); for (Integer keyGroupIndex : testKeyGroupRange) { @@ -820,7 +818,7 @@ private void testSnapshotAndRestore(int snapshotVersion) throws Exception { verify(mockTriggerable2, times(1)) .onEventTime(eq(new TimerHeapInternalTimer<>(10, key2, "ciao"))); - assertEquals(0, timerService.numEventTimeTimers()); + assertThat(timerService.numEventTimeTimers()).isZero(); } private void testSnapshotAndRebalancingRestore(int snapshotVersion) throws Exception { @@ -864,12 +862,12 @@ private void testSnapshotAndRebalancingRestore(int snapshotVersion) throws Excep timerService.registerEventTimeTimer("ciao", 10); timerService.registerProcessingTimeTimer("hello", 10); - assertEquals(2, timerService.numProcessingTimeTimers()); - assertEquals(1, timerService.numProcessingTimeTimers("hello")); - assertEquals(1, timerService.numProcessingTimeTimers("ciao")); - assertEquals(2, timerService.numEventTimeTimers()); - assertEquals(1, timerService.numEventTimeTimers("hello")); - assertEquals(1, timerService.numEventTimeTimers("ciao")); + assertThat(timerService.numProcessingTimeTimers()).isEqualTo(2); + assertThat(timerService.numProcessingTimeTimers("hello")).isOne(); + assertThat(timerService.numProcessingTimeTimers("ciao")).isOne(); + assertThat(timerService.numEventTimeTimers()).isEqualTo(2); + assertThat(timerService.numEventTimeTimers("hello")).isOne(); + assertThat(timerService.numEventTimeTimers("ciao")).isOne(); // one map per sub key-group range Map snapshot1 = new HashMap<>(); @@ -944,7 +942,7 @@ private void testSnapshotAndRebalancingRestore(int snapshotVersion) throws Excep verify(mockTriggerable1, never()) .onEventTime(eq(new TimerHeapInternalTimer<>(10, key2, "ciao"))); - assertEquals(0, timerService1.numEventTimeTimers()); + assertThat(timerService1.numEventTimeTimers()).isZero(); processingTimeService2.setCurrentTime(10); timerService2.advanceWatermark(10); @@ -960,7 +958,7 @@ private void testSnapshotAndRebalancingRestore(int snapshotVersion) throws Excep verify(mockTriggerable2, times(1)) .onEventTime(eq(new TimerHeapInternalTimer<>(10, key2, "ciao"))); - assertEquals(0, timerService2.numEventTimeTimers()); + assertThat(timerService2.numEventTimeTimers()).isZero(); } private static class TestKeyContext implements KeyContext { @@ -1070,9 +1068,9 @@ protected PriorityQueueSetFactory 
createQueueFactory( // Parametrization for testing with different key-group ranges // ------------------------------------------------------------------------ - @Parameterized.Parameters(name = "start = {0}, end = {1}, max = {2}") + @Parameters(name = "start = {0}, end = {1}, max = {2}") @SuppressWarnings("unchecked,rawtypes") - public static Collection keyRanges() { + private static Collection keyRanges() { return Arrays.asList( new Object[][] { {0, Short.MAX_VALUE - 1, Short.MAX_VALUE}, diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/KeyedProcessOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/KeyedProcessOperatorTest.java index 56c235cfd3899..0be4b1ac08864 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/KeyedProcessOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/KeyedProcessOperatorTest.java @@ -35,24 +35,19 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.util.Collector; import org.apache.flink.util.OutputTag; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.concurrent.ConcurrentLinkedQueue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests {@link KeyedProcessOperator}. */ -public class KeyedProcessOperatorTest extends TestLogger { - - @Rule public ExpectedException expectedException = ExpectedException.none(); +class KeyedProcessOperatorTest { @Test - public void testKeyQuerying() throws Exception { + void testKeyQuerying() throws Exception { class KeyQueryingProcessFunction extends KeyedProcessFunction, String> { @@ -62,7 +57,7 @@ public void processElement( Tuple2 value, Context ctx, Collector out) throws Exception { - assertTrue("Did not get expected key.", ctx.getCurrentKey().equals(value.f0)); + assertThat(ctx.getCurrentKey()).as("Did not get expected key.").isEqualTo(value.f0); // we check that we receive this output, to ensure that the assert was actually // checked @@ -93,7 +88,7 @@ public void processElement( } @Test - public void testTimestampAndWatermarkQuerying() throws Exception { + void testTimestampAndWatermarkQuerying() throws Exception { KeyedProcessOperator operator = new KeyedProcessOperator<>(new QueryingFlatMapFunction(TimeDomain.EVENT_TIME)); @@ -125,7 +120,7 @@ public void testTimestampAndWatermarkQuerying() throws Exception { } @Test - public void testTimestampAndProcessingTimeQuerying() throws Exception { + void testTimestampAndProcessingTimeQuerying() throws Exception { KeyedProcessOperator operator = new KeyedProcessOperator<>(new QueryingFlatMapFunction(TimeDomain.PROCESSING_TIME)); @@ -155,7 +150,7 @@ public void testTimestampAndProcessingTimeQuerying() throws Exception { } @Test - public void testEventTimeTimers() throws Exception { + void testEventTimeTimers() throws Exception { final int expectedKey = 17; @@ -190,7 +185,7 @@ public void testEventTimeTimers() throws Exception { } @Test - public void testProcessingTimeTimers() throws Exception { + void testProcessingTimeTimers() throws Exception { final int expectedKey = 17; @@ -222,7 +217,7 @@ public void testProcessingTimeTimers() throws Exception { /** Verifies that we don't have 
leakage between different keys. */ @Test - public void testEventTimeTimerWithState() throws Exception { + void testEventTimeTimerWithState() throws Exception { KeyedProcessOperator operator = new KeyedProcessOperator<>( @@ -266,7 +261,7 @@ public void testEventTimeTimerWithState() throws Exception { /** Verifies that we don't have leakage between different keys. */ @Test - public void testProcessingTimeTimerWithState() throws Exception { + void testProcessingTimeTimerWithState() throws Exception { KeyedProcessOperator operator = new KeyedProcessOperator<>( @@ -305,7 +300,7 @@ public void testProcessingTimeTimerWithState() throws Exception { } @Test - public void testSnapshotAndRestore() throws Exception { + void testSnapshotAndRestore() throws Exception { final int expectedKey = 5; @@ -352,7 +347,7 @@ public void testSnapshotAndRestore() throws Exception { } @Test - public void testNullOutputTagRefusal() throws Exception { + void testNullOutputTagRefusal() throws Exception { KeyedProcessOperator operator = new KeyedProcessOperator<>(new NullOutputTagEmittingProcessFunction()); @@ -365,8 +360,8 @@ public void testNullOutputTagRefusal() throws Exception { testHarness.setProcessingTime(17); try { - expectedException.expect(IllegalArgumentException.class); - testHarness.processElement(new StreamRecord<>(5)); + assertThatThrownBy(() -> testHarness.processElement(new StreamRecord<>(5))) + .isInstanceOf(IllegalArgumentException.class); } finally { testHarness.close(); } @@ -374,7 +369,7 @@ public void testNullOutputTagRefusal() throws Exception { /** This also verifies that the timestamps of side-emitted records are correct. */ @Test - public void testSideOutput() throws Exception { + void testSideOutput() throws Exception { KeyedProcessOperator operator = new KeyedProcessOperator<>(new SideOutputProcessFunction()); @@ -516,8 +511,8 @@ public void processElement(Integer value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(expectedKey, ctx.getCurrentKey()); - assertEquals(expectedTimeDomain, ctx.timeDomain()); + assertThat(ctx.getCurrentKey()).isEqualTo(expectedKey); + assertThat(ctx.timeDomain()).isEqualTo(expectedTimeDomain); out.collect(1777); } } @@ -564,7 +559,7 @@ public void processElement(Integer value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(expectedTimeDomain, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(expectedTimeDomain); out.collect("STATE:" + getRuntimeContext().getState(state).value()); } } @@ -596,7 +591,7 @@ public void processElement(Integer value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(expectedKey, ctx.getCurrentKey()); + assertThat(ctx.getCurrentKey()).isEqualTo(expectedKey); if (TimeDomain.EVENT_TIME.equals(ctx.timeDomain())) { out.collect("EVENT:1777"); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/LegacyKeyedProcessOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/LegacyKeyedProcessOperatorTest.java index 6d60365b1ecff..c1422a82119ca 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/LegacyKeyedProcessOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/LegacyKeyedProcessOperatorTest.java
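[Editor's note] The hunks around this point show the two patterns this migration applies everywhere: the JUnit 4 "@Rule ExpectedException" is replaced by AssertJ's assertThatThrownBy, and "@RunWith(Parameterized.class)" is replaced by Flink's ParameterizedTestExtension with @TestTemplate methods and a (possibly private) static @Parameters provider. Below is a minimal, self-contained sketch of both patterns under those assumptions; the class and helper names (ParameterizedMigrationSketch, checkPositive) are illustrative only and do not appear in the patch.

import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension;
import org.apache.flink.testutils.junit.extensions.parameterized.Parameters;

import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

import java.util.Arrays;
import java.util.Collection;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

/** Illustrative sketch only; not part of the patch. */
@ExtendWith(ParameterizedTestExtension.class)
class ParameterizedMigrationSketch {

    private final int input;

    // ParameterizedTestExtension injects each parameter set via the constructor.
    ParameterizedMigrationSketch(int input) {
        this.input = input;
    }

    // Replaces JUnit 4's @Parameterized.Parameters; under the Flink extension the
    // provider method may be private static, as in InternalTimerServiceImplTest above.
    @Parameters(name = "input = {0}")
    private static Collection<Object[]> parameters() {
        return Arrays.asList(new Object[][] {{1}, {2}});
    }

    // @TestTemplate replaces @Test on methods that run once per parameter set.
    @TestTemplate
    void testAccepted() {
        assertThat(checkPositive(input)).isEqualTo(input);
    }

    // assertThatThrownBy replaces the ExpectedException rule: the throwing call is
    // wrapped in a lambda, so clean-up code (e.g. testHarness.close() in the tests
    // above) can still run after the assertion instead of being skipped.
    @TestTemplate
    void testRejected() {
        assertThatThrownBy(() -> checkPositive(-input))
                .isInstanceOf(IllegalArgumentException.class);
    }

    private static int checkPositive(int v) {
        if (v <= 0) {
            throw new IllegalArgumentException("expected a positive value");
        }
        return v;
    }
}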
@@ -32,24 +32,20 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.util.Collector; import org.apache.flink.util.OutputTag; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.concurrent.ConcurrentLinkedQueue; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests {@link LegacyKeyedProcessOperator}. */ @Deprecated -public class LegacyKeyedProcessOperatorTest extends TestLogger { - - @Rule public ExpectedException expectedException = ExpectedException.none(); +class LegacyKeyedProcessOperatorTest { @Test - public void testTimestampAndWatermarkQuerying() throws Exception { + void testTimestampAndWatermarkQuerying() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>( @@ -82,7 +78,7 @@ public void testTimestampAndWatermarkQuerying() throws Exception { } @Test - public void testTimestampAndProcessingTimeQuerying() throws Exception { + void testTimestampAndProcessingTimeQuerying() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>( @@ -113,7 +109,7 @@ public void testTimestampAndProcessingTimeQuerying() throws Exception { } @Test - public void testEventTimeTimers() throws Exception { + void testEventTimeTimers() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>( @@ -146,7 +142,7 @@ public void testEventTimeTimers() throws Exception { } @Test - public void testProcessingTimeTimers() throws Exception { + void testProcessingTimeTimers() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>( @@ -176,7 +172,7 @@ public void testProcessingTimeTimers() throws Exception { /** Verifies that we don't have leakage between different keys. */ @Test - public void testEventTimeTimerWithState() throws Exception { + void testEventTimeTimerWithState() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>( @@ -217,7 +213,7 @@ public void testEventTimeTimerWithState() throws Exception { /** Verifies that we don't have leakage between different keys. 
*/ @Test - public void testProcessingTimeTimerWithState() throws Exception { + void testProcessingTimeTimerWithState() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>( @@ -253,7 +249,7 @@ public void testProcessingTimeTimerWithState() throws Exception { } @Test - public void testSnapshotAndRestore() throws Exception { + void testSnapshotAndRestore() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>(new BothTriggeringFlatMapFunction()); @@ -298,7 +294,7 @@ public void testSnapshotAndRestore() throws Exception { } @Test - public void testNullOutputTagRefusal() throws Exception { + void testNullOutputTagRefusal() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>(new NullOutputTagEmittingProcessFunction()); @@ -311,8 +307,8 @@ public void testNullOutputTagRefusal() throws Exception { testHarness.setProcessingTime(17); try { - expectedException.expect(IllegalArgumentException.class); - testHarness.processElement(new StreamRecord<>(5)); + assertThatThrownBy(() -> testHarness.processElement(new StreamRecord<>(5))) + .isInstanceOf(IllegalArgumentException.class); } finally { testHarness.close(); } @@ -320,7 +316,7 @@ public void testNullOutputTagRefusal() throws Exception { /** This also verifies that the timestamps of side-emitted records are correct. */ @Test - public void testSideOutput() throws Exception { + void testSideOutput() throws Exception { LegacyKeyedProcessOperator operator = new LegacyKeyedProcessOperator<>(new SideOutputProcessFunction()); @@ -457,7 +453,7 @@ public void processElement(Integer value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(this.timeDomain, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(timeDomain); out.collect(1777); } } @@ -494,7 +490,7 @@ public void processElement(Integer value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(this.timeDomain, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(timeDomain); out.collect("STATE:" + getRuntimeContext().getState(state).value()); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorAttributesTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorAttributesTest.java index b20bd8a1e7951..9f8d01e51eb4e 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorAttributesTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorAttributesTest.java @@ -25,13 +25,13 @@ /** Tests for {@link OperatorAttributes} and {@link OperatorAttributesBuilder}.
*/ class OperatorAttributesTest { @Test - public void testDefaultValues() { + void testDefaultValues() { OperatorAttributes attributes = new OperatorAttributesBuilder().build(); assertThat(attributes.isOutputOnlyAfterEndOfStream()).isFalse(); } @Test - public void testSetAndGet() { + void testSetAndGet() { OperatorAttributes attributes = new OperatorAttributesBuilder().setOutputOnlyAfterEndOfStream(true).build(); assertThat(attributes.isOutputOnlyAfterEndOfStream()).isTrue(); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFinalizerTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFinalizerTest.java index dc108f06db773..f51fdcff3af2b 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFinalizerTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFinalizerTest.java @@ -29,9 +29,8 @@ import org.apache.flink.runtime.state.ResultSubpartitionStateHandle; import org.apache.flink.runtime.state.SnapshotResult; import org.apache.flink.runtime.state.StateObject; -import org.apache.flink.util.TestLogger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.HashMap; import java.util.Map; @@ -42,16 +41,14 @@ import static org.apache.flink.runtime.checkpoint.StateHandleDummyUtil.deepDummyCopy; import static org.apache.flink.runtime.checkpoint.StateObjectCollection.singleton; import static org.apache.flink.runtime.state.SnapshotResult.withLocalState; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link OperatorSnapshotFinalizer}. */ -public class OperatorSnapshotFinalizerTest extends TestLogger { +class OperatorSnapshotFinalizerTest { /** Test that the runnable futures are executed and the result is correctly extracted. 
*/ @Test - public void testRunAndExtract() throws Exception { + void testRunAndExtract() throws Exception { Random random = new Random(0x42); @@ -91,13 +88,13 @@ public void testRunAndExtract() throws Exception { new PseudoNotDoneFuture<>(resultSubpartition)); for (Future f : snapshotFutures.getAllFutures()) { - assertFalse(f.isDone()); + assertThat(f).isNotDone(); } OperatorSnapshotFinalizer finalizer = new OperatorSnapshotFinalizer(snapshotFutures); for (Future f : snapshotFutures.getAllFutures()) { - assertTrue(f.isDone()); + assertThat(f).isDone(); } Map, Function> map = @@ -111,15 +108,13 @@ public void testRunAndExtract() throws Exception { for (Map.Entry, Function> e : map.entrySet()) { - assertEquals( - e.getKey().getJobManagerOwnedSnapshot(), - e.getValue().apply(finalizer.getJobManagerOwnedState())); + assertThat(e.getValue().apply(finalizer.getJobManagerOwnedState())) + .isEqualTo(e.getKey().getJobManagerOwnedSnapshot()); } for (Map.Entry, Function> e : map.entrySet()) { - assertEquals( - e.getKey().getTaskLocalSnapshot(), - e.getValue().apply(finalizer.getTaskLocalState())); + assertThat(e.getValue().apply(finalizer.getTaskLocalState())) + .isEqualTo(e.getKey().getTaskLocalSnapshot()); } } @@ -131,10 +126,10 @@ private static Function headExt private void checkResult(Object expected, StateObjectCollection actual) { if (expected == null) { - assertTrue(actual == null || actual.isEmpty()); + assertThat(actual == null || actual.isEmpty()).isTrue(); } else { - assertEquals(1, actual.size()); - assertEquals(expected, actual.iterator().next()); + assertThat(actual).hasSize(1); + assertThat(actual.iterator().next()).isEqualTo(expected); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFuturesTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFuturesTest.java index 1be8a48dbeb8c..e984616468b0e 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFuturesTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/OperatorSnapshotFuturesTest.java @@ -31,23 +31,22 @@ import org.apache.flink.runtime.state.SnapshotResult; import org.apache.flink.runtime.state.memory.ByteStreamStateHandle; import org.apache.flink.runtime.testutils.ExceptionallyDoneFuture; -import org.apache.flink.util.TestLogger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.Future; import java.util.concurrent.RunnableFuture; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; /** Tests for {@link OperatorSnapshotFutures}. 
*/ -public class OperatorSnapshotFuturesTest extends TestLogger { +class OperatorSnapshotFuturesTest { @Test - public void testCancelReturnsStateSize() throws Exception { + void testCancelReturnsStateSize() throws Exception { KeyGroupsStateHandle s1 = new KeyGroupsStateHandle( new KeyGroupRangeOffsets(0, 0), @@ -65,7 +64,7 @@ public void testCancelReturnsStateSize() throws Exception { ExceptionallyDoneFuture.of(new RuntimeException()), ExceptionallyDoneFuture.of(new RuntimeException())); long stateSize = s1.getStateSize() + s2.getStateSize(); - assertEquals(Tuple2.of(stateSize, stateSize), futures.cancel()); + assertThat(futures.cancel()).isEqualTo(Tuple2.of(stateSize, stateSize)); } /** @@ -73,7 +72,7 @@ public void testCancelReturnsStateSize() throws Exception { * the StreamStateHandle result is retrievable that the state handles are discarded. */ @Test - public void testCancelAndCleanup() throws Exception { + void testCancelAndCleanup() throws Exception { OperatorSnapshotFutures operatorSnapshotResult = new OperatorSnapshotFutures(); operatorSnapshotResult.cancel(); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/ProcessOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/ProcessOperatorTest.java index d2635769f915f..a67a751a40704 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/ProcessOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/ProcessOperatorTest.java @@ -26,21 +26,18 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.util.Collector; import org.apache.flink.util.OutputTag; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.concurrent.ConcurrentLinkedQueue; -/** Tests {@link ProcessOperator}. */ -public class ProcessOperatorTest extends TestLogger { +import static org.assertj.core.api.Assertions.assertThatThrownBy; - @Rule public ExpectedException expectedException = ExpectedException.none(); +/** Tests {@link ProcessOperator}.
*/ +class ProcessOperatorTest { @Test - public void testTimestampAndWatermarkQuerying() throws Exception { + void testTimestampAndWatermarkQuerying() throws Exception { ProcessOperator operator = new ProcessOperator<>(new QueryingProcessFunction(TimeDomain.EVENT_TIME)); @@ -71,7 +68,7 @@ public void testTimestampAndWatermarkQuerying() throws Exception { } @Test - public void testTimestampAndProcessingTimeQuerying() throws Exception { + void testTimestampAndProcessingTimeQuerying() throws Exception { ProcessOperator operator = new ProcessOperator<>(new QueryingProcessFunction(TimeDomain.PROCESSING_TIME)); @@ -100,7 +97,7 @@ public void testTimestampAndProcessingTimeQuerying() throws Exception { } @Test - public void testNullOutputTagRefusal() throws Exception { + void testNullOutputTagRefusal() throws Exception { ProcessOperator operator = new ProcessOperator<>(new NullOutputTagEmittingProcessFunction()); @@ -112,8 +109,8 @@ public void testNullOutputTagRefusal() throws Exception { testHarness.setProcessingTime(17); try { - expectedException.expect(IllegalArgumentException.class); - testHarness.processElement(new StreamRecord<>(5)); + assertThatThrownBy(() -> testHarness.processElement(new StreamRecord<>(5))) + .isInstanceOf(IllegalArgumentException.class); } finally { testHarness.close(); } @@ -121,7 +118,7 @@ public void testNullOutputTagRefusal() throws Exception { /** This also verifies that the timestamps of side-emitted records are correct. */ @Test - public void testSideOutput() throws Exception { + void testSideOutput() throws Exception { ProcessOperator operator = new ProcessOperator<>(new SideOutputProcessFunction()); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorIdleTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorIdleTest.java index ab4212883aa1c..6069ffcb497dd 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorIdleTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorIdleTest.java @@ -22,48 +22,45 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.flink.runtime.io.AvailabilityProvider; import org.apache.flink.streaming.api.operators.source.CollectingDataOutput; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import javax.annotation.Nullable; import java.util.concurrent.CompletableFuture; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.CoreMatchers.sameInstance; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; +import static org.assertj.core.api.Assertions.assertThat; /** Unit test for idle {@link SourceOperator}.
*/ @SuppressWarnings("serial") -public class SourceOperatorIdleTest { +class SourceOperatorIdleTest { @Nullable private SourceOperatorTestContext context; @Nullable private SourceOperator operator; - @Before - public void setup() throws Exception { + @BeforeEach + void setup() throws Exception { context = new SourceOperatorTestContext(); operator = context.getOperator(); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { context.close(); context = null; operator = null; } @Test - public void testSameAvailabilityFuture() throws Exception { + void testSameAvailabilityFuture() throws Exception { operator.initializeState(context.createStateContext()); operator.open(); operator.emitNext(new CollectingDataOutput<>()); final CompletableFuture initialFuture = operator.getAvailableFuture(); - assertFalse(initialFuture.isDone()); + assertThat(initialFuture).isNotDone(); final CompletableFuture secondFuture = operator.getAvailableFuture(); - assertThat(initialFuture, not(sameInstance(AvailabilityProvider.AVAILABLE))); - assertThat(secondFuture, sameInstance(initialFuture)); + assertThat(initialFuture).isNotSameAs(AvailabilityProvider.AVAILABLE); + assertThat(secondFuture).isSameAs(initialFuture); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorSplitWatermarkAlignmentTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorSplitWatermarkAlignmentTest.java index de80d7f4ef53a..bef779e778a8c 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorSplitWatermarkAlignmentTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorSplitWatermarkAlignmentTest.java @@ -54,8 +54,8 @@ Licensed to the Apache Software Foundation (ASF) under one import static org.assertj.core.api.Assertions.assertThat; /** Unit test for split alignment in {@link SourceOperator}. 
*/ -public class SourceOperatorSplitWatermarkAlignmentTest { - public static final WatermarkGenerator WATERMARK_GENERATOR = +class SourceOperatorSplitWatermarkAlignmentTest { + private static final WatermarkGenerator WATERMARK_GENERATOR = new WatermarkGenerator() { private long maxWatermark = Long.MIN_VALUE; @@ -75,7 +75,7 @@ public void onPeriodicEmit(WatermarkOutput output) { }; @Test - public void testSplitWatermarkAlignment() throws Exception { + void testSplitWatermarkAlignment() throws Exception { final SplitAligningSourceReader sourceReader = new SplitAligningSourceReader(); SourceOperator operator = diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorTest.java index edb4a6150a7b5..8886503675aeb 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/SourceOperatorTest.java @@ -44,43 +44,38 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.flink.streaming.util.CollectorOutput; import org.apache.flink.util.CollectionUtil; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import javax.annotation.Nullable; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; /** Unit test for {@link SourceOperator}. */ @SuppressWarnings("serial") -public class SourceOperatorTest { +class SourceOperatorTest { @Nullable private SourceOperatorTestContext context; @Nullable private SourceOperator operator; @Nullable private MockSourceReader mockSourceReader; @Nullable private MockOperatorEventGateway mockGateway; - @Before - public void setup() throws Exception { + @BeforeEach + void setup() throws Exception { context = new SourceOperatorTestContext(); operator = context.getOperator(); mockSourceReader = context.getSourceReader(); mockGateway = context.getGateway(); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { context.close(); context = null; operator = null; @@ -89,63 +84,61 @@ public void tearDown() throws Exception { } @Test - public void testInitializeState() throws Exception { + void testInitializeState() throws Exception { StateInitializationContext stateContext = context.createStateContext(); operator.initializeState(stateContext); - assertNotNull( - stateContext - .getOperatorStateStore() - .getListState(SourceOperator.SPLITS_STATE_DESC)); + assertThat( + stateContext + .getOperatorStateStore() + .getListState(SourceOperator.SPLITS_STATE_DESC)) + .isNotNull(); } @Test - public void testOpen() throws Exception { + void testOpen() throws Exception { // Initialize the operator. operator.initializeState(context.createStateContext()); // Open the operator. operator.open(); // The source reader should have been assigned a split. 
- assertEquals( - Collections.singletonList(SourceOperatorTestContext.MOCK_SPLIT), - mockSourceReader.getAssignedSplits()); + assertThat(mockSourceReader.getAssignedSplits()) + .containsExactly(SourceOperatorTestContext.MOCK_SPLIT); // The source reader should have started. - assertTrue(mockSourceReader.isStarted()); + assertThat(mockSourceReader.isStarted()).isTrue(); // A ReaderRegistrationRequest should have been sent. - assertEquals(1, mockGateway.getEventsSent().size()); + assertThat(mockGateway.getEventsSent()).hasSize(1); OperatorEvent operatorEvent = mockGateway.getEventsSent().get(0); - assertTrue(operatorEvent instanceof ReaderRegistrationEvent); - assertEquals( - SourceOperatorTestContext.SUBTASK_INDEX, - ((ReaderRegistrationEvent) operatorEvent).subtaskId()); + assertThat(operatorEvent).isInstanceOf(ReaderRegistrationEvent.class); + assertThat(((ReaderRegistrationEvent) operatorEvent).subtaskId()) + .isEqualTo(SourceOperatorTestContext.SUBTASK_INDEX); } @Test - public void testStop() throws Exception { + void testStop() throws Exception { // Initialize the operator. operator.initializeState(context.createStateContext()); // Open the operator. operator.open(); // The source reader should have been assigned a split. - assertEquals( - Collections.singletonList(SourceOperatorTestContext.MOCK_SPLIT), - mockSourceReader.getAssignedSplits()); + assertThat(mockSourceReader.getAssignedSplits()) + .containsExactly(SourceOperatorTestContext.MOCK_SPLIT); CollectingDataOutput dataOutput = new CollectingDataOutput<>(); - assertEquals(DataInputStatus.NOTHING_AVAILABLE, operator.emitNext(dataOutput)); - assertFalse(operator.isAvailable()); + assertThat(operator.emitNext(dataOutput)).isEqualTo(DataInputStatus.NOTHING_AVAILABLE); + assertThat(operator.isAvailable()).isFalse(); CompletableFuture sourceStopped = operator.stop(StopMode.DRAIN); - assertTrue(operator.isAvailable()); - assertFalse(sourceStopped.isDone()); - assertEquals(DataInputStatus.END_OF_DATA, operator.emitNext(dataOutput)); + assertThat(operator.isAvailable()).isTrue(); + assertThat(sourceStopped).isNotDone(); + assertThat(operator.emitNext(dataOutput)).isEqualTo(DataInputStatus.END_OF_DATA); operator.finish(); - assertTrue(sourceStopped.isDone()); + assertThat(sourceStopped).isDone(); } @Test - public void testHandleAddSplitsEvent() throws Exception { + void testHandleAddSplitsEvent() throws Exception { operator.initializeState(context.createStateContext()); operator.open(); MockSourceSplit newSplit = new MockSourceSplit((2)); @@ -153,23 +146,22 @@ public void testHandleAddSplitsEvent() throws Exception { new AddSplitEvent<>( Collections.singletonList(newSplit), new MockSourceSplitSerializer())); // The source reader should have been assigned two splits. - assertEquals( - Arrays.asList(SourceOperatorTestContext.MOCK_SPLIT, newSplit), - mockSourceReader.getAssignedSplits()); + assertThat(mockSourceReader.getAssignedSplits()) + .containsExactly(SourceOperatorTestContext.MOCK_SPLIT, newSplit); } @Test - public void testHandleAddSourceEvent() throws Exception { + void testHandleAddSourceEvent() throws Exception { operator.initializeState(context.createStateContext()); operator.open(); SourceEvent event = new SourceEvent() {}; operator.handleOperatorEvent(new SourceEventWrapper(event)); // The source reader should have received the source event.
- assertEquals(Collections.singletonList(event), mockSourceReader.getReceivedSourceEvents()); + assertThat(mockSourceReader.getReceivedSourceEvents()).containsExactly(event); } @Test - public void testSnapshotState() throws Exception { + void testSnapshotState() throws Exception { StateInitializationContext stateContext = context.createStateContext(); operator.initializeState(stateContext); operator.open(); @@ -182,31 +174,31 @@ public void testSnapshotState() throws Exception { // Verify the splits in state. List splitsInState = CollectionUtil.iterableToList(operator.getReaderState().get()); - assertEquals(Arrays.asList(SourceOperatorTestContext.MOCK_SPLIT, newSplit), splitsInState); + assertThat(splitsInState).containsExactly(SourceOperatorTestContext.MOCK_SPLIT, newSplit); } @Test - public void testNotifyCheckpointComplete() throws Exception { + void testNotifyCheckpointComplete() throws Exception { StateInitializationContext stateContext = context.createStateContext(); operator.initializeState(stateContext); operator.open(); operator.snapshotState(new StateSnapshotContextSynchronousImpl(100L, 100L)); operator.notifyCheckpointComplete(100L); - assertEquals(100L, (long) mockSourceReader.getCompletedCheckpoints().get(0)); + assertThat(mockSourceReader.getCompletedCheckpoints().get(0)).isEqualTo(100L); } @Test - public void testNotifyCheckpointAborted() throws Exception { + void testNotifyCheckpointAborted() throws Exception { StateInitializationContext stateContext = context.createStateContext(); operator.initializeState(stateContext); operator.open(); operator.snapshotState(new StateSnapshotContextSynchronousImpl(100L, 100L)); operator.notifyCheckpointAborted(100L); - assertEquals(100L, (long) mockSourceReader.getAbortedCheckpoints().get(0)); + assertThat(mockSourceReader.getAbortedCheckpoints().get(0)).isEqualTo(100L); } @Test - public void testHandleBacklogEvent() throws Exception { + void testHandleBacklogEvent() throws Exception { List outputStreamElements = new ArrayList<>(); context = new SourceOperatorTestContext( diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateDescriptorPassingTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateDescriptorPassingTest.java index 138e2979c63cf..c6bb0b6e5db51 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateDescriptorPassingTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateDescriptorPassingTest.java @@ -42,11 +42,11 @@ import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.serializers.JavaSerializer; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.File; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; /** * Various tests around the proper passing of state descriptors to the operators and their @@ -55,10 +55,10 @@ *

The tests use an arbitrary generic type to validate the behavior. */ @SuppressWarnings("serial") -public class StateDescriptorPassingTest { +class StateDescriptorPassingTest { @Test - public void testReduceWindowState() { + void testReduceWindowState() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class); @@ -91,7 +91,7 @@ public File reduce(File value1, File value2) { } @Test - public void testApplyWindowState() { + void testApplyWindowState() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class); @@ -125,7 +125,7 @@ public void apply( } @Test - public void testProcessWindowState() { + void testProcessWindowState() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class); @@ -159,7 +159,7 @@ public void process( } @Test - public void testProcessAllWindowState() { + void testProcessAllWindowState() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class); @@ -186,7 +186,7 @@ public void process( } @Test - public void testReduceWindowAllState() { + void testReduceWindowAllState() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class); @@ -213,7 +213,7 @@ public File reduce(File value1, File value2) { } @Test - public void testApplyWindowAllState() { + void testApplyWindowAllState() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class); @@ -252,13 +252,13 @@ private void validateStateDescriptorConfigured(SingleOutputStreamOperator res // this would be the first statement to fail if state descriptors were not properly // initialized TypeSerializer serializer = descr.getSerializer(); - assertTrue(serializer instanceof KryoSerializer); + assertThat(serializer).isInstanceOf(KryoSerializer.class); Kryo kryo = ((KryoSerializer) serializer).getKryo(); - assertTrue( - "serializer registration was not properly passed on", - kryo.getSerializer(File.class) instanceof JavaSerializer); + assertThat(kryo.getSerializer(File.class)) + .as("serializer registration was not properly passed on") + .isInstanceOf(JavaSerializer.class); } private void validateListStateDescriptorConfigured(SingleOutputStreamOperator result) { @@ -267,22 +267,22 @@ private void validateListStateDescriptorConfigured(SingleOutputStreamOperator WindowOperator op = (WindowOperator) transform.getOperator(); StateDescriptor descr = op.getStateDescriptor(); - assertTrue(descr instanceof ListStateDescriptor); + assertThat(descr).isInstanceOf(ListStateDescriptor.class); ListStateDescriptor listDescr = (ListStateDescriptor) descr; // this would be the first statement to fail if state descriptors were not properly // initialized TypeSerializer serializer = listDescr.getSerializer(); - assertTrue(serializer instanceof ListSerializer); + assertThat(serializer).isInstanceOf(ListSerializer.class); TypeSerializer elementSerializer = listDescr.getElementSerializer(); - assertTrue(elementSerializer instanceof KryoSerializer); + assertThat(elementSerializer).isInstanceOf(KryoSerializer.class); Kryo kryo = 
((KryoSerializer) elementSerializer).getKryo(); - assertTrue( - "serializer registration was not properly passed on", - kryo.getSerializer(File.class) instanceof JavaSerializer); + assertThat(kryo.getSerializer(File.class)) + .as("serializer registration was not properly passed on") + .isInstanceOf(JavaSerializer.class); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateInitializationContextImplTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateInitializationContextImplTest.java index aa1471fa80fab..f1cf2ab3135f4 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateInitializationContextImplTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateInitializationContextImplTest.java @@ -62,9 +62,8 @@ import org.apache.flink.streaming.runtime.tasks.StreamTaskCancellationContext; import org.apache.flink.util.clock.SystemClock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.io.InputStream; @@ -77,11 +76,13 @@ import java.util.Set; import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.createExecutionAttemptId; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** Tests for {@link StateInitializationContextImpl}. */ -public class StateInitializationContextImplTest { +class StateInitializationContextImplTest { static final int NUM_HANDLES = 10; @@ -91,8 +92,8 @@ public class StateInitializationContextImplTest { private int writtenKeyGroups; private Set writtenOperatorStates; - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() throws Exception { this.writtenKeyGroups = 0; this.writtenOperatorStates = new HashSet<>(); @@ -239,7 +240,7 @@ public InternalTimeServiceManager create( } @Test - public void getOperatorStateStreams() throws Exception { + void getOperatorStateStreams() throws Exception { int i = 0; int s = 0; @@ -248,12 +249,12 @@ public void getOperatorStateStreams() throws Exception { if (0 == i % 4) { ++i; } - Assert.assertNotNull(streamProvider); + assertThat(streamProvider).isNotNull(); try (InputStream is = streamProvider.getStream()) { DataInputView div = new DataInputViewStreamWrapper(is); int val = div.readInt(); - Assert.assertEquals(i * NUM_HANDLES + s, val); + assertThat(val).isEqualTo(i * NUM_HANDLES + s); } ++s; @@ -265,47 +266,47 @@ public void getOperatorStateStreams() throws Exception { } @Test - public void getKeyedStateStreams() throws Exception { + void getKeyedStateStreams() throws Exception { int readKeyGroupCount = 0; for (KeyGroupStatePartitionStreamProvider stateStreamProvider : initializationContext.getRawKeyedStateInputs()) { - Assert.assertNotNull(stateStreamProvider); + assertThat(stateStreamProvider).isNotNull(); try (InputStream is = stateStreamProvider.getStream()) { DataInputView div = new DataInputViewStreamWrapper(is); int val = div.readInt(); ++readKeyGroupCount; - Assert.assertEquals(stateStreamProvider.getKeyGroupId(), val); + assertThat(val).isEqualTo(stateStreamProvider.getKeyGroupId()); } } - Assert.assertEquals(writtenKeyGroups, readKeyGroupCount); + assertThat(readKeyGroupCount).isEqualTo(writtenKeyGroups); } @Test - public void 
getOperatorStateStore() throws Exception { + void getOperatorStateStore() throws Exception { Set readStatesCount = new HashSet<>(); for (StatePartitionStreamProvider statePartitionStreamProvider : initializationContext.getRawOperatorStateInputs()) { - Assert.assertNotNull(statePartitionStreamProvider); + assertThat(statePartitionStreamProvider).isNotNull(); try (InputStream is = statePartitionStreamProvider.getStream()) { DataInputView div = new DataInputViewStreamWrapper(is); - Assert.assertTrue(readStatesCount.add(div.readInt())); + assertThat(readStatesCount.add(div.readInt())).isTrue(); } } - Assert.assertEquals(writtenOperatorStates, readStatesCount); + assertThat(readStatesCount).isEqualTo(writtenOperatorStates); } @Test - public void close() throws Exception { + void close() throws Exception { int count = 0; int stopCount = NUM_HANDLES / 2; @@ -314,7 +315,7 @@ public void close() throws Exception { try { for (KeyGroupStatePartitionStreamProvider stateStreamProvider : initializationContext.getRawKeyedStateInputs()) { - Assert.assertNotNull(stateStreamProvider); + assertThat(stateStreamProvider).isNotNull(); if (count == stopCount) { closableRegistry.close(); @@ -325,10 +326,8 @@ public void close() throws Exception { DataInputView div = new DataInputViewStreamWrapper(is); try { int val = div.readInt(); - Assert.assertEquals(stateStreamProvider.getKeyGroupId(), val); - if (isClosed) { - Assert.fail("Close was ignored: stream"); - } + assertThat(val).isEqualTo(stateStreamProvider.getKeyGroupId()); + assertThat(isClosed).as("Close was ignored: stream").isFalse(); ++count; } catch (IOException ioex) { if (!isClosed) { @@ -337,10 +336,10 @@ public void close() throws Exception { } } } - Assert.fail("Close was ignored: registry"); + fail("Close was ignored: registry"); } catch (IOException iex) { - Assert.assertTrue(isClosed); - Assert.assertEquals(stopCount, count); + assertThat(isClosed).isTrue(); + assertThat(count).isEqualTo(stopCount); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateSnapshotContextSynchronousImplTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateSnapshotContextSynchronousImplTest.java index 3b13d11084b53..5fe5998b5ff02 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateSnapshotContextSynchronousImplTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StateSnapshotContextSynchronousImplTest.java @@ -27,28 +27,25 @@ import org.apache.flink.runtime.state.OperatorStateCheckpointOutputStream; import org.apache.flink.runtime.state.StateSnapshotContextSynchronousImpl; import org.apache.flink.runtime.state.memory.MemCheckpointStreamFactory; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.Closeable; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** Tests for {@link StateSnapshotContextSynchronousImpl}. 
*/ -public class StateSnapshotContextSynchronousImplTest extends TestLogger { +class StateSnapshotContextSynchronousImplTest { private StateSnapshotContextSynchronousImpl snapshotContext; - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() { CloseableRegistry closableRegistry = new CloseableRegistry(); CheckpointStreamFactory streamFactory = new MemCheckpointStreamFactory(1024); KeyGroupRange keyGroupRange = new KeyGroupRange(0, 2); @@ -58,21 +55,21 @@ public void setUp() throws Exception { } @Test - public void testMetaData() { - assertEquals(42, snapshotContext.getCheckpointId()); - assertEquals(4711, snapshotContext.getCheckpointTimestamp()); + void testMetaData() { + assertThat(snapshotContext.getCheckpointId()).isEqualTo(42); + assertThat(snapshotContext.getCheckpointTimestamp()).isEqualTo(4711); } @Test - public void testCreateRawKeyedStateOutput() throws Exception { + void testCreateRawKeyedStateOutput() throws Exception { KeyedStateCheckpointOutputStream stream = snapshotContext.getRawKeyedOperatorStateOutput(); - Assert.assertNotNull(stream); + assertThat(stream).isNotNull(); } @Test - public void testCreateRawOperatorStateOutput() throws Exception { + void testCreateRawOperatorStateOutput() throws Exception { OperatorStateCheckpointOutputStream stream = snapshotContext.getRawOperatorStateOutput(); - Assert.assertNotNull(stream); + assertThat(stream).isNotNull(); } /** @@ -80,7 +77,7 @@ public void testCreateRawOperatorStateOutput() throws Exception { * output streams. */ @Test - public void testStreamClosingWhenClosing() throws Exception { + void testStreamClosingWhenClosing() throws Exception { long checkpointId = 42L; long checkpointTimestamp = 1L; @@ -110,9 +107,9 @@ public void testStreamClosingWhenClosing() throws Exception { verify(streamFactory, times(2)) .createCheckpointStateOutputStream(CheckpointedStateScope.EXCLUSIVE); - assertEquals(2, closableRegistry.size()); - assertTrue(closableRegistry.contains(outputStream1)); - assertTrue(closableRegistry.contains(outputStream2)); + assertThat(closableRegistry.size()).isEqualTo(2); + assertThat(closableRegistry.contains(outputStream1)).isTrue(); + assertThat(closableRegistry.contains(outputStream2)).isTrue(); context.getKeyedStateStreamFuture().run(); context.getOperatorStateStreamFuture().run(); @@ -120,11 +117,11 @@ public void testStreamClosingWhenClosing() throws Exception { verify(outputStream1).closeAndGetHandle(); verify(outputStream2).closeAndGetHandle(); - assertEquals(0, closableRegistry.size()); + assertThat(closableRegistry.size()).isZero(); } @Test - public void testStreamClosingExceptionally() throws Exception { + void testStreamClosingExceptionally() throws Exception { long checkpointId = 42L; long checkpointTimestamp = 1L; @@ -154,16 +151,16 @@ public void testStreamClosingExceptionally() throws Exception { verify(streamFactory, times(2)) .createCheckpointStateOutputStream(CheckpointedStateScope.EXCLUSIVE); - assertEquals(2, closableRegistry.size()); - assertTrue(closableRegistry.contains(outputStream1)); - assertTrue(closableRegistry.contains(outputStream2)); + assertThat(closableRegistry.size()).isEqualTo(2); + assertThat(closableRegistry.contains(outputStream1)).isTrue(); + assertThat(closableRegistry.contains(outputStream2)).isTrue(); context.closeExceptionally(); verify(outputStream1).close(); verify(outputStream2).close(); - assertEquals(0, closableRegistry.size()); + assertThat(closableRegistry.size()).isZero(); } static final class InsightCloseableRegistry extends 
CloseableRegistry { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamFilterTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamFilterTest.java index 626d5cbafdcd7..9545f4c32411f 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamFilterTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamFilterTest.java @@ -25,11 +25,12 @@ import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.apache.flink.streaming.util.TestHarnessUtil; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.ConcurrentLinkedQueue; +import static org.assertj.core.api.Assertions.assertThat; + /** * Tests for {@link StreamFilter}. These test that: * @@ -39,7 +40,7 @@ *

  • Watermarks are correctly forwarded * */ -public class StreamFilterTest { +class StreamFilterTest { static class MyFilter implements FilterFunction { private static final long serialVersionUID = 1L; @@ -52,7 +53,7 @@ public boolean filter(Integer value) throws Exception { @Test @SuppressWarnings("unchecked") - public void testFilter() throws Exception { + void testFilter() throws Exception { StreamFilter operator = new StreamFilter(new MyFilter()); OneInputStreamOperatorTestHarness testHarness = @@ -82,7 +83,7 @@ public void testFilter() throws Exception { } @Test - public void testOpenClose() throws Exception { + void testOpenClose() throws Exception { StreamFilter operator = new StreamFilter(new TestOpenCloseFilterFunction()); OneInputStreamOperatorTestHarness testHarness = @@ -97,9 +98,10 @@ public void testOpenClose() throws Exception { testHarness.close(); - Assert.assertTrue( - "RichFunction methods where not called.", TestOpenCloseFilterFunction.closeCalled); - Assert.assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0); + assertThat(TestOpenCloseFilterFunction.closeCalled) + .as("RichFunction methods where not called.") + .isTrue(); + assertThat(testHarness.getOutput()).as("Output contains no elements.").isNotEmpty(); } // This must only be used in one test, otherwise the static fields will be changed @@ -113,26 +115,20 @@ private static class TestOpenCloseFilterFunction extends RichFilterFunctionWatermarks are correctly forwarded * */ -public class StreamFlatMapTest { +class StreamFlatMapTest { private static final class MyFlatMap implements FlatMapFunction { @@ -56,7 +57,7 @@ public void flatMap(Integer value, Collector out) throws Exception { } @Test - public void testFlatMap() throws Exception { + void testFlatMap() throws Exception { StreamFlatMap operator = new StreamFlatMap(new MyFlatMap()); @@ -93,7 +94,7 @@ public void testFlatMap() throws Exception { } @Test - public void testOpenClose() throws Exception { + void testOpenClose() throws Exception { StreamFlatMap operator = new StreamFlatMap(new TestOpenCloseFlatMapFunction()); @@ -108,9 +109,10 @@ public void testOpenClose() throws Exception { testHarness.close(); - Assert.assertTrue( - "RichFunction methods where not called.", TestOpenCloseFlatMapFunction.closeCalled); - Assert.assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0); + assertThat(TestOpenCloseFlatMapFunction.closeCalled) + .as("RichFunction methods where not called.") + .isTrue(); + assertThat(testHarness.getOutput()).as("Output contains no elements.").isNotEmpty(); } // This must only be used in one test, otherwise the static fields will be changed @@ -124,26 +126,20 @@ private static class TestOpenCloseFlatMapFunction extends RichFlatMapFunction out) throws Exception { - if (!openCalled) { - Assert.fail("Open was not called before run."); - } + assertThat(openCalled).as("Open was not called before run.").isTrue(); out.collect(value); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamGroupedReduceOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamGroupedReduceOperatorTest.java index 6543a97276a6a..9fd28e99b63b1 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamGroupedReduceOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamGroupedReduceOperatorTest.java @@ -30,11 +30,12 @@ import 
org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.apache.flink.streaming.util.TestHarnessUtil; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.ConcurrentLinkedQueue; +import static org.assertj.core.api.Assertions.assertThat; + /** * Tests for {@link StreamGroupedReduceOperator}. These test that: * @@ -44,10 +45,10 @@ *
• Watermarks are correctly forwarded * */ -public class StreamGroupedReduceOperatorTest { +class StreamGroupedReduceOperatorTest { @Test - public void testGroupedReduce() throws Exception { + void testGroupedReduce() throws Exception { KeySelector keySelector = new IntegerKeySelector(); @@ -82,7 +83,7 @@ public void testGroupedReduce() throws Exception { } @Test - public void testOpenClose() throws Exception { + void testOpenClose() throws Exception { KeySelector keySelector = new IntegerKeySelector(); @@ -102,9 +103,10 @@ public void testOpenClose() throws Exception { testHarness.close(); - Assert.assertTrue( - "RichFunction methods where not called.", TestOpenCloseReduceFunction.closeCalled); - Assert.assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0); + assertThat(TestOpenCloseReduceFunction.closeCalled) + .as("RichFunction methods where not called.") + .isTrue(); + assertThat(testHarness.getOutput()).as("Output contains no elements.").isNotEmpty(); // This must only be used in one test, otherwise the static fields will be changed @@ -118,26 +120,20 @@ private static class TestOpenCloseReduceFunction extends RichReduceFunctionWatermarks are correctly forwarded * */ -public class StreamMapTest { private static class Map implements MapFunction { private static final long serialVersionUID = 1L; @@ -51,7 +52,7 @@ public String map(Integer value) throws Exception { } @Test - public void testMap() throws Exception { + void testMap() throws Exception { StreamMap operator = new StreamMap(new Map()); OneInputStreamOperatorTestHarness testHarness = @@ -77,7 +78,7 @@ public void testMap() throws Exception { } @Test - public void testOpenClose() throws Exception { + void testOpenClose() throws Exception { StreamMap operator = new StreamMap(new TestOpenCloseMapFunction()); @@ -92,9 +93,10 @@ public void testOpenClose() throws Exception { testHarness.close(); - Assert.assertTrue( - "RichFunction methods where not called.", TestOpenCloseMapFunction.closeCalled); - Assert.assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0); + assertThat(TestOpenCloseMapFunction.closeCalled) + .as("RichFunction methods where not called.") + .isTrue(); + assertThat(testHarness.getOutput()).as("Output contains no elements.").isNotEmpty(); // This must only be used in one test, otherwise the static fields will be changed @@ -108,26 +110,20 @@ private static class TestOpenCloseMapFunction extends RichMapFunction) stateContext.keyedStateBackend()) - .numKeyValueStatesByName(), - equalTo(1)); - - try { - stateHandler.snapshotState( - checkpointedStreamOperator, - Optional.of(stateContext.internalTimerServiceManager()), - "42", - 42, - 42, - CheckpointOptions.forCheckpointWithDefaultLocation(), - new MemCheckpointStreamFactory(1024), - operatorSnapshotResult, - context, - false); - fail("Exception expected."); - } catch (CheckpointException e) { - // We can not check for ExpectedTestException class directly, - // as CheckpointException is wrapping the cause with SerializedThrowable - if (!ExceptionUtils.findThrowableWithMessage(e, ExpectedTestException.MESSAGE) - .isPresent()) { - throw e; - } - } - - assertTrue(keyedStateManagedFuture.isCancelled()); - assertTrue(keyedStateRawFuture.isCancelled()); - assertTrue(context.getKeyedStateStreamFuture().isCancelled()); - assertTrue(operatorStateManagedFuture.isCancelled()); - assertTrue(operatorStateRawFuture.isCancelled()); - assertTrue(context.getOperatorStateStreamFuture().isCancelled()); -
assertTrue(inputChannelStateFuture.isCancelled()); - assertTrue(resultSubpartitionStateFuture.isCancelled()); + ((AbstractKeyedStateBackend) stateContext.keyedStateBackend()) + .numKeyValueStatesByName()) + .isOne(); + + assertThatThrownBy( + () -> + stateHandler.snapshotState( + checkpointedStreamOperator, + Optional.of(stateContext.internalTimerServiceManager()), + "42", + 42, + 42, + CheckpointOptions.forCheckpointWithDefaultLocation(), + new MemCheckpointStreamFactory(1024), + operatorSnapshotResult, + context, + false)) + .isInstanceOfSatisfying( + CheckpointException.class, + // We can not check for ExpectedTestException class directly, + // as CheckpointException is wrapping the cause with SerializedThrowable + e -> + assertThat( + ExceptionUtils.findThrowableWithMessage( + e, ExpectedTestException.MESSAGE)) + .isPresent()); + + assertThat(keyedStateManagedFuture).isCancelled(); + assertThat(keyedStateRawFuture).isCancelled(); + assertThat(context.getKeyedStateStreamFuture()).isCancelled(); + assertThat(operatorStateManagedFuture).isCancelled(); + assertThat(operatorStateRawFuture).isCancelled(); + assertThat(context.getOperatorStateStreamFuture()).isCancelled(); + assertThat(inputChannelStateFuture).isCancelled(); + assertThat(resultSubpartitionStateFuture).isCancelled(); stateHandler.dispose(); + assertThat(stateContext.operatorStateBackend().getRegisteredBroadcastStateNames()) + .isEmpty(); + assertThat(stateContext.operatorStateBackend().getRegisteredStateNames()).isEmpty(); assertThat( - stateContext.operatorStateBackend().getRegisteredBroadcastStateNames(), - is(empty())); - assertThat(stateContext.operatorStateBackend().getRegisteredStateNames(), is(empty())); - assertThat( - ((AbstractKeyedStateBackend) stateContext.keyedStateBackend()) - .numKeyValueStatesByName(), - equalTo(0)); + ((AbstractKeyedStateBackend) stateContext.keyedStateBackend()) + .numKeyValueStatesByName()) + .isZero(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamProjectTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamProjectTest.java index f5b24d6eb577a..323b9da6c52e3 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamProjectTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamProjectTest.java @@ -30,7 +30,7 @@ import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.apache.flink.streaming.util.TestHarnessUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.ConcurrentLinkedQueue; @@ -42,10 +42,10 @@ *
• Watermarks are correctly forwarded * */ -public class StreamProjectTest { +class StreamProjectTest { @Test - public void testProject() throws Exception { + void testProject() throws Exception { TypeInformation> inType = TypeExtractor.getForObject( diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSinkOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSinkOperatorTest.java index 497d9d3fd28b9..467af282e3d41 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSinkOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSinkOperatorTest.java @@ -23,30 +23,23 @@ import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.contains; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link StreamSink}. */ -public class StreamSinkOperatorTest extends TestLogger { - - @Rule public ExpectedException expectedException = ExpectedException.none(); +class StreamSinkOperatorTest { /** * Verify that we can correctly query watermark, processing time and the timestamp from the * context. */ @Test - public void testTimeQuerying() throws Exception { + void testTimeQuerying() throws Exception { BufferingQueryingSink bufferingSink = new BufferingQueryingSink<>(); @@ -70,23 +63,21 @@ public void testTimeQuerying() throws Exception { testHarness.setProcessingTime(15); testHarness.processElement(new StreamRecord<>("Ciao")); - assertThat(bufferingSink.data.size(), is(3)); + assertThat(bufferingSink.data).hasSize(3); - assertThat( - bufferingSink.data, - contains( + assertThat(bufferingSink.data) + .containsExactly( new Tuple4<>(17L, 12L, 12L, "Hello"), new Tuple4<>(42L, 15L, 13L, "Ciao"), - new Tuple4<>(42L, 15L, null, "Ciao"))); + new Tuple4<>(42L, 15L, null, "Ciao")); - assertThat(bufferingSink.watermarks.size(), is(3)); + assertThat(bufferingSink.watermarks).hasSize(3); - assertThat( - bufferingSink.watermarks, - contains( + assertThat(bufferingSink.watermarks) + .containsExactly( new org.apache.flink.api.common.eventtime.Watermark(17L), new org.apache.flink.api.common.eventtime.Watermark(42L), - new org.apache.flink.api.common.eventtime.Watermark(42L))); + new org.apache.flink.api.common.eventtime.Watermark(42L)); testHarness.close(); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSourceContextIdleDetectionTests.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSourceContextIdleDetectionTests.java index 504cff873272c..99ba0f64cdb8f 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSourceContextIdleDetectionTests.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamSourceContextIdleDetectionTests.java @@ -26,23 +26,22 @@ import org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService; import
org.apache.flink.streaming.runtime.watermarkstatus.WatermarkStatus; import org.apache.flink.streaming.util.CollectorOutput; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameters; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link StreamSource} awareness of source idleness. */ -@RunWith(Parameterized.class) -public class StreamSourceContextIdleDetectionTests { +@ExtendWith(ParameterizedTestExtension.class) +class StreamSourceContextIdleDetectionTests { /** The tests in this class will be parameterized with these enumerations. */ private enum TestMethod { @@ -79,8 +78,8 @@ public StreamSourceContextIdleDetectionTests(TestMethod testMethod) { * *
    Inline comments will refer to the corresponding tested steps in the scenario. */ - @Test - public void testManualWatermarkContext() throws Exception { + @TestTemplate + void testManualWatermarkContext() throws Exception { long idleTimeout = 100; long initialTime = 0; @@ -105,12 +104,12 @@ public void testManualWatermarkContext() throws Exception { // corresponds to step (2) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + idleTimeout); expectedOutput.add(WatermarkStatus.IDLE); - assertThat(output, equalTo(expectedOutput)); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (3) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + 2 * idleTimeout); processingTimeService.setCurrentTime(initialTime + 3 * idleTimeout); - assertThat(output, equalTo(expectedOutput)); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (4) of scenario (please see method-level Javadoc comment) expectedOutput.add(WatermarkStatus.ACTIVE); @@ -119,7 +118,7 @@ public void testManualWatermarkContext() throws Exception { expectedOutput, processingTimeService, context); - assertThat(output, equalTo(expectedOutput)); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (5) of scenario (please see method-level Javadoc comment) emitStreamElement( @@ -127,16 +126,16 @@ public void testManualWatermarkContext() throws Exception { expectedOutput, processingTimeService, context); - assertThat(output, equalTo(expectedOutput)); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (6) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + 4 * idleTimeout + idleTimeout / 10); - assertThat(output, equalTo(expectedOutput)); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (7) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + 5 * idleTimeout + idleTimeout / 10); expectedOutput.add(WatermarkStatus.IDLE); - assertThat(output, equalTo(expectedOutput)); + assertThat(output).isEqualTo(expectedOutput); } private void emitStreamElement( @@ -178,8 +177,8 @@ private void emitStreamElement( * *
    Inline comments will refer to the corresponding tested steps in the scenario. */ - @Test - public void testAutomaticWatermarkContext() throws Exception { + @TestTemplate + void testAutomaticWatermarkContext() throws Exception { long watermarkInterval = 40; long idleTimeout = 100; long initialTime = 20; @@ -217,7 +216,7 @@ public void testAutomaticWatermarkContext() throws Exception { % watermarkInterval))); processingTimeService.setCurrentTime(initialTime + idleTimeout); expectedOutput.add(WatermarkStatus.IDLE); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (3) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + 3 * watermarkInterval); @@ -226,7 +225,7 @@ public void testAutomaticWatermarkContext() throws Exception { processingTimeService.setCurrentTime(initialTime + 6 * watermarkInterval); processingTimeService.setCurrentTime(initialTime + 7 * watermarkInterval); processingTimeService.setCurrentTime(initialTime + 3 * idleTimeout); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (4) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + 3 * idleTimeout + idleTimeout / 10); @@ -242,7 +241,7 @@ public void testAutomaticWatermarkContext() throws Exception { processingTimeService.getCurrentProcessingTime() - (processingTimeService.getCurrentProcessingTime() % watermarkInterval))); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); break; case COLLECT_WITH_TIMESTAMP: expectedOutput.add(WatermarkStatus.ACTIVE); @@ -256,7 +255,7 @@ public void testAutomaticWatermarkContext() throws Exception { processingTimeService.getCurrentProcessingTime() - (processingTimeService.getCurrentProcessingTime() % watermarkInterval))); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); break; case EMIT_WATERMARK: // for emitWatermark, since the watermark will be blocked, @@ -264,7 +263,7 @@ public void testAutomaticWatermarkContext() throws Exception { // from here on, the status should remain idle for the emitWatermark variant test context.emitWatermark( new Watermark(processingTimeService.getCurrentProcessingTime())); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); } // corresponds to step (5) of scenario (please see method-level Javadoc comment) @@ -276,7 +275,7 @@ public void testAutomaticWatermarkContext() throws Exception { expectedOutput.add( new StreamRecord<>( "msg", processingTimeService.getCurrentProcessingTime())); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); break; case COLLECT_WITH_TIMESTAMP: context.collectWithTimestamp( @@ -284,12 +283,12 @@ public void testAutomaticWatermarkContext() throws Exception { expectedOutput.add( new StreamRecord<>( "msg", processingTimeService.getCurrentProcessingTime())); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); break; case EMIT_WATERMARK: context.emitWatermark( new Watermark(processingTimeService.getCurrentProcessingTime())); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); } processingTimeService.setCurrentTime(initialTime + 9 * watermarkInterval); @@ -301,10 +300,10 @@ public void testAutomaticWatermarkContext() throws Exception { processingTimeService.getCurrentProcessingTime() - 
(processingTimeService.getCurrentProcessingTime() % watermarkInterval))); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); break; case EMIT_WATERMARK: - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); } processingTimeService.setCurrentTime(initialTime + 10 * watermarkInterval); @@ -316,15 +315,15 @@ public void testAutomaticWatermarkContext() throws Exception { processingTimeService.getCurrentProcessingTime() - (processingTimeService.getCurrentProcessingTime() % watermarkInterval))); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); break; case EMIT_WATERMARK: - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); } // corresponds to step (6) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + 4 * idleTimeout + idleTimeout / 10); - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); // corresponds to step (7) of scenario (please see method-level Javadoc comment) processingTimeService.setCurrentTime(initialTime + 11 * watermarkInterval); @@ -332,12 +331,12 @@ public void testAutomaticWatermarkContext() throws Exception { if (testMethod != TestMethod.EMIT_WATERMARK) { expectedOutput.add(WatermarkStatus.IDLE); } - assertEquals(expectedOutput, output); + assertThat(output).isEqualTo(expectedOutput); } - @Parameterized.Parameters(name = "TestMethod = {0}") + @Parameters(name = "TestMethod = {0}") @SuppressWarnings("unchecked") - public static Collection timeCharacteristic() { + private static Collection timeCharacteristic() { return Arrays.asList( new TestMethod[] {TestMethod.COLLECT}, new TestMethod[] {TestMethod.COLLECT_WITH_TIMESTAMP}, diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamTaskStateInitializerImplTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamTaskStateInitializerImplTest.java index 1d52b26dbddc3..cc99c87908ee3 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamTaskStateInitializerImplTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamTaskStateInitializerImplTest.java @@ -57,13 +57,10 @@ import org.apache.flink.util.CloseableIterable; import org.apache.flink.util.clock.SystemClock; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.Closeable; import java.util.Collections; -import java.util.HashMap; -import java.util.OptionalLong; import java.util.Random; import java.util.stream.Stream; @@ -72,16 +69,16 @@ import static org.apache.flink.runtime.checkpoint.StateObjectCollection.singleton; import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.createExecutionAttemptId; import static org.apache.flink.runtime.state.OperatorStateHandle.Mode.SPLIT_DISTRIBUTE; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; /** Test for {@link StreamTaskStateInitializerImpl}. 
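The parameterized migration above follows a fixed recipe: @RunWith(Parameterized.class) becomes @ExtendWith(ParameterizedTestExtension.class), each @Test becomes a @TestTemplate, and the @Parameterized.Parameters factory becomes a (possibly private) static method annotated with Flink's @Parameters. A hypothetical test assembled from those pieces, assuming the extension's constructor injection behaves as in the hunks above:

import java.util.Arrays;
import java.util.Collection;

import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension;
import org.apache.flink.testutils.junit.extensions.parameterized.Parameters;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

import static org.assertj.core.api.Assertions.assertThat;

@ExtendWith(ParameterizedTestExtension.class)
class ParameterizedMigrationSketch {

    private final int input;

    // The current parameter is injected through the constructor,
    // exactly as with the JUnit 4 Parameterized runner.
    ParameterizedMigrationSketch(int input) {
        this.input = input;
    }

    @Parameters(name = "input = {0}")
    private static Collection<Integer> parameters() {
        return Arrays.asList(1, 2, 3);
    }

    // @TestTemplate lets the extension expand this method once per parameter.
    @TestTemplate
    void inputIsPositive() {
        assertThat(input).isPositive();
    }
}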
*/ -public class StreamTaskStateInitializerImplTest { +class StreamTaskStateInitializerImplTest { @Test - public void testNoRestore() throws Exception { + void testNoRestore() throws Exception { MemoryStateBackend stateBackend = spy(new MemoryStateBackend(1024)); @@ -122,12 +119,14 @@ public void testNoRestore() throws Exception { CloseableIterable operatorStateInputs = stateContext.rawOperatorStateInputs(); - Assert.assertFalse("Expected the context to NOT be restored", stateContext.isRestored()); - Assert.assertNotNull(operatorStateBackend); - Assert.assertNotNull(keyedStateBackend); - Assert.assertNotNull(timeServiceManager); - Assert.assertNotNull(keyedStateInputs); - Assert.assertNotNull(operatorStateInputs); + assertThat(stateContext.isRestored()) + .as("Expected the context to NOT be restored") + .isFalse(); + assertThat(operatorStateBackend).isNotNull(); + assertThat(keyedStateBackend).isNotNull(); + assertThat(timeServiceManager).isNotNull(); + assertThat(keyedStateInputs).isNotNull(); + assertThat(operatorStateInputs).isNotNull(); checkCloseablesRegistered( closeableRegistry, @@ -136,13 +135,13 @@ public void testNoRestore() throws Exception { keyedStateInputs, operatorStateInputs); - Assert.assertFalse(keyedStateInputs.iterator().hasNext()); - Assert.assertFalse(operatorStateInputs.iterator().hasNext()); + assertThat(keyedStateInputs.iterator()).isExhausted(); + assertThat(operatorStateInputs.iterator()).isExhausted(); } @SuppressWarnings("unchecked") @Test - public void testWithRestore() throws Exception { + void testWithRestore() throws Exception { StateBackend mockingBackend = spy( @@ -233,28 +232,28 @@ public OperatorStateBackend createOperatorStateBackend( CloseableIterable operatorStateInputs = stateContext.rawOperatorStateInputs(); - assertTrue("Expected the context to be restored", stateContext.isRestored()); - Assert.assertEquals(OptionalLong.of(42L), stateContext.getRestoredCheckpointId()); + assertThat(stateContext.isRestored()).as("Expected the context to be restored").isTrue(); + assertThat(stateContext.getRestoredCheckpointId()).hasValue(42L); - Assert.assertNotNull(operatorStateBackend); - Assert.assertNotNull(keyedStateBackend); + assertThat(operatorStateBackend).isNotNull(); + assertThat(keyedStateBackend).isNotNull(); // this is deactivated on purpose so that it does not attempt to consume the raw keyed // state. - Assert.assertNull(timeServiceManager); - Assert.assertNotNull(keyedStateInputs); - Assert.assertNotNull(operatorStateInputs); + assertThat(timeServiceManager).isNull(); + assertThat(keyedStateInputs).isNotNull(); + assertThat(operatorStateInputs).isNotNull(); int count = 0; for (KeyGroupStatePartitionStreamProvider keyedStateInput : keyedStateInputs) { ++count; } - Assert.assertEquals(1, count); + assertThat(count).isOne(); count = 0; for (StatePartitionStreamProvider operatorStateInput : operatorStateInputs) { ++count; } - Assert.assertEquals(3, count); + assertThat(count).isEqualTo(3); long expectedSumLocalMemory = Stream.of( @@ -274,22 +273,18 @@ public OperatorStateBackend createOperatorStateBackend( .sum(); SubTaskInitializationMetrics metrics = metricsBuilder.build(); - Assert.assertEquals( - new HashMap() { - { - put( - MetricNames.RESTORED_STATE_SIZE - + "." - + StateObject.StateObjectLocation.LOCAL_MEMORY.name(), - expectedSumLocalMemory); - put( - MetricNames.RESTORED_STATE_SIZE - + "." 
- + StateObject.StateObjectLocation.UNKNOWN.name(), - expectedSumUnknown); - } - }, - metrics.getDurationMetrics()); + assertThat(metrics.getDurationMetrics()) + .hasSize(2) + .containsEntry( + MetricNames.RESTORED_STATE_SIZE + + "." + + StateObject.StateObjectLocation.LOCAL_MEMORY.name(), + expectedSumLocalMemory) + .containsEntry( + MetricNames.RESTORED_STATE_SIZE + + "." + + StateObject.StateObjectLocation.UNKNOWN.name(), + expectedSumUnknown); checkCloseablesRegistered( closeableRegistry, @@ -302,7 +297,7 @@ public OperatorStateBackend createOperatorStateBackend( private static void checkCloseablesRegistered( CloseableRegistry closeableRegistry, Closeable... closeables) { for (Closeable closeable : closeables) { - assertTrue(closeableRegistry.unregisterCloseable(closeable)); + assertThat(closeableRegistry.unregisterCloseable(closeable)).isTrue(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamingRuntimeContextTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamingRuntimeContextTest.java index 85694a79023df..24e4bf5d20975 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamingRuntimeContextTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/StreamingRuntimeContextTest.java @@ -66,7 +66,7 @@ import org.apache.flink.streaming.util.CollectorOutput; import org.apache.flink.streaming.util.MockStreamTaskBuilder; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Matchers; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -76,19 +76,17 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicReference; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** Tests for {@link StreamingRuntimeContext}. 
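The metrics assertion above also shows the map idiom: instead of building an expected HashMap (previously via double-brace initialization) and calling assertEquals, AssertJ chains hasSize and containsEntry, which names the offending entry on failure instead of dumping both maps. A minimal sketch with illustrative metric names:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.HashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;

class MapAssertionSketch {

    @Test
    void mapContentsWithoutAnExpectedMap() {
        Map<String, Long> metrics = new HashMap<>();
        metrics.put("restoredStateSize.LOCAL_MEMORY", 1024L);
        metrics.put("restoredStateSize.UNKNOWN", 0L);

        // hasSize(2) rules out extra entries; containsEntry pins the expected ones.
        assertThat(metrics)
                .hasSize(2)
                .containsEntry("restoredStateSize.LOCAL_MEMORY", 1024L)
                .containsEntry("restoredStateSize.UNKNOWN", 0L);
    }
}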
*/ -public class StreamingRuntimeContextTest { +class StreamingRuntimeContextTest { @Test - public void testValueStateInstantiation() throws Exception { + void testValueStateInstantiation() throws Exception { final ExecutionConfig config = new ExecutionConfig(); config.getSerializerConfig().registerKryoType(Path.class); @@ -103,13 +101,13 @@ public void testValueStateInstantiation() throws Exception { TypeSerializer serializer = descrIntercepted.getSerializer(); // check that the Path class is really registered, i.e., the execution config was applied - assertTrue(serializer instanceof KryoSerializer); - assertTrue( - ((KryoSerializer) serializer).getKryo().getRegistration(Path.class).getId() > 0); + assertThat(serializer).isInstanceOf(KryoSerializer.class); + assertThat(((KryoSerializer) serializer).getKryo().getRegistration(Path.class).getId()) + .isPositive(); } @Test - public void testReducingStateInstantiation() throws Exception { + void testReducingStateInstantiation() throws Exception { final ExecutionConfig config = new ExecutionConfig(); config.getSerializerConfig().registerKryoType(Path.class); @@ -130,13 +128,13 @@ public void testReducingStateInstantiation() throws Exception { TypeSerializer serializer = descrIntercepted.getSerializer(); // check that the Path class is really registered, i.e., the execution config was applied - assertTrue(serializer instanceof KryoSerializer); - assertTrue( - ((KryoSerializer) serializer).getKryo().getRegistration(Path.class).getId() > 0); + assertThat(serializer).isInstanceOf(KryoSerializer.class); + assertThat(((KryoSerializer) serializer).getKryo().getRegistration(Path.class).getId()) + .isPositive(); } @Test - public void testAggregatingStateInstantiation() throws Exception { + void testAggregatingStateInstantiation() throws Exception { final ExecutionConfig config = new ExecutionConfig(); config.getSerializerConfig().registerKryoType(Path.class); @@ -158,13 +156,13 @@ public void testAggregatingStateInstantiation() throws Exception { TypeSerializer serializer = descrIntercepted.getSerializer(); // check that the Path class is really registered, i.e., the execution config was applied - assertTrue(serializer instanceof KryoSerializer); - assertTrue( - ((KryoSerializer) serializer).getKryo().getRegistration(Path.class).getId() > 0); + assertThat(serializer).isInstanceOf(KryoSerializer.class); + assertThat(((KryoSerializer) serializer).getKryo().getRegistration(Path.class).getId()) + .isPositive(); } @Test - public void testListStateInstantiation() throws Exception { + void testListStateInstantiation() throws Exception { final ExecutionConfig config = new ExecutionConfig(); config.getSerializerConfig().registerKryoType(Path.class); @@ -180,32 +178,32 @@ public void testListStateInstantiation() throws Exception { TypeSerializer serializer = descrIntercepted.getSerializer(); // check that the Path class is really registered, i.e., the execution config was applied - assertTrue(serializer instanceof ListSerializer); + assertThat(serializer).isInstanceOf(ListSerializer.class); TypeSerializer elementSerializer = descrIntercepted.getElementSerializer(); - assertTrue(elementSerializer instanceof KryoSerializer); - assertTrue( - ((KryoSerializer) elementSerializer) + assertThat(elementSerializer).isInstanceOf(KryoSerializer.class); + assertThat( + ((KryoSerializer) elementSerializer) .getKryo() .getRegistration(Path.class) - .getId() - > 0); + .getId()) + .isPositive(); } @Test - public void testListStateReturnsEmptyListByDefault() throws Exception { + 
void testListStateReturnsEmptyListByDefault() throws Exception { StreamingRuntimeContext context = createRuntimeContext(); ListStateDescriptor descr = new ListStateDescriptor<>("name", String.class); ListState state = context.getListState(descr); Iterable value = state.get(); - assertNotNull(value); - assertFalse(value.iterator().hasNext()); + assertThat(value).isNotNull(); + assertThat(value.iterator()).isExhausted(); } @Test - public void testMapStateInstantiation() throws Exception { + void testMapStateInstantiation() throws Exception { final ExecutionConfig config = new ExecutionConfig(); config.getSerializerConfig().registerKryoType(Path.class); @@ -224,14 +222,17 @@ public void testMapStateInstantiation() throws Exception { TypeSerializer valueSerializer = descrIntercepted.getValueSerializer(); // check that the Path class is really registered, i.e., the execution config was applied - assertTrue(valueSerializer instanceof KryoSerializer); - assertTrue( - ((KryoSerializer) valueSerializer).getKryo().getRegistration(Path.class).getId() - > 0); + assertThat(valueSerializer).isInstanceOf(KryoSerializer.class); + assertThat( + ((KryoSerializer) valueSerializer) + .getKryo() + .getRegistration(Path.class) + .getId()) + .isPositive(); } @Test - public void testMapStateReturnsEmptyMapByDefault() throws Exception { + void testMapStateReturnsEmptyMapByDefault() throws Exception { StreamingRuntimeContext context = createMapOperatorRuntimeContext(); @@ -240,8 +241,8 @@ public void testMapStateReturnsEmptyMapByDefault() throws Exception { MapState state = context.getMapState(descr); Iterable> value = state.entries(); - assertNotNull(value); - assertFalse(value.iterator().hasNext()); + assertThat(value).isNotNull(); + assertThat(value.iterator()).isExhausted(); } // ------------------------------------------------------------------------ diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/WrappingFunctionSnapshotRestoreTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/WrappingFunctionSnapshotRestoreTest.java index 122e62d649077..6192316577d67 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/WrappingFunctionSnapshotRestoreTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/WrappingFunctionSnapshotRestoreTest.java @@ -32,18 +32,19 @@ import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Collections; import java.util.Iterator; import java.util.List; +import static org.assertj.core.api.Assertions.assertThat; + /** Test snapshot state with {@link WrappingFunction}. 
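isExhausted(), used repeatedly above, is AssertJ's iterator-aware replacement for assertFalse(it.hasNext()); on failure it reports that the iterator still had elements rather than a bare boolean mismatch. A self-contained sketch:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Collections;
import java.util.Iterator;
import org.junit.jupiter.api.Test;

class IteratorAssertionSketch {

    @Test
    void emptyIteratorIsExhausted() {
        Iterator<String> it = Collections.<String>emptyList().iterator();

        // Same intent as assertFalse(it.hasNext()), with a clearer failure message.
        assertThat(it).isExhausted();
    }
}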
*/ -public class WrappingFunctionSnapshotRestoreTest { +class WrappingFunctionSnapshotRestoreTest { @Test - public void testSnapshotAndRestoreWrappedCheckpointedFunction() throws Exception { + void testSnapshotAndRestoreWrappedCheckpointedFunction() throws Exception { StreamMap operator = new StreamMap<>(new WrappingTestFun(new WrappingTestFun(new InnerTestFun()))); @@ -70,12 +71,12 @@ public void testSnapshotAndRestoreWrappedCheckpointedFunction() throws Exception testHarness.initializeState(snapshot); testHarness.open(); - Assert.assertTrue(innerTestFun.wasRestored); + assertThat(innerTestFun.wasRestored).isTrue(); testHarness.close(); } @Test - public void testSnapshotAndRestoreWrappedListCheckpointed() throws Exception { + void testSnapshotAndRestoreWrappedListCheckpointed() throws Exception { StreamMap operator = new StreamMap<>(new WrappingTestFun(new WrappingTestFun(new InnerTestFunList()))); @@ -102,7 +103,7 @@ public void testSnapshotAndRestoreWrappedListCheckpointed() throws Exception { testHarness.initializeState(snapshot); testHarness.open(); - Assert.assertTrue(innerTestFun.wasRestored); + assertThat(innerTestFun.wasRestored).isTrue(); testHarness.close(); } @@ -150,8 +151,8 @@ public void initializeState(FunctionInitializationContext context) throws Except if (context.isRestored()) { Iterator integers = serializableListState.get().iterator(); int act = integers.next(); - Assert.assertEquals(42, act); - Assert.assertFalse(integers.hasNext()); + assertThat(act).isEqualTo(42); + assertThat(integers).isExhausted(); wasRestored = true; } } @@ -180,9 +181,9 @@ public List snapshotState(long checkpointId, long timestamp) throws Exc @Override public void restoreState(List state) throws Exception { - Assert.assertEquals(1, state.size()); + assertThat(state).hasSize(1); int val = state.get(0); - Assert.assertEquals(42, val); + assertThat(val).isEqualTo(42); wasRestored = true; } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/AsyncWaitOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/AsyncWaitOperatorTest.java index d7f7b57ad22cb..5f10094954cf7 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/AsyncWaitOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/AsyncWaitOperatorTest.java @@ -58,19 +58,16 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.streaming.util.retryable.AsyncRetryStrategies; import org.apache.flink.streaming.util.retryable.RetryPredicates; -import org.apache.flink.testutils.junit.SharedObjects; +import org.apache.flink.testutils.junit.SharedObjectsExtension; import org.apache.flink.testutils.junit.SharedReference; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.Preconditions; -import org.apache.flink.util.TestLogger; import org.apache.flink.shaded.guava31.com.google.common.collect.Lists; -import org.hamcrest.Matchers; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.api.extension.RegisterExtension; import java.util.ArrayDeque; import java.util.ArrayList; @@ -78,7 +75,6 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; -import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ 
-96,10 +92,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link AsyncWaitOperator}. These test that: @@ -112,11 +105,12 @@ *
  • Snapshot state and restore state * */ -public class AsyncWaitOperatorTest extends TestLogger { +@Timeout(value = 100, unit = TimeUnit.SECONDS) +class AsyncWaitOperatorTest { private static final long TIMEOUT = 1000L; - @Rule public Timeout timeoutRule = new Timeout(100, TimeUnit.SECONDS); - @Rule public final SharedObjects sharedObjects = SharedObjects.create(); + @RegisterExtension + private final SharedObjectsExtension sharedObjects = SharedObjectsExtension.create(); private static AsyncRetryStrategy emptyResultFixedDelayRetryStrategy = new AsyncRetryStrategies.FixedDelayRetryStrategyBuilder(2, 10L) @@ -367,13 +361,13 @@ public int compare(Object o1, Object o2) { /** Test the AsyncWaitOperator with ordered mode and event time. */ @Test - public void testEventTimeOrdered() throws Exception { + void testEventTimeOrdered() throws Exception { testEventTime(AsyncDataStream.OutputMode.ORDERED); } /** Test the AsyncWaitOperator with unordered mode and event time. */ @Test - public void testWaterMarkUnordered() throws Exception { + void testWaterMarkUnordered() throws Exception { testEventTime(AsyncDataStream.OutputMode.UNORDERED); } @@ -412,14 +406,12 @@ private void testEventTime(AsyncDataStream.OutputMode mode) throws Exception { } else { Object[] jobOutputQueue = testHarness.getOutput().toArray(); - Assert.assertEquals( - "Watermark should be at index 2", - new Watermark(initialTime + 2), - jobOutputQueue[2]); - Assert.assertEquals( - "StreamRecord 3 should be at the end", - new StreamRecord<>(6, initialTime + 3), - jobOutputQueue[3]); + assertThat(jobOutputQueue[2]) + .as("Watermark should be at index 2") + .isEqualTo(new Watermark(initialTime + 2)); + assertThat(jobOutputQueue[3]) + .as("StreamRecord 3 should be at the end") + .isEqualTo(new StreamRecord<>(6, initialTime + 3)); TestHarnessUtil.assertOutputEqualsSorted( "Output for StreamRecords does not match", @@ -431,13 +423,13 @@ private void testEventTime(AsyncDataStream.OutputMode mode) throws Exception { /** Test the AsyncWaitOperator with ordered mode and processing time. */ @Test - public void testProcessingTimeOrdered() throws Exception { + void testProcessingTimeOrdered() throws Exception { testProcessingTime(AsyncDataStream.OutputMode.ORDERED); } /** Test the AsyncWaitOperator with unordered mode and processing time. */ @Test - public void testProcessingUnordered() throws Exception { + void testProcessingUnordered() throws Exception { testProcessingTime(AsyncDataStream.OutputMode.UNORDERED); } @@ -489,7 +481,7 @@ private void testProcessingTime(AsyncDataStream.OutputMode mode) throws Exceptio /** Tests that the AsyncWaitOperator works together with chaining. 
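Two JUnit 5 mechanisms appear in the AsyncWaitOperatorTest header above: a class-level @Timeout annotation replacing the @Rule Timeout field, and @RegisterExtension replacing @Rule for programmatically constructed extensions such as SharedObjectsExtension. A hypothetical stand-alone example with a stand-in extension (CountingExtension is invented for illustration):

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.RegisterExtension;

import static org.assertj.core.api.Assertions.assertThat;

@Timeout(value = 100, unit = TimeUnit.SECONDS) // applies to every test in the class
class ExtensionMigrationSketch {

    // Illustrative extension; real code would register something like
    // SharedObjectsExtension.create() here instead.
    static final class CountingExtension implements BeforeEachCallback {
        int invocations;

        @Override
        public void beforeEach(ExtensionContext context) {
            invocations++;
        }
    }

    // @RegisterExtension replaces @Rule for extensions built in code.
    @RegisterExtension
    final CountingExtension counting = new CountingExtension();

    @Test
    void extensionRanBeforeTheTest() {
        assertThat(counting.invocations).isOne();
    }
}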
*/ @Test - public void testOperatorChainWithProcessingTime() throws Exception { + void testOperatorChainWithProcessingTime() throws Exception { JobVertex chainedVertex = createChainedVertex(new MyAsyncFunction(), new MyAsyncFunction()); @@ -601,13 +593,13 @@ public Integer map(Integer value) throws Exception { // be build our own OperatorChain final JobGraph jobGraph = chainEnv.getStreamGraph().getJobGraph(); - Assert.assertEquals(3, jobGraph.getVerticesSortedTopologicallyFromSources().size()); + assertThat(jobGraph.getVerticesSortedTopologicallyFromSources()).hasSize(3); return jobGraph.getVerticesSortedTopologicallyFromSources().get(1); } @Test - public void testStateSnapshotAndRestore() throws Exception { + void testStateSnapshotAndRestore() throws Exception { final OneInputStreamTaskTestHarness testHarness = new OneInputStreamTaskTestHarness<>( OneInputStreamTask::new, @@ -654,7 +646,7 @@ public void testStateSnapshotAndRestore() throws Exception { taskStateManagerMock.getWaitForReportLatch().await(); - assertEquals(checkpointId, taskStateManagerMock.getReportedCheckpointId()); + assertThat(taskStateManagerMock.getReportedCheckpointId()).isEqualTo(checkpointId); LazyAsyncFunction.countDown(); @@ -722,7 +714,7 @@ public void testStateSnapshotAndRestore() throws Exception { @SuppressWarnings("rawtypes") @Test - public void testObjectReused() throws Exception { + void testObjectReused() throws Exception { TypeSerializer[] fieldSerializers = new TypeSerializer[] {IntSerializer.INSTANCE}; TupleSerializer inputSerializer = new TupleSerializer<>(Tuple1.class, fieldSerializers); @@ -782,7 +774,7 @@ public void testObjectReused() throws Exception { } @Test - public void testAsyncTimeoutFailure() throws Exception { + void testAsyncTimeoutFailure() throws Exception { testAsyncTimeout( new LazyAsyncFunction(), Optional.of(TimeoutException.class), @@ -790,7 +782,7 @@ public void testAsyncTimeoutFailure() throws Exception { } @Test - public void testAsyncTimeoutIgnore() throws Exception { + void testAsyncTimeoutIgnore() throws Exception { testAsyncTimeout( new IgnoreTimeoutLazyAsyncFunction(), Optional.empty(), @@ -843,12 +835,12 @@ private void testAsyncTimeout( "Output with watermark was not correct.", expectedOutput, testHarness.getOutput()); if (expectedException.isPresent()) { - assertTrue(mockEnvironment.getActualExternalFailureCause().isPresent()); - assertTrue( - ExceptionUtils.findThrowable( + assertThat(mockEnvironment.getActualExternalFailureCause()).isPresent(); + assertThat( + ExceptionUtils.findThrowable( mockEnvironment.getActualExternalFailureCause().get(), - expectedException.get()) - .isPresent()); + expectedException.get())) + .isPresent(); } } @@ -858,7 +850,7 @@ private void testAsyncTimeout( * StreamRecordQueueEntry. 
*/ @Test - public void testTimeoutCleanup() throws Exception { + void testTimeoutCleanup() throws Exception { OneInputStreamOperatorTestHarness harness = createTestHarness( new MyAsyncFunction(), TIMEOUT, 1, AsyncDataStream.OutputMode.UNORDERED); @@ -875,11 +867,10 @@ public void testTimeoutCleanup() throws Exception { } // check that we actually outputted the result of the single input - assertEquals( - Arrays.asList(new StreamRecord(42 * 2, 1L)), new ArrayList<>(harness.getOutput())); + assertThat(harness.getOutput()).containsOnly(new StreamRecord<>(42 * 2, 1L)); // check that we have cancelled our registered timeout - assertEquals(0, harness.getProcessingTimeService().getNumActiveTimers()); + assertThat(harness.getProcessingTimeService().getNumActiveTimers()).isZero(); } /** @@ -888,7 +879,7 @@ public void testTimeoutCleanup() throws Exception { * @see FLINK-22573 */ @Test - public void testTimeoutAfterComplete() throws Exception { + void testTimeoutAfterComplete() throws Exception { StreamTaskMailboxTestHarnessBuilder builder = new StreamTaskMailboxTestHarnessBuilder<>( OneInputStreamTask::new, BasicTypeInfo.INT_TYPE_INFO) @@ -914,10 +905,8 @@ public void testTimeoutAfterComplete() throws Exception { testTimer.get(); // handle normal completion call outputting the element in mailbox thread harness.processAll(); - assertEquals( - Collections.singleton(new StreamRecord<>(1)), - new HashSet<>(harness.getOutput())); - assertFalse("no timeout expected", TimeoutAfterCompletionTestFunction.TIMED_OUT.get()); + assertThat(harness.getOutput()).containsOnly(new StreamRecord<>(1)); + assertThat(TimeoutAfterCompletionTestFunction.TIMED_OUT).isFalse(); } } @@ -929,13 +918,13 @@ public void testTimeoutAfterComplete() throws Exception { * collected. */ @Test - public void testOrderedWaitUserExceptionHandling() throws Exception { + void testOrderedWaitUserExceptionHandling() throws Exception { testUserExceptionHandling( AsyncDataStream.OutputMode.ORDERED, AsyncRetryStrategies.NO_RETRY_STRATEGY); } @Test - public void testOrderedWaitUserExceptionHandlingWithRetry() throws Exception { + void testOrderedWaitUserExceptionHandlingWithRetry() throws Exception { testUserExceptionHandling(AsyncDataStream.OutputMode.ORDERED, exceptionRetryStrategy); } @@ -947,13 +936,13 @@ public void testOrderedWaitUserExceptionHandlingWithRetry() throws Exception { * collected. */ @Test - public void testUnorderedWaitUserExceptionHandling() throws Exception { + void testUnorderedWaitUserExceptionHandling() throws Exception { testUserExceptionHandling( AsyncDataStream.OutputMode.UNORDERED, AsyncRetryStrategies.NO_RETRY_STRATEGY); } @Test - public void testUnorderedWaitUserExceptionHandlingWithRetry() throws Exception { + void testUnorderedWaitUserExceptionHandlingWithRetry() throws Exception { testUserExceptionHandling(AsyncDataStream.OutputMode.UNORDERED, exceptionRetryStrategy); } @@ -980,7 +969,7 @@ private void testUserExceptionHandling( harness.close(); } - assertTrue(harness.getEnvironment().getActualExternalFailureCause().isPresent()); + assertThat(harness.getEnvironment().getActualExternalFailureCause()).isPresent(); } /** AsyncFunction which completes the result with an {@link Exception}. */ @@ -1002,13 +991,13 @@ public void asyncInvoke(Integer input, ResultFuture resultFuture) * handling means that a StreamElementQueueEntry is completed in case of a timeout exception. 
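A note on containsOnly, used for the single-record checks above: it constrains which distinct values appear, in any order and ignoring duplicates, so it is marginally weaker than assertEquals against a singleton list. Where exact cardinality and order matter, containsExactly is the strict analogue. An illustrative sketch:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;

class ContainsOnlySketch {

    @Test
    void containsOnlyIgnoresOrderAndDuplicates() {
        List<Integer> output = Arrays.asList(84, 84);

        // Passes: both elements match the single expected distinct value.
        assertThat(output).containsOnly(84);

        // The strict analogue of assertEquals(singletonList(84), output):
        assertThat(Arrays.asList(84)).containsExactly(84);
    }
}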
*/ @Test - public void testOrderedWaitTimeoutHandling() throws Exception { + void testOrderedWaitTimeoutHandling() throws Exception { testTimeoutExceptionHandling( AsyncDataStream.OutputMode.ORDERED, AsyncRetryStrategies.NO_RETRY_STRATEGY); } @Test - public void testOrderedWaitTimeoutHandlingWithRetry() throws Exception { + void testOrderedWaitTimeoutHandlingWithRetry() throws Exception { testTimeoutExceptionHandling( AsyncDataStream.OutputMode.ORDERED, emptyResultFixedDelayRetryStrategy); } @@ -1020,13 +1009,13 @@ public void testOrderedWaitTimeoutHandlingWithRetry() throws Exception { * handling means that a StreamElementQueueEntry is completed in case of a timeout exception. */ @Test - public void testUnorderedWaitTimeoutHandling() throws Exception { + void testUnorderedWaitTimeoutHandling() throws Exception { testTimeoutExceptionHandling( AsyncDataStream.OutputMode.UNORDERED, AsyncRetryStrategies.NO_RETRY_STRATEGY); } @Test - public void testUnorderedWaitTimeoutHandlingWithRetry() throws Exception { + void testUnorderedWaitTimeoutHandlingWithRetry() throws Exception { testTimeoutExceptionHandling( AsyncDataStream.OutputMode.UNORDERED, emptyResultFixedDelayRetryStrategy); } @@ -1057,8 +1046,9 @@ private void testTimeoutExceptionHandling( * *
    See FLINK-7949 */ - @Test(timeout = 10000) - public void testRestartWithFullQueue() throws Exception { + @Test + @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS) + void testRestartWithFullQueue() throws Exception { final int capacity = 10; // 1. create the snapshot which contains capacity + 1 elements @@ -1126,16 +1116,16 @@ public void testRestartWithFullQueue() throws Exception { .map(r -> ((StreamRecord) r).getValue()) .collect(Collectors.toList()); - assertThat(outputElements, Matchers.equalTo(expectedOutput)); + assertThat(outputElements).isEqualTo(expectedOutput); } @Test - public void testIgnoreAsyncOperatorRecordsOnDrain() throws Exception { + void testIgnoreAsyncOperatorRecordsOnDrain() throws Exception { testIgnoreAsyncOperatorRecordsOnDrain(AsyncRetryStrategies.NO_RETRY_STRATEGY); } @Test - public void testIgnoreAsyncOperatorRecordsOnDrainWithRetry() throws Exception { + void testIgnoreAsyncOperatorRecordsOnDrainWithRetry() throws Exception { testIgnoreAsyncOperatorRecordsOnDrain(emptyResultFixedDelayRetryStrategy); } @@ -1168,20 +1158,20 @@ private void testIgnoreAsyncOperatorRecordsOnDrain(AsyncRetryStrategy asyncRetry // then: All records from async operator should be ignored during drain since they will // be processed on recovery. harness.finishProcessing(); - assertTrue(harness.getOutput().isEmpty()); + assertThat(harness.getOutput()).isEmpty(); } } /** Test the AsyncWaitOperator with ordered mode and processing time. */ @Test - public void testProcessingTimeOrderedWithRetry() throws Exception { + void testProcessingTimeOrderedWithRetry() throws Exception { testProcessingTimeWithRetry( AsyncDataStream.OutputMode.ORDERED, new OddInputEmptyResultAsyncFunction()); } /** Test the AsyncWaitOperator with unordered mode and processing time. */ @Test - public void testProcessingTimeUnorderedWithRetry() throws Exception { + void testProcessingTimeUnorderedWithRetry() throws Exception { testProcessingTimeWithRetry( AsyncDataStream.OutputMode.UNORDERED, new OddInputEmptyResultAsyncFunction()); } @@ -1191,7 +1181,7 @@ public void testProcessingTimeUnorderedWithRetry() throws Exception { * processing time. */ @Test - public void testProcessingTimeRepeatedCompleteUnorderedWithRetry() throws Exception { + void testProcessingTimeRepeatedCompleteUnorderedWithRetry() throws Exception { testProcessingTimeWithRetry( AsyncDataStream.OutputMode.UNORDERED, new IllWrittenOddInputEmptyResultAsyncFunction()); @@ -1202,7 +1192,7 @@ public void testProcessingTimeRepeatedCompleteUnorderedWithRetry() throws Except * processing time. */ @Test - public void testProcessingTimeRepeatedCompleteOrderedWithRetry() throws Exception { + void testProcessingTimeRepeatedCompleteOrderedWithRetry() throws Exception { testProcessingTimeWithRetry( AsyncDataStream.OutputMode.ORDERED, new IllWrittenOddInputEmptyResultAsyncFunction()); @@ -1264,7 +1254,7 @@ private void testProcessingTimeWithRetry( * processing time. */ @Test - public void testProcessingTimeWithTimeoutFunctionUnorderedWithRetry() throws Exception { + void testProcessingTimeWithTimeoutFunctionUnorderedWithRetry() throws Exception { testProcessingTimeAlwaysTimeoutFunctionWithRetry(AsyncDataStream.OutputMode.UNORDERED); } @@ -1273,7 +1263,7 @@ public void testProcessingTimeWithTimeoutFunctionUnorderedWithRetry() throws Exc * processing time. 
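The method-level timeout migration above is mechanical: JUnit 4's @Test(timeout = 10000) becomes @Test plus a separate @Timeout annotation. One behavioral difference worth remembering is that the JUnit 4 attribute ran the test body on a separate thread, whereas Jupiter's @Timeout keeps it on the calling thread unless a separate thread mode is requested. A minimal sketch:

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutMigrationSketch {

    // JUnit 4 equivalent: @Test(timeout = 10000)
    @Test
    @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS)
    void finishesWellUnderTheLimit() throws InterruptedException {
        Thread.sleep(5);
    }
}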
*/ @Test - public void testProcessingTimeWithTimeoutFunctionOrderedWithRetry() throws Exception { + void testProcessingTimeWithTimeoutFunctionOrderedWithRetry() throws Exception { testProcessingTimeAlwaysTimeoutFunctionWithRetry(AsyncDataStream.OutputMode.ORDERED); } @@ -1326,8 +1316,8 @@ private void testProcessingTimeAlwaysTimeoutFunctionWithRetry(AsyncDataStream.Ou // verify the elements' try count never beyond 2 (use <= instead of == to avoid unstable // case when test machine under high load) - assertTrue(asyncFunction.getTryCount(1) <= 2); - assertTrue(asyncFunction.getTryCount(2) <= 2); + assertThat(asyncFunction.getTryCount(1)).isLessThanOrEqualTo(2); + assertThat(asyncFunction.getTryCount(2)).isLessThanOrEqualTo(2); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/OrderedStreamElementQueueTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/OrderedStreamElementQueueTest.java index fddd12c975d5c..e5f83ab901d6d 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/OrderedStreamElementQueueTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/OrderedStreamElementQueueTest.java @@ -22,10 +22,8 @@ import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.streamrecord.StreamElement; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Collections; @@ -33,14 +31,15 @@ import static org.apache.flink.streaming.api.operators.async.queue.QueueUtil.popCompleted; import static org.apache.flink.streaming.api.operators.async.queue.QueueUtil.putSuccessfully; +import static org.assertj.core.api.Assertions.assertThat; /** {@link OrderedStreamElementQueue} specific tests. */ -public class OrderedStreamElementQueueTest extends TestLogger { +public class OrderedStreamElementQueueTest { /** * Tests that only the head element is pulled from the ordered queue if it has been completed. 
*/ @Test - public void testCompletionOrder() { + void testCompletionOrder() { final OrderedStreamElementQueue queue = new OrderedStreamElementQueue<>(4); ResultFuture entry1 = putSuccessfully(queue, new StreamRecord<>(1, 0L)); @@ -48,16 +47,16 @@ public void testCompletionOrder() { putSuccessfully(queue, new Watermark(2L)); ResultFuture entry4 = putSuccessfully(queue, new StreamRecord<>(3, 3L)); - Assert.assertEquals(Collections.emptyList(), popCompleted(queue)); - Assert.assertEquals(4, queue.size()); - Assert.assertFalse(queue.isEmpty()); + assertThat(popCompleted(queue)).isEmpty(); + assertThat(queue.size()).isEqualTo(4L); + assertThat(queue.isEmpty()).isFalse(); entry2.complete(Collections.singleton(11)); entry4.complete(Collections.singleton(13)); - Assert.assertEquals(Collections.emptyList(), popCompleted(queue)); - Assert.assertEquals(4, queue.size()); - Assert.assertFalse(queue.isEmpty()); + assertThat(popCompleted(queue)).isEmpty(); + assertThat(queue.size()).isEqualTo(4L); + assertThat(queue.isEmpty()).isFalse(); entry1.complete(Collections.singleton(10)); @@ -67,8 +66,8 @@ public void testCompletionOrder() { new StreamRecord<>(11, 1L), new Watermark(2L), new StreamRecord<>(13, 3L)); - Assert.assertEquals(expected, popCompleted(queue)); - Assert.assertEquals(0, queue.size()); - Assert.assertTrue(queue.isEmpty()); + assertThat(popCompleted(queue)).isEqualTo(expected); + assertThat(queue.size()).isZero(); + assertThat(queue.isEmpty()).isTrue(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/QueueUtil.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/QueueUtil.java index 0dcbe62434b74..796f0892e12af 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/QueueUtil.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/QueueUtil.java @@ -26,21 +26,20 @@ import java.util.List; import java.util.Optional; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; /** Utility for putting elements inside a {@link StreamElementQueue}. 
*/ class QueueUtil { static ResultFuture putSuccessfully( StreamElementQueue queue, StreamElement streamElement) { Optional> resultFuture = queue.tryPut(streamElement); - assertTrue(resultFuture.isPresent()); + assertThat(resultFuture).isPresent(); return resultFuture.get(); } static void putUnsuccessfully(StreamElementQueue queue, StreamElement streamElement) { Optional> resultFuture = queue.tryPut(streamElement); - assertFalse(resultFuture.isPresent()); + assertThat(resultFuture).isNotPresent(); } /** diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/StreamElementQueueTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/StreamElementQueueTest.java index 685964d2ce2f0..dd566405361b3 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/StreamElementQueueTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/StreamElementQueueTest.java @@ -22,13 +22,12 @@ import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameters; import org.apache.flink.util.Preconditions; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; import java.util.Arrays; import java.util.Collection; @@ -37,25 +36,23 @@ import static org.apache.flink.streaming.api.operators.async.queue.QueueUtil.popCompleted; import static org.apache.flink.streaming.api.operators.async.queue.QueueUtil.putSuccessfully; import static org.apache.flink.streaming.api.operators.async.queue.QueueUtil.putUnsuccessfully; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for the basic functionality of {@link StreamElementQueue}. The basic operations consist of * putting and polling elements from the queue. 
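QueueUtil above leans on AssertJ's Optional support: isPresent() and isNotPresent() replace assertTrue/assertFalse on Optional.isPresent(), and hasValue() additionally pins the content (as with getRestoredCheckpointId earlier in the patch). A self-contained sketch:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Optional;
import org.junit.jupiter.api.Test;

class OptionalAssertionSketch {

    @Test
    void optionalPresenceAndValue() {
        Optional<Integer> some = Optional.of(42);
        Optional<Integer> none = Optional.empty();

        // Presence plus content in one chain.
        assertThat(some).isPresent().hasValue(42);

        // Replaces assertFalse(none.isPresent()).
        assertThat(none).isNotPresent();
    }
}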
*/ -@RunWith(Parameterized.class) -public class StreamElementQueueTest extends TestLogger { - @Parameterized.Parameters - public static Collection outputModes() { +@ExtendWith(ParameterizedTestExtension.class) +class StreamElementQueueTest { + @Parameters + private static Collection outputModes() { return Arrays.asList( AsyncDataStream.OutputMode.ORDERED, AsyncDataStream.OutputMode.UNORDERED); } private final AsyncDataStream.OutputMode outputMode; - public StreamElementQueueTest(AsyncDataStream.OutputMode outputMode) { + StreamElementQueueTest(AsyncDataStream.OutputMode outputMode) { this.outputMode = Preconditions.checkNotNull(outputMode); } @@ -70,83 +67,81 @@ private StreamElementQueue createStreamElementQueue(int capacity) { } } - @Test - public void testPut() { + @TestTemplate + void testPut() { StreamElementQueue queue = createStreamElementQueue(2); Watermark watermark = new Watermark(0L); StreamRecord streamRecord = new StreamRecord<>(42, 1L); // add two elements to reach capacity - assertTrue(queue.tryPut(watermark).isPresent()); - assertTrue(queue.tryPut(streamRecord).isPresent()); + assertThat(queue.tryPut(watermark)).isPresent(); + assertThat(queue.tryPut(streamRecord)).isPresent(); - assertEquals(2, queue.size()); + assertThat(queue.size()).isEqualTo(2); // queue full, cannot add new element - assertFalse(queue.tryPut(new Watermark(2L)).isPresent()); + assertThat(queue.tryPut(new Watermark(2L))).isNotPresent(); // check if expected values are returned (for checkpointing) - assertEquals(Arrays.asList(watermark, streamRecord), queue.values()); + assertThat(queue.values()).containsExactly(watermark, streamRecord); } - @Test - public void testPop() { + @TestTemplate + void testPop() { StreamElementQueue queue = createStreamElementQueue(2); // add two elements to reach capacity putSuccessfully(queue, new Watermark(0L)); ResultFuture recordResult = putSuccessfully(queue, new StreamRecord<>(42, 1L)); - assertEquals(2, queue.size()); + assertThat(queue.size()).isEqualTo(2); // remove completed elements (watermarks are always completed) - assertEquals(Arrays.asList(new Watermark(0L)), popCompleted(queue)); - assertEquals(1, queue.size()); + assertThat(popCompleted(queue)).containsExactly(new Watermark(0L)); + assertThat(queue.size()).isOne(); // now complete the stream record recordResult.complete(Collections.singleton(43)); - assertEquals(Arrays.asList(new StreamRecord<>(43, 1L)), popCompleted(queue)); - assertEquals(0, queue.size()); - assertTrue(queue.isEmpty()); + assertThat(popCompleted(queue)).containsExactly(new StreamRecord<>(43, 1L)); + assertThat(queue.size()).isZero(); + assertThat(queue.isEmpty()).isTrue(); } /** Tests that a put operation fails if the queue is full. 
*/ - @Test - public void testPutOnFull() throws Exception { + @TestTemplate + void testPutOnFull() { final StreamElementQueue queue = createStreamElementQueue(1); // fill up queue ResultFuture resultFuture = putSuccessfully(queue, new StreamRecord<>(42, 0L)); - assertEquals(1, queue.size()); + assertThat(queue.size()).isOne(); // cannot add more putUnsuccessfully(queue, new StreamRecord<>(43, 1L)); // popping the completed element frees the queue again resultFuture.complete(Collections.singleton(42 * 42)); - assertEquals(Arrays.asList(new StreamRecord(42 * 42, 0L)), popCompleted(queue)); + assertThat(popCompleted(queue)).containsExactly(new StreamRecord<>(42 * 42, 0L)); // now the put operation should complete putSuccessfully(queue, new StreamRecord<>(43, 1L)); } /** Tests two adjacent watermarks can be processed successfully. */ - @Test - public void testWatermarkOnly() { + @TestTemplate + void testWatermarkOnly() { final StreamElementQueue queue = createStreamElementQueue(2); putSuccessfully(queue, new Watermark(2L)); putSuccessfully(queue, new Watermark(5L)); - Assert.assertEquals(2, queue.size()); - Assert.assertFalse(queue.isEmpty()); + assertThat(queue.size()).isEqualTo(2); + assertThat(queue.isEmpty()).isFalse(); - Assert.assertEquals( - Arrays.asList(new Watermark(2L), new Watermark(5L)), popCompleted(queue)); - Assert.assertEquals(0, queue.size()); - Assert.assertTrue(queue.isEmpty()); - Assert.assertEquals(Collections.emptyList(), popCompleted(queue)); + assertThat(popCompleted(queue)).containsExactly(new Watermark(2L), new Watermark(5L)); + assertThat(queue.size()).isZero(); + assertThat(popCompleted(queue)).isEmpty(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/UnorderedStreamElementQueueTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/UnorderedStreamElementQueueTest.java index 2b10371fbceaf..35b8292aef482 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/UnorderedStreamElementQueueTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/UnorderedStreamElementQueueTest.java @@ -21,22 +21,20 @@ import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; -import java.util.Collections; import static org.apache.flink.streaming.api.operators.async.queue.QueueUtil.popCompleted; import static org.apache.flink.streaming.api.operators.async.queue.QueueUtil.putSuccessfully; +import static org.assertj.core.api.Assertions.assertThat; /** {@link UnorderedStreamElementQueue} specific tests. */ -public class UnorderedStreamElementQueueTest extends TestLogger { +class UnorderedStreamElementQueueTest { /** Tests that only elements before the oldest watermark are returned if they are completed. 
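The queue tests above compress the usual size-and-content bookkeeping into dedicated assertions: hasSize, isOne, and isZero for counts, isEmpty for emptiness, and containsExactly for ordered contents. A minimal sketch against a plain java.util queue:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.ArrayDeque;
import java.util.Queue;
import org.junit.jupiter.api.Test;

class QueueAssertionSketch {

    @Test
    void queueContentsInOrder() {
        Queue<String> queue = new ArrayDeque<>();
        queue.add("first");
        queue.add("second");

        // Size and ordered contents in one chain.
        assertThat(queue).hasSize(2).containsExactly("first", "second");

        queue.clear();
        assertThat(queue).isEmpty();
        assertThat(queue.size()).isZero();
    }
}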
*/ @Test - public void testCompletionOrder() { + void testCompletionOrder() { final UnorderedStreamElementQueue queue = new UnorderedStreamElementQueue<>(8); ResultFuture record1 = putSuccessfully(queue, new StreamRecord<>(1, 0L)); @@ -48,52 +46,51 @@ public void testCompletionOrder() { ResultFuture record5 = putSuccessfully(queue, new StreamRecord<>(5, 6L)); ResultFuture record6 = putSuccessfully(queue, new StreamRecord<>(6, 7L)); - Assert.assertEquals(Collections.emptyList(), popCompleted(queue)); - Assert.assertEquals(8, queue.size()); - Assert.assertFalse(queue.isEmpty()); + assertThat(popCompleted(queue)).isEmpty(); + assertThat(queue.size()).isEqualTo(8); + assertThat(queue.isEmpty()).isFalse(); // this should not make any item completed, because R3 is behind W1 record3.complete(Arrays.asList(13)); - Assert.assertEquals(Collections.emptyList(), popCompleted(queue)); - Assert.assertEquals(8, queue.size()); - Assert.assertFalse(queue.isEmpty()); + assertThat(popCompleted(queue)).isEmpty(); + assertThat(queue.size()).isEqualTo(8); + assertThat(queue.isEmpty()).isFalse(); record2.complete(Arrays.asList(12)); - Assert.assertEquals(Arrays.asList(new StreamRecord<>(12, 1L)), popCompleted(queue)); - Assert.assertEquals(7, queue.size()); - Assert.assertFalse(queue.isEmpty()); + assertThat(popCompleted(queue)).containsExactly(new StreamRecord<>(12, 1L)); + assertThat(queue.size()).isEqualTo(7); + assertThat(queue.isEmpty()).isFalse(); // Should not be completed because R1 has not been completed yet record6.complete(Arrays.asList(16)); record4.complete(Arrays.asList(14)); - Assert.assertEquals(Collections.emptyList(), popCompleted(queue)); - Assert.assertEquals(7, queue.size()); - Assert.assertFalse(queue.isEmpty()); + assertThat(popCompleted(queue)).isEmpty(); + assertThat(queue.size()).isEqualTo(7); + assertThat(queue.isEmpty()).isFalse(); // Now W1, R3, R4 and W2 are completed and should be pollable record1.complete(Arrays.asList(11)); - Assert.assertEquals( - Arrays.asList( + assertThat(popCompleted(queue)) + .containsExactly( new StreamRecord<>(11, 0L), new Watermark(2L), new StreamRecord<>(13, 3L), new StreamRecord<>(14, 4L), new Watermark(5L), - new StreamRecord<>(16, 7L)), - popCompleted(queue)); - Assert.assertEquals(1, queue.size()); - Assert.assertFalse(queue.isEmpty()); + new StreamRecord<>(16, 7L)); + assertThat(queue.size()).isOne(); + assertThat(queue.isEmpty()).isFalse(); // only R5 left in the queue record5.complete(Arrays.asList(15)); - Assert.assertEquals(Arrays.asList(new StreamRecord<>(15, 6L)), popCompleted(queue)); - Assert.assertEquals(0, queue.size()); - Assert.assertTrue(queue.isEmpty()); - Assert.assertEquals(Collections.emptyList(), popCompleted(queue)); + assertThat(popCompleted(queue)).containsExactly(new StreamRecord<>(15, 6L)); + assertThat(queue.size()).isZero(); + assertThat(queue.isEmpty()).isTrue(); + assertThat(popCompleted(queue)).isEmpty(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithKeyedOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithKeyedOperatorTest.java index d7d2ba4aab40e..54dbfd3eaac7b 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithKeyedOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithKeyedOperatorTest.java @@ -39,7 +39,7 @@ import org.apache.flink.util.OutputTag; import 
org.apache.flink.util.Preconditions; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Collections; @@ -55,20 +55,18 @@ import java.util.function.Function; import static org.apache.flink.runtime.state.KeyGroupRangeAssignment.assignKeyToParallelOperator; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for the {@link CoBroadcastWithKeyedOperator}. */ -public class CoBroadcastWithKeyedOperatorTest { +class CoBroadcastWithKeyedOperatorTest { private static final MapStateDescriptor STATE_DESCRIPTOR = new MapStateDescriptor<>( "broadcast-state", BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO); @Test - public void testKeyQuerying() throws Exception { + void testKeyQuerying() throws Exception { class KeyQueryingProcessFunction extends KeyedBroadcastProcessFunction< @@ -78,7 +76,7 @@ class KeyQueryingProcessFunction public void processElement( Tuple2 value, ReadOnlyContext ctx, Collector out) throws Exception { - assertTrue("Did not get expected key.", ctx.getCurrentKey().equals(value.f0)); + assertThat(ctx.getCurrentKey()).isEqualTo(value.f0); // we check that we receive this output, to ensure that the assert was actually // checked @@ -116,7 +114,7 @@ public void processBroadcastElement(String value, Context ctx, Collector /** Test the iteration over the keyed state on the broadcast side. */ @Test - public void testAccessToKeyedStateIt() throws Exception { + void testAccessToKeyedStateIt() throws Exception { final List test1content = new ArrayList<>(); test1content.add("test1"); test1content.add("test1"); @@ -198,7 +196,7 @@ public void process(String key, ListState state) throws Exception { while (it.hasNext()) { list.add(it.next()); } - assertEquals(expectedKeyedStates.get(key), list); + assertThat(list).isEqualTo(expectedKeyedStates.get(key)); } }); } @@ -211,7 +209,7 @@ public void processElement(String value, ReadOnlyContext ctx, Collector } @Test - public void testFunctionWithTimer() throws Exception { + void testFunctionWithTimer() throws Exception { final String expectedKey = "6"; try (TwoInputStreamOperatorTestHarness testHarness = @@ -280,13 +278,13 @@ public void processElement(String value, ReadOnlyContext ctx, Collector @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(expectedKey, ctx.getCurrentKey()); + assertThat(ctx.getCurrentKey()).isEqualTo(expectedKey); out.collect("TIMER:" + timestamp); } } @Test - public void testSideOutput() throws Exception { + void testSideOutput() throws Exception { try (TwoInputStreamOperatorTestHarness testHarness = getInitializedTestHarness( BasicTypeInfo.STRING_TYPE_INFO, @@ -358,7 +356,7 @@ public void processElement(String value, ReadOnlyContext ctx, Collector } @Test - public void testFunctionWithBroadcastState() throws Exception { + void testFunctionWithBroadcastState() throws Exception { final Map expectedBroadcastState = new HashMap<>(); expectedBroadcastState.put("5.key", 5); expectedBroadcastState.put("34.key", 34); @@ -391,29 +389,29 @@ public void testFunctionWithBroadcastState() throws Exception { testHarness.processWatermark2(new Watermark(50L)); Queue output = testHarness.getOutput(); - assertEquals(3L, output.size()); + assertThat(output).hasSize(3); 
Object firstRawWm = output.poll(); - assertTrue(firstRawWm instanceof Watermark); + assertThat(firstRawWm).isInstanceOf(Watermark.class); Watermark firstWm = (Watermark) firstRawWm; - assertEquals(10L, firstWm.getTimestamp()); + assertThat(firstWm.getTimestamp()).isEqualTo(10L); Object rawOutputElem = output.poll(); - assertTrue(rawOutputElem instanceof StreamRecord); + assertThat(rawOutputElem).isInstanceOf(StreamRecord.class); StreamRecord outputRec = (StreamRecord) rawOutputElem; - assertTrue(outputRec.getValue() instanceof String); + assertThat(outputRec.getValue()).isInstanceOf(String.class); String outputElem = (String) outputRec.getValue(); expectedBroadcastState.put("51.key", 51); List> expectedEntries = new ArrayList<>(); expectedEntries.addAll(expectedBroadcastState.entrySet()); String expected = "TS:41 " + mapToString(expectedEntries); - assertEquals(expected, outputElem); + assertThat(outputElem).isEqualTo(expected); Object secondRawWm = output.poll(); - assertTrue(secondRawWm instanceof Watermark); + assertThat(secondRawWm).isInstanceOf(Watermark.class); Watermark secondWm = (Watermark) secondRawWm; - assertEquals(50L, secondWm.getTimestamp()); + assertThat(secondWm.getTimestamp()).isEqualTo(50L); } } @@ -454,14 +452,13 @@ public void processElement(String value, ReadOnlyContext ctx, Collector Iterator> iter = broadcastStateIt.iterator(); for (int i = 0; i < expectedBroadcastState.size(); i++) { - assertTrue(iter.hasNext()); + assertThat(iter).hasNext(); Map.Entry entry = iter.next(); - assertTrue(expectedBroadcastState.containsKey(entry.getKey())); - assertEquals(expectedBroadcastState.get(entry.getKey()), entry.getValue()); + assertThat(expectedBroadcastState).containsEntry(entry.getKey(), entry.getValue()); } - assertFalse(iter.hasNext()); + assertThat(iter).isExhausted(); ctx.timerService().registerEventTimeTimer(timerTs); } @@ -477,14 +474,14 @@ public void onTimer(long timestamp, OnTimerContext ctx, Collector out) map.add(iter.next()); } - assertEquals(expectedKey, ctx.getCurrentKey()); + assertThat(ctx.getCurrentKey()).isEqualTo(expectedKey); final String mapToStr = mapToString(map); out.collect("TS:" + timestamp + " " + mapToStr); } } @Test - public void testScaleUp() throws Exception { + void testScaleUp() throws Exception { final Set keysToRegister = new HashSet<>(); keysToRegister.add("test1"); keysToRegister.add("test2"); @@ -567,28 +564,28 @@ public void testScaleUp() throws Exception { Queue output2 = testHarness2.getOutput(); Queue output3 = testHarness3.getOutput(); - assertEquals(expected.size(), output1.size()); + assertThat(output1).hasSameSizeAs(expected); for (Object o : output1) { StreamRecord rec = (StreamRecord) o; - assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } - assertEquals(expected.size(), output2.size()); + assertThat(output2).hasSameSizeAs(expected); for (Object o : output2) { StreamRecord rec = (StreamRecord) o; - assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } - assertEquals(expected.size(), output3.size()); + assertThat(output3).hasSameSizeAs(expected); for (Object o : output3) { StreamRecord rec = (StreamRecord) o; - assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } } } @Test - public void testScaleDown() throws Exception { + void testScaleDown() throws Exception { final Set keysToRegister = new HashSet<>(); keysToRegister.add("test1"); keysToRegister.add("test2"); @@ -669,16 +666,16 @@ public void 
testScaleDown() throws Exception { Queue output1 = testHarness1.getOutput(); Queue output2 = testHarness2.getOutput(); - assertEquals(expected.size(), output1.size()); + assertThat(output1).hasSameSizeAs(expected); for (Object o : output1) { StreamRecord rec = (StreamRecord) o; - assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } - assertEquals(expected.size(), output2.size()); + assertThat(output2).hasSameSizeAs(expected); for (Object o : output2) { StreamRecord rec = (StreamRecord) o; - assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } } } @@ -735,10 +732,7 @@ public void processElement(String value, ReadOnlyContext ctx, Collector } @Test - public void testNoKeyedStateOnBroadcastSide() throws Exception { - - boolean exceptionThrown = false; - + void testNoKeyedStateOnBroadcastSide() throws Exception { try (TwoInputStreamOperatorTestHarness testHarness = getInitializedTestHarness( BasicTypeInfo.STRING_TYPE_INFO, @@ -755,9 +749,14 @@ public void testNoKeyedStateOnBroadcastSide() throws Exception { public void processBroadcastElement( Integer value, Context ctx, Collector out) throws Exception { - getRuntimeContext() - .getState(valueState) - .value(); // this should fail + assertThatThrownBy( + () -> + getRuntimeContext() + .getState(valueState) + .value()) + .isInstanceOf(NullPointerException.class) + .hasMessage( + "No key set. This method should not be called outside of a keyed context."); } @Override @@ -770,15 +769,6 @@ public void processElement( testHarness.processWatermark1(new Watermark(10L)); testHarness.processWatermark2(new Watermark(10L)); testHarness.processElement2(new StreamRecord<>(5, 12L)); - } catch (NullPointerException e) { - assertEquals( - "No key set. This method should not be called outside of a keyed context.", - e.getMessage()); - exceptionThrown = true; - } - - if (!exceptionThrown) { - fail("No exception thrown"); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithNonKeyedOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithNonKeyedOperatorTest.java index e33a4d25021bf..3ab85f7fbe8a0 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithNonKeyedOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithNonKeyedOperatorTest.java @@ -32,8 +32,7 @@ import org.apache.flink.util.OutputTag; import org.apache.flink.util.Preconditions; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.HashSet; @@ -42,8 +41,11 @@ import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + /** Tests for the {@link CoBroadcastWithNonKeyedOperator}. 
*/ -public class CoBroadcastWithNonKeyedOperatorTest { +class CoBroadcastWithNonKeyedOperatorTest { private static final MapStateDescriptor STATE_DESCRIPTOR = new MapStateDescriptor<>( @@ -56,7 +58,7 @@ public class CoBroadcastWithNonKeyedOperatorTest { BasicTypeInfo.STRING_TYPE_INFO); @Test - public void testMultiStateSupport() throws Exception { + void testMultiStateSupport() throws Exception { try (TwoInputStreamOperatorTestHarness testHarness = getInitializedTestHarness( new FunctionWithMultipleStates(), STATE_DESCRIPTOR, STATE_DESCRIPTOR_A)) { @@ -105,7 +107,7 @@ public void processElement(String value, ReadOnlyContext ctx, Collector } @Test - public void testBroadcastState() throws Exception { + void testBroadcastState() throws Exception { final Set keysToRegister = new HashSet<>(); keysToRegister.add("test1"); @@ -169,14 +171,14 @@ public void processElement(String value, ReadOnlyContext ctx, Collector retrievedKeySet.add(entry.getKey()); } - Assert.assertEquals(keysToRegister, retrievedKeySet); + assertThat(retrievedKeySet).isEqualTo(keysToRegister); out.collect(value + "WM:" + ctx.currentWatermark() + " TS:" + ctx.timestamp()); } } @Test - public void testSideOutput() throws Exception { + void testSideOutput() throws Exception { try (TwoInputStreamOperatorTestHarness testHarness = getInitializedTestHarness(new FunctionWithSideOutput(), STATE_DESCRIPTOR)) { @@ -244,7 +246,7 @@ public void processElement(String value, ReadOnlyContext ctx, Collector } @Test - public void testScaleUp() throws Exception { + void testScaleUp() throws Exception { final Set keysToRegister = new HashSet<>(); keysToRegister.add("test1"); keysToRegister.add("test2"); @@ -316,28 +318,28 @@ public void testScaleUp() throws Exception { Queue output2 = testHarness2.getOutput(); Queue output3 = testHarness3.getOutput(); - Assert.assertEquals(expected.size(), output1.size()); + assertThat(output1).hasSameSizeAs(expected); for (Object o : output1) { StreamRecord rec = (StreamRecord) o; - Assert.assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } - Assert.assertEquals(expected.size(), output2.size()); + assertThat(output2).hasSameSizeAs(expected); for (Object o : output2) { StreamRecord rec = (StreamRecord) o; - Assert.assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } - Assert.assertEquals(expected.size(), output3.size()); + assertThat(output3).hasSameSizeAs(expected); for (Object o : output3) { StreamRecord rec = (StreamRecord) o; - Assert.assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } } } @Test - public void testScaleDown() throws Exception { + void testScaleDown() throws Exception { final Set keysToRegister = new HashSet<>(); keysToRegister.add("test1"); keysToRegister.add("test2"); @@ -409,16 +411,16 @@ public void testScaleDown() throws Exception { Queue output1 = testHarness1.getOutput(); Queue output2 = testHarness2.getOutput(); - Assert.assertEquals(expected.size(), output1.size()); + assertThat(output1).hasSameSizeAs(expected); for (Object o : output1) { StreamRecord rec = (StreamRecord) o; - Assert.assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } - Assert.assertEquals(expected.size(), output2.size()); + assertThat(output2).hasSameSizeAs(expected); for (Object o : output2) { StreamRecord rec = (StreamRecord) o; - Assert.assertTrue(expected.contains(rec.getValue())); + assertThat(rec.getValue()).isIn(expected); } } } @@ -454,10 
+456,7 @@ public void processElement(String value, ReadOnlyContext ctx, Collector } @Test - public void testNoKeyedStateOnBroadcastSide() throws Exception { - - boolean exceptionThrown = false; - + void testNoKeyedStateOnBroadcastSide() throws Exception { final ValueStateDescriptor valueState = new ValueStateDescriptor<>("any", BasicTypeInfo.STRING_TYPE_INFO); @@ -470,9 +469,17 @@ public void testNoKeyedStateOnBroadcastSide() throws Exception { public void processBroadcastElement( Integer value, Context ctx, Collector out) throws Exception { - getRuntimeContext() - .getState(valueState) - .value(); // this should fail + assertThatThrownBy( + () -> + getRuntimeContext() + .getState(valueState) + .value()) + .isInstanceOf(NullPointerException.class) + .hasMessage( + String.format( + "Keyed state '%s' with type %s can only be used on a 'keyed stream', i.e., after a 'keyBy()' operation.", + valueState.getName(), + valueState.getType())); } @Override @@ -485,22 +492,11 @@ public void processElement( testHarness.processWatermark1(new Watermark(10L)); testHarness.processWatermark2(new Watermark(10L)); testHarness.processElement2(new StreamRecord<>(5, 12L)); - } catch (NullPointerException e) { - Assert.assertEquals( - String.format( - "Keyed state '%s' with type %s can only be used on a 'keyed stream', i.e., after a 'keyBy()' operation.", - valueState.getName(), valueState.getType()), - e.getMessage()); - exceptionThrown = true; - } - - if (!exceptionThrown) { - Assert.fail("No exception thrown"); } } @Test - public void testNoKeyedStateOnNonBroadcastSide() throws Exception { + void testNoKeyedStateOnNonBroadcastSide() throws Exception { boolean exceptionThrown = false; @@ -523,25 +519,22 @@ public void processBroadcastElement( public void processElement( String value, ReadOnlyContext ctx, Collector out) throws Exception { - getRuntimeContext() - .getState(valueState) - .value(); // this should fail + assertThatThrownBy( + () -> + getRuntimeContext() + .getState(valueState) + .value()) + .isInstanceOf(NullPointerException.class) + .hasMessage( + String.format( + "Keyed state '%s' with type %s can only be used on a 'keyed stream', i.e., after a 'keyBy()' operation.", + valueState.getName(), + valueState.getType())); } })) { testHarness.processWatermark1(new Watermark(10L)); testHarness.processWatermark2(new Watermark(10L)); testHarness.processElement1(new StreamRecord<>("5", 12L)); - } catch (NullPointerException e) { - Assert.assertEquals( - String.format( - "Keyed state '%s' with type %s can only be used on a 'keyed stream', i.e., after a 'keyBy()' operation.", - valueState.getName(), valueState.getType()), - e.getMessage()); - exceptionThrown = true; - } - - if (!exceptionThrown) { - Assert.fail("No exception thrown"); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoProcessOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoProcessOperatorTest.java index ccc825f25bc4d..7fd24ae442b5d 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoProcessOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoProcessOperatorTest.java @@ -24,17 +24,16 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.streaming.util.TwoInputStreamOperatorTestHarness; import org.apache.flink.util.Collector; -import org.apache.flink.util.TestLogger; -import org.junit.Test; +import org.junit.jupiter.api.Test; 
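The testNoKeyedStateOnBroadcastSide rewrites above replace the JUnit 4 try/catch-plus-fail() idiom with assertThatThrownBy, which fails the test when nothing is thrown and otherwise exposes the exception for fluent checks. A compilable toy version of that pattern, where accessState() is a hypothetical stand-in for the keyed-state access in the real tests:

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThatThrownBy;

class ThrownByMigrationSketch {

    @Test
    void missingKeyIsReportedInline() {
        // JUnit 4 idiom being replaced:
        //   try { accessState(); fail("No exception thrown"); }
        //   catch (NullPointerException e) { assertEquals("...", e.getMessage()); }
        assertThatThrownBy(this::accessState)
                .isInstanceOf(NullPointerException.class)
                .hasMessage(
                        "No key set. This method should not be called outside of a keyed context.");
    }

    // Simulates the failure raised when keyed state is read without a current key.
    private void accessState() {
        throw new NullPointerException(
                "No key set. This method should not be called outside of a keyed context.");
    }
}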
import java.util.concurrent.ConcurrentLinkedQueue; /** Tests {@link CoProcessOperator}. */ -public class CoProcessOperatorTest extends TestLogger { +class CoProcessOperatorTest { @Test - public void testTimestampAndWatermarkQuerying() throws Exception { + void testTimestampAndWatermarkQuerying() throws Exception { CoProcessOperator operator = new CoProcessOperator<>(new WatermarkQueryingProcessFunction()); @@ -67,7 +66,7 @@ public void testTimestampAndWatermarkQuerying() throws Exception { } @Test - public void testTimestampAndProcessingTimeQuerying() throws Exception { + void testTimestampAndProcessingTimeQuerying() throws Exception { CoProcessOperator operator = new CoProcessOperator<>(new ProcessingTimeQueryingProcessFunction()); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamFlatMapTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamFlatMapTest.java index 3f4c2d13f6e10..b690b7a611c24 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamFlatMapTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamFlatMapTest.java @@ -26,12 +26,13 @@ import org.apache.flink.streaming.util.TwoInputStreamOperatorTestHarness; import org.apache.flink.util.Collector; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.Serializable; import java.util.concurrent.ConcurrentLinkedQueue; +import static org.assertj.core.api.Assertions.assertThat; + /** * Tests for {@link CoStreamFlatMap}. These test that: * @@ -41,7 +42,7 @@ *
• Watermarks are correctly forwarded * */ -public class CoStreamFlatMapTest implements Serializable { +class CoStreamFlatMapTest implements Serializable { private static final long serialVersionUID = 1L; private static final class MyCoFlatMap implements CoFlatMapFunction { @@ -61,7 +62,7 @@ public void flatMap2(Integer value, Collector coll) { } @Test - public void testCoFlatMap() throws Exception { + void testCoFlatMap() throws Exception { CoStreamFlatMap operator = new CoStreamFlatMap(new MyCoFlatMap()); @@ -107,7 +108,7 @@ public void testCoFlatMap() throws Exception { } @Test - public void testOpenClose() throws Exception { + void testOpenClose() throws Exception { CoStreamFlatMap operator = new CoStreamFlatMap(new TestOpenCloseCoFlatMapFunction()); @@ -123,10 +124,10 @@ public void testOpenClose() throws Exception { testHarness.close(); - Assert.assertTrue( - "RichFunction methods where not called.", - TestOpenCloseCoFlatMapFunction.closeCalled); - Assert.assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0); + assertThat(TestOpenCloseCoFlatMapFunction.closeCalled) + .as("RichFunction methods were not called.") + .isTrue(); + assertThat(testHarness.getOutput()).isNotEmpty(); } // This must only be used in one test, otherwise the static fields will be changed @@ -141,34 +142,26 @@ private static class TestOpenCloseCoFlatMapFunction @Override public void open(OpenContext openContext) throws Exception { super.open(openContext); - if (closeCalled) { - Assert.fail("Close called before open."); - } + assertThat(closeCalled).as("Close called before open.").isFalse(); openCalled = true; } @Override public void close() throws Exception { super.close(); - if (!openCalled) { - Assert.fail("Open was not called before close."); - } + assertThat(openCalled).as("Open was not called before close.").isTrue(); closeCalled = true; } @Override public void flatMap1(String value, Collector out) throws Exception { - if (!openCalled) { - Assert.fail("Open was not called before run."); - } + assertThat(openCalled).as("Open was not called before run.").isTrue(); out.collect(value); } @Override public void flatMap2(Integer value, Collector out) throws Exception { - if (!openCalled) { - Assert.fail("Open was not called before run."); - } + assertThat(openCalled).as("Open was not called before run.").isTrue(); out.collect(value.toString()); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamMapTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamMapTest.java index 825909997e531..ca7153fdb333e 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamMapTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/CoStreamMapTest.java @@ -25,12 +25,13 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.streaming.util.TwoInputStreamOperatorTestHarness; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.Serializable; import java.util.concurrent.ConcurrentLinkedQueue; +import static org.assertj.core.api.Assertions.assertThat; + /** * Tests for {@link org.apache.flink.streaming.api.operators.co.CoStreamMap}. These test that: * @@ -40,7 +41,7 @@ *
• Watermarks are correctly forwarded * */ -public class CoStreamMapTest implements Serializable { +class CoStreamMapTest implements Serializable { private static final long serialVersionUID = 1L; private static final class MyCoMap implements CoMapFunction { @@ -58,7 +59,7 @@ public String map2(Integer value) { } @Test - public void testCoMap() throws Exception { + void testCoMap() throws Exception { CoStreamMap operator = new CoStreamMap(new MyCoMap()); @@ -102,7 +103,7 @@ public void testCoMap() throws Exception { } @Test - public void testOpenClose() throws Exception { + void testOpenClose() throws Exception { CoStreamMap operator = new CoStreamMap(new TestOpenCloseCoMapFunction()); @@ -118,9 +119,10 @@ public void testOpenClose() throws Exception { testHarness.close(); - Assert.assertTrue( - "RichFunction methods where not called.", TestOpenCloseCoMapFunction.closeCalled); - Assert.assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0); + assertThat(TestOpenCloseCoMapFunction.closeCalled) + .as("RichFunction methods were not called.") + .isTrue(); + assertThat(testHarness.getOutput()).isNotEmpty(); } // This must only be used in one test, otherwise the static fields will be changed @@ -135,34 +137,26 @@ private static class TestOpenCloseCoMapFunction @Override public void open(OpenContext openContext) throws Exception { super.open(openContext); - if (closeCalled) { - Assert.fail("Close called before open."); - } + assertThat(closeCalled).as("Close was called before open.").isFalse(); openCalled = true; } @Override public void close() throws Exception { super.close(); - if (!openCalled) { - Assert.fail("Open was not called before close."); - } + assertThat(openCalled).as("Open was not called before close.").isTrue(); closeCalled = true; } @Override public String map1(Double value) throws Exception { - if (!openCalled) { - Assert.fail("Open was not called before run."); - } + assertThat(openCalled).as("Open was not called before run.").isTrue(); return value.toString(); } @Override public String map2(Integer value) throws Exception { - if (!openCalled) { - Assert.fail("Open was not called before run."); - } + assertThat(openCalled).as("Open was not called before run.").isTrue(); return value.toString(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/IntervalJoinOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/IntervalJoinOperatorTest.java index dc68c5ad2cc06..b0bcdc0e482da 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/IntervalJoinOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/IntervalJoinOperatorTest.java @@ -32,6 +32,8 @@ import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.KeyedTwoInputStreamOperatorTestHarness; import org.apache.flink.streaming.util.TestHarnessUtil; +import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension; +import org.apache.flink.testutils.junit.extensions.parameterized.Parameters; import org.apache.flink.util.Collector; import org.apache.flink.util.FlinkException; import org.apache.flink.util.OutputTag; @@ -39,11 +41,8 @@ import org.apache.flink.shaded.guava31.com.google.common.collect.Iterables; import org.apache.flink.shaded.guava31.com.google.common.collect.Lists; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith;
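The IntervalJoinOperatorTest diff that begins here swaps JUnit 4's @RunWith(Parameterized.class) runner for Flink's JUnit 5 ParameterizedTestExtension: each @Test becomes @TestTemplate, the @Parameters factory may become private, and constructor injection of the parameter row is kept. A stripped-down sketch of that wiring, assuming the imported flink test-utility classes are on the classpath (class and field names are illustrative):

import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension;
import org.apache.flink.testutils.junit.extensions.parameterized.Parameters;

import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

import java.util.Arrays;
import java.util.Collection;

import static org.assertj.core.api.Assertions.assertThat;

// Each @TestTemplate method runs once per row returned by the @Parameters
// factory; the extension passes the row to the constructor, mirroring the
// JUnit 4 Parameterized runner contract.
@ExtendWith(ParameterizedTestExtension.class)
class ParameterizedMigrationSketch {

    private final boolean lhsFasterThanRhs;

    @Parameters(name = "lhs faster than rhs: {0}")
    private static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {{true}, {false}});
    }

    ParameterizedMigrationSketch(boolean lhsFasterThanRhs) {
        this.lhsFasterThanRhs = lhsFasterThanRhs;
    }

    @TestTemplate
    void runsOncePerParameterRow() {
        assertThat(lhsFasterThanRhs).isIn(true, false);
    }
}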
-import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.api.extension.ExtendWith; import java.util.Arrays; import java.util.Collection; @@ -51,14 +50,17 @@ import java.util.Queue; import java.util.stream.Collectors; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + /** Tests for {@link IntervalJoinOperator}. Those tests cover correctness and cleaning of state */ -@RunWith(Parameterized.class) -public class IntervalJoinOperatorTest { +@ExtendWith(ParameterizedTestExtension.class) +class IntervalJoinOperatorTest { private final boolean lhsFasterThanRhs; @Parameters(name = "lhs faster than rhs: {0}") - public static Collection data() { + private static Collection data() { return Arrays.asList(new Object[][] {{true}, {false}}); } @@ -66,8 +68,8 @@ public IntervalJoinOperatorTest(boolean lhsFasterThanRhs) { this.lhsFasterThanRhs = lhsFasterThanRhs; } - @Test - public void testImplementationMirrorsCorrectly() throws Exception { + @TestTemplate + void testImplementationMirrorsCorrectly() throws Exception { long lowerBound = 1; long upperBound = 3; @@ -98,8 +100,8 @@ public void testImplementationMirrorsCorrectly() throws Exception { .close(); } - @Test // lhs - 2 <= rhs <= rhs + 2 - public void testNegativeInclusiveAndNegativeInclusive() throws Exception { + @TestTemplate // lhs - 2 <= rhs <= lhs - 1 + void testNegativeInclusiveAndNegativeInclusive() throws Exception { setupHarness(-2, true, -1, true) .processElementsAndWatermarks(1, 4) @@ -113,8 +115,8 @@ public void testNegativeInclusiveAndNegativeInclusive() throws Exception { .close(); } - @Test // lhs - 1 <= rhs <= rhs + 1 - public void testNegativeInclusiveAndPositiveInclusive() throws Exception { + @TestTemplate // lhs - 1 <= rhs <= lhs + 1 + void testNegativeInclusiveAndPositiveInclusive() throws Exception { setupHarness(-1, true, 1, true) .processElementsAndWatermarks(1, 4) @@ -133,8 +135,8 @@ public void testNegativeInclusiveAndPositiveInclusive() throws Exception { .close(); } - @Test // lhs + 1 <= rhs <= lhs + 2 - public void testPositiveInclusiveAndPositiveInclusive() throws Exception { + @TestTemplate // lhs + 1 <= rhs <= lhs + 2 + void testPositiveInclusiveAndPositiveInclusive() throws Exception { setupHarness(1, true, 2, true) .processElementsAndWatermarks(1, 4) @@ -148,8 +150,8 @@ public void testPositiveInclusiveAndPositiveInclusive() throws Exception { .close(); } - @Test - public void testNegativeExclusiveAndNegativeExlusive() throws Exception { + @TestTemplate + void testNegativeExclusiveAndNegativeExlusive() throws Exception { setupHarness(-3, false, -1, false) .processElementsAndWatermarks(1, 4) @@ -158,8 +160,8 @@ public void testNegativeExclusiveAndNegativeExlusive() throws Exception { .close(); } - @Test - public void testNegativeExclusiveAndPositiveExlusive() throws Exception { + @TestTemplate + void testNegativeExclusiveAndPositiveExlusive() throws Exception { setupHarness(-1, false, 1, false) .processElementsAndWatermarks(1, 4) @@ -172,8 +174,8 @@ public void testNegativeExclusiveAndPositiveExlusive() throws Exception { .close(); } - @Test - public void testPositiveExclusiveAndPositiveExlusive() throws Exception { + @TestTemplate + void testPositiveExclusiveAndPositiveExlusive() throws Exception { setupHarness(1, false, 3, false) .processElementsAndWatermarks(1, 4) @@ -182,8 +184,8 @@ public void 
testPositiveExclusiveAndPositiveExlusive() throws Exception { .close(); } - @Test - public void testStateCleanupNegativeInclusiveNegativeInclusive() throws Exception { + @TestTemplate + void testStateCleanupNegativeInclusiveNegativeInclusive() throws Exception { setupHarness(-1, true, 0, true) .processElement1(1) @@ -212,8 +214,8 @@ public void testStateCleanupNegativeInclusiveNegativeInclusive() throws Exceptio .close(); } - @Test - public void testStateCleanupNegativePositiveNegativeExlusive() throws Exception { + @TestTemplate + void testStateCleanupNegativePositiveNegativeExlusive() throws Exception { setupHarness(-2, false, 1, false) .processElement1(1) .processElement1(2) @@ -241,8 +243,8 @@ public void testStateCleanupNegativePositiveNegativeExlusive() throws Exception .close(); } - @Test - public void testStateCleanupPositiveInclusivePositiveInclusive() throws Exception { + @TestTemplate + void testStateCleanupPositiveInclusivePositiveInclusive() throws Exception { setupHarness(0, true, 1, true) .processElement1(1) .processElement1(2) @@ -270,8 +272,8 @@ public void testStateCleanupPositiveInclusivePositiveInclusive() throws Exceptio .close(); } - @Test - public void testStateCleanupPositiveExlusivePositiveExclusive() throws Exception { + @TestTemplate + void testStateCleanupPositiveExlusivePositiveExclusive() throws Exception { setupHarness(-1, false, 2, false) .processElement1(1) .processElement1(2) @@ -299,8 +301,8 @@ public void testStateCleanupPositiveExlusivePositiveExclusive() throws Exception .close(); } - @Test - public void testRestoreFromSnapshot() throws Exception { + @TestTemplate + void testRestoreFromSnapshot() throws Exception { // config int lowerBound = -1; @@ -382,8 +384,8 @@ public void testRestoreFromSnapshot() throws Exception { } } - @Test - public void testContextCorrectLeftTimestamp() throws Exception { + @TestTemplate + void testContextCorrectLeftTimestamp() throws Exception { IntervalJoinOperator> op = new IntervalJoinOperator<>( @@ -403,7 +405,7 @@ public void processElement( Context ctx, Collector> out) throws Exception { - Assert.assertEquals(left.ts, ctx.getLeftTimestamp()); + assertThat(ctx.getLeftTimestamp()).isEqualTo(left.ts); } }); @@ -421,8 +423,8 @@ public void processElement( } } - @Test - public void testReturnsCorrectTimestamp() throws Exception { + @TestTemplate + void testReturnsCorrectTimestamp() throws Exception { IntervalJoinOperator> op = new IntervalJoinOperator<>( -1, @@ -444,8 +446,8 @@ public void processElement( Context ctx, Collector> out) throws Exception { - Assert.assertEquals( - Math.max(left.ts, right.ts), ctx.getTimestamp()); + assertThat(ctx.getTimestamp()) + .isEqualTo(Math.max(left.ts, right.ts)); } }); @@ -463,8 +465,8 @@ public void processElement( } } - @Test - public void testContextCorrectRightTimestamp() throws Exception { + @TestTemplate + void testContextCorrectRightTimestamp() throws Exception { IntervalJoinOperator> op = new IntervalJoinOperator<>( @@ -484,7 +486,7 @@ public void processElement( Context ctx, Collector> out) throws Exception { - Assert.assertEquals(right.ts, ctx.getRightTimestamp()); + assertThat(ctx.getRightTimestamp()).isEqualTo(right.ts); } }); @@ -502,31 +504,40 @@ public void processElement( } } - @Test(expected = FlinkException.class) - public void testFailsWithNoTimestampsLeft() throws Exception { - TestHarness newTestHarness = createTestHarness(0L, true, 0L, true); + @TestTemplate + void testFailsWithNoTimestampsLeft() throws Exception { + try (TestHarness newTestHarness = 
createTestHarness(0L, true, 0L, true)) { - newTestHarness.setup(); - newTestHarness.open(); + newTestHarness.setup(); + newTestHarness.open(); - // note that the StreamRecord has no timestamp in constructor - newTestHarness.processElement1(new StreamRecord<>(new TestElem(0, "lhs"))); + // note that the StreamRecord has no timestamp in constructor + assertThatThrownBy( + () -> + newTestHarness.processElement1( + new StreamRecord<>(new TestElem(0, "lhs")))) + .isInstanceOf(FlinkException.class); + } } - @Test(expected = FlinkException.class) - public void testFailsWithNoTimestampsRight() throws Exception { + @TestTemplate + void testFailsWithNoTimestampsRight() throws Exception { try (TestHarness newTestHarness = createTestHarness(0L, true, 0L, true)) { newTestHarness.setup(); newTestHarness.open(); // note that the StreamRecord has no timestamp in constructor - newTestHarness.processElement2(new StreamRecord<>(new TestElem(0, "rhs"))); + assertThatThrownBy( + () -> + newTestHarness.processElement2( + new StreamRecord<>(new TestElem(0, "rhs")))) + .isInstanceOf(FlinkException.class); } } - @Test - public void testDiscardsLateData() throws Exception { + @TestTemplate + void testDiscardsLateData() throws Exception { setupHarness(-1, true, 1, true) .processElement1(1) .processElement2(1) @@ -559,8 +570,8 @@ public void testDiscardsLateData() throws Exception { .close(); } - @Test - public void testLateData() throws Exception { + @TestTemplate + void testLateData() throws Exception { OutputTag leftLateTag = new OutputTag("left_late") {}; OutputTag rightLateTag = new OutputTag("right_late") {}; setupHarness(-1, true, 1, true, leftLateTag, rightLateTag) @@ -588,8 +599,7 @@ public void testLateData() throws Exception { } private void assertEmpty(MapState state) throws Exception { - boolean stateIsEmpty = Iterables.size(state.keys()) == 0; - Assert.assertTrue("state not empty", stateIsEmpty); + assertThat(state.keys()).isEmpty(); } private void assertContainsOnly(MapState state, long... ts) throws Exception { @@ -599,7 +609,7 @@ private void assertContainsOnly(MapState state, long... ts) throws Exce + Arrays.toString(ts) + "\n Actual: " + state.keys(); - Assert.assertTrue(message, state.contains(t)); + assertThat(state.contains(t)).as(message).isTrue(); } String message = @@ -607,7 +617,7 @@ private void assertContainsOnly(MapState state, long... 
ts) throws Exce + Arrays.toString(ts) + "\n Actual: " + state.keys(); - Assert.assertEquals(message, ts.length, Iterables.size(state.keys())); + assertThat(state.keys()).as(message).hasSize(ts.length); } private void assertOutput( @@ -621,11 +631,12 @@ private void assertOutput( int expectedSize = Iterables.size(expectedOutput); - Assert.assertEquals( - "Expected and actual size of stream records different", expectedSize, actualSize); + assertThat(actualSize) + .as("Expected and actual size of stream records different") + .isEqualTo(expectedSize); for (StreamRecord record : expectedOutput) { - Assert.assertTrue(actualOutput.contains(record)); + assertThat(actualOutput.contains(record)).isTrue(); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/KeyedCoProcessOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/KeyedCoProcessOperatorTest.java index 874b3afd872f5..8697c4e7b2cfc 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/KeyedCoProcessOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/KeyedCoProcessOperatorTest.java @@ -33,20 +33,19 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.streaming.util.TwoInputStreamOperatorTestHarness; import org.apache.flink.util.Collector; -import org.apache.flink.util.TestLogger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.concurrent.ConcurrentLinkedQueue; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests {@link KeyedCoProcessOperator}. */ -public class KeyedCoProcessOperatorTest extends TestLogger { +class KeyedCoProcessOperatorTest { @Test - public void testTimestampAndWatermarkQuerying() throws Exception { + void testTimestampAndWatermarkQuerying() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new WatermarkQueryingProcessFunction()); @@ -83,7 +82,7 @@ public void testTimestampAndWatermarkQuerying() throws Exception { } @Test - public void testTimestampAndProcessingTimeQuerying() throws Exception { + void testTimestampAndProcessingTimeQuerying() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new ProcessingTimeQueryingProcessFunction()); @@ -116,7 +115,7 @@ public void testTimestampAndProcessingTimeQuerying() throws Exception { } @Test - public void testEventTimeTimers() throws Exception { + void testEventTimeTimers() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new EventTimeTriggeringProcessFunction()); @@ -156,7 +155,7 @@ public void testEventTimeTimers() throws Exception { } @Test - public void testProcessingTimeTimers() throws Exception { + void testProcessingTimeTimers() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new ProcessingTimeTriggeringProcessFunction()); @@ -192,7 +191,7 @@ public void testProcessingTimeTimers() throws Exception { /** Verifies that we don't have leakage between different keys. 
*/ @Test - public void testEventTimeTimerWithState() throws Exception { + void testEventTimeTimerWithState() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new EventTimeTriggeringStatefulProcessFunction()); @@ -243,7 +242,7 @@ public void testEventTimeTimerWithState() throws Exception { /** Verifies that we don't have leakage between different keys. */ @Test - public void testProcessingTimeTimerWithState() throws Exception { + void testProcessingTimeTimerWithState() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new ProcessingTimeTriggeringStatefulProcessFunction()); @@ -284,7 +283,7 @@ public void testProcessingTimeTimerWithState() throws Exception { } @Test - public void testSnapshotAndRestore() throws Exception { + void testSnapshotAndRestore() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new BothTriggeringProcessFunction()); @@ -337,7 +336,7 @@ public void testSnapshotAndRestore() throws Exception { } @Test - public void testGetCurrentKeyFromContext() throws Exception { + void testGetCurrentKeyFromContext() throws Exception { KeyedCoProcessOperator operator = new KeyedCoProcessOperator<>(new AppendCurrentKeyProcessFunction()); @@ -441,8 +440,7 @@ public void processElement2(String value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - - assertEquals(TimeDomain.EVENT_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.EVENT_TIME); out.collect(ctx.getCurrentKey() + ":" + 1777); } } @@ -484,7 +482,7 @@ private void handleValue( @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(TimeDomain.EVENT_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.EVENT_TIME); out.collect("STATE:" + getRuntimeContext().getState(state).value()); } } @@ -511,8 +509,7 @@ public void processElement2(String value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - - assertEquals(TimeDomain.PROCESSING_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.PROCESSING_TIME); out.collect("" + 1777); } } @@ -586,7 +583,7 @@ private void handleValue( @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(TimeDomain.PROCESSING_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.PROCESSING_TIME); out.collect("STATE:" + getRuntimeContext().getState(state).value()); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/LegacyKeyedCoProcessOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/LegacyKeyedCoProcessOperatorTest.java index d698b72b84898..9b9d4813b6024 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/LegacyKeyedCoProcessOperatorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/co/LegacyKeyedCoProcessOperatorTest.java @@ -33,20 +33,19 @@ import org.apache.flink.streaming.util.TestHarnessUtil; import org.apache.flink.streaming.util.TwoInputStreamOperatorTestHarness; import org.apache.flink.util.Collector; -import org.apache.flink.util.TestLogger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import 
java.util.concurrent.ConcurrentLinkedQueue; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests {@link LegacyKeyedCoProcessOperator}. */ -public class LegacyKeyedCoProcessOperatorTest extends TestLogger { +class LegacyKeyedCoProcessOperatorTest { @Test - public void testTimestampAndWatermarkQuerying() throws Exception { + void testTimestampAndWatermarkQuerying() throws Exception { LegacyKeyedCoProcessOperator operator = new LegacyKeyedCoProcessOperator<>(new WatermarkQueryingProcessFunction()); @@ -83,7 +82,7 @@ public void testTimestampAndWatermarkQuerying() throws Exception { } @Test - public void testTimestampAndProcessingTimeQuerying() throws Exception { + void testTimestampAndProcessingTimeQuerying() throws Exception { LegacyKeyedCoProcessOperator operator = new LegacyKeyedCoProcessOperator<>(new ProcessingTimeQueryingProcessFunction()); @@ -116,7 +115,7 @@ public void testTimestampAndProcessingTimeQuerying() throws Exception { } @Test - public void testEventTimeTimers() throws Exception { + void testEventTimeTimers() throws Exception { LegacyKeyedCoProcessOperator operator = new LegacyKeyedCoProcessOperator<>(new EventTimeTriggeringProcessFunction()); @@ -156,7 +155,7 @@ public void testEventTimeTimers() throws Exception { } @Test - public void testProcessingTimeTimers() throws Exception { + void testProcessingTimeTimers() throws Exception { LegacyKeyedCoProcessOperator operator = new LegacyKeyedCoProcessOperator<>(new ProcessingTimeTriggeringProcessFunction()); @@ -192,7 +191,7 @@ public void testProcessingTimeTimers() throws Exception { /** Verifies that we don't have leakage between different keys. */ @Test - public void testEventTimeTimerWithState() throws Exception { + void testEventTimeTimerWithState() throws Exception { LegacyKeyedCoProcessOperator operator = new LegacyKeyedCoProcessOperator<>( @@ -244,7 +243,7 @@ public void testEventTimeTimerWithState() throws Exception { /** Verifies that we don't have leakage between different keys. 
*/ @Test - public void testProcessingTimeTimerWithState() throws Exception { + void testProcessingTimeTimerWithState() throws Exception { LegacyKeyedCoProcessOperator operator = new LegacyKeyedCoProcessOperator<>( @@ -286,7 +285,7 @@ public void testProcessingTimeTimerWithState() throws Exception { } @Test - public void testSnapshotAndRestore() throws Exception { + void testSnapshotAndRestore() throws Exception { LegacyKeyedCoProcessOperator operator = new LegacyKeyedCoProcessOperator<>(new BothTriggeringProcessFunction()); @@ -413,7 +412,7 @@ public void processElement2(String value, Context ctx, Collector out) public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(TimeDomain.EVENT_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.EVENT_TIME); out.collect("" + 1777); } } @@ -455,7 +454,7 @@ private void handleValue( @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(TimeDomain.EVENT_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.EVENT_TIME); out.collect("STATE:" + getRuntimeContext().getState(state).value()); } } @@ -482,8 +481,7 @@ public void processElement2(String value, Context ctx, Collector out) @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - - assertEquals(TimeDomain.PROCESSING_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.PROCESSING_TIME); out.collect("" + 1777); } } @@ -557,7 +555,7 @@ private void handleValue( @Override public void onTimer(long timestamp, OnTimerContext ctx, Collector out) throws Exception { - assertEquals(TimeDomain.PROCESSING_TIME, ctx.timeDomain()); + assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.PROCESSING_TIME); out.collect("STATE:" + getRuntimeContext().getState(state).value()); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultBufferTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultBufferTest.java index 4fa2b835c8e98..1976df86e74a4 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultBufferTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultBufferTest.java @@ -22,8 +22,7 @@ import org.apache.flink.core.memory.DataOutputView; import org.apache.flink.core.memory.DataOutputViewStreamWrapper; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.ByteArrayOutputStream; import java.util.ArrayList; @@ -31,13 +30,16 @@ import java.util.Collections; import java.util.List; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + /** Tests for {@link AbstractCollectResultBuffer} and its subclasses. 
*/ -public class CollectResultBufferTest { +class CollectResultBufferTest { private static final TypeSerializer serializer = IntSerializer.INSTANCE; @Test - public void testUncheckpointedValidResponse() throws Exception { + void testUncheckpointedValidResponse() throws Exception { String version = "version"; AbstractCollectResultBuffer buffer = new UncheckpointedCollectResultBuffer<>(serializer, false); @@ -52,7 +54,7 @@ public void testUncheckpointedValidResponse() throws Exception { buffer.dealWithResponse(response, 0); // for uncheckpointed buffer, results can be instantly seen by user for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } expected = Arrays.asList(4, 5); @@ -62,13 +64,13 @@ version, 0, createSerializedResults(Arrays.asList(3, 4, 5))); buffer.dealWithResponse(response, 2); for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } - Assert.assertNull(buffer.next()); + assertThat(buffer.next()).isNull(); } @Test - public void testUncheckpointedFaultTolerance() throws Exception { + void testUncheckpointedFaultTolerance() throws Exception { String version = "version"; AbstractCollectResultBuffer buffer = new UncheckpointedCollectResultBuffer<>(serializer, true); @@ -82,7 +84,7 @@ public void testUncheckpointedFaultTolerance() throws Exception { response = new CollectCoordinationResponse(version, 0, createSerializedResults(expected)); buffer.dealWithResponse(response, 0); for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } // version changed, job restarted @@ -94,12 +96,12 @@ response = new CollectCoordinationResponse(version, 0, createSerializedResults(expected)); buffer.dealWithResponse(response, 0); for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } } - @Test(expected = RuntimeException.class) - public void testUncheckpointedNotFaultTolerance() throws Exception { + @Test + void testUncheckpointedNotFaultTolerance() throws Exception { String version = "version"; AbstractCollectResultBuffer buffer = new UncheckpointedCollectResultBuffer<>(serializer, false); @@ -113,17 +115,19 @@ public void testUncheckpointedNotFaultTolerance() throws Exception { response = new CollectCoordinationResponse(version, 0, createSerializedResults(expected)); buffer.dealWithResponse(response, 0); for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } // version changed, job restarted version = "another"; - response = new CollectCoordinationResponse(version, 0, Collections.emptyList()); - buffer.dealWithResponse(response, 0); + CollectCoordinationResponse anotherResponse = + new CollectCoordinationResponse(version, 0, Collections.emptyList()); + assertThatThrownBy(() -> buffer.dealWithResponse(anotherResponse, 0)) + .isInstanceOf(RuntimeException.class); } @Test - public void testCheckpointedValidResponse() throws Exception { + void testCheckpointedValidResponse() throws Exception { String version = "version"; AbstractCollectResultBuffer buffer = new 
CheckpointedCollectResultBuffer<>(serializer); @@ -137,7 +141,7 @@ public void testCheckpointedValidResponse() throws Exception { response = new CollectCoordinationResponse(version, 0, createSerializedResults(expected)); buffer.dealWithResponse(response, 0); // for checkpointed buffer, results can only be seen after a checkpoint - Assert.assertNull(buffer.next()); + assertThat(buffer.next()).isNull(); response = new CollectCoordinationResponse( @@ -145,7 +149,7 @@ public void testCheckpointedValidResponse() throws Exception { buffer.dealWithResponse(response, 3); // results before checkpoint can be seen now for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } expected = Arrays.asList(4, 5, 6); @@ -156,7 +160,7 @@ public void testCheckpointedValidResponse() throws Exception { buffer.dealWithResponse(response, 5); // results before checkpoint can be seen now for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } // send some uncommitted data @@ -172,19 +176,19 @@ public void testCheckpointedValidResponse() throws Exception { buffer.dealWithResponse(response, 7); // results before checkpoint can be seen now for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } buffer.complete(); expected = Arrays.asList(8, 9, 10); for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } - Assert.assertNull(buffer.next()); + assertThat(buffer.next()).isNull(); } @Test - public void testCheckpointedRestart() throws Exception { + void testCheckpointedRestart() throws Exception { String version = "version"; AbstractCollectResultBuffer buffer = new CheckpointedCollectResultBuffer<>(serializer); @@ -199,7 +203,7 @@ public void testCheckpointedRestart() throws Exception { version, 0, createSerializedResults(Arrays.asList(1, 2, 3))); buffer.dealWithResponse(response, 0); // for checkpointed buffer, results can only be seen after a checkpoint - Assert.assertNull(buffer.next()); + assertThat(buffer.next()).isNull(); // version changed, job restarted version = "another"; @@ -211,19 +215,19 @@ public void testCheckpointedRestart() throws Exception { response = new CollectCoordinationResponse(version, 0, createSerializedResults(expected)); buffer.dealWithResponse(response, 0); // checkpoint still not done - Assert.assertNull(buffer.next()); + assertThat(buffer.next()).isNull(); // checkpoint completed response = new CollectCoordinationResponse(version, 3, Collections.emptyList()); buffer.dealWithResponse(response, 0); for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } - Assert.assertNull(buffer.next()); + assertThat(buffer.next()).isNull(); } @Test - public void testImmediateAccumulatorResult() throws Exception { + void testImmediateAccumulatorResult() throws Exception { String version = "version"; AbstractCollectResultBuffer buffer = new UncheckpointedCollectResultBuffer<>(serializer, false); @@ -237,9 +241,9 @@ public void testImmediateAccumulatorResult() throws Exception { buffer.complete(); for (Integer expectedValue : expected) { - Assert.assertEquals(expectedValue, buffer.next()); + assertThat(buffer.next()).isEqualTo(expectedValue); } - 
Assert.assertNull(buffer.next()); + assertThat(buffer.next()).isNull(); } private List createSerializedResults(List values) throws Exception { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultIteratorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultIteratorTest.java index a4486507dacc0..8755723647389 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultIteratorTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectResultIteratorTest.java @@ -30,13 +30,10 @@ import org.apache.flink.streaming.api.operators.collect.utils.TestJobClient; import org.apache.flink.streaming.api.operators.collect.utils.TestUncheckpointedCoordinationRequestHandler; import org.apache.flink.util.OptionalFailure; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -44,8 +41,11 @@ import java.util.Set; import java.util.concurrent.CompletableFuture; +import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture; +import static org.assertj.core.api.Assertions.assertThat; + /** Tests for {@link CollectResultIterator}. */ -public class CollectResultIteratorTest extends TestLogger { +class CollectResultIteratorTest { private final TypeSerializer serializer = IntSerializer.INSTANCE; @@ -54,7 +54,7 @@ public class CollectResultIteratorTest extends TestLogger { private static final String ACCUMULATOR_NAME = "accumulatorName"; @Test - public void testUncheckpointedIterator() throws Exception { + void testUncheckpointedIterator() throws Exception { Random random = new Random(); // run this random test multiple times @@ -82,7 +82,7 @@ public void testUncheckpointedIterator() throws Exception { // this is an at least once iterator, so we expect each value to at least appear Set actualSet = new HashSet<>(actual); for (int expectedValue : expected) { - Assert.assertTrue(actualSet.contains(expectedValue)); + assertThat(actualSet).contains(expectedValue); } iterator.close(); @@ -90,7 +90,7 @@ public void testUncheckpointedIterator() throws Exception { } @Test - public void testCheckpointedIterator() throws Exception { + void testCheckpointedIterator() throws Exception { // run this random test multiple times for (int testCount = 200; testCount > 0; testCount--) { List expected = new ArrayList<>(); @@ -109,19 +109,16 @@ public void testCheckpointedIterator() throws Exception { while (iterator.hasNext()) { actual.add(iterator.next()); } - Assert.assertEquals(expected.size(), actual.size()); - - Collections.sort(expected); - Collections.sort(actual); - Assert.assertArrayEquals( - expected.toArray(new Integer[0]), actual.toArray(new Integer[0])); + assertThat(actual) + .hasSameSizeAs(expected) + .containsExactlyInAnyOrderElementsOf(expected); iterator.close(); } } @Test - public void testEarlyClose() throws Exception { + void testEarlyClose() throws Exception { List expected = new ArrayList<>(); for (int i = 0; i < 200; i++) { expected.add(i); @@ -136,13 +133,15 @@ public void testEarlyClose() throws Exception { JobClient jobClient = tuple2.f1; for (int i = 0; i < 100; i++) { - Assert.assertTrue(iterator.hasNext()); - Assert.assertNotNull(iterator.next()); + assertThat(iterator).hasNext(); + 
assertThat(iterator.next()).isNotNull(); } - Assert.assertTrue(iterator.hasNext()); + assertThat(iterator).hasNext(); iterator.close(); - Assert.assertEquals(JobStatus.CANCELED, jobClient.getJobStatus().get()); + assertThatFuture(jobClient.getJobStatus()) + .eventuallySucceeds() + .isEqualTo(JobStatus.CANCELED); } private Tuple2, JobClient> createIteratorAndJobClient( diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionRandomITCase.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionRandomITCase.java index 75ed2a4e8a1bb..60744f2e7f623 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionRandomITCase.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionRandomITCase.java @@ -24,12 +24,9 @@ import org.apache.flink.streaming.api.operators.collect.utils.CollectSinkFunctionTestWrapper; import org.apache.flink.streaming.api.operators.collect.utils.TestJobClient; import org.apache.flink.util.OptionalFailure; -import org.apache.flink.util.TestLogger; import org.apache.flink.util.function.RunnableWithException; -import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Collections; @@ -42,12 +39,13 @@ import java.util.concurrent.CompletableFuture; import static org.apache.flink.streaming.api.operators.collect.utils.CollectSinkFunctionTestWrapper.ACCUMULATOR_NAME; +import static org.assertj.core.api.Assertions.assertThat; /** * Random IT cases for {@link CollectSinkFunction}. It will perform random insert, random checkpoint * and random restart. 
*/ -public class CollectSinkFunctionRandomITCase extends TestLogger { +class CollectSinkFunctionRandomITCase { private static final int MAX_RESULTS_PER_BATCH = 3; private static final JobID TEST_JOB_ID = new JobID(); @@ -59,7 +57,7 @@ public class CollectSinkFunctionRandomITCase extends TestLogger { private boolean jobFinished; @Test - public void testUncheckpointedFunction() throws Exception { + void testUncheckpointedFunction() throws Exception { // run multiple times for this random test for (int testCount = 30; testCount > 0; testCount--) { functionWrapper = @@ -80,7 +78,7 @@ public void testUncheckpointedFunction() throws Exception { } @Test - public void testCheckpointedFunction() throws Exception { + void testCheckpointedFunction() throws Exception { // run multiple times for this random test for (int testCount = 30; testCount > 0; testCount--) { functionWrapper = @@ -124,7 +122,7 @@ private List runFunctionRandomTest(Thread feeder) throws Exception { private void assertResultsEqualAfterSort(List expected, List actual) { Collections.sort(expected); Collections.sort(actual); - Assert.assertThat(actual, CoreMatchers.is(expected)); + assertThat(actual).isEqualTo(expected); } /** diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionTest.java index fdf56763429c1..34efd510a6dfc 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/CollectSinkFunctionTest.java @@ -22,7 +22,6 @@ import org.apache.flink.configuration.TaskManagerOptions; import org.apache.flink.streaming.api.operators.collect.utils.CollectSinkFunctionTestWrapper; import org.apache.flink.streaming.api.operators.collect.utils.CollectTestUtils; -import org.apache.flink.util.TestLogger; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -38,20 +37,20 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Tests for {@link CollectSinkFunction}. 
*/ -public class CollectSinkFunctionTest extends TestLogger { +class CollectSinkFunctionTest { private static final TypeSerializer serializer = IntSerializer.INSTANCE; private CollectSinkFunctionTestWrapper functionWrapper; @BeforeEach - public void before() throws Exception { + void before() throws Exception { // max bytes per batch = 3 * sizeof(int) functionWrapper = new CollectSinkFunctionTestWrapper<>(serializer, 12); } @AfterEach - public void after() throws Exception { + void after() throws Exception { functionWrapper.closeWrapper(); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/AbstractTestCoordinationRequestHandler.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/AbstractTestCoordinationRequestHandler.java index eda6166c62414..e837044517d88 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/AbstractTestCoordinationRequestHandler.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/AbstractTestCoordinationRequestHandler.java @@ -29,8 +29,6 @@ import org.apache.flink.streaming.api.operators.collect.CollectSinkFunction; import org.apache.flink.util.OptionalFailure; -import org.junit.Assert; - import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -43,6 +41,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; +import static org.assertj.core.api.Assertions.assertThat; + /** A {@link CoordinationRequestHandler} to test fetching SELECT query results. */ public abstract class AbstractTestCoordinationRequestHandler implements CoordinationRequestHandler { @@ -62,8 +62,7 @@ public abstract class AbstractTestCoordinationRequestHandler protected final Random random; protected boolean closed; - public AbstractTestCoordinationRequestHandler( - TypeSerializer serializer, String accumulatorName) { + AbstractTestCoordinationRequestHandler(TypeSerializer serializer, String accumulatorName) { this.serializer = serializer; this.accumulatorName = accumulatorName; @@ -85,11 +84,11 @@ public CompletableFuture handleCoordinationRequest( throw new RuntimeException("Handler closed"); } - Assert.assertTrue(request instanceof CollectCoordinationRequest); + assertThat(request).isInstanceOf(CollectCoordinationRequest.class); CollectCoordinationRequest collectRequest = (CollectCoordinationRequest) request; updateBufferedResults(); - Assert.assertTrue(offset <= collectRequest.getOffset()); + assertThat(offset).isLessThanOrEqualTo(collectRequest.getOffset()); List subList = Collections.emptyList(); if (collectRequest.getVersion().equals(version)) { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectSinkFunctionTestWrapper.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectSinkFunctionTestWrapper.java index 4ac3a2e0ff357..e76f3dcac67fd 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectSinkFunctionTestWrapper.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectSinkFunctionTestWrapper.java @@ -36,12 +36,12 @@ import org.apache.flink.streaming.api.operators.collect.CollectSinkOperatorCoordinator; import org.apache.flink.streaming.util.MockStreamingRuntimeContext; -import org.junit.Assert; - import java.util.ArrayList; import 
java.util.List; import java.util.concurrent.TimeUnit; +import static org.assertj.core.api.Assertions.assertThat; + /** * A wrapper class for creating, checkpointing and closing {@link * org.apache.flink.streaming.api.operators.collect.CollectSinkFunction} for tests. @@ -166,7 +166,7 @@ public Tuple2 getAccumulatorResults() throws List serializedResults = SerializedListAccumulator.deserializeList( accLocalValue, BytePrimitiveArraySerializer.INSTANCE); - Assert.assertEquals(1, serializedResults.size()); + assertThat(serializedResults).hasSize(1); byte[] serializedResult = serializedResults.get(0); return CollectSinkFunction.deserializeAccumulatorResult(serializedResult); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectTestUtils.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectTestUtils.java index 8448f951d8cc2..3613af245ce09 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectTestUtils.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/CollectTestUtils.java @@ -23,14 +23,13 @@ import org.apache.flink.core.memory.DataOutputViewStreamWrapper; import org.apache.flink.streaming.api.operators.collect.CollectCoordinationResponse; -import org.hamcrest.CoreMatchers; -import org.junit.Assert; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import static org.assertj.core.api.Assertions.assertThat; + /** Utilities for testing collecting mechanism. */ public class CollectTestUtils { @@ -56,14 +55,10 @@ public static void assertResponseEquals( List expected, TypeSerializer serializer) throws IOException { - Assert.assertEquals(version, response.getVersion()); - Assert.assertEquals(lastCheckpointedOffset, response.getLastCheckpointedOffset()); + assertThat(response.getVersion()).isEqualTo(version); + assertThat(response.getLastCheckpointedOffset()).isEqualTo(lastCheckpointedOffset); List results = response.getResults(serializer); - assertResultsEqual(expected, results); - } - - public static void assertResultsEqual(List expected, List actual) { - Assert.assertThat(actual, CoreMatchers.is(expected)); + assertThat(results).isEqualTo(expected); } public static void assertAccumulatorResult( @@ -78,9 +73,9 @@ public static void assertAccumulatorResult( CollectCoordinationResponse response = accResults.f1; List actualResults = response.getResults(serializer); - Assert.assertEquals(expectedOffset, offset); - Assert.assertEquals(expectedVersion, response.getVersion()); - Assert.assertEquals(expectedLastCheckpointedOffset, response.getLastCheckpointedOffset()); - assertResultsEqual(expectedResults, actualResults); + assertThat(offset).isEqualTo(expectedOffset); + assertThat(response.getVersion()).isEqualTo(expectedVersion); + assertThat(response.getLastCheckpointedOffset()).isEqualTo(expectedLastCheckpointedOffset); + assertThat(actualResults).isEqualTo(expectedResults); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/TestJobClient.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/TestJobClient.java index 144f3f906dcfa..c9ecfa9d0c8e7 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/TestJobClient.java +++ 
b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/collect/utils/TestJobClient.java @@ -30,13 +30,13 @@ import org.apache.flink.runtime.operators.coordination.CoordinationResponse; import org.apache.flink.util.OptionalFailure; -import org.junit.Assert; - import javax.annotation.Nullable; import java.util.Map; import java.util.concurrent.CompletableFuture; +import static org.assertj.core.api.Assertions.assertThat; + /** A {@link JobClient} to test fetching SELECT query results. */ public class TestJobClient implements JobClient, CoordinationRequestGateway { @@ -109,7 +109,7 @@ public CompletableFuture sendCoordinationRequest( throw new RuntimeException("Job terminated"); } - Assert.assertEquals(this.operatorId, operatorId); + assertThat(operatorId).isEqualTo(this.operatorId); CoordinationResponse response; try { response = handler.handleCoordinationRequest(request).get(); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/FixedLengthKeyAndValueSerializerTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/FixedLengthKeyAndValueSerializerTest.java index 44444f3ae965f..67c6f6e8b9a38 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/FixedLengthKeyAndValueSerializerTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/FixedLengthKeyAndValueSerializerTest.java @@ -59,14 +59,14 @@ protected Tuple2>[] getTestData() { @Override @Test - public void testConfigSnapshotInstantiation() { + protected void testConfigSnapshotInstantiation() { assertThatThrownBy(() -> super.testConfigSnapshotInstantiation()) .isInstanceOf(UnsupportedOperationException.class); } @Override @Test - public void testSnapshotConfigurationAndReconfigure() throws Exception { + protected void testSnapshotConfigurationAndReconfigure() { assertThatThrownBy(() -> super.testSnapshotConfigurationAndReconfigure()) .isInstanceOf(UnsupportedOperationException.class); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/LargeSortingDataInputITCase.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/LargeSortingDataInputITCase.java index 1d6a76547dc5c..421baa06e3683 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/LargeSortingDataInputITCase.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/LargeSortingDataInputITCase.java @@ -46,8 +46,7 @@ import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.runtime.watermarkstatus.WatermarkStatus; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.LinkedHashSet; @@ -56,8 +55,7 @@ import java.util.Set; import java.util.concurrent.CompletableFuture; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Longer running IT tests for {@link SortingDataInput} and {@link MultiInputSortingDataInput}. 
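 *
 * <p>Besides the import swap above, guard-style failures in this file are rewritten so the
 * condition itself is asserted rather than failed on manually. A sketch of the pattern from the
 * output-checking hunk below (illustrative restatement):
 *
 * <pre>{@code
 * // before
 * if (!seenKeys.add(incomingKey)) {
 *     Assert.fail("Received an out of order key: " + incomingKey);
 * }
 *
 * // after
 * assertThat(seenKeys.add(incomingKey))
 *         .as("Received an out of order key: " + incomingKey)
 *         .isTrue();
 * }</pre>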
@@ -65,9 +63,9 @@ * @see SortingDataInputTest * @see MultiInputSortingDataInputsTest */ -public class LargeSortingDataInputITCase { +class LargeSortingDataInputITCase { @Test - public void intKeySorting() throws Exception { + void intKeySorting() throws Exception { int numberOfRecords = 500_000; GeneratedRecordsDataInput input = new GeneratedRecordsDataInput(numberOfRecords, 0); KeySelector, Integer> keySelector = value -> value.f0; @@ -91,12 +89,12 @@ public void intKeySorting() throws Exception { inputStatus = sortingDataInput.emitNext(output); } while (inputStatus != DataInputStatus.END_OF_INPUT); - assertThat(output.getSeenRecords(), equalTo(numberOfRecords)); + assertThat(output.getSeenRecords()).isEqualTo(numberOfRecords); } } @Test - public void stringKeySorting() throws Exception { + void stringKeySorting() throws Exception { int numberOfRecords = 500_000; GeneratedRecordsDataInput input = new GeneratedRecordsDataInput(numberOfRecords, 0); KeySelector, String> keySelector = value -> value.f1; @@ -120,13 +118,13 @@ public void stringKeySorting() throws Exception { inputStatus = sortingDataInput.emitNext(output); } while (inputStatus != DataInputStatus.END_OF_INPUT); - assertThat(output.getSeenRecords(), equalTo(numberOfRecords)); + assertThat(output.getSeenRecords()).isEqualTo(numberOfRecords); } } @Test @SuppressWarnings({"rawtypes", "unchecked"}) - public void multiInputKeySorting() throws Exception { + void multiInputKeySorting() throws Exception { int numberOfRecords = 500_000; GeneratedRecordsDataInput input1 = new GeneratedRecordsDataInput(numberOfRecords, 0); GeneratedRecordsDataInput input2 = new GeneratedRecordsDataInput(numberOfRecords, 1); @@ -174,7 +172,7 @@ sortedInput2, output, new DummyOperatorChain()) inputStatus = multiSortedProcessor.processInput(); } while (inputStatus != DataInputStatus.END_OF_INPUT); - assertThat(output.getSeenRecords(), equalTo(numberOfRecords * 2)); + assertThat(output.getSeenRecords()).isEqualTo(numberOfRecords * 2); } } } @@ -204,9 +202,9 @@ public void emitRecord(StreamRecord> streamRecor this.seenRecords++; E incomingKey = keySelector.getKey(streamRecord.getValue()); if (!Objects.equals(incomingKey, currentKey)) { - if (!seenKeys.add(incomingKey)) { - Assert.fail("Received an out of order key: " + incomingKey); - } + assertThat(seenKeys.add(incomingKey)) + .as("Received an out of order key: " + incomingKey) + .isTrue(); this.currentKey = incomingKey; } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/MultiInputSortingDataInputsTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/MultiInputSortingDataInputsTest.java index 11368c22f4487..67d89c984804d 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/MultiInputSortingDataInputsTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/MultiInputSortingDataInputsTest.java @@ -36,29 +36,28 @@ import org.apache.flink.streaming.runtime.streamrecord.StreamElement; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.List; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link MultiInputSortingDataInput}. 
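 *
 * <p>Expected-event assertions in this file move from {@code equalTo(Arrays.asList(...))} to
 * AssertJ's vararg {@code containsExactly}, which keeps the inline comments next to the elements
 * they describe. A shortened sketch of the pattern (illustrative only):
 *
 * <pre>{@code
 * assertThat(collectingDataOutput.events)
 *         .containsExactly(
 *                 new StreamRecord<>(0, 3),
 *                 new StreamRecord<>(1, 3),
 *                 // watermark from the second input
 *                 new Watermark(3));
 * }</pre>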
*/ -public class MultiInputSortingDataInputsTest { +class MultiInputSortingDataInputsTest { @Test - public void passThroughThenSortedInput() throws Exception { + void passThroughThenSortedInput() throws Exception { twoInputOrderTest(1, 0); } @Test - public void sortedThenPassThroughInput() throws Exception { + void sortedThenPassThroughInput() throws Exception { twoInputOrderTest(0, 1); } @SuppressWarnings("unchecked") - public void twoInputOrderTest(int preferredIndex, int sortedIndex) throws Exception { + void twoInputOrderTest(int preferredIndex, int sortedIndex) throws Exception { CollectingDataOutput collectingDataOutput = new CollectingDataOutput<>(); List sortedInputElements = @@ -130,26 +129,25 @@ public void twoInputOrderTest(int preferredIndex, int sortedIndex) throws Except } } - assertThat( - collectingDataOutput.events, - equalTo( - Arrays.asList( - new StreamRecord<>(99, 3), - new StreamRecord<>(99, 1), - new Watermark(99L), // max watermark from the preferred input - new StreamRecord<>(1, 1), - new StreamRecord<>(1, 2), - new StreamRecord<>(1, 3), - new StreamRecord<>(2, 1), - new StreamRecord<>(2, 2), - new StreamRecord<>(2, 3), - Watermark.MAX_WATERMARK // max watermark from the sorted input - ))); + assertThat(collectingDataOutput.events) + .containsExactly( + new StreamRecord<>(99, 3), + new StreamRecord<>(99, 1), + // max watermark from the preferred input + new Watermark(99L), + new StreamRecord<>(1, 1), + new StreamRecord<>(1, 2), + new StreamRecord<>(1, 3), + new StreamRecord<>(2, 1), + new StreamRecord<>(2, 2), + new StreamRecord<>(2, 3), + // max watermark from the sorted input + Watermark.MAX_WATERMARK); } @Test @SuppressWarnings("unchecked") - public void simpleFixedLengthKeySorting() throws Exception { + void simpleFixedLengthKeySorting() throws Exception { CollectingDataOutput collectingDataOutput = new CollectingDataOutput<>(); List elements = Arrays.asList( @@ -204,30 +202,29 @@ input2, collectingDataOutput, new DummyOperatorChain()) } } - assertThat( - collectingDataOutput.events, - equalTo( - Arrays.asList( - new StreamRecord<>(1, 1), - new StreamRecord<>(1, 1), - new StreamRecord<>(1, 2), - new StreamRecord<>(1, 2), - new StreamRecord<>(1, 3), - new StreamRecord<>(1, 3), - new StreamRecord<>(2, 1), - new StreamRecord<>(2, 1), - new StreamRecord<>(2, 2), - new StreamRecord<>(2, 2), - new StreamRecord<>(2, 3), - Watermark.MAX_WATERMARK, // max watermark from one of the inputs - new StreamRecord<>(2, 3), - Watermark.MAX_WATERMARK // max watermark from the other input - ))); + assertThat(collectingDataOutput.events) + .containsExactly( + new StreamRecord<>(1, 1), + new StreamRecord<>(1, 1), + new StreamRecord<>(1, 2), + new StreamRecord<>(1, 2), + new StreamRecord<>(1, 3), + new StreamRecord<>(1, 3), + new StreamRecord<>(2, 1), + new StreamRecord<>(2, 1), + new StreamRecord<>(2, 2), + new StreamRecord<>(2, 2), + new StreamRecord<>(2, 3), + // max watermark from one of the inputs + Watermark.MAX_WATERMARK, + new StreamRecord<>(2, 3), + // max watermark from the other input + Watermark.MAX_WATERMARK); } @Test @SuppressWarnings("unchecked") - public void watermarkPropagation() throws Exception { + void watermarkPropagation() throws Exception { CollectingDataOutput collectingDataOutput = new CollectingDataOutput<>(); List elements1 = Arrays.asList( @@ -285,17 +282,16 @@ input2, collectingDataOutput, new DummyOperatorChain()) } } - assertThat( - collectingDataOutput.events, - equalTo( - Arrays.asList( - new StreamRecord<>(0, 3), - new StreamRecord<>(1, 3), - new 
Watermark(3), // watermark from the second input - new StreamRecord<>(2, 3), - new StreamRecord<>(3, 3), - new Watermark(7) // watermark from the first input - ))); + assertThat(collectingDataOutput.events) + .containsExactly( + new StreamRecord<>(0, 3), + new StreamRecord<>(1, 3), + // watermark from the second input + new Watermark(3), + new StreamRecord<>(2, 3), + new StreamRecord<>(3, 3), + // watermark from the first input + new Watermark(7)); } private static class DummyOperatorChain implements BoundedMultiInput { diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/SortingDataInputTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/SortingDataInputTest.java index 3271f64e2d70a..e04d45a9fd307 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/SortingDataInputTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/SortingDataInputTest.java @@ -29,12 +29,11 @@ import org.apache.flink.streaming.runtime.io.DataInputStatus; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link SortingDataInput}. @@ -42,9 +41,9 @@ *

    These are rather simple unit tests. See also {@link LargeSortingDataInputITCase} for more * thorough tests. */ -public class SortingDataInputTest { +class SortingDataInputTest { @Test - public void simpleFixedLengthKeySorting() throws Exception { + void simpleFixedLengthKeySorting() throws Exception { CollectingDataOutput collectingDataOutput = new CollectingDataOutput<>(); CollectionDataInput input = new CollectionDataInput<>( @@ -75,20 +74,18 @@ public void simpleFixedLengthKeySorting() throws Exception { inputStatus = sortingDataInput.emitNext(collectingDataOutput); } while (inputStatus != DataInputStatus.END_OF_INPUT); - assertThat( - collectingDataOutput.events, - equalTo( - Arrays.asList( - new StreamRecord<>(1, 1), - new StreamRecord<>(1, 2), - new StreamRecord<>(1, 3), - new StreamRecord<>(2, 1), - new StreamRecord<>(2, 2), - new StreamRecord<>(2, 3)))); + assertThat(collectingDataOutput.events) + .containsExactly( + new StreamRecord<>(1, 1), + new StreamRecord<>(1, 2), + new StreamRecord<>(1, 3), + new StreamRecord<>(2, 1), + new StreamRecord<>(2, 2), + new StreamRecord<>(2, 3)); } @Test - public void watermarkPropagation() throws Exception { + void watermarkPropagation() throws Exception { CollectingDataOutput collectingDataOutput = new CollectingDataOutput<>(); CollectionDataInput input = new CollectionDataInput<>( @@ -125,21 +122,19 @@ public void watermarkPropagation() throws Exception { inputStatus = sortingDataInput.emitNext(collectingDataOutput); } while (inputStatus != DataInputStatus.END_OF_INPUT); - assertThat( - collectingDataOutput.events, - equalTo( - Arrays.asList( - new StreamRecord<>(1, 1), - new StreamRecord<>(1, 2), - new StreamRecord<>(1, 3), - new StreamRecord<>(2, 1), - new StreamRecord<>(2, 2), - new StreamRecord<>(2, 3), - new Watermark(6)))); + assertThat(collectingDataOutput.events) + .containsExactly( + new StreamRecord<>(1, 1), + new StreamRecord<>(1, 2), + new StreamRecord<>(1, 3), + new StreamRecord<>(2, 1), + new StreamRecord<>(2, 2), + new StreamRecord<>(2, 3), + new Watermark(6)); } @Test - public void simpleVariableLengthKeySorting() throws Exception { + void simpleVariableLengthKeySorting() throws Exception { CollectingDataOutput collectingDataOutput = new CollectingDataOutput<>(); CollectionDataInput input = new CollectionDataInput<>( @@ -170,15 +165,13 @@ public void simpleVariableLengthKeySorting() throws Exception { inputStatus = sortingDataInput.emitNext(collectingDataOutput); } while (inputStatus != DataInputStatus.END_OF_INPUT); - assertThat( - collectingDataOutput.events, - equalTo( - Arrays.asList( - new StreamRecord<>(1, 1), - new StreamRecord<>(1, 2), - new StreamRecord<>(1, 3), - new StreamRecord<>(2, 1), - new StreamRecord<>(2, 2), - new StreamRecord<>(2, 3)))); + assertThat(collectingDataOutput.events) + .containsExactly( + new StreamRecord<>(1, 1), + new StreamRecord<>(1, 2), + new StreamRecord<>(1, 3), + new StreamRecord<>(2, 1), + new StreamRecord<>(2, 2), + new StreamRecord<>(2, 3)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/VariableLengthKeyAndValueSerializerTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/VariableLengthKeyAndValueSerializerTest.java index 2ff1999c817ba..6dd4042e5dd95 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/VariableLengthKeyAndValueSerializerTest.java +++ 
b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sort/VariableLengthKeyAndValueSerializerTest.java @@ -56,14 +56,14 @@ protected Tuple2>[] getTestData() { @Override @Test - public void testConfigSnapshotInstantiation() { + protected void testConfigSnapshotInstantiation() { assertThatThrownBy(() -> super.testConfigSnapshotInstantiation()) .isInstanceOf(UnsupportedOperationException.class); } @Override @Test - public void testSnapshotConfigurationAndReconfigure() throws Exception { + protected void testSnapshotConfigurationAndReconfigure() { assertThatThrownBy(() -> super.testSnapshotConfigurationAndReconfigure()) .isInstanceOf(UnsupportedOperationException.class); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionInternalTimeServiceTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionInternalTimeServiceTest.java index 933efd58b0f5f..72ce7e4b6aaf7 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionInternalTimeServiceTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionInternalTimeServiceTest.java @@ -41,39 +41,28 @@ import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.tasks.StreamTaskCancellationContext; import org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService; -import org.apache.flink.util.TestLogger; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.BiConsumer; import java.util.function.Consumer; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.fail; /** * Tests for {@link BatchExecutionInternalTimeServiceManager} and {@link * BatchExecutionInternalTimeService}. 
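 *
 * <p>The {@code ExpectedException} rule is removed; each expected failure is asserted inline at
 * the exact throwing call. The replacement pattern, sketched (illustrative only):
 *
 * <pre>{@code
 * // before: rule set up ahead of the call
 * expectedException.expect(UnsupportedOperationException.class);
 * expectedException.expectMessage("...");
 *
 * // after: assertion wraps the call itself
 * assertThatThrownBy(() -> timeService.forEachEventTimeTimer((o, t) -> {}))
 *         .isInstanceOf(UnsupportedOperationException.class)
 *         .hasMessageContaining(
 *                 "The BatchExecutionInternalTimeService should not be used in State Processor API");
 * }</pre>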
*/ -public class BatchExecutionInternalTimeServiceTest extends TestLogger { +class BatchExecutionInternalTimeServiceTest { public static final IntSerializer KEY_SERIALIZER = new IntSerializer(); - @Rule public ExpectedException expectedException = ExpectedException.none(); - @Test - public void testBatchExecutionManagerCanBeInstantiatedWithBatchStateBackend() throws Exception { - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage( - "Batch execution specific time service can work only with BatchExecutionKeyedStateBackend"); - + void testBatchExecutionManagerCanBeInstantiatedWithBatchStateBackend() throws Exception { MockEnvironment mockEnvironment = MockEnvironment.builder().build(); AbstractStateBackend abstractStateBackend = new MemoryStateBackend(); JobID jobID = new JobID(); @@ -94,47 +83,59 @@ public void testBatchExecutionManagerCanBeInstantiatedWithBatchStateBackend() th new UnregisteredMetricsGroup(), Collections.emptyList(), cancelStreamRegistry)); - BatchExecutionInternalTimeServiceManager.create( - stateBackend, - this.getClass().getClassLoader(), - new DummyKeyContext(), - new TestProcessingTimeService(), - Collections.emptyList(), - StreamTaskCancellationContext.alwaysRunning()); + + assertThatThrownBy( + () -> + BatchExecutionInternalTimeServiceManager.create( + stateBackend, + this.getClass().getClassLoader(), + new DummyKeyContext(), + new TestProcessingTimeService(), + Collections.emptyList(), + StreamTaskCancellationContext.alwaysRunning())) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "Batch execution specific time service can work only with BatchExecutionKeyedStateBackend"); } @Test - public void testForEachEventTimeTimerUnsupported() { - expectedException.expect(UnsupportedOperationException.class); - expectedException.expectMessage( - "The BatchExecutionInternalTimeService should not be used in State Processor API"); - + void testForEachEventTimeTimerUnsupported() { BatchExecutionInternalTimeService timeService = new BatchExecutionInternalTimeService<>( new TestProcessingTimeService(), LambdaTrigger.eventTimeTrigger(timer -> {})); - timeService.forEachEventTimeTimer( - (o, aLong) -> fail("The forEachEventTimeTimer() should not be supported")); + assertThatThrownBy( + () -> + timeService.forEachEventTimeTimer( + (o, aLong) -> + fail( + "The forEachEventTimeTimer() should not be supported"))) + .isInstanceOf(UnsupportedOperationException.class) + .hasMessageContaining( + "The BatchExecutionInternalTimeService should not be used in State Processor API"); } @Test - public void testForEachProcessingTimeTimerUnsupported() { - expectedException.expect(UnsupportedOperationException.class); - expectedException.expectMessage( - "The BatchExecutionInternalTimeService should not be used in State Processor API"); - + void testForEachProcessingTimeTimerUnsupported() { BatchExecutionInternalTimeService timeService = new BatchExecutionInternalTimeService<>( new TestProcessingTimeService(), LambdaTrigger.eventTimeTrigger(timer -> {})); - timeService.forEachEventTimeTimer( - (o, aLong) -> fail("The forEachProcessingTimeTimer() should not be supported")); + assertThatThrownBy( + () -> + timeService.forEachProcessingTimeTimer( + (o, aLong) -> + fail( + "The forEachProcessingTimeTimer() should not be supported"))) + .isInstanceOf(UnsupportedOperationException.class) + .hasMessageContaining( + "The BatchExecutionInternalTimeService should not be used in State Processor API"); } @Test - public void testFiringEventTimeTimers()
throws Exception { + void testFiringEventTimeTimers() throws Exception { BatchExecutionKeyedStateBackend keyedStatedBackend = new BatchExecutionKeyedStateBackend<>( KEY_SERIALIZER, new KeyGroupRange(0, 1), new ExecutionConfig()); @@ -166,11 +167,11 @@ public void testFiringEventTimeTimers() throws Exception { // changing the current key fires all timers keyedStatedBackend.setCurrentKey(2); - assertThat(timers, equalTo(Collections.singletonList(150L))); + assertThat(timers).containsExactly(150L); } @Test - public void testSettingSameKeyDoesNotFireTimers() { + void testSettingSameKeyDoesNotFireTimers() { BatchExecutionKeyedStateBackend keyedStatedBackend = new BatchExecutionKeyedStateBackend<>( KEY_SERIALIZER, new KeyGroupRange(0, 1), new ExecutionConfig()); @@ -195,11 +196,11 @@ public void testSettingSameKeyDoesNotFireTimers() { timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, 123); keyedStatedBackend.setCurrentKey(1); - assertThat(timers, equalTo(Collections.emptyList())); + assertThat(timers).isEmpty(); } @Test - public void testCurrentWatermark() throws Exception { + void testCurrentWatermark() throws Exception { BatchExecutionKeyedStateBackend keyedStatedBackend = new BatchExecutionKeyedStateBackend<>( KEY_SERIALIZER, new KeyGroupRange(0, 1), new ExecutionConfig()); @@ -216,7 +217,7 @@ public void testCurrentWatermark() throws Exception { TriggerWithTimerServiceAccess eventTimeTrigger = TriggerWithTimerServiceAccess.eventTimeTrigger( (timer, timerService) -> { - assertThat(timerService.currentWatermark(), equalTo(Long.MAX_VALUE)); + assertThat(timerService.currentWatermark()).isEqualTo(Long.MAX_VALUE); timers.add(timer.getTimestamp()); }); InternalTimerService timerService = @@ -224,28 +225,28 @@ public void testCurrentWatermark() throws Exception { "test", KEY_SERIALIZER, new VoidNamespaceSerializer(), eventTimeTrigger); eventTimeTrigger.setTimerService(timerService); - assertThat(timerService.currentWatermark(), equalTo(Long.MIN_VALUE)); + assertThat(timerService.currentWatermark()).isEqualTo(Long.MIN_VALUE); keyedStatedBackend.setCurrentKey(1); timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, 123); - assertThat(timerService.currentWatermark(), equalTo(Long.MIN_VALUE)); + assertThat(timerService.currentWatermark()).isEqualTo(Long.MIN_VALUE); // advancing the watermark to a value different than Long.MAX_VALUE should have no effect timeServiceManager.advanceWatermark(new Watermark(1000)); - assertThat(timerService.currentWatermark(), equalTo(Long.MIN_VALUE)); + assertThat(timerService.currentWatermark()).isEqualTo(Long.MIN_VALUE); // changing the current key fires all timers keyedStatedBackend.setCurrentKey(2); - assertThat(timerService.currentWatermark(), equalTo(Long.MIN_VALUE)); + assertThat(timerService.currentWatermark()).isEqualTo(Long.MIN_VALUE); timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, 124); // advancing the watermark to Long.MAX_VALUE should fire remaining key timeServiceManager.advanceWatermark(Watermark.MAX_WATERMARK); - assertThat(timers, equalTo(Arrays.asList(123L, 124L))); + assertThat(timers).containsExactly(123L, 124L); } @Test - public void testProcessingTimeTimers() { + void testProcessingTimeTimers() { BatchExecutionKeyedStateBackend keyedStatedBackend = new BatchExecutionKeyedStateBackend<>( KEY_SERIALIZER, new KeyGroupRange(0, 1), new ExecutionConfig()); @@ -272,15 +273,15 @@ public void testProcessingTimeTimers() { timerService.registerProcessingTimeTimer(VoidNamespace.INSTANCE, 150); // we should never register physical 
timers - assertThat(processingTimeService.getNumActiveTimers(), equalTo(0)); + assertThat(processingTimeService.getNumActiveTimers()).isZero(); // changing the current key fires all timers keyedStatedBackend.setCurrentKey(2); - assertThat(timers, equalTo(Collections.singletonList(150L))); + assertThat(timers).containsExactly(150L); } @Test - public void testIgnoringEventTimeTimersFromWithinCallback() { + void testIgnoringEventTimeTimersFromWithinCallback() { BatchExecutionKeyedStateBackend keyedStatedBackend = new BatchExecutionKeyedStateBackend<>( KEY_SERIALIZER, new KeyGroupRange(0, 1), new ExecutionConfig()); @@ -311,16 +312,16 @@ public void testIgnoringEventTimeTimersFromWithinCallback() { timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, 150); // we should never register physical timers - assertThat(processingTimeService.getNumActiveTimers(), equalTo(0)); + assertThat(processingTimeService.getNumActiveTimers()).isZero(); // changing the current key fires all timers keyedStatedBackend.setCurrentKey(2); // We check that the timer from the callback is ignored - assertThat(timers, equalTo(Collections.singletonList(150L))); + assertThat(timers).containsExactly(150L); } @Test - public void testIgnoringProcessingTimeTimersFromWithinCallback() { + void testIgnoringProcessingTimeTimersFromWithinCallback() { BatchExecutionKeyedStateBackend keyedStatedBackend = new BatchExecutionKeyedStateBackend<>( KEY_SERIALIZER, new KeyGroupRange(0, 1), new ExecutionConfig()); @@ -351,12 +352,12 @@ public void testIgnoringProcessingTimeTimersFromWithinCallback() { timerService.registerProcessingTimeTimer(VoidNamespace.INSTANCE, 150); // we should never register physical timers - assertThat(processingTimeService.getNumActiveTimers(), equalTo(0)); + assertThat(processingTimeService.getNumActiveTimers()).isZero(); // changing the current key fires all timers keyedStatedBackend.setCurrentKey(2); // We check that the timer from the callback is ignored - assertThat(timers, equalTo(Collections.singletonList(150L))); + assertThat(timers).containsExactly(150L); } private static class TriggerWithTimerServiceAccess implements Triggerable { @@ -378,14 +379,13 @@ public static TriggerWithTimerServiceAccess eventTimeTrigger( return new TriggerWithTimerServiceAccess<>( eventTimeHandler, (timer, timeService) -> - Assert.fail("We did not expect processing timer to be triggered.")); + fail("We did not expect processing timer to be triggered.")); } public static TriggerWithTimerServiceAccess processingTimeTrigger( BiConsumer, InternalTimerService> processingTimeHandler) { return new TriggerWithTimerServiceAccess<>( - (timer, timeService) -> - Assert.fail("We did not expect event timer to be triggered."), + (timer, timeService) -> fail("We did not expect event timer to be triggered."), processingTimeHandler); } @@ -413,13 +413,13 @@ public static LambdaTrigger eventTimeTrigger( Consumer> eventTimeHandler) { return new LambdaTrigger<>( eventTimeHandler, - timer -> Assert.fail("We did not expect processing timer to be triggered.")); + timer -> fail("We did not expect processing timer to be triggered.")); } public static LambdaTrigger processingTimeTrigger( Consumer> processingTimeHandler) { return new LambdaTrigger<>( - timer -> Assert.fail("We did not expect event timer to be triggered."), + timer -> fail("We did not expect event timer to be triggered."), processingTimeHandler); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendTest.java 
b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendTest.java index 86ccb2a26c64d..70232ddfcda7e 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendTest.java @@ -42,11 +42,8 @@ import org.apache.flink.runtime.state.internal.InternalAggregatingState; import org.apache.flink.runtime.state.internal.InternalListState; import org.apache.flink.runtime.state.internal.InternalReducingState; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Arrays; @@ -57,14 +54,8 @@ import java.util.concurrent.ThreadLocalRandom; import static java.util.Arrays.asList; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * Tests copied over from {@link StateBackendTestBase} and adjusted to make sense for a single key @@ -75,9 +66,7 @@ * queryable state etc. Moreover the tests had to be adjusted as the state backend assumes keys are * grouped. */ -public class BatchExecutionStateBackendTest extends TestLogger { - - @Rule public final ExpectedException expectedException = ExpectedException.none(); +class BatchExecutionStateBackendTest { private CheckpointableKeyedStateBackend createKeyedBackend( TypeSerializer keySerializer) { @@ -90,7 +79,7 @@ private CheckpointableKeyedStateBackend createKeyedBackend( * {@code null}. */ @Test - public void testListStateAddNull() throws Exception { + void testListStateAddNull() throws Exception { CheckpointableKeyedStateBackend keyedBackend = createKeyedBackend(StringSerializer.INSTANCE); @@ -103,10 +92,9 @@ public void testListStateAddNull() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("abc"); - assertNull(state.get()); + assertThat(state.get()).isNull(); - expectedException.expect(NullPointerException.class); - state.add(null); + assertThatThrownBy(() -> state.add(null)).isInstanceOf(NullPointerException.class); } finally { keyedBackend.close(); keyedBackend.dispose(); @@ -118,7 +106,7 @@ public void testListStateAddNull() throws Exception { * ListState#addAll(List)} to be called with {@code null} entries in the list of entries to add. 
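 *
 * <p>Note the scoping the migration forces in this test and its siblings: the input list is
 * built first, and only the single throwing call goes inside the {@code assertThatThrownBy}
 * lambda (sketch mirroring the hunk below):
 *
 * <pre>{@code
 * List<Long> adding = new ArrayList<>();
 * adding.add(3L);
 * adding.add(null);
 * adding.add(5L);
 *
 * assertThatThrownBy(() -> state.addAll(adding)).isInstanceOf(NullPointerException.class);
 * }</pre>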
*/ @Test - public void testListStateAddAllNullEntries() throws Exception { + void testListStateAddAllNullEntries() throws Exception { CheckpointableKeyedStateBackend keyedBackend = createKeyedBackend(StringSerializer.INSTANCE); @@ -131,15 +119,14 @@ public void testListStateAddAllNullEntries() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("abc"); - assertNull(state.get()); - - expectedException.expect(NullPointerException.class); + assertThat(state.get()).isNull(); List adding = new ArrayList<>(); adding.add(3L); adding.add(null); adding.add(5L); - state.addAll(adding); + + assertThatThrownBy(() -> state.addAll(adding)).isInstanceOf(NullPointerException.class); } finally { keyedBackend.close(); keyedBackend.dispose(); @@ -151,7 +138,7 @@ public void testListStateAddAllNullEntries() throws Exception { * ListState#addAll(List)} to be called with {@code null}. */ @Test - public void testListStateAddAllNull() throws Exception { + void testListStateAddAllNull() throws Exception { CheckpointableKeyedStateBackend keyedBackend = createKeyedBackend(StringSerializer.INSTANCE); @@ -164,10 +151,9 @@ public void testListStateAddAllNull() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("abc"); - assertNull(state.get()); + assertThat(state.get()).isNull(); - expectedException.expect(NullPointerException.class); - state.addAll(null); + assertThatThrownBy(() -> state.addAll(null)).isInstanceOf(NullPointerException.class); } finally { keyedBackend.close(); keyedBackend.dispose(); @@ -179,7 +165,7 @@ public void testListStateAddAllNull() throws Exception { * ListState#update(List)} to be called with {@code null} entries in the list of entries to add. */ @Test - public void testListStateUpdateNullEntries() throws Exception { + void testListStateUpdateNullEntries() throws Exception { CheckpointableKeyedStateBackend keyedBackend = createKeyedBackend(StringSerializer.INSTANCE); @@ -192,15 +178,14 @@ public void testListStateUpdateNullEntries() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("abc"); - assertNull(state.get()); - - expectedException.expect(NullPointerException.class); + assertThat(state.get()).isNull(); List adding = new ArrayList<>(); adding.add(3L); adding.add(null); adding.add(5L); - state.update(adding); + + assertThatThrownBy(() -> state.update(adding)).isInstanceOf(NullPointerException.class); } finally { keyedBackend.close(); keyedBackend.dispose(); @@ -212,7 +197,7 @@ public void testListStateUpdateNullEntries() throws Exception { * ListState#update(List)} to be called with {@code null}. 
*/ @Test - public void testListStateUpdateNull() throws Exception { + void testListStateUpdateNull() throws Exception { CheckpointableKeyedStateBackend keyedBackend = createKeyedBackend(StringSerializer.INSTANCE); @@ -225,10 +210,9 @@ public void testListStateUpdateNull() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("abc"); - assertNull(state.get()); + assertThat(state.get()).isNull(); - expectedException.expect(NullPointerException.class); - state.update(null); + assertThatThrownBy(() -> state.update(null)).isInstanceOf(NullPointerException.class); } finally { keyedBackend.close(); keyedBackend.dispose(); @@ -236,7 +220,7 @@ public void testListStateUpdateNull() throws Exception { } @Test - public void testListStateAPIs() throws Exception { + void testListStateAPIs() throws Exception { final ListStateDescriptor stateDescr = new ListStateDescriptor<>("my-state", Long.class); @@ -248,28 +232,28 @@ public void testListStateAPIs() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("g"); - assertNull(state.get()); - assertNull(state.get()); + assertThat(state.get()).isNull(); + assertThat(state.get()).isNull(); state.addAll(Collections.emptyList()); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.addAll(Arrays.asList(3L, 4L)); - assertThat(state.get(), containsInAnyOrder(3L, 4L)); - assertThat(state.get(), containsInAnyOrder(3L, 4L)); + assertThat(state.get()).containsExactlyInAnyOrder(3L, 4L); + assertThat(state.get()).containsExactlyInAnyOrder(3L, 4L); state.addAll(new ArrayList<>()); - assertThat(state.get(), containsInAnyOrder(3L, 4L)); + assertThat(state.get()).containsExactlyInAnyOrder(3L, 4L); state.addAll(Arrays.asList(5L, 6L)); - assertThat(state.get(), containsInAnyOrder(3L, 4L, 5L, 6L)); + assertThat(state.get()).containsExactlyInAnyOrder(3L, 4L, 5L, 6L); state.addAll(new ArrayList<>()); - assertThat(state.get(), containsInAnyOrder(3L, 4L, 5L, 6L)); + assertThat(state.get()).containsExactlyInAnyOrder(3L, 4L, 5L, 6L); - assertThat(state.get(), containsInAnyOrder(3L, 4L, 5L, 6L)); + assertThat(state.get()).containsExactlyInAnyOrder(3L, 4L, 5L, 6L); state.update(Arrays.asList(1L, 2L)); - assertThat(state.get(), containsInAnyOrder(1L, 2L)); + assertThat(state.get()).containsExactlyInAnyOrder(1L, 2L); } } @Test - public void testListStateMergingOverThreeNamespaces() throws Exception { + void testListStateMergingOverThreeNamespaces() throws Exception { final ListStateDescriptor stateDescr = new ListStateDescriptor<>("my-state", Long.class); @@ -301,19 +285,19 @@ public void testListStateMergingOverThreeNamespaces() throws Exception { state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertThat(state.get(), containsInAnyOrder(11L, 22L, 33L, 44L, 55L)); + assertThat(state.get()).containsExactlyInAnyOrder(11L, 22L, 33L, 44L, 55L); // make sure all lists / maps are cleared keyedBackend.setCurrentKey("abc"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testListStateMergingWithEmptyNamespace() throws Exception { + void testListStateMergingWithEmptyNamespace() throws Exception { final ListStateDescriptor stateDescr = new ListStateDescriptor<>("my-state", Long.class); @@ -344,19 +328,19 @@ public void testListStateMergingWithEmptyNamespace() throws Exception { 
keyedBackend.setCurrentKey("def"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertThat(state.get(), containsInAnyOrder(11L, 22L, 33L, 44L, 55L)); + assertThat(state.get()).containsExactlyInAnyOrder(11L, 22L, 33L, 44L, 55L); // make sure all lists / maps are cleared keyedBackend.setCurrentKey("def"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testListStateMergingEmpty() throws Exception { + void testListStateMergingEmpty() throws Exception { final ListStateDescriptor stateDescr = new ListStateDescriptor<>("my-state", Long.class); @@ -377,19 +361,19 @@ public void testListStateMergingEmpty() throws Exception { keyedBackend.setCurrentKey("ghi"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertNull(state.get()); + assertThat(state.get()).isNull(); // make sure all lists / maps are cleared keyedBackend.setCurrentKey("ghi"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testListStateMergingAllInTargetNamespace() throws Exception { + void testListStateMergingAllInTargetNamespace() throws Exception { final ListStateDescriptor stateDescr = new ListStateDescriptor<>("my-state", Long.class); @@ -418,17 +402,17 @@ public void testListStateMergingAllInTargetNamespace() throws Exception { keyedBackend.setCurrentKey("jkl"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertThat(state.get(), containsInAnyOrder(11L, 22L, 33L, 44L, 55L)); + assertThat(state.get()).containsExactlyInAnyOrder(11L, 22L, 33L, 44L, 55L); keyedBackend.setCurrentKey("jkl"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testListStateMergingInASingleNamespace() throws Exception { + void testListStateMergingInASingleNamespace() throws Exception { final ListStateDescriptor stateDescr = new ListStateDescriptor<>("my-state", Long.class); @@ -457,19 +441,19 @@ public void testListStateMergingInASingleNamespace() throws Exception { keyedBackend.setCurrentKey("mno"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertThat(state.get(), containsInAnyOrder(11L, 22L, 33L, 44L, 55L)); + assertThat(state.get()).containsExactlyInAnyOrder(11L, 22L, 33L, 44L, 55L); // make sure all lists / maps are cleared keyedBackend.setCurrentKey("mno"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testReducingStateAddAndGet() throws Exception { + void testReducingStateAddAndGet() throws Exception { final ReducingStateDescriptor stateDescr = new ReducingStateDescriptor<>("my-state", Long::sum, Long.class); @@ -481,18 +465,18 @@ public void testReducingStateAddAndGet() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("def"); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.add(17L); state.add(11L); - assertEquals(28L, state.get().longValue()); + assertThat(state.get()).isEqualTo(28L); keyedBackend.setCurrentKey("def"); - assertEquals(28L, state.get().longValue()); + assertThat(state.get()).isEqualTo(28L); state.clear(); - assertNull(state.get()); 
+ assertThat(state.get()).isNull(); keyedBackend.setCurrentKey("g"); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.add(1L); state.add(2L); @@ -502,12 +486,12 @@ public void testReducingStateAddAndGet() throws Exception { state.add(1L); keyedBackend.setCurrentKey("g"); - assertEquals(9L, state.get().longValue()); + assertThat(state.get()).isEqualTo(9L); } } @Test - public void testReducingStateMergingOverThreeNamespaces() throws Exception { + void testReducingStateMergingOverThreeNamespaces() throws Exception { final ReducingStateDescriptor stateDescr = new ReducingStateDescriptor<>("my-state", Long::sum, Long.class); @@ -542,17 +526,17 @@ public void testReducingStateMergingOverThreeNamespaces() throws Exception { keyedBackend.setCurrentKey("abc"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("abc"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testReducingStateMergingWithEmpty() throws Exception { + void testReducingStateMergingWithEmpty() throws Exception { final ReducingStateDescriptor stateDescr = new ReducingStateDescriptor<>("my-state", Long::sum, Long.class); @@ -585,17 +569,17 @@ public void testReducingStateMergingWithEmpty() throws Exception { keyedBackend.setCurrentKey("def"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("def"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testReducingStateMergingEmpty() throws Exception { + void testReducingStateMergingEmpty() throws Exception { final ReducingStateDescriptor stateDescr = new ReducingStateDescriptor<>("my-state", Long::sum, Long.class); @@ -616,12 +600,12 @@ public void testReducingStateMergingEmpty() throws Exception { keyedBackend.setCurrentKey("ghi"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testReducingStateMergingInTargetNamespace() throws Exception { + void testReducingStateMergingInTargetNamespace() throws Exception { final ReducingStateDescriptor stateDescr = new ReducingStateDescriptor<>("my-state", Long::sum, Long.class); @@ -652,17 +636,17 @@ public void testReducingStateMergingInTargetNamespace() throws Exception { keyedBackend.setCurrentKey("jkl"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("jkl"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testReducingStateMergingInASingleNamespace() throws Exception { + void testReducingStateMergingInASingleNamespace() throws Exception { final ReducingStateDescriptor stateDescr = new ReducingStateDescriptor<>("my-state", Long::sum, Long.class); @@ -693,17 +677,17 @@ public void testReducingStateMergingInASingleNamespace() throws Exception { keyedBackend.setCurrentKey("mno"); 
state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("mno"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateAddAndGetWithMutableAccumulator() throws Exception { + void testAggregatingStateAddAndGetWithMutableAccumulator() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( @@ -716,21 +700,21 @@ public void testAggregatingStateAddAndGetWithMutableAccumulator() throws Excepti VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("def"); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.add(17L); state.add(11L); - assertEquals(28L, state.get().longValue()); + assertThat(state.get()).isEqualTo(28L); keyedBackend.setCurrentKey("def"); - assertEquals(28L, state.get().longValue()); + assertThat(state.get()).isEqualTo(28L); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); keyedBackend.setCurrentKey("def"); - assertNull(state.get()); + assertThat(state.get()).isNull(); keyedBackend.setCurrentKey("g"); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.add(1L); state.add(2L); @@ -740,15 +724,14 @@ public void testAggregatingStateAddAndGetWithMutableAccumulator() throws Excepti state.add(1L); keyedBackend.setCurrentKey("g"); - assertEquals(9L, state.get().longValue()); + assertThat(state.get()).isEqualTo(9L); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithMutableAccumulatorOverThreeNamespaces() - throws Exception { + void testAggregatingStateMergingWithMutableAccumulatorOverThreeNamespaces() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new MutableAggregatingAddingFunction(), MutableLong.class); @@ -783,17 +766,17 @@ public void testAggregatingStateMergingWithMutableAccumulatorOverThreeNamespaces keyedBackend.setCurrentKey("abc"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("abc"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithMutableAccumulatorWithEmpty() throws Exception { + void testAggregatingStateMergingWithMutableAccumulatorWithEmpty() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new MutableAggregatingAddingFunction(), MutableLong.class); @@ -826,17 +809,17 @@ public void testAggregatingStateMergingWithMutableAccumulatorWithEmpty() throws keyedBackend.setCurrentKey("def"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("def"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithMutableAccumulatorEmpty() throws Exception { + void 
testAggregatingStateMergingWithMutableAccumulatorEmpty() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new MutableAggregatingAddingFunction(), MutableLong.class); @@ -857,13 +840,12 @@ public void testAggregatingStateMergingWithMutableAccumulatorEmpty() throws Exce keyedBackend.setCurrentKey("ghi"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithMutableAccumulatorInTargetNamespace() - throws Exception { + void testAggregatingStateMergingWithMutableAccumulatorInTargetNamespace() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new MutableAggregatingAddingFunction(), MutableLong.class); @@ -894,18 +876,17 @@ public void testAggregatingStateMergingWithMutableAccumulatorInTargetNamespace() keyedBackend.setCurrentKey("jkl"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("jkl"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithMutableAccumulatorInASingleNamespace() - throws Exception { + void testAggregatingStateMergingWithMutableAccumulatorInASingleNamespace() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new MutableAggregatingAddingFunction(), MutableLong.class); @@ -936,17 +917,17 @@ public void testAggregatingStateMergingWithMutableAccumulatorInASingleNamespace( keyedBackend.setCurrentKey("mno"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("mno"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateAddAndGetWithImmutableAccumulator() throws Exception { + void testAggregatingStateAddAndGetWithImmutableAccumulator() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( @@ -959,18 +940,18 @@ public void testAggregatingStateAddAndGetWithImmutableAccumulator() throws Excep VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescr); keyedBackend.setCurrentKey("def"); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.add(17L); state.add(11L); - assertEquals(28L, state.get().longValue()); + assertThat(state.get()).isEqualTo(28L); keyedBackend.setCurrentKey("def"); - assertEquals(28L, state.get().longValue()); + assertThat(state.get()).isEqualTo(28L); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); keyedBackend.setCurrentKey("g"); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.add(1L); state.add(2L); @@ -980,15 +961,14 @@ public void testAggregatingStateAddAndGetWithImmutableAccumulator() throws Excep state.add(1L); keyedBackend.setCurrentKey("g"); - assertEquals(9L, state.get().longValue()); + assertThat(state.get()).isEqualTo(9L); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - 
public void testAggregatingStateMergingWithImmutableAccumulatorOverThreeNamespaces() - throws Exception { + void testAggregatingStateMergingWithImmutableAccumulatorOverThreeNamespaces() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new ImmutableAggregatingAddingFunction(), Long.class); @@ -1023,17 +1003,17 @@ public void testAggregatingStateMergingWithImmutableAccumulatorOverThreeNamespac keyedBackend.setCurrentKey("abc"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("abc"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithImmutableAccumulatorWithEmpty() throws Exception { + void testAggregatingStateMergingWithImmutableAccumulatorWithEmpty() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new ImmutableAggregatingAddingFunction(), Long.class); @@ -1066,17 +1046,17 @@ public void testAggregatingStateMergingWithImmutableAccumulatorWithEmpty() throw keyedBackend.setCurrentKey("def"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("def"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithImmutableAccumulatorEmpty() throws Exception { + void testAggregatingStateMergingWithImmutableAccumulatorEmpty() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new ImmutableAggregatingAddingFunction(), Long.class); @@ -1097,13 +1077,12 @@ public void testAggregatingStateMergingWithImmutableAccumulatorEmpty() throws Ex keyedBackend.setCurrentKey("ghi"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithImmutableAccumulatorInTargetNamespace() - throws Exception { + void testAggregatingStateMergingWithImmutableAccumulatorInTargetNamespace() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new ImmutableAggregatingAddingFunction(), Long.class); @@ -1134,18 +1113,17 @@ public void testAggregatingStateMergingWithImmutableAccumulatorInTargetNamespace keyedBackend.setCurrentKey("jkl"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("jkl"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testAggregatingStateMergingWithImmutableAccumulatorInASingleNamespace() - throws Exception { + void testAggregatingStateMergingWithImmutableAccumulatorInASingleNamespace() throws Exception { final AggregatingStateDescriptor stateDescr = new AggregatingStateDescriptor<>( "my-state", new ImmutableAggregatingAddingFunction(), Long.class); @@ 
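[These merge hunks exercise AggregatingState.mergeNamespaces, whose result is defined by the test's adding AggregateFunction. A sketch of the kind of immutable-accumulator function the ImmutableAggregatingAddingFunction name suggests; this is an illustrative reconstruction under that assumption, not the actual test helper:

    import org.apache.flink.api.common.functions.AggregateFunction;

    class ImmutableAddingSketch implements AggregateFunction<Long, Long, Long> {

        @Override
        public Long createAccumulator() {
            return 0L; // fresh accumulator per namespace
        }

        @Override
        public Long add(Long value, Long accumulator) {
            return accumulator + value; // returns a new value: the accumulator stays immutable
        }

        @Override
        public Long getResult(Long accumulator) {
            return accumulator;
        }

        @Override
        public Long merge(Long a, Long b) {
            return a + b; // mergeNamespaces folds per-namespace accumulators with this
        }
    }
]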
-1176,17 +1154,17 @@ public void testAggregatingStateMergingWithImmutableAccumulatorInASingleNamespac keyedBackend.setCurrentKey("mno"); state.mergeNamespaces(namespace1, asList(namespace2, namespace3)); state.setCurrentNamespace(namespace1); - assertEquals(expectedResult, state.get()); + assertThat(state.get()).isEqualTo(expectedResult); keyedBackend.setCurrentKey("mno"); state.setCurrentNamespace(namespace1); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); } } @Test - public void testMapStateIsEmpty() throws Exception { + void testMapStateIsEmpty() throws Exception { MapStateDescriptor kvId = new MapStateDescriptor<>("id", Integer.class, Long.class); @@ -1198,19 +1176,19 @@ public void testMapStateIsEmpty() throws Exception { backend.getPartitionedState( VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId); backend.setCurrentKey(1); - assertTrue(state.isEmpty()); + assertThat(state.isEmpty()).isTrue(); int stateSize = 1024; for (int i = 0; i < stateSize; i++) { state.put(i, i * 2L); - assertFalse(state.isEmpty()); + assertThat(state.isEmpty()).isFalse(); } for (int i = 0; i < stateSize; i++) { - assertFalse(state.isEmpty()); + assertThat(state.isEmpty()).isFalse(); state.remove(i); } - assertTrue(state.isEmpty()); + assertThat(state.isEmpty()).isTrue(); } finally { backend.dispose(); @@ -1222,7 +1200,7 @@ public void testMapStateIsEmpty() throws Exception { * more details. */ @Test - public void testMapStateIteratorArbitraryAccess() throws Exception { + void testMapStateIteratorArbitraryAccess() throws Exception { MapStateDescriptor kvId = new MapStateDescriptor<>("id", Integer.class, Long.class); @@ -1242,16 +1220,12 @@ public void testMapStateIteratorArbitraryAccess() throws Exception { int iteratorCount = 0; while (iterator.hasNext()) { Map.Entry entry = iterator.next(); - assertEquals(iteratorCount, (int) entry.getKey()); + assertThat(entry.getKey()).isEqualTo(iteratorCount); switch (ThreadLocalRandom.current().nextInt() % 3) { case 0: // remove twice iterator.remove(); - try { - iterator.remove(); - fail(); - } catch (IllegalStateException e) { - // ignore expected exception - } + assertThatThrownBy(iterator::remove) + .isInstanceOf(IllegalStateException.class); break; case 1: // hasNext -> remove iterator.hasNext(); @@ -1262,7 +1236,7 @@ public void testMapStateIteratorArbitraryAccess() throws Exception { } iteratorCount++; } - assertEquals(stateSize, iteratorCount); + assertThat(iteratorCount).isEqualTo(stateSize); } finally { backend.dispose(); } @@ -1270,7 +1244,7 @@ public void testMapStateIteratorArbitraryAccess() throws Exception { /** Verify that {@link ValueStateDescriptor} allows {@code null} as default. */ @Test - public void testValueStateNullAsDefaultValue() throws Exception { + void testValueStateNullAsDefaultValue() throws Exception { CheckpointableKeyedStateBackend backend = createKeyedBackend(IntSerializer.INSTANCE); @@ -1281,20 +1255,20 @@ public void testValueStateNullAsDefaultValue() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId); backend.setCurrentKey(1); - assertNull(state.value()); + assertThat(state.value()).isNull(); state.update("Ciao"); - assertEquals("Ciao", state.value()); + assertThat(state.value()).isEqualTo("Ciao"); state.clear(); - assertNull(state.value()); + assertThat(state.value()).isNull(); backend.dispose(); } /** Verify that an empty {@code ValueState} will yield the default value. 
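[The map-state iterator hunk above also replaces the old try/fail/catch idiom with assertThatThrownBy, which both asserts that the call throws and lets the expected type be checked fluently. A self-contained sketch of that mapping, using a hypothetical test class and plain JDK collections:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Iterator;

    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    class ThrownByMigrationSketch {

        @Test
        void secondRemoveWithoutNextFails() {
            Iterator<String> iterator = new ArrayList<>(Arrays.asList("a", "b")).iterator();
            iterator.next();
            iterator.remove();

            // was: try { iterator.remove(); fail(); } catch (IllegalStateException e) { /* expected */ }
            assertThatThrownBy(iterator::remove).isInstanceOf(IllegalStateException.class);
        }
    }
]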
*/ @Test - public void testValueStateDefaultValue() throws Exception { + void testValueStateDefaultValue() throws Exception { CheckpointableKeyedStateBackend backend = createKeyedBackend(IntSerializer.INSTANCE); @@ -1305,20 +1279,20 @@ public void testValueStateDefaultValue() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId); backend.setCurrentKey(1); - assertEquals("Hello", state.value()); + assertThat(state.value()).isEqualTo("Hello"); state.update("Ciao"); - assertEquals("Ciao", state.value()); + assertThat(state.value()).isEqualTo("Ciao"); state.clear(); - assertEquals("Hello", state.value()); + assertThat(state.value()).isEqualTo("Hello"); backend.dispose(); } /** Verify that an empty {@code ReduceState} yields {@code null}. */ @Test - public void testReducingStateDefaultValue() throws Exception { + void testReducingStateDefaultValue() throws Exception { CheckpointableKeyedStateBackend backend = createKeyedBackend(IntSerializer.INSTANCE); @@ -1330,20 +1304,20 @@ public void testReducingStateDefaultValue() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId); backend.setCurrentKey(1); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.add("Ciao"); - assertEquals("Ciao", state.get()); + assertThat(state.get()).isEqualTo("Ciao"); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); backend.dispose(); } /** Verify that an empty {@code ListState} yields {@code null}. */ @Test - public void testListStateDefaultValue() throws Exception { + void testListStateDefaultValue() throws Exception { CheckpointableKeyedStateBackend backend = createKeyedBackend(IntSerializer.INSTANCE); @@ -1354,20 +1328,20 @@ public void testListStateDefaultValue() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId); backend.setCurrentKey(1); - assertNull(state.get()); + assertThat(state.get()).isNull(); state.update(Arrays.asList("Ciao", "Bello")); - assertThat(state.get(), containsInAnyOrder("Ciao", "Bello")); + assertThat(state.get()).containsExactlyInAnyOrder("Ciao", "Bello"); state.clear(); - assertNull(state.get()); + assertThat(state.get()).isNull(); backend.dispose(); } /** Verify that an empty {@code MapState} yields {@code null}. 
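[Note the Hamcrest-to-AssertJ mapping in the ListState hunk above: Hamcrest's containsInAnyOrder checks the exact element set regardless of order, so its faithful AssertJ counterpart is containsExactlyInAnyOrder rather than the weaker contains. A minimal sketch with a hypothetical test class:

    import java.util.Arrays;
    import java.util.List;

    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThat;

    class IterableAssertionSketch {

        @Test
        void exactElementsInAnyOrder() {
            List<String> values = Arrays.asList("Ciao", "Bello");

            // passes: same elements, order ignored, no extras allowed
            assertThat(values).containsExactlyInAnyOrder("Bello", "Ciao");
        }
    }
]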
*/ @Test - public void testMapStateDefaultValue() throws Exception { + void testMapStateDefaultValue() throws Exception { CheckpointableKeyedStateBackend backend = createKeyedBackend(IntSerializer.INSTANCE); @@ -1379,19 +1353,19 @@ public void testMapStateDefaultValue() throws Exception { VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId); backend.setCurrentKey(1); - assertNotNull(state.entries()); - assertFalse(state.entries().iterator().hasNext()); + assertThat(state.entries()).isNotNull(); + assertThat(state.entries().iterator()).isExhausted(); state.put("Ciao", "Hello"); state.put("Bello", "Nice"); - assertNotNull(state.entries()); - assertEquals(state.get("Ciao"), "Hello"); - assertEquals(state.get("Bello"), "Nice"); + assertThat(state.entries()).isNotNull(); + assertThat(state.get("Ciao")).isEqualTo("Hello"); + assertThat(state.get("Bello")).isEqualTo("Nice"); state.clear(); - assertNotNull(state.entries()); - assertFalse(state.entries().iterator().hasNext()); + assertThat(state.entries()).isNotNull(); + assertThat(state.entries().iterator()).isExhausted(); backend.dispose(); } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendVerificationTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendVerificationTest.java index 67062bcf75dbd..ae5d43a223dc5 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendVerificationTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendVerificationTest.java @@ -24,37 +24,36 @@ import org.apache.flink.runtime.state.CheckpointStreamFactory; import org.apache.flink.runtime.state.KeyGroupRange; import org.apache.flink.runtime.state.memory.MemCheckpointStreamFactory; -import org.apache.flink.util.TestLogger; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * Tests that verify an exception is thrown in methods that are not supported in the BATCH runtime * mode. 
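[The MapState hunk above swaps assertFalse(iterator.hasNext()) for AssertJ's dedicated iterator assertion isExhausted(), which yields a clearer failure message. A sketch with a hypothetical test class:

    import java.util.Collections;

    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThat;

    class IteratorAssertionSketch {

        @Test
        void emptyEntrySetIteratorIsExhausted() {
            // was: assertFalse(state.entries().iterator().hasNext());
            assertThat(Collections.emptyMap().entrySet().iterator()).isExhausted();
        }
    }
]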
*/ -public class BatchExecutionStateBackendVerificationTest extends TestLogger { +class BatchExecutionStateBackendVerificationTest { private static final LongSerializer LONG_SERIALIZER = new LongSerializer(); - @Rule public ExpectedException expectedException = ExpectedException.none(); - @Test - public void verifySnapshotNotSupported() { - expectedException.expect(UnsupportedOperationException.class); - expectedException.expectMessage("Snapshotting is not supported in BATCH runtime mode."); - + void verifySnapshotNotSupported() { BatchExecutionKeyedStateBackend stateBackend = new BatchExecutionKeyedStateBackend<>( LONG_SERIALIZER, new KeyGroupRange(0, 9), new ExecutionConfig()); long checkpointId = 0L; CheckpointStreamFactory streamFactory = new MemCheckpointStreamFactory(10); - stateBackend.snapshot( - checkpointId, - 0L, - streamFactory, - CheckpointOptions.forCheckpointWithDefaultLocation()); + + assertThatThrownBy( + () -> + stateBackend.snapshot( + checkpointId, + 0L, + streamFactory, + CheckpointOptions.forCheckpointWithDefaultLocation())) + .isInstanceOf(UnsupportedOperationException.class) + .hasMessageContaining("Snapshotting is not supported in BATCH runtime mode."); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/SourceOutputWithWatermarksTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/SourceOutputWithWatermarksTest.java index 20e154b40830d..adbb0e665d6df 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/SourceOutputWithWatermarksTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/SourceOutputWithWatermarksTest.java @@ -27,15 +27,12 @@ import org.apache.flink.streaming.runtime.io.PushingAsyncDataInput; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.instanceOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for the {@link SourceOutputWithWatermarks}. 
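[This file shows the standard replacement for JUnit 4's ExpectedException rule: the statement under test moves into a lambda passed to assertThatThrownBy. Since ExpectedException.expectMessage performed a substring match, hasMessageContaining (not hasMessage) is the faithful translation. A self-contained sketch of the shape; the thrown exception here is a stand-in for the snapshot call:

    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    class ExpectedExceptionMigrationSketch {

        @Test
        void ruleBecomesLambda() {
            // was: @Rule ExpectedException + expect(...) + expectMessage(...) before the call
            assertThatThrownBy(
                            () -> {
                                throw new UnsupportedOperationException(
                                        "Snapshotting is not supported in BATCH runtime mode.");
                            })
                    .isInstanceOf(UnsupportedOperationException.class)
                    .hasMessageContaining("Snapshotting is not supported in BATCH runtime mode.");
        }
    }
]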
*/ -public class SourceOutputWithWatermarksTest { +class SourceOutputWithWatermarksTest { /** * Creates a new SourceOutputWithWatermarks that emits records to the given DataOutput and @@ -58,7 +55,7 @@ private static SourceOutputWithWatermarks createWithSameOutputs( } @Test - public void testNoTimestampValue() { + void testNoTimestampValue() { final CollectingDataOutput dataOutput = new CollectingDataOutput<>(); final SourceOutputWithWatermarks out = createWithSameOutputs( @@ -67,12 +64,13 @@ public void testNoTimestampValue() { out.collect(17); final Object event = dataOutput.events.get(0); - assertThat(event, instanceOf(StreamRecord.class)); - assertEquals(TimestampAssigner.NO_TIMESTAMP, ((StreamRecord) event).getTimestamp()); + assertThat(event).isInstanceOf(StreamRecord.class); + assertThat(((StreamRecord) event).getTimestamp()) + .isEqualTo(TimestampAssigner.NO_TIMESTAMP); } @Test - public void eventsAreBeforeWatermarks() { + void eventsAreBeforeWatermarks() { final CollectingDataOutput dataOutput = new CollectingDataOutput<>(); final SourceOutputWithWatermarks out = createWithSameOutputs( @@ -82,11 +80,10 @@ public void eventsAreBeforeWatermarks() { out.collect(42, 12345L); - assertThat( - dataOutput.events, - contains( + assertThat(dataOutput.events) + .contains( new StreamRecord<>(42, 12345L), - new org.apache.flink.streaming.api.watermark.Watermark(12345L))); + new org.apache.flink.streaming.api.watermark.Watermark(12345L)); } // ------------------------------------------------------------------------ diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/WatermarkToDataOutputTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/WatermarkToDataOutputTest.java index 9177ba09f7cbb..2e261dfa08d7b 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/WatermarkToDataOutputTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/source/WatermarkToDataOutputTest.java @@ -21,26 +21,25 @@ import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.watermarkstatus.WatermarkStatus; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.hamcrest.Matchers.contains; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** Unit tests for the {@link WatermarkToDataOutput}. 
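[The record-timestamp test above maps Hamcrest's instanceOf matcher to AssertJ's isInstanceOf; AssertJ's asInstanceOf variant can additionally narrow the assertion type and avoid the raw cast that follows. A sketch with a hypothetical test class:

    import org.assertj.core.api.InstanceOfAssertFactories;
    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThat;

    class InstanceOfAssertionSketch {

        @Test
        void typeCheckThenValueCheck() {
            Object event = Long.valueOf(12345L);

            // was: assertThat(event, instanceOf(Long.class));
            assertThat(event).isInstanceOf(Long.class);

            // narrows the assertion type without an explicit cast
            assertThat(event).asInstanceOf(InstanceOfAssertFactories.LONG).isEqualTo(12345L);
        }
    }
]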
*/ -public class WatermarkToDataOutputTest { +class WatermarkToDataOutputTest { @Test - public void testInitialZeroWatermark() { + void testInitialZeroWatermark() { final CollectingDataOutput testingOutput = new CollectingDataOutput<>(); final WatermarkToDataOutput wmOutput = new WatermarkToDataOutput(testingOutput); wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(0L)); - assertThat(testingOutput.events, contains(new Watermark(0L))); + assertThat(testingOutput.events).contains(new Watermark(0L)); } @Test - public void testWatermarksDoNotRegress() { + void testWatermarksDoNotRegress() { final CollectingDataOutput testingOutput = new CollectingDataOutput<>(); final WatermarkToDataOutput wmOutput = new WatermarkToDataOutput(testingOutput); @@ -51,21 +50,19 @@ public void testWatermarksDoNotRegress() { wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(17L)); wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(18L)); - assertThat( - testingOutput.events, - contains(new Watermark(12L), new Watermark(17L), new Watermark(18L))); + assertThat(testingOutput.events) + .contains(new Watermark(12L), new Watermark(17L), new Watermark(18L)); } @Test - public void becomingActiveEmitsStatus() { + void becomingActiveEmitsStatus() { final CollectingDataOutput testingOutput = new CollectingDataOutput<>(); final WatermarkToDataOutput wmOutput = new WatermarkToDataOutput(testingOutput); wmOutput.markIdle(); wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(100L)); - assertThat( - testingOutput.events, - contains(WatermarkStatus.IDLE, WatermarkStatus.ACTIVE, new Watermark(100L))); + assertThat(testingOutput.events) + .contains(WatermarkStatus.IDLE, WatermarkStatus.ACTIVE, new Watermark(100L)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/windowing/functions/InternalWindowFunctionTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/windowing/functions/InternalWindowFunctionTest.java index 87466314d6fbe..978d5f554ab7f 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/windowing/functions/InternalWindowFunctionTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/windowing/functions/InternalWindowFunctionTest.java @@ -46,7 +46,7 @@ import org.apache.flink.util.Collector; import org.hamcrest.collection.IsIterableContainingInOrder; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -69,11 +69,11 @@ import static org.mockito.hamcrest.MockitoHamcrest.argThat; /** Tests for {@link InternalWindowFunction}. 
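[One caveat on the two watermark test files above: Hamcrest's contains(...) asserted the complete event sequence in order, while AssertJ's contains(...) only asserts membership, so the migrated assertions are slightly weaker; the order-and-completeness counterpart in AssertJ is containsExactly. A sketch of the difference, using a hypothetical test class:

    import java.util.Arrays;
    import java.util.List;

    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThat;

    class OrderedContainsSketch {

        @Test
        void membershipVersusExactSequence() {
            List<Long> watermarks = Arrays.asList(12L, 17L, 18L);

            // passes: membership only, any order, extra elements tolerated
            assertThat(watermarks).contains(17L, 12L);

            // the strict equivalent of Hamcrest's contains(12L, 17L, 18L)
            assertThat(watermarks).containsExactly(12L, 17L, 18L);
        }
    }
]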
*/ -public class InternalWindowFunctionTest { +class InternalWindowFunctionTest { @SuppressWarnings("unchecked") @Test - public void testInternalIterableAllWindowFunction() throws Exception { + void testInternalIterableAllWindowFunction() throws Exception { AllWindowFunctionMock mock = mock(AllWindowFunctionMock.class); InternalIterableAllWindowFunction windowFunction = @@ -117,7 +117,7 @@ public void testInternalIterableAllWindowFunction() throws Exception { @SuppressWarnings("unchecked") @Test - public void testInternalIterableProcessAllWindowFunction() throws Exception { + void testInternalIterableProcessAllWindowFunction() throws Exception { ProcessAllWindowFunctionMock mock = mock(ProcessAllWindowFunctionMock.class); InternalIterableProcessAllWindowFunction windowFunction = @@ -160,7 +160,7 @@ public void testInternalIterableProcessAllWindowFunction() throws Exception { @SuppressWarnings("unchecked") @Test - public void testInternalIterableWindowFunction() throws Exception { + void testInternalIterableWindowFunction() throws Exception { WindowFunctionMock mock = mock(WindowFunctionMock.class); InternalIterableWindowFunction windowFunction = @@ -203,7 +203,7 @@ public void testInternalIterableWindowFunction() throws Exception { @SuppressWarnings("unchecked") @Test - public void testInternalIterableProcessWindowFunction() throws Exception { + void testInternalIterableProcessWindowFunction() throws Exception { ProcessWindowFunctionMock mock = mock(ProcessWindowFunctionMock.class); InternalIterableProcessWindowFunction windowFunction = @@ -267,7 +267,7 @@ public Object answer(InvocationOnMock invocationOnMock) @SuppressWarnings("unchecked") @Test - public void testInternalSingleValueWindowFunction() throws Exception { + void testInternalSingleValueWindowFunction() throws Exception { WindowFunctionMock mock = mock(WindowFunctionMock.class); InternalSingleValueWindowFunction windowFunction = @@ -315,7 +315,7 @@ public void testInternalSingleValueWindowFunction() throws Exception { @SuppressWarnings("unchecked") @Test - public void testInternalSingleValueAllWindowFunction() throws Exception { + void testInternalSingleValueAllWindowFunction() throws Exception { AllWindowFunctionMock mock = mock(AllWindowFunctionMock.class); InternalSingleValueAllWindowFunction windowFunction = @@ -362,7 +362,7 @@ public void testInternalSingleValueAllWindowFunction() throws Exception { @SuppressWarnings("unchecked") @Test - public void testInternalSingleValueProcessAllWindowFunction() throws Exception { + void testInternalSingleValueProcessAllWindowFunction() throws Exception { ProcessAllWindowFunctionMock mock = mock(ProcessAllWindowFunctionMock.class); InternalSingleValueProcessAllWindowFunction windowFunction = @@ -409,7 +409,7 @@ public void testInternalSingleValueProcessAllWindowFunction() throws Exception { @SuppressWarnings("unchecked") @Test - public void testInternalSingleValueProcessWindowFunction() throws Exception { + void testInternalSingleValueProcessWindowFunction() throws Exception { ProcessWindowFunctionMock mock = mock(ProcessWindowFunctionMock.class); InternalSingleValueProcessWindowFunction windowFunction = @@ -476,7 +476,7 @@ public Object answer(InvocationOnMock invocationOnMock) @SuppressWarnings("unchecked") @Test - public void testInternalAggregateProcessWindowFunction() throws Exception { + void testInternalAggregateProcessWindowFunction() throws Exception { AggregateProcessWindowFunctionMock mock = mock(AggregateProcessWindowFunctionMock.class); @@ -585,7 +585,7 @@ public Object 
answer(InvocationOnMock invocationOnMock) @SuppressWarnings("unchecked") @Test - public void testInternalAggregateProcessAllWindowFunction() throws Exception { + void testInternalAggregateProcessAllWindowFunction() throws Exception { AggregateProcessAllWindowFunctionMock mock = mock(AggregateProcessAllWindowFunctionMock.class); diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/streamtask/StreamIterationHeadTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/streamtask/StreamIterationHeadTest.java index f128061964494..154f84a565aab 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/streamtask/StreamIterationHeadTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/streamtask/StreamIterationHeadTest.java @@ -22,15 +22,15 @@ import org.apache.flink.streaming.runtime.tasks.StreamIterationHead; import org.apache.flink.streaming.runtime.tasks.StreamTaskTestHarness; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** Tests for {@link StreamIterationHead}. */ -public class StreamIterationHeadTest { +class StreamIterationHeadTest { @Test - public void testIterationHeadWatermarkEmission() throws Exception { + void testIterationHeadWatermarkEmission() throws Exception { StreamTaskTestHarness harness = new StreamTaskTestHarness<>(StreamIterationHead::new, BasicTypeInfo.INT_TYPE_INFO); harness.setupOutputForSingletonOperatorChain(); @@ -40,7 +40,7 @@ public void testIterationHeadWatermarkEmission() throws Exception { harness.invoke(); harness.waitForTaskCompletion(); - assertEquals(1, harness.getOutput().size()); - assertEquals(new Watermark(Long.MAX_VALUE), harness.getOutput().peek()); + assertThat(harness.getOutput()).hasSize(1); + assertThat(harness.getOutput().peek()).isEqualTo(new Watermark(Long.MAX_VALUE)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/CosineDistanceTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/CosineDistanceTest.java index ba4b8aa5cf34b..c3d606709afd2 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/CosineDistanceTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/CosineDistanceTest.java @@ -19,16 +19,17 @@ import org.apache.flink.streaming.api.functions.windowing.delta.CosineDistance; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.offset; /** Tests for {@link CosineDistance}. 
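[The delta-function tests below migrate assertEquals with a double tolerance to isCloseTo with an explicit offset, keeping the descriptive message via as(...). A minimal sketch with a hypothetical test class:

    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThat;
    import static org.assertj.core.api.Assertions.offset;

    class DeltaAssertionSketch {

        @Test
        void doublesCompareWithinTolerance() {
            double actual = 0.1 + 0.2; // 0.30000000000000004 in IEEE 754 arithmetic

            // was: assertEquals("Wrong result ...", 0.3, actual, 0.000001);
            assertThat(actual).as("Wrong result for inputs ...").isCloseTo(0.3, offset(0.000001));
        }
    }
]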
*/ -public class CosineDistanceTest { +class CosineDistanceTest { @SuppressWarnings({"rawtypes", "unchecked"}) @Test - public void testCosineDistance() { + void testCosineDistance() { // Reference calculated using wolfram alpha double[][][] testdata = { @@ -47,14 +48,13 @@ }; for (int i = 0; i < testdata.length; i++) { - assertEquals( - "Wrong result for inputs " - + arrayToString(testdata[i][0]) - + " and " - + arrayToString(testdata[i][0]), - referenceSolutions[i], - new CosineDistance().getDelta(testdata[i][0], testdata[i][1]), - 0.000001); + assertThat(new CosineDistance().getDelta(testdata[i][0], testdata[i][1])) + .as( + "Wrong result for inputs " + + arrayToString(testdata[i][0]) + + " and " + + arrayToString(testdata[i][1])) + .isCloseTo(referenceSolutions[i], offset(0.000001)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/EuclideanDistanceTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/EuclideanDistanceTest.java index a1756787af98a..3de1ab451a1c2 100644 --- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/EuclideanDistanceTest.java +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/windowing/deltafunction/EuclideanDistanceTest.java @@ -19,16 +19,17 @@ import org.apache.flink.streaming.api.functions.windowing.delta.EuclideanDistance; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.offset; /** Tests for {@link EuclideanDistance}. */ -public class EuclideanDistanceTest { +class EuclideanDistanceTest { @SuppressWarnings({"rawtypes", "unchecked"}) @Test - public void testEuclideanDistance() { + void testEuclideanDistance() { // Reference calculated using wolfram alpha double[][][] testdata = { @@ -47,14 +48,13 @@ }; for (int i = 0; i < testdata.length; i++) { - assertEquals( - "Wrong result for inputs " - + arrayToString(testdata[i][0]) - + " and " - + arrayToString(testdata[i][0]), - referenceSolutions[i], - new EuclideanDistance().getDelta(testdata[i][0], testdata[i][1]), - 0.000001); + assertThat(new EuclideanDistance().getDelta(testdata[i][0], testdata[i][1])) + .as( + "Wrong result for inputs " + + arrayToString(testdata[i][0]) + + " and " + + arrayToString(testdata[i][1])) + .isCloseTo(referenceSolutions[i], offset(0.000001)); } } diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/util/BlockingSourceContext.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/util/BlockingSourceContext.java new file mode 100644 index 0000000000000..c02ffa5712e92 --- /dev/null +++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/util/BlockingSourceContext.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.streaming.util; + +import org.apache.flink.core.testutils.OneShotLatch; +import org.apache.flink.streaming.api.functions.source.SourceFunction; +import org.apache.flink.streaming.api.watermark.Watermark; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test SourceContext. */ +public class BlockingSourceContext<T> implements SourceFunction.SourceContext<T> { + + private final String name; + + private final Object lock; + private final OneShotLatch latchToTrigger; + private final OneShotLatch latchToWait; + private final ConcurrentHashMap<String, List<T>> collector; + + private final int threshold; + private int counter = 0; + + private final List<T> localOutput; + + public BlockingSourceContext( + String name, + OneShotLatch latchToTrigger, + OneShotLatch latchToWait, + ConcurrentHashMap<String, List<T>> output, + int elemToFire) { + this.name = name; + this.lock = new Object(); + this.latchToTrigger = latchToTrigger; + this.latchToWait = latchToWait; + this.collector = output; + this.threshold = elemToFire; + + this.localOutput = new ArrayList<>(); + List<T> prev = collector.put(name, localOutput); + assertThat(prev).isNull(); + } + + @Override + public void collectWithTimestamp(T element, long timestamp) { + collect(element); + } + + @Override + public void collect(T element) { + localOutput.add(element); + if (++counter == threshold) { + latchToTrigger.trigger(); + try { + if (!latchToWait.isTriggered()) { + latchToWait.await(); + } + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + + @Override + public void emitWatermark(Watermark mark) { + throw new UnsupportedOperationException(); + } + + @Override + public void markAsTemporarilyIdle() { + throw new UnsupportedOperationException(); + } + + @Override + public Object getCheckpointLock() { + return lock; + } + + @Override + public void close() {} +} diff --git a/flink-streaming-java/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension b/flink-streaming-java/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension new file mode 100644 index 0000000000000..28999133c2b0f --- /dev/null +++ b/flink-streaming-java/src/test/resources/META-INF/services/org.junit.jupiter.api.extension.Extension @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +org.apache.flink.util.TestLoggerExtension \ No newline at end of file
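[The service file above registers org.apache.flink.util.TestLoggerExtension through JUnit 5's ServiceLoader-based extension mechanism, replacing the JUnit 4 habit of extending TestLogger in every test class; note that ServiceLoader-registered extensions only take effect when extension autodetection is enabled on the JUnit Platform (e.g. junit.jupiter.extensions.autodetection.enabled=true, which the Flink build is assumed to set). The explicit per-class equivalent, for comparison:

    import org.apache.flink.util.TestLoggerExtension;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;

    // Explicit registration; the META-INF/services entry makes this
    // implicit for the whole module instead.
    @ExtendWith(TestLoggerExtension.class)
    class SomeMigratedTest {

        @Test
        void runsWithTestLogging() {}
    }

And a sketch of how the new BlockingSourceContext is meant to be driven; this is a hypothetical caller, showing how the two latches implement a pause-and-resume handshake between a source thread and the test thread:

    import org.apache.flink.core.testutils.OneShotLatch;
    import org.apache.flink.streaming.util.BlockingSourceContext;

    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;

    class BlockingSourceContextUsageSketch {

        static void handshake() throws Exception {
            OneShotLatch latchToTrigger = new OneShotLatch();
            OneShotLatch latchToWait = new OneShotLatch();
            ConcurrentHashMap<String, List<Integer>> output = new ConcurrentHashMap<>();

            BlockingSourceContext<Integer> ctx =
                    new BlockingSourceContext<>("src-0", latchToTrigger, latchToWait, output, 2);

            Thread producer =
                    new Thread(
                            () -> {
                                ctx.collect(1);
                                ctx.collect(2); // threshold hit: fires latchToTrigger, then parks
                            });
            producer.start();

            latchToTrigger.await(); // the source is now paused mid-run...
            // ... a test would typically snapshot or inspect state here ...
            latchToWait.trigger(); // release the source
            producer.join();
        }
    }
]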