From 5ab4ff0a1a88c78e4389f6b6412d3b66f789833c Mon Sep 17 00:00:00 2001
From: Dimitar Dimitrov <30328539+dimitarndimitrov@users.noreply.github.com>
Date: Tue, 7 Jun 2022 18:42:46 +0200
Subject: [PATCH] KREST-4067 Introduce import control to Kafka REST. (#1022)

This change enables Checkstyle's ImportControl module so that it's easier to
detect when a non-public Kafka API is being used. This is needed because
non-public APIs provide far fewer guarantees, at least in terms of API
stability, and can be a source of breakages or subtle behavior changes.

For that purpose, Checkstyle has also been enabled on test sources and a main
Checkstyle config has been added.
- All Checkstyle rules other than the ImportControl rules have been disabled
  for test sources, in order to minimize the scope of this change.

All pre-existing usages of non-public APIs have been grandfathered (i.e.
allowlisted in the ImportControl ruleset). Subsequent usages of non-public
APIs will be much easier to spot from here on.
---
 checkstyle/checkstyle.xml | 12 ++
 checkstyle/import_control.xml | 149 ++++++++++++++++++
 checkstyle/suppressions.xml | 46 +++++-
 .../kafkarest/KafkaRestConfigTest.java | 15 ++
 .../confluent/kafkarest/OpenConfigEntry.java | 15 ++
 .../io/confluent/kafkarest/TestUtils.java | 11 +-
 .../controllers/AclManagerImplTest.java | 6 +-
 .../RecordSerializerFacadeTest.java | 3 +-
 .../controllers/SchemaManagerImplTest.java | 21 ++-
 .../SchemaRecordSerializerTest.java | 3 +-
 .../extension/EnumConverterProviderTest.java | 15 ++
 .../integration/AbstractConsumerTest.java | 28 ++--
 .../integration/AbstractProducerTest.java | 8 +-
 .../integration/ClusterTestHarness.java | 21 +--
 .../integration/ConsumerBinaryTest.java | 1 +
 .../integration/ConsumerJsonTest.java | 1 +
 .../integration/ConsumerTimeoutTest.java | 1 +
 .../integration/JsonProducerTest.java | 1 +
 .../KafkaRestStartUpIntegrationTest.java | 15 ++
 ...adataAPITest.java => MetadataApiTest.java} | 5 +-
 .../kafkarest/integration/ProducerTest.java | 1 +
 .../SchemaRegistrySaslInheritTest.java | 6 +-
 .../v2/AvroProduceConsumeTest.java | 15 ++
 .../v2/JsonSchemaProduceConsumeTest.java | 15 ++
 .../v2/ProtobufProduceConsumeTest.java | 15 ++
 .../v2/SchemaProduceConsumeTest.java | 15 ++
 .../integration/v2/SeekToTimestampTest.java | 19 ++-
 .../v3/AclsResourceIntegrationTest.java | 23 ++-
 .../BrokerConfigsResourceIntegrationTest.java | 15 ++
 ...ClusterConfigsResourceIntegrationTest.java | 15 ++
 ...AllReassignmentsActionIntegrationTest.java | 15 ++
 .../io/confluent/kafkarest/mock/MockTime.java | 1 +
 .../resources/ProduceRateLimitersTest.java | 15 ++
 .../resources/v2/RootResourceTest.java | 10 +-
 .../v2/TopicsResourceAvroProduceTest.java | 4 +-
 .../v3/BrokerConfigResourceTest.java | 15 ++
 .../v3/GetReassignmentActionTest.java | 15 ++
 .../v3/ListAllTopicsConfigsActionTest.java | 2 +-
 .../resources/v3/ProduceActionTest.java | 88 +++++------
 .../response/CrnFactoryImplTest.java | 15 ++
 .../response/StreamingResponseTest.java | 21 ++-
 .../kafkarest/testing/KafkaRestFixture.java | 17 +-
 .../kafkarest/testing/SslFixture.java | 14 +-
 .../unit/DefaultKafkaRestContextTest.java | 15 ++
 .../kafkarest/unit/UriUtilsTest.java | 16 +-
 .../v2/KafkaConsumerManagerTest.java | 19 ++-
 .../io/confluent/kafkarest/v2/LoadTest.java | 1 +
 .../confluent/kafkarest/v2/MockConsumer.java | 1 +
 pom.xml | 10 ++
 49 files changed, 652 insertions(+), 158 deletions(-)
 create mode 100644 checkstyle/checkstyle.xml
 create mode 100644 checkstyle/import_control.xml
 rename 
kafka-rest/src/test/java/io/confluent/kafkarest/integration/{MetadataAPITest.java => MetadataApiTest.java} (98%) diff --git a/checkstyle/checkstyle.xml b/checkstyle/checkstyle.xml new file mode 100644 index 0000000000..1a987cb501 --- /dev/null +++ b/checkstyle/checkstyle.xml @@ -0,0 +1,12 @@ + + + + + + + + + + \ No newline at end of file diff --git a/checkstyle/import_control.xml b/checkstyle/import_control.xml new file mode 100644 index 0000000000..c87c499e4b --- /dev/null +++ b/checkstyle/import_control.xml @@ -0,0 +1,149 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml index b31178b238..23c125ebb9 100644 --- a/checkstyle/suppressions.xml +++ b/checkstyle/suppressions.xml @@ -10,19 +10,57 @@ - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/KafkaRestConfigTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/KafkaRestConfigTest.java index cc537f2c8b..43f21e1a7e 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/KafkaRestConfigTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/KafkaRestConfigTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2019 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest; import static io.confluent.kafkarest.KafkaRestConfig.METRICS_REPORTER_CLASSES_CONFIG; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/OpenConfigEntry.java b/kafka-rest/src/test/java/io/confluent/kafkarest/OpenConfigEntry.java index 1656e67bc1..42452713db 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/OpenConfigEntry.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/OpenConfigEntry.java @@ -1,3 +1,18 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ + package io.confluent.kafkarest; import static java.util.Objects.requireNonNull; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/TestUtils.java b/kafka-rest/src/test/java/io/confluent/kafkarest/TestUtils.java index 882f40b004..b2ef5d9f40 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/TestUtils.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/TestUtils.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -140,7 +141,8 @@ public static JsonNode jsonTree(String jsonData) { public static void assertPartitionsEqual(List a, List b) { assertEquals(a.size(), b.size()); for (int i = 0; i < a.size(); i++) { - PartitionOffset aOffset = a.get(i), bOffset = b.get(i); + PartitionOffset aOffset = a.get(i); + PartitionOffset bOffset = b.get(i); assertEquals(aOffset.getPartition(), bOffset.getPartition()); } } @@ -150,7 +152,8 @@ public static void assertPartitionOffsetsEqual(List a, List Map topicCounts( if (partition == null || record.partition() == partition) { Object msg = encodeComparable(record.value()); msgCounts.put(msg, (msgCounts.get(msg) == null ? 0 : msgCounts.get(msg)) + 1); - if (counter.incrementAndGet() == records.size()) return true; + if (counter.incrementAndGet() == records.size()) { + return true; + } } } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/AclManagerImplTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/AclManagerImplTest.java index 78e46b862e..948affed8a 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/AclManagerImplTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/AclManagerImplTest.java @@ -27,7 +27,11 @@ import io.confluent.kafkarest.entities.Acl; import io.confluent.kafkarest.entities.Cluster; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Optional; import java.util.concurrent.ExecutionException; import javax.ws.rs.NotFoundException; import org.apache.kafka.clients.admin.Admin; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/RecordSerializerFacadeTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/RecordSerializerFacadeTest.java index 4703b0debb..68f17cf07c 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/RecordSerializerFacadeTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/RecordSerializerFacadeTest.java @@ -121,7 +121,8 @@ public void noSchemaRegistryClientConfigured() { /* isKey= */ true)); assertEquals( - "Error serializing message. Schema Registry not defined, no Schema Registry client available to serialize message.", + "Error serializing message. 
Schema Registry not defined, " + + "no Schema Registry client available to serialize message.", rcve.getMessage()); assertEquals(42207, rcve.getErrorCode()); } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaManagerImplTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaManagerImplTest.java index 9e637f59c6..ed24421edf 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaManagerImplTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaManagerImplTest.java @@ -432,7 +432,8 @@ public void getSchema_avro_schemaId_schemaIdNotInSubject() throws Exception { /* rawSchema= */ Optional.empty(), /* isKey= */ true)); assertEquals( - "Error serializing message. Error when fetching schema version. subject = topic-1-key, schema = \"int\"\n" + "Error serializing message. Error when fetching schema version. " + + "subject = topic-1-key, schema = \"int\"\n" + "Subject Not Found; error code: 40401", rcve.getMessage()); assertEquals(42207, rcve.getErrorCode()); @@ -517,7 +518,8 @@ public void getSchema_avro_latestSchema_noSchema() { /* rawSchema= */ Optional.empty(), /* isKey= */ true)); assertEquals( - "Error serializing message. Error when fetching latest schema version. subject = topic-1-key\n" + "Error serializing message. Error when fetching latest schema version. " + + "subject = topic-1-key\n" + "Subject Not Found; error code: 40401", rcve.getMessage()); assertEquals(42207, rcve.getErrorCode()); @@ -570,7 +572,9 @@ public void getSchema_avro_schemaVersion_subjectNameStrategy_strategyReturnsNull assertTrue( rcve.getMessage() .startsWith( - "Cannot use schema_subject_strategy=io.confluent.kafkarest.controllers.SchemaManagerImplTest$NullReturningSubjectNameStrategy@")); + "Cannot use schema_subject_strategy=" + + "io.confluent.kafkarest.controllers.SchemaManagerImplTest" + + "$NullReturningSubjectNameStrategy@")); assertTrue(rcve.getMessage().endsWith(" without schema_id or schema.")); } @@ -766,7 +770,8 @@ public void errorRawSchemaCantParseSchema() { /* rawSchema= */ Optional.of(TextNode.valueOf("rawSchema").toString()), /* isKey= */ true)); assertEquals( - "Raw schema not supported with format = EasyMock for class io.confluent.kafkarest.entities.EmbeddedFormat", + "Raw schema not supported with format = " + + "EasyMock for class io.confluent.kafkarest.entities.EmbeddedFormat", rcve.getMessage()); assertEquals(400, rcve.getCode()); } @@ -801,7 +806,8 @@ public void errorRawSchemaNotSupportedWithSchema() { /* rawSchema= */ Optional.of(TextNode.valueOf("rawSchema").toString()), /* isKey= */ true)); assertEquals( - "Raw schema not supported with format = EasyMock for class io.confluent.kafkarest.entities.EmbeddedFormat", + "Raw schema not supported with format = " + + "EasyMock for class io.confluent.kafkarest.entities.EmbeddedFormat", bre.getMessage()); assertEquals(400, bre.getCode()); } @@ -844,7 +850,10 @@ public void errorRegisteringSchema() throws RestClientException, IOException { /* rawSchema= */ Optional.of(TextNode.valueOf("rawString").toString()), /* isKey= */ true)); assertEquals( - "Error serializing message. Error when registering schema. format = EasyMock for class io.confluent.kafkarest.entities.EmbeddedFormat, subject = subject1, schema = null\n" + "Error serializing message. Error when registering schema. 
" + + "format = EasyMock for class io.confluent.kafkarest.entities.EmbeddedFormat, " + + "subject = subject1, " + + "schema = null\n" + "Can't register Schema", rcve.getMessage()); assertEquals(42207, rcve.getErrorCode()); diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaRecordSerializerTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaRecordSerializerTest.java index 3b5fb78d4a..dde8b68cd7 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaRecordSerializerTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/controllers/SchemaRecordSerializerTest.java @@ -37,7 +37,8 @@ public void errorWhenNoSchemaRegistryDefined() { assertEquals(42207, rcve.getErrorCode()); assertEquals( - "Error serializing message. Schema Registry not defined, no Schema Registry client available to serialize message.", + "Error serializing message. Schema Registry not defined, " + + "no Schema Registry client available to serialize message.", rcve.getMessage()); } } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/extension/EnumConverterProviderTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/extension/EnumConverterProviderTest.java index 2e51503be3..585cc0027a 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/extension/EnumConverterProviderTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/extension/EnumConverterProviderTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.extension; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractConsumerTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractConsumerTest.java index dfa0c35cfc..fc384ae8ef 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractConsumerTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractConsumerTest.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.integration; import static io.confluent.kafkarest.TestUtils.assertErrorResponse; @@ -190,21 +191,20 @@ protected void assertEqualsMessages( // Since this is used for unkeyed messages, this can't rely on ordering of messages Map inputSetCounts = new HashMap(); for (ProducerRecord rec : records) { - Object - key = - TestUtils.encodeComparable( - (converter != null ? converter.convert(rec.key()) : rec.key())), - value = - TestUtils.encodeComparable( - (converter != null ? converter.convert(rec.value()) : rec.value())); + Object key = + TestUtils.encodeComparable( + (converter != null ? converter.convert(rec.key()) : rec.key())); + Object value = + TestUtils.encodeComparable( + (converter != null ? 
converter.convert(rec.value()) : rec.value())); inputSetCounts.put(key, (inputSetCounts.get(key) == null ? 0 : inputSetCounts.get(key)) + 1); inputSetCounts.put( value, (inputSetCounts.get(value) == null ? 0 : inputSetCounts.get(value)) + 1); } Map outputSetCounts = new HashMap(); for (ConsumerRecord rec : consumed) { - Object key = TestUtils.encodeComparable(rec.getKey()), - value = TestUtils.encodeComparable(rec.getValue()); + Object key = TestUtils.encodeComparable(rec.getKey()); + Object value = TestUtils.encodeComparable(rec.getValue()); outputSetCounts.put( key, (outputSetCounts.get(key) == null ? 0 : outputSetCounts.get(key)) + 1); outputSetCounts.put( @@ -267,7 +267,7 @@ private final void pause(int times) { try { Thread.sleep(times * ONE_SECOND_MS); } catch (InterruptedException e) { - + // Noop } } @@ -288,18 +288,18 @@ protected void consumeForTimeout( // request timeout, the iterator timeout used for "peeking", and the backoff period, as well // as some extra slack for general overhead (which apparently mostly comes from running the // request and can be quite substantial). - final int TIMEOUT = restConfig.getInt(KafkaRestConfig.CONSUMER_REQUEST_TIMEOUT_MS_CONFIG); - final long TIMEOUT_SLACK = + final int timeout = restConfig.getInt(KafkaRestConfig.CONSUMER_REQUEST_TIMEOUT_MS_CONFIG); + final long timeoutSlack = restConfig.getInt(KafkaRestConfig.CONSUMER_ITERATOR_BACKOFF_MS_CONFIG) + restConfig.getInt(KafkaRestConfig.CONSUMER_ITERATOR_TIMEOUT_MS_CONFIG) + ONE_SECOND_MS; // This test is inherently flakey, and probably needs a mocked back // end, but for now we can give it lots of slack long elapsed = finished - started; assertTrue( - elapsed > TIMEOUT, + elapsed > timeout, "Consumer request should not return before the timeout when no data is available"); assertTrue( - (elapsed - TIMEOUT) < TIMEOUT_SLACK, + (elapsed - timeout) < timeoutSlack, "Consumer request should timeout approximately within the request timeout period"); } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractProducerTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractProducerTest.java index 8aa726a47c..3a22556d62 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractProducerTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/AbstractProducerTest.java @@ -138,8 +138,8 @@ protected void testProduceToTopicFails( assertOKResponse(response, Versions.KAFKA_V2_JSON); final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class); - for (PartitionOffset pOffset : produceResponse.getOffsets()) { - assertNotNull(pOffset.getError()); + for (PartitionOffset partitionOffset : produceResponse.getOffsets()) { + assertNotNull(partitionOffset.getError()); } } @@ -163,8 +163,8 @@ protected void testProduceToAuthorizationError( assertEquals(Response.Status.FORBIDDEN.getStatusCode(), response.getStatus()); final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class); - for (PartitionOffset pOffset : produceResponse.getOffsets()) { - assertEquals(Errors.KAFKA_AUTHORIZATION_ERROR_CODE, (int) pOffset.getErrorCode()); + for (PartitionOffset partitionOffset : produceResponse.getOffsets()) { + assertEquals(Errors.KAFKA_AUTHORIZATION_ERROR_CODE, (int) partitionOffset.getErrorCode()); } } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ClusterTestHarness.java 
b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ClusterTestHarness.java index 0ada8c3a3f..14ad7343a4 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ClusterTestHarness.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ClusterTestHarness.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.integration; import static io.confluent.kafkarest.TestUtils.testWithRetry; @@ -544,6 +545,16 @@ protected final void createTopic( } } + protected final void createTopic( + String topicName, Map> replicasAssignments) { + createTopic( + topicName, + Optional.empty(), + Optional.empty(), + Optional.of(replicasAssignments), + restConfig.getAdminProperties()); + } + private CreateTopicsResult createTopicCall( String topicName, Optional numPartitions, @@ -598,16 +609,6 @@ protected final void setTopicConfig(String topicName, String configName, String } } - protected final void createTopic( - String topicName, Map> replicasAssignments) { - createTopic( - topicName, - Optional.empty(), - Optional.empty(), - Optional.of(replicasAssignments), - restConfig.getAdminProperties()); - } - protected final void alterPartitionReassignment( Map> reassignments) { Properties properties = restConfig.getAdminProperties(); diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerBinaryTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerBinaryTest.java index 532be7be80..39f65ecdfe 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerBinaryTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerBinaryTest.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.integration; import static io.confluent.kafkarest.TestUtils.assertErrorResponse; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerJsonTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerJsonTest.java index 85a7756fbf..f6d8e99c22 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerJsonTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerJsonTest.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.integration; import io.confluent.kafkarest.Versions; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerTimeoutTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerTimeoutTest.java index 4fad691815..0fd6de800d 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerTimeoutTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ConsumerTimeoutTest.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. 
*/ + package io.confluent.kafkarest.integration; import io.confluent.kafkarest.Versions; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/JsonProducerTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/JsonProducerTest.java index efdf588dab..7e5631da54 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/JsonProducerTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/JsonProducerTest.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.integration; import io.confluent.kafka.serializers.KafkaJsonDeserializer; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/KafkaRestStartUpIntegrationTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/KafkaRestStartUpIntegrationTest.java index 072b7cfb50..a999326c19 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/KafkaRestStartUpIntegrationTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/KafkaRestStartUpIntegrationTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.integration; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/MetadataAPITest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/MetadataApiTest.java similarity index 98% rename from kafka-rest/src/test/java/io/confluent/kafkarest/integration/MetadataAPITest.java rename to kafka-rest/src/test/java/io/confluent/kafkarest/integration/MetadataApiTest.java index 348a02e402..3e0b48f7de 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/MetadataAPITest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/MetadataApiTest.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.integration; import static io.confluent.kafkarest.TestUtils.assertErrorResponse; @@ -43,7 +44,7 @@ * Tests metadata access against a real cluster. This isn't exhaustive since the unit tests cover * corner cases; rather it verifies the basic functionality works against a real cluster. 
*/ -public class MetadataAPITest extends ClusterTestHarness { +public class MetadataApiTest extends ClusterTestHarness { private static final String topic1Name = "topic1"; private static final List topic1Partitions = @@ -83,7 +84,7 @@ public class MetadataAPITest extends ClusterTestHarness { private static final short numReplicas = 2; - public MetadataAPITest() { + public MetadataApiTest() { super(2, false); } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ProducerTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ProducerTest.java index 12f6cdcc95..88b866bb1e 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ProducerTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/ProducerTest.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.integration; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/SchemaRegistrySaslInheritTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/SchemaRegistrySaslInheritTest.java index c769bd7a7f..a759dab977 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/SchemaRegistrySaslInheritTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/SchemaRegistrySaslInheritTest.java @@ -23,7 +23,11 @@ import io.confluent.kafkarest.entities.v3.ProduceRequest.ProduceRequestData; import io.confluent.kafkarest.entities.v3.ProduceResponse; import io.confluent.kafkarest.exceptions.v3.ErrorResponse; -import io.confluent.kafkarest.testing.*; +import io.confluent.kafkarest.testing.JvmPropertyFileLoginModuleFixture; +import io.confluent.kafkarest.testing.KafkaClusterFixture; +import io.confluent.kafkarest.testing.KafkaRestFixture; +import io.confluent.kafkarest.testing.SchemaRegistryFixture; +import io.confluent.kafkarest.testing.ZookeeperFixture; import javax.ws.rs.ProcessingException; import javax.ws.rs.client.Entity; import javax.ws.rs.core.MediaType; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/AvroProduceConsumeTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/AvroProduceConsumeTest.java index cdf2c08696..cb9a85a83f 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/AvroProduceConsumeTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/AvroProduceConsumeTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ + package io.confluent.kafkarest.integration.v2; import com.fasterxml.jackson.databind.node.IntNode; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/JsonSchemaProduceConsumeTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/JsonSchemaProduceConsumeTest.java index b712dae3e5..dd402692d4 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/JsonSchemaProduceConsumeTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/JsonSchemaProduceConsumeTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.integration.v2; import com.fasterxml.jackson.databind.JsonNode; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/ProtobufProduceConsumeTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/ProtobufProduceConsumeTest.java index 863624a2fc..7000bfbb88 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/ProtobufProduceConsumeTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/ProtobufProduceConsumeTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.integration.v2; import com.google.protobuf.Descriptors; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SchemaProduceConsumeTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SchemaProduceConsumeTest.java index 66e36ce127..fe02b515ef 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SchemaProduceConsumeTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SchemaProduceConsumeTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ + package io.confluent.kafkarest.integration.v2; import static java.util.Collections.singletonList; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SeekToTimestampTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SeekToTimestampTest.java index 060bc70bd3..637d184f87 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SeekToTimestampTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v2/SeekToTimestampTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.integration.v2; import io.confluent.kafkarest.Versions; @@ -16,13 +31,13 @@ public class SeekToTimestampTest extends AbstractConsumerTest { private static final String TOPIC_NAME = "topic-1"; private static final String CONSUMER_GROUP_ID = "consumer-group-1"; - private final List> RECORDS_BEFORE_TIMESTAMP = + private static final List> RECORDS_BEFORE_TIMESTAMP = Arrays.asList( new ProducerRecord<>(TOPIC_NAME, "value-1".getBytes()), new ProducerRecord<>(TOPIC_NAME, "value-2".getBytes()), new ProducerRecord<>(TOPIC_NAME, "value-3".getBytes())); - private final List> RECORDS_AFTER_TIMESTAMP = + private static final List> RECORDS_AFTER_TIMESTAMP = Arrays.asList( new ProducerRecord<>(TOPIC_NAME, "value-4".getBytes()), new ProducerRecord<>(TOPIC_NAME, "value-5".getBytes()), diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/AclsResourceIntegrationTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/AclsResourceIntegrationTest.java index a029ae1392..73929fce97 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/AclsResourceIntegrationTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/AclsResourceIntegrationTest.java @@ -298,18 +298,6 @@ public void testCreateSearchAndSeparateDelete() { public void testCreateSearchAndMultiDelete() { createAliceAndBobAcls(); - DeleteAclsResponse expectedMultiDeleteResponse = - DeleteAclsResponse.create( - ImmutableList.of( - ALICE_ACL_DATA - .setMetadata(Resource.Metadata.builder().setSelf(expectedAliceUrl).build()) - .setClusterId(clusterId) - .build(), - BOB_ACL_DATA - .setMetadata(Resource.Metadata.builder().setSelf(expectedBobUrl).build()) - .setClusterId(clusterId) - .build())); - Client webClient = getClient(); restApp.configureBaseApplication(webClient); @@ -323,6 +311,17 @@ public void testCreateSearchAndMultiDelete() { .delete(); assertEquals(Status.BAD_REQUEST.getStatusCode(), multiDeleteNoParamsResponse.getStatus()); + DeleteAclsResponse expectedMultiDeleteResponse = + DeleteAclsResponse.create( + ImmutableList.of( + ALICE_ACL_DATA + .setMetadata(Resource.Metadata.builder().setSelf(expectedAliceUrl).build()) + .setClusterId(clusterId) + .build(), + BOB_ACL_DATA + .setMetadata(Resource.Metadata.builder().setSelf(expectedBobUrl).build()) + .setClusterId(clusterId) + .build())); // Then ensure that a DELETE 
request with the parameters needed to search for and match both // ACLs does delete both ACLs at once. Response multiDeleteResourceTypeAll = diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/BrokerConfigsResourceIntegrationTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/BrokerConfigsResourceIntegrationTest.java index a2d9647d8a..4ed2cb028c 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/BrokerConfigsResourceIntegrationTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/BrokerConfigsResourceIntegrationTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.integration.v3; import static io.confluent.kafkarest.TestUtils.testWithRetry; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ClusterConfigsResourceIntegrationTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ClusterConfigsResourceIntegrationTest.java index 73ad8c9ac4..53405ef68e 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ClusterConfigsResourceIntegrationTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ClusterConfigsResourceIntegrationTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.integration.v3; import static io.confluent.kafkarest.TestUtils.testWithRetry; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ListAllReassignmentsActionIntegrationTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ListAllReassignmentsActionIntegrationTest.java index 43ddd4edd3..6d6b5a7019 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ListAllReassignmentsActionIntegrationTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/integration/v3/ListAllReassignmentsActionIntegrationTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. 
You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.integration.v3; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/mock/MockTime.java b/kafka-rest/src/test/java/io/confluent/kafkarest/mock/MockTime.java index e3823a13df..e470a2de99 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/mock/MockTime.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/mock/MockTime.java @@ -12,6 +12,7 @@ * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ + package io.confluent.kafkarest.mock; import io.confluent.kafkarest.Time; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/ProduceRateLimitersTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/ProduceRateLimitersTest.java index 4d8d2b3dfb..19f07b39ad 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/ProduceRateLimitersTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/ProduceRateLimitersTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ + package io.confluent.kafkarest.resources; import static io.confluent.kafkarest.KafkaRestConfig.PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/RootResourceTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/RootResourceTest.java index 97c0bd8903..fabc15a2ac 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/RootResourceTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/RootResourceTest.java @@ -61,18 +61,18 @@ public void testInvalidAcceptMediatype() { @Test public void testInvalidEntityContentType() { - Response.Status UNSUPPORTED_MEDIA_TYPE = Response.Status.UNSUPPORTED_MEDIA_TYPE; + Response.Status unsupportedMediaType = Response.Status.UNSUPPORTED_MEDIA_TYPE; Response response = request("/", Versions.KAFKA_V2_JSON + ", " + Versions.GENERIC_REQUEST) .post(Entity.entity("", "text/plain")); assertErrorResponse( - UNSUPPORTED_MEDIA_TYPE, + unsupportedMediaType, response, - UNSUPPORTED_MEDIA_TYPE.getStatusCode(), + unsupportedMediaType.getStatusCode(), "HTTP " - + UNSUPPORTED_MEDIA_TYPE.getStatusCode() + + unsupportedMediaType.getStatusCode() + " " - + UNSUPPORTED_MEDIA_TYPE.getReasonPhrase(), + + unsupportedMediaType.getReasonPhrase(), Versions.KAFKA_V2_JSON); } } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/TopicsResourceAvroProduceTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/TopicsResourceAvroProduceTest.java index 798c0bb6c2..88dca3210b 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/TopicsResourceAvroProduceTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v2/TopicsResourceAvroProduceTest.java @@ -86,13 +86,13 @@ public class TopicsResourceAvroProduceTest TestUtils.jsonTree("{\"field\": 1}"), TestUtils.jsonTree("{\"field\": 2}"), }; - private final List RECORDS = + private static final List RECORDS = Arrays.asList( ProduceRecord.create(/* partition= */ 0, TEST_KEYS[0], TEST_VALUES[0]), ProduceRecord.create(/* partition= */ 0, TEST_KEYS[1], TEST_VALUES[1])); private static final TopicPartition PARTITION = new TopicPartition(TOPIC_NAME, 0); - private final List PRODUCE_RESULTS = + private static final List PRODUCE_RESULTS = Arrays.asList( new RecordMetadata(PARTITION, 0L, 0L, 0L, 0L, 1, 1), new RecordMetadata(PARTITION, 0L, 1L, 0L, 0L, 1, 1)); diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/BrokerConfigResourceTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/BrokerConfigResourceTest.java index 4cb698167a..8dd785e273 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/BrokerConfigResourceTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/BrokerConfigResourceTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ + package io.confluent.kafkarest.resources.v3; import static io.confluent.kafkarest.common.CompletableFutures.failedFuture; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/GetReassignmentActionTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/GetReassignmentActionTest.java index a414c66256..391adde93d 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/GetReassignmentActionTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/GetReassignmentActionTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.resources.v3; import static io.confluent.kafkarest.common.CompletableFutures.failedFuture; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ListAllTopicsConfigsActionTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ListAllTopicsConfigsActionTest.java index 6dfc42e48e..baa38184b6 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ListAllTopicsConfigsActionTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ListAllTopicsConfigsActionTest.java @@ -21,7 +21,7 @@ import static java.util.concurrent.CompletableFuture.completedFuture; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import io.confluent.kafkarest.controllers.TopicConfigManager; import io.confluent.kafkarest.controllers.TopicManager; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ProduceActionTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ProduceActionTest.java index 7b02ffa617..2c35985bce 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ProduceActionTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/resources/v3/ProduceActionTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ + package io.confluent.kafkarest.resources.v3; import static io.confluent.kafkarest.KafkaRestConfig.PRODUCE_MAX_BYTES_PER_SECOND; @@ -76,7 +91,7 @@ public static void cleanUp() { @Test public void produceNoSchemaRegistryDefined() throws Exception { // config - final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 1; + final int totalNumberOfProduceCalls = 1; Properties properties = new Properties(); properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100"); properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(999999999)); @@ -87,7 +102,7 @@ public void produceNoSchemaRegistryDefined() throws Exception { // setup ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class); ChunkedOutput mockedChunkedOutput = - getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS); + getChunkedOutput(chunkedOutputFactory, totalNumberOfProduceCalls); Provider countLimitProvider = mock(Provider.class); Provider bytesLimitProvider = mock(Provider.class); @@ -155,8 +170,8 @@ public void produceNoSchemaRegistryDefined() throws Exception { @Test public void streamingRequests() throws Exception { // config - final int TOTAL_NUMBER_OF_PRODUCE_CALLS_PROD1 = 1; - final int TOTAL_NUMBER_OF_STREAMING_CALLS = 4; + final int totalNumberOfProduceCallsProd1 = 1; + final int totalNumberOfStreamingCalls = 4; Properties properties = new Properties(); properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, Integer.toString(10000)); properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(999999999)); @@ -166,7 +181,7 @@ public void streamingRequests() throws Exception { // setup ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class); ChunkedOutput mockedChunkedOutput = - getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS_PROD1); + getChunkedOutput(chunkedOutputFactory, totalNumberOfProduceCallsProd1); Provider countLimitProvider = mock(Provider.class); Provider bytesLimitProvider = mock(Provider.class); @@ -218,7 +233,7 @@ public void streamingRequests() throws Exception { getProduceAction( properties, chunkedOutputFactory, - TOTAL_NUMBER_OF_STREAMING_CALLS, + totalNumberOfStreamingCalls, countLimitProvider, bytesLimitProvider, countLimiterGlobalProvider, @@ -262,7 +277,7 @@ public void streamingRequests() throws Exception { @Test public void produceWithByteLimit() throws Exception { // config - final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2; + final int totalNumberOfProduceCalls = 2; Properties properties = new Properties(); properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100"); properties.put( @@ -273,7 +288,7 @@ public void produceWithByteLimit() throws Exception { // setup ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class); ChunkedOutput mockedChunkedOutput = - getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS); + getChunkedOutput(chunkedOutputFactory, totalNumberOfProduceCalls); Provider countLimitProvider = mock(Provider.class); Provider bytesLimitProvider = mock(Provider.class); @@ -321,7 +336,7 @@ public void produceWithByteLimit() throws Exception { countLimiterGlobalProvider, bytesLimiterGlobalProvider); MappingIterator requests = - getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS); + getProduceRequestsMappingIterator(totalNumberOfProduceCalls); // expected results ProduceResponse produceResponse = getProduceResponse(0); @@ -333,7 +348,8 @@ public void produceWithByteLimit() throws Exception { ErrorResponse err = ErrorResponse.create( 429, - "Request rate limit exceeded: The rate limit of 
requests per second has been exceeded."); + "Request rate limit exceeded: " + + "The rate limit of requests per second has been exceeded."); ResultOrError resultOrErrorFail = ResultOrError.error(err); expect(mockedChunkedOutput.isClosed()).andReturn(false); mockedChunkedOutput.write(resultOrErrorFail); // failing second produce @@ -365,7 +381,7 @@ public void produceWithByteLimit() throws Exception { @Test public void produceWithCountLimit() throws Exception { // config - final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2; + final int totalNumberOfProduceCalls = 2; Properties properties = new Properties(); properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100"); properties.put( @@ -376,7 +392,7 @@ public void produceWithCountLimit() throws Exception { // setup ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class); ChunkedOutput mockedChunkedOutput = - getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS); + getChunkedOutput(chunkedOutputFactory, totalNumberOfProduceCalls); Provider countLimitProvider = mock(Provider.class); Provider bytesLimitProvider = mock(Provider.class); @@ -423,7 +439,7 @@ public void produceWithCountLimit() throws Exception { countLimiterGlobalProvider, bytesLimiterGlobalProvider); MappingIterator requests = - getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS); + getProduceRequestsMappingIterator(totalNumberOfProduceCalls); // expected results ProduceResponse produceResponse = getProduceResponse(0); @@ -435,7 +451,8 @@ public void produceWithCountLimit() throws Exception { ErrorResponse err = ErrorResponse.create( 429, - "Request rate limit exceeded: The rate limit of requests per second has been exceeded."); + "Request rate limit exceeded: " + + "The rate limit of requests per second has been exceeded."); ResultOrError resultOrErrorFail = ResultOrError.error(err); expect(mockedChunkedOutput.isClosed()).andReturn(false); mockedChunkedOutput.write(resultOrErrorFail); // failing second produce @@ -466,7 +483,7 @@ public void produceWithCountLimit() throws Exception { @Test public void produceNoLimit() throws Exception { // config - final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2; + final int totalNumberOfProduceCalls = 2; Properties properties = new Properties(); properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100"); properties.put( @@ -477,7 +494,7 @@ public void produceNoLimit() throws Exception { // setup ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class); ChunkedOutput mockedChunkedOutput = - getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS); + getChunkedOutput(chunkedOutputFactory, totalNumberOfProduceCalls); Provider countLimitProvider = mock(Provider.class); Provider bytesLimitProvider = mock(Provider.class); @@ -508,7 +525,7 @@ public void produceNoLimit() throws Exception { countLimiterGlobalProvider, bytesLimiterGlobalProvider); MappingIterator requests = - getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS); + getProduceRequestsMappingIterator(totalNumberOfProduceCalls); // expected results ProduceResponse produceResponse = getProduceResponse(0); @@ -613,7 +630,8 @@ private static Provider getRecordSerializerProvider(boolean er anyObject(), anyObject(), anyObject(), anyObject(), anyBoolean())) .andThrow( Errors.messageSerializationException( - "Schema Registry not defined, no Schema Registry client available to deserialize message.")) + "Schema Registry not defined, " + + "no Schema Registry client available to deserialize message.")) .anyTimes(); } 
replay(recordSerializerProvider, recordSerializer); @@ -678,36 +696,6 @@ private static MappingIterator getStreamingProduceRequestsMappin return requests; } - private static MappingIterator - getStreamingProduceRequestsMappingIteratorCombinations() throws IOException { - MappingIterator requests = mock(MappingIterator.class); - - ProduceRequest request = ProduceRequest.builder().setOriginalSize(25L).build(); - expect(requests.hasNext()).andReturn(true); - expect(requests.nextValue()).andReturn(request); - - expect(requests.hasNext()).andReturn(true); - expect(requests.nextValue()).andReturn(request); - - expect(requests.hasNext()).andReturn(true); - expect(requests.nextValue()).andReturn(request); - - expect(requests.hasNext()).andReturn(true); - expect(requests.nextValue()).andReturn(request); - - expect(requests.hasNext()).andReturn(true); - expect(requests.nextValue()).andReturn(request); - - expect(requests.hasNext()).andReturn(true); - expect(requests.nextValue()).andReturn(request); - - expect(requests.hasNext()).andReturn(false).times(1); - requests.close(); - replay(requests); - - return requests; - } - private static ChunkedOutput getChunkedOutput( ChunkedOutputFactory chunkedOutputFactory, int times) { ChunkedOutput mockedChunkedOutput = mock(ChunkedOutput.class); @@ -720,10 +708,6 @@ private static ProduceResponse getProduceResponse(int offset) { return getProduceResponse(offset, Optional.empty()); } - private static ProduceResponse getProduceResponse(int offset, int partitionId) { - return getProduceResponse(offset, Optional.empty(), partitionId); - } - private static ProduceResponse getProduceResponse(int offset, Optional waitFor) { return getProduceResponse(offset, waitFor, 0); } diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/response/CrnFactoryImplTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/response/CrnFactoryImplTest.java index 4b0a11311e..4956823de2 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/response/CrnFactoryImplTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/response/CrnFactoryImplTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. You may obtain a copy of the + * License at + * + * http://www.confluent.io/confluent-community-license + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + package io.confluent.kafkarest.response; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/response/StreamingResponseTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/response/StreamingResponseTest.java index 2539acc5b7..fde30ae6da 100644 --- a/kafka-rest/src/test/java/io/confluent/kafkarest/response/StreamingResponseTest.java +++ b/kafka-rest/src/test/java/io/confluent/kafkarest/response/StreamingResponseTest.java @@ -1,3 +1,18 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed under the Confluent Community License (the "License"); you may not use + * this file except in compliance with the License. 
+ * License at
+ *
+ * http://www.confluent.io/confluent-community-license
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
 package io.confluent.kafkarest.response;
 
 import static org.easymock.EasyMock.expect;
@@ -170,7 +185,8 @@ public void testHasNextMappingException() throws IOException {
         ResultOrError.error(
             ErrorResponse.create(
                 400,
-                "Bad Request: Error processing JSON: Error thrown by mapping iterator describing problem."));
+                "Bad Request: Error processing JSON: "
+                    + "Error thrown by mapping iterator describing problem."));
 
     expect(mockedChunkedOutputFactory.getChunkedOutput()).andReturn(mockedChunkedOutput);
     mockedChunkedOutput.write(resultOrError);
@@ -208,7 +224,8 @@ public void testHasNextRuntimeException() throws IOException {
         ResultOrError.error(
             ErrorResponse.create(
                 400,
-                "Bad Request: Error processing message: IO error thrown by mapping iterator describing problem."));
+                "Bad Request: Error processing message: "
+                    + "IO error thrown by mapping iterator describing problem."));
 
     expect(mockedChunkedOutputFactory.getChunkedOutput()).andReturn(mockedChunkedOutput);
     mockedChunkedOutput.write(resultOrError);
diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/testing/KafkaRestFixture.java b/kafka-rest/src/test/java/io/confluent/kafkarest/testing/KafkaRestFixture.java
index 228ef88281..896a88a81b 100644
--- a/kafka-rest/src/test/java/io/confluent/kafkarest/testing/KafkaRestFixture.java
+++ b/kafka-rest/src/test/java/io/confluent/kafkarest/testing/KafkaRestFixture.java
@@ -141,16 +141,17 @@ public void afterEach(ExtensionContext extensionContext) {
     baseUri = null;
   }
 
-  public URI getBaseUri() {
-    checkState(baseUri != null);
-    return baseUri;
-  }
-
   public ObjectMapper getObjectMapper() {
     checkState(application != null);
    return application.getJsonMapper();
   }
 
+  private WebTarget target(Client client) {
+    checkState(application != null);
+    application.configureBaseApplication(client);
+    return client.target(baseUri);
+  }
+
   public WebTarget target() {
     checkState(application != null);
     if (certificates != null) {
@@ -167,12 +168,6 @@ public WebTarget targetAs(String keyName) {
         ClientBuilder.newBuilder().sslContext(certificates.getSslContext(keyName)).build());
   }
 
-  private WebTarget target(Client client) {
-    checkState(application != null);
-    application.configureBaseApplication(client);
-    return client.target(baseUri);
-  }
-
   public static Builder builder() {
     return new Builder();
   }
diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/testing/SslFixture.java b/kafka-rest/src/test/java/io/confluent/kafkarest/testing/SslFixture.java
index b4be4d11e8..440331b2d9 100644
--- a/kafka-rest/src/test/java/io/confluent/kafkarest/testing/SslFixture.java
+++ b/kafka-rest/src/test/java/io/confluent/kafkarest/testing/SslFixture.java
@@ -17,10 +17,6 @@
 import static com.google.common.base.Preconditions.checkState;
 import static java.nio.file.Files.createTempFile;
-import static org.apache.kafka.test.TestSslUtils.createKeyStore;
-import static org.apache.kafka.test.TestSslUtils.createTrustStore;
-import static org.apache.kafka.test.TestSslUtils.generateCertificate;
-import static org.apache.kafka.test.TestSslUtils.generateKeyPair;
 
 import com.google.auto.value.AutoValue;
 import com.google.common.collect.ImmutableMap;
@@ -39,6 +35,7 @@
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManagerFactory;
 import org.apache.kafka.common.config.types.Password;
+import org.apache.kafka.test.TestSslUtils;
 import org.glassfish.jersey.SslConfigurator;
 import org.junit.jupiter.api.extension.AfterEachCallback;
 import org.junit.jupiter.api.extension.BeforeEachCallback;
@@ -67,7 +64,7 @@ public void beforeEach(ExtensionContext extensionContext) throws Exception {
     keys = generateKeys();
     trustStoreLocation = createTempFile("truststore", ".jks");
     trustStorePassword = "truststore-pass";
-    createTrustStore(
+    TestSslUtils.createTrustStore(
         trustStoreLocation.toString(),
         new Password(trustStorePassword),
         keys.entrySet().stream()
@@ -77,13 +74,14 @@ public void beforeEach(ExtensionContext extensionContext) throws Exception {
   private ImmutableMap generateKeys() throws Exception {
     ImmutableMap.Builder keys = ImmutableMap.builder();
     for (String keyName : keyNames) {
-      KeyPair keyPair = generateKeyPair("RSA");
+      KeyPair keyPair = TestSslUtils.generateKeyPair("RSA");
       X509Certificate certificate =
-          generateCertificate("CN=localhost, O=" + keyName, keyPair, 30, "SHA1withRSA");
+          TestSslUtils.generateCertificate(
+              "CN=localhost, O=" + keyName, keyPair, 30, "SHA1withRSA");
       Path keyStoreLocation = createTempFile(keyName + "-keystore", ".jks");
       String keyStorePassword = keyName + "-pass";
       String keyPassword = keyName + "-pass";
-      createKeyStore(
+      TestSslUtils.createKeyStore(
          keyStoreLocation.toString(),
          new Password(keyStorePassword),
          new Password(keyPassword),
diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/unit/DefaultKafkaRestContextTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/unit/DefaultKafkaRestContextTest.java
index be30845132..2511726ec0 100644
--- a/kafka-rest/src/test/java/io/confluent/kafkarest/unit/DefaultKafkaRestContextTest.java
+++ b/kafka-rest/src/test/java/io/confluent/kafkarest/unit/DefaultKafkaRestContextTest.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2020 Confluent Inc.
+ *
+ * Licensed under the Confluent Community License (the "License"); you may not use
+ * this file except in compliance with the License. You may obtain a copy of the
+ * License at
+ *
+ * http://www.confluent.io/confluent-community-license
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
 package io.confluent.kafkarest.unit;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/unit/UriUtilsTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/unit/UriUtilsTest.java
index 815ad72778..c3890dc750 100644
--- a/kafka-rest/src/test/java/io/confluent/kafkarest/unit/UriUtilsTest.java
+++ b/kafka-rest/src/test/java/io/confluent/kafkarest/unit/UriUtilsTest.java
@@ -38,7 +38,7 @@ public void setUp() {
   }
 
   @Test
-  public void testAbsoluteURIBuilderDefaultHost() {
+  public void testAbsoluteUriBuilderDefaultHost() {
     KafkaRestConfig config = new KafkaRestConfig();
     EasyMock.expect(uriInfo.getAbsolutePath()).andStubReturn(URI.create("http://foo.com"));
     EasyMock.expect(uriInfo.getBaseUri()).andReturn(URI.create("http://foo.com"));
@@ -48,7 +48,7 @@ public void testAbsoluteURIBuilderDefaultHost() {
 
   @Test
-  public void testAbsoluteURIBuilderOverrideHost() {
+  public void testAbsoluteUriBuilderOverrideHost() {
     Properties props = new Properties();
     props.put(KafkaRestConfig.HOST_NAME_CONFIG, "bar.net");
     KafkaRestConfig config = new KafkaRestConfig(props);
@@ -60,7 +60,7 @@ public void testAbsoluteURIBuilderOverrideHost() {
 
   @Test
-  public void testAbsoluteURIBuilderWithPort() {
+  public void testAbsoluteUriBuilderWithPort() {
     Properties props = new Properties();
     props.put(KafkaRestConfig.HOST_NAME_CONFIG, "bar.net");
     props.put(KafkaRestConfig.PORT_CONFIG, 5000);
@@ -73,7 +73,7 @@ public void testAbsoluteURIBuilderWithPort() {
 
   @Test
-  public void testAbsoluteURIBuilderWithInvalidListener() {
+  public void testAbsoluteUriBuilderWithInvalidListener() {
     Properties props = new Properties();
     props.put(KafkaRestConfig.HOST_NAME_CONFIG, "bar.net");
     props.put(KafkaRestConfig.LISTENERS_CONFIG, "http:||0.0.0.0:9091");
@@ -86,7 +86,7 @@ public void testAbsoluteURIBuilderWithInvalidListener() {
 
   @Test
-  public void testAbsoluteURIBuilderWithListenerForHttp() {
+  public void testAbsoluteUriBuilderWithListenerForHttp() {
     Properties props = new Properties();
     props.put(KafkaRestConfig.HOST_NAME_CONFIG, "bar.net");
     props.put(KafkaRestConfig.LISTENERS_CONFIG, "http://0.0.0.0:9091,https://0.0.0.0:9092");
@@ -99,7 +99,7 @@ public void testAbsoluteURIBuilderWithListenerForHttp() {
 
   @Test
-  public void testAbsoluteURIBuilderWithListenerForHttps() {
+  public void testAbsoluteUriBuilderWithListenerForHttps() {
     Properties props = new Properties();
     props.put(KafkaRestConfig.HOST_NAME_CONFIG, "bar.net");
     props.put(KafkaRestConfig.LISTENERS_CONFIG, "http://0.0.0.0:9091,https://0.0.0.0:9092");
@@ -112,7 +112,7 @@ public void testAbsoluteURIBuilderWithListenerForHttps() {
 
   @Test
-  public void testAbsoluteURIBuilderWithIPV6Listener() {
+  public void testAbsoluteUriBuilderWithIPV6Listener() {
     Properties props = new Properties();
     props.put(KafkaRestConfig.HOST_NAME_CONFIG, "bar.net");
     props.put(KafkaRestConfig.LISTENERS_CONFIG, "http://[fe80:0:1:2:3:4:5:6]:9092");
@@ -125,7 +125,7 @@ public void testAbsoluteURIBuilderWithIPV6Listener() {
 
   @Test
-  public void testAbsoluteURIBuilderWithTruncatedIPV6Listener() {
+  public void testAbsoluteUriBuilderWithTruncatedIPV6Listener() {
     Properties props = new Properties();
     props.put(KafkaRestConfig.HOST_NAME_CONFIG, "bar.net");
     props.put(KafkaRestConfig.LISTENERS_CONFIG, "http://[fe80::1]:9092");
diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/v2/KafkaConsumerManagerTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/v2/KafkaConsumerManagerTest.java
index fb5c373dda..a91ba68d97 100644
--- a/kafka-rest/src/test/java/io/confluent/kafkarest/v2/KafkaConsumerManagerTest.java
+++ b/kafka-rest/src/test/java/io/confluent/kafkarest/v2/KafkaConsumerManagerTest.java
@@ -12,6 +12,7 @@
  * WARRANTIES OF ANY KIND, either express or implied. See the License for the
  * specific language governing permissions and limitations under the License.
  */
+
 package io.confluent.kafkarest.v2;
 
 import static java.util.Collections.singletonMap;
@@ -645,14 +646,6 @@ private List> bootstrapConsumer(
 
   private List> bootstrapConsumer(
       final MockConsumer consumer, boolean toExpectCreate) {
-    List> referenceRecords =
-        Arrays.asList(
-            ConsumerRecord.create(
-                topicName, ByteString.copyFromUtf8("k1"), ByteString.copyFromUtf8("v1"), 0, 0),
-            ConsumerRecord.create(
-                topicName, ByteString.copyFromUtf8("k2"), ByteString.copyFromUtf8("v2"), 0, 1),
-            ConsumerRecord.create(
-                topicName, ByteString.copyFromUtf8("k3"), ByteString.copyFromUtf8("v3"), 0, 2));
 
     if (toExpectCreate) {
       expectCreate(consumer);
@@ -669,6 +662,16 @@ private List> bootstrapConsumer(
         new ConsumerSubscriptionRecord(Collections.singletonList(topicName), null));
     consumer.rebalance(Collections.singletonList(new TopicPartition(topicName, 0)));
     consumer.updateBeginningOffsets(singletonMap(new TopicPartition(topicName, 0), 0L));
+
+    List> referenceRecords =
+        Arrays.asList(
+            ConsumerRecord.create(
+                topicName, ByteString.copyFromUtf8("k1"), ByteString.copyFromUtf8("v1"), 0, 0),
+            ConsumerRecord.create(
+                topicName, ByteString.copyFromUtf8("k2"), ByteString.copyFromUtf8("v2"), 0, 1),
+            ConsumerRecord.create(
+                topicName, ByteString.copyFromUtf8("k3"), ByteString.copyFromUtf8("v3"), 0, 2));
+
     for (ConsumerRecord record : referenceRecords) {
       consumer.addRecord(
           new org.apache.kafka.clients.consumer.ConsumerRecord<>(
diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/v2/LoadTest.java b/kafka-rest/src/test/java/io/confluent/kafkarest/v2/LoadTest.java
index c1a360cbd0..e8e416ff00 100644
--- a/kafka-rest/src/test/java/io/confluent/kafkarest/v2/LoadTest.java
+++ b/kafka-rest/src/test/java/io/confluent/kafkarest/v2/LoadTest.java
@@ -12,6 +12,7 @@
  * WARRANTIES OF ANY KIND, either express or implied. See the License for the
  * specific language governing permissions and limitations under the License.
  */
+
 package io.confluent.kafkarest.v2;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
diff --git a/kafka-rest/src/test/java/io/confluent/kafkarest/v2/MockConsumer.java b/kafka-rest/src/test/java/io/confluent/kafkarest/v2/MockConsumer.java
index b1f9b949ab..4fb8266548 100644
--- a/kafka-rest/src/test/java/io/confluent/kafkarest/v2/MockConsumer.java
+++ b/kafka-rest/src/test/java/io/confluent/kafkarest/v2/MockConsumer.java
@@ -12,6 +12,7 @@
  * WARRANTIES OF ANY KIND, either express or implied. See the License for the
  * specific language governing permissions and limitations under the License.
  */
+
 package io.confluent.kafkarest.v2;
 
 import static java.util.Collections.unmodifiableMap;
diff --git a/pom.xml b/pom.xml
index a34ee50aa4..51a6b81355 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,6 +46,7 @@
     https://packages.confluent.io/maven/
 
+    checkstyle/checkstyle.xml
     checkstyle/suppressions.xml
     7.3.0-0
 
@@ -156,6 +157,15 @@
 
+
+        org.apache.maven.plugins
+        maven-checkstyle-plugin
+        ${maven-checkstyle-plugin.version}
+
+          true
+
+
+
       com.diffplug.spotless
       spotless-maven-plugin