Skip to content

Commit a5a1e0f

Browse files
authored
Add new versions of Kafka 3.6.0 and 3.5.1 (#60)
* Add new versions of Kafka 3.6.0 and 3.5.1 Signed-off-by: see-quick <[email protected]> * add checkstyle exceptions Signed-off-by: see-quick <[email protected]> --------- Signed-off-by: see-quick <[email protected]>
1 parent 6537eba commit a5a1e0f

File tree

3 files changed

+105
-11
lines changed

3 files changed

+105
-11
lines changed

.checkstyle/checkstyle.xml

+8-2
Original file line numberDiff line numberDiff line change
@@ -87,9 +87,15 @@
8787
<!-- code quality -->
8888
<module name="MethodLength"/>
8989
<module name="ParameterNumber"/>
90-
<module name="ClassDataAbstractionCoupling"/>
90+
<module name="ClassDataAbstractionCoupling">
91+
<!-- default is 7-->
92+
<property name="max" value="10"/>
93+
</module>
9194
<module name="BooleanExpressionComplexity"/>
92-
<module name="ClassFanOutComplexity"/>
95+
<module name="ClassFanOutComplexity">
96+
<!-- default is 20-->
97+
<property name="max" value="27"/>
98+
</module>
9399
<module name="CyclomaticComplexity">
94100
<!-- default is 10-->
95101
<property name="max" value="11"/>

src/main/resources/kafka_versions.json

+2-1
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
"3.2.3": "quay.io/strimzi-test-container/test-container:latest-kafka-3.2.3",
77
"3.3.2": "quay.io/strimzi-test-container/test-container:latest-kafka-3.3.2",
88
"3.4.1": "quay.io/strimzi-test-container/test-container:latest-kafka-3.4.1",
9-
"3.5.0": "quay.io/strimzi-test-container/test-container:latest-kafka-3.5.0"
9+
"3.5.1": "quay.io/strimzi-test-container/test-container:latest-kafka-3.5.1",
10+
"3.6.0": "quay.io/strimzi-test-container/test-container:latest-kafka-3.6.0"
1011
}
1112
}

src/test/java/io/strimzi/test/container/StrimziKafkaContainerIT.java

+95-8
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,19 @@
44
*/
55
package io.strimzi.test.container;
66

7+
import org.apache.kafka.clients.admin.AdminClient;
8+
import org.apache.kafka.clients.admin.AdminClientConfig;
9+
import org.apache.kafka.clients.admin.NewTopic;
10+
import org.apache.kafka.clients.consumer.ConsumerConfig;
11+
import org.apache.kafka.clients.consumer.ConsumerRecord;
12+
import org.apache.kafka.clients.consumer.ConsumerRecords;
13+
import org.apache.kafka.clients.consumer.KafkaConsumer;
14+
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
15+
import org.apache.kafka.clients.producer.KafkaProducer;
16+
import org.apache.kafka.clients.producer.ProducerConfig;
17+
import org.apache.kafka.clients.producer.ProducerRecord;
18+
import org.apache.kafka.common.serialization.StringDeserializer;
19+
import org.apache.kafka.common.serialization.StringSerializer;
720
import org.junit.jupiter.params.ParameterizedTest;
821
import org.junit.jupiter.params.provider.MethodSource;
922
import org.slf4j.Logger;
@@ -13,6 +26,7 @@
1326
import org.testcontainers.containers.Container;
1427
import org.testcontainers.containers.Network;
1528
import org.testcontainers.containers.ToxiproxyContainer;
29+
import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
1630
import org.testcontainers.utility.DockerImageName;
1731
import org.testcontainers.utility.MountableFile;
1832

@@ -21,8 +35,16 @@
2135
import java.net.Socket;
2236
import java.net.SocketAddress;
2337
import java.net.SocketTimeoutException;
38+
import java.time.Duration;
39+
import java.util.Collection;
40+
import java.util.Collections;
2441
import java.util.HashMap;
42+
import java.util.Locale;
2543
import java.util.Map;
44+
import java.util.UUID;
45+
import java.util.concurrent.ExecutionException;
46+
import java.util.concurrent.TimeUnit;
47+
import java.util.concurrent.TimeoutException;
2648

2749
import static org.hamcrest.CoreMatchers.containsString;
2850
import static org.hamcrest.CoreMatchers.equalTo;
@@ -42,15 +64,16 @@ public class StrimziKafkaContainerIT extends AbstractIT {
4264
@MethodSource("retrieveKafkaVersionsFile")
4365
void testStartContainerWithEmptyConfiguration(final String imageName) {
4466
assumeDocker();
45-
systemUnderTest = new StrimziKafkaContainer(imageName)
46-
.withBrokerId(1)
47-
.waitForRunning();
48-
systemUnderTest.start();
4967

50-
assertThat(systemUnderTest.getBootstrapServers(), is("PLAINTEXT://"
51-
+ systemUnderTest.getContainerIpAddress() + ":" + systemUnderTest.getMappedPort(9092)));
68+
try (StrimziKafkaContainer systemUnderTest = new StrimziKafkaContainer(imageName)
69+
.withBrokerId(1)
70+
.waitForRunning()) {
5271

53-
systemUnderTest.stop();
72+
systemUnderTest.start();
73+
74+
assertThat(systemUnderTest.getBootstrapServers(), is("PLAINTEXT://"
75+
+ systemUnderTest.getContainerIpAddress() + ":" + systemUnderTest.getMappedPort(9092)));
76+
}
5477
}
5578

5679
@ParameterizedTest(name = "testStartContainerWithSomeConfiguration-{0}")
@@ -311,12 +334,76 @@ void testStartBrokerWithProxyContainer(final String imageName) {
311334
systemUnderTest.stop();
312335
}
313336

314-
@ParameterizedTest(name = "testStartBrokerWithProxyContainer-{0}")
337+
@ParameterizedTest(name = "testGetProxyWithNoContainer-{0}")
315338
@MethodSource("retrieveKafkaVersionsFile")
316339
void testGetProxyWithNoContainer(final String imageName) {
317340
systemUnderTest = new StrimziKafkaContainer(imageName)
318341
.waitForRunning();
319342
systemUnderTest.start();
320343
assertThrows(IllegalStateException.class, () -> systemUnderTest.getProxy());
321344
}
345+
346+
@Test
347+
void testKafkaContainerFunctionality() {
348+
// using try-with-resources for AdminClient, KafkaProducer and KafkaConsumer (implicit closing connection)
349+
try (StrimziKafkaContainer systemUnderTest = new StrimziKafkaContainer()
350+
.waitForRunning()) {
351+
352+
systemUnderTest.start();
353+
354+
try (final AdminClient adminClient = AdminClient.create(ImmutableMap.of(
355+
AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, systemUnderTest.getBootstrapServers()));
356+
KafkaProducer<String, String> producer = new KafkaProducer<>(
357+
ImmutableMap.of(
358+
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, systemUnderTest.getBootstrapServers(),
359+
ProducerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString()
360+
),
361+
new StringSerializer(),
362+
new StringSerializer()
363+
);
364+
KafkaConsumer<String, String> consumer = new KafkaConsumer<>(
365+
ImmutableMap.of(
366+
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, systemUnderTest.getBootstrapServers(),
367+
ConsumerConfig.GROUP_ID_CONFIG, "tc-" + UUID.randomUUID(),
368+
ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, OffsetResetStrategy.EARLIEST.name().toLowerCase(Locale.ROOT)
369+
),
370+
new StringDeserializer(),
371+
new StringDeserializer())) {
372+
373+
final String topicName = "example-topic";
374+
final String recordKey = "strimzi";
375+
final String recordValue = "the-best-project-in-the-world";
376+
377+
final Collection<NewTopic> topics = Collections.singletonList(new NewTopic(topicName, 1, (short) 1));
378+
adminClient.createTopics(topics).all().get(30, TimeUnit.SECONDS);
379+
380+
consumer.subscribe(Collections.singletonList(topicName));
381+
382+
producer.send(new ProducerRecord<>(topicName, recordKey, recordValue)).get();
383+
384+
Utils.waitFor("Consumer records are present", Duration.ofSeconds(10).toMillis(), Duration.ofMinutes(2).toMillis(),
385+
() -> {
386+
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
387+
388+
if (records.isEmpty()) {
389+
return false;
390+
}
391+
392+
// verify count
393+
assertThat(records.count(), is(1));
394+
395+
ConsumerRecord<String, String> consumerRecord = records.records(topicName).iterator().next();
396+
397+
// verify content of the record
398+
assertThat(consumerRecord.topic(), is(topicName));
399+
assertThat(consumerRecord.key(), is(recordKey));
400+
assertThat(consumerRecord.value(), is(recordValue));
401+
402+
return true;
403+
});
404+
} catch (ExecutionException | InterruptedException | TimeoutException e) {
405+
throw new RuntimeException(e);
406+
}
407+
}
408+
}
322409
}

0 commit comments

Comments
 (0)