Skip to content
This repository has been archived by the owner on Apr 11, 2024. It is now read-only.

Commit

Permalink
Testcontainers already has a built-in way to NOT run tests when Docker…
Browse files Browse the repository at this point in the history
… isn't available. (opensearch-project#440)

Use that instead of hand-rolling our own solution.

Signed-off-by: Greg Schohn <[email protected]>
  • Loading branch information
gregschohn authored Nov 16, 2023
1 parent b8ddde0 commit e76841e
Show file tree
Hide file tree
Showing 4 changed files with 5 additions and 45 deletions.
1 change: 0 additions & 1 deletion TrafficCapture/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,6 @@ jacocoTestReport {
}

reports {
println("Builddir=${buildDir}...${classDirectories}")
xml.required = true
xml.destination file("${buildDir}/reports/jacoco/test/jacocoTestReport.xml")
html.required = true
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,11 @@
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.Assume;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.opensearch.migrations.AssumeDockerIsAvailableExtension;
import org.opensearch.migrations.replay.kafka.KafkaProtobufConsumer;
import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource;
import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey;
Expand All @@ -40,8 +38,7 @@
import java.util.stream.Stream;

@Slf4j
@Testcontainers
@ExtendWith(AssumeDockerIsAvailableExtension.class)
@Testcontainers(disabledWithoutDocker = true)
@Tag("requiresDocker")
public class KafkaRestartingTrafficReplayerTest {
public static final int INITIAL_STOP_REPLAYER_REQUEST_COUNT = 1;
Expand All @@ -56,9 +53,8 @@ public class KafkaRestartingTrafficReplayerTest {

@Container
// see https://docs.confluent.io/platform/current/installation/versions-interoperability.html#cp-and-apache-kafka-compatibility
private KafkaContainer embeddedKafkaBroker
= AssumeDockerIsAvailableExtension.assumeNoThrow(
()->new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0")));
private KafkaContainer embeddedKafkaBroker =
new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0"));

private static class CounterLimitedReceiverFactory implements Supplier<Consumer<SourceTargetCaptureTuple>> {
AtomicInteger nextStopPointRef = new AtomicInteger(INITIAL_STOP_REPLAYER_REQUEST_COUNT);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,10 @@
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.jetbrains.annotations.NotNull;
import org.junit.Assume;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.opensearch.migrations.AssumeDockerIsAvailableExtension;
import org.opensearch.migrations.trafficcapture.protos.ReadObservation;
import org.opensearch.migrations.trafficcapture.protos.TrafficObservation;
import org.opensearch.migrations.trafficcapture.protos.TrafficStream;
Expand All @@ -32,9 +30,8 @@
import java.util.concurrent.atomic.AtomicInteger;

@Slf4j
@Testcontainers
@Testcontainers(disabledWithoutDocker = true)
@Tag("requiresDocker")
@ExtendWith(AssumeDockerIsAvailableExtension.class)
public class KafkaProtobufConsumerLongTermTest {

public static final String TEST_GROUP_CONSUMER_ID = "TEST_GROUP_CONSUMER_ID";
Expand All @@ -47,8 +44,7 @@ public class KafkaProtobufConsumerLongTermTest {
@Container
// see https://docs.confluent.io/platform/current/installation/versions-interoperability.html#cp-and-apache-kafka-compatibility
private KafkaContainer embeddedKafkaBroker =
AssumeDockerIsAvailableExtension.assumeNoThrow(
()->new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0")));
new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0"));


Producer<String, byte[]> buildKafkaProducer() {
Expand Down

0 comments on commit e76841e

Please sign in to comment.