Skip to content

Commit

Permalink
[FLINK-28405][Connector/Kafka] Update Confluent Platform images used …
Browse files Browse the repository at this point in the history
…for testing to v7.2.2. This closes apache#20170

* [FLINK-28405][Connector/Kafka] Update Confluent Platform images used for testing to v7.2.2

* [FLINK-28405][Connector/Kafka] Adding Guava to Schema Registry test since the Schema Registry client needs this, but doesn't bundle it

* [FLINK-28405][Connector/Kafka] Refactored SchemaRegistryContainer so that we can include the Schema Registry container in DockerImageVersions

* [FLINK-28405][Connector/Kafka] Make sure container gets cached
  • Loading branch information
MartijnVisser committed Oct 18, 2022
1 parent 3f863b1 commit 2ec9117
Show file tree
Hide file tree
Showing 11 changed files with 42 additions and 25 deletions.
4 changes: 1 addition & 3 deletions flink-connectors/flink-connector-kafka/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -110,12 +110,10 @@ under the License.
<scope>test</scope>
</dependency>

<!-- Required to execute the kafka server for testing. Please change the zookeeper version accordingly when changing the Kafka version
https://github.com/apache/kafka/blob/839b886f9b732b151e1faeace7303c80641c08c4/gradle/dependencies.gradle#L122 -->
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>3.5.9</version>
<version>${zookeeper.version}</version>
<scope>test</scope>
</dependency>

Expand Down
18 changes: 16 additions & 2 deletions flink-end-to-end-tests/flink-end-to-end-tests-common-kafka/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -115,14 +115,14 @@ under the License.
<!-- https://mvnrepository.com/artifact/io.confluent/kafka-avro-serializer -->
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>6.2.2</version>
<version>7.2.2</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-client</artifactId>
<version>6.2.2</version>
<version>7.2.2</version>
<scope>test</scope>
<exclusions>
<exclusion>
Expand Down Expand Up @@ -156,6 +156,12 @@ under the License.
<version>${project.version}</version>
<type>test-jar</type>
</dependency>
<!-- Needed by Schema Registry -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>30.1.1-jre</version>
</dependency>
</dependencies>

<build>
Expand Down Expand Up @@ -239,6 +245,14 @@ under the License.
<type>jar</type>
<outputDirectory>${project.build.directory}/dependencies</outputDirectory>
</artifactItem>
<artifactItem>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>30.1.1-jre</version>
<destFileName>guava.jar</destFileName>
<type>jar</type>
<outputDirectory>${project.build.directory}/dependencies</outputDirectory>
</artifactItem>
</artifactItems>
</configuration>
</plugin>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,8 @@ public class SQLClientSchemaRegistryITCase {
private static final Path sqlToolBoxJar = ResourceTestUtils.getResource(".*SqlToolbox.jar");
private final Path sqlConnectorKafkaJar = ResourceTestUtils.getResource(".*kafka.jar");

private final Path guavaJar = ResourceTestUtils.getResource(".*guava.jar");

@ClassRule public static final Network NETWORK = Network.newNetwork();

@ClassRule public static final Timeout TIMEOUT = new Timeout(10, TimeUnit.MINUTES);
Expand All @@ -84,7 +86,7 @@ public class SQLClientSchemaRegistryITCase {

@ClassRule
public static final SchemaRegistryContainer REGISTRY =
new SchemaRegistryContainer("6.2.2")
new SchemaRegistryContainer(DockerImageName.parse(DockerImageVersions.SCHEMA_REGISTRY))
.withKafka(INTER_CONTAINER_KAFKA_ALIAS + ":9092")
.withNetwork(NETWORK)
.withNetworkAliases(INTER_CONTAINER_REGISTRY_ALIAS)
Expand Down Expand Up @@ -254,7 +256,11 @@ private void executeSqlStatements(List<String> sqlLines) throws Exception {
flink.submitSQLJob(
new SQLJobSubmission.SQLJobSubmissionBuilder(sqlLines)
.addJars(
sqlAvroJar, sqlAvroRegistryJar, sqlConnectorKafkaJar, sqlToolBoxJar)
sqlAvroJar,
sqlAvroRegistryJar,
sqlConnectorKafkaJar,
sqlToolBoxJar,
guavaJar)
.build());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,16 @@
package org.apache.flink.tests.util.kafka.containers;

import org.testcontainers.containers.GenericContainer;
import org.testcontainers.utility.DockerImageName;

/**
* A container for a Confluent Schema Registry. It runs the schema registry on port 8082 in the
* docker network so that it does not overlap with the Flink cluster.
*/
public class SchemaRegistryContainer extends GenericContainer<SchemaRegistryContainer> {

public SchemaRegistryContainer(String version) {
super("confluentinc/cp-schema-registry:" + version);
/**
 * Creates a Schema Registry container from the given Docker image.
 *
 * @param imageName the Confluent Schema Registry image to run (e.g. taken from
 *     DockerImageVersions.SCHEMA_REGISTRY so the image can be cached centrally)
 */
public SchemaRegistryContainer(DockerImageName imageName) {
super(imageName);
// Port 8082 is exposed so the registry does not overlap with the Flink cluster
// in the shared docker network (see class-level comment).
withExposedPorts(8082);
}

Expand Down
6 changes: 1 addition & 5 deletions flink-end-to-end-tests/test-scripts/kafka-common.sh
Original file line number Diff line number Diff line change
Expand Up @@ -52,11 +52,7 @@ function setup_kafka_dist {
function setup_confluent_dist {
# download confluent
mkdir -p $TEST_DATA_DIR
  if [[ $CONFLUENT_MAJOR_VERSION =~ ^[6] ]]; then
    CONFLUENT_URL="https://packages.confluent.io/archive/$CONFLUENT_MAJOR_VERSION/confluent-community-$CONFLUENT_VERSION.tar.gz"
  else
    CONFLUENT_URL="https://packages.confluent.io/archive/$CONFLUENT_MAJOR_VERSION/confluent-community-$CONFLUENT_VERSION-2.12.tar.gz"
  fi
  CONFLUENT_URL="https://packages.confluent.io/archive/$CONFLUENT_MAJOR_VERSION/confluent-community-$CONFLUENT_VERSION.tar.gz"
echo "Downloading confluent from $CONFLUENT_URL"
cache_path=$(get_artifact $CONFLUENT_URL)
ln "$cache_path" "${TEST_DATA_DIR}/confluent.tgz"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
set -Eeuo pipefail

KAFKA_VERSION="3.2.3"
CONFLUENT_VERSION="6.2.2"
CONFLUENT_MAJOR_VERSION="6.2"
CONFLUENT_VERSION="7.2.2"
CONFLUENT_MAJOR_VERSION="7.2"
# Check the Confluent Platform <> Apache Kafka compatibility matrix when updating KAFKA_VERSION
KAFKA_SQL_VERSION="universal"

Expand Down
4 changes: 2 additions & 2 deletions flink-end-to-end-tests/test-scripts/test_pyflink.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
set -Eeuo pipefail

KAFKA_VERSION="3.2.3"
CONFLUENT_VERSION="6.2.2"
CONFLUENT_MAJOR_VERSION="6.2"
CONFLUENT_VERSION="7.2.2"
CONFLUENT_MAJOR_VERSION="7.2"
# Check the Confluent Platform <> Apache Kafka compatibility matrix when updating KAFKA_VERSION
KAFKA_SQL_VERSION="universal"
SQL_JARS_DIR=${END_TO_END_DIR}/flink-sql-client-test/target/sql-jars
Expand Down
2 changes: 1 addition & 1 deletion flink-formats/flink-avro-confluent-registry/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ under the License.

<properties>
<kafka.version>3.2.3</kafka.version>
<confluent.version>6.2.2</confluent.version>
<confluent.version>7.2.2</confluent.version>
</properties>

<repositories>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@ This project bundles the following dependencies under the Apache Software Licens
- com.fasterxml.jackson.core:jackson-databind:2.13.4
- com.fasterxml.jackson.core:jackson-annotations:2.13.4
- org.apache.commons:commons-compress:1.21
- io.confluent:kafka-schema-registry-client:6.2.2
- org.apache.kafka:kafka-clients:6.2.2-ccs
- io.confluent:common-config:6.2.2
- io.confluent:common-utils:6.2.2
- io.confluent:kafka-schema-registry-client:7.2.2
- org.apache.kafka:kafka-clients:7.2.2-ccs
- io.confluent:common-config:7.2.2
- io.confluent:common-utils:7.2.2
- org.glassfish.jersey.core:jersey-common:2.30

The binary distribution of this product bundles these dependencies under the Eclipse Public License - v 2.0 (https://www.eclipse.org/org/documents/epl-2.0/EPL-2.0.txt)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,9 @@
*/
public class DockerImageVersions {

public static final String KAFKA = "confluentinc/cp-kafka:6.2.2";
public static final String KAFKA = "confluentinc/cp-kafka:7.2.2";

public static final String SCHEMA_REGISTRY = "confluentinc/cp-schema-registry:7.2.2";

public static final String RABBITMQ = "rabbitmq:3.9.8-management-alpine";

Expand Down
2 changes: 1 addition & 1 deletion tools/azure-pipelines/cache_docker_images.sh
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ then
fi

# This is the pattern that determines which containers we save.
DOCKER_IMAGE_CACHE_PATTERN="testcontainers|kafka|postgres|mysql|pulsar|cassandra"
DOCKER_IMAGE_CACHE_PATTERN="testcontainers|kafka|postgres|mysql|pulsar|cassandra|schema-registry"

# The path to the tar file that will contain the saved docker images.
DOCKER_IMAGES_CACHE_PATH="${DOCKER_IMAGES_CACHE_FOLDER}/cache.tar"
Expand Down

0 comments on commit 2ec9117

Please sign in to comment.