[FLINK-11086] Clean up profiles and dependency exclusions
This closes apache#11983
rmetzger committed May 13, 2020
1 parent 2cc63a6 commit 47323a4
Showing 15 changed files with 112 additions and 155 deletions.
2 changes: 1 addition & 1 deletion azure-pipelines.yml
@@ -65,7 +65,7 @@ jobs:
vmImage: 'ubuntu-latest'
e2e_pool_definition:
vmImage: 'ubuntu-16.04'
environment: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Pe2e-hadoop"
environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
run_end_to_end: false
container: flink-build-container
jdk: jdk8
45 changes: 9 additions & 36 deletions flink-connectors/flink-hbase/pom.xml
@@ -101,15 +101,15 @@ under the License.
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>

@@ -145,31 +145,6 @@ under the License.
<groupId>org.mortbay.jetty</groupId>
<artifactId>servlet-api-2.5</artifactId>
</exclusion>
<!-- The hadoop dependencies are handled through flink-shaded-hadoop -->
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</exclusion>
<!-- Bug in hbase annotations, can be removed when fixed. See FLINK-2153. -->
<exclusion>
<groupId>org.apache.hbase</groupId>
@@ -207,6 +182,10 @@ under the License.
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>

@@ -231,12 +210,6 @@ under the License.
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
@@ -146,7 +146,7 @@ private static Configuration initialize(Configuration conf) {
public static void setUp() throws Exception {
// HBase 1.4 does not work with Hadoop 3
// because it uses Guava 12.0.1, Hadoop 3 uses Guava 27.0-jre.
// There is not Guava version in between that works with both.
// There is no Guava version in between that works with both.
Assume.assumeTrue("This test is skipped for Hadoop versions above 3", VersionUtil.compareVersions(System.getProperty("hadoop.version"), "3.0.0") < 0);

LOG.info("HBase minicluster: Starting");
32 changes: 26 additions & 6 deletions flink-end-to-end-tests/flink-bucketing-sink-test/pom.xml
@@ -50,19 +50,39 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>provided</scope>
<exclusions>
<!-- Needed for proper dependency convergence -->
<!-- needed for dependency convergence -->
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>com.nimbusds</groupId>
<artifactId>nimbus-jose-jwt</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>

<build>
@@ -35,6 +35,7 @@
import org.apache.commons.lang3.StringUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -95,7 +96,8 @@ public static Collection<Object[]> data() {
private Path result;
private Path sqlClientSessionConf;

private static final DownloadCache downloadCache = DownloadCache.get();
@ClassRule
public static final DownloadCache DOWNLOAD_CACHE = DownloadCache.get();

private static final Path sqlAvroJar = TestUtils.getResourceJar(".*avro.jar");
private static final Path sqlJsonJar = TestUtils.getResourceJar(".*json.jar");
@@ -112,15 +114,15 @@ public SQLClientKafkaITCase(String kafkaVersion, String kafkaSQLVersion, String

@Before
public void before() throws Exception {
downloadCache.before();
DOWNLOAD_CACHE.before();
Path tmpPath = tmp.getRoot().toPath();
LOG.info("The current temporary path: {}", tmpPath);
this.sqlClientSessionConf = tmpPath.resolve("sql-client-session.conf");
this.result = tmpPath.resolve("result");

apacheAvroJars.add(downloadCache.getOrDownload("https://repo1.maven.org/maven2/org/apache/avro/avro/1.8.2/avro-1.8.2.jar", tmpPath));
apacheAvroJars.add(downloadCache.getOrDownload("https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar", tmpPath));
apacheAvroJars.add(downloadCache.getOrDownload("https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar", tmpPath));
apacheAvroJars.add(DOWNLOAD_CACHE.getOrDownload("https://repo1.maven.org/maven2/org/apache/avro/avro/1.8.2/avro-1.8.2.jar", tmpPath));
apacheAvroJars.add(DOWNLOAD_CACHE.getOrDownload("https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar", tmpPath));
apacheAvroJars.add(DOWNLOAD_CACHE.getOrDownload("https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar", tmpPath));
}

@Test
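The change above exposes the shared DownloadCache as a JUnit 4 @ClassRule so the framework can manage its lifecycle once per test class. For reference, the general pattern looks like this (a minimal sketch; SharedCache is a hypothetical stand-in, not Flink's DownloadCache API):

    import org.junit.ClassRule;
    import org.junit.Test;
    import org.junit.rules.ExternalResource;

    public class ClassRuleExample {

        // Hypothetical shared resource standing in for something like DownloadCache.
        static final class SharedCache extends ExternalResource {
            @Override
            protected void before() {
                // runs once, before the first test in the class
            }

            @Override
            protected void after() {
                // runs once, after the last test in the class
            }
        }

        // @ClassRule fields must be public, static, and final,
        // which is why the field moved to the constant style DOWNLOAD_CACHE.
        @ClassRule
        public static final SharedCache CACHE = new SharedCache();

        @Test
        public void usesTheCache() {
            // the rule's before() has already run at this point
        }
    }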
51 changes: 51 additions & 0 deletions flink-end-to-end-tests/flink-end-to-end-tests-common/pom.xml
@@ -67,6 +67,57 @@ under the License.
<artifactId>flink-dist_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
<exclusions>
<!-- needed for dependency convergence -->
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>com.squareup.okio</groupId>
<artifactId>okio</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>com.nimbusds</groupId>
<artifactId>nimbus-jose-jwt</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>

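The exclusion list above serves the same dependency-convergence rule that this commit re-enables further below: each excluded artifact otherwise arrives through more than one path at more than one version. Conflicting paths can be located with the verbose dependency tree, e.g. mvn dependency:tree -Dverbose | grep 'omitted for conflict' (a diagnostic aid, not part of this commit).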

This file was deleted.

24 changes: 0 additions & 24 deletions flink-end-to-end-tests/pom.xml
@@ -45,7 +45,6 @@ under the License.
<e2e.include.org.apache.flink.tests.util.categories.TravisGroup5>false</e2e.include.org.apache.flink.tests.util.categories.TravisGroup5>
<e2e.include.org.apache.flink.tests.util.categories.TravisGroup6>false</e2e.include.org.apache.flink.tests.util.categories.TravisGroup6>
<e2e.include.org.apache.flink.tests.util.categories.PreCommit>false</e2e.include.org.apache.flink.tests.util.categories.PreCommit>
<e2e.exclude.org.apache.flink.tests.util.categories.Hadoop>true</e2e.exclude.org.apache.flink.tests.util.categories.Hadoop>
<e2e.exclude.org.apache.flink.testutils.junit.FailsOnJava11>false</e2e.exclude.org.apache.flink.testutils.junit.FailsOnJava11>
</properties>

@@ -151,12 +150,6 @@ under the License.
<e2e.include.org.apache.flink.tests.util.categories.PreCommit>true</e2e.include.org.apache.flink.tests.util.categories.PreCommit>
</properties>
</profile>
<profile>
<id>e2e-hadoop</id>
<properties>
<e2e.exclude.org.apache.flink.tests.util.categories.Hadoop>false</e2e.exclude.org.apache.flink.tests.util.categories.Hadoop>
</properties>
</profile>
<profile>
<id>java11</id>
<activation>
@@ -172,11 +165,9 @@
<plugins>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<!-- <version>3.1.0</version> -->
<executions>
<execution>
<id>copy-resources</id>
<!-- here the phase you need -->
<phase>package</phase>
<goals>
<goal>copy-resources</goal>
@@ -298,21 +289,6 @@ under the License.
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<executions>
<execution>
<id>dependency-convergence</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<skip>true</skip>
</configuration>
</execution>
</executions>
</plugin>
</plugins>

<pluginManagement>
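The deleted plugin block above had been skipping the parent's dependency-convergence check for the end-to-end modules; with the exclusions introduced earlier in this commit, that override is no longer needed. For context, an enforcer execution of this kind is conventionally declared roughly as follows (a sketch of the usual maven-enforcer-plugin setup, not necessarily Flink's exact parent configuration):

    <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-enforcer-plugin</artifactId>
        <executions>
            <execution>
                <!-- the execution id the removed <skip> override matched on -->
                <id>dependency-convergence</id>
                <goals>
                    <goal>enforce</goal>
                </goals>
                <configuration>
                    <rules>
                        <!-- fail the build when transitive dependencies disagree on a version -->
                        <dependencyConvergence/>
                    </rules>
                </configuration>
            </execution>
        </executions>
    </plugin>

Because child modules merge plugin executions by id, redefining dependency-convergence with <skip>true</skip> was enough to disable the check for just this subtree.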
8 changes: 3 additions & 5 deletions flink-end-to-end-tests/run-nightly-tests.sh
@@ -135,10 +135,8 @@ if [[ ${PROFILE} != *"jdk11"* ]]; then
run_test "Running Kerberized YARN per-job on Docker test (custom fs plugin)" "$END_TO_END_DIR/test-scripts/test_yarn_job_kerberos_docker.sh dummy-fs"
run_test "Running Kerberized YARN application on Docker test (default input)" "$END_TO_END_DIR/test-scripts/test_yarn_application_kerberos_docker.sh"
run_test "Running Kerberized YARN application on Docker test (custom fs plugin)" "$END_TO_END_DIR/test-scripts/test_yarn_application_kerberos_docker.sh dummy-fs"
if [[ $PROFILE == *"include-hadoop"* ]]; then
run_test "Run Mesos WordCount test" "$END_TO_END_DIR/test-scripts/test_mesos_wordcount.sh"
run_test "Run Mesos multiple submission test" "$END_TO_END_DIR/test-scripts/test_mesos_multiple_submissions.sh"
fi
run_test "Run Mesos WordCount test" "$END_TO_END_DIR/test-scripts/test_mesos_wordcount.sh"
run_test "Run Mesos multiple submission test" "$END_TO_END_DIR/test-scripts/test_mesos_multiple_submissions.sh"
fi

run_test "Test PubSub connector with Docker based Google PubSub Emulator" "$END_TO_END_DIR/test-scripts/test_streaming_gcp_pubsub.sh"
@@ -174,7 +172,7 @@ run_test "Batch SQL end-to-end test" "$END_TO_END_DIR/test-scripts/test_batch_sq
run_test "Streaming SQL end-to-end test (Old planner)" "$END_TO_END_DIR/test-scripts/test_streaming_sql.sh old" "skip_check_exceptions"
run_test "Streaming SQL end-to-end test (Blink planner)" "$END_TO_END_DIR/test-scripts/test_streaming_sql.sh blink" "skip_check_exceptions"
# skip test if hadoop version is 2.4.1 (FLINK-16629)
if [[ $PROFILE == *"include-hadoop"* && $PROFILE != *"hadoop.version=2.4.1"* ]]; then
if [[ $PROFILE != *"hadoop.version=2.4.1"* ]]; then
run_test "Streaming bucketing end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_bucketing.sh" "skip_check_exceptions"
fi
run_test "Streaming File Sink end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_file_sink.sh" "skip_check_exceptions"
22 changes: 0 additions & 22 deletions flink-formats/flink-orc-nohive/pom.xml
@@ -63,34 +63,12 @@ under the License.
<version>${orc.version}</version>
<classifier>nohive</classifier>
<exclusions>
<!-- Exclude ORC's Hadoop dependency and pull in Flink's Hadoop. -->
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hive</groupId>
<artifactId>hive-storage-api</artifactId>
</exclusion>
</exclusions>
</dependency>

<!-- Replacement for ORC's Hadoop dependency. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>provided</scope>
</dependency>

<!-- test dependencies -->
<dependency>
7 changes: 0 additions & 7 deletions flink-formats/flink-orc/pom.xml
@@ -86,13 +86,6 @@ under the License.
</exclusions>
</dependency>

<!-- Replacement for ORC's Hadoop dependency. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>