Commit

[hotfix][tests] fix ineffective file existence check and remove duplicate code
Nico Kruber committed Apr 12, 2018
1 parent 57b3ce8 commit ca5573b
Showing 3 changed files with 44 additions and 15 deletions.
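
The "ineffective file existence check" in the title refers to asserting non-nullity on a freshly constructed File object: new File(path) never returns null, so Assert.assertNotNull passes even when the example jar was never built. The commit replaces that pattern with a shared helper that checks for the file on disk. A minimal sketch of the difference, in plain Java (the class name and jar path below are illustrative only, not part of the commit):

import java.io.File;
import java.io.FileNotFoundException;

public class ExistenceCheckSketch {
	public static void main(String[] args) throws FileNotFoundException {
		// Ineffective: the File constructor never returns null, so a null
		// check cannot fail, even if the jar is missing from disk.
		File jar = new File("target/programs/BatchWordCount.jar");
		if (jar == null) { // always false
			throw new AssertionError("Could not find wordcount jar");
		}

		// Effective: test for actual presence on disk, which is what the
		// new YarnTestUtils.getTestJarPath helper centralizes.
		if (!jar.exists()) {
			throw new FileNotFoundException("Test jar " + jar.getPath() + " not present.");
		}
	}
}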
@@ -78,6 +78,7 @@
 import static junit.framework.TestCase.assertTrue;
 import static org.apache.flink.yarn.UtilsTest.addTestAppender;
 import static org.apache.flink.yarn.UtilsTest.checkForLogString;
+import static org.apache.flink.yarn.util.YarnTestUtils.getTestJarPath;
 import static org.junit.Assume.assumeTrue;

 /**
@@ -123,8 +124,7 @@ public void testClientStartup() throws IOException {
 	public void perJobYarnCluster() throws IOException {
 		LOG.info("Starting perJobYarnCluster()");
 		addTestAppender(JobClient.class, Level.INFO);
-		File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
-		Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
+		File exampleJarLocation = getTestJarPath("BatchWordCount.jar");
 		runWithArgs(new String[]{"run", "-m", "yarn-cluster",
 				"-yj", flinkUberjar.getAbsolutePath(), "-yt", flinkLibFolder.getAbsolutePath(),
 				"-yn", "1",
@@ -152,8 +152,7 @@ public void perJobYarnCluster() throws IOException {
 	public void perJobYarnClusterOffHeap() throws IOException {
 		LOG.info("Starting perJobYarnCluster()");
 		addTestAppender(JobClient.class, Level.INFO);
-		File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
-		Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
+		File exampleJarLocation = getTestJarPath("BatchWordCount.jar");

 		// set memory constraints (otherwise this is the same test as perJobYarnCluster() above)
 		final long taskManagerMemoryMB = 1024;
@@ -394,8 +393,7 @@ public void perJobYarnClusterWithParallelism() throws IOException {
 		// write log messages to stdout as well, so that the runWithArgs() method
 		// is catching the log output
 		addTestAppender(JobClient.class, Level.INFO);
-		File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
-		Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
+		File exampleJarLocation = getTestJarPath("BatchWordCount.jar");
 		runWithArgs(new String[]{"run",
 				"-p", "2", //test that the job is executed with a DOP of 2
 				"-m", "yarn-cluster",
@@ -419,9 +417,7 @@ public void perJobYarnClusterWithParallelism() throws IOException {
 	public void testDetachedPerJobYarnCluster() throws Exception {
 		LOG.info("Starting testDetachedPerJobYarnCluster()");

-		File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
-
-		Assert.assertNotNull("Could not find batch wordcount jar", exampleJarLocation);
+		File exampleJarLocation = getTestJarPath("BatchWordCount.jar");

 		testDetachedPerJobYarnClusterInternal(exampleJarLocation.getAbsolutePath());

@@ -435,9 +431,7 @@ public void testDetachedPerJobYarnCluster() throws Exception {
 	public void testDetachedPerJobYarnClusterWithStreamingJob() throws Exception {
 		LOG.info("Starting testDetachedPerJobYarnClusterWithStreamingJob()");

-		File exampleJarLocation = new File("target/programs/StreamingWordCount.jar");
-
-		Assert.assertNotNull("Could not find streaming wordcount jar", exampleJarLocation);
+		File exampleJarLocation = getTestJarPath("StreamingWordCount.jar");

 		testDetachedPerJobYarnClusterInternal(exampleJarLocation.getAbsolutePath());

@@ -56,6 +56,7 @@
 import java.util.List;
 import java.util.concurrent.CompletableFuture;

+import static org.apache.flink.yarn.util.YarnTestUtils.getTestJarPath;
 import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.is;
 import static org.junit.Assert.assertThat;
@@ -100,9 +101,7 @@ public void testFlinkContainerMemory() throws Exception {
 		clusterDescriptor.setLocalJarPath(new Path(flinkUberjar.getAbsolutePath()));
 		clusterDescriptor.addShipFiles(Arrays.asList(flinkLibFolder.listFiles()));

-		final File streamingWordCountFile = new File("target/programs/WindowJoin.jar");
-
-		assertThat(streamingWordCountFile.exists(), is(true));
+		final File streamingWordCountFile = getTestJarPath("WindowJoin.jar");

 		final PackagedProgram packagedProgram = new PackagedProgram(streamingWordCountFile);
 		final JobGraph jobGraph = PackagedProgramUtils.createJobGraph(packagedProgram, configuration, 1);
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.yarn.util;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+
+/**
+ * Utility methods for YARN tests.
+ */
+public class YarnTestUtils {
+	public static File getTestJarPath(String fileName) throws FileNotFoundException {
+		File f = new File("target/programs/" + fileName);
+		if (!f.exists()) {
+			throw new FileNotFoundException("Test jar " + f.getPath() + " not present. Invoke tests using maven "
+				+ "or build the jar using 'mvn process-test-classes' in flink-yarn-tests");
+		}
+		return f;
+	}
+}
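
With the helper in place, a caller only supplies the jar's file name; the path prefix and the existence check live in one spot, and a missing jar fails fast with a build hint instead of silently passing a null check. A minimal usage sketch, assuming the helper is on the test classpath (the surrounding class here is hypothetical):

import org.apache.flink.yarn.util.YarnTestUtils;

import java.io.File;
import java.io.FileNotFoundException;

public class GetTestJarPathSketch {
	public static void main(String[] args) throws FileNotFoundException {
		// Resolves target/programs/WindowJoin.jar or throws with a hint
		// to run 'mvn process-test-classes' in flink-yarn-tests.
		File jar = YarnTestUtils.getTestJarPath("WindowJoin.jar");
		System.out.println(jar.getAbsolutePath());
	}
}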
