Skip to content

Commit

Permalink
[FLINK-6575] [tests] Disable tests on Windows that use HDFS
Browse files Browse the repository at this point in the history
This closes apache#6575.
  • Loading branch information
zentol authored and tzulitai committed Jul 1, 2017
1 parent e575c6c commit 4cac6f4
Show file tree
Hide file tree
Showing 9 changed files with 67 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,13 @@
import org.apache.flink.streaming.util.OperatorSnapshotUtil;
import org.apache.flink.streaming.util.migration.MigrationTestUtil;
import org.apache.flink.streaming.util.migration.MigrationVersion;
import org.apache.flink.util.OperatingSystem;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.Path;

import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
Expand Down Expand Up @@ -75,6 +77,11 @@ public class BucketingSinkMigrationTest {
private static final String IN_PROGRESS_SUFFIX = ".in-progress";
private static final String VALID_LENGTH_SUFFIX = ".valid";

/**
 * Skips this test class on Windows: the MiniDFSCluster used by these tests
 * cannot run there without native Hadoop extensions.
 */
@BeforeClass
public static void verifyOS() {
	Assume.assumeFalse("HDFS cluster cannot be started on Windows without extensions.", OperatingSystem.isWindows());
}

@Parameterized.Parameters(name = "Migration Savepoint / Bucket Files Prefix: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters () {
return Arrays.asList(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.util.NetUtils;
import org.apache.flink.util.OperatingSystem;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileConstants;
Expand All @@ -53,6 +54,7 @@
import org.apache.hadoop.io.Text;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
Expand Down Expand Up @@ -135,6 +137,8 @@ private <T> OneInputStreamOperatorTestHarness<T, Object> createTestSink(

@BeforeClass
public static void createHDFS() throws IOException {
Assume.assumeTrue("HDFS cluster cannot be started on Windows without extensions.", !OperatingSystem.isWindows());

Configuration conf = new Configuration();

File dataDir = tempFolder.newFolder();
Expand All @@ -152,7 +156,9 @@ public static void createHDFS() throws IOException {

@AfterClass
public static void destroyHDFS() {
if (hdfsCluster != null) {
	hdfsCluster.shutdown();
}
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,12 @@
import org.apache.flink.streaming.connectors.fs.StringWriter;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.util.OperatingSystem;

import org.apache.commons.io.FileUtils;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
Expand All @@ -50,6 +53,11 @@ public class RollingSinkMigrationTest {
private static final String IN_PROGRESS_SUFFIX = ".in-progress";
private static final String VALID_LENGTH_SUFFIX = ".valid";

/**
 * Skips this test class on Windows: the MiniDFSCluster used by these tests
 * cannot run there without native Hadoop extensions.
 */
@BeforeClass
public static void verifyOS() {
	Assume.assumeFalse("HDFS cluster cannot be started on Windows without extensions.", OperatingSystem.isWindows());
}

@Test
public void testMigration() throws Exception {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,12 @@
import org.apache.flink.streaming.connectors.fs.StringWriter;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.util.OperatingSystem;

import org.apache.commons.io.FileUtils;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
Expand All @@ -49,6 +52,11 @@ public class RollingToBucketingMigrationTest {
private static final String IN_PROGRESS_SUFFIX = ".in-progress";
private static final String VALID_LENGTH_SUFFIX = ".valid";

/**
 * Skips this test class on Windows: the MiniDFSCluster used by these tests
 * cannot run there without native Hadoop extensions.
 */
@BeforeClass
public static void verifyOS() {
	Assume.assumeFalse("HDFS cluster cannot be started on Windows without extensions.", OperatingSystem.isWindows());
}

@Test
public void testMigration() throws Exception {
final File outDir = tempFolder.newFolder();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,12 @@
import org.apache.flink.streaming.util.OperatorSnapshotUtil;
import org.apache.flink.streaming.util.migration.MigrationTestUtil;
import org.apache.flink.streaming.util.migration.MigrationVersion;
import org.apache.flink.util.OperatingSystem;

import org.apache.commons.io.FileUtils;

import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
Expand Down Expand Up @@ -96,6 +98,11 @@ public ContinuousFileProcessingMigrationTest(Tuple2<MigrationVersion, Long> migr
@ClassRule
public static TemporaryFolder tempFolder = new TemporaryFolder();

/**
 * Skips this test class on Windows: the MiniDFSCluster used by these tests
 * cannot run there without native Hadoop extensions.
 */
@BeforeClass
public static void verifyOS() {
	// Message fixed ("cannot be start" -> "cannot be started") and aligned
	// with the identical assumption in the sibling HDFS-based test classes.
	Assume.assumeTrue("HDFS cluster cannot be started on Windows without extensions.", !OperatingSystem.isWindows());
}

/**
* Manually run this to write binary snapshot data. Remove @Ignore to run.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,13 +42,15 @@
import org.apache.flink.streaming.runtime.tasks.OperatorStateHandles;
import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.util.OperatingSystem;
import org.apache.flink.util.Preconditions;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
Expand Down Expand Up @@ -88,6 +90,8 @@ public class ContinuousFileProcessingTest {

@BeforeClass
public static void createHDFS() {
Assume.assumeTrue("HDFS cluster cannot be started on Windows without extensions.", !OperatingSystem.isWindows());

try {
File hdfsDir = tempFolder.newFolder();

Expand All @@ -109,10 +113,8 @@ public static void createHDFS() {

@AfterClass
public static void destroyHDFS() {
if (hdfsCluster != null) {
	hdfsCluster.shutdown();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,13 @@
import org.apache.flink.runtime.state.filesystem.FileStateHandle;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.runtime.state.memory.ByteStreamStateHandle;
import org.apache.flink.util.OperatingSystem;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;

Expand Down Expand Up @@ -71,6 +73,8 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {

@BeforeClass
public static void createHDFS() {
Assume.assumeTrue("HDFS cluster cannot be started on Windows without extensions.", !OperatingSystem.isWindows());

try {
tempDir = new File(ConfigConstants.DEFAULT_TASK_MANAGER_TMP_PATH, UUID.randomUUID().toString());

Expand All @@ -93,10 +97,14 @@ public static void createHDFS() {
@AfterClass
public static void destroyHDFS() {
try {
if (hdfsCluster != null) {
	hdfsCluster.shutdown();
}
if (tempDir != null) {
	FileUtils.deleteDirectory(tempDir);
}
} catch (IOException ignored) {
}
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,12 @@
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.highavailability.FsNegativeRunningJobsRegistry;
import org.apache.flink.runtime.highavailability.RunningJobsRegistry.JobSchedulingStatus;
import org.apache.flink.util.OperatingSystem;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
Expand Down Expand Up @@ -53,6 +55,8 @@ public class FsNegativeRunningJobsRegistryTest {

@BeforeClass
public static void createHDFS() throws Exception {
Assume.assumeTrue("HDFS cluster cannot be started on Windows without extensions.", !OperatingSystem.isWindows());

final File tempDir = TEMP_DIR.newFolder();

Configuration hdConf = new Configuration();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
import org.apache.flink.util.FileUtils;
import org.apache.flink.util.OperatingSystem;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
Expand All @@ -44,7 +45,9 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.File;
Expand All @@ -67,6 +70,11 @@ public class HDFSTest {
private org.apache.hadoop.fs.Path hdPath;
protected org.apache.hadoop.fs.FileSystem hdfs;

/**
 * Skips this test class on Windows: the MiniDFSCluster used by these tests
 * cannot run there without native Hadoop extensions.
 */
@BeforeClass
public static void verifyOS() {
	Assume.assumeFalse("HDFS cluster cannot be started on Windows without extensions.", OperatingSystem.isWindows());
}

@Before
public void createHDFS() {
try {
Expand Down

0 comments on commit 4cac6f4

Please sign in to comment.