Skip to content

Commit

Permalink
[FLINK-18721][yarn] Switch yarn deployment to the new active resource…
Browse files Browse the repository at this point in the history
… manager implementation

This closes apache#13311.
  • Loading branch information
KarmaGYZ authored and tillrohrmann committed Oct 9, 2020
1 parent 8ee269c commit bd8e97d
Show file tree
Hide file tree
Showing 14 changed files with 31 additions and 2,339 deletions.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import org.apache.flink.runtime.clusterframework.TaskExecutorProcessSpec;
import org.apache.flink.runtime.clusterframework.TaskExecutorProcessUtils;
import org.apache.flink.util.TestLogger;
import org.apache.flink.yarn.configuration.YarnResourceManagerConfiguration;
import org.apache.flink.yarn.configuration.YarnResourceManagerDriverConfiguration;
import org.apache.flink.yarn.util.TestUtils;

import org.apache.hadoop.fs.Path;
Expand Down Expand Up @@ -109,7 +109,7 @@ public void testCreateTaskExecutorCredentials() throws Exception {
env.put(ApplicationConstants.Environment.PWD.key(), home.getAbsolutePath());
env = Collections.unmodifiableMap(env);

final YarnResourceManagerConfiguration yarnResourceManagerConfiguration = new YarnResourceManagerConfiguration(env, "localhost", null);
final YarnResourceManagerDriverConfiguration yarnResourceManagerDriverConfiguration = new YarnResourceManagerDriverConfiguration(env, "localhost", null);

File credentialFile = temporaryFolder.newFile("container_tokens");
final Text amRmTokenKind = AMRMTokenIdentifier.KIND_NAME;
Expand Down Expand Up @@ -137,7 +137,7 @@ public void testCreateTaskExecutorCredentials() throws Exception {
Map<String, String> systemEnv = new HashMap<>(originalEnv);
systemEnv.put("HADOOP_TOKEN_FILE_LOCATION", credentialFile.getAbsolutePath());
CommonTestUtils.setEnv(systemEnv);
ctx = Utils.createTaskExecutorContext(flinkConf, yarnConf, yarnResourceManagerConfiguration, tmParams,
ctx = Utils.createTaskExecutorContext(flinkConf, yarnConf, yarnResourceManagerDriverConfiguration, tmParams,
"", workingDirectory, taskManagerMainClass, LOG);
} finally {
CommonTestUtils.setEnv(originalEnv);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ public abstract class YarnTestBase extends TestLogger {
Pattern.compile("Association with remote system \\[akka.tcp:https://flink@[^]]+\\] has failed, address is now gated for \\[50\\] ms. Reason: \\[Association failed with \\[akka.tcp:https://flink@[^]]+\\]\\] Caused by: \\[java.net.ConnectException: Connection refused: [^]]+\\]"),

// filter out expected ResourceManagerException caused by intended shutdown request
Pattern.compile(YarnResourceManager.ERROR_MESSAGE_ON_SHUTDOWN_REQUEST),
Pattern.compile(YarnResourceManagerDriver.ERROR_MESSAGE_ON_SHUTDOWN_REQUEST),

// this can happen in Akka 2.4 on shutdown.
Pattern.compile("java\\.util\\.concurrent\\.RejectedExecutionException: Worker has already been shutdown"),
Expand Down
31 changes: 3 additions & 28 deletions flink-yarn/src/main/java/org/apache/flink/yarn/Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import org.apache.flink.runtime.util.HadoopUtils;
import org.apache.flink.util.StringUtils;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.flink.yarn.configuration.YarnResourceManagerConfiguration;
import org.apache.flink.yarn.configuration.YarnResourceManagerDriverConfiguration;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
Expand Down Expand Up @@ -344,7 +344,7 @@ private Utils() {
* @param yarnConfig
* The YARN configuration object.
* @param configuration
* The YarnResourceManager configurations.
* The YarnResourceManagerDriver configurations.
* @param tmParams
* The TaskExecutor container memory parameters.
* @param taskManagerDynamicProperties
Expand All @@ -364,7 +364,7 @@ private Utils() {
static ContainerLaunchContext createTaskExecutorContext(
org.apache.flink.configuration.Configuration flinkConfig,
YarnConfiguration yarnConfig,
YarnResourceManagerConfiguration configuration,
YarnResourceManagerDriverConfiguration configuration,
ContaineredTaskManagerParameters tmParams,
String taskManagerDynamicProperties,
String workingDirectory,
Expand Down Expand Up @@ -535,31 +535,6 @@ private static List<YarnLocalResourceDescriptor> decodeYarnLocalResourceDescript
return resourceDescriptors;
}

/**
 * Creates a {@link WorkerSpecContainerResourceAdapter} wired with the YARN scheduler's
 * allocation limits taken from the given configurations.
 *
 * <p>The adapter is constructed from, in order: the Flink configuration, the scheduler's
 * minimum/maximum allocation for memory (MB) and vcores (each read from {@code yarnConfig}
 * with YARN's own defaults as fallback), the memory and vcore counts of the "unit" resource,
 * and the external resources configured under the YARN external-resource config-key suffix.
 *
 * @param flinkConfig the Flink configuration, passed through to the adapter and used to look
 *     up external resources
 * @param yarnConfig the YARN configuration providing the scheduler allocation bounds
 * @return a new {@code WorkerSpecContainerResourceAdapter}
 */
public static WorkerSpecContainerResourceAdapter createWorkerSpecContainerResourceAdapter(
org.apache.flink.configuration.Configuration flinkConfig,
YarnConfiguration yarnConfig) {

// NOTE(review): getUnitResource is defined elsewhere in this class — presumably the
// smallest schedulable resource increment of the cluster; confirm against its Javadoc.
Resource unitResource = getUnitResource(yarnConfig);

return new WorkerSpecContainerResourceAdapter(
flinkConfig,
// Scheduler minimum container memory (MB), defaulting to YARN's built-in value.
yarnConfig.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB),
// Scheduler minimum container vcores, defaulting to YARN's built-in value.
yarnConfig.getInt(
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES),
// Scheduler maximum container memory (MB), defaulting to YARN's built-in value.
yarnConfig.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB),
// Scheduler maximum container vcores, defaulting to YARN's built-in value.
yarnConfig.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES),
// Memory/vcores of the unit resource computed above.
unitResource.getMemory(),
unitResource.getVirtualCores(),
// External resources (e.g. accelerators) declared in the Flink config using the
// YARN-specific config-key suffix.
ExternalResourceUtils.getExternalResources(flinkConfig, YarnConfigOptions.EXTERNAL_RESOURCE_YARN_CONFIG_KEY_SUFFIX));
}

static TaskExecutorProcessSpecContainerResourceAdapter createTaskExecutorProcessSpecContainerResourceAdapter(
org.apache.flink.configuration.Configuration flinkConfig,
YarnConfiguration yarnConfig) {
Expand Down
Loading

0 comments on commit bd8e97d

Please sign in to comment.