Remove duplicate deepspeed config arg and allow users to force multinode exec
Quentin-Anthony committed May 16, 2023
1 parent 056e9ca commit b130d58
Showing 2 changed files with 7 additions and 1 deletion.
5 changes: 5 additions & 0 deletions megatron/neox_arguments/deepspeed_args.py
@@ -351,6 +351,11 @@ class NeoXArgsDeepspeedRunner(NeoXArgsTemplate):
     If true, overrides the default check where DeepSpeed confirms that the headnode is accessible via ssh.
     """
 
+    force_multi: bool = False
+    """
+    If true, force multi-node launcher mode. Helps in cases where the user wants to launch on a single remote node.
+    """
+
     comment: str = None
     """
     Adds a `--comment` to the DeepSpeed launch command. In DeeperSpeed this is passed on to the SlurmLauncher as well. Sometimes necessary for cluster rules, or so I've heard.
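For context, here is a minimal usage sketch of the new flag. It is not part of this commit and assumes that NeoXArgsDeepspeedRunner behaves as a standard dataclass; in practice force_multi is normally set in a GPT-NeoX YAML config and picked up by the launcher rather than constructed directly.

# Hypothetical usage sketch, not from this commit: the runner args are
# instantiated directly here only to illustrate the new field.
from megatron.neox_arguments.deepspeed_args import NeoXArgsDeepspeedRunner

runner_args = NeoXArgsDeepspeedRunner(force_multi=True)

# With force_multi enabled, the launcher is forced into multi-node mode even
# when only a single remote node is listed, matching the --force_multi flag
# on DeepSpeed's own runner.
assert runner_args.force_multi is True
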
3 changes: 2 additions & 1 deletion megatron/training.py
@@ -624,7 +624,8 @@ def setup_model_and_optimizer(neox_args, use_cache=False, iteration=None):
             lr_scheduler=_lr_scheduler,
             dist_init_required=False,
             model_parameters=_model_params,
-            config_params=neox_args.deepspeed_config,
+            # Need to remove the below so that it doesn't conflict with --deepspeed_config required by autotuning
+            #config_params=neox_args.deepspeed_config,
             mpu=mpu if not neox_args.is_pipe_parallel else None,
         )
         model.total_params = get_total_params(model.module)
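The training.py change relies on DeepSpeed reading its config from the --deepspeed_config flag that the launcher (and the autotuner) puts on the parsed args, rather than from an explicit config_params argument. Below is a minimal sketch of that pattern; the wrapper function is hypothetical, and the real call in megatron/training.py passes the arguments shown in the diff above.

import deepspeed


def initialize_from_launcher_config(args, model, optimizer, model_parameters,
                                    lr_scheduler, mpu=None):
    """Sketch: initialize DeepSpeed without config_params, relying on the
    --deepspeed_config path already present on the parsed launcher args."""
    return deepspeed.initialize(
        args=args,  # carries the deepspeed_config path supplied by the launcher
        model=model,
        optimizer=optimizer,
        model_parameters=model_parameters,
        lr_scheduler=lr_scheduler,
        dist_init_required=False,
        mpu=mpu,
        # config_params is intentionally omitted: passing it here would hand
        # DeepSpeed a second, conflicting config when autotuning supplies its
        # own --deepspeed_config.
    )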
