merge from magma #20

Merged · 10 commits · Jun 13, 2023
configs/summit-70m-openclipH.yml (1 addition, 1 deletion)
@@ -96,5 +96,5 @@
"wall_clock_breakdown": true,

"tokenizer-type": "HFTokenizer"
# "tokenizer-type": "HFGPT2Tokenizer"

}
configs/summit_setup.yml (1 addition, 1 deletion)
@@ -4,9 +4,9 @@
"valid-data-paths": "/gpfs/alpine/csc499/proj-shared/LAION-400m-webdataset/data/{40001..41000}.tar",
"test-data-paths": "/gpfs/alpine/csc499/proj-shared/LAION-400m-webdataset/data/{41000..41455}.tar",

# we use tokenizer from huggingface, don't need vocab or merge file

"vocab-file": "./data/20B_tokenizer.json",


"save": "/gpfs/alpine/scratch/lfsm/csc499/checkpoints",
"load": "/gpfs/alpine/scratch/lfsm/csc499/checkpoints",
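Taken together, the two config changes above switch the run to Hugging Face's single-file tokenizer: "tokenizer-type": "HFTokenizer" plus a "vocab-file" pointing at the tokenizer JSON replaces the separate GPT-2 vocab/merges pair. A minimal sketch of what that implies, assuming HFTokenizer wraps the Hugging Face tokenizers library (the path below is the one from the config):

from tokenizers import Tokenizer

# Load the single-file tokenizer named in "vocab-file"; no separate
# vocab or merge files are needed.
tok = Tokenizer.from_file("./data/20B_tokenizer.json")
print(tok.get_vocab_size())           # vocabulary size baked into the JSON
print(tok.encode("hello world").ids)  # token ids for a sample string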
megatron/training.py (2 additions, 0 deletions)
@@ -284,6 +284,7 @@ def _get_batch(neox_args, tokenizer, keys, data, datatype):
# Get the masks and position ids.
attention_mask, loss_mask, position_ids = get_ltor_masks_and_position_ids(
    data=tokens,
    pad_token=neox_args.tokenizer.pad_id,
    eod_token=neox_args.tokenizer.eod,
    eod_mask_loss=neox_args.eod_mask_loss,
)
@@ -360,6 +361,7 @@ def get_batch_pipe_image_text(input,neox_args):

attention_mask, loss_mask, position_ids = get_ltor_masks_and_position_ids(
    data=captions,
    pad_token=neox_args.tokenizer.pad_id,
    eod_token=neox_args.tokenizer.eod,
    eod_mask_loss=neox_args.eod_mask_loss,
)
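Both call sites, the text-only _get_batch and the image-text get_batch_pipe_image_text, now forward the tokenizer's pad id so padded positions drop out of the loss. A hypothetical illustration of what that buys when captions are padded to a fixed length (placeholder ids, not repo code):

import torch

PAD_ID = 1  # placeholder; the real value comes from neox_args.tokenizer.pad_id
captions = torch.tensor([[12, 47, 9, 2, PAD_ID, PAD_ID]])
per_token_loss = torch.rand(captions.shape)  # stand-in for per-token LM losses
loss_mask = (captions != PAD_ID).float()
# Pads contribute nothing; without the mask they would dilute the mean loss.
mean_loss = (per_token_loss * loss_mask).sum() / loss_mask.sum()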
megatron/utils.py (2 additions, 0 deletions)
@@ -78,6 +78,7 @@ def get_attn_mask(seq_length, device):

def get_ltor_masks_and_position_ids(
    data,
    pad_token,
    eod_token,
    eod_mask_loss=False,
):
@@ -94,6 +95,7 @@ def get_ltor_masks_and_position_ids(

# Loss mask.
loss_mask = torch.ones(data.size(), dtype=torch.float, device=data.device)
loss_mask[data == pad_token] = 0.0
if eod_mask_loss:
    loss_mask[data == eod_token] = 0.0

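Condensed, the updated helper behaves roughly like the sketch below; the attention-mask and position-id parts are paraphrased from the usual gpt-neox conventions rather than shown in this diff:

import torch

def get_ltor_masks_and_position_ids(data, pad_token, eod_token, eod_mask_loss=False):
    _, seq_length = data.size()

    # Left-to-right (causal) mask; True marks positions attention may not see.
    attention_mask = torch.tril(
        torch.ones((1, seq_length, seq_length), device=data.device)
    ).view(1, 1, seq_length, seq_length) < 0.5

    # Loss mask: train on every position except padding (and, optionally, EOD).
    loss_mask = torch.ones(data.size(), dtype=torch.float, device=data.device)
    loss_mask[data == pad_token] = 0.0
    if eod_mask_loss:
        loss_mask[data == eod_token] = 0.0

    # Position ids simply count 0..seq_length-1 for every sample.
    position_ids = torch.arange(seq_length, dtype=torch.long, device=data.device)
    position_ids = position_ids.unsqueeze(0).expand_as(data)

    return attention_mask, loss_mask, position_ids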