Commit ea7b424
Fixed bug in testing script, swap to BoolTensor for PyTorch 1.2 support
sean.narenthiran committed Aug 26, 2019
1 parent e73ccf6 commit ea7b424
Showing 2 changed files with 2 additions and 6 deletions.
model.py (2 changes: 1 addition & 1 deletion)

@@ -57,7 +57,7 @@ def forward(self, x, lengths):
         """
         for module in self.seq_module:
             x = module(x)
-            mask = torch.ByteTensor(x.size()).fill_(0)
+            mask = torch.BoolTensor(x.size()).fill_(0)
             if x.is_cuda:
                 mask = mask.cuda()
             for i, length in enumerate(lengths):
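PyTorch 1.2 deprecates uint8 (ByteTensor) masks for operations like masked_fill in favor of torch.bool masks, which is why the mask is now built as a BoolTensor. Below is a minimal sketch of the masking pattern around this hunk; the mask_by_length wrapper name and the narrow/masked_fill tail of the loop are illustrative assumptions, and only the lines shown in the diff are taken from the commit.

import torch

def mask_by_length(x, lengths):
    # x: (batch, channels, features, time); lengths: valid time steps per sample.
    # Build a mask that is True on padded time steps only.
    mask = torch.BoolTensor(x.size()).fill_(0)  # uint8 masks are deprecated since PyTorch 1.2
    if x.is_cuda:
        mask = mask.cuda()
    for i, length in enumerate(lengths):
        length = int(length)
        if mask[i].size(2) - length > 0:
            # mark everything past the valid length along the time dimension
            mask[i].narrow(2, length, mask[i].size(2) - length).fill_(1)
    # zero out padded positions; masked_fill expects a bool mask on PyTorch >= 1.2
    return x.masked_fill(mask, 0)

# Example: batch of 2, 4 channels, 16 features, 50 time steps
x = torch.randn(2, 4, 16, 50)
out = mask_by_length(x, lengths=[50, 30])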
test.py (6 changes: 1 addition & 5 deletions)

@@ -39,14 +39,10 @@ def evaluate(test_loader, device, model, decoder, target_decoder, save_output=Fa

         out, output_sizes = model(inputs, input_sizes)

-        if save_output:
-            # add output to data array, and continue
-            output_data.append((out.cpu().numpy(), output_sizes.numpy()))
-
         decoded_output, _ = decoder.decode(out, output_sizes)
         target_strings = target_decoder.convert_to_strings(split_targets)

-        if args.save_output is not None:
+        if save_output is not None:
             # add output to data array, and continue
             output_data.append((out.cpu().numpy(), output_sizes.numpy(), target_strings))
         for x in range(len(target_strings)):
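The test.py change removes a duplicated save-output block and switches the remaining check from the module-level args.save_output to the function's own save_output argument, so evaluate() no longer depends on the CLI args object when imported from another script. A minimal sketch of the resulting control flow follows; the loop skeleton, batch layout, and return value are assumed for illustration, and only the save_output handling mirrors the hunk above.

def evaluate(test_loader, device, model, decoder, target_decoder, save_output=False):
    # Sketch only: batch unpacking and WER/CER accumulation are placeholders.
    output_data = []
    for inputs, input_sizes, split_targets in test_loader:  # assumed batch layout
        out, output_sizes = model(inputs, input_sizes)

        decoded_output, _ = decoder.decode(out, output_sizes)
        target_strings = target_decoder.convert_to_strings(split_targets)

        if save_output is not None:  # now checks the argument, not args.save_output
            # add output to data array, and continue
            output_data.append((out.cpu().numpy(), output_sizes.numpy(), target_strings))

        # per-utterance WER/CER comparison over target_strings would follow here
    return output_data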
