"examples/vscode:/vscode.git/clone" did not exist on "b43c78e5d32b6eb8f367c52950336591ef8d82eb"
Commit 3ebf1a13 authored by VictorSanh's avatar VictorSanh
Browse files

Fix loss computation for indexes bigger than max_seq_length.

parent 629bd006
......@@ -485,9 +485,22 @@ class BertForQuestionAnswering(nn.Module):
start_logits, end_logits = logits.split(1, dim=-1)
if start_positions is not None and end_positions is not None:
loss_fct = CrossEntropyLoss()
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
#loss_fct = CrossEntropyLoss()
#start_loss = loss_fct(start_logits, start_positions)
#end_loss = loss_fct(end_logits, end_positions)
batch_size, seq_length = input_ids.size()
def compute_loss(logits, positions):
    """One-hot cross-entropy over sequence positions, tolerating out-of-range targets.

    Positions >= seq_length (answers truncated away by max_seq_length) get an
    all-zero one-hot row after the slice below, so they contribute zero loss
    instead of raising, which is the point of this fix.

    Args:
        logits: (batch, seq_length) or (batch, seq_length, 1) float tensor.
        positions: (batch,) or (batch, 1) integer tensor of target indices;
            values may exceed seq_length - 1.

    Returns:
        Scalar tensor: mean over the batch of the per-example negative
        log-probability of the target position.
    """
    bsz = logits.size(0)
    flat_logits = logits.view(bsz, -1)
    slen = flat_logits.size(1)
    idx = positions.view(bsz, -1).long()
    # Build the one-hot wide enough for the largest target, then cut back to
    # seq_length: out-of-range targets end up as all-zero rows (zero loss).
    width = max(int(idx.max().item()), slen) + 1
    # new_zeros keeps the buffer on logits' device/dtype (original used a
    # CPU FloatTensor, which breaks on CUDA inputs).
    one_hot = flat_logits.new_zeros(bsz, width)
    one_hot.scatter_(1, idx, 1.0)
    one_hot = one_hot[:, :slen]
    log_probs = nn.functional.log_softmax(flat_logits, dim=-1)
    # Sum over positions per example, THEN mean over the batch. The original
    # passed dim=-1 to torch.mean after an all-dims sum, returning
    # batch_size x the intended loss.
    return -torch.mean(torch.sum(one_hot * log_probs, dim=-1))
start_loss = compute_loss(start_logits, start_positions)
end_loss = compute_loss(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
return total_loss, (start_logits, end_logits)
else:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment