Unverified Commit 94d2f598 authored by Woosuk Kwon's avatar Woosuk Kwon Committed by GitHub
Browse files

Set replacement=True in torch.multinomial (#858)

parent 75c0ca9d
@@ -302,7 +302,9 @@ def _sample_from_prompt(
         # Random sampling.
         # Sample `best_of` tokens for the prompt.
         num_seqs = sampling_params.best_of
-        next_token_ids = torch.multinomial(prob, num_samples=num_seqs)
+        next_token_ids = torch.multinomial(prob,
+                                           num_samples=num_seqs,
+                                           replacement=True)
         next_token_ids = next_token_ids.tolist()
         return next_token_ids
...
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment