Commit 667fc837 authored by baberabb's avatar baberabb
Browse files

disable tqdm

parent ac664bce
...@@ -84,6 +84,7 @@ class VLLM(LM): ...@@ -84,6 +84,7 @@ class VLLM(LM):
generate: bool = False, generate: bool = False,
max_tokens: int = None, max_tokens: int = None,
stop: Optional[List[str]] = None, stop: Optional[List[str]] = None,
use_tqdm=False,
**kwargs, **kwargs,
): ):
if "do_sample" in kwargs.keys(): if "do_sample" in kwargs.keys():
...@@ -95,13 +96,16 @@ class VLLM(LM): ...@@ -95,13 +96,16 @@ class VLLM(LM):
outputs = self.model.generate( outputs = self.model.generate(
prompt_token_ids=requests, prompt_token_ids=requests,
sampling_params=generate_sampling_params, sampling_params=generate_sampling_params,
use_tqdm=use_tqdm,
) )
else: else:
logliklihood_sampling_params = SamplingParams( logliklihood_sampling_params = SamplingParams(
temperature=0, prompt_logprobs=2, max_tokens=1 temperature=0, prompt_logprobs=2, max_tokens=1
) )
outputs = self.model.generate( outputs = self.model.generate(
prompt_token_ids=requests, sampling_params=logliklihood_sampling_params prompt_token_ids=requests,
sampling_params=logliklihood_sampling_params,
use_tqdm=use_tqdm,
) )
return outputs return outputs
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment