Unverified Commit 5c7cba23 authored by Hailey Schoelkopf's avatar Hailey Schoelkopf Committed by GitHub
Browse files

Fix `--gen_kwargs` and VLLM (`temperature` not respected) (#1800)



* Update vllm_causallms.py

* adjust

---------
Co-authored-by: default avatarlintangsutawika <lintang@eleuther.ai>
parent 3850e21a
@@ -499,7 +499,10 @@ class VLLM(TemplateLM):
def modify_gen_kwargs(kwargs: dict) -> dict:
    # sampling_params
    do_sample = kwargs.pop("do_sample", None)
    if do_sample is False and "temperature" not in kwargs:
        eval_logger.debug(
            "Got `do_sample=False` and no temperature value, setting VLLM temperature to 0.0 ..."
        )
        kwargs["temperature"] = 0.0
    # hf defaults
    kwargs["skip_special_tokens"] = kwargs.get("skip_special_tokens", False)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment