Unverified commit 296f6892 authored by ishandhanani, committed by GitHub

fix(server_args): handle tokenizer init conflicts (#11776)

parent 9edb7b51
@@ -1406,6 +1406,26 @@ class ServerArgs:
                 "Please choose one tokenizer batching approach."
             )
 
+        if self.skip_tokenizer_init:
+            if self.tokenizer_worker_num != 1:
+                logger.warning(
+                    "skip_tokenizer_init=True disables tokenizer workers; forcing tokenizer_worker_num=1 "
+                    f"(requested {self.tokenizer_worker_num})."
+                )
+                self.tokenizer_worker_num = 1
+
+            if self.enable_tokenizer_batch_encode:
+                logger.warning(
+                    "skip_tokenizer_init=True ignores --enable-tokenizer-batch-encode; disabling it."
+                )
+                self.enable_tokenizer_batch_encode = False
+
+            if self.enable_dynamic_batch_tokenizer:
+                logger.warning(
+                    "skip_tokenizer_init=True ignores --enable-dynamic-batch-tokenizer; disabling it."
+                )
+                self.enable_dynamic_batch_tokenizer = False
+
     def _handle_environment_variables(self):
         os.environ["SGLANG_ENABLE_TORCH_COMPILE"] = (
             "1" if self.enable_torch_compile else "0"
@@ -3279,7 +3299,6 @@ class ServerArgs:
                 " Please manually install torch 2.6.x."
             )
 
         # Check multi tokenizer
-        assert self.tokenizer_worker_num > 0, "Tokenizer worker num must >= 1"
         self.validate_buckets_rule(
             "--prompt-tokens-buckets", self.prompt_tokens_buckets
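
For context, here is a minimal, runnable sketch of the conflict-resolution rule the first hunk adds. `TokenizerArgs` below is a hypothetical stand-in for sglang's `ServerArgs`, reduced to the four fields the hunk touches; it is an illustration of the rule, not the project's actual class. Note the design choice visible in the diff: conflicting flags are warned about and overridden rather than raising, so launches that combine them still proceed.

```python
import logging
from dataclasses import dataclass

logger = logging.getLogger(__name__)


@dataclass
class TokenizerArgs:
    """Hypothetical stand-in for sglang's ServerArgs (illustration only)."""

    skip_tokenizer_init: bool = False
    tokenizer_worker_num: int = 1
    enable_tokenizer_batch_encode: bool = False
    enable_dynamic_batch_tokenizer: bool = False

    def resolve_conflicts(self) -> None:
        # Same rule as the hunk above: skipping tokenizer init wins, and each
        # tokenizer-side feature is downgraded with a warning instead of
        # failing later at runtime.
        if not self.skip_tokenizer_init:
            return
        if self.tokenizer_worker_num != 1:
            logger.warning(
                "skip_tokenizer_init=True disables tokenizer workers; "
                "forcing tokenizer_worker_num=1 (requested %d).",
                self.tokenizer_worker_num,
            )
            self.tokenizer_worker_num = 1
        if self.enable_tokenizer_batch_encode:
            logger.warning(
                "skip_tokenizer_init=True ignores tokenizer batch encode; disabling it."
            )
            self.enable_tokenizer_batch_encode = False
        if self.enable_dynamic_batch_tokenizer:
            logger.warning(
                "skip_tokenizer_init=True ignores the dynamic batch tokenizer; disabling it."
            )
            self.enable_dynamic_batch_tokenizer = False


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    args = TokenizerArgs(skip_tokenizer_init=True, tokenizer_worker_num=4)
    args.resolve_conflicts()
    assert args.tokenizer_worker_num == 1  # normalized with a warning, no exception
```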