Unverified Commit dc139757 authored by Ji Lin, committed by GitHub

Merge pull request #45 from casperbh96/main

TinyChat: Fix logic for selecting MPT prompt templates (support for 8k variant).
parents e28cc4d0 977e1543
@@ -135,7 +135,7 @@ def get_prompter(model_type, model_path = ""):
         # return FalconPrompter()
         return FalconSimplePrompter()
     elif model_type.lower() == "mpt":
-        if any(name in model_path for name in ["mpt-7b-chat", "mpt-30b-chat"]):
+        if "mpt" and "chat" in model_path:
             return MPTChatPrompter()
         else:
             return MPTPrompter()
@@ -148,9 +148,9 @@ def get_stop_token_ids(model_type, model_path = ""):
     elif model_type.lower() == "falcon":
         return [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
     elif model_type.lower() == "mpt":
-        if any(name in model_path for name in ["mpt-7b-chat", "mpt-30b-chat"]):
+        if "mpt" and "chat" in model_path:
             return [50278, 0]
         else:
             return []
     else:
         raise ValueError(f"model type {model_type} is not supported")
\ No newline at end of file
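
A side note on the merged condition: in Python, `if "mpt" and "chat" in model_path:` parses as `"mpt" and ("chat" in model_path)`, and because the non-empty literal "mpt" is always truthy, the check reduces to `"chat" in model_path`. This is enough to drop the hardcoded list and match mpt-7b-chat-8k, but it does not actually test for the "mpt" substring. A minimal sketch of an explicit check is below; the helper name is_mpt_chat is hypothetical and not part of this repository.

# Minimal sketch, not part of the commit: explicitly require both substrings
# so the check still covers the 8k chat variant (e.g. mpt-7b-chat-8k).
def is_mpt_chat(model_path: str) -> bool:
    path = model_path.lower()
    return "mpt" in path and "chat" in path

assert is_mpt_chat("mosaicml/mpt-7b-chat")
assert is_mpt_chat("mosaicml/mpt-7b-chat-8k")
assert not is_mpt_chat("mosaicml/mpt-7b")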