Unverified commit 3715be65, authored by Tong Gao and committed by GitHub

[Fix] Fix llama configs (#72)


Co-authored-by: Leymore <zfz-960727@163.com>
parent e9cdb24d
 from opencompass.models import HuggingFaceCausalLM
 models = [
     # LLaMA 13B
     dict(
         type=HuggingFaceCausalLM,
         abbr='llama-13b-hf',
-        path="decapoda-research/llama-13b-hf",
-        tokenizer_path='decapoda-research/llama-13b-hf',
+        path="huggyllama/llama-13b",
+        tokenizer_path='huggyllama/llama-13b',
         tokenizer_kwargs=dict(padding_side='left',
                               truncation_side='left',
                               use_fast=False,
...
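The change above swaps the retired decapoda-research repo for the huggyllama mirror. As a quick sanity check (not part of this commit, and assuming transformers and sentencepiece are installed), the new tokenizer path can be loaded with the same kwargs the config forwards:

# Sanity check: load the new tokenizer path with the same kwargs the config
# passes through. Assumes transformers and sentencepiece are installed.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    'huggyllama/llama-13b',    # replaces decapoda-research/llama-13b-hf
    padding_side='left',
    truncation_side='left',
    use_fast=False,            # the slow LLaMA tokenizer needs sentencepiece
)
print(type(tokenizer).__name__, tokenizer.padding_side)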
 from opencompass.models import HuggingFaceCausalLM
 models = [
     # LLaMA 30B
     dict(
         type=HuggingFaceCausalLM,
         abbr='llama-30b-hf',
-        path="decapoda-research/llama-30b-hf",
-        tokenizer_path='decapoda-research/llama-30b-hf',
+        path="huggyllama/llama-30b",
+        tokenizer_path='huggyllama/llama-30b',
         tokenizer_kwargs=dict(padding_side='left',
                               truncation_side='left',
                               use_fast=False,
...
 from opencompass.models import HuggingFaceCausalLM
 models = [
     # LLaMA 65B
     dict(
         type=HuggingFaceCausalLM,
         abbr='llama-65b-hf',
-        path="decapoda-research/llama-65b-hf",
-        tokenizer_path='decapoda-research/llama-65b-hf',
+        path="huggyllama/llama-65b",
+        tokenizer_path='huggyllama/llama-65b',
         tokenizer_kwargs=dict(padding_side='left',
                               truncation_side='left',
                               use_fast=False,
...
 from opencompass.models import HuggingFaceCausalLM
 models = [
     # LLaMA 7B
     dict(
         type=HuggingFaceCausalLM,
         abbr='llama-7b-hf',
-        path="decapoda-research/llama-7b-hf",
-        tokenizer_path='decapoda-research/llama-7b-hf',
+        path="huggyllama/llama-7b",
+        tokenizer_path='huggyllama/llama-7b',
         tokenizer_kwargs=dict(padding_side='left',
                               truncation_side='left',
                               use_fast=False,
...
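All four configs now point at the huggyllama mirrors. A minimal sketch (assuming the huggingface_hub package is installed and the Hub is reachable) to confirm that each new repo id resolves before running an evaluation:

# Minimal sketch: confirm the new repo ids exist on the Hugging Face Hub.
# Assumes huggingface_hub is installed and network access is available.
from huggingface_hub import model_info

for size in ('7b', '13b', '30b', '65b'):
    repo_id = f'huggyllama/llama-{size}'
    info = model_info(repo_id)
    print(repo_id, '->', info.sha)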