Commit f2b3a0bd authored by shihm's avatar shihm
Browse files

add inference.py

parent 20e943f8
from transformers import AutoTokenizer, AutoModelForCausalLM
import os
import torch
# Path (repo-style) to the Baichuan-M3-235B model weights.
# NOTE(review): a previous assignment pointed at
# "/home/download/baichuan-inc/Baichuan-M3-235B" and was immediately
# overwritten (dead store) — removed; this is the effective value.
model_path = "/baichuan-inc/Baichuan-M3-235B"

# Force fully offline loading: prevent both HuggingFace Transformers and
# ModelScope from attempting any network download at model-load time.
os.environ['TRANSFORMERS_OFFLINE'] = '1'
os.environ['MODELSCOPE_OFFLINE'] = '1'
model = AutoModelForCausalLM.from_pretrained(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment