Commit fe15e17c authored by zzg_666

update

parent 8a2bc968
-FROM image.sourcefind.cn:5000/dcu/admin/base/pytorch:2.3.0-py3.10-dtk24.04.3-ubuntu20.04
\ No newline at end of file
+FROM image.sourcefind.cn:5000/dcu/admin/base/pytorch:2.5.1-ubuntu22.04-dtk25.04.2-py3.10
......@@ -26,7 +26,7 @@ Mixture-of-LoRAs (MoA), a novel parameter-efficient tuning method aimed at providing LLMs with multi…
### Docker (Method 1)
Running with Docker is recommended. The address and steps for pulling the Docker image from [光源](https://www.sourcefind.cn/#/service-details) are provided here.
```
-docker pull image.sourcefind.cn:5000/dcu/admin/base/pytorch:2.3.0-py3.10-dtk24.04.3-ubuntu20.04
+docker pull image.sourcefind.cn:5000/dcu/admin/base/pytorch:2.5.1-ubuntu22.04-dtk25.04.2-py3.10
docker run -it --shm-size=1024G -v /path/your_code_data/:/path/your_code_data/ -v /opt/hyhal:/opt/hyhal --privileged=true --device=/dev/kfd --device=/dev/dri/ --group-add video --name phi-4 <your IMAGE ID> bash # replace <your IMAGE ID> with the ID of the image pulled above
git clone http://developer.sourcefind.cn/codes/modelzoo/phi-4-multimodal-instruct_pytorch.git
......@@ -56,9 +56,9 @@ pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
The special deep-learning libraries required by this project for DCU GPUs can be downloaded and installed from the [光合](https://developer.sourcefind.cn/tool/) developer community.
```
-DTK driver: dtk24.04.3
+DTK driver: dtk25.04.2
python:3.10
-torch:2.3.0
+torch:2.5.1
flash-attn:2.6.1
```
`Tips: the DTK driver, Python, Torch, and other DCU-related tool versions above must match each other exactly`
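A quick way to verify the upgraded stack matches the versions listed above is the minimal check below (an illustrative sketch, assuming the DTK build of PyTorch exposes the standard `torch.cuda` API, as its ROCm/HIP-compatible builds do):

```python
import torch

# Sanity-check the environment against the versions listed above.
print(torch.__version__)            # expected: 2.5.1 (DTK build)
print(torch.cuda.is_available())    # True when a DCU device is visible
if torch.cuda.is_available():
    print(torch.cuda.get_device_name(0))
```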
......@@ -82,7 +82,7 @@ pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
Fine-tune with the ms-swift framework
```
-git clone https://github.com/modelscope/ms-swift.git
+git clone -b v3.11.1 https://github.com/modelscope/ms-swift
cd ms-swift
......@@ -166,7 +166,7 @@ python phi4_speech_inference.py
### Accuracy
DCU accuracy is consistent with GPU; inference framework: vllm.
......
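For reference only, a minimal text-only vLLM invocation might look like the sketch below; this is not the project's benchmark script, and the model ID is the ModelScope ID used elsewhere in this commit (point vLLM at a local download if it cannot resolve the ID directly):

```python
from vllm import LLM, SamplingParams

# Illustrative text-only load; the multimodal (image/audio) paths rely on the
# dedicated inference scripts shipped with this repository.
llm = LLM(model='LLM-Research/Phi-4-multimodal-instruct', trust_remote_code=True)
params = SamplingParams(temperature=0.0, max_tokens=64)
outputs = llm.generate(['What is the capital of France?'], params)
print(outputs[0].outputs[0].text)
```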
......@@ -5,7 +5,7 @@ from PIL import Image
import soundfile
from transformers import AutoModelForCausalLM, AutoProcessor, GenerationConfig
-model_path = '/home/wanglch/Phi4/Phi-4-multimodal-instruct/'
+model_path = 'LLM-Research/Phi-4-multimodal-instruct'
kwargs = {}
kwargs['torch_dtype'] = torch.bfloat16
......
......@@ -5,7 +5,7 @@ from PIL import Image
import soundfile
from transformers import AutoModelForCausalLM, AutoProcessor, GenerationConfig
-model_path = '/home/wanglch/Phi4/Phi-4-multimodal-instruct'
+model_path = 'LLM-Research/Phi-4-multimodal-instruct'
kwargs = {}
kwargs['torch_dtype'] = torch.bfloat16
......
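The two diffs above only show the changed `model_path` lines; as context, an illustrative continuation of such a script (a sketch using the standard transformers API with the bfloat16 kwargs shown, not the repository's actual code) could look like:

```python
import torch
from transformers import AutoModelForCausalLM, AutoProcessor, GenerationConfig

# Hypothetical sketch: load Phi-4-multimodal-instruct with the updated ID and
# the kwargs from the diff. Download the weights locally first (e.g. via
# modelscope) if transformers cannot resolve the ModelScope ID directly.
model_path = 'LLM-Research/Phi-4-multimodal-instruct'
kwargs = {'torch_dtype': torch.bfloat16}

processor = AutoProcessor.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    trust_remote_code=True,
    device_map='cuda',  # DCU devices are reached through the cuda device map on DTK builds
    **kwargs,
)
generation_config = GenerationConfig.from_pretrained(model_path)
```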