Commit f3be33a3 authored by Alisehen

add xpu parameters to install.sh

parent af9472b5
...
@@ -62,9 +62,7 @@ cd ktransformers
 git submodule update --init
 # Install dependencies
-bash install.sh
-pip uninstall triton pytorch-triton-xpu
-pip install pytorch-triton-xpu==3.3.0 --extra-index-url https://download.pytorch.org/whl/xpu # to avoid potential triton import error
+bash install.sh --dev xpu
 ```
 ## Running DeepSeek-R1 Models
...
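With this change the XPU setup in the docs collapses to a single `install.sh` call. As a quick post-install sanity check (my own sketch, not part of the commit), you can confirm that the Intel triton build ended up as the importable `triton` package, assuming `pytorch-triton-xpu` is what provides that module:

```bash
# Assumed check after running `bash install.sh --dev xpu`:
# pytorch-triton-xpu==3.3.0 should own the importable `triton` package.
python -c "import triton; print(triton.__version__)"   # expect 3.3.0
pip show pytorch-triton-xpu | grep -i ^version          # expect Version: 3.3.0
```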
 #!/bin/bash
 set -e
+# default backend
+DEV="cuda"
+# parse --dev argument
+while [[ "$#" -gt 0 ]]; do
+    case $1 in
+        --dev) DEV="$2"; shift ;;
+        *) echo "Unknown parameter passed: $1"; exit 1 ;;
+    esac
+    shift
+done
+export DEV_BACKEND="$DEV"
+echo "Selected backend: $DEV_BACKEND"
 # clear build dirs
 rm -rf build
 rm -rf *.egg-info
...
@@ -13,6 +27,14 @@ rm -rf ~/.ktransformers
 echo "Installing python dependencies from requirements.txt"
 pip install -r requirements-local_chat.txt
 pip install -r ktransformers/server/requirements.txt
+
+# XPU-specific fix for triton
+if [[ "$DEV_BACKEND" == "xpu" ]]; then
+    echo "Replacing triton for XPU backend"
+    pip uninstall -y triton pytorch-triton-xpu || true
+    pip install pytorch-triton-xpu==3.3.0 --extra-index-url https://download.pytorch.org/whl/xpu
+fi
+
 echo "Installing ktransformers"
 KTRANSFORMERS_FORCE_BUILD=TRUE pip install -v . --no-build-isolation
...
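For reference, a short sketch (not part of the diff) of how the new `--dev` parsing in install.sh behaves when invoked; the loop only validates flag names, so whatever value follows `--dev` is exported as `DEV_BACKEND` unchanged:

```bash
bash install.sh                # no flag: DEV_BACKEND=cuda (default)
bash install.sh --dev xpu      # DEV_BACKEND=xpu, triggers the triton swap above
bash install.sh --device xpu   # unknown flag: prints "Unknown parameter passed: --device" and exits 1
```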