Commit 05326b04 authored by baberabb

skip vllm test (requires CUDA)

parent b2282739
@@ -63,7 +63,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install -e '.[testing,anthropic,vllm,sentencepiece]' --extra-index-url https://download.pytorch.org/whl/cpu
+          pip install -e '.[testing,anthropic,sentencepiece]' --extra-index-url https://download.pytorch.org/whl/cpu
           # Install optional git dependencies
           # pip install bleurt@https://github.com/google-research/bleurt/archive/b610120347ef22b494b6d69b4316e303f5932516.zip#egg=bleurt
           # if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
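The hunk above drops the vllm extra from the CI install: the job pulls CPU-only PyTorch wheels (via --extra-index-url), and vLLM needs a CUDA device, so installing it there buys nothing. A minimal sketch, not part of the commit, of how one could check that assumption on a runner:

    import importlib.util

    import torch


    def vllm_usable() -> bool:
        """Return True only when vLLM is installed and a CUDA device exists."""
        has_vllm = importlib.util.find_spec("vllm") is not None
        return has_vllm and torch.cuda.is_available()


    if __name__ == "__main__":
        # On the CPU-only CI runner configured above, this prints False.
        print("vllm usable:", vllm_usable())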
@@ -8,6 +8,7 @@ import torch
 @pytest.mark.skip(reason="requires CUDA")
 class TEST_VLLM:
+    vllm = pytest.importorskip("vllm")
     try:
         from lm_eval.models.vllm_causallms import VLLM
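For context, the two guards in this hunk complement each other: pytest.importorskip skips at collection time when vllm is not installed (it no longer is, per the workflow change above), while the class-level skip mark covers the CUDA requirement unconditionally. A standalone sketch of the pattern, with a hypothetical class name rather than the repository's TEST_VLLM:

    import pytest

    # Skips everything below at collection time if vllm is absent.
    vllm = pytest.importorskip("vllm")


    @pytest.mark.skip(reason="requires CUDA")
    class TestVLLMSketch:  # hypothetical name, for illustration only
        def test_import(self) -> None:
            # Unreachable on CPU-only runners: the mark skips unconditionally.
            from lm_eval.models.vllm_causallms import VLLM

            assert VLLM is not None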