"tools/vscode:/vscode.git/clone" did not exist on "ced9f6f40782eafe411a08dc906bf6a6cd904a75"
Commit 6ea742df authored by muyangli

finished the ci

parent df5505ae
@@ -2,10 +2,10 @@ name: pr_test
on:
workflow_dispatch:
# pull_request:
# branches: [main]
# issue_comment:
# types: [created]
pull_request:
branches: [main]
issue_comment:
types: [created]
concurrency:
group: pr_test
@@ -30,7 +30,7 @@ jobs:
set-up-build-env:
runs-on: self-hosted
needs: [check-comment]
needs: [ check-comment ]
if: ${{ github.event_name != 'issue_comment' || needs.check-comment.outputs.should_run == 'true' }}
steps:
@@ -54,7 +54,7 @@ jobs:
echo "Installing dependencies"
pip install torch torchvision torchaudio
pip install ninja wheel diffusers transformers accelerate sentencepiece protobuf huggingface_hub
build:
needs: set-up-build-env
@@ -78,7 +78,7 @@ jobs:
if: ${{ github.event_name != 'issue_comment' || needs.check-comment.outputs.should_run == 'true' }}
steps:
- name: Run memory test
- name: Run FLUX memory test
run: |
which python
source $(conda info --base)/etc/profile.d/conda.sh
@@ -86,9 +86,24 @@ jobs:
which python
HF_TOKEN=${{ secrets.HF_TOKEN }} pytest -v -x tests/flux/test_flux_memory.py
test-flux-other:
needs: build
runs-on: self-hosted
timeout-minutes: 60
if: ${{ github.event_name != 'issue_comment' || needs.check-comment.outputs.should_run == 'true' }}
steps:
- name: Run other FLUX tests
run: |
which python
source $(conda info --base)/etc/profile.d/conda.sh
conda activate test_env || { echo "Failed to activate conda env"; exit 1; }
which python
HF_TOKEN=${{ secrets.HF_TOKEN }} pytest -v -x tests/flux --ignore=tests/flux/test_flux_memory.py
clean-up:
if: always() && (github.event_name != 'issue_comment' || needs.check-comment.outputs.should_run == 'true')
needs: [set-up-build-env, test-flux-memory, test-flux-memory]
needs: [ set-up-build-env, test-flux-memory, test-flux-other ]
runs-on: self-hosted
steps:
......
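For local debugging, the two test jobs above can be approximated with a short Python driver. This is only a sketch: it assumes the repository layout referenced in the workflow (`tests/flux/...`) and that a Hugging Face token is available in the environment; in CI the token comes from `secrets.HF_TOKEN`.

```python
# Minimal sketch for reproducing the CI test selection locally, assuming the
# tests/flux layout used in the workflow and an HF token in the environment.
import os
import pytest

os.environ.setdefault("HF_TOKEN", "<your-hf-token>")  # CI injects secrets.HF_TOKEN here

# Roughly the "Run FLUX memory test" step.
pytest.main(["-v", "-x", "tests/flux/test_flux_memory.py"])

# Roughly the "Run other FLUX tests" step: everything else under tests/flux.
pytest.main(["-v", "-x", "tests/flux", "--ignore=tests/flux/test_flux_memory.py"])
```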
import pytest
from nunchaku.utils import get_precision, is_turing
from .utils import run_test
@pytest.mark.skipif(is_turing(), reason="Skip tests due to using Turing GPUs")
@pytest.mark.parametrize(
"height,width,attention_impl,cpu_offload,expected_lpips,batch_size",
[
(1024, 1024, "nunchaku-fp16", False, 0.126, 2),
(1920, 1080, "flashattn2", False, 0.141, 4),
],
)
def test_int4_schnell(
height: int, width: int, attention_impl: str, cpu_offload: bool, expected_lpips: float, batch_size: int
):
run_test(
precision=get_precision(),
height=height,
width=width,
attention_impl=attention_impl,
cpu_offload=cpu_offload,
expected_lpips=expected_lpips,
batch_size=batch_size,
)
# skip this test
# import pytest
#
# from nunchaku.utils import get_precision, is_turing
# from .utils import run_test
#
#
# @pytest.mark.skipif(is_turing(), reason="Skip tests due to using Turing GPUs")
# @pytest.mark.parametrize(
# "height,width,attention_impl,cpu_offload,expected_lpips,batch_size",
# [
# (1024, 1024, "nunchaku-fp16", False, 0.126, 2),
# (1920, 1080, "flashattn2", False, 0.141, 4),
# ],
# )
# def test_int4_schnell(
# height: int, width: int, attention_impl: str, cpu_offload: bool, expected_lpips: float, batch_size: int
# ):
# run_test(
# precision=get_precision(),
# height=height,
# width=width,
# attention_impl=attention_impl,
# cpu_offload=cpu_offload,
# expected_lpips=expected_lpips,
# batch_size=batch_size,
# )
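The test module above delegates the actual generation and comparison to `run_test` from `.utils`, which is not part of this diff. The stub below is only an inferred sketch of that interface, reconstructed from the keyword arguments at the call site; the real helper (presumably under `tests/flux/`) is the authoritative definition.

```python
# Hypothetical stub, inferred from the call site above; not the project's
# actual helper implementation.
def run_test(
    precision: str,          # e.g. the value returned by nunchaku.utils.get_precision()
    height: int,
    width: int,
    attention_impl: str,     # "nunchaku-fp16" or "flashattn2" in the cases above
    cpu_offload: bool,
    expected_lpips: float,   # quality threshold the generated images are checked against
    batch_size: int,
) -> None:
    """Generate images with the given settings and assert that image quality
    (LPIPS against reference outputs) stays within expected_lpips."""
    raise NotImplementedError("illustrative stub only")
```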