import pytest

from nunchaku.utils import get_precision, is_turing

from .utils import run_test


@pytest.mark.skipif(is_turing(), reason="Skip tests due to using Turing GPUs")
@pytest.mark.parametrize(
    "height,width,num_inference_steps,attention_impl,cpu_offload,expected_lpips",
    [
        (1024, 1024, 50, "flashattn2", False, 0.139 if get_precision() == "int4" else 0.146),
        (2048, 512, 25, "nunchaku-fp16", False, 0.168 if get_precision() == "int4" else 0.156),
    ],
)
def test_flux_dev(
    height: int, width: int, num_inference_steps: int, attention_impl: str, cpu_offload: bool, expected_lpips: float
):
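    """Run the FLUX.1-dev pipeline at the parametrized resolution, step count,
    attention implementation, and CPU-offload setting, and check the output's
    LPIPS score against the expected threshold for the detected precision."""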
    run_test(
        precision=get_precision(),
        model_name="flux.1-dev",
        height=height,
        width=width,
        num_inference_steps=num_inference_steps,
        attention_impl=attention_impl,
        cpu_offload=cpu_offload,
        expected_lpips=expected_lpips,
    )