Unverified Commit 0edf9ca0 authored by Anton Lozhkov, committed by GitHub

Fix hub-dependent tests for PRs (#1119)

* Remove the hub token

* replace repos

* style
parent c39a511b
@@ -67,8 +67,6 @@ jobs:
     - name: Run fast PyTorch CPU tests
       if: ${{ matrix.config.framework == 'pytorch' }}
-      env:
-        HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
       run: |
         python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile \
           -s -v -k "not Flax and not Onnx" \
@@ -77,8 +75,6 @@ jobs:
     - name: Run fast Flax TPU tests
       if: ${{ matrix.config.framework == 'flax' }}
-      env:
-        HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
       run: |
         python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile \
           -s -v -k "Flax" \
@@ -87,8 +83,6 @@ jobs:
     - name: Run fast ONNXRuntime CPU tests
       if: ${{ matrix.config.framework == 'onnxruntime' }}
-      env:
-        HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
       run: |
         python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile \
           -s -v -k "Onnx" \
@@ -141,8 +135,6 @@ jobs:
     - name: Run fast PyTorch tests on M1 (MPS)
       shell: arch -arch arm64 bash {0}
-      env:
-        HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
       run: |
         ${CONDA_RUN} python -m pytest -n 1 -s -v --make-reports=tests_torch_mps tests/
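Note: with the `env:` blocks dropped, these PR jobs no longer receive `HUGGING_FACE_HUB_TOKEN`; the hub-dependent tests are pointed at public `hf-internal-testing` repos instead (see the test changes below). A minimal sketch, not part of this diff, of checking that the tiny repo's scheduler config is reachable anonymously — the `scheduler/scheduler_config.json` layout is an assumption based on the usual diffusers pipeline format:

# Minimal sketch (not part of this commit): the tiny test repo is public, so its
# scheduler config can be fetched without HUGGING_FACE_HUB_TOKEN being set.
# Assumption: the repo follows the standard pipeline layout, i.e. the scheduler
# config lives at scheduler/scheduler_config.json.
from huggingface_hub import hf_hub_download

config_path = hf_hub_download(
    repo_id="hf-internal-testing/tiny-stable-diffusion-torch",
    subfolder="scheduler",
    filename="scheduler_config.json",
)
print(config_path)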
@@ -244,28 +244,30 @@ class ConfigTester(unittest.TestCase):
         logger = logging.get_logger("diffusers.configuration_utils")

         with CaptureLogger(logger) as cap_logger:
-            ddim = DDIMScheduler.from_config("runwayml/stable-diffusion-v1-5", subfolder="scheduler")
+            ddim = DDIMScheduler.from_config("hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler")

         assert ddim.__class__ == DDIMScheduler
         # no warning should be thrown
         assert cap_logger.out == ""

-    def test_load_ddim_from_euler(self):
+    def test_load_euler_from_pndm(self):
         logger = logging.get_logger("diffusers.configuration_utils")

         with CaptureLogger(logger) as cap_logger:
-            euler = EulerDiscreteScheduler.from_config("runwayml/stable-diffusion-v1-5", subfolder="scheduler")
+            euler = EulerDiscreteScheduler.from_config(
+                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
+            )

         assert euler.__class__ == EulerDiscreteScheduler
         # no warning should be thrown
         assert cap_logger.out == ""

-    def test_load_ddim_from_euler_ancestral(self):
+    def test_load_euler_ancestral_from_pndm(self):
         logger = logging.get_logger("diffusers.configuration_utils")

         with CaptureLogger(logger) as cap_logger:
             euler = EulerAncestralDiscreteScheduler.from_config(
-                "runwayml/stable-diffusion-v1-5", subfolder="scheduler"
+                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
             )

         assert euler.__class__ == EulerAncestralDiscreteScheduler
@@ -276,7 +278,7 @@ class ConfigTester(unittest.TestCase):
         logger = logging.get_logger("diffusers.configuration_utils")

         with CaptureLogger(logger) as cap_logger:
-            pndm = PNDMScheduler.from_config("runwayml/stable-diffusion-v1-5", subfolder="scheduler")
+            pndm = PNDMScheduler.from_config("hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler")

         assert pndm.__class__ == PNDMScheduler
         # no warning should be thrown
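For reference, these tests all follow the same pattern: instantiate a scheduler class from the config stored in the repo's `scheduler` subfolder and assert that no compatibility warning is logged. A rough standalone sketch of that pattern, assuming `CaptureLogger` is importable from `diffusers.utils.testing_utils` and a diffusers version where `from_config` still accepts a hub repo id, as in the test code above:

# Rough standalone sketch of the test pattern above (not part of this commit).
# Assumptions: Scheduler.from_config accepts a hub repo id plus subfolder, and
# CaptureLogger lives in diffusers.utils.testing_utils.
import unittest

from diffusers import EulerDiscreteScheduler
from diffusers.utils import logging
from diffusers.utils.testing_utils import CaptureLogger


class TinyRepoSchedulerTest(unittest.TestCase):
    def test_load_euler_from_tiny_repo(self):
        logger = logging.get_logger("diffusers.configuration_utils")

        with CaptureLogger(logger) as cap_logger:
            euler = EulerDiscreteScheduler.from_config(
                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
            )

        assert euler.__class__ == EulerDiscreteScheduler
        # the loaded config is compatible, so no warning should be logged
        assert cap_logger.out == ""


if __name__ == "__main__":
    unittest.main()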