Unverified commit 0f91f2f6, authored by jiqing-feng, committed by GitHub
Browse files

use deterministic to get stable result (#11663)



* use deterministic to get stable result
Signed-off-by: jiqing-feng <jiqing.feng@intel.com>

* add deterministic for int8 test
Signed-off-by: jiqing-feng <jiqing.feng@intel.com>

---------
Signed-off-by: jiqing-feng <jiqing.feng@intel.com>
parent 745199a8
@@ -476,6 +476,7 @@ class SlowBnb4BitTests(Base4bitTests):
         r"""
         Test that loading the model and unquantize it produce correct results.
         """
+        torch.use_deterministic_algorithms(True)
         self.pipeline_4bit.transformer.dequantize()
         output = self.pipeline_4bit(
             prompt=self.prompt,
......
@@ -478,6 +478,7 @@ class SlowBnb8bitTests(Base8bitTests):
         r"""
         Test that loading the model and unquantize it produce correct results.
         """
+        torch.use_deterministic_algorithms(True)
         self.pipeline_8bit.transformer.dequantize()
         output = self.pipeline_8bit(
             prompt=self.prompt,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.