Unverified Commit ac9702c2 authored by Funtowicz Morgan, committed by GitHub

Fix ONNX test_quantize unittest (#6716)

parent 07434033
@@ -45,7 +45,7 @@ jobs:
source .env/bin/activate
pip install --upgrade pip
pip install torch!=1.6.0
-pip install .[sklearn,testing]
+pip install .[sklearn,testing,onnxruntime]
- name: Are GPUs recognized by our DL frameworks
run: |
@@ -42,7 +42,7 @@ jobs:
source .env/bin/activate
pip install --upgrade pip
pip install torch!=1.6.0
-pip install .[sklearn,testing]
+pip install .[sklearn,testing,onnxruntime]
- name: Are GPUs recognized by our DL frameworks
run: |
@@ -74,16 +74,17 @@ extras["tf"] = [
# "onnxconverter-common",
# "keras2onnx"
"onnxconverter-common @ git+git://github.com/microsoft/onnxconverter-common.git@f64ca15989b6dc95a1f3507ff6e4c395ba12dff5#egg=onnxconverter-common",
"keras2onnx @ git+git://github.com/onnx/keras-onnx.git@cbdc75cb950b16db7f0a67be96a278f8d2953b48#egg=keras2onnx"
"keras2onnx @ git+git://github.com/onnx/keras-onnx.git@cbdc75cb950b16db7f0a67be96a278f8d2953b48#egg=keras2onnx",
]
extras["tf-cpu"] = [
"tensorflow-cpu",
# "onnxconverter-common",
# "keras2onnx"
"onnxconverter-common @ git+git://github.com/microsoft/onnxconverter-common.git@f64ca15989b6dc95a1f3507ff6e4c395ba12dff5#egg=onnxconverter-common",
"keras2onnx @ git+git://github.com/onnx/keras-onnx.git@cbdc75cb950b16db7f0a67be96a278f8d2953b48#egg=keras2onnx"
"keras2onnx @ git+git://github.com/onnx/keras-onnx.git@cbdc75cb950b16db7f0a67be96a278f8d2953b48#egg=keras2onnx",
]
extras["torch"] = ["torch"]
extras["onnxruntime"] = ["onnxruntime>=1.4.0", "onnxruntime-tools>=1.4.2"]
extras["serving"] = ["pydantic", "uvicorn", "fastapi", "starlette"]
extras["all"] = extras["serving"] + ["tensorflow", "torch"]
@@ -364,7 +364,6 @@ def quantize(onnx_model_path: Path) -> Path:
Returns: The Path generated for the quantized
"""
try:
import onnx
from onnxruntime.quantization import QuantizationMode, quantize
@@ -388,8 +387,6 @@ def quantize(onnx_model_path: Path) -> Path:
onnx.save_model(quantized_model, quantized_model_path.as_posix())
return quantized_model_path
except Exception as ie:
print(f"Error while quantizing the model:\n{str(ie)}")
def verify(path: Path):
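
The two hunks above touch the quantize() helper in the graph-to-ONNX conversion script that test_quantize exercises. The following is a hedged sketch of the export, quantize, verify flow that the test covers; the module path, model name, and convert() keyword arguments are assumptions about the API of this era, not contents of this commit:

# Hedged sketch of the export -> quantize -> verify flow.
# convert()'s exact arguments are an assumption; quantize()/verify() signatures
# match the diff above (quantize(onnx_model_path: Path) -> Path, verify(path: Path)).
from pathlib import Path

from transformers.convert_graph_to_onnx import convert, quantize, verify

onnx_path = Path("/tmp/onnx/bert-base-cased.onnx")
convert(framework="pt", model="bert-base-cased", output=onnx_path, opset=11)

quantized_path = quantize(onnx_path)  # returns the "-quantized" sibling path
verify(quantized_path)                # loads the quantized graph with onnxruntime
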