Commit 5b37512b authored by Shangdi Yu, committed by Facebook GitHub Bot

Add version guard to migration

Summary:
Pull Request resolved: https://github.com/facebookresearch/d2go/pull/679

As the title says: add a version guard around the export_for_training migration to avoid breaking d2go CI on older PyTorch releases.

Reviewed By: wat3rBro

Differential Revision: D63907037

fbshipit-source-id: baf94c71c68ab017ed21b4c12eaf2fa69219db68
parent f905958c
@@ -31,7 +31,6 @@ from torch.ao.quantization.quantizer.xnnpack_quantizer import (
     get_symmetric_quantization_config,
     XNNPACKQuantizer,
 )
-from torch.export import export_for_training

 TORCH_VERSION: Tuple[int, ...] = tuple(int(x) for x in torch.__version__.split(".")[:2])
 # some tests still import prepare/convert from below. So don't remove these.
@@ -367,7 +366,14 @@ def prepare_fake_quant_model(cfg, model, is_qat, example_input=None):
             )
         else:
             logger.info("Using default pt2e quantization APIs with XNNPACKQuantizer")
-            captured_model = export_for_training(model, example_input).module()
+            if TORCH_VERSION >= (2, 5, 0):
+                captured_model = torch.export.export_for_training(
+                    model, example_input
+                ).module()
+            else:
+                captured_model = torch._export.capture_pre_autograd_graph(
+                    model, example_input
+                ).module()
             quantizer = _get_symmetric_xnnpack_quantizer()
             if is_qat:
                 model = prepare_qat_pt2e(captured_model, quantizer)
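For context, the version-guard pattern in isolation looks like the minimal sketch below. The helper name capture_graph is hypothetical and not part of d2go; only torch.export.export_for_training (torch 2.5+), torch._export.capture_pre_autograd_graph (older releases), and the TORCH_VERSION tuple come from the diff above, and example_input is assumed to be a tuple of example tensors.

    from typing import Tuple

    import torch

    # Major/minor of the installed torch release, e.g. "2.5.1+cu121" -> (2, 5).
    # Same construction as in the diff above.
    TORCH_VERSION: Tuple[int, ...] = tuple(
        int(x) for x in torch.__version__.split(".")[:2]
    )


    def capture_graph(model: torch.nn.Module, example_input: Tuple[torch.Tensor, ...]):
        # Hypothetical helper: pick the graph-capture API that the installed
        # torch version actually ships, so the same code runs on old and new CI.
        if TORCH_VERSION >= (2, 5):
            # export_for_training is available from torch 2.5 onward; it returns
            # an ExportedProgram, so .module() unwraps the captured GraphModule.
            return torch.export.export_for_training(model, example_input).module()
        # Older releases only provide the pre-autograd capture API, which
        # returns a module directly.
        return torch._export.capture_pre_autograd_graph(model, example_input)

Note that the sketch compares against the two-element tuple (2, 5) so that torch 2.5 itself takes the new path; the diff compares the two-element TORCH_VERSION against (2, 5, 0), which under Python tuple ordering is only satisfied from 2.6 onward.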