Unverified Commit 66fec2c8 authored by Ningxin Zheng, committed by GitHub

Bugbash for speedup module. (#2634)



If none of the conv layers in a dependency set are pruned, we need to skip that dependency set.
Signed-off-by: Ningxin <Ningxin.Zheng@microsoft.com>
parent f761b910
@@ -240,7 +240,8 @@ infer_from_inshape = {
     'aten::add': lambda module_mask, mask: add_inshape(module_mask, mask),
     'aten::cat': lambda module_mask, mask, cat_info, last_visited: cat_inshape(module_mask, mask, cat_info, last_visited),
     'aten::mean': lambda module_masks, mask, shape: mean_inshape(module_masks, mask, shape),
-    'Dropout': lambda module_masks, mask: dropout_inshape(module_masks, mask)
+    'Dropout': lambda module_masks, mask: dropout_inshape(module_masks, mask),
+    'Dropout2d': lambda module_masks, mask: dropout_inshape(module_masks, mask)
 }
 """
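For context, a minimal, self-contained sketch of how a dispatch table like infer_from_inshape routes mask propagation by op/module type. Only the handler name dropout_inshape and the two new dict keys mirror the diff above; the propagate helper and its error handling are assumptions for illustration, not NNI's actual implementation.

def dropout_inshape(module_masks, mask):
    # Dropout / Dropout2d do not change the channel layout, so the input
    # mask can be propagated unchanged as the output mask.
    return mask

infer_from_inshape = {
    'Dropout': lambda module_masks, mask: dropout_inshape(module_masks, mask),
    'Dropout2d': lambda module_masks, mask: dropout_inshape(module_masks, mask),
}

def propagate(op_type, module_masks, mask):
    # Look up the shape-inference rule registered for this op/module type
    # (illustrative helper, not part of the speedup module's API).
    if op_type not in infer_from_inshape:
        raise RuntimeError('no input-shape inference rule for ' + op_type)
    return infer_from_inshape[op_type](module_masks, mask)

# Usage: both dropout variants pass the mask through untouched.
assert propagate('Dropout2d', None, [1, 0, 1]) == [1, 0, 1]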
@@ -259,7 +259,9 @@ class ChannelMaskConflict(MaskFix):
             _logger.debug('Layer: %s ', name)
             _logger.debug('Original pruned filters: %s', str(all_zeros))
             # Update the masks for the layers in the dependency set
-            if fine_grained:
+            if fine_grained or out_channels is None:
+                # skip if the fine-grained pruner is used or none of the
+                # layers in this dependency set are pruned
                 continue
             if not all_pruned:
                 # if some layers are not pruned at all
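A hedged sketch of why out_channels can be None when an entire dependency set is unpruned. The pruned_out_channels helper and the mask layout below are assumptions for illustration; only the `fine_grained or out_channels is None` test comes from the diff.

import torch

def pruned_out_channels(masks):
    """Union of fully-zeroed output channels across a dependency set,
    or None if no layer in the set has any channel pruned."""
    out_channels = None
    for mask in masks:
        # A channel counts as pruned when its entire weight slice is zero.
        zeros = (mask.view(mask.size(0), -1).sum(dim=1) == 0).nonzero().flatten()
        if zeros.numel() == 0:
            continue
        out_channels = zeros if out_channels is None else torch.unique(
            torch.cat([out_channels, zeros]))
    return out_channels

# If no layer in the dependency set is pruned, out_channels stays None,
# so the set is skipped, matching the new condition in the diff.
masks = [torch.ones(8, 4, 3, 3), torch.ones(8, 4, 3, 3)]
assert pruned_out_channels(masks) is None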