Commit 522a602f authored by wangkx1's avatar wangkx1
Browse files

siton bug

parent abb99c90
# Distillation slim config built on top of the yolov3_r34 COCO config.
_BASE_: [
  '../../yolov3/yolov3_r34_270e_coco.yml',
]
pretrain_weights: https://paddledet.bj.bcebos.com/models/yolov3_r34_270e_coco.pdparams
slim: Distill
distill_loss: DistillYOLOv3Loss

DistillYOLOv3Loss:
  # Scale factor applied to the distillation loss term.
  weight: 1000
# Combined distillation + channel pruning slim config (yolov3_r34 base).
_BASE_: [
  '../../yolov3/yolov3_r34_270e_coco.yml',
]
pretrain_weights: https://paddledet.bj.bcebos.com/models/yolov3_r34_270e_coco.pdparams
slim: DistillPrune
distill_loss: DistillYOLOv3Loss

DistillYOLOv3Loss:
  # Scale factor applied to the distillation loss term.
  weight: 1000

pruner: Pruner

Pruner:
  # L1-norm filter importance criterion.
  criterion: l1_norm
  # 18 conv weights to prune; pruned_ratios below is position-aligned (18 entries).
  pruned_params: ['conv2d_27.w_0', 'conv2d_28.w_0', 'conv2d_29.w_0',
                  'conv2d_30.w_0', 'conv2d_31.w_0', 'conv2d_32.w_0',
                  'conv2d_34.w_0', 'conv2d_35.w_0', 'conv2d_36.w_0',
                  'conv2d_37.w_0', 'conv2d_38.w_0', 'conv2d_39.w_0',
                  'conv2d_41.w_0', 'conv2d_42.w_0', 'conv2d_43.w_0',
                  'conv2d_44.w_0', 'conv2d_45.w_0', 'conv2d_46.w_0']
  pruned_ratios: [0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
                  0.7, 0.7, 0.7, 0.7, 0.7, 0.7,
                  0.8, 0.8, 0.8, 0.8, 0.8, 0.8]
# Weights of yolov3_mobilenet_v1_voc
pretrain_weights: https://paddledet.bj.bcebos.com/models/yolov3_mobilenet_v1_270e_voc.pdparams
slim: PrunerQAT

PrunerQAT:
  # FPGM (geometric-median) filter pruning criterion.
  criterion: fpgm
  # 18 conv weights to prune; pruned_ratios below is position-aligned (18 entries).
  pruned_params: ['conv2d_27.w_0', 'conv2d_28.w_0', 'conv2d_29.w_0',
                  'conv2d_30.w_0', 'conv2d_31.w_0', 'conv2d_32.w_0',
                  'conv2d_34.w_0', 'conv2d_35.w_0', 'conv2d_36.w_0',
                  'conv2d_37.w_0', 'conv2d_38.w_0', 'conv2d_39.w_0',
                  'conv2d_41.w_0', 'conv2d_42.w_0', 'conv2d_43.w_0',
                  'conv2d_44.w_0', 'conv2d_45.w_0', 'conv2d_46.w_0']
  pruned_ratios: [0.1, 0.2, 0.2, 0.2, 0.2, 0.1,
                  0.2, 0.3, 0.3, 0.3, 0.2, 0.1,
                  0.3, 0.4, 0.4, 0.4, 0.4, 0.3]
  print_prune_params: false
  # Quantization-aware training settings applied after pruning.
  quant_config: {
    'weight_quantize_type': 'channel_wise_abs_max', 'activation_quantize_type': 'moving_average_abs_max',
    'weight_bits': 8, 'activation_bits': 8, 'dtype': 'int8', 'window_size': 10000, 'moving_rate': 0.9,
    'quantizable_layer_type': ['Conv2D', 'Linear']}
  print_qat_model: true
# Once-For-All (OFA) supernet search config for an ESNet backbone (picodet).
weights: https://paddledet.bj.bcebos.com/models/pretrained/ESNet_x1_0_pretrained.pdparams
slim: OFA

OFA:
  ofa_config:
    task: expand_ratio
    expand_ratio: [0.5, 1]
    skip_neck: true
    skip_head: true
    # NOTE(review): indentation was lost in this copy; the nesting of RunConfig
    # and ofa_layers under ofa_config is reconstructed — confirm against the
    # PaddleSlim OFA config schema.
    RunConfig:
      # Skip the output layer of each block by layer name
      skip_layers: ['backbone._conv1._conv', 'backbone.2_1._conv_linear_1._conv',
                    'backbone.2_1._conv_linear_2._conv', 'backbone.2_1._conv_dw_mv1._conv',
                    'backbone.2_1._conv_pw_mv1._conv', 'backbone.2_2._conv_linear._conv',
                    'backbone.2_3._conv_linear._conv', 'backbone.3_1._conv_linear_1._conv',
                    'backbone.3_1._conv_linear_2._conv', 'backbone.3_1._conv_dw_mv1._conv',
                    'backbone.3_1._conv_pw_mv1._conv', 'backbone.3_2._conv_linear._conv',
                    'backbone.3_3._conv_linear._conv', 'backbone.3_4._conv_linear._conv',
                    'backbone.3_5._conv_linear._conv', 'backbone.3_6._conv_linear._conv',
                    'backbone.3_7._conv_linear._conv', 'backbone.4_1._conv_linear_1._conv',
                    'backbone.4_1._conv_linear_2._conv', 'backbone.4_1._conv_dw_mv1._conv',
                    'backbone.4_1._conv_pw_mv1._conv', 'backbone.4_2._conv_linear._conv',
                    'backbone.4_3._conv_linear._conv']
      # For block-wise search, make layers in each block in the same search space
      same_search_space: [
        ['backbone.2_1._conv_dw_1._conv', 'backbone.2_1._conv_pw_2._conv',
         'backbone.2_1._conv_dw_2._conv', 'backbone.2_1._se.conv1', 'backbone.2_1._se.conv2'],
        ['backbone.2_2._conv_pw._conv', 'backbone.2_2._conv_dw._conv',
         'backbone.2_2._se.conv1', 'backbone.2_2._se.conv2'],
        ['backbone.2_3._conv_pw._conv', 'backbone.2_3._conv_dw._conv',
         'backbone.2_3._se.conv1', 'backbone.2_3._se.conv2'],
        ['backbone.3_1._conv_dw_1._conv', 'backbone.3_1._conv_pw_2._conv',
         'backbone.3_1._conv_dw_2._conv', 'backbone.3_1._se.conv1', 'backbone.3_1._se.conv2'],
        ['backbone.3_2._conv_pw._conv', 'backbone.3_2._conv_dw._conv',
         'backbone.3_2._se.conv1', 'backbone.3_2._se.conv2'],
        ['backbone.3_3._conv_pw._conv', 'backbone.3_3._conv_dw._conv',
         'backbone.3_3._se.conv1', 'backbone.3_3._se.conv2'],
        ['backbone.3_4._conv_pw._conv', 'backbone.3_4._conv_dw._conv',
         'backbone.3_4._se.conv1', 'backbone.3_4._se.conv2'],
        ['backbone.3_5._conv_pw._conv', 'backbone.3_5._conv_dw._conv',
         'backbone.3_5._se.conv1', 'backbone.3_5._se.conv2'],
        ['backbone.3_6._conv_pw._conv', 'backbone.3_6._conv_dw._conv',
         'backbone.3_6._se.conv1', 'backbone.3_6._se.conv2'],
        ['backbone.3_7._conv_pw._conv', 'backbone.3_7._conv_dw._conv',
         'backbone.3_7._se.conv1', 'backbone.3_7._se.conv2'],
        ['backbone.4_1._conv_dw_1._conv', 'backbone.4_1._conv_pw_2._conv',
         'backbone.4_1._conv_dw_2._conv', 'backbone.4_1._se.conv1', 'backbone.4_1._se.conv2'],
        ['backbone.4_2._conv_pw._conv', 'backbone.4_2._conv_dw._conv',
         'backbone.4_2._se.conv1', 'backbone.4_2._se.conv2'],
        ['backbone.4_3._conv_pw._conv', 'backbone.4_3._conv_dw._conv',
         'backbone.4_3._se.conv1', 'backbone.4_3._se.conv2']]
    # demo expand ratio
    # Generally, for expand ratio, float in (0, 1] is available.
    # But please be careful if the model is complicated.
    # For picodet, there are many split and concat, the choice of channel number is important.
    ofa_layers:
      'backbone.2_1._conv_dw_1._conv':
        'expand_ratio': [0.5, 1]
      'backbone.2_2._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.2_3._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.3_1._conv_dw_1._conv':
        'expand_ratio': [0.5, 1]
      'backbone.3_2._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.3_3._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.3_4._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.3_5._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.3_6._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.3_7._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.4_1._conv_dw_1._conv':
        'expand_ratio': [0.5, 1]
      'backbone.4_2._conv_pw._conv':
        'expand_ratio': [0.5, 1]
      'backbone.4_3._conv_pw._conv':
        'expand_ratio': [0.5, 1]
# Post-training quantization config for mask_rcnn_r50_fpn_1x_coco.
weights: https://paddledet.bj.bcebos.com/models/mask_rcnn_r50_fpn_1x_coco.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# Post-training quantization config for mcfairmot_dla34 (VisDrone vehicle, ByteTracker).
weights: https://paddledet.bj.bcebos.com/models/mot/mcfairmot_dla34_30e_1088x608_visdrone_vehicle_bytetracker.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# Post-training quantization config for picodet_s_320_coco.
weights: https://paddledet.bj.bcebos.com/models/picodet_s_320_coco.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# Post-training quantization config for ppyolo_mbv3_large_coco.
weights: https://paddledet.bj.bcebos.com/models/ppyolo_mbv3_large_coco.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# Post-training quantization config for ppyolo_r50vd_dcn_1x_coco.
weights: https://paddledet.bj.bcebos.com/models/ppyolo_r50vd_dcn_1x_coco.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# Post-training quantization config for ppyoloe_crn_s_300e_coco.
weights: https://paddledet.bj.bcebos.com/models/ppyoloe_crn_s_300e_coco.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# Post-training quantization config for keypoint model tinypose_128x96.
weights: https://paddledet.bj.bcebos.com/models/keypoint/tinypose_128x96.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# Post-training quantization config for yolov3_darknet53_270e_coco.
weights: https://paddledet.bj.bcebos.com/models/yolov3_darknet53_270e_coco.pdparams
slim: PTQ

PTQ:
  # Histogram-based activation quantizer, clipping at the 99.9th percentile.
  ptq_config: {
    'activation_quantizer': 'HistQuantizer',
    'upsample_bins': 127,
    'hist_percent': 0.999}
  # Number of batches used to collect calibration statistics.
  quant_batch_num: 10
  fuse: true
# FPGM channel pruning config for faster_rcnn_r50_fpn_1x_coco.
pretrain_weights: https://paddledet.bj.bcebos.com/models/faster_rcnn_r50_fpn_1x_coco.pdparams
slim: Pruner

Pruner:
  # FPGM (geometric-median) filter pruning criterion.
  criterion: fpgm
  # 26 conv weights to prune; pruned_ratios below is position-aligned (26 entries).
  pruned_params: ['conv2d_27.w_0', 'conv2d_28.w_0', 'conv2d_29.w_0',
                  'conv2d_30.w_0', 'conv2d_31.w_0', 'conv2d_32.w_0',
                  'conv2d_33.w_0', 'conv2d_34.w_0', 'conv2d_35.w_0',
                  'conv2d_36.w_0', 'conv2d_37.w_0', 'conv2d_38.w_0',
                  'conv2d_39.w_0', 'conv2d_40.w_0', 'conv2d_41.w_0',
                  'conv2d_42.w_0', 'conv2d_43.w_0', 'conv2d_44.w_0',
                  'conv2d_45.w_0', 'conv2d_46.w_0', 'conv2d_47.w_0',
                  'conv2d_48.w_0', 'conv2d_49.w_0', 'conv2d_50.w_0',
                  'conv2d_51.w_0', 'conv2d_52.w_0']
  pruned_ratios: [0.1, 0.2, 0.2, 0.2, 0.2, 0.1,
                  0.2, 0.3, 0.3, 0.3, 0.2, 0.1,
                  0.3, 0.4, 0.4, 0.4, 0.4, 0.3,
                  0.4, 0.4, 0.4, 0.4, 0.4, 0.4,
                  0.4, 0.4]
  print_params: false
# Unstructured (sparse) pruning config for picodet_m_320_coco, target sparsity 0.75.
pretrain_weights: https://paddledet.bj.bcebos.com/models/picodet_m_320_coco.pdparams
slim: UnstructuredPruner

UnstructuredPruner:
  # Epochs of normal training before pruning starts.
  stable_epochs: 0
  # Epochs over which sparsity ramps from initial_ratio up to ratio.
  pruning_epochs: 150
  # Fine-tuning epochs after the target sparsity is reached.
  tunning_epochs: 150
  pruning_steps: 300
  ratio: 0.75
  initial_ratio: 0.15
  # Restrict pruning to 1x1 convolutions only.
  prune_params_type: conv1x1_only
# Unstructured (sparse) pruning config for picodet_m_320_coco, target sparsity 0.85.
pretrain_weights: https://paddledet.bj.bcebos.com/models/picodet_m_320_coco.pdparams
slim: UnstructuredPruner

UnstructuredPruner:
  # Epochs of normal training before pruning starts.
  stable_epochs: 0
  # Epochs over which sparsity ramps from initial_ratio up to ratio.
  pruning_epochs: 150
  # Fine-tuning epochs after the target sparsity is reached.
  tunning_epochs: 150
  pruning_steps: 300
  ratio: 0.85
  initial_ratio: 0.20
  # Restrict pruning to 1x1 convolutions only.
  prune_params_type: conv1x1_only
# FPGM channel pruning config for ppyolo_mbv3_large_coco.
pretrain_weights: https://paddledet.bj.bcebos.com/models/ppyolo_mbv3_large_coco.pdparams
slim: Pruner

Pruner:
  # FPGM (geometric-median) filter pruning criterion.
  criterion: fpgm
  # 6 conv weights to prune; pruned_ratios below is position-aligned (6 entries).
  pruned_params: ['conv2d_62.w_0', 'conv2d_63.w_0', 'conv2d_64.w_0',
                  'conv2d_65.w_0', 'conv2d_66.w_0', 'conv2d_67.w_0']
  pruned_ratios: [0.75, 0.75, 0.75, 0.75, 0.75, 0.75]
  print_params: true
# FPGM channel pruning config for ppyolo_r50vd_dcn_1x_coco.
pretrain_weights: https://paddledet.bj.bcebos.com/models/ppyolo_r50vd_dcn_1x_coco.pdparams
slim: Pruner

Pruner:
  # FPGM (geometric-median) filter pruning criterion.
  criterion: fpgm
  # 18 conv weights to prune; pruned_ratios below is position-aligned (18 entries).
  pruned_params: ['conv2d_56.w_0', 'conv2d_57.w_0', 'conv2d_58.w_0',
                  'conv2d_59.w_0', 'conv2d_60.w_0', 'conv2d_61.w_0',
                  'conv2d_63.w_0', 'conv2d_64.w_0', 'conv2d_65.w_0',
                  'conv2d_66.w_0', 'conv2d_67.w_0', 'conv2d_68.w_0',
                  'conv2d_70.w_0', 'conv2d_71.w_0', 'conv2d_72.w_0',
                  'conv2d_73.w_0', 'conv2d_74.w_0', 'conv2d_75.w_0']
  pruned_ratios: [0.75, 0.75, 0.75, 0.75, 0.75, 0.75,
                  0.75, 0.75, 0.75, 0.75, 0.75, 0.75,
                  0.875, 0.875, 0.875, 0.875, 0.875, 0.875]
  print_params: false
# FPGM channel pruning config for yolov3_darknet53_270e_coco.
pretrain_weights: https://paddledet.bj.bcebos.com/models/yolov3_darknet53_270e_coco.pdparams
slim: Pruner

Pruner:
  # FPGM (geometric-median) filter pruning criterion.
  criterion: fpgm
  # 18 conv weights to prune; pruned_ratios below is position-aligned (18 entries).
  pruned_params: ['conv2d_52.w_0', 'conv2d_53.w_0', 'conv2d_54.w_0',
                  'conv2d_55.w_0', 'conv2d_56.w_0', 'conv2d_57.w_0',
                  'conv2d_59.w_0', 'conv2d_60.w_0', 'conv2d_61.w_0',
                  'conv2d_62.w_0', 'conv2d_63.w_0', 'conv2d_64.w_0',
                  'conv2d_66.w_0', 'conv2d_67.w_0', 'conv2d_68.w_0',
                  'conv2d_69.w_0', 'conv2d_70.w_0', 'conv2d_71.w_0']
  pruned_ratios: [0.75, 0.75, 0.75, 0.75, 0.75, 0.75,
                  0.75, 0.75, 0.75, 0.75, 0.75, 0.75,
                  0.875, 0.875, 0.875, 0.875, 0.875, 0.875]
  print_params: true
# Weights of yolov3_mobilenet_v1_voc
pretrain_weights: https://paddledet.bj.bcebos.com/models/yolov3_mobilenet_v1_270e_voc.pdparams
slim: Pruner

Pruner:
  # FPGM (geometric-median) filter pruning criterion.
  criterion: fpgm
  # 18 conv weights to prune; pruned_ratios below is position-aligned (18 entries).
  pruned_params: ['conv2d_27.w_0', 'conv2d_28.w_0', 'conv2d_29.w_0',
                  'conv2d_30.w_0', 'conv2d_31.w_0', 'conv2d_32.w_0',
                  'conv2d_34.w_0', 'conv2d_35.w_0', 'conv2d_36.w_0',
                  'conv2d_37.w_0', 'conv2d_38.w_0', 'conv2d_39.w_0',
                  'conv2d_41.w_0', 'conv2d_42.w_0', 'conv2d_43.w_0',
                  'conv2d_44.w_0', 'conv2d_45.w_0', 'conv2d_46.w_0']
  pruned_ratios: [0.1, 0.2, 0.2, 0.2, 0.2, 0.1,
                  0.2, 0.3, 0.3, 0.3, 0.2, 0.1,
                  0.3, 0.4, 0.4, 0.4, 0.4, 0.3]
  print_params: false
# Weights of yolov3_mobilenet_v1_voc
pretrain_weights: https://paddledet.bj.bcebos.com/models/yolov3_mobilenet_v1_270e_voc.pdparams
slim: Pruner

Pruner:
  # L1-norm filter importance criterion.
  criterion: l1_norm
  # 18 conv weights to prune; pruned_ratios below is position-aligned (18 entries).
  pruned_params: ['conv2d_27.w_0', 'conv2d_28.w_0', 'conv2d_29.w_0',
                  'conv2d_30.w_0', 'conv2d_31.w_0', 'conv2d_32.w_0',
                  'conv2d_34.w_0', 'conv2d_35.w_0', 'conv2d_36.w_0',
                  'conv2d_37.w_0', 'conv2d_38.w_0', 'conv2d_39.w_0',
                  'conv2d_41.w_0', 'conv2d_42.w_0', 'conv2d_43.w_0',
                  'conv2d_44.w_0', 'conv2d_45.w_0', 'conv2d_46.w_0']
  pruned_ratios: [0.1, 0.2, 0.2, 0.2, 0.2, 0.1,
                  0.2, 0.3, 0.3, 0.3, 0.2, 0.1,
                  0.3, 0.4, 0.4, 0.4, 0.4, 0.3]
  print_params: false
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment