Commit 0d97cc8c authored by Sugon_ldc's avatar Sugon_ldc
Browse files

add new model

parents
Pipeline #316 failed with stages
in 0 seconds
### PPM
| Method | SAD | MSE | Grad | Conn |
|-|-|-|-|-|
|ClosedFormMatting|40.6251|0.0782|55.5716|40.6646|
|KNNMatting|41.5604|0.0681|52.5200|42.1784|
|FastMatting|35.8735|0.0492|48.9267|35.6183|
|LearningBasedMatting|40.5506|0.0776|55.3923|40.5690|
|RandomWalksMatting|54.6315|0.0962|69.8779|54.0870|
# Training-free traditional matting baseline (Closed-Form Matting):
# only an evaluation dataset and the model type are configured.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting closeform.yml.
val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: ResizeByShort
      short_size: 512
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
  mode: val
  get_trimap: True

model:
  type: CloseFormMatting
# Inherits dataset/eval settings from closeform.yml; overrides only the model type.
_base_: closeform.yml

model:
  type: FastMatting
# Inherits dataset/eval settings from closeform.yml; overrides only the model type.
_base_: closeform.yml

model:
  type: KNNMatting
# Inherits dataset/eval settings from closeform.yml; overrides only the model type.
_base_: closeform.yml

model:
  type: LearningBasedMatting
# Inherits dataset/eval settings from closeform.yml; overrides only the model type.
_base_: closeform.yml

model:
  type: RandomWalksMatting
# DIM (Deep Image Matting) with a VGG16 backbone on PPM-100.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting dim-vgg16.yml.
batch_size: 16
iters: 100000

train_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: RandomCropByAlpha
      crop_size: [[320, 320], [480, 480], [640, 640]]
    - type: Resize
      target_size: [320, 320]
    - type: RandomDistort
    - type: RandomBlur
    - type: RandomHorizontalFlip
    - type: Normalize
  mode: train
  get_trimap: True

val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: LimitLong
      max_long: 3840
    - type: Normalize
  mode: val
  get_trimap: True

model:
  type: DIM
  backbone:
    type: VGG16
    input_channels: 4
    pretrained: https://paddleseg.bj.bcebos.com/matting/models/DIM_VGG16_pretrained/model.pdparams
  # Whole-model pretrained weights intentionally unset (backbone weights loaded above).
  pretrained: Null

optimizer:
  type: adam

learning_rate:
  value: 0.001
# HumanMatting (trimap-free) with a ResNet34_vd backbone on PPM-100.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting human-matting config.
batch_size: 4
iters: 50000

train_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: RandomResize
      size: [2048, 2048]
      scale: [0.3, 1.5]
    - type: RandomCrop
      crop_size: [2048, 2048]
    - type: RandomDistort
    - type: RandomBlur
      prob: 0.1
    - type: RandomHorizontalFlip
    - type: Padding
      target_size: [2048, 2048]
    - type: Normalize
  mode: train

val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: ResizeByShort
      short_size: 2048
    - type: ResizeToIntMult
      mult_int: 128
    - type: Normalize
  mode: val
  get_trimap: False

model:
  type: HumanMatting
  backbone:
    type: ResNet34_vd
    pretrained: https://paddleseg.bj.bcebos.com/matting/models/ResNet34_vd_pretrained/model.pdparams
  pretrained: Null
  if_refine: True

optimizer:
  type: sgd
  momentum: 0.9
  weight_decay: 4.0e-5

lr_scheduler:
  type: PiecewiseDecay
  boundaries: [30000, 40000]
  values: [0.001, 0.0001, 0.00001]
# MODNet variant: swaps the base config's backbone for HRNet_W18.
_base_: modnet-mobilenetv2.yml

model:
  backbone:
    type: HRNet_W18
    pretrained: https://bj.bcebos.com/paddleseg/dygraph/hrnet_w18_ssld.tar.gz
# MODNet (trimap-free) with a MobileNetV2 backbone on PPM-100.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting modnet-mobilenetv2.yml.
batch_size: 16
iters: 100000

train_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: RandomCrop
      crop_size: [512, 512]
    - type: RandomDistort
    - type: RandomBlur
    - type: RandomHorizontalFlip
    - type: Normalize
  mode: train

val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: ResizeByShort
      short_size: 512
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
  mode: val
  get_trimap: False

model:
  type: MODNet
  backbone:
    type: MobileNetV2
    pretrained: https://paddleseg.bj.bcebos.com/matting/models/MobileNetV2_pretrained/model.pdparams
  pretrained: Null

optimizer:
  type: sgd
  momentum: 0.9
  weight_decay: 4.0e-5

lr_scheduler:
  type: PiecewiseDecay
  boundaries: [40000, 80000]
  values: [0.02, 0.002, 0.0002]
# MODNet variant: swaps the base config's backbone for ResNet50_vd.
_base_: modnet-mobilenetv2.yml

model:
  backbone:
    type: ResNet50_vd
    pretrained: https://bj.bcebos.com/paddleseg/dygraph/resnet50_vd_ssld_v2.tar.gz
# PP-Matting: High-Accuracy Natural Image Matting
## Reference
> Chen G, Liu Y, Wang J, et al. PP-Matting: High-Accuracy Natural Image Matting[J]. arXiv preprint arXiv:2204.09433, 2022.
## Performance
### Composition-1k
| Model | Backbone | Resolution | Training Iters | SAD $\downarrow$ | MSE $\downarrow$ | Grad $\downarrow$ | Conn $\downarrow$ | Links |
|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|
|PP-Matting|HRNet_W48|512x512|300000|46.22|0.005|22.69|45.40|[model](https://paddleseg.bj.bcebos.com/matting/models/ppmatting-hrnet_w48-composition.pdparams)|
### Distinctions-646
| Model | Backbone | Resolution | Training Iters | SAD $\downarrow$ | MSE $\downarrow$ | Grad $\downarrow$ | Conn $\downarrow$ | Links |
|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|
|PP-Matting|HRNet_W48|512x512|300000|40.69|0.009|43.91|40.56|[model](https://paddleseg.bj.bcebos.com/matting/models/ppmatting-hrnet_w48-distinctions.pdparams)|
# PP-Matting human model at 1024 resolution: inherits the 512 config and
# only overrides the transform pipelines for the larger input size.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting config.
_base_: 'ppmatting-hrnet_w18-human_512.yml'

train_dataset:
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 1024
    - type: RandomCrop
      crop_size: [1024, 1024]
    - type: RandomDistort
    - type: RandomBlur
      prob: 0.1
    - type: RandomNoise
      prob: 0.5
    - type: RandomReJpeg
      prob: 0.2
    - type: RandomHorizontalFlip
    - type: Normalize

val_dataset:
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 1024
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
# PP-Matting human model at 512 resolution: inherits the W48/Distinctions base,
# retargets to PPM-100 and swaps the backbone to HRNet_W18.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting config.
_base_: 'ppmatting-hrnet_w48-distinctions.yml'

batch_size: 4
iters: 200000

train_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 512
    - type: RandomCrop
      crop_size: [512, 512]
    - type: RandomDistort
    - type: RandomBlur
      prob: 0.1
    - type: RandomNoise
      prob: 0.5
    - type: RandomReJpeg
      prob: 0.2
    - type: RandomHorizontalFlip
    - type: Normalize
  mode: train

val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 512
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
  mode: val
  get_trimap: False

model:
  backbone:
    type: HRNet_W18
    pretrained: https://bj.bcebos.com/paddleseg/dygraph/hrnet_w18_ssld.tar.gz
# PP-Matting on Composition-1k: inherits the Distinctions config and
# only points both datasets at the Composition-1k root.
_base_: 'ppmatting-hrnet_w48-distinctions.yml'

train_dataset:
  dataset_root: data/matting/Composition-1k

val_dataset:
  dataset_root: data/matting/Composition-1k
# PP-Matting with an HRNet_W48 backbone on Distinctions-646.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting config.
batch_size: 4
iters: 300000

train_dataset:
  type: MattingDataset
  dataset_root: data/matting/Distinctions-646
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: Padding
      target_size: [512, 512]
    - type: RandomCrop
      crop_size: [[512, 512], [640, 640], [800, 800]]
    - type: Resize
      target_size: [512, 512]
    - type: RandomDistort
    - type: RandomBlur
      prob: 0.1
    - type: RandomHorizontalFlip
    - type: Normalize
  mode: train
  # File lists use '|' between image and alpha paths.
  separator: '|'

val_dataset:
  type: MattingDataset
  dataset_root: data/matting/Distinctions-646
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 1536
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
  mode: val
  get_trimap: False
  separator: '|'

model:
  type: PPMatting
  backbone:
    type: HRNet_W48
    pretrained: https://bj.bcebos.com/paddleseg/dygraph/hrnet_w48_ssld.tar.gz
  pretrained: Null

optimizer:
  type: sgd
  momentum: 0.9
  weight_decay: 4.0e-5

lr_scheduler:
  type: PolynomialDecay
  learning_rate: 0.01
  end_lr: 0
  power: 0.9
# PP-MattingV2 with an STDC1 backbone on PPM-100, 512 resolution.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting config.
batch_size: 16  # total batch size: 16
iters: 100000

train_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 512
    - type: RandomCrop
      crop_size: [512, 512]
    - type: Padding
      target_size: [512, 512]
    - type: RandomDistort
    - type: RandomBlur
      prob: 0.1
    - type: RandomSharpen
      prob: 0.2
    - type: RandomNoise
      prob: 0.5
    - type: RandomReJpeg
      prob: 0.2
    - type: RandomHorizontalFlip
    - type: Normalize
  mode: train

val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 512
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
  mode: val
  get_trimap: False

model:
  type: PPMattingV2
  backbone:
    type: STDC1
    pretrained: https://bj.bcebos.com/paddleseg/dygraph/PP_STDCNet1.tar.gz
  decoder_channels: [128, 96, 64, 32, 16]
  head_channel: 8
  dpp_output_channel: 256
  dpp_merge_type: add

optimizer:
  type: sgd
  momentum: 0.9
  weight_decay: 5.0e-4

lr_scheduler:
  type: PolynomialDecay
  learning_rate: 0.01
  end_lr: 0
  power: 0.9
  warmup_iters: 1000
  warmup_start_lr: 1.0e-5
# Quick-start MODNet config: tiny batch/iteration budget for a smoke run.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting quick-start config.
batch_size: 1
iters: 1000

train_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: RandomCrop
      crop_size: [512, 512]
    - type: RandomDistort
    - type: RandomBlur
    - type: RandomHorizontalFlip
    - type: Normalize
  mode: train

val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: ResizeByShort
      short_size: 512
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
  mode: val
  get_trimap: False

model:
  type: MODNet
  backbone:
    type: MobileNetV2
    pretrained: https://paddleseg.bj.bcebos.com/matting/models/MobileNetV2_pretrained/model.pdparams
  pretrained: Null

optimizer:
  type: sgd
  momentum: 0.9
  weight_decay: 4.0e-5

lr_scheduler:
  type: PiecewiseDecay
  boundaries: [40000, 80000]
  values: [0.02, 0.002, 0.0002]
# Quick-start PP-MattingV2 config: tiny batch/iteration budget for a smoke run.
# NOTE(review): nesting reconstructed — indentation was lost in extraction;
# verify against the upstream PaddleSeg Matting quick-start config.
batch_size: 1
iters: 1000

train_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  train_file: train.txt
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 512
    - type: RandomCrop
      crop_size: [512, 512]
    - type: Padding
      target_size: [512, 512]
    - type: RandomDistort
    - type: RandomBlur
      prob: 0.1
    - type: RandomSharpen
      prob: 0.2
    - type: RandomNoise
      prob: 0.5
    - type: RandomReJpeg
      prob: 0.2
    - type: RandomHorizontalFlip
    - type: Normalize
  mode: train

val_dataset:
  type: MattingDataset
  dataset_root: data/PPM-100
  val_file: val.txt
  transforms:
    - type: LoadImages
    - type: LimitShort
      max_short: 512
    - type: ResizeToIntMult
      mult_int: 32
    - type: Normalize
  mode: val
  get_trimap: False

model:
  type: PPMattingV2
  backbone:
    type: STDC1
    pretrained: https://bj.bcebos.com/paddleseg/dygraph/PP_STDCNet1.tar.gz
  decoder_channels: [128, 96, 64, 32, 16]
  head_channel: 8
  dpp_output_channel: 256
  dpp_merge_type: add

optimizer:
  type: sgd
  momentum: 0.9
  weight_decay: 5.0e-4

lr_scheduler:
  type: PolynomialDecay
  learning_rate: 0.01
  end_lr: 0
  power: 0.9
  warmup_iters: 100
  warmup_start_lr: 1.0e-5
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment