Unverified commit c8cc01e8, authored by Kai Chen and committed by GitHub

Merge pull request #50 from hellock/dev

Update to 0.5.1
Parents: 4990aae6, 994b6cb6
@@ -32,6 +32,12 @@ which is heavily depended on by this toolbox.
 
 This project is released under the [Apache 2.0 license](LICENSE).
 
+## Updates
+
+v0.5.1 (20/10/2018)
+- Add BBoxAssigner and BBoxSampler; the `train_cfg` field in config files is restructured.
+- `ConvFCRoIHead` / `SharedFCRoIHead` are renamed to `ConvFCBBoxHead` / `SharedFCBBoxHead` for consistency.
+
 ## Benchmark and model zoo
 
 We provide our baseline results and the comparison with Detectron, the most
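As the config diffs below show, adapting an existing config to the rename only requires swapping the bbox head type string. A minimal sketch of the affected fragment (the field values mirror the configs touched in this commit; the surrounding model keys are elided):

    # Sketch of the bbox_head fragment of a model config after v0.5.1.
    # Values mirror the configs changed in this commit; other keys are elided.
    bbox_head = dict(
        # type='SharedFCRoIHead',  # pre-0.5.1 name
        type='SharedFCBBoxHead',   # name introduced in v0.5.1
        num_fcs=2,
        in_channels=256,
        fc_out_channels=1024)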
@@ -20,7 +20,7 @@ model = dict(
         out_channels=256,
         featmap_strides=[4, 8, 16, 32]),
     bbox_head=dict(
-        type='SharedFCRoIHead',
+        type='SharedFCBBoxHead',
         num_fcs=2,
         in_channels=256,
         fc_out_channels=1024,
@@ -20,7 +20,7 @@ model = dict(
         out_channels=256,
         featmap_strides=[4, 8, 16, 32]),
     bbox_head=dict(
-        type='SharedFCRoIHead',
+        type='SharedFCBBoxHead',
        num_fcs=2,
         in_channels=256,
         fc_out_channels=1024,
@@ -30,7 +30,7 @@ model = dict(
         out_channels=256,
         featmap_strides=[4, 8, 16, 32]),
     bbox_head=dict(
-        type='SharedFCRoIHead',
+        type='SharedFCBBoxHead',
         num_fcs=2,
         in_channels=256,
         fc_out_channels=1024,
@@ -30,7 +30,7 @@ model = dict(
         out_channels=256,
         featmap_strides=[4, 8, 16, 32]),
     bbox_head=dict(
-        type='SharedFCRoIHead',
+        type='SharedFCBBoxHead',
         num_fcs=2,
         in_channels=256,
         fc_out_channels=1024,
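The other change in the update entry, the `train_cfg` restructuring around the new BBoxAssigner and BBoxSampler, is not visible in this truncated diff. Purely as an illustration of what a restructured `train_cfg` could look like, a hypothetical fragment is sketched below; every key name and value in it is an assumption rather than something taken from this commit:

    # Hypothetical sketch only: the restructured train_cfg is not shown in this
    # diff, and every key name and value below is an assumption.
    train_cfg = dict(
        rcnn=dict(
            assigner=dict(      # consumed by the new BBoxAssigner
                pos_iou_thr=0.5,
                neg_iou_thr=0.5),
            sampler=dict(       # consumed by the new BBoxSampler
                num=512,
                pos_fraction=0.25)))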
 from .bbox_head import BBoxHead
-from .convfc_bbox_head import ConvFCRoIHead, SharedFCRoIHead
+from .convfc_bbox_head import ConvFCBBoxHead, SharedFCBBoxHead
 
-__all__ = ['BBoxHead', 'ConvFCRoIHead', 'SharedFCRoIHead']
+__all__ = ['BBoxHead', 'ConvFCBBoxHead', 'SharedFCBBoxHead']
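Downstream code that imported the old names needs the matching one-line change. A minimal sketch; the absolute package path is an assumption, since this diff only shows the package-relative imports:

    # Sketch of updating a downstream import after the rename. The absolute
    # path 'mmdet.models.bbox_heads' is an assumption; the diff above only
    # shows relative imports inside the bbox heads package.
    # Before v0.5.1:
    #   from mmdet.models.bbox_heads import ConvFCRoIHead, SharedFCRoIHead
    from mmdet.models.bbox_heads import ConvFCBBoxHead, SharedFCBBoxHead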
@@ -4,7 +4,7 @@ from .bbox_head import BBoxHead
 from ..utils import ConvModule
 
 
-class ConvFCRoIHead(BBoxHead):
+class ConvFCBBoxHead(BBoxHead):
     """More general bbox head, with shared conv and fc layers and two optional
     separated branches.
@@ -24,7 +24,7 @@ class ConvFCRoIHead(BBoxHead):
                  fc_out_channels=1024,
                  *args,
                  **kwargs):
-        super(ConvFCRoIHead, self).__init__(*args, **kwargs)
+        super(ConvFCBBoxHead, self).__init__(*args, **kwargs)
         assert (num_shared_convs + num_shared_fcs + num_cls_convs + num_cls_fcs
                 + num_reg_convs + num_reg_fcs > 0)
         if num_cls_convs > 0 or num_reg_convs > 0:
@@ -116,7 +116,7 @@ class ConvFCRoIHead(BBoxHead):
         return branch_convs, branch_fcs, last_layer_dim
 
     def init_weights(self):
-        super(ConvFCRoIHead, self).init_weights()
+        super(ConvFCBBoxHead, self).init_weights()
         for module_list in [self.shared_fcs, self.cls_fcs, self.reg_fcs]:
             for m in module_list.modules():
                 if isinstance(m, nn.Linear):
@@ -162,11 +162,11 @@ class ConvFCRoIHead(BBoxHead):
         return cls_score, bbox_pred
 
 
-class SharedFCRoIHead(ConvFCRoIHead):
+class SharedFCBBoxHead(ConvFCBBoxHead):
 
     def __init__(self, num_fcs=2, fc_out_channels=1024, *args, **kwargs):
         assert num_fcs >= 1
-        super(SharedFCRoIHead, self).__init__(
+        super(SharedFCBBoxHead, self).__init__(
             num_shared_convs=0,
             num_shared_fcs=num_fcs,
             num_cls_convs=0,
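Apart from the renames, the behaviour of the heads is unchanged: SharedFCBBoxHead is still a ConvFCBBoxHead with all of its fully connected layers in the shared branch, and the forward pass still returns the (cls_score, bbox_pred) pair. A smoke-test sketch; the input feature shape and the extra constructor keywords are assumptions not confirmed by this truncated diff:

    import torch
    from mmdet.models.bbox_heads import SharedFCBBoxHead  # path assumed as above

    # Sketch only: in_channels and the 7x7 pooled feature size are assumptions;
    # the diff confirms num_fcs, fc_out_channels and the (cls_score, bbox_pred)
    # return value.
    head = SharedFCBBoxHead(num_fcs=2, in_channels=256, fc_out_channels=1024)
    head.init_weights()
    feats = torch.randn(8, 256, 7, 7)    # 8 RoIs of pooled features
    cls_score, bbox_pred = head(feats)   # per-RoI classification and box regression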
@@ -12,7 +12,7 @@ def readme():
 
 MAJOR = 0
 MINOR = 5
-PATCH = 0
+PATCH = 1
 SUFFIX = ''
 
 SHORT_VERSION = '{}.{}.{}{}'.format(MAJOR, MINOR, PATCH, SUFFIX)
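With the bumped PATCH value, the SHORT_VERSION expression above evaluates to the new release string:

    # The format string from the version file, evaluated with the new values.
    MAJOR, MINOR, PATCH, SUFFIX = 0, 5, 1, ''
    SHORT_VERSION = '{}.{}.{}{}'.format(MAJOR, MINOR, PATCH, SUFFIX)
    print(SHORT_VERSION)  # prints: 0.5.1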