Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
SOLOv2-pytorch
Commits
86cc430a
Unverified
Commit
86cc430a
authored
Jul 29, 2019
by
Kai Chen
Committed by
GitHub
Jul 29, 2019
Browse files
Restructure the ops directory (#1073)
* restructure the ops directory * add some repr strings
parent
8387aba8
Changes
30
Show whitespace changes
Inline
Side-by-side
Showing
10 changed files
with
52 additions
and
51 deletions
+52
-51
mmdet/ops/roi_pool/__init__.py
mmdet/ops/roi_pool/__init__.py
+1
-2
mmdet/ops/roi_pool/functions/__init__.py
mmdet/ops/roi_pool/functions/__init__.py
+0
-0
mmdet/ops/roi_pool/modules/__init__.py
mmdet/ops/roi_pool/modules/__init__.py
+0
-0
mmdet/ops/roi_pool/modules/roi_pool.py
mmdet/ops/roi_pool/modules/roi_pool.py
+0
-22
mmdet/ops/roi_pool/roi_pool.py
mmdet/ops/roi_pool/roi_pool.py
+29
-1
mmdet/ops/sigmoid_focal_loss/__init__.py
mmdet/ops/sigmoid_focal_loss/__init__.py
+1
-1
mmdet/ops/sigmoid_focal_loss/functions/__init__.py
mmdet/ops/sigmoid_focal_loss/functions/__init__.py
+0
-0
mmdet/ops/sigmoid_focal_loss/modules/__init__.py
mmdet/ops/sigmoid_focal_loss/modules/__init__.py
+0
-0
mmdet/ops/sigmoid_focal_loss/modules/sigmoid_focal_loss.py
mmdet/ops/sigmoid_focal_loss/modules/sigmoid_focal_loss.py
+0
-24
mmdet/ops/sigmoid_focal_loss/sigmoid_focal_loss.py
mmdet/ops/sigmoid_focal_loss/sigmoid_focal_loss.py
+21
-1
No files found.
mmdet/ops/roi_pool/__init__.py
View file @
86cc430a
from
.functions.roi_pool
import
roi_pool
from
.roi_pool
import
roi_pool
,
RoIPool
from
.modules.roi_pool
import
RoIPool
__all__
=
[
'roi_pool'
,
'RoIPool'
]
__all__
=
[
'roi_pool'
,
'RoIPool'
]
mmdet/ops/roi_pool/functions/__init__.py
deleted
100644 → 0
View file @
8387aba8
mmdet/ops/roi_pool/modules/__init__.py
deleted
100644 → 0
View file @
8387aba8
mmdet/ops/roi_pool/modules/roi_pool.py
deleted
100644 → 0
View file @
8387aba8
import
torch.nn
as
nn
from
torch.nn.modules.utils
import
_pair
from
..functions.roi_pool
import
roi_pool
class RoIPool(nn.Module):
    """RoI pooling layer.

    Pools each RoI in ``rois`` out of ``features`` to a fixed spatial
    size, using either the project's custom ``roi_pool`` op or
    ``torchvision.ops.roi_pool``.
    """

    def __init__(self, out_size, spatial_scale, use_torchvision=False):
        """
        Args:
            out_size: target output size of the pooled region.
            spatial_scale: factor mapping input coords to feature-map
                coords; stored as ``float``.
            use_torchvision (bool): delegate to torchvision's op when True.
        """
        super(RoIPool, self).__init__()
        self.out_size = out_size
        self.spatial_scale = float(spatial_scale)
        self.use_torchvision = use_torchvision

    def forward(self, features, rois):
        """Pool the RoI regions; dispatches on ``use_torchvision``."""
        if not self.use_torchvision:
            return roi_pool(features, rois, self.out_size, self.spatial_scale)
        # torchvision expects the output size as an explicit (h, w) pair.
        from torchvision.ops import roi_pool as tv_roi_pool
        return tv_roi_pool(features, rois, _pair(self.out_size),
                           self.spatial_scale)
mmdet/ops/roi_pool/
functions/
roi_pool.py
→
mmdet/ops/roi_pool/roi_pool.py
View file @
86cc430a
import
torch
import
torch
import
torch.nn
as
nn
from
torch.autograd
import
Function
from
torch.autograd
import
Function
from
torch.autograd.function
import
once_differentiable
from
torch.nn.modules.utils
import
_pair
from
torch.nn.modules.utils
import
_pair
from
.
.
import
roi_pool_cuda
from
.
import
roi_pool_cuda
class
RoIPoolFunction
(
Function
):
class
RoIPoolFunction
(
Function
):
...
@@ -27,6 +29,7 @@ class RoIPoolFunction(Function):
...
@@ -27,6 +29,7 @@ class RoIPoolFunction(Function):
return
output
return
output
@
staticmethod
@
staticmethod
@
once_differentiable
def
backward
(
ctx
,
grad_output
):
def
backward
(
ctx
,
grad_output
):
assert
grad_output
.
is_cuda
assert
grad_output
.
is_cuda
spatial_scale
=
ctx
.
spatial_scale
spatial_scale
=
ctx
.
spatial_scale
...
@@ -45,3 +48,28 @@ class RoIPoolFunction(Function):
...
@@ -45,3 +48,28 @@ class RoIPoolFunction(Function):
roi_pool
=
RoIPoolFunction
.
apply
roi_pool
=
RoIPoolFunction
.
apply
class RoIPool(nn.Module):
    """``nn.Module`` wrapper around the ``roi_pool`` function.

    Keeps the pooled output size and spatial scale as module state so the
    op can be used inside an ``nn.Sequential``-style model definition.
    """

    def __init__(self, out_size, spatial_scale, use_torchvision=False):
        """
        Args:
            out_size: target output size of each pooled RoI.
            spatial_scale: input-to-feature-map coordinate scale; coerced
                to ``float``.
            use_torchvision (bool): use ``torchvision.ops.roi_pool``
                instead of the custom CUDA op when True.
        """
        super(RoIPool, self).__init__()
        self.out_size = out_size
        self.spatial_scale = float(spatial_scale)
        self.use_torchvision = use_torchvision

    def forward(self, features, rois):
        """Pool every RoI in ``rois`` out of ``features``."""
        if self.use_torchvision:
            # torchvision wants the output size as an (h, w) pair.
            from torchvision.ops import roi_pool as tv_roi_pool
            return tv_roi_pool(features, rois, _pair(self.out_size),
                               self.spatial_scale)
        return roi_pool(features, rois, self.out_size, self.spatial_scale)

    def __repr__(self):
        # Single format call; output is identical to the incremental
        # string-building version it replaces.
        return ('{}(out_size={}, spatial_scale={}, use_torchvision={})'
                .format(self.__class__.__name__, self.out_size,
                        self.spatial_scale, self.use_torchvision))
mmdet/ops/sigmoid_focal_loss/__init__.py
View file @
86cc430a
from
.modules
.sigmoid_focal_loss
import
SigmoidFocalLoss
,
sigmoid_focal_loss
from
.sigmoid_focal_loss
import
SigmoidFocalLoss
,
sigmoid_focal_loss
__all__
=
[
'SigmoidFocalLoss'
,
'sigmoid_focal_loss'
]
__all__
=
[
'SigmoidFocalLoss'
,
'sigmoid_focal_loss'
]
mmdet/ops/sigmoid_focal_loss/functions/__init__.py
deleted
100644 → 0
View file @
8387aba8
mmdet/ops/sigmoid_focal_loss/modules/__init__.py
deleted
100644 → 0
View file @
8387aba8
mmdet/ops/sigmoid_focal_loss/modules/sigmoid_focal_loss.py
deleted
100644 → 0
View file @
8387aba8
from
torch
import
nn
from
..functions.sigmoid_focal_loss
import
sigmoid_focal_loss
# TODO: remove this module
class SigmoidFocalLoss(nn.Module):
    """Module wrapper over the ``sigmoid_focal_loss`` function.

    Stores the focal-loss ``gamma``/``alpha`` hyper-parameters and returns
    the summed loss over all elements.
    """

    def __init__(self, gamma, alpha):
        """
        Args:
            gamma: focusing parameter of the focal loss.
            alpha: class-balance weighting factor.
        """
        super(SigmoidFocalLoss, self).__init__()
        self.gamma = gamma
        self.alpha = alpha

    def forward(self, logits, targets):
        """Compute the summed focal loss; CUDA tensors only."""
        assert logits.is_cuda  # the underlying op is CUDA-only
        elementwise = sigmoid_focal_loss(logits, targets, self.gamma,
                                         self.alpha)
        return elementwise.sum()

    def __repr__(self):
        # Same text as the original concatenation-built repr.
        return '{}(gamma={}, alpha={})'.format(
            self.__class__.__name__, self.gamma, self.alpha)
mmdet/ops/sigmoid_focal_loss/
functions/
sigmoid_focal_loss.py
→
mmdet/ops/sigmoid_focal_loss/sigmoid_focal_loss.py
View file @
86cc430a
import
torch.nn
as
nn
from
torch.autograd
import
Function
from
torch.autograd
import
Function
from
torch.autograd.function
import
once_differentiable
from
torch.autograd.function
import
once_differentiable
from
.
.
import
sigmoid_focal_loss_cuda
from
.
import
sigmoid_focal_loss_cuda
class
SigmoidFocalLossFunction
(
Function
):
class
SigmoidFocalLossFunction
(
Function
):
...
@@ -32,3 +33,22 @@ class SigmoidFocalLossFunction(Function):
...
@@ -32,3 +33,22 @@ class SigmoidFocalLossFunction(Function):
sigmoid_focal_loss
=
SigmoidFocalLossFunction
.
apply
sigmoid_focal_loss
=
SigmoidFocalLossFunction
.
apply
# TODO: remove this module
class SigmoidFocalLoss(nn.Module):
    """Focal-loss module backed by the ``sigmoid_focal_loss`` function.

    Holds the ``gamma`` and ``alpha`` hyper-parameters and reduces the
    per-element loss with a sum.
    """

    def __init__(self, gamma, alpha):
        """
        Args:
            gamma: focusing parameter of the focal loss.
            alpha: class-balance weighting factor.
        """
        super(SigmoidFocalLoss, self).__init__()
        self.gamma = gamma
        self.alpha = alpha

    def forward(self, logits, targets):
        """Return the summed focal loss; requires CUDA inputs."""
        assert logits.is_cuda  # custom op only has a CUDA kernel
        per_elem = sigmoid_focal_loss(logits, targets, self.gamma, self.alpha)
        return per_elem.sum()

    def __repr__(self):
        # Built in two steps; the resulting string matches the original.
        params = 'gamma={}, alpha={}'.format(self.gamma, self.alpha)
        return '{}({})'.format(type(self).__name__, params)
Prev
1
2
Next
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment