Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
wangsen
paddle_dbnet
Commits
6658f5b2
"...git@developer.sourcefind.cn:chenpangpang/open-webui.git" did not exist on "015772ef9ab2447e97855bff7ce73a8e83ad0dc2"
Commit
6658f5b2
authored
Apr 27, 2022
by
LDOUBLEV
Browse files
fix ConvBn
parent
b6cd6047
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
74 additions
and
1 deletion
+74
-1
ppocr/modeling/necks/db_fpn.py
ppocr/modeling/necks/db_fpn.py
+74
-1
No files found.
ppocr/modeling/necks/db_fpn.py
View file @
6658f5b2
...
@@ -27,7 +27,80 @@ __dir__ = os.path.dirname(os.path.abspath(__file__))
...
@@ -27,7 +27,80 @@ __dir__ = os.path.dirname(os.path.abspath(__file__))
sys
.
path
.
append
(
__dir__
)
sys
.
path
.
append
(
__dir__
)
sys
.
path
.
insert
(
0
,
os
.
path
.
abspath
(
os
.
path
.
join
(
__dir__
,
'../../..'
)))
sys
.
path
.
insert
(
0
,
os
.
path
.
abspath
(
os
.
path
.
join
(
__dir__
,
'../../..'
)))
from
ppocr.modeling.backbones.det_mobilenet_v3
import
SEModule
,
ConvBNLayer
from
ppocr.modeling.backbones.det_mobilenet_v3
import
SEModule
class ConvBNLayer(nn.Layer):
    """Depthwise-separable conv block used by the DB FPN neck.

    Structure: grouped (depthwise by default) conv -> BN -> 1x1 expansion to
    4x channels -> BN -> activation -> 1x1 projection to ``out_channels``.
    When ``in_channels != out_channels`` a 1x1 shortcut conv on the raw input
    is added to the output (no identity skip is applied when the channel
    counts match — NOTE(review): presumably intentional; matches the original
    behavior).

    NOTE(review): this local class shadows the ``ConvBNLayer`` imported from
    ``det_mobilenet_v3`` for the remainder of this module.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 padding,
                 stride=1,
                 groups=None,
                 if_act=True,
                 act="relu"):
        """
        Args:
            in_channels (int): channel count of the input feature map.
            out_channels (int): channel count of the output feature map.
            kernel_size (int|tuple): kernel size of the first (grouped) conv.
            padding (int|tuple): padding of the first conv.
            stride (int|tuple): stride of the first conv. Default: 1.
            groups (int|None): groups for the first conv; when None it
                defaults to ``in_channels`` (i.e. a depthwise conv).
            if_act (bool): whether to apply an activation after the second BN.
            act (str): activation name, ``"relu"`` or ``"hardswish"``.
        """
        super(ConvBNLayer, self).__init__()
        # Fix: identity comparison with None (was `groups == None`).
        if groups is None:
            groups = in_channels
        self.if_act = if_act
        self.act = act
        # Grouped/depthwise conv that keeps the channel count unchanged.
        self.conv1 = nn.Conv2D(
            in_channels=in_channels,
            out_channels=in_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            groups=groups,
            bias_attr=False)
        self.bn1 = nn.BatchNorm(num_channels=in_channels, act=None)
        # 1x1 expansion to 4x channels.
        self.conv2 = nn.Conv2D(
            in_channels=in_channels,
            out_channels=int(in_channels * 4),
            kernel_size=1,
            stride=1,
            bias_attr=False)
        self.bn2 = nn.BatchNorm(num_channels=int(in_channels * 4), act=None)
        # 1x1 projection down to the requested output channels.
        self.conv3 = nn.Conv2D(
            in_channels=int(in_channels * 4),
            out_channels=out_channels,
            kernel_size=1,
            stride=1,
            bias_attr=False)
        # Remember (in, out) channel counts; forward() uses this to decide
        # whether the shortcut conv applies.
        self._c = [in_channels, out_channels]
        if in_channels != out_channels:
            # 1x1 shortcut so the residual addition in forward() is
            # shape-compatible with the projected output.
            self.conv_end = nn.Conv2D(
                in_channels=in_channels,
                out_channels=out_channels,
                kernel_size=1,
                stride=1,
                bias_attr=False)

    def forward(self, inputs):
        """Apply conv/BN/activation stack; add 1x1 shortcut when channels differ.

        Args:
            inputs (Tensor): input feature map, NCHW — TODO confirm layout.

        Returns:
            Tensor: feature map with ``out_channels`` channels.

        Raises:
            ValueError: if ``if_act`` is set and ``act`` is neither
                ``"relu"`` nor ``"hardswish"``.
        """
        x = self.conv1(inputs)
        x = self.bn1(x)
        x = self.conv2(x)
        x = self.bn2(x)
        if self.if_act:
            if self.act == "relu":
                x = F.relu(x)
            elif self.act == "hardswish":
                x = F.hardswish(x)
            else:
                # Fix: raise instead of print() + exit() — exit() killed the
                # whole process and could not be handled by callers.
                raise ValueError(
                    "The activation function({}) is selected incorrectly.".
                    format(self.act))
        x = self.conv3(x)
        # conv_end only exists when in/out channel counts differ (see
        # __init__), which is exactly the condition checked here.
        if self._c[0] != self._c[1]:
            x = x + self.conv_end(inputs)
        return x
class
DBFPN
(
nn
.
Layer
):
class
DBFPN
(
nn
.
Layer
):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment