Commit 6c7ff9c7 authored by LDOUBLEV

fix conflict

parents ac91a9e1 9b8f587e
@@ -109,7 +109,7 @@ class EASTHead(nn.Layer):
             act=None,
             name="f_geo")

-    def forward(self, x):
+    def forward(self, x, targets=None):
         f_det = self.det_conv1(x)
         f_det = self.det_conv2(f_det)
         f_score = self.score_conv(f_det)
@@ -116,7 +116,7 @@ class SASTHead(nn.Layer):
         self.head1 = SAST_Header1(in_channels)
         self.head2 = SAST_Header2(in_channels)

-    def forward(self, x):
+    def forward(self, x, targets=None):
         f_score, f_border = self.head1(x)
         f_tvo, f_tco = self.head2(x)
@@ -220,7 +220,7 @@ class PGHead(nn.Layer):
             weight_attr=ParamAttr(name="conv_f_direc{}".format(4)),
             bias_attr=False)

-    def forward(self, x):
+    def forward(self, x, targets=None):
         f_score = self.conv_f_score1(x)
         f_score = self.conv_f_score2(f_score)
         f_score = self.conv_f_score3(f_score)
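Across this commit, every head's forward gains an optional targets argument (EASTHead, SASTHead, and PGHead above; the recognition heads below). The architecture can now pass ground-truth tensors to heads that consume them (e.g. SRNHead) while purely convolutional heads accept and ignore the keyword. A minimal sketch of the unified calling convention; run_head, feats, and batch are illustrative names, not from this commit:

import paddle

def run_head(head, feats, batch=None):
    # During training, batch holds the ground-truth tensors from the data
    # loader; at inference it stays None. Heads that do not need targets
    # simply accept and ignore the keyword argument.
    return head(feats, targets=batch)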
@@ -23,32 +23,57 @@ from paddle import ParamAttr, nn
 from paddle.nn import functional as F


-def get_para_bias_attr(l2_decay, k, name):
+def get_para_bias_attr(l2_decay, k):
     regularizer = paddle.regularizer.L2Decay(l2_decay)
     stdv = 1.0 / math.sqrt(k * 1.0)
     initializer = nn.initializer.Uniform(-stdv, stdv)
-    weight_attr = ParamAttr(
-        regularizer=regularizer, initializer=initializer, name=name + "_w_attr")
-    bias_attr = ParamAttr(
-        regularizer=regularizer, initializer=initializer, name=name + "_b_attr")
+    weight_attr = ParamAttr(regularizer=regularizer, initializer=initializer)
+    bias_attr = ParamAttr(regularizer=regularizer, initializer=initializer)
     return [weight_attr, bias_attr]


 class CTCHead(nn.Layer):
-    def __init__(self, in_channels, out_channels, fc_decay=0.0004, **kwargs):
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 fc_decay=0.0004,
+                 mid_channels=None,
+                 **kwargs):
         super(CTCHead, self).__init__()
-        weight_attr, bias_attr = get_para_bias_attr(
-            l2_decay=fc_decay, k=in_channels, name='ctc_fc')
-        self.fc = nn.Linear(
-            in_channels,
-            out_channels,
-            weight_attr=weight_attr,
-            bias_attr=bias_attr,
-            name='ctc_fc')
+        if mid_channels is None:
+            weight_attr, bias_attr = get_para_bias_attr(
+                l2_decay=fc_decay, k=in_channels)
+            self.fc = nn.Linear(
+                in_channels,
+                out_channels,
+                weight_attr=weight_attr,
+                bias_attr=bias_attr)
+        else:
+            weight_attr1, bias_attr1 = get_para_bias_attr(
+                l2_decay=fc_decay, k=in_channels)
+            self.fc1 = nn.Linear(
+                in_channels,
+                mid_channels,
+                weight_attr=weight_attr1,
+                bias_attr=bias_attr1)
+
+            weight_attr2, bias_attr2 = get_para_bias_attr(
+                l2_decay=fc_decay, k=mid_channels)
+            self.fc2 = nn.Linear(
+                mid_channels,
+                out_channels,
+                weight_attr=weight_attr2,
+                bias_attr=bias_attr2)
         self.out_channels = out_channels
+        self.mid_channels = mid_channels

-    def forward(self, x, labels=None):
-        predicts = self.fc(x)
+    def forward(self, x, targets=None):
+        if self.mid_channels is None:
+            predicts = self.fc(x)
+        else:
+            predicts = self.fc1(x)
+            predicts = self.fc2(predicts)
+
         if not self.training:
             predicts = F.softmax(predicts, axis=2)
         return predicts
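With the new mid_channels option, CTCHead can insert an intermediate fully connected layer (fc1 -> fc2) before projecting to the label space; leaving mid_channels as None keeps the original single-FC behavior. A short usage sketch; the channel sizes and the module path in the import are illustrative assumptions:

import paddle
from ppocr.modeling.heads.rec_ctc_head import CTCHead  # assumed module path

# Original single-FC behavior: mid_channels left as None.
head = CTCHead(in_channels=96, out_channels=6625)

# New two-FC variant: 96 -> 64 -> 6625.
head_mid = CTCHead(in_channels=96, out_channels=6625, mid_channels=64)

x = paddle.rand([1, 80, 96])  # [batch, sequence length, channels]
head_mid.eval()               # self.training is False, so softmax is applied
probs = head_mid(x)
print(probs.shape)            # [1, 80, 6625]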
@@ -250,7 +250,8 @@ class SRNHead(nn.Layer):
         self.gsrm.wrap_encoder1.prepare_decoder.emb0 = self.gsrm.wrap_encoder0.prepare_decoder.emb0

-    def forward(self, inputs, others):
+    def forward(self, inputs, targets=None):
+        others = targets[-4:]
         encoder_word_pos = others[0]
         gsrm_word_pos = others[1]
         gsrm_slf_attn_bias1 = others[2]
         gsrm_slf_attn_bias2 = others[3]
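SRNHead now follows the same forward(inputs, targets=None) convention and slices its four auxiliary tensors from the tail of the shared targets list, so callers no longer pass a separate others argument. A hedged sketch of the expected layout; the shapes and the leading labels entry are illustrative assumptions:

import paddle

# Illustrative placeholders; real tensors come from the SRN data pipeline.
labels = paddle.zeros([1, 25], dtype='int64')
encoder_word_pos = paddle.zeros([1, 256, 1], dtype='int64')
gsrm_word_pos = paddle.zeros([1, 25, 1], dtype='int64')
gsrm_slf_attn_bias1 = paddle.zeros([1, 8, 25, 25])
gsrm_slf_attn_bias2 = paddle.zeros([1, 8, 25, 25])

# The head reads targets[-4:], so the four tensors must come last, in order.
targets = [labels, encoder_word_pos, gsrm_word_pos,
           gsrm_slf_attn_bias1, gsrm_slf_attn_bias2]
# predicts = srn_head(inputs, targets=targets)  # head and inputs built elsewhere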
@@ -21,7 +21,8 @@ def build_neck(config):
     from .sast_fpn import SASTFPN
     from .rnn import SequenceEncoder
     from .pg_fpn import PGFPN
-    support_dict = ['DBFPN', 'EASTFPN', 'SASTFPN', 'SequenceEncoder', 'PGFPN']
+    from .table_fpn import TableFPN
+    support_dict = ['DBFPN', 'EASTFPN', 'SASTFPN', 'SequenceEncoder', 'PGFPN', 'TableFPN']

     module_name = config.pop('name')
     assert module_name in support_dict, Exception('neck only support {}'.format(
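TableFPN is now a buildable neck. A hedged sketch of selecting it through build_neck; the import path and the in_channels value are illustrative assumptions, since TableFPN's constructor arguments are not shown in this diff:

from ppocr.modeling.necks import build_neck  # assumed module path

# build_neck pops 'name' and passes the remaining keys to the constructor.
config = {'name': 'TableFPN', 'in_channels': [96, 240, 480, 560]}  # illustrative
neck = build_neck(config)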
@@ -22,7 +22,7 @@ logger_initialized = {}


 @functools.lru_cache()
-def get_logger(name='root', log_file=None, log_level=logging.INFO):
+def get_logger(name='root', log_file=None, log_level=logging.DEBUG):
     """Initialize and get a logger by name.
     If the logger has not been initialized, this method will initialize the
     logger by adding one or two handlers, otherwise the initialized logger will
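Because get_logger's default log_level moves from logging.INFO to logging.DEBUG, callers that relied on the default will now emit debug output; pass log_level explicitly to keep the old verbosity. A short sketch, assuming the function lives at ppocr.utils.logging:

import logging
from ppocr.utils.logging import get_logger  # assumed module path

logger = get_logger()                     # DEBUG by default after this change
logger.debug("now visible by default")

quiet = get_logger(name='eval', log_level=logging.INFO)  # restore old default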