Commit d08a338c authored by Yohei Tamura's avatar Yohei Tamura
Browse files

modified: transformers/modeling_utils.py

parent 0477b307
......@@ -728,7 +728,7 @@ class SequenceSummary(nn.Module):
def __init__(self, config):
super(SequenceSummary, self).__init__()
self.summary_type = config.summary_type if hasattr(config, 'summary_use_proj') else 'last'
self.summary_type = config.summary_type if hasattr(config, 'summary_type') else 'last'
if self.summary_type == 'attn':
# We should use a standard multi-head attention module with absolute positional embedding for that.
# Cf. https://github.com/zihangdai/xlnet/blob/master/modeling.py#L253-L276
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment