Unverified commit b518aaf1 authored by calpt, committed by GitHub
Browse files

Fix GPT2Config parameters in GPT2ModelTester (#13630)

parent 300ee0c7
...@@ -141,10 +141,10 @@ class GPT2ModelTester: ...@@ -141,10 +141,10 @@ class GPT2ModelTester:
n_embd=self.hidden_size, n_embd=self.hidden_size,
n_layer=self.num_hidden_layers, n_layer=self.num_hidden_layers,
n_head=self.num_attention_heads, n_head=self.num_attention_heads,
intermediate_size=self.intermediate_size, n_inner=self.intermediate_size,
hidden_act=self.hidden_act, activation_function=self.hidden_act,
hidden_dropout_prob=self.hidden_dropout_prob, resid_pdrop=self.hidden_dropout_prob,
attention_probs_dropout_prob=self.attention_probs_dropout_prob, attn_pdrop=self.attention_probs_dropout_prob,
n_positions=self.max_position_embeddings, n_positions=self.max_position_embeddings,
n_ctx=self.max_position_embeddings, n_ctx=self.max_position_embeddings,
type_vocab_size=self.type_vocab_size, type_vocab_size=self.type_vocab_size,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment