chenpangpang/transformers

Commit a468870f
authored Dec 16, 2019 by thomwolf

refactoring generation

Parent: 07bc8efb
Showing 2 changed files with 213 additions and 227 deletions:

  transformers/configuration_utils.py    +11   -0
  transformers/modeling_utils.py        +202   -227
transformers/configuration_utils.py
@@ -57,8 +57,19 @@ class PretrainedConfig(object):
         self.torchscript = kwargs.pop('torchscript', False)  # Only used by PyTorch models
         self.use_bfloat16 = kwargs.pop('use_bfloat16', False)
         self.pruned_heads = kwargs.pop('pruned_heads', {})
+        # Is decoder is used in encoder-decoder models to differentiate encoder from decoder
         self.is_decoder = kwargs.pop('is_decoder', False)
+
+        # Parameters for sequence generation
+        self.generate_length = kwargs.pop('generate_length', 10)
+        self.generate_do_sample = kwargs.pop('generate_do_sample', False)
+        self.generate_num_beams = kwargs.pop('generate_num_beams', 1)
+        self.generate_temperature = kwargs.pop('generate_temperature', 1.0)
+        self.generate_top_k = kwargs.pop('generate_top_k', 50)
+        self.generate_top_p = kwargs.pop('generate_top_p', 0.0)
+        self.generate_repetition_penalty = kwargs.pop('generate_repetition_penalty', 1.0)
 
     def save_pretrained(self, save_directory):
         """ Save a configuration object to the directory `save_directory`, so that it
             can be re-loaded using the :func:`~transformers.PretrainedConfig.from_pretrained` class method.
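Because these values are read with kwargs.pop, any generate_* keyword passed to a config constructor (including subclasses that forward **kwargs to PretrainedConfig.__init__) is stored on the instance instead of being rejected. Below is a minimal sketch of the fallback pattern this enables, assuming per-call arguments should override the config-stored defaults; SketchModel is hypothetical and is not taken from the collapsed modeling_utils.py diff.

from types import SimpleNamespace

# Hypothetical stand-in for a model carrying a config; illustration only.
class SketchModel:
    def __init__(self, config):
        self.config = config

    def generate(self, input_ids, length=None, do_sample=None, temperature=None,
                 top_k=None, top_p=None, repetition_penalty=None):
        # Explicit call-site arguments win; the config supplies the defaults.
        length = length if length is not None else self.config.generate_length
        do_sample = do_sample if do_sample is not None else self.config.generate_do_sample
        temperature = temperature if temperature is not None else self.config.generate_temperature
        top_k = top_k if top_k is not None else self.config.generate_top_k
        top_p = top_p if top_p is not None else self.config.generate_top_p
        repetition_penalty = (repetition_penalty if repetition_penalty is not None
                              else self.config.generate_repetition_penalty)
        return length, do_sample, temperature, top_k, top_p, repetition_penalty

# Usage: defaults mirror the values set in configuration_utils.py above.
config = SimpleNamespace(generate_length=10, generate_do_sample=False,
                         generate_num_beams=1, generate_temperature=1.0,
                         generate_top_k=50, generate_top_p=0.0,
                         generate_repetition_penalty=1.0)
model = SketchModel(config)
print(model.generate(input_ids=None, temperature=0.7))  # 0.7 overrides the stored 1.0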
transformers/modeling_utils.py

(The modeling_utils.py diff, +202 / -227 lines, is collapsed in the original view and not shown.)
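Even with the modeling diff hidden, the new parameter names map onto standard sampling controls for autoregressive generation. The following is a generic top-k / nucleus (top-p) logits filter in PyTorch, shown for illustration only; it is not the code from this commit. Note that top_k=0 and top_p=0.0 disable the respective filters, consistent with the generate_top_p default of 0.0 set above.

import torch
import torch.nn.functional as F

def top_k_top_p_filtering(logits, top_k=0, top_p=0.0, filter_value=-float('inf')):
    """Mask out logits outside the top-k set and/or the top-p probability mass.

    Generic illustration of the technique these config parameters control;
    top_k=0 and top_p=0.0 leave the corresponding filter disabled.
    """
    if top_k > 0:
        top_k = min(top_k, logits.size(-1))
        # Remove every token whose logit falls below the k-th largest logit.
        threshold = torch.topk(logits, top_k)[0][..., -1, None]
        logits = logits.masked_fill(logits < threshold, filter_value)
    if top_p > 0.0:
        sorted_logits, sorted_indices = torch.sort(logits, descending=True)
        cumulative_probs = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1)
        # Drop tokens once the cumulative probability exceeds top_p, but shift
        # the mask right by one so the first token over the threshold survives.
        sorted_mask = cumulative_probs > top_p
        sorted_mask[..., 1:] = sorted_mask[..., :-1].clone()
        sorted_mask[..., 0] = False
        # Map the mask from sorted order back to vocabulary order.
        mask = sorted_mask.scatter(-1, sorted_indices, sorted_mask)
        logits = logits.masked_fill(mask, filter_value)
    return logits

# Usage: temperature-scale, filter, then sample the next token.
logits = torch.randn(1, 50257)                      # fake next-token logits
filtered = top_k_top_p_filtering(logits / 0.7, top_k=50, top_p=0.9)
next_token = torch.multinomial(F.softmax(filtered, dim=-1), num_samples=1)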