Unverified Commit 0ecdf6de authored by Lysandre Debut's avatar Lysandre Debut Committed by GitHub
Browse files

Patch training arguments issue (#13700)



* Patch training arguments issue

* Update src/transformers/training_args.py
Co-authored-by: default avatarSylvain Gugger <35901082+sgugger@users.noreply.github.com>
Co-authored-by: default avatarSylvain Gugger <35901082+sgugger@users.noreply.github.com>
parent 50c746ee
......@@ -852,6 +852,8 @@ class TrainingArguments:
del self_as_dict["per_gpu_train_batch_size"]
del self_as_dict["per_gpu_eval_batch_size"]
self_as_dict = {k: f"<{k.upper()}>" if k.endswith("_token") else v for k, v in self_as_dict.items()}
attrs_as_str = [f"{k}={v},\n" for k, v in sorted(self_as_dict.items())]
return f"{self.__class__.__name__}(\n{''.join(attrs_as_str)})"
......@@ -1161,7 +1163,8 @@ class TrainingArguments:
def to_dict(self):
"""
Serializes this instance while replace `Enum` by their values (for JSON serialization support).
Serializes this instance while replace `Enum` by their values (for JSON serialization support). It obfuscates
the token values by removing their value.
"""
d = asdict(self)
for k, v in d.items():
......@@ -1169,6 +1172,8 @@ class TrainingArguments:
d[k] = v.value
if isinstance(v, list) and len(v) > 0 and isinstance(v[0], Enum):
d[k] = [x.value for x in v]
if k.endswith("_token"):
d[k] = f"<{k.upper()}>"
return d
def to_json_string(self):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment