@@ -154,6 +154,7 @@ class OptimizerNames(ExplicitEnum):
PAGED_LION_8BIT = "paged_lion_8bit"
# TODO: `TrainingArguments` users rely on it being fully mutable. In the future see if we can narrow this to a few keys: https://github.com/huggingface/transformers/pull/25903
@dataclass
class TrainingArguments:
"""
...
...
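As context for the TODO above, here is a hypothetical sketch of what "narrowing this to a few keys" could look like: a `__setattr__` override that only rejects writes to a small guarded set of fields after `__post_init__`. The names `ExampleArguments` and `GUARDED_KEYS` are illustrative only and not part of transformers.

```python
# Hypothetical sketch of the "narrow this to a few keys" idea from the TODO above.
# `ExampleArguments` and `GUARDED_KEYS` are illustrative names, not transformers code.
from dataclasses import dataclass

GUARDED_KEYS = {"output_dir", "per_device_train_batch_size"}


@dataclass
class ExampleArguments:
    output_dir: str = "out"
    per_device_train_batch_size: int = 8
    logging_steps: int = 500

    def __post_init__(self):
        # Field assignments made by the generated __init__ have already gone through.
        self._frozen = True

    def __setattr__(self, name, value):
        # Only the guarded keys become read-only once initialization is done.
        if getattr(self, "_frozen", False) and name in GUARDED_KEYS:
            raise AttributeError(f"`{name}` cannot be modified after initialization")
        super().__setattr__(name, value)
```

With this variant, `args.logging_steps = 10` still works after construction, while `args.output_dir = "elsewhere"` raises an `AttributeError`.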
@@ -1707,16 +1708,6 @@ class TrainingArguments:
FutureWarning,
)
# Finally set the `TrainingArguments` to be immutable
self._frozen = True
def __setattr__(self, name, value):
# Once fully through the `__post_init__`, `TrainingArguments` are immutable
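Since the hunk cuts off inside `__setattr__`, here is a minimal, self-contained sketch of the freeze-after-`__post_init__` pattern it shows, assuming the usual `_frozen` guard. `FrozenPoint` is an illustrative stand-in, and the exact error message is not taken from transformers.

```python
# Minimal sketch of the freeze-after-__post_init__ pattern from the hunk above.
# `FrozenPoint` is an illustrative stand-in, not transformers code.
from dataclasses import dataclass


@dataclass
class FrozenPoint:
    x: int = 0
    y: int = 0

    def __post_init__(self):
        # Finally set the instance to be immutable
        self._frozen = True

    def __setattr__(self, name, value):
        # Once fully through __post_init__, instances are immutable
        if getattr(self, "_frozen", False):
            raise AttributeError(f"cannot assign to `{name}`: instance is frozen")
        super().__setattr__(name, value)


p = FrozenPoint(x=1, y=2)  # assignments in __init__ run before _frozen exists, so they succeed
try:
    p.x = 5                # rejected: the instance is frozen after __post_init__
except AttributeError as e:
    print(e)
```

The key detail is the `getattr(self, "_frozen", False)` default: during the dataclass-generated `__init__`, fields are assigned before `_frozen` exists, so construction proceeds normally; every assignment after `__post_init__` raises.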