Unverified Commit c5e29d43 authored by Max Ryabinin's avatar Max Ryabinin Committed by GitHub
Browse files

Fix typing annotations for FSDP and DeepSpeed in TrainingArguments (#24549)

* Fix typing annotations for FSDP and DeepSpeed in TrainingArguments

* Change dict to Dict
parent daccde14
......@@ -976,12 +976,12 @@ class TrainingArguments:
)
},
)
fsdp_config: Optional[str] = field(
fsdp_config: Optional[Union[str, Dict]] = field(
default=None,
metadata={
"help": (
"Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a"
"fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
"Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a"
"fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
)
},
)
......@@ -994,11 +994,11 @@ class TrainingArguments:
)
},
)
deepspeed: Optional[str] = field(
deepspeed: Optional[Union[str, Dict]] = field(
default=None,
metadata={
"help": (
"Enable deepspeed and pass the path to deepspeed json config file (e.g. ds_config.json) or an already"
"Enable deepspeed and pass the path to deepspeed json config file (e.g. `ds_config.json`) or an already"
" loaded json file as a dict"
)
},
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment