1 parent 0105924 commit 9a04389
src/transformers/training_args.py
@@ -2742,7 +2742,7 @@ def _process_fsdp_args(self):
         fsdp_plugin_args["transformer_cls_names_to_wrap"] = ",".join(
             self.fsdp_config["transformer_layer_cls_to_wrap"]
         )
-        fsdp_plugin_args["fsdp_version"] = self.fsdp_config.get("fsdp_version", 1)
+        fsdp_plugin_args["version"] = self.fsdp_config.get("version", 1)
         prefetch_policy = self.fsdp_config.get("backward_prefetch", "NO_PREFETCH")
         fsdp_plugin_args["backward_prefetch"] = prefetch_policy.upper()
         fsdp_plugin_args["forward_prefetch"] = str_to_bool(str(self.fsdp_config.get("forward_prefetch", "false")).lower())