1 parent b6a9f0b, commit 61c0e74
src/transformers/training_args.py
@@ -2773,7 +2773,6 @@ def _process_fsdp_args(self):
2773
# to unexpected behaviour during training, thus throwing error here to prevent it.
2774
raise ValueError('`sync_module_states` must be `"True"` if `cpu_ram_efficient_loading` is `"True"`')
2775
2776
-
2777
# we need to set the env here as otherwise we get a warning in accelerate + we need to set it for transformers
2778
fsdp_plugin_args["cpu_ram_efficient_loading"] = str_to_bool(cpu_ram_efficient_loading)
2779
os.environ["FSDP_CPU_RAM_EFFICIENT_LOADING"] = cpu_ram_efficient_loading
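
For context, the validation touched above means that `cpu_ram_efficient_loading` can only be used together with `sync_module_states`. Below is a minimal sketch (not part of the commit) of a TrainingArguments FSDP configuration that satisfies this check; the output directory and FSDP strategy values are illustrative, and a distributed launch (e.g. via accelerate launch or torchrun) is assumed.

from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="out",                       # hypothetical output directory
    fsdp="full_shard auto_wrap",            # enable FSDP sharding
    fsdp_config={
        "cpu_ram_efficient_loading": True,  # load weights on rank 0 only, then broadcast
        "sync_module_states": True,         # must be True when cpu_ram_efficient_loading is True,
                                            # otherwise _process_fsdp_args raises the ValueError above
    },
)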