1 parent 68c8800 commit 7a0c966
src/transformers/trainer.py
@@ -2338,9 +2338,7 @@ def _inner_training_loop(
 
         if self.is_fsdp_enabled:
             self.model = self.model_wrapped = model
-
-            # Fix `got mixed torch.Tensor and DTensor` error in model.generate() for FSDP2 with LoRA
-            if is_fsdp2:
-                dist.fsdp.register_fsdp_forward_method(self.model, "generate")
+            # Fix `got mixed torch.Tensor and DTensor` error in model.generate() for FSDP2 with LoRA
+            dist.fsdp.register_fsdp_forward_method(self.model, "generate")
 
         # for the rest of this function `model` is the outside model, whether it was wrapped or not
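For context, a minimal sketch of what the registered call does: FSDP2 (`fully_shard`) only attaches its all-gather hooks to `forward`, so invoking `model.generate()` on a sharded model can mix sharded DTensor parameters with plain tensors and raise the error named in the comment. The snippet below is illustrative and not part of this commit; it assumes a PyTorch version that exports `fully_shard` and `register_fsdp_forward_method` from `torch.distributed.fsdp`, and the checkpoint and script name are placeholders.

# fsdp2_generate_sketch.py -- run with: torchrun --nproc_per_node=2 fsdp2_generate_sketch.py
import torch
import torch.distributed as dist
from torch.distributed.fsdp import fully_shard, register_fsdp_forward_method
from transformers import AutoModelForCausalLM, AutoTokenizer

dist.init_process_group("nccl")
torch.cuda.set_device(dist.get_rank() % torch.cuda.device_count())

# Hypothetical small checkpoint, chosen only to keep the sketch short.
model = AutoModelForCausalLM.from_pretrained("gpt2").cuda()
fully_shard(model)  # FSDP2: parameters become sharded DTensors

# The same call the Trainer makes above: register `generate` so FSDP2
# all-gathers parameters around it, as it does for `forward`, avoiding
# the mixed torch.Tensor / DTensor failure during generation.
register_fsdp_forward_method(model, "generate")

tokenizer = AutoTokenizer.from_pretrained("gpt2")
inputs = tokenizer("Hello", return_tensors="pt").to("cuda")
output_ids = model.generate(**inputs, max_new_tokens=8)
if dist.get_rank() == 0:
    print(tokenizer.decode(output_ids[0], skip_special_tokens=True))

dist.destroy_process_group()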