Update trainer.py
wenxindongwork committed Aug 12, 2024
1 parent b7ea171 commit 5d3600d
Showing 1 changed file with 1 addition and 2 deletions.
src/transformers/trainer.py — 3 changes: 1 addition & 2 deletions

@@ -2313,8 +2313,7 @@ def _inner_training_loop(
                     ):
                         # the `or` condition of `is_last_step_and_steps_less_than_grad_acc` is not covered
                         # in accelerate. So, explicitly enable sync gradients to True in that case.
-                        if is_last_step_and_steps_less_than_grad_acc:
-                            self.accelerator.gradient_state._set_sync_gradients(True)
+                        self.accelerator.gradient_state._set_sync_gradients(True)

                         # Gradient clipping
                         if args.max_grad_norm is not None and args.max_grad_norm > 0:
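For context on what the changed line controls, here is a minimal standalone sketch of a gradient-accumulation loop that forces Accelerate's sync flag on before clipping and stepping, mirroring the now-unconditional call in the diff. This is not the Trainer code itself: the toy model, batch count, and accumulation window are assumptions for illustration; only `Accelerator`, `prepare`, `backward`, `clip_grad_norm_`, and `gradient_state._set_sync_gradients` (the method used in the diff) are real Accelerate APIs.

import torch
from accelerate import Accelerator

accelerator = Accelerator()
model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
model, optimizer = accelerator.prepare(model, optimizer)

grad_acc_steps = 4                               # assumed accumulation window
batches = [torch.randn(8, 4) for _ in range(6)]  # 6 % 4 != 0, so the last window is short

for step, batch in enumerate(batches):
    batch = batch.to(accelerator.device)
    loss = model(batch).sum() / grad_acc_steps
    accelerator.backward(loss)

    is_step_boundary = (step + 1) % grad_acc_steps == 0
    is_last_short_step = (step + 1) == len(batches) and len(batches) % grad_acc_steps != 0

    if is_step_boundary or is_last_short_step:
        # Mirrors the change above: force gradient sync whenever a step is taken,
        # not only for the short final window. (In this single-process sketch the
        # flag is already True by default; the call shows the explicit override.)
        accelerator.gradient_state._set_sync_gradients(True)
        accelerator.clip_grad_norm_(model.parameters(), max_norm=1.0)
        optimizer.step()
        optimizer.zero_grad()

The net effect of the commit is that gradient sync is forced on in every branch that takes an optimizer step, not only when the final accumulation window is shorter than gradient_accumulation_steps, so the gradient clipping and optimizer step that follow in the context lines always run with the sync flag set.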
