fix: use mtime by default in Trainer._rotate_checkpoints with automatic fallback #37260

Open · wants to merge 1 commit into base: main
15 changes: 12 additions & 3 deletions src/transformers/trainer.py
@@ -3227,9 +3227,8 @@ def _save_checkpoint(self, model, trial):
 
         # Maybe delete some older checkpoints.
         if self.args.should_save:
-            # Solely rely on the numerical checkpoint id for rotation.
-            # mtime is not reliable, especially on some FUSE filesystems in cloud environments.
-            self._rotate_checkpoints(use_mtime=False, output_dir=run_dir)
+            # Use mtime by default; filesystems with unreliable mtime are detected in `_sorted_checkpoints`.
+            self._rotate_checkpoints(use_mtime=True, output_dir=run_dir)
 
     def _save_rng_state(self, output_dir):
         # Save RNG state in non-distributed training
@@ -4042,7 +4041,17 @@ def _sorted_checkpoints(
                     ordering_and_checkpoint_path.append((int(regex_match.groups()[0]), path))
 
         checkpoints_sorted = sorted(ordering_and_checkpoint_path)
+        # mtime is not reliable on all filesystems, especially on some FUSE filesystems in cloud environments,
+        # so we check whether the reported mtimes look fake and fall back to numerical ordering if needed
+        if use_mtime and len(ordering_and_checkpoint_path) > 1:
+            mtime_diff = checkpoints_sorted[-1][0] - checkpoints_sorted[0][0]
+            if mtime_diff < 1.0:  # a spread of under one second is almost impossible when mtime works correctly
+                warnings.warn("mtime may not be reliable on this filesystem, falling back to numerical ordering")
+                return self._sorted_checkpoints(
+                    use_mtime=False, output_dir=output_dir, checkpoint_prefix=checkpoint_prefix
+                )
        checkpoints_sorted = [checkpoint[1] for checkpoint in checkpoints_sorted]
 
         # Make sure we don't delete the best model.
         if (
             self.state.best_model_checkpoint is not None
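
For reference, here is a minimal standalone sketch of the same detect-and-fall-back idea outside of Trainer. The module-level function name `sorted_checkpoints` and its signature are assumptions made for illustration, as is the `checkpoint-<step>` directory layout (borrowed from transformers' conventions); only the one-second heuristic and the recursion into numerical ordering mirror the diff above.

    import os
    import re
    import warnings
    from glob import glob


    def sorted_checkpoints(output_dir, checkpoint_prefix="checkpoint", use_mtime=True):
        """Hypothetical standalone variant of Trainer._sorted_checkpoints with the mtime fallback."""
        glob_checkpoints = [
            path
            for path in glob(os.path.join(output_dir, f"{checkpoint_prefix}-*"))
            if os.path.isdir(path)
        ]

        ordering_and_checkpoint_path = []
        for path in glob_checkpoints:
            if use_mtime:
                ordering_and_checkpoint_path.append((os.path.getmtime(path), path))
            else:
                regex_match = re.match(f".*{checkpoint_prefix}-([0-9]+)", path)
                if regex_match is not None:
                    ordering_and_checkpoint_path.append((int(regex_match.groups()[0]), path))

        checkpoints_sorted = sorted(ordering_and_checkpoint_path)
        # If every checkpoint reports (nearly) the same mtime, the filesystem is probably
        # returning fake timestamps (seen on some FUSE mounts), so redo the sort using the
        # numerical step id embedded in the directory name instead.
        if use_mtime and len(checkpoints_sorted) > 1:
            if checkpoints_sorted[-1][0] - checkpoints_sorted[0][0] < 1.0:
                warnings.warn("mtime may not be reliable on this filesystem, falling back to numerical ordering")
                return sorted_checkpoints(output_dir, checkpoint_prefix=checkpoint_prefix, use_mtime=False)
        return [path for _, path in checkpoints_sorted]

On a healthy local disk, checkpoints saved minutes apart keep their true chronological order; on a mount that stamps every directory with the same timestamp, the spread collapses below one second and the sort silently degrades to step-id ordering. Note the one-second threshold is the PR's heuristic, not a filesystem guarantee, and would misfire if two checkpoints were genuinely written within the same second.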