
fix loading sharded checkpoints from subfolder #8798

Merged · 4 commits · Jul 6, 2024

Changes from all commits
src/diffusers/models/model_loading_utils.py (1 addition, 1 deletion)

@@ -221,7 +221,7 @@ def _fetch_index_file(
         local_files_only=local_files_only,
         token=token,
         revision=revision,
-        subfolder=subfolder,
+        subfolder=None,
Member commented:

Why should this be None, though?

Member replied:

That is because `subfolder` is already determined here:

    index_file_in_repo = Path(
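A minimal sketch of the double prefix this avoids, assuming the index path is built roughly as the quoted line suggests (names and paths here are illustrative, not the actual diffusers internals):

    from pathlib import Path

    subfolder = "unet"
    index_name = "diffusion_pytorch_model.safetensors.index.json"

    # The index filename handed to the download helper already carries the
    # subfolder prefix.
    index_file_in_repo = Path(subfolder or "", index_name)
    print(index_file_in_repo)  # unet/diffusion_pytorch_model.safetensors.index.json

    # If the helper also received subfolder="unet", it would effectively look
    # up "unet/unet/..." in the repo and miss the file, hence subfolder=None.
    print(Path(subfolder, index_file_in_repo))  # unet/unet/diffusion_pytorch_model.safetensors.index.json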

         user_agent=user_agent,
         commit_hash=commit_hash,
     )
src/diffusers/utils/hub_utils.py (6 additions, 1 deletion)

@@ -455,10 +455,13 @@ def _get_checkpoint_shard_files(

     # At this stage pretrained_model_name_or_path is a model identifier on the Hub
     allow_patterns = original_shard_filenames
+    if subfolder is not None:
+        allow_patterns = [os.path.join(subfolder, p) for p in allow_patterns]

     ignore_patterns = ["*.json", "*.md"]
     if not local_files_only:
         # `model_info` call must be guarded with the above condition.
-        model_files_info = model_info(pretrained_model_name_or_path)
+        model_files_info = model_info(pretrained_model_name_or_path, revision=revision)
         for shard_file in original_shard_filenames:
             shard_file_present = any(shard_file in k.rfilename for k in model_files_info.siblings)
             if not shard_file_present:

@@ -481,6 +484,8 @@ def _get_checkpoint_shard_files(
         ignore_patterns=ignore_patterns,
         user_agent=user_agent,
     )
+    if subfolder is not None:
+        cached_folder = os.path.join(cached_folder, subfolder)

     # We have already dealt with RepositoryNotFoundError and RevisionNotFoundError when getting the index, so
     # we don't have to catch them here. We have also dealt with EntryNotFoundError.
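Taken together, the two hunks make the subfolder handling symmetric: shard download patterns are prefixed with the subfolder before fetching, and the returned cache folder is pointed back inside that subfolder. A hedged sketch of that flow (the helper name and cache path are illustrative, not the actual diffusers API):

    import os

    def resolve_shard_paths(cached_root, subfolder, original_shard_filenames):
        # Shards live under the subfolder in the repo, so the download
        # patterns must be prefixed with it ...
        allow_patterns = list(original_shard_filenames)
        if subfolder is not None:
            allow_patterns = [os.path.join(subfolder, p) for p in allow_patterns]
        # ... and the caller expects the folder that directly contains the
        # shards, so the subfolder is joined back onto the snapshot root.
        cached_folder = cached_root
        if subfolder is not None:
            cached_folder = os.path.join(cached_folder, subfolder)
        return allow_patterns, cached_folder

    patterns, folder = resolve_shard_paths(
        "/cache/snapshots/abc123",
        "unet",
        ["diffusion_pytorch_model-00001-of-00002.safetensors"],
    )
    print(patterns)  # ['unet/diffusion_pytorch_model-00001-of-00002.safetensors']
    print(folder)    # /cache/snapshots/abc123/unet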
tests/models/unets/test_models_unet_2d_condition.py (12 additions)

@@ -1045,6 +1045,18 @@ def test_load_sharded_checkpoint_from_hub(self):
         assert loaded_model
         assert new_output.sample.shape == (4, 4, 16, 16)

+    @require_torch_gpu
+    def test_load_sharded_checkpoint_from_hub_subfolder(self):
+        _, inputs_dict = self.prepare_init_args_and_inputs_for_common()
+        loaded_model = self.model_class.from_pretrained(
+            "hf-internal-testing/unet2d-sharded-dummy-subfolder", subfolder="unet"
+        )
+        loaded_model = loaded_model.to(torch_device)
+        new_output = loaded_model(**inputs_dict)
+
+        assert loaded_model
+        assert new_output.sample.shape == (4, 4, 16, 16)
+
     @require_torch_gpu
     def test_load_sharded_checkpoint_from_hub_local(self):
         _, inputs_dict = self.prepare_init_args_and_inputs_for_common()
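End to end, the behavior the new test pins down looks like this from user code; a minimal sketch mirroring the test above (device placement optional):

    import torch
    from diffusers import UNet2DConditionModel

    # Load a sharded checkpoint that lives under the "unet/" subfolder of the repo.
    model = UNet2DConditionModel.from_pretrained(
        "hf-internal-testing/unet2d-sharded-dummy-subfolder", subfolder="unet"
    )
    model = model.to("cuda" if torch.cuda.is_available() else "cpu")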