From c301735fdf9038b56c184853743ca94d1cbd1023 Mon Sep 17 00:00:00 2001
From: brandonrising
Date: Wed, 3 Apr 2024 15:13:25 -0400
Subject: [PATCH] Update probe to always use cpu for loading models

---
 invokeai/backend/model_manager/probe.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/invokeai/backend/model_manager/probe.py b/invokeai/backend/model_manager/probe.py
index 7aa650afaa4..bf21a7fe7bb 100644
--- a/invokeai/backend/model_manager/probe.py
+++ b/invokeai/backend/model_manager/probe.py
@@ -324,7 +324,7 @@ def _scan_and_load_checkpoint(cls, model_path: Path) -> CkptType:
         with SilenceWarnings():
             if model_path.suffix.endswith((".ckpt", ".pt", ".pth", ".bin")):
                 cls._scan_model(model_path.name, model_path)
-                model = torch.load(model_path)
+                model = torch.load(model_path, map_location="cpu")
                 assert isinstance(model, dict)
                 return model
             else:
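
Reviewer note (not part of the patch): a minimal sketch of why map_location="cpu"
matters when probing a checkpoint, assuming the file was saved from a CUDA
process; the checkpoint path below is hypothetical.

    import torch

    ckpt_path = "example_model.ckpt"  # hypothetical checkpoint path

    # Without map_location, torch.load restores each tensor onto the device it
    # was saved from (e.g. "cuda:0"), which allocates VRAM or fails outright on
    # a CPU-only host. Probing only needs to inspect keys and shapes, so forcing
    # everything onto the CPU is both safer and cheaper.
    state = torch.load(ckpt_path, map_location="cpu")
    assert isinstance(state, dict)
    print(sorted(state.keys())[:5])  # peek at a few keys to identify the model type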