fix: undo lora cache preserve fix attempt
tazlin committed Aug 2, 2023
1 parent f5f9cd9 commit e8bde6a
Showing 2 changed files with 1 addition and 21 deletions.
bridge_stable_diffusion.py (0 additions, 20 deletions)

@@ -45,26 +45,6 @@ def check_for_old_dir():
         else:
             print("Existing custom models left in their previous location.")
 
-    possible_lora_paths = [
-        "conda/envs/windows/Lib/site-packages/hordelib/model_database/lora.json",
-        "conda/envs/linux/Lib/site-packages/hordelib/model_database/lora.json",
-    ]
-    for path in possible_lora_paths:
-        if os.path.exists(path):
-            print("Old lora.json file exists.")
-            print(
-                f"Correct location is: {os.environ['AIWORKER_CACHE_HOME']}/horde_model_reference/legacy/lora.json",
-            )
-            answer = input("Do you want to move it to the correct location [Y/N]? ")
-            if answer.lower() == "y":
-                import shutil
-
-                shutil.move(
-                    path,
-                    os.environ["AIWORKER_CACHE_HOME"] + "/horde_model_reference/legacy/lora.json",
-                )
-                print("lora.json file has been moved to the correct location.")
-            exit()


 def main():
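
For context, the deleted block prompted the user to relocate a stale lora.json out of hordelib's site-packages directory and into the worker cache. The sketch below shows the same relocation step (without the final exit()), assuming AIWORKER_CACHE_HOME is set in the environment; os.path.join replaces the string concatenation used in the removed code. It is only an illustration, not part of this commit.

import os
import shutil

# Locations where older hordelib installs may have left a stale lora.json.
possible_lora_paths = [
    "conda/envs/windows/Lib/site-packages/hordelib/model_database/lora.json",
    "conda/envs/linux/Lib/site-packages/hordelib/model_database/lora.json",
]

# Destination inside the worker cache (assumes AIWORKER_CACHE_HOME is set).
target = os.path.join(
    os.environ["AIWORKER_CACHE_HOME"], "horde_model_reference", "legacy", "lora.json"
)

for path in possible_lora_paths:
    if os.path.exists(path):
        print(f"Old lora.json found at {path}; correct location is {target}")
        if input("Do you want to move it to the correct location [Y/N]? ").lower() == "y":
            shutil.move(path, target)
            print("lora.json file has been moved to the correct location.")
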
requirements.txt (1 addition, 1 deletion)

@@ -2,7 +2,7 @@
 torch # We have to place it here otherwise it installs the CPU version
 pydantic==1.10.12
 horde_model_reference~=0.2.2
-hordelib~=1.6.4
+hordelib==1.6.2
 gradio
 pyyaml
 unidecode
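
The reverted pin also changes specifier style: ~=1.6.4 is a compatible-release specifier that accepts any 1.6.x release at or above 1.6.4, while ==1.6.2 pins hordelib to exactly that version. A small sketch using the packaging library (used here only for illustration; it is not part of this repository's requirements) shows the difference:

from packaging.specifiers import SpecifierSet

compatible = SpecifierSet("~=1.6.4")  # equivalent to >=1.6.4, ==1.6.*
exact = SpecifierSet("==1.6.2")       # exactly 1.6.2

print(compatible.contains("1.6.5"))   # True
print(compatible.contains("1.6.2"))   # False: below 1.6.4
print(compatible.contains("1.7.0"))   # False: outside the 1.6.* series
print(exact.contains("1.6.2"))        # True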
