Unverified · Commit 73f78120 · authored by AUTOMATIC1111 · committed by GitHub

Merge pull request #15443 from Satariall/add-hf_endpoint-variable

Use HF_ENDPOINT variable for HuggingFace domain with default
@@ -8,6 +8,7 @@ import re
 import safetensors.torch
 from omegaconf import OmegaConf, ListConfig
 from os import mkdir
+from os import getenv
 from urllib import request
 import ldm.modules.midas as midas
@@ -151,7 +152,8 @@ def list_models():
     if shared.cmd_opts.no_download_sd_model or cmd_ckpt != shared.sd_model_file or os.path.exists(cmd_ckpt):
         model_url = None
     else:
-        model_url = "https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.safetensors"
+        hugging_host = getenv('HF_ENDPOINT', 'https://huggingface.co')
+        model_url = f"{hugging_host}/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.safetensors"

     model_list = modelloader.load_models(model_path=model_path, model_url=model_url, command_path=shared.cmd_opts.ckpt_dir, ext_filter=[".ckpt", ".safetensors"], download_name="v1-5-pruned-emaonly.safetensors", ext_blacklist=[".vae.ckpt", ".vae.safetensors"])
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Want to comment? Please register.