Compare commits

..

8 Commits

Author SHA1 Message Date
w-e-w 842dd5ea75 option: reduce connecting to huggingface
for assets if local cache is available
note: enabling this will prevent the assets from being updated
2024-07-30 20:32:02 +09:00
AUTOMATIC1111 48239090f1 Merge branch 'master' into dev 2024-07-27 15:50:26 +03:00
AUTOMATIC1111 82a973c043 changelog 2024-07-27 15:49:39 +03:00
AUTOMATIC1111 1d7e9eca09 Merge pull request #16275 from AUTOMATIC1111/fix-image-upscale-on-cpu
fix image upscale on cpu
2024-07-27 15:48:22 +03:00
AUTOMATIC1111 850e14923e Merge pull request #16275 from AUTOMATIC1111/fix-image-upscale-on-cpu
fix image upscale on cpu
2024-07-27 15:47:49 +03:00
w-e-w 8e0881d9ab fix image upscale on cpu
for some reason upscale using cpu will fail with
RuntimeError: Inplace update to inference tensor outside InferenceMode
switch from no_grad to inference_mode seems to have fixed it
2024-07-27 21:28:10 +09:00
AUTOMATIC1111 834297b13d Merge branch 'master' into dev 2024-07-27 07:09:08 +03:00
AUTOMATIC1111 c19d044364 Merge branch 'release_candidate' 2024-07-27 06:53:05 +03:00
5 changed files with 50 additions and 1 deletions
+6
View File
@@ -1,3 +1,9 @@
## 1.10.1
### Bug Fixes:
* fix image upscale on cpu ([#16275](https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/16275))
## 1.10.0
### Features:
+1
View File
@@ -14,6 +14,7 @@ def imports():
import torch # noqa: F401
startup_timer.record("import torch")
from modules import patch_hf_hub_download # noqa: F401
import pytorch_lightning # noqa: F401
startup_timer.record("import torch")
warnings.filterwarnings(action="ignore", category=DeprecationWarning, module="pytorch_lightning")
+41
View File
@@ -0,0 +1,41 @@
from modules.patches import patch
from modules.errors import report
from inspect import signature
from functools import wraps

try:
    from huggingface_hub.utils import LocalEntryNotFoundError
    from huggingface_hub import file_download

    def try_local_files_only(func):
        """Wrap *func* so it is first attempted with ``local_files_only=True``.

        When the user option ``hd_dl_local_first`` is enabled and the caller did
        not already request local files, the wrapped call tries the local
        Hugging Face cache first and only falls back to a normal (network)
        call on a cache miss. Any unexpected failure is reported and the
        original call is performed unchanged.
        """
        # Only callables exposing a keyword-only 'local_files_only' parameter
        # can be patched safely; reject anything else up front.
        param = signature(func).parameters.get('local_files_only', None)
        if param and param.kind != param.KEYWORD_ONLY:
            raise ValueError(f'{func.__name__} does not have keyword-only parameter "local_files_only"')

        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                # Imported lazily: shared options may not exist at import time.
                from modules.shared import opts
                offline_first = not kwargs.get('local_files_only') and opts.hd_dl_local_first
            except Exception:
                report('Error in try_local_files_only - skip try_local_files_only', exc_info=True)
                offline_first = False
            if offline_first:
                try:
                    # Force a cache-only lookup; keep the caller's kwargs intact.
                    return func(*args, **{**kwargs, 'local_files_only': True})
                except LocalEntryNotFoundError:
                    # Asset missing from the local cache - fall through to a
                    # regular call that may hit the network.
                    pass
                except Exception:
                    report('Unexpected exception in try_local_files_only - retry without patch', exc_info=True)
            return func(*args, **kwargs)

        return wrapper

    try:
        patch(__name__, file_download, 'hf_hub_download', try_local_files_only(file_download.hf_hub_download))
    except RuntimeError:
        pass  # already patched
except Exception:
    report('Error patching hf_hub_download', exc_info=True)
+1
View File
@@ -128,6 +128,7 @@ options_templates.update(options_section(('system', "System", "system"), {
"disable_mmap_load_safetensors": OptionInfo(False, "Disable memmapping for loading .safetensors files.").info("fixes very slow loading speed in some cases"),
"hide_ldm_prints": OptionInfo(True, "Prevent Stability-AI's ldm/sgm modules from printing noise to console."),
"dump_stacks_on_signal": OptionInfo(False, "Print stack traces before exiting the program with ctrl+c."),
"hd_dl_local_first": OptionInfo(False, "Prevent connecting to huggingface for assets if cache is available").info('this will also prevent assets from being updated'),
}))
options_templates.update(options_section(('profiler', "Profiler", "system"), {
+1 -1
View File
@@ -41,7 +41,7 @@ def upscale_pil_patch(model, img: Image.Image) -> Image.Image:
"""
param = torch_utils.get_param(model)
with torch.no_grad():
with torch.inference_mode():
tensor = pil_image_to_torch_bgr(img).unsqueeze(0) # add batch dimension
tensor = tensor.to(device=param.device, dtype=param.dtype)
with devices.without_autocast():