Compare commits

..

11 Commits

Author SHA1 Message Date
w-e-w 661322707f set min brush size to 1 2024-11-23 06:59:28 +09:00
w-e-w 1b9dea7d90 apply brush size limit early 2024-11-22 11:30:19 +09:00
w-e-w 7cf80a70d9 allow brush size up to 1/2 diagonal image 2024-11-22 11:00:45 +09:00
w-e-w 2eef345743 reduce complexity
remove the overly complex option of radius / area brush size change mode
2024-11-22 11:00:45 +09:00
w-e-w 3a1497aaf1 eslint 2024-11-20 06:09:01 +09:00
w-e-w 96eaca6153 make "Adjust brush size by area" Default behavior 2024-11-20 05:56:10 +09:00
w-e-w dd237f2541 make Undo Clear hotkey disableable 2024-11-20 05:40:01 +09:00
w-e-w c9f8953200 Canvas: Clear hotkey 2024-11-20 05:31:27 +09:00
w-e-w e009f586a2 limit the minimum delta to 1 2024-11-20 05:17:22 +09:00
w-e-w 356339eff2 Canvas: Undo hotkey 2024-11-20 04:46:26 +09:00
w-e-w 4e808fbef4 Canvas: Adjust brush size by area 2024-11-20 04:46:10 +09:00
7 changed files with 59 additions and 94 deletions
@@ -226,6 +226,8 @@ onUiLoaded(async() => {
canvas_show_tooltip: true,
canvas_auto_expand: true,
canvas_blur_prompt: false,
canvas_hotkey_undo: "KeyZ",
canvas_hotkey_clear: "KeyC",
};
const functionMap = {
@@ -236,7 +238,9 @@ onUiLoaded(async() => {
"Moving canvas": "canvas_hotkey_move",
"Fullscreen": "canvas_hotkey_fullscreen",
"Reset Zoom": "canvas_hotkey_reset",
"Overlap": "canvas_hotkey_overlap"
"Overlap": "canvas_hotkey_overlap",
"Undo": "canvas_hotkey_undo",
"Clear": "canvas_hotkey_clear"
};
// Loading the configuration from opts
@@ -321,6 +325,8 @@ onUiLoaded(async() => {
action: "Adjust brush size",
keySuffix: " + wheel"
},
{configKey: "canvas_hotkey_undo", action: "Undo brush stroke"},
{configKey: "canvas_hotkey_clear", action: "Clear canvas"},
{configKey: "canvas_hotkey_reset", action: "Reset zoom"},
{
configKey: "canvas_hotkey_fullscreen",
@@ -464,22 +470,45 @@ onUiLoaded(async() => {
gradioApp().querySelector(
`${elemId} button[aria-label="Use brush"]`
);
if (input) {
input.click();
if (!withoutValue) {
const maxValue =
parseFloat(input.getAttribute("max")) || 100;
const changeAmount = maxValue * (percentage / 100);
const newValue =
parseFloat(input.value) +
(deltaY > 0 ? -changeAmount : changeAmount);
input.value = Math.min(Math.max(newValue, 0), maxValue);
const maxValue = parseFloat(input.getAttribute("max")) || 100;
const minValue = parseFloat(input.getAttribute("min")) || 1;
// allow brush size up to 1/2 diagonal of the image, beyond gradio's arbitrary limit
const canvasImg = gradioApp().querySelector(`${elemId} img`);
if (canvasImg) {
const maxDiameter = Math.sqrt(canvasImg.naturalWidth ** 2 + canvasImg.naturalHeight ** 2) / 2;
if (maxDiameter > maxValue) {
input.setAttribute("max", maxDiameter);
}
if (minValue > 1) {
input.setAttribute("min", '1');
}
}
const brush_factor = deltaY > 0 ? 1 - opts.canvas_hotkey_brush_factor : 1 + opts.canvas_hotkey_brush_factor;
const currentRadius = parseFloat(input.value);
let delta = Math.sqrt(currentRadius ** 2 * brush_factor) - currentRadius;
// minimum brush size step of 1
if (Math.abs(delta) < 1) {
delta = deltaY > 0 ? -1 : 1;
}
const newValue = currentRadius + delta;
input.value = Math.max(newValue, 1);
input.dispatchEvent(new Event("change"));
}
}
}
// Undo the most recent brush stroke by triggering gradio's own Undo button.
function undoBrushStroke() {
    const undoButton = gradioApp().querySelector(`${elemId} button[aria-label='Undo']`);
    undoButton.click();
}
// Clear the canvas by triggering gradio's own Clear button.
function clearCanvas() {
    const clearButton = gradioApp().querySelector(`${elemId} button[aria-label='Clear']`);
    clearButton.click();
}
// Reset zoom when uploading a new image
const fileInput = gradioApp().querySelector(
`${elemId} input[type="file"][accept="image/*"].svelte-116rqfv`
@@ -699,7 +728,9 @@ onUiLoaded(async() => {
[hotkeysConfig.canvas_hotkey_overlap]: toggleOverlap,
[hotkeysConfig.canvas_hotkey_fullscreen]: fitToScreen,
[hotkeysConfig.canvas_hotkey_shrink_brush]: () => adjustBrushSize(elemId, 10),
[hotkeysConfig.canvas_hotkey_grow_brush]: () => adjustBrushSize(elemId, -10)
[hotkeysConfig.canvas_hotkey_grow_brush]: () => adjustBrushSize(elemId, -10),
[hotkeysConfig.canvas_hotkey_undo]: undoBrushStroke,
[hotkeysConfig.canvas_hotkey_clear]: clearCanvas
};
const action = hotkeyActions[event.code];
@@ -2,16 +2,19 @@ import gradio as gr
from modules import shared
shared.options_templates.update(shared.options_section(('canvas_hotkey', "Canvas Hotkeys"), {
"canvas_hotkey_zoom": shared.OptionInfo("Alt", "Zoom canvas", gr.Radio, {"choices": ["Shift","Ctrl", "Alt"]}).info("If you choose 'Shift' you cannot scroll horizontally, 'Alt' can cause a little trouble in firefox"),
"canvas_hotkey_adjust": shared.OptionInfo("Ctrl", "Adjust brush size", gr.Radio, {"choices": ["Shift","Ctrl", "Alt"]}).info("If you choose 'Shift' you cannot scroll horizontally, 'Alt' can cause a little trouble in firefox"),
"canvas_hotkey_zoom": shared.OptionInfo("Alt", "Zoom canvas", gr.Radio, {"choices": ["Shift", "Ctrl", "Alt"]}).info("If you choose 'Shift' you cannot scroll horizontally, 'Alt' can cause a little trouble in firefox"),
"canvas_hotkey_adjust": shared.OptionInfo("Ctrl", "Adjust brush size", gr.Radio, {"choices": ["Shift", "Ctrl", "Alt"]}).info("If you choose 'Shift' you cannot scroll horizontally, 'Alt' can cause a little trouble in firefox"),
"canvas_hotkey_shrink_brush": shared.OptionInfo("Q", "Shrink the brush size"),
"canvas_hotkey_grow_brush": shared.OptionInfo("W", "Enlarge the brush size"),
"canvas_hotkey_move": shared.OptionInfo("F", "Moving the canvas").info("To work correctly in firefox, turn off 'Automatically search the page text when typing' in the browser settings"),
"canvas_hotkey_undo": shared.OptionInfo("Z", "Undo brush stroke"),
"canvas_hotkey_clear": shared.OptionInfo("C", "Clear canvas"),
"canvas_hotkey_fullscreen": shared.OptionInfo("S", "Fullscreen Mode, maximizes the picture so that it fits into the screen and stretches it to its full width "),
"canvas_hotkey_reset": shared.OptionInfo("R", "Reset zoom and canvas position"),
"canvas_hotkey_overlap": shared.OptionInfo("O", "Toggle overlap").info("Technical button, needed for testing"),
"canvas_show_tooltip": shared.OptionInfo(True, "Enable tooltip on the canvas"),
"canvas_auto_expand": shared.OptionInfo(True, "Automatically expands an image that does not fit completely in the canvas area, similar to manually pressing the S and R buttons"),
"canvas_blur_prompt": shared.OptionInfo(False, "Take the focus off the prompt when working with a canvas"),
"canvas_disabled_functions": shared.OptionInfo(["Overlap"], "Disable function that you don't use", gr.CheckboxGroup, {"choices": ["Zoom","Adjust brush size","Hotkey enlarge brush","Hotkey shrink brush","Moving canvas","Fullscreen","Reset Zoom","Overlap"]}),
"canvas_disabled_functions": shared.OptionInfo(["Overlap"], "Disable function that you don't use", gr.CheckboxGroup, {"choices": ["Zoom", "Adjust brush size", "Hotkey enlarge brush", "Hotkey shrink brush", "Undo", "Clear", "Moving canvas", "Fullscreen", "Reset Zoom", "Overlap"]}),
"canvas_hotkey_brush_factor": shared.OptionInfo(0.1, "Brush size change rate", gr.Slider, {"minimum": 0, "maximum": 1, "step": 0.01}).info('controls how much the brush size is changed when using hotkeys or scroll wheel'),
}))
@@ -4,11 +4,11 @@
// If there's a mismatch, the keyword counter turns red and if you hover on it, a tooltip tells you what's wrong.
function checkBrackets(textArea, counterElt) {
const counts = {};
textArea.value.matchAll(/(?<!\\)(?:\\\\)*?([(){}[\]])/g).forEach(bracket => {
counts[bracket[1]] = (counts[bracket[1]] || 0) + 1;
var counts = {};
(textArea.value.match(/[(){}[\]]/g) || []).forEach(bracket => {
counts[bracket] = (counts[bracket] || 0) + 1;
});
const errors = [];
var errors = [];
function checkPair(open, close, kind) {
if (counts[open] !== counts[close]) {
+2 -25
View File
@@ -187,7 +187,6 @@ class StableDiffusionProcessing:
cached_uc = [None, None]
cached_c = [None, None]
hijack_generation_params_state_list = []
comments: dict = None
sampler: sd_samplers_common.Sampler | None = field(default=None, init=False)
@@ -481,10 +480,6 @@ class StableDiffusionProcessing:
for cache in caches:
if cache[0] is not None and cached_params == cache[0]:
if len(cache) == 3:
generation_params_state, cached_params_2 = cache[2]
if cached_params == cached_params_2:
self.hijack_generation_params_state_list.extend(generation_params_state)
return cache[1]
cache = caches[0]
@@ -492,25 +487,9 @@ class StableDiffusionProcessing:
with devices.autocast():
cache[1] = function(shared.sd_model, required_prompts, steps, hires_steps, shared.opts.use_old_scheduling)
generation_params_state = model_hijack.capture_generation_params_state()
self.hijack_generation_params_state_list.extend(generation_params_state)
if len(cache) == 2:
cache.append((generation_params_state, cached_params))
else:
cache[2] = (generation_params_state, cached_params)
cache[0] = cached_params
return cache[1]
def apply_hijack_generation_params(self):
self.extra_generation_params.update(model_hijack.extra_generation_params)
for func in self.hijack_generation_params_state_list:
try:
func(self.extra_generation_params)
except Exception:
errors.report('Failed to apply hijack generation params state', exc_info=True)
self.hijack_generation_params_state_list.clear()
def setup_conds(self):
prompts = prompt_parser.SdConditioning(self.prompts, width=self.width, height=self.height)
negative_prompts = prompt_parser.SdConditioning(self.negative_prompts, width=self.width, height=self.height, is_negative_prompt=True)
@@ -523,8 +502,6 @@ class StableDiffusionProcessing:
self.uc = self.get_conds_with_caching(prompt_parser.get_learned_conditioning, negative_prompts, total_steps, [self.cached_uc], self.extra_network_data)
self.c = self.get_conds_with_caching(prompt_parser.get_multicond_learned_conditioning, prompts, total_steps, [self.cached_c], self.extra_network_data)
self.apply_hijack_generation_params()
def get_conds(self):
return self.c, self.uc
@@ -988,6 +965,8 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:
p.setup_conds()
p.extra_generation_params.update(model_hijack.extra_generation_params)
# params.txt should be saved after scripts.process_batch, since the
# infotext could be modified by that callback
# Example: a wildcard processed by process_batch sets an extra model
@@ -1534,8 +1513,6 @@ class StableDiffusionProcessingTxt2Img(StableDiffusionProcessing):
self.hr_uc = self.get_conds_with_caching(prompt_parser.get_learned_conditioning, hr_negative_prompts, self.firstpass_steps, [self.cached_hr_uc, self.cached_uc], self.hr_extra_network_data, total_steps)
self.hr_c = self.get_conds_with_caching(prompt_parser.get_multicond_learned_conditioning, hr_prompts, self.firstpass_steps, [self.cached_hr_c, self.cached_c], self.hr_extra_network_data, total_steps)
self.apply_hijack_generation_params()
def setup_conds(self):
if self.is_hr_pass:
# if we are in hr pass right now, the call is being made from the refiner, and we don't need to setup firstpass cons or switch model
-8
View File
@@ -6,7 +6,6 @@ from modules import devices, sd_hijack_optimizations, shared, script_callbacks,
from modules.hypernetworks import hypernetwork
from modules.shared import cmd_opts
from modules import sd_hijack_clip, sd_hijack_open_clip, sd_hijack_unet, sd_hijack_xlmr, xlmr, xlmr_m18
from modules.util import GenerationParamsState
import ldm.modules.attention
import ldm.modules.diffusionmodules.model
@@ -322,13 +321,6 @@ class StableDiffusionModelHijack:
self.comments = []
self.extra_generation_params = {}
def capture_generation_params_state(self):
state = []
for key in list(self.extra_generation_params):
if isinstance(self.extra_generation_params[key], GenerationParamsState):
state.append(self.extra_generation_params.pop(key))
return state
def get_prompt_lengths(self, text):
if self.clip is None:
return "-", "-"
+6 -29
View File
@@ -3,9 +3,8 @@ from collections import namedtuple
import torch
from modules import prompt_parser, devices, sd_hijack, sd_emphasis, util
from modules import prompt_parser, devices, sd_hijack, sd_emphasis
from modules.shared import opts
from modules.util import GenerationParamsState
class PromptChunk:
@@ -28,31 +27,6 @@ chunk. Those objects are found in PromptChunk.fixes and, are placed into FrozenC
are applied by sd_hijack.EmbeddingsWithFixes's forward function."""
class EmbeddingHashes(GenerationParamsState):
def __init__(self, hashes: list):
super().__init__()
self.hashes = hashes
def __call__(self, extra_generation_params):
unique_hashes = dict.fromkeys(self.hashes)
if existing_ti_hashes := extra_generation_params.get('TI hashes'):
unique_hashes.update(dict.fromkeys(existing_ti_hashes.split(', ')))
extra_generation_params['TI hashes'] = ', '.join(sorted(unique_hashes, key=util.natural_sort_key))
class EmphasisMode(GenerationParamsState):
def __init__(self, texts):
super().__init__()
if opts.emphasis != 'Original' and any(x for x in texts if '(' in x or '[' in x):
self.emphasis = opts.emphasis
else:
self.emphasis = None
def __call__(self, extra_generation_params):
if self.emphasis:
extra_generation_params['Emphasis'] = self.emphasis
class TextConditionalModel(torch.nn.Module):
def __init__(self):
super().__init__()
@@ -264,9 +238,12 @@ class TextConditionalModel(torch.nn.Module):
hashes.append(f"{name}: {shorthash}")
if hashes:
self.hijack.extra_generation_params["TI hashes"] = EmbeddingHashes(hashes)
if self.hijack.extra_generation_params.get("TI hashes"):
hashes.append(self.hijack.extra_generation_params.get("TI hashes"))
self.hijack.extra_generation_params["TI hashes"] = ", ".join(hashes)
self.hijack.extra_generation_params["Emphasis"] = EmphasisMode(texts)
if any(x for x in texts if "(" in x or "[" in x) and opts.emphasis != "Original":
self.hijack.extra_generation_params["Emphasis"] = opts.emphasis
if self.return_pooled:
return torch.hstack(zs), zs[0].pooled
-15
View File
@@ -288,18 +288,3 @@ def compare_sha256(file_path: str, hash_prefix: str) -> bool:
for chunk in iter(lambda: f.read(blksize), b""):
hash_sha256.update(chunk)
return hash_sha256.hexdigest().startswith(hash_prefix.strip().lower())
class GenerationParamsState:
    """Base class used in StableDiffusionModelHijack for assigning extra_generation_params.

    Generation params assigned using a subclass of this class work properly with
    StableDiffusionProcessing.get_conds_with_caching(): if they were assigned
    directly, the generation params would not be populated when the conds cache
    is used.

    Instances of this class are captured (see
    StableDiffusionModelHijack.capture_generation_params_state) and stored
    alongside the conds cache, then extracted and applied in
    StableDiffusionProcessing.apply_hijack_generation_params().

    To use this class, create a subclass with a __call__ method that takes
    extra_generation_params: dict as input and mutates it in place.
    Example usage: sd_hijack_clip.EmbeddingHashes, sd_hijack_clip.EmphasisMode
    """

    def __call__(self, extra_generation_params: dict):
        # Subclasses must override; the base class provides no behavior.
        raise NotImplementedError