import os

import torch
import numpy as np
from PIL import Image, ImageOps

from .utils import BIGMAX, ControlWeights, TimestepKeyframeGroup, TimestepKeyframe, get_properly_arranged_t2i_weights
from .logger import logger
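
# Contents: a directory-based image/mask loader plus legacy ControlNet and
# T2I-Adapter weight nodes (the *Deprecated classes) kept for backward
# compatibility with older workflows.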


class LoadImagesFromDirectory:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "directory": ("STRING", {"default": ""}),
            },
            "optional": {
                "image_load_cap": ("INT", {"default": 0, "min": 0, "max": BIGMAX, "step": 1}),
                "start_index": ("INT", {"default": 0, "min": 0, "max": BIGMAX, "step": 1}),
            }
        }

    RETURN_TYPES = ("IMAGE", "MASK", "INT")
    FUNCTION = "load_images"

    CATEGORY = ""

    def load_images(self, directory: str, image_load_cap: int = 0, start_index: int = 0):
        if not os.path.isdir(directory):
            raise FileNotFoundError(f"Directory '{directory}' cannot be found.")
        dir_files = os.listdir(directory)
        if len(dir_files) == 0:
            raise FileNotFoundError(f"No files in directory '{directory}'.")

        dir_files = sorted(dir_files)
        dir_files = [os.path.join(directory, x) for x in dir_files]
        # start at start_index
        dir_files = dir_files[start_index:]

        images = []
        masks = []

        limit_images = False
        if image_load_cap > 0:
            limit_images = True
        image_count = 0

        for image_path in dir_files:
            if os.path.isdir(image_path):
                continue
            if limit_images and image_count >= image_load_cap:
                break
            i = Image.open(image_path)
            i = ImageOps.exif_transpose(i)
            image = i.convert("RGB")
            # HWC uint8 -> float32 in [0, 1], with a leading batch dimension
            image = np.array(image).astype(np.float32) / 255.0
            image = torch.from_numpy(image)[None,]
            if 'A' in i.getbands():
                # mask comes from the inverted alpha channel
                mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0
                mask = 1. - torch.from_numpy(mask)
            else:
                mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu")
            images.append(image)
            masks.append(mask)
            image_count += 1

        if len(images) == 0:
            raise FileNotFoundError(f"No images could be loaded from directory '{directory}'.")

        return (torch.cat(images, dim=0), torch.stack(masks, dim=0), image_count)
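
# Usage sketch for LoadImagesFromDirectory (illustrative only; the path below is
# a placeholder):
#     images, masks, count = LoadImagesFromDirectory().load_images("/path/to/frames")
#     # images: float32 tensor of shape [B, H, W, 3] in [0, 1]
#     # masks:  inverted-alpha masks stacked to [B, H, W] (64x64 zeros when no alpha)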


class ScaledSoftUniversalWeightsDeprecated:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "base_multiplier": ("FLOAT", {"default": 0.825, "min": 0.0, "max": 1.0, "step": 0.001}, ),
                "flip_weights": ("BOOLEAN", {"default": False}),
            },
            "optional": {
                "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ),
                "cn_extras": ("CN_WEIGHTS_EXTRAS",),
                "autosize": ("ACNAUTOSIZE", {"padding": 0}),
            }
        }

    RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",)
    RETURN_NAMES = ("CN_WEIGHTS", "TK_SHORTCUT")
    FUNCTION = "load_weights"

    CATEGORY = ""

    def load_weights(self, base_multiplier, flip_weights, uncond_multiplier: float=1.0, cn_extras: dict[str]={}):
        weights = ControlWeights.universal(base_multiplier=base_multiplier, uncond_multiplier=uncond_multiplier, extras=cn_extras)
        return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights)))


class SoftControlNetWeightsDeprecated:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
"weight_00": ("FLOAT", {"default": 0.09941396206337118, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_01": ("FLOAT", {"default": 0.12050177219802567, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_02": ("FLOAT", {"default": 0.14606275417942507, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_03": ("FLOAT", {"default": 0.17704576264172736, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_04": ("FLOAT", {"default": 0.214600924414215, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_05": ("FLOAT", {"default": 0.26012233262329093, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_06": ("FLOAT", {"default": 0.3152997971191405, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_07": ("FLOAT", {"default": 0.3821815722656249, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_08": ("FLOAT", {"default": 0.4632503906249999, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_09": ("FLOAT", {"default": 0.561515625, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_10": ("FLOAT", {"default": 0.6806249999999999, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_11": ("FLOAT", {"default": 0.825, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"weight_12": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), | |
"flip_weights": ("BOOLEAN", {"default": False}), | |
}, | |
"optional": { | |
"uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ), | |
"cn_extras": ("CN_WEIGHTS_EXTRAS",), | |
"autosize": ("ACNAUTOSIZE", {"padding": 0}), | |
} | |
} | |
DEPRECATED = True | |
RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) | |
RETURN_NAMES = ("CN_WEIGHTS", "TK_SHORTCUT") | |
FUNCTION = "load_weights" | |
CATEGORY = "" | |
def load_weights(self, weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06, | |
weight_07, weight_08, weight_09, weight_10, weight_11, weight_12, flip_weights, | |
uncond_multiplier: float=1.0, cn_extras: dict[str]={}): | |
weights_output = [weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06, | |
weight_07, weight_08, weight_09, weight_10, weight_11] | |
weights_middle = [weight_12] | |
weights = ControlWeights.controlnet(weights_output=weights_output, weights_middle=weights_middle, uncond_multiplier=uncond_multiplier, extras=cn_extras) | |
return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) | |


class CustomControlNetWeightsDeprecated:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "weight_00": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_01": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_02": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_03": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_04": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_05": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_06": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_07": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_08": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_09": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_10": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_11": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_12": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "flip_weights": ("BOOLEAN", {"default": False}),
            },
            "optional": {
                "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ),
                "cn_extras": ("CN_WEIGHTS_EXTRAS",),
                "autosize": ("ACNAUTOSIZE", {"padding": 0}),
            }
        }

    DEPRECATED = True
    RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",)
    RETURN_NAMES = ("CN_WEIGHTS", "TK_SHORTCUT")
    FUNCTION = "load_weights"

    CATEGORY = ""

    def load_weights(self, weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06,
                     weight_07, weight_08, weight_09, weight_10, weight_11, weight_12, flip_weights,
                     uncond_multiplier: float=1.0, cn_extras: dict[str]={}):
        weights_output = [weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06,
                          weight_07, weight_08, weight_09, weight_10, weight_11]
        weights_middle = [weight_12]
        weights = ControlWeights.controlnet(weights_output=weights_output, weights_middle=weights_middle, uncond_multiplier=uncond_multiplier, extras=cn_extras)
        return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights)))


class SoftT2IAdapterWeightsDeprecated:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "weight_00": ("FLOAT", {"default": 0.25, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_01": ("FLOAT", {"default": 0.62, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_02": ("FLOAT", {"default": 0.825, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_03": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "flip_weights": ("BOOLEAN", {"default": False}),
            },
            "optional": {
                "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ),
                "cn_extras": ("CN_WEIGHTS_EXTRAS",),
                "autosize": ("ACNAUTOSIZE", {"padding": 0}),
            }
        }

    DEPRECATED = True
    RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",)
    RETURN_NAMES = ("CN_WEIGHTS", "TK_SHORTCUT")
    FUNCTION = "load_weights"

    CATEGORY = ""

    def load_weights(self, weight_00, weight_01, weight_02, weight_03, flip_weights,
                     uncond_multiplier: float=1.0, cn_extras: dict[str]={}):
        weights = [weight_00, weight_01, weight_02, weight_03]
        weights = get_properly_arranged_t2i_weights(weights)
        weights = ControlWeights.t2iadapter(weights_input=weights, uncond_multiplier=uncond_multiplier, extras=cn_extras)
        return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights)))


class CustomT2IAdapterWeightsDeprecated:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "weight_00": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_01": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_02": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "weight_03": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
                "flip_weights": ("BOOLEAN", {"default": False}),
            },
            "optional": {
                "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ),
                "cn_extras": ("CN_WEIGHTS_EXTRAS",),
                "autosize": ("ACNAUTOSIZE", {"padding": 0}),
            }
        }

    DEPRECATED = True
    RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",)
    RETURN_NAMES = ("CN_WEIGHTS", "TK_SHORTCUT")
    FUNCTION = "load_weights"

    CATEGORY = ""

    def load_weights(self, weight_00, weight_01, weight_02, weight_03, flip_weights,
                     uncond_multiplier: float=1.0, cn_extras: dict[str]={}):
        weights = [weight_00, weight_01, weight_02, weight_03]
        weights = get_properly_arranged_t2i_weights(weights)
        weights = ControlWeights.t2iadapter(weights_input=weights, uncond_multiplier=uncond_multiplier, extras=cn_extras)
        return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights)))
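
# Note on the *Deprecated weight nodes above: each load_weights() returns the
# ControlWeights object itself (CN_WEIGHTS) plus the same weights wrapped in a
# default TimestepKeyframeGroup (TK_SHORTCUT), so either output can be wired to
# the matching downstream input.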