lllyasviel committed
Commit • 59406a8
1 Parent(s): 796cb1f

i (#36)
Browse files
- fooocus_version.py +1 -1
- modules/async_worker.py +1 -0
- modules/default_pipeline.py +27 -4
- update_log.md +4 -0
fooocus_version.py CHANGED
@@ -1 +1 @@
-version = '1.0.20'
+version = '1.0.21'
modules/async_worker.py CHANGED
@@ -27,6 +27,7 @@ def worker():
         pipeline.refresh_base_model(base_model_name)
         pipeline.refresh_refiner_model(refiner_model_name)
         pipeline.refresh_loras(loras)
+        pipeline.clean_prompt_cond_caches()
 
         p_txt, n_txt = apply_style(style_selction, prompt, negative_prompt)
 
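The one added line calls the new pipeline.clean_prompt_cond_caches() (defined in modules/default_pipeline.py below), so every task starts with empty prompt-condition caches before its prompts are encoded. A self-contained sketch of the intended call order, using toy stand-ins rather than the real Fooocus worker internals:

# Toy sketch (hypothetical names, not the real Fooocus API): the worker resets
# the cache once per task; repeated process() calls for that task then reuse
# the encoded prompt condition instead of re-running the text encoder.

_positive_cache = None  # stands in for the four *_cache globals below


def clean_prompt_cond_caches():
    global _positive_cache
    _positive_cache = None


def encode_prompt_condition(prompt):
    print('encoding:', prompt)  # stand-in for the expensive CLIP encoder call
    return ('cond', prompt)


def process(prompt):
    global _positive_cache
    cond = encode_prompt_condition(prompt) if _positive_cache is None else _positive_cache
    _positive_cache = cond
    return cond


def handle_task(prompt, image_number):
    clean_prompt_cond_caches()   # fresh cache for this task
    for _ in range(image_number):
        process(prompt)          # encodes once, then hits the cache


handle_task('a forest', image_number=3)  # prints "encoding" only once
handle_task('a beach', image_number=2)   # re-encodes because the cache was reset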
modules/default_pipeline.py CHANGED
@@ -102,18 +102,41 @@ refresh_base_model(modules.path.default_base_model_name)
 refresh_refiner_model(modules.path.default_refiner_model_name)
 refresh_loras([(modules.path.default_lora_name, 0.5), ('None', 0.5), ('None', 0.5), ('None', 0.5), ('None', 0.5)])
 
+positive_conditions_cache = None
+negative_conditions_cache = None
+positive_conditions_refiner_cache = None
+negative_conditions_refiner_cache = None
+
+
+def clean_prompt_cond_caches():
+    global positive_conditions_cache, negative_conditions_cache, \
+        positive_conditions_refiner_cache, negative_conditions_refiner_cache
+    positive_conditions_cache = None
+    negative_conditions_cache = None
+    positive_conditions_refiner_cache = None
+    negative_conditions_refiner_cache = None
+    return
+
 
 @torch.no_grad()
 def process(positive_prompt, negative_prompt, steps, switch, width, height, image_seed, callback):
-    positive_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=positive_prompt)
-    negative_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=negative_prompt)
+    global positive_conditions_cache, negative_conditions_cache, \
+        positive_conditions_refiner_cache, negative_conditions_refiner_cache
+
+    positive_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=positive_prompt) if positive_conditions_cache is None else positive_conditions_cache
+    negative_conditions = core.encode_prompt_condition(clip=xl_base_patched.clip, prompt=negative_prompt) if negative_conditions_cache is None else negative_conditions_cache
+
+    positive_conditions_cache = positive_conditions
+    negative_conditions_cache = negative_conditions
 
     empty_latent = core.generate_empty_latent(width=width, height=height, batch_size=1)
 
     if xl_refiner is not None:
+        positive_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=positive_prompt) if positive_conditions_refiner_cache is None else positive_conditions_refiner_cache
+        negative_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=negative_prompt) if negative_conditions_refiner_cache is None else negative_conditions_refiner_cache
 
-        positive_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=positive_prompt)
-        negative_conditions_refiner = core.encode_prompt_condition(clip=xl_refiner.clip, prompt=negative_prompt)
+        positive_conditions_refiner_cache = positive_conditions_refiner
+        negative_conditions_refiner_cache = negative_conditions_refiner
 
         sampled_latent = core.ksampler_with_refiner(
             model=xl_base_patched.unet,
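A note on the design: these caches are plain "reuse whatever is stored" slots. process() never checks that the cached conditions were produced from the current prompt, so correctness relies on async_worker.py calling clean_prompt_cond_caches() at the start of every task (the change above); within one task the prompts do not change, so the reuse is safe and the text encoder runs at most once per prompt. A hypothetical alternative, shown here only for contrast and not part of this commit, would key the cache on the prompt text so a stale entry can never be returned:

# Hypothetical prompt-keyed variant (illustration only; the commit uses plain
# module-level slots that are reset externally once per task).
_cond_cache = {}


def encode_prompt_cached(prompt, encode_fn):
    # Encode `prompt` on first use, then serve later calls from the cache.
    if prompt not in _cond_cache:
        _cond_cache[prompt] = encode_fn(prompt)
    return _cond_cache[prompt]


# Example with a stand-in encoder:
cond_a = encode_prompt_cached('a forest', lambda p: ('cond', p))  # encodes
cond_b = encode_prompt_cached('a forest', lambda p: ('cond', p))  # cache hit
assert cond_a is cond_b

The trade-off is that a keyed cache grows across tasks unless it is pruned, which the explicit per-task reset in this commit avoids.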
update_log.md CHANGED
@@ -1,5 +1,9 @@
 ### 1.0.20
 
+* Speed-up text encoder
+
+### 1.0.20
+
 * Re-write UI to use async codes: (1) for faster start, and (2) for better live preview.
 * Removed opencv dependency
 * Plan to support Linux soon