AlekseyCalvin committed on
Commit
c78df80
·
verified ·
1 Parent(s): 6b847ea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -9
app.py CHANGED
@@ -17,6 +17,7 @@ from diffusers import DiffusionPipeline, FluxTransformer2DModel, FluxPipeline, A
17
  import safetensors.torch
18
  from safetensors.torch import load_file
19
  from custom_pipeline import FluxWithCFGPipeline
 
20
  import gc
21
 
22
  cache_path = path.join(path.dirname(path.abspath(__file__)), "models")
@@ -29,12 +30,27 @@ torch.backends.cuda.matmul.allow_tf32 = True
29
  dtype = torch.float16
30
  pipe = FluxWithCFGPipeline.from_pretrained(
31
  "ostris/OpenFLUX.1", torch_dtype=dtype
32
- )
33
- # pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype)
34
- pipe.load_lora_weights("ostris/OpenFLUX.1", weight_name="openflux1-v0.1.0-fast-lora.safetensors", adapter_name="fast")
35
- pipe.set_adapters("fast")
36
- pipe.fuse_lora(adapter_names=["fast"], lora_scale=1.0)
37
  pipe.to("cuda")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  torch.cuda.empty_cache()
39
 
40
  # Load LoRAs from JSON file
@@ -119,9 +135,13 @@ def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, wid
119
  # Load LoRA weights
120
  with calculateDuration(f"Loading LoRA weights for {selected_lora['title']}"):
121
  if "weights" in selected_lora:
122
- pipe.load_lora_weights(lora_path, weight_name=selected_lora["weights"])
 
 
123
  else:
124
- pipe.load_lora_weights(lora_path)
 
 
125
 
126
  # Set random seed for reproducibility
127
  with calculateDuration("Randomizing seed"):
@@ -134,6 +154,10 @@ def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, wid
134
  return image, seed
135
 
136
  run_lora.zerogpu = True
 
 
 
 
137
 
138
  css = '''
139
  #gen_btn{height: 100%}
@@ -180,8 +204,8 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as app:
180
  with gr.Accordion("Advanced Settings", open=True):
181
  with gr.Column():
182
  with gr.Row():
183
- cfg_scale = gr.Slider(label="CFG Scale", minimum=1, maximum=20, step=0.5, value=1.0)
184
- steps = gr.Slider(label="Steps", minimum=1, maximum=50, step=1, value=5)
185
 
186
  with gr.Row():
187
  width = gr.Slider(label="Width", minimum=256, maximum=1536, step=64, value=768)
 
17
  import safetensors.torch
18
  from safetensors.torch import load_file
19
  from custom_pipeline import FluxWithCFGPipeline
20
+ from transformers import CLIPModel, CLIPProcessor, CLIPConfig
21
  import gc
22
 
23
  cache_path = path.join(path.dirname(path.abspath(__file__)), "models")
 
30
  dtype = torch.float16
31
  pipe = FluxWithCFGPipeline.from_pretrained(
32
  "ostris/OpenFLUX.1", torch_dtype=dtype
33
+ ).to("cuda")
34
+ pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype).to("cuda")
35
+
 
 
36
  pipe.to("cuda")
37
+ clipmodel = 'norm'
38
+ if clipmodel == "long":
39
+ model_id = "zer0int/LongCLIP-GmP-ViT-L-14"
40
+ config = CLIPConfig.from_pretrained(model_id)
41
+ maxtokens = 77
42
+ if clipmodel == "norm":
43
+ model_id = "zer0int/CLIP-GmP-ViT-L-14"
44
+ config = CLIPConfig.from_pretrained(model_id)
45
+ maxtokens = 77
46
+ clip_model = CLIPModel.from_pretrained(model_id, torch_dtype=torch.bfloat16, config=config, ignore_mismatched_sizes=True).to("cuda")
47
+ clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", max_length=maxtokens, ignore_mismatched_sizes=True, return_tensors="pt", truncation=True)
48
+ config.text_config.max_position_embeddings = 77
49
+
50
+ pipe.tokenizer = clip_processor.tokenizer
51
+ pipe.text_encoder = clip_model.text_model
52
+ pipe.tokenizer_max_length = maxtokens
53
+ pipe.text_encoder.dtype = torch.bfloat16
54
  torch.cuda.empty_cache()
55
 
56
  # Load LoRAs from JSON file
 
135
  # Load LoRA weights
136
  with calculateDuration(f"Loading LoRA weights for {selected_lora['title']}"):
137
  if "weights" in selected_lora:
138
+ pipe.load_lora_weights(lora_path, weight_name=selected_lora["weights"], adapter_name="choice")
139
+ pipe.load_lora_weights("ostris/OpenFLUX.1", weight_name="openflux1-v0.1.0-fast-lora.safetensors", adapter_name="fast")
140
+ pipe.set_adapters(["fast", "choice"], adapter_weights=[1.0, lora_scale])
141
  else:
142
+ pipe.load_lora_weights(lora_path, adapter_name="choice")
143
+ pipe.load_lora_weights("ostris/OpenFLUX.1", weight_name="openflux1-v0.1.0-fast-lora.safetensors", adapter_name="fast")
144
+ pipe.set_adapters(["fast", "choice"], adapter_weights=[1.0, lora_scale])
145
 
146
  # Set random seed for reproducibility
147
  with calculateDuration("Randomizing seed"):
 
154
  return image, seed
155
 
156
  run_lora.zerogpu = True
157
+ #pipe.load_lora_weights("ostris/OpenFLUX.1", weight_name="openflux1-v0.1.0-fast-lora.safetensors", adapter_name="fast")
158
+ #pipe.set_adapters("fast")
159
+ #pipe.set_adapters(["fast", "toy"], adapter_weights=[0.5, 1.0])
160
+ #pipe.fuse_lora(adapter_names=["fast"], lora_scale=1.0)
161
 
162
  css = '''
163
  #gen_btn{height: 100%}
 
204
  with gr.Accordion("Advanced Settings", open=True):
205
  with gr.Column():
206
  with gr.Row():
207
+ cfg_scale = gr.Slider(label="CFG Scale", minimum=1, maximum=20, step=0.5, value=3.0)
208
+ steps = gr.Slider(label="Steps", minimum=1, maximum=50, step=1, value=10)
209
 
210
  with gr.Row():
211
  width = gr.Slider(label="Width", minimum=256, maximum=1536, step=64, value=768)