John6666 committed on
Commit
4d511ed
1 Parent(s): 51aefe8

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +16 -7
  2. mod.py +3 -3
app.py CHANGED
@@ -14,7 +14,7 @@ import time
14
  import requests
15
  import pandas as pd
16
 
17
- from env import models, num_loras, num_cns
18
  from mod import (clear_cache, get_repo_safetensors, is_repo_name, is_repo_exists, get_model_trigger,
19
  description_ui, compose_lora_json, is_valid_lora, fuse_loras, save_image, preprocess_i2i_image,
20
  get_trigger_word, enhance_prompt, set_control_union_image,
@@ -111,6 +111,15 @@ def change_base_model(repo_id: str, cn_on: bool, disable_model_cache: bool, prog
111
 
112
  change_base_model.zerogpu = True
113
 
 
 
 
 
 
 
 
 
 
114
  class calculateDuration:
115
  def __init__(self, activity_name=""):
116
  self.activity_name = activity_name
@@ -235,7 +244,7 @@ def randomize_loras(selected_indices, loras_state):
235
  random_prompt = random.choice(prompt_values)
236
  return selected_info_1, selected_info_2, selected_indices, lora_scale_1, lora_scale_2, lora_image_1, lora_image_2, random_prompt
237
 
238
- def add_custom_lora(custom_lora, selected_indices, current_loras):
239
  if custom_lora:
240
  try:
241
  title, repo, path, trigger_word, image = check_custom_model(custom_lora)
@@ -297,7 +306,7 @@ def add_custom_lora(custom_lora, selected_indices, current_loras):
297
  else:
298
  return current_loras, gr.update(), gr.update(), gr.update(), selected_indices, gr.update(), gr.update(), gr.update(), gr.update()
299
 
300
- def remove_custom_lora(selected_indices, current_loras):
301
  if current_loras:
302
  custom_lora_repo = current_loras[-1]['repo']
303
  # Remove from loras list
@@ -548,7 +557,7 @@ run_lora.zerogpu = True
548
  def get_huggingface_safetensors(link):
549
  split_link = link.split("/")
550
  if len(split_link) == 2:
551
- model_card = ModelCard.load(link)
552
  base_model = model_card.data.get("base_model")
553
  print(f"Base model: {base_model}")
554
  if base_model not in ["black-forest-labs/FLUX.1-dev", "black-forest-labs/FLUX.1-schnell"]:
@@ -556,7 +565,7 @@ def get_huggingface_safetensors(link):
556
  image_path = model_card.data.get("widget", [{}])[0].get("output", {}).get("url", None)
557
  trigger_word = model_card.data.get("instance_prompt", "")
558
  image_url = f"https://huggingface.co/{link}/resolve/main/{image_path}" if image_path else None
559
- fs = HfFileSystem()
560
  safetensors_name = None
561
  try:
562
  list_of_files = fs.ls(link, detail=False)
@@ -802,12 +811,12 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=css, delete_ca
802
  )
803
  add_custom_lora_button.click(
804
  add_custom_lora,
805
- inputs=[custom_lora, selected_indices, loras_state],
806
  outputs=[loras_state, gallery, selected_info_1, selected_info_2, selected_indices, lora_scale_1, lora_scale_2, lora_image_1, lora_image_2]
807
  )
808
  remove_custom_lora_button.click(
809
  remove_custom_lora,
810
- inputs=[selected_indices, loras_state],
811
  outputs=[loras_state, gallery, selected_info_1, selected_info_2, selected_indices, lora_scale_1, lora_scale_2, lora_image_1, lora_image_2]
812
  )
813
  gr.on(
 
14
  import requests
15
  import pandas as pd
16
 
17
+ from env import models, num_loras, num_cns, HF_TOKEN
18
  from mod import (clear_cache, get_repo_safetensors, is_repo_name, is_repo_exists, get_model_trigger,
19
  description_ui, compose_lora_json, is_valid_lora, fuse_loras, save_image, preprocess_i2i_image,
20
  get_trigger_word, enhance_prompt, set_control_union_image,
 
111
 
112
  change_base_model.zerogpu = True
113
 
114
+ def update_loras_state(current_loras, gallery):
115
+ global loras
116
+ loras = current_loras
117
+ gallery_items = [(item["image"], item["title"]) for item in current_loras]
118
+ print(current_loras)
119
+ print(gallery_items)
120
+ print(gallery)
121
+ return gr.update(value=gallery_items)
122
+
123
  class calculateDuration:
124
  def __init__(self, activity_name=""):
125
  self.activity_name = activity_name
 
244
  random_prompt = random.choice(prompt_values)
245
  return selected_info_1, selected_info_2, selected_indices, lora_scale_1, lora_scale_2, lora_image_1, lora_image_2, random_prompt
246
 
247
+ def add_custom_lora(custom_lora, selected_indices, current_loras, gallery):
248
  if custom_lora:
249
  try:
250
  title, repo, path, trigger_word, image = check_custom_model(custom_lora)
 
306
  else:
307
  return current_loras, gr.update(), gr.update(), gr.update(), selected_indices, gr.update(), gr.update(), gr.update(), gr.update()
308
 
309
+ def remove_custom_lora(selected_indices, current_loras, gallery):
310
  if current_loras:
311
  custom_lora_repo = current_loras[-1]['repo']
312
  # Remove from loras list
 
557
  def get_huggingface_safetensors(link):
558
  split_link = link.split("/")
559
  if len(split_link) == 2:
560
+ model_card = ModelCard.load(link, token=HF_TOKEN)
561
  base_model = model_card.data.get("base_model")
562
  print(f"Base model: {base_model}")
563
  if base_model not in ["black-forest-labs/FLUX.1-dev", "black-forest-labs/FLUX.1-schnell"]:
 
565
  image_path = model_card.data.get("widget", [{}])[0].get("output", {}).get("url", None)
566
  trigger_word = model_card.data.get("instance_prompt", "")
567
  image_url = f"https://huggingface.co/{link}/resolve/main/{image_path}" if image_path else None
568
+ fs = HfFileSystem(token=HF_TOKEN)
569
  safetensors_name = None
570
  try:
571
  list_of_files = fs.ls(link, detail=False)
 
811
  )
812
  add_custom_lora_button.click(
813
  add_custom_lora,
814
+ inputs=[custom_lora, selected_indices, loras_state, gallery],
815
  outputs=[loras_state, gallery, selected_info_1, selected_info_2, selected_indices, lora_scale_1, lora_scale_2, lora_image_1, lora_image_2]
816
  )
817
  remove_custom_lora_button.click(
818
  remove_custom_lora,
819
+ inputs=[selected_indices, loras_state, gallery],
820
  outputs=[loras_state, gallery, selected_info_1, selected_info_2, selected_indices, lora_scale_1, lora_scale_2, lora_image_1, lora_image_2]
821
  )
822
  gr.on(
mod.py CHANGED
@@ -5,7 +5,8 @@ from PIL import Image
5
  from pathlib import Path
6
  import gc
7
  import subprocess
8
- from env import num_cns, model_trigger
 
9
 
10
 
11
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
@@ -13,7 +14,6 @@ subprocess.run('pip cache purge', shell=True)
13
  device = "cuda" if torch.cuda.is_available() else "cpu"
14
  torch.set_grad_enabled(False)
15
 
16
-
17
  control_images = [None] * num_cns
18
  control_modes = [-1] * num_cns
19
  control_scales = [0] * num_cns
@@ -60,7 +60,7 @@ def clear_cache():
60
 
61
  def get_repo_safetensors(repo_id: str):
62
  from huggingface_hub import HfApi
63
- api = HfApi()
64
  try:
65
  if not is_repo_name(repo_id) or not is_repo_exists(repo_id): return gr.update(value="", choices=[])
66
  files = api.list_repo_files(repo_id=repo_id)
 
5
  from pathlib import Path
6
  import gc
7
  import subprocess
8
+ from env import num_cns, model_trigger, HF_TOKEN
9
+ import os
10
 
11
 
12
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
14
  device = "cuda" if torch.cuda.is_available() else "cpu"
15
  torch.set_grad_enabled(False)
16
 
 
17
  control_images = [None] * num_cns
18
  control_modes = [-1] * num_cns
19
  control_scales = [0] * num_cns
 
60
 
61
  def get_repo_safetensors(repo_id: str):
62
  from huggingface_hub import HfApi
63
+ api = HfApi(token=HF_TOKEN)
64
  try:
65
  if not is_repo_name(repo_id) or not is_repo_exists(repo_id): return gr.update(value="", choices=[])
66
  files = api.list_repo_files(repo_id=repo_id)