Spaces: Running on CPU Upgrade
artificialguybr committed
Commit • a146eda
1 Parent(s): 063fc12
Update app.py
app.py CHANGED
@@ -1,4 +1,3 @@
-# Existing imports
 import gradio as gr
 import requests
 import io
@@ -7,29 +6,19 @@ import json
 import os
 import logging
 
-# Initialize logging
 logging.basicConfig(level=logging.DEBUG)
 
-# Load LoRAs from JSON
 with open('loras.json', 'r') as f:
     loras = json.load(f)
 
 def update_selection(selected_state: gr.SelectData):
     logging.debug(f"Inside update_selection, selected_state: {selected_state}")
     selected_lora_index = selected_state['index']
-
-
-    new_placeholder = "Your new placeholder here"
-    use_with_diffusers = "Your use_with_diffusers here"
-    use_with_uis = "Your use_with_uis here"
-    logging.debug(f"Updated selected_state: {selected_state}")
+    selected_lora = loras[selected_lora_index]
+    new_placeholder = f"Type a prompt for {selected_lora['title']}"
     return (
-        updated_text,
-        instance_prompt,
         gr.update(placeholder=new_placeholder),
-        selected_state
-        use_with_diffusers,
-        use_with_uis,
+        selected_state
     )
 
 def run_lora(prompt, selected_state, progress=gr.Progress(track_tqdm=True)):
@@ -37,6 +26,7 @@ def run_lora(prompt, selected_state, progress=gr.Progress(track_tqdm=True)):
     if not selected_state:
         logging.error("selected_state is None or empty.")
         raise gr.Error("You must select a LoRA")
+
     selected_lora_index = selected_state['index']
     selected_lora = loras[selected_lora_index]
     api_url = f"https://api-inference.huggingface.co/models/{selected_lora['repo']}"
@@ -44,10 +34,16 @@ def run_lora(prompt, selected_state, progress=gr.Progress(track_tqdm=True)):
     token = os.getenv("API_TOKEN")
     payload = {"inputs": f"{prompt} {trigger_word}"}
 
-
+    headers = {"Authorization": f"Bearer {token}"}
+    response = requests.post(api_url, headers=headers, json=payload)
+    if response.status_code == 200:
+        return Image.open(io.BytesIO(response.content))
+    else:
+        return "API Error"
+
 with gr.Blocks(css="custom.css") as app:
     title = gr.HTML("<h1>LoRA the Explorer</h1>")
-    selected_state = gr.State()
+    selected_state = gr.State()
     with gr.Row():
         gallery = gr.Gallery(
             [(item["image"], item["title"]) for item in loras],
@@ -64,7 +60,7 @@ with gr.Blocks(css="custom.css") as app:
 
     gallery.select(
         update_selection,
-        outputs=[selected_state]
+        outputs=[prompt, selected_state]
     )
     button.click(
         fn=run_lora,
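
For reference, a minimal, self-contained sketch of the Inference API call pattern that the new run_lora body relies on. This is an illustration under assumptions, not the Space's actual code: the model id below is a placeholder, the token is read from the same API_TOKEN environment variable used in the diff, and the PIL import is spelled out because none is visible in the hunks above (the full file may already have it).

import io
import os

import requests
from PIL import Image  # Image.open is used by the committed code, but no import appears in the diff context

# Placeholder model id; the app builds this URL from the selected LoRA's 'repo' field.
API_URL = "https://api-inference.huggingface.co/models/<user>/<lora-model>"

def generate(prompt: str) -> Image.Image:
    # The Inference API expects a Bearer token and a JSON payload with an "inputs" string.
    headers = {"Authorization": f"Bearer {os.getenv('API_TOKEN')}"}
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
    response.raise_for_status()  # the committed code instead returns the string "API Error" on failure
    # On success the response body is raw image bytes.
    return Image.open(io.BytesIO(response.content))

if __name__ == "__main__":
    generate("a watercolor fox, highly detailed").save("out.png")

On the Gradio side, the rewired gallery.select(update_selection, outputs=[prompt, selected_state]) maps update_selection's two return values, in order, to the prompt textbox (via gr.update) and the gr.State component.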