Spaces:
Running
on
Zero
Running
on
Zero
NikhilJoson
committed on
Commit
•
2bbf193
1
Parent(s):
399b680
Update app.py
Browse files
app.py
CHANGED
@@ -4,6 +4,7 @@ import gradio as gr
|
|
4 |
from diffusers import FluxInpaintPipeline
|
5 |
import random
|
6 |
import numpy as np
|
|
|
7 |
|
8 |
MARKDOWN = """
|
9 |
# Prompt Canvas🎨
|
@@ -11,6 +12,34 @@ Thanks to [Black Forest Labs](https://huggingface.co/black-forest-labs) team for
|
|
11 |
and a big thanks to [Gothos](https://github.com/Gothos) for taking it to the next level by enabling inpainting with the FLUX.
|
12 |
"""
|
13 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
14 |
MAX_SEED = np.iinfo(np.int32).max
|
15 |
DEVICE = "cuda" #if torch.cuda.is_available() else "cpu"
|
16 |
|
|
|
4 |
from diffusers import FluxInpaintPipeline
|
5 |
import random
|
6 |
import numpy as np
|
7 |
+
import google.generativeai as genai
|
8 |
|
9 |
MARKDOWN = """
|
10 |
# Prompt Canvas🎨
|
|
|
12 |
and a big thanks to [Gothos](https://github.com/Gothos) for taking it to the next level by enabling inpainting with the FLUX.
|
13 |
"""
|
14 |
|
15 |
+
# Gemini setup: authenticate the SDK from the environment and create one
# model handle at import time so every request reuses the same client.
# NOTE(review): assumes `os` is imported earlier in the file (not visible in
# this hunk) and that the `Gemini_API` secret/env var is set — confirm.
genai.configure(api_key=os.environ['Gemini_API'])
gemini_flash = genai.GenerativeModel(model_name='gemini-1.5-flash-002')
|
18 |
+
|
19 |
+
def gemini_predict(prompt):
    """Ask Gemini which image region a user's edit request targets.

    Wraps *prompt* in a few-shot instruction and sends it to the
    module-level ``gemini_flash`` model; the model is asked to answer with
    the object to be edited in 1-3 words (e.g. "Lips" for
    "Change Lipstick colour to blue").

    Args:
        prompt: The raw user editing query.

    Returns:
        str: The model's short region/object answer (``response.text``).

    Raises:
        Whatever ``google.generativeai`` raises on API/network failure;
        no error handling is done here — callers should be prepared.
    """
    # Few-shot prompt; the example Query/Response pairs steer the model
    # toward terse region names rather than full sentences.
    system_message = f"""You are the best text analyser.
    You have to analyse a user query and identify what the user wants to change, from a given user query.

    Examples:
    Query: Change Lipstick colour to blue
    Response: Lips

    Query: Add a nose stud
    Response: Nose

    Query: Add a wallpaper to the right wall
    Response: Right wall

    Query: Change the Sofa's colour to Purple
    Response: Sofa

    Your response should be in 1 or 2-3 words
    Query : {prompt}
    """
    response = gemini_flash.generate_content(system_message)
    # Idiom fix: `return(response.text)` -> plain return; `return` is a
    # statement, not a function call.
    return response.text
|
41 |
+
|
42 |
+
|
43 |
# Largest signed 32-bit integer; used as the upper bound for RNG seeds.
MAX_SEED = np.iinfo(np.int32).max
# NOTE(review): hard-coded to GPU — the torch CPU fallback is commented out,
# so this module assumes a CUDA device is always present; confirm on target
# hardware before enabling CPU deployments.
DEVICE = "cuda" #if torch.cuda.is_available() else "cpu"
|
45 |
|