Fix pre-prompts
Browse files
app.py
CHANGED
|
@@ -120,10 +120,9 @@ def query(prompt, model, custom_lora, is_negative=False, steps=35, cfg_scale=7,
|
|
| 120 |
prompt = f"wbgmsst, white background, {prompt}"
|
| 121 |
if model == 'SDXL HS Card Style':
|
| 122 |
API_URL = "https://api-inference.huggingface.co/models/Norod78/sdxl-hearthstone-card-style-lora"
|
| 123 |
-
prompt = f"
|
| 124 |
if model == 'SLDR FLUX NSFW v2 Studio':
|
| 125 |
API_URL = "https://api-inference.huggingface.co/models/xey/sldr_flux_nsfw_v2-studio"
|
| 126 |
-
prompt = f"Aa, {prompt}"
|
| 127 |
if model == 'SoftPasty Flux':
|
| 128 |
API_URL = "https://api-inference.huggingface.co/models/alvdansen/softpasty-flux-dev"
|
| 129 |
prompt = f"araminta_illus illustration style, {prompt}"
|
|
|
|
| 120 |
prompt = f"wbgmsst, white background, {prompt}"
|
| 121 |
if model == 'SDXL HS Card Style':
|
| 122 |
API_URL = "https://api-inference.huggingface.co/models/Norod78/sdxl-hearthstone-card-style-lora"
|
| 123 |
+
prompt = f"Hearthstone Card, {prompt}"
|
| 124 |
if model == 'SLDR FLUX NSFW v2 Studio':
|
| 125 |
API_URL = "https://api-inference.huggingface.co/models/xey/sldr_flux_nsfw_v2-studio"
|
|
|
|
| 126 |
if model == 'SoftPasty Flux':
|
| 127 |
API_URL = "https://api-inference.huggingface.co/models/alvdansen/softpasty-flux-dev"
|
| 128 |
prompt = f"araminta_illus illustration style, {prompt}"
|