Update appTest1.py
appTest1.py CHANGED (+4 -6)
@@ -240,14 +240,12 @@ async def infer(model_str, prompt, nprompt="", height=None, width=None, steps=No
     if width is not None and width >= 256: kwargs["width"] = width
     if steps is not None and steps >= 1: kwargs["num_inference_steps"] = steps
     if cfg is not None and cfg > 0: cfg = kwargs["guidance_scale"] = cfg
-    noise = ""
     if seed >= 0: kwargs["seed"] = seed
-    else:
-
-
-        noise += " "
+    else: kwargs["seed"] = randint(1, MAX_SEED-1)
+
+
     task = asyncio.create_task(asyncio.to_thread(models_load[model_str].fn,
-                                                 prompt=
+                                                 prompt=prompt, negative_prompt=nprompt, **kwargs, token=HF_TOKEN))
     await asyncio.sleep(0)
     try:
         result = await asyncio.wait_for(task, timeout=timeout)
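
For reference, a minimal self-contained sketch of the pattern this hunk settles on: the seed is always passed explicitly (falling back to a random value below MAX_SEED instead of padding the prompt with whitespace "noise"), and the blocking model call runs in a worker thread bounded by asyncio.wait_for. MAX_SEED, the timeout default, and fake_model_fn are stand-ins assumed here; in appTest1.py the real callable is models_load[model_str].fn invoked with HF_TOKEN.

import asyncio
from random import randint

MAX_SEED = 2**32 - 1  # assumed value; the real constant is defined elsewhere in appTest1.py

def fake_model_fn(prompt, negative_prompt="", **kwargs):
    # Stand-in for models_load[model_str].fn: just echoes what it was given.
    return {"prompt": prompt, "negative_prompt": negative_prompt, **kwargs}

async def infer_sketch(prompt, nprompt="", seed=-1, timeout=30):
    kwargs = {}
    # New behaviour: always send an explicit seed instead of prompt "noise".
    if seed >= 0: kwargs["seed"] = seed
    else: kwargs["seed"] = randint(1, MAX_SEED - 1)

    # Run the blocking call off the event loop and bound it with a timeout.
    task = asyncio.create_task(asyncio.to_thread(fake_model_fn,
                               prompt=prompt, negative_prompt=nprompt, **kwargs))
    await asyncio.sleep(0)
    try:
        result = await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        task.cancel()
        result = None
    return result

print(asyncio.run(infer_sketch("a cozy cabin in the woods")))

The removed code varied results by appending a random run of spaces to the prompt, presumably so identical requests would not resolve to the same cached output; passing a real seed keeps the prompt text clean and makes a run reproducible whenever the caller supplies seed >= 0.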