Update app.py
app.py CHANGED
@@ -143,12 +143,13 @@ theme = gr.themes.Monochrome(
     ],
 )
 
-
+
 def stream(model, code, generate_kwargs):
     input_ids = tokenizer(code, return_tensors="pt").to("cuda")
     generated_ids = model.generate(**input_ids, **generate_kwargs)
     return tokenizer.decode(generated_ids[0][input_ids["input_ids"].shape[1]:], skip_special_tokens=True).strip()
 
+@spaces.GPU(enable_queue=True)
 def generate(
     prompt, temperature=0.6, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0, library="LangChain", method="Prefix"
 ):
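
Note: on a ZeroGPU Space, the `@spaces.GPU` decorator marks the entry point that needs a GPU so hardware is attached only for the duration of that call, which is why the commit places it on `generate` rather than on the helper `stream`. The sketch below shows roughly how the decorated function is typically wired into the rest of an app.py; the model checkpoint, the `generate_kwargs` assembly, and the Gradio wiring are assumptions for illustration, not taken from this Space.

# Minimal sketch (assumed layout; the actual app.py defines its own model and UI)
import spaces
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "bigcode/starcoder2-3b"  # placeholder checkpoint; the Space loads its own model
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16).to("cuda")


def stream(model, code, generate_kwargs):
    # Tokenize the prompt, run generation, and return only the newly generated text.
    input_ids = tokenizer(code, return_tensors="pt").to("cuda")
    generated_ids = model.generate(**input_ids, **generate_kwargs)
    return tokenizer.decode(
        generated_ids[0][input_ids["input_ids"].shape[1]:], skip_special_tokens=True
    ).strip()


@spaces.GPU(enable_queue=True)  # GPU is allocated only while this call runs (as in the commit)
def generate(prompt, temperature=0.6, max_new_tokens=256, top_p=0.95,
             repetition_penalty=1.0, library="LangChain", method="Prefix"):
    # Assumed body: collect sampling options and delegate to the helper above.
    generate_kwargs = dict(
        do_sample=True,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
    )
    return stream(model, prompt, generate_kwargs)


# Illustrative wiring only; the real Space builds a richer interface with the
# library/method controls referenced in generate()'s signature.
demo = gr.Interface(fn=generate, inputs="text", outputs="text")
demo.launch()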