Update app.py
app.py CHANGED
@@ -1,14 +1,10 @@
 import gradio as gr
-from transformers import pipeline
-import requests
-import json
 import edge_tts
 import asyncio
 import tempfile
 import os
 from huggingface_hub import InferenceClient
 import re
-import time
 from streaming_stt_nemo import Model
 import torch
 import random
@@ -51,9 +47,9 @@ def models(text, model="Mixtral 8x7B", seed=42):
     client = client_fn(model)
 
     generate_kwargs = dict(
+        top_p=0.95,
         max_new_tokens=300,
-
-        seed=seed,
+        seed=seed
     )
 
     formatted_prompt = system_instructions1 + text + "[JARVIS]"
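For reference, a minimal self-contained sketch of how generation kwargs like these are typically passed to huggingface_hub's InferenceClient.text_generation. The repo ID, prompt text, and streaming flags below are assumptions for illustration only; they are not taken from this diff, which only shows generate_kwargs, client_fn, and formatted_prompt.

from huggingface_hub import InferenceClient

# Hypothetical stand-in for client_fn(model); the repo ID is an assumption.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

# Same shape as the generate_kwargs in the diff above (seed hard-coded here).
generate_kwargs = dict(
    top_p=0.95,          # nucleus sampling threshold added in this commit
    max_new_tokens=300,  # cap on the number of generated tokens
    seed=42,
)

# Assumed usage: stream tokens from the model and accumulate the reply text.
stream = client.text_generation(
    "Hello [JARVIS]",    # placeholder prompt; the app builds its own
    **generate_kwargs,
    stream=True,
    details=True,
    return_full_text=False,
)

output = ""
for chunk in stream:
    if not chunk.token.special:
        output += chunk.token.text
print(output)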