Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -1,17 +1,23 @@
|
|
|
|
|
|
1 |
import transformers
|
2 |
-
from transformers import
|
3 |
-
|
4 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5 |
|
6 |
-
@spaces.GPU
|
7 |
|
8 |
-
# Charger le modèle pour la génération de texte
|
9 |
-
pipe = pipeline("text-generation", model="MTSAIR/MultiVerse_70B")
|
10 |
|
11 |
-
# Fonction pour générer une réponse à partir du message de l'utilisateur
|
12 |
-
def generate_response(message):
|
13 |
-
response = pipe(message)
|
14 |
-
return response[0]['generated_text']
|
15 |
|
16 |
-
# Configurer et lancer l'interface de chat avec Gradio
|
17 |
-
gr.ChatInterface(fn=generate_response).launch()
|
|
|
1 |
+
# Fine-tuning data prep: tokenize GLUE SST-2 with a BERT tokenizer.
#
# Fixes vs. previous revision (both caused the Space's "Runtime error"):
#   1. The Hugging Face package is `datasets`, not `dataset` — the old
#      imports failed with ModuleNotFoundError at startup.
#   2. SST-2 is a single-sentence task (columns: "sentence", "label",
#      "idx"); "sentence1"/"sentence2" are MRPC column names and raised
#      KeyError inside `.map(encode)`.
import datasets  # type: ignore
from datasets import load_dataset  # type: ignore
import transformers
from transformers import TFAutoModelForSequenceClassification, AutoTokenizer

# Pretrained BERT (TensorFlow head for sequence classification) and its
# matching tokenizer. Both are fetched from the Hub on first run.
model = TFAutoModelForSequenceClassification.from_pretrained("bert-base-uncased")
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

# NOTE(review): `ds` loads the same SST-2 data as `sst2_dataset` below and is
# never used afterwards; kept to preserve the script's observable behavior.
ds = load_dataset("stanfordnlp/sst2")

sst2_dataset = load_dataset("glue", "sst2", split="train")


def encode(examples):
    """Tokenize a batch of SST-2 examples.

    SST-2 is single-sentence classification, so only ``examples["sentence"]``
    is passed to the tokenizer (truncated/padded to the model's max length).

    Args:
        examples: a batch dict from ``Dataset.map(..., batched=True)``.

    Returns:
        The tokenizer's encoding dict (input_ids, attention_mask, ...).
    """
    return tokenizer(examples["sentence"], truncation=True, padding="max_length")


# Add token columns, then mirror "label" into "labels" (the key Keras/
# Transformers training loops expect).
sst2_dataset = sst2_dataset.map(encode, batched=True)
sst2_dataset = sst2_dataset.map(lambda examples: {"labels": examples["label"]}, batched=True)
|
20 |
|
|
|
21 |
|
|
|
|
|
22 |
|
|
|
|
|
|
|
|
|
23 |
|
|
|
|