fluecnyfromText / app.py
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
from huggingface_hub import login
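
# Note: `login` is only needed if the checkpoint were private or gated; it is unused here.

# Load the fine-tuned DistilBERT fluency classifier (binary: low vs. high fluency).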
model_name = "papasega/finetune_Distilbert_SST_Avalinguo_Fluency"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
# Prediction function: tokenize the input text and return the predicted
# fluency label together with both class probabilities.
def predict_fluency(text):
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        logits = model(**inputs).logits
    probs = torch.softmax(logits, dim=1)
    label = torch.argmax(probs, dim=1).item()
    label = "Low Fluency" if label == 0 else "High Fluency"
    return (
        f"{label}\n"
        f"Low Fluency: {probs[0][0].item():.4f}\n"
        f"High Fluency: {probs[0][1].item():.4f}"
    )
# Gradio interface: a single text box in, the predicted label and probabilities out.
fluency = gr.Interface(
    fn=predict_fluency,
    inputs="text",
    outputs="text",
    title="Text-based Fluency Classification",
    description="This model classifies the user's fluency level from their text.",
    examples=[
        ["Engineer, Yeah, you"],
        ["Engineer, indeed, the lady, an accomplished engineer, holds a prestigious Ph.D It is her first achievement of such caliber"],
        ["Oh, how was brown for you?"],
        ["The cat chased its tail, tail spinning wildly around and around."],
        ["Now they can."],
        ["I like to read books and watch movies on the weekends."],
        ["But kind of plastics like growing more social consciousness, right?"],
    ],
)
fluency.launch(debug=True)