deepseekv3 / app.py
import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer
# Load the model and tokenizer
model_name = "deepseek-ai/DeepSeek-V3"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, trust_remote_code=True)
def classify_text(text):
    # Tokenize the input and run a forward pass without tracking gradients
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = torch.softmax(logits, dim=-1)[0].tolist()
    predicted_class = torch.argmax(logits, dim=-1).item()
    # Return one value per output component: the predicted class index and a
    # {class index: probability} mapping that gr.Label renders as confidences
    return str(predicted_class), {str(i): float(p) for i, p in enumerate(probabilities)}
# Create a Gradio interface
iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs=[
        gr.Label(label="Predicted Class"),
        gr.Label(label="Probabilities"),
    ],
    title="DeepSeek-V3 Text Classification",
    description="Classify text using the DeepSeek-V3 model.",
)
# Launch the interface
iface.launch()
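
# A minimal sketch of exercising the handler directly, outside the Gradio UI
# (assumes the DeepSeek-V3 checkpoint actually loads with a sequence-classification
# head in this environment; the printed values below are illustrative only):
#
#     label, probs = classify_text("Gradio makes it easy to demo models.")
#     print(label)   # e.g. "1"
#     print(probs)   # e.g. {"0": 0.31, "1": 0.69}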