"""Gradio demo: text classification with a custom DeepSeek-V3-based model."""

import gradio as gr
import torch
from transformers import AutoTokenizer, PreTrainedTokenizer

from custom_model import CustomModel

# Load the tokenizer and custom model once at startup.
MODEL_NAME = "deepseek-ai/DeepSeek-V3"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = CustomModel.from_pretrained(MODEL_NAME)
# Inference only: disable dropout / batch-norm updates for deterministic output.
model.eval()


def classify_text(text: str):
    """Classify *text* and return the predicted class plus class probabilities.

    Returns:
        A 2-tuple matching the interface's two outputs:
        - str: the predicted class index,
        - dict[str, float]: class index -> probability (the mapping shape
          that ``gr.Label`` renders as a confidence bar chart).
    """
    # Guard the empty-input case so the demo doesn't crash on a blank box.
    if not text or not text.strip():
        return "No input", {}

    inputs = tokenizer(text, return_tensors="pt")
    # No gradients needed at inference time — saves memory and compute.
    with torch.no_grad():
        outputs = model(**inputs)

    logits = outputs.logits
    probabilities = torch.softmax(logits, dim=-1)[0]
    predicted_class = int(torch.argmax(logits, dim=-1).item())

    # gr.Label expects a {label: confidence} mapping.
    prob_map = {str(i): float(p) for i, p in enumerate(probabilities.tolist())}
    return str(predicted_class), prob_map


# Build the Gradio interface. NOTE: gr.inputs / gr.outputs were removed in
# Gradio 3.x — components now live at the top level (gr.Textbox, gr.Label).
iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs=[
        gr.Label(label="Predicted Class"),
        gr.Label(label="Probabilities"),
    ],
    title="DeepSeek-V3 Text Classification",
    description="Classify text using the DeepSeek-V3 model.",
)

if __name__ == "__main__":
    iface.launch()