import gradio as gr
import torch
from transformers import AutoConfig, AutoTokenizer

from custom_model import CustomModel

# Model id and pinned revision. NOTE: `from_pretrained` does not understand an
# "@revision" suffix inside the model id — the revision must be passed via the
# `revision` keyword argument instead.
MODEL_NAME = "deepseek-ai/DeepSeek-V3"
REVISION = "main"

# Load tokenizer, config, and model once at startup.
tokenizer = AutoTokenizer.from_pretrained(
    MODEL_NAME, revision=REVISION, trust_remote_code=True
)
config = AutoConfig.from_pretrained(
    MODEL_NAME, revision=REVISION, trust_remote_code=True
)
model = CustomModel.from_pretrained(
    MODEL_NAME, config=config, revision=REVISION, trust_remote_code=True
)
model.eval()  # inference only: disable dropout / put norm layers in eval mode


def classify_text(text: str):
    """Classify *text* with the loaded model.

    Args:
        text: Raw input text to classify.

    Returns:
        A 2-tuple matching the two Gradio output components:
        - the predicted class index as a string, and
        - a mapping of class index -> probability (the format `gr.Label`
          expects for displaying confidences).
    """
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():  # no autograd graph needed for inference
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = torch.softmax(logits, dim=-1).tolist()[0]
    predicted_class = torch.argmax(logits, dim=-1).item()
    # Gradio calls the function and expects ONE return value per declared
    # output component — a single dict would raise at inference time.
    return str(predicted_class), {str(i): p for i, p in enumerate(probabilities)}


# Build the Gradio interface. The legacy `gr.inputs.*` / `gr.outputs.*`
# namespaces were removed in Gradio 3+; components live at the top level now.
iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs=[
        gr.Label(label="Predicted Class"),
        gr.Label(label="Probabilities"),
    ],
    title="DeepSeek-V3 Text Classification",
    description="Classify text using the DeepSeek-V3 model.",
)

if __name__ == "__main__":
    iface.launch()