import warnings

import gradio as gr
import torch
from transformers import AutoConfig, AutoTokenizer

from custom_model import CustomModel

# Suppress the FutureWarning emitted by torch
warnings.filterwarnings("ignore", category=FutureWarning, module="torch")

# Load the model and tokenizer
model_name = "deepseek-ai/DeepSeek-V3"
revision = "main"  # Pin the revision explicitly

print(f"Loading tokenizer from {model_name}...")
tokenizer = AutoTokenizer.from_pretrained(model_name, revision=revision, trust_remote_code=True)

print(f"Loading configuration from {model_name}...")
config = AutoConfig.from_pretrained(model_name, revision=revision, trust_remote_code=True)

print(f"Loading model from {model_name}...")
model = CustomModel.from_pretrained(model_name, config=config, revision=revision, trust_remote_code=True)

if model is None:
    print("Failed to load model.")
else:
    print("Model loaded successfully.")
    model.eval()  # Inference mode: disables dropout and similar training-only layers


def classify_text(text):
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():  # Inference only; skip gradient bookkeeping
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = torch.softmax(logits, dim=-1).tolist()[0]
    predicted_class = torch.argmax(logits, dim=-1).item()
    # Gradio expects one return value per output component, so return a pair:
    # the predicted class as a string, and a {class label: probability} dict
    # that gr.Label can render as a confidence bar chart.
    return str(predicted_class), {str(i): p for i, p in enumerate(probabilities)}


# Create a Gradio interface. The old gr.inputs/gr.outputs namespaces were
# removed in Gradio 3+; use the top-level components instead.
try:
    iface = gr.Interface(
        fn=classify_text,
        inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
        outputs=[
            gr.Label(label="Predicted Class"),
            gr.Label(label="Probabilities"),
        ],
        title="DeepSeek-V3 Text Classification",
        description="Classify text using the DeepSeek-V3 model.",
    )
except Exception as e:
    print(f"Failed to create Gradio interface: {e}")

# Launch the interface
try:
    iface.launch()
except Exception as e:
    print(f"Failed to launch Gradio interface: {e}")
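
# Optional smoke test: a minimal sketch for exercising classify_text directly,
# without going through the web UI. The sample sentence is an arbitrary
# placeholder, not part of the original script. Uncomment and run before
# iface.launch() (launch() blocks by default), or in a separate REPL session:
#
#   label, probs = classify_text("The movie was surprisingly good.")
#   print(f"Predicted class: {label}")
#   print(f"Probabilities: {probs}")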