import torch
from transformers import AutoTokenizer, MobileBertForSequenceClassification

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load the saved model
model_name = 'harshith20/Emotion_predictor'
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = MobileBertForSequenceClassification.from_pretrained(model_name).to(device)
model.eval()  # evaluation mode: disables dropout for inference

# Tokenize input text
input_text = "I am feeling happy today"
input_ids = tokenizer.encode(input_text, add_special_tokens=True, truncation=True, max_length=128)
input_tensor = torch.tensor([input_ids]).to(device)

# Predict emotion
with torch.no_grad():
    outputs = model(input_tensor)
    logits = outputs.logits

# Map the highest-scoring logit to its emotion label
predicted_emotion = torch.argmax(logits, dim=1).item()
emotion_labels = {0: 'sadness', 1: 'joy', 2: 'love', 3: 'anger', 4: 'fear', 5: 'surprise'}
predicted_emotion_label = emotion_labels[predicted_emotion]

print(f"Input text: {input_text}")
print(f"Predicted emotion: {predicted_emotion_label}")```