Update model.py (#1)
Update model.py (ee1e41c1a05034cd8be4687b80f81e115ca17a1b)
Co-authored-by: Baljinder Hothi <BaljinderH@users.noreply.huggingface.co>
model.py
CHANGED
@@ -5,10 +5,6 @@ from transformers import BertTokenizer, BertForSequenceClassification
 tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
 model = BertForSequenceClassification.from_pretrained('bert-base-uncased', num_labels=2)
 
-# Load the model (Assuming it's already trained and saved in "./saved_model")
-# If you don't have a trained model, comment out this line. The code will use the default BERT model
-model = BertForSequenceClassification.from_pretrained("./saved_model")
-
 # Predicting Function
 def predict(text):
     inputs = tokenizer(text, padding=True, truncation=True, return_tensors="pt")
@@ -16,6 +12,6 @@ def predict(text):
     predictions = torch.argmax(outputs.logits, dim=-1)
     return "AI-generated" if predictions.item() == 1 else "Human-written"
 
-#
-user_input = input("Enter the text you want to classify: ")
-print("Classified as:", predict(user_input))
+# Example Usage (commented out as it's not needed for web deployment)
+# user_input = input("Enter the text you want to classify: ")
+# print("Classified as:", predict(user_input))
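The interactive prompt at the end of the file is commented out because the classifier is meant to be served through a web UI rather than the terminal. Below is a minimal sketch of how predict could be exposed in such a deployment, assuming a Gradio interface in a separate app.py; the Gradio wrapper and the file name are assumptions for illustration and are not part of this commit.

# app.py -- hypothetical wrapper, not part of this commit.
# Assumes it sits next to model.py, which exposes predict(text) as defined above.
import gradio as gr

from model import predict

# Simple text-in / label-out interface around the classifier.
demo = gr.Interface(
    fn=predict,                                      # reuses predict() from model.py
    inputs=gr.Textbox(label="Text to classify"),
    outputs=gr.Textbox(label="Prediction"),
    title="AI-generated vs. human-written text",
)

if __name__ == "__main__":
    demo.launch()  # serves the web UI locally or inside a Space

With a wrapper like this, the removed input()/print() lines are unnecessary: the hosting environment calls predict for each submitted text and displays the returned label.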