Spaces:
Sleeping
Sleeping
manu
committed on
Commit
•
3d93b85
1
Parent(s):
c3be4cc
updated responses to actual labels
Browse files
app.py
CHANGED
@@ -8,6 +8,7 @@ checkpoint="MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli"
|
|
8 |
|
9 |
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
|
10 |
model=AutoModelForSequenceClassification.from_pretrained(checkpoint)
|
|
|
11 |
|
12 |
title = "The Seagull story"
|
13 |
description = """
|
@@ -68,7 +69,7 @@ def generate_tone(index,question):
|
|
68 |
input = tokenizer(passages[index], question, truncation=True, return_tensors="pt")
|
69 |
output = model(input["input_ids"].to("cpu")) # device = "cuda:0" or "cpu"
|
70 |
prediction = torch.softmax(output["logits"][0], -1).tolist()
|
71 |
-
return prediction
|
72 |
|
73 |
passages=["General","Pier","Boat","Island"]
|
74 |
|
|
|
8 |
|
9 |
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
|
10 |
model=AutoModelForSequenceClassification.from_pretrained(checkpoint)
|
11 |
+
answers=['Yes',"Doesn't matter","No"]
|
12 |
|
13 |
title = "The Seagull story"
|
14 |
description = """
|
|
|
69 |
input = tokenizer(passages[index], question, truncation=True, return_tensors="pt")
|
70 |
output = model(input["input_ids"].to("cpu")) # device = "cuda:0" or "cpu"
|
71 |
prediction = torch.softmax(output["logits"][0], -1).tolist()
|
72 |
+
return answers[np.argmax(prediction)]
|
73 |
|
74 |
passages=["General","Pier","Boat","Island"]
|
75 |
|