manu committed
Commit a356a5d
1 Parent(s): 89ca1bd

converted to tf

Files changed (1)
  1. app.py +6 -11
app.py CHANGED
@@ -1,13 +1,13 @@
  import numpy as np
  import gradio as gr
- import torch
- from transformers import AutoModelForSequenceClassification, AutoTokenizer
+ import tensorflow as tf
+ from transformers import TFAutoModelForSequenceClassification, AutoTokenizer
 
  checkpoint="manuu01/DeBERTa-SeagullStory"
 
 
  tokenizer = AutoTokenizer.from_pretrained(checkpoint)
- model=AutoModelForSequenceClassification.from_pretrained(checkpoint)
+ model=TFAutoModelForSequenceClassification.from_pretrained(checkpoint,from_pt=True,dtype=tf.float16)
  answers=['Yes',"Doesn't matter","No"]
  f=open("Questions.txt","a")
 
@@ -67,14 +67,9 @@ def generate_tone(index,question):
  """.replace("\n","")
 
  ]
-
- input = tokenizer(passages[index], question, truncation=True, return_tensors="pt")
- output = model(input["input_ids"].to("cpu")) # device = "cuda:0" or "cpu"
- prediction = torch.softmax(output["logits"][0], -1).tolist()
- response=answers[np.argmax(prediction)]
-
- f.write(f'Passage = {index}\nQuestion: {question}\nAnswer: {response}\n\n')
- return response
+ inputs=tokenizer(passages[index],question, return_tensors="tf")
+ response=(model(**inputs).logits)
+ return model.config.id2label[np.argmax(response)]
 
  passages=["General","Pier","Boat","Island"]
 
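For context, a minimal sketch of the inference path this commit switches to: the checkpoint is loaded as a TensorFlow model (converting the PyTorch weights via from_pt=True), the question/passage pair is tokenized to TF tensors, and the highest-scoring logit is mapped back to a label through model.config.id2label. The answer() helper and the sample inputs below are illustrative assumptions, not part of app.py.

import numpy as np
from transformers import TFAutoModelForSequenceClassification, AutoTokenizer

checkpoint = "manuu01/DeBERTa-SeagullStory"

# The tokenizer is framework-agnostic; from_pt=True converts the PyTorch
# weights to TensorFlow on load, as in the commit above.
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = TFAutoModelForSequenceClassification.from_pretrained(checkpoint, from_pt=True)

def answer(passage, question):
    # Encode the (passage, question) pair as TensorFlow tensors.
    inputs = tokenizer(passage, question, return_tensors="tf")
    logits = model(**inputs).logits  # shape (1, num_labels)
    # Map the top logit index to its label name via the model config.
    return model.config.id2label[int(np.argmax(logits))]

# Hypothetical inputs, purely for illustration:
print(answer("A man walks along a pier at night.", "Was he alone?"))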