Update app.py
Browse files
app.py
CHANGED
@@ -15,10 +15,10 @@ from underthesea import word_tokenize
|
|
15 |
|
16 |
#from phoBERT import BERT_predict
|
17 |
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
|
23 |
#Load LSTM
|
24 |
fp = Path(__file__).with_name('lstm_model.h5')
|
@@ -40,7 +40,7 @@ def tokenizer_pad(tokenizer,comment_text,max_length=200):
|
|
40 |
return padded_sequences
|
41 |
|
42 |
def LSTM_predict(x):
|
43 |
-
|
44 |
|
45 |
pred_proba = LSTM_model.predict(x)[0]
|
46 |
|
@@ -51,7 +51,7 @@ def LSTM_predict(x):
|
|
51 |
return pred_proba
|
52 |
|
53 |
def GRU_predict(x):
|
54 |
-
|
55 |
|
56 |
|
57 |
pred_proba = GRU_model.predict(x)[0]
|
@@ -93,16 +93,3 @@ def judge(x):
|
|
93 |
result.append((result_lstm[i]+result_gru[i])/2)
|
94 |
|
95 |
return (result)
|
96 |
-
|
97 |
-
|
98 |
-
# if __name__ == "__main__":
|
99 |
-
# # print("Loading")
|
100 |
-
# # while(True):
|
101 |
-
# # string = input("\nMời nhập văn bản: ")
|
102 |
-
# # os.system('cls')
|
103 |
-
# # print(f"Văn bản đã nhập: {string}")
|
104 |
-
# # judge(string)
|
105 |
-
# interface = gr.Interface(fn=judge,
|
106 |
-
# inputs=gr.Textbox(lines=2, placeholder='Please write something', label="Input Text"),
|
107 |
-
# outputs=['text','plot','text'])
|
108 |
-
# interface.launch()
|
|
|
15 |
|
16 |
#from phoBERT import BERT_predict
|
17 |
|
18 |
+
#Load tokenizer
|
19 |
+
fp = Path(__file__).with_name('tokenizer.pkl')
|
20 |
+
with open(fp,mode="rb") as f:
|
21 |
+
tokenizer = pickle.load(f)
|
22 |
|
23 |
#Load LSTM
|
24 |
fp = Path(__file__).with_name('lstm_model.h5')
|
|
|
40 |
return padded_sequences
|
41 |
|
42 |
def LSTM_predict(x):
|
43 |
+
x = tokenizer_pad(tokenizer=tokenizer,comment_text=x)
|
44 |
|
45 |
pred_proba = LSTM_model.predict(x)[0]
|
46 |
|
|
|
51 |
return pred_proba
|
52 |
|
53 |
def GRU_predict(x):
|
54 |
+
x = tokenizer_pad(tokenizer=tokenizer,comment_text=x)
|
55 |
|
56 |
|
57 |
pred_proba = GRU_model.predict(x)[0]
|
|
|
93 |
result.append((result_lstm[i]+result_gru[i])/2)
|
94 |
|
95 |
return (result)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|