Update app.py
app.py
CHANGED
@@ -45,7 +45,7 @@ def chatbot_response(user_message):
 
     global bot_input_ids
     # encode the new user input, add the eos_token and return a tensor in Pytorch
-    new_user_input_ids = chatbot_tokenizer.encode(user_message +
+    new_user_input_ids = chatbot_tokenizer.encode(user_message + chatbot_tokenizer.eos_token, return_tensors='pt')
 
     # append the new user input tokens to the chat history
     if bot_input_ids is None:
@@ -57,7 +57,7 @@ def chatbot_response(user_message):
     chat_history_ids = chatbot_model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
 
     # pretty print last ouput tokens from bot
-    response =
+    response = chatbot_tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
 
     return response
 
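For context, a minimal self-contained sketch of how the two changed lines fit into chatbot_response, assuming a DialoGPT-style conversational model loaded through transformers. The checkpoint name, the torch.cat history-appending branch, and the final history update are assumptions, since those lines fall outside the hunks shown above; the unchanged generate call in the diff still references tokenizer.eos_token_id, which the sketch writes as chatbot_tokenizer.eos_token_id so it runs on its own.

# Sketch only, not the Space's actual app.py: the checkpoint name and the
# code outside the shown hunks are assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

chatbot_tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")  # assumed checkpoint
chatbot_model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

bot_input_ids = None  # running chat history of token ids

def chatbot_response(user_message):
    global bot_input_ids
    # encode the new user input, add the eos_token and return a tensor in PyTorch
    new_user_input_ids = chatbot_tokenizer.encode(
        user_message + chatbot_tokenizer.eos_token, return_tensors="pt"
    )

    # append the new user input tokens to the chat history
    if bot_input_ids is None:
        bot_input_ids = new_user_input_ids
    else:
        bot_input_ids = torch.cat([bot_input_ids, new_user_input_ids], dim=-1)

    # generate a reply, capping the whole conversation at 1000 tokens
    chat_history_ids = chatbot_model.generate(
        bot_input_ids, max_length=1000, pad_token_id=chatbot_tokenizer.eos_token_id
    )

    # decode only the newly generated tokens (everything after the prompt)
    response = chatbot_tokenizer.decode(
        chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True
    )

    # carry the full exchange forward as the next turn's history (assumed behaviour)
    bot_input_ids = chat_history_ids
    return response

The slice chat_history_ids[:, bot_input_ids.shape[-1]:] is the point of the second changed line: generate returns the prompt and the reply in one tensor, so dropping the first bot_input_ids.shape[-1] tokens leaves only the bot's new reply to return.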