Update app.py
app.py
CHANGED
@@ -76,15 +76,15 @@ chatbot_model_name = "microsoft/DialoGPT-medium"
 tokenizer = AutoTokenizer.from_pretrained(chatbot_model_name)
 model = AutoModelForCausalLM.from_pretrained(chatbot_model_name)
 
-cmax_token_limit = tokenizer.max_model_input_sizes[chatbot_model_name]
-print(f"Chat bot Maximum token limit for {chatbot_model_name}: {cmax_token_limit}")
+#cmax_token_limit = tokenizer.max_model_input_sizes[chatbot_model_name]
+#print(f"Chat bot Maximum token limit for {chatbot_model_name}: {cmax_token_limit}")
 
 # Load the SQL Model
 sql_model_name = "microsoft/tapex-large-finetuned-wtq"
 sql_tokenizer = TapexTokenizer.from_pretrained(sql_model_name)
 sql_model = BartForConditionalGeneration.from_pretrained(sql_model_name)
 
-max_token_limit =
+max_token_limit = sql_tokenizer.max_model_input_sizes[sql_model_name]
 print(f"SQL Maximum token limit for {sql_model_name}: {max_token_limit}")
 
 #sql_response = None
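Note: the DialoGPT limit lookup appears to have been commented out because max_model_input_sizes is a dict keyed only by checkpoint names the tokenizer class knows about, so an unknown name raises KeyError. As a minimal sketch (not part of this commit), a safer pattern is to fall back to the tokenizer's model_max_length when the name is missing; the model names below are the same ones used in the diff.

from transformers import AutoTokenizer, TapexTokenizer

chatbot_model_name = "microsoft/DialoGPT-medium"
sql_model_name = "microsoft/tapex-large-finetuned-wtq"

tokenizer = AutoTokenizer.from_pretrained(chatbot_model_name)
sql_tokenizer = TapexTokenizer.from_pretrained(sql_model_name)

# Fall back to model_max_length if the checkpoint name is not a key
# in the tokenizer's max_model_input_sizes mapping.
cmax_token_limit = tokenizer.max_model_input_sizes.get(
    chatbot_model_name, tokenizer.model_max_length
)
max_token_limit = sql_tokenizer.max_model_input_sizes.get(
    sql_model_name, sql_tokenizer.model_max_length
)

print(f"Chat bot maximum token limit for {chatbot_model_name}: {cmax_token_limit}")
print(f"SQL maximum token limit for {sql_model_name}: {max_token_limit}")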