teaevo committed
Commit 59c5eff · 1 Parent(s): 0279b82

Update app.py

Files changed (1)
  1. app.py +5 -2
app.py CHANGED
@@ -45,9 +45,12 @@ def predict(input, history=[]):
     if is_question:
         sql_encoding = sql_tokenizer(table=table, query=input + sql_tokenizer.eos_token, return_tensors="pt")
         sql_outputs = sql_model.generate(**sql_encoding)
-        response = sql_tokenizer.batch_decode(sql_outputs, skip_special_tokens=True)
 
-        history.append(response)
+        sql_response = sql_tokenizer.batch_decode(sql_outputs, skip_special_tokens=True)
+
+        # Convert the SQL model's response to token IDs and add to the history
+        sql_response_ids = tokenizer.encode(sql_response + tokenizer.eos_token, return_tensors='pt')
+        history.extend(sql_response_ids.squeeze().tolist())
+
         '''
         bot_input_ids = torch.cat([torch.LongTensor(history), sql_encoding], dim=-1)
         history = sql_model.generate(bot_input_ids, max_length=1000, pad_token_id=sql_tokenizer.eos_token_id).tolist()
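
For context, a minimal standalone sketch of the pattern the added lines follow, assuming a TAPEX-style table-QA model for sql_model/sql_tokenizer and a DialoGPT-style chat tokenizer for tokenizer; the checkpoints and table below are placeholders, not taken from this repo. Note that batch_decode returns a list of strings, so the sketch takes the first element before re-encoding it for the chat history.

# Hedged sketch only: checkpoints and table contents are assumptions, not from app.py.
import pandas as pd
from transformers import AutoTokenizer, BartForConditionalGeneration, TapexTokenizer

sql_tokenizer = TapexTokenizer.from_pretrained("microsoft/tapex-base-finetuned-wtq")
sql_model = BartForConditionalGeneration.from_pretrained("microsoft/tapex-base-finetuned-wtq")
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")  # chat-side tokenizer

table = pd.DataFrame({"city": ["paris", "tokyo"], "population": ["2100000", "14000000"]})
history = []

query = "which city has the larger population?"
sql_encoding = sql_tokenizer(table=table, query=query, return_tensors="pt")
sql_outputs = sql_model.generate(**sql_encoding)

# batch_decode returns a list of strings; keep the first (and only) answer
sql_response = sql_tokenizer.batch_decode(sql_outputs, skip_special_tokens=True)[0]

# Re-encode the answer with the chat tokenizer and append its token IDs to the history
sql_response_ids = tokenizer.encode(sql_response + tokenizer.eos_token, return_tensors="pt")
history.extend(sql_response_ids.squeeze().tolist())
print(sql_response, history)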