grahamwhiteuk committed on
Commit
f466ad8
1 Parent(s): bce909e

fix: use_tqdm not used by the model error

Browse files
Files changed (1) hide show
  1. model.py +1 -1
model.py CHANGED
@@ -161,7 +161,7 @@ def generate_text(messages, criteria_name):
161
  elif inference_engine == "VLLM":
162
  with torch.no_grad():
163
  # output = model.generate(chat, sampling_params, use_tqdm=False)
164
- output = model.generate(chat, use_tqdm=False)
165
 
166
  label, prob_of_risk = parse_output(output[0])
167
  else:
 
161
  elif inference_engine == "VLLM":
162
  with torch.no_grad():
163
  # output = model.generate(chat, sampling_params, use_tqdm=False)
164
+ output = model.generate(chat)
165
 
166
  label, prob_of_risk = parse_output(output[0])
167
  else: