hkoppen committed
Commit: d513adc
Parent(s): effd2e2

Fix streaming error

Files changed (1):
  document_qa_engine.py  +2 -1
document_qa_engine.py CHANGED
@@ -80,7 +80,8 @@ def create_inference_pipeline(document_store, model_name, api_key):
                                              )
     elif "gpt" in model_name:
         generator = OpenAIChatGenerator(api_key=Secret.from_token(api_key), model=model_name,
-                                        generation_kwargs={"max_tokens": MAX_TOKENS, "stream": False}
+                                        generation_kwargs={"max_tokens": MAX_TOKENS},
+                                        streaming_callback=lambda chunk: print(chunk.content, end="", flush=True),
                                         )
     else:
         generator = HuggingFaceTGIChatGenerator(token=Secret.from_token(api_key), model=model_name,
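For context, here is a minimal, self-contained sketch of the pattern the patch switches to, assuming Haystack 2.x. The model name, API key, and MAX_TOKENS value below are placeholders, not values taken from the rest of document_qa_engine.py.

# Sketch (assumed Haystack 2.x): stream tokens from OpenAIChatGenerator via a callback,
# mirroring the change above. MAX_TOKENS, the key, and the model name are placeholders.
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from haystack.utils import Secret

MAX_TOKENS = 500       # placeholder; defined elsewhere in the real module
api_key = "sk-..."     # placeholder OpenAI API key

generator = OpenAIChatGenerator(
    api_key=Secret.from_token(api_key),
    model="gpt-4o-mini",  # placeholder; any name containing "gpt" reaches this branch
    generation_kwargs={"max_tokens": MAX_TOKENS},
    # Streaming is enabled by the callback, which receives each StreamingChunk as it
    # arrives; the removed {"stream": False} kwarg is what triggered the streaming error.
    streaming_callback=lambda chunk: print(chunk.content, end="", flush=True),
)

result = generator.run(messages=[ChatMessage.from_user("Summarize the document.")])
# result["replies"][0] holds the full ChatMessage once streaming has finished.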