|
import os |
|
os.system("pip install openai==0.28.1") |
|
import openai |
|
import gradio as gr |
|
|
|
# Custom CSS injected into the Gradio app (passed via css= below):
# stretches the chat message area to 80% of the viewport height and
# fixes the input textarea's height.
style = """

div.contain > :first-child > :first-child > :first-child{

    height: 80vh !important;

}

textarea {

    height: 92px!important

}

"""
|
|
|
# Configure the legacy (0.28.x) OpenAI client from the environment.
# Both variables are required: a missing one raises KeyError at startup,
# which fails fast rather than running with a broken configuration.
openai.api_key = os.environ['OPENAI_API_KEY']

# Custom API endpoint (e.g. a proxy or gateway) rather than the default
# api.openai.com base URL.
openai.api_base = os.environ['API_URL']
|
|
|
def predict(message, history, temperature):
    """Stream a chat completion for *message*, given the prior conversation.

    Args:
        message: The latest user message (str).
        history: Gradio chat history as a list of (user, assistant) pairs.
        temperature: Sampling temperature forwarded to the API (float, 0.0-2.0).

    Yields:
        str: The assistant reply accumulated so far, re-yielded as each
        streamed chunk arrives (Gradio renders each yield as the live reply).
    """
    # Convert Gradio's pair-based history into the OpenAI messages format.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = openai.ChatCompletion.create(
        model='gpt-3.5-turbo-1106',
        max_tokens=4000,
        messages=history_openai_format,
        temperature=temperature,
        stream=True,
    )

    partial_message = ""
    for chunk in response:
        # BUG FIX: the first streamed delta is typically {"role": "assistant"}
        # — non-empty but without a "content" key — so the original
        # delta['content'] lookup raised KeyError on the very first chunk.
        # .get() skips role-only (and content-less final) deltas safely.
        content = chunk['choices'][0]['delta'].get('content')
        if content:
            partial_message += content
            yield partial_message
|
|
|
# Build and launch the chat UI: `predict` is the streaming handler, the slider
# feeds its `temperature` parameter, and the custom CSS (`style`) resizes the
# chat area.  Button labels and the slider description are user-facing Russian
# strings.  `.queue()` is required for the generator (streaming) handler.
# NOTE(review): basic-auth credentials are hardcoded in source — move them to
# environment variables before sharing/deploying this file.
gr.ChatInterface(predict, additional_inputs=[gr.Slider(0.0, 2.0, step=0.1, value=1.0, label="Temperature", info="Более низкие значения температуры приводят к более стабильным результатам, а более высокие значения дают более разнообразные и творческие результаты впролоть до шизоидных.")], additional_inputs_accordion_name="задать температуру", autofocus=True, theme=gr.themes.Soft(), css=style, submit_btn="отправить", stop_btn="прервать", retry_btn="⭮ заново", undo_btn="⮌ отменить", clear_btn="новый чат").queue().launch(auth=("rectal", "prolapse"))
|
|
|
|
|
|
|
|
|
|
|
|