import os

import gradio as gr
from langchain.agents.openai_assistant import OpenAIAssistantRunnable

# Credentials are read from environment variables.
apikey = os.getenv("openAI")      # OpenAI API key ("sk-..." value)
agentkey = os.getenv("asstID")    # Assistant ID supplied by the client
os.environ["OPENAI_API_KEY"] = apikey

# Wrap the existing OpenAI Assistant so it can be invoked like a LangChain runnable.
interpreter_assistant = OpenAIAssistantRunnable(assistant_id=agentkey)

# Quick manual test:
# output = interpreter_assistant.invoke({"content": "Hello?"})
# output[0].content[0].text.value


def chat_response(message, history):
    """Send the user's message to the assistant and return the text of its reply."""
    output = interpreter_assistant.invoke({"content": message})
    response = output[0].content[0].text.value
    return response


# Hide Gradio's block label and footer.
css = """
label[data-testid="block-label"] { display: none !important; }
footer { display: none !important; }
"""

# Force the dark theme by appending ?__theme=dark to the URL on page load.
js_func = """
function refresh() {
    const url = new URL(window.location);
    if (url.searchParams.get('__theme') !== 'dark') {
        url.searchParams.set('__theme', 'dark');
        window.location.href = url.href;
    }
}
"""

with gr.Blocks(css=css, js=js_func, theme="monochrome") as demo:
    chatbot = gr.ChatInterface(chat_response)

demo.launch(share=True)
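
# Optional sanity check (a sketch, not part of the app): the "Hello?" prompt mirrors
# the commented-out test above. Run this in a separate session, or before
# demo.launch(), to confirm the assistant replies without going through the UI.
#
# print(chat_response("Hello?", history=[]))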