Update chain_app.py
chain_app.py CHANGED (+38, -1)
@@ -49,6 +49,10 @@ async def chat_profile():
         # name='Image-Generation',
         # markdown_description='Our image generation model, has a performance like midjourney',
         # ),
+        cl.ChatProfile(
+            name="gpt4-o-mini",
+            markdown_description="The best state of the art openai model",
+        ),
         cl.ChatProfile(
             name="GPT-4",
             markdown_description="OpenAI's GPT-4 model",
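For context, a minimal sketch of how the new "gpt4-o-mini" profile entry is typically wired up in Chainlit, assuming the surrounding chat_profile() function is decorated with @cl.set_chat_profiles and returns this list; the decorator and the on_chat_start lookup are standard Chainlit usage, not shown in this diff:

import chainlit as cl

@cl.set_chat_profiles
async def chat_profile():
    # Each ChatProfile appears in the profile picker; `name` is the value
    # later returned by cl.user_session.get("chat_profile").
    return [
        cl.ChatProfile(
            name="gpt4-o-mini",
            markdown_description="The best state of the art openai model",
        ),
        cl.ChatProfile(
            name="GPT-4",
            markdown_description="OpenAI's GPT-4 model",
        ),
    ]

@cl.on_chat_start
async def on_chat_start():
    # The profile the user selected is stored in the user session.
    chat_profile = cl.user_session.get("chat_profile")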
@@ -162,6 +166,28 @@ async def on_chat_start():
         await cl.Message(
             content='my name is Dorna, Your AI Assistant designed by neural nexus team. i was made by Artin Daneshvar and Sadra Noadoust, 2 iranian students!'
         ).send()
+        if chat_profile == 'gpt4-o-mini':
+            await cl.ChatSettings(
+                [
+                    Select(
+                        id="OpenAI-Model",
+                        label="OpenAI - Model",
+                        values=["gpt-4"],
+                        initial_index=0,
+                    ),
+                    Slider(
+                        id="Temperature",
+                        label="Model Temperature",
+                        initial=0.7,
+                        min=0,
+                        max=1,
+                        step=0.1,
+                    ),
+                ]
+            ).send()
+            await cl.Message(
+                content="I'm one of the best models OpenAI has released, and I was configured by two Iranian students to help you."
+            ).send()
 
         # if chat_profile == 'Image-Generation':
         # image = cl.Image(path='cat.png', name="result", display="inline")
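The Select and Slider widgets used above come from chainlit.input_widget, and the values the user picks in the settings panel are delivered back to the app as a dict keyed by widget id. A minimal sketch of the import and a settings-update handler; the handler name setup_settings and the session keys are illustrative assumptions, not part of this diff:

import chainlit as cl
from chainlit.input_widget import Select, Slider

@cl.on_settings_update
async def setup_settings(settings: dict):
    # `settings` maps widget ids to their current values, e.g.
    # {"OpenAI-Model": "gpt-4", "Temperature": 0.7}
    cl.user_session.set("model", settings["OpenAI-Model"])
    cl.user_session.set("temperature", settings["Temperature"])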
@@ -453,7 +479,18 @@ async def main(message: cl.Message):
         await cl.Message(
             content=model_response
         ).send()
-
+    elif chat_profile == "gpt4-o-mini":
+        completion = openai_client.chat.completions.create(
+            model="gpt-4o-mini",
+            messages=[
+                {"role": "system", "content": "You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust"},
+                {"role": "user", "content": message.content}
+            ]
+        )
+        model_response = completion.choices[0].message.content
+        await cl.Message(
+            content=model_response
+        ).send()
     # elif chat_profile == 'Image-Generation':
     # result = hf_image_client.predict(
     # prompt=message.content,
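The new elif branch relies on an openai_client that is created elsewhere in chain_app.py and is not shown in this diff. A minimal sketch, assuming the client is built with the openai>=1.0 SDK and that the Temperature value from the ChatSettings panel is read back from the session; the environment variable name, the helper function name, and the temperature wiring are assumptions for illustration:

import os
from openai import OpenAI
import chainlit as cl

# Assumed construction of the client used as `openai_client` in the diff.
openai_client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])

async def answer_with_gpt4o_mini(message: cl.Message) -> None:
    # Use the temperature chosen in the ChatSettings panel, defaulting to 0.7.
    temperature = cl.user_session.get("temperature") or 0.7
    completion = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        temperature=temperature,
        messages=[
            {"role": "system", "content": "You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust"},
            {"role": "user", "content": message.content},
        ],
    )
    # Send the completion text back to the Chainlit UI.
    await cl.Message(content=completion.choices[0].message.content).send()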