Spaces:
Runtime error
Runtime error
Update chain_app.py
Browse files- chain_app.py +66 -0
chain_app.py
CHANGED
|
@@ -144,6 +144,11 @@ async def chat_profile():
|
|
| 144 |
name='Aya-23B',
|
| 145 |
markdown_description='Cohere open sourced AI model with 23B parameters'
|
| 146 |
),
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 147 |
cl.ChatProfile(
|
| 148 |
name="Llama-3-8B",
|
| 149 |
markdown_description="Meta open source model Llama-3 with 8B parameters",
|
|
@@ -738,9 +743,57 @@ async def on_chat_start():
|
|
| 738 |
),
|
| 739 |
]
|
| 740 |
).send()
|
|
|
|
| 741 |
await cl.Message(
|
| 742 |
content="I'm the small Llama! One of the best open source models released by Meta — I am the small version of Meta's open source LLMs. I was configured by Artin Daneshvar and Sadra Noadoust, two Iranian students, to help you. How can I assist you today?"
|
| 743 |
).send()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 744 |
if chat_profile == 'gemma2-9B':
|
| 745 |
await cl.ChatSettings(
|
| 746 |
[
|
|
@@ -1361,6 +1414,19 @@ async def main(message: cl.Message):
|
|
| 1361 |
complete_content += event.text
|
| 1362 |
await cl.Message(content=complete_content).send()
|
| 1363 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1364 |
|
| 1365 |
|
| 1366 |
@cl.on_settings_update
|
|
|
|
| 144 |
name='Aya-23B',
|
| 145 |
markdown_description='Cohere open sourced AI model with 23B parameters'
|
| 146 |
),
|
| 147 |
+
cl.ChatProfile(
|
| 148 |
+
name='Command-R-Plus',
|
| 149 |
+
markdown_description='Cohere open sourced AI model named Command R +'
|
| 150 |
+
|
| 151 |
+
),
|
| 152 |
cl.ChatProfile(
|
| 153 |
name="Llama-3-8B",
|
| 154 |
markdown_description="Meta open source model Llama-3 with 8B parameters",
|
|
|
|
| 743 |
),
|
| 744 |
]
|
| 745 |
).send()
|
| 746 |
+
|
| 747 |
await cl.Message(
|
| 748 |
content="I'm the small Llama! One of the best open source models released by Meta — I am the small version of Meta's open source LLMs. I was configured by Artin Daneshvar and Sadra Noadoust, two Iranian students, to help you. How can I assist you today?"
|
| 749 |
).send()
|
| 750 |
+
|
| 751 |
+
if chat_profile == 'Aya-23B':
|
| 752 |
+
await cl.ChatSettings(
|
| 753 |
+
[
|
| 754 |
+
Select(
|
| 755 |
+
id="Cohere-Model",
|
| 756 |
+
label="Cohere - Model",
|
| 757 |
+
values=["Aya-23B"],
|
| 758 |
+
initial_index=0,
|
| 759 |
+
),
|
| 760 |
+
Slider(
|
| 761 |
+
id="Temperature",
|
| 762 |
+
label="Model Temperature",
|
| 763 |
+
initial=0.7,
|
| 764 |
+
min=0,
|
| 765 |
+
max=1,
|
| 766 |
+
step=0.1,
|
| 767 |
+
),
|
| 768 |
+
]
|
| 769 |
+
).send()
|
| 770 |
+
await cl.Message(
|
| 771 |
+
content='I\'m one of the best open source models that Cohere released. I was configured by two Iranian students named Artin Daneshvar and Sadra Noadoust to help you out!'
|
| 772 |
+
).send()
|
| 773 |
+
|
| 774 |
+
if chat_profile == 'Command-R-Plus':
|
| 775 |
+
await cl.ChatSettings(
|
| 776 |
+
[
|
| 777 |
+
Select(
|
| 778 |
+
id="Cohere-Model",
|
| 779 |
+
label="Cohere - Model",
|
| 780 |
+
values=["Command-R-Plus"],
|
| 781 |
+
initial_index=0,
|
| 782 |
+
),
|
| 783 |
+
Slider(
|
| 784 |
+
id="Temperature",
|
| 785 |
+
label="Model Temperature",
|
| 786 |
+
initial=0.7,
|
| 787 |
+
min=0,
|
| 788 |
+
max=1,
|
| 789 |
+
step=0.1,
|
| 790 |
+
),
|
| 791 |
+
]
|
| 792 |
+
).send()
|
| 793 |
+
await cl.Message(
|
| 794 |
+
content='I\'m one of the best open source models that Cohere released. I was configured by two Iranian students named Artin Daneshvar and Sadra Noadoust to help you out!'
|
| 795 |
+
).send()
|
| 796 |
+
|
| 797 |
if chat_profile == 'gemma2-9B':
|
| 798 |
await cl.ChatSettings(
|
| 799 |
[
|
|
|
|
| 1414 |
complete_content += event.text
|
| 1415 |
await cl.Message(content=complete_content).send()
|
| 1416 |
|
| 1417 |
+
elif chat_profile == 'Command-R-Plus':
|
| 1418 |
+
stream = co.chat_stream(
|
| 1419 |
+
model='command-r-plus',
|
| 1420 |
+
message=message.content,
|
| 1421 |
+
temperature=0.3,
|
| 1422 |
+
chat_history=[],
|
| 1423 |
+
prompt_truncation='AUTO',
|
| 1424 |
+
)
|
| 1425 |
+
complete_content = ''
|
| 1426 |
+
for event in stream:
|
| 1427 |
+
if event.event_type == 'text-generation':
|
| 1428 |
+
complete_content += event.text
|
| 1429 |
+
await cl.Message(content=complete_content).send()
|
| 1430 |
|
| 1431 |
|
| 1432 |
@cl.on_settings_update
|