Artin2009 committed on
Commit a6e9dcf
1 Parent(s): 9966bb9

Update chain_app.py

Files changed (1)
  1. chain_app.py +57 -2
chain_app.py CHANGED
@@ -6,6 +6,8 @@ import requests
 from chainlit.input_widget import Select, Slider
 import os
 import cohere
+from huggingface_hub import InferenceClient
+
 
 hf_token = os.environ.get("HF_TOKEN")
 openai_api_key = os.environ.get('OPENAI_API_KEY')
@@ -77,9 +79,21 @@ async def chat_profile():
             name="TTS",
             markdown_description="OpenAI's Text-to-Speech model",
         ),
+        cl.ChatProfile(
+            name="Llama-3.1-405B",
+            markdown_description="Meta Open Source Model Llama with 405B parameters",
+        ),
+        cl.ChatProfile(
+            name="Llama-3.1-70B",
+            markdown_description="Meta Open Source Model Llama with 70B parameters",
+        ),
+        cl.ChatProfile(
+            name="Llama-3.1-8B",
+            markdown_description="Meta Open Source Model Llama with 8B parameters",
+        ),
         cl.ChatProfile(
             name="Llama-3-70B",
-            markdown_description="Meta Open Source model Llama-2 with 70B parameters",
+            markdown_description="Meta Open Source model Llama-3 with 70B parameters",
         ),
         cl.ChatProfile(
             name='Aya-23B',
@@ -312,6 +326,30 @@ async def on_chat_start():
         await cl.Message(
             content="Im TTS. of the best models OpenAI ever created. i can convert text to speech! . i was configured by Artin Daneshvar and Sadra Noadoust, 2 iranian students to help you, how can i assist you today ? "
         ).send()
+
+    if chat_profile == 'Llama-3.1-405B':
+        await cl.ChatSettings(
+            [
+                Select(
+                    id="Meta-Model",
+                    label="Meta - Model",
+                    values=["Llama-3-70B"],
+                    initial_index=0,
+                ),
+                Slider(
+                    id="Temperature",
+                    label="Model Temperature",
+                    initial=0.7,
+                    min=0,
+                    max=1,
+                    step=0.1,
+                ),
+            ]
+        ).send()
+        await cl.Message(
+            content="Im the big Llama-3.1!. one of the best open source models released by Meta! i am the Big version of meta's open source LLMs., i was configured by Artin Daneshvar and Sadra Noadoust, 2 iranian students to help you, how can i assist you today ? "
+        ).send()
+
     if chat_profile == 'Llama-3-70B':
         await cl.ChatSettings(
             [
@@ -332,7 +370,7 @@ async def on_chat_start():
             ]
         ).send()
         await cl.Message(
-            content="Im the big Llama!. one of the best open source models released by Meta! i am the Big version of meta's open source LLMs., i was configured by Artin Daneshvar and Sadra Noadoust, 2 iranian students to help you, how can i assist you today ? "
+            content="Im the big Llama-3!. one of the best open source models released by Meta! i am the Big version of meta's open source LLMs., i was configured by Artin Daneshvar and Sadra Noadoust, 2 iranian students to help you, how can i assist you today ? "
         ).send()
     if chat_profile == 'Llama-3-8B':
         await cl.ChatSettings(
@@ -587,6 +625,23 @@ async def main(message: cl.Message):
             elements=elements,
         ).send()
 
+    elif chat_profile == 'Llama-3.1-405B':
+        client = InferenceClient(
+            "meta-llama/Meta-Llama-3.1-405B-Instruct",
+            token=hf_token,
+        )
+
+        for message in client.chat_completion(
+            messages=[{"role": "user", "content": f'{message.content}'}],
+            max_tokens=500,
+            stream=True,
+        ):
+            complete_message += message.choiches[0].delta.content
+        await cl.Message(
+            content=complete_message,
+        ).send()
+
+
     elif chat_profile == 'Llama-3-70B':
         completion = groq_client.chat.completions.create(
             model="llama3-70b-8192",