Artin2009 committed on
Commit a4abe76
1 Parent(s): 933f2fe

Update chain_app.py

Files changed (1)
  chain_app.py  +15 -38
chain_app.py CHANGED
@@ -2387,19 +2387,11 @@ async def main(message: cl.Message):
             stop=None,
         )

-        complete_content = ""
-
-        # Iterate over each chunk
         for chunk in completion:
-            # Retrieve the content from the current chunk
-            content = chunk.choices[0].delta.content
-
+            # Retrieve the content from the current chunk
             # Check if the content is not None before concatenating it
-            if content is not None:
-                complete_content += content
-
-        # Send the concatenated content as a message
-        await cl.Message(content=complete_content).send()
+            if chunk is not None:
+                await msg.stream_token(chunk.choices[0].delta.content)

     elif chat_profile == 'Llama-3-70B':
         completion = groq_client.chat.completions.create(
@@ -2417,19 +2409,11 @@ async def main(message: cl.Message):
             stop=None,
         )

-        complete_content = ""
-
-        # Iterate over each chunk
         for chunk in completion:
-            # Retrieve the content from the current chunk
-            content = chunk.choices[0].delta.content
-
+            # Retrieve the content from the current chunk
             # Check if the content is not None before concatenating it
-            if content is not None:
-                complete_content += content
-
-        # Send the concatenated content as a message
-        await cl.Message(content=complete_content).send()
+            if chunk is not None:
+                await msg.stream_token(chunk.choices[0].delta.content)

     elif chat_profile == 'Llama-3-8B':
         completion = groq_client.chat.completions.create(
@@ -2447,23 +2431,16 @@ async def main(message: cl.Message):
             stop=None,
         )

-        complete_content = ""
-
-        # Iterate over each chunk
         for chunk in completion:
-            # Retrieve the content from the current chunk
-            content = chunk.choices[0].delta.content
-
+            # Retrieve the content from the current chunk
             # Check if the content is not None before concatenating it
-            if content is not None:
-                complete_content += content
-
-        # Send the concatenated content as a message
-        await cl.Message(content=complete_content).send()
+            if chunk is not None:
+                await msg.stream_token(chunk.choices[0].delta.content)

     elif chat_profile == 'gemma2-27B':
         client = Client("gokaygokay/Gemma-2-llamacpp")
-        result = client.predict(
+        stream_list = []
+        for token in client.predict(
             message=message.content,
             model="gemma-2-27b-it-Q5_K_M.gguf",
             system_message=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
@@ -2473,10 +2450,10 @@ async def main(message: cl.Message):
             top_k=40,
             repeat_penalty=1.1,
             api_name="/chat"
-        )
-        await cl.Message(
-            content=result
-        ).send()
+        ):
+            stream_list.append(token)
+        for res in stream_list:
+            await msg.stream_token(res)

     elif chat_profile == 'gemma2-9B':
         completion = groq_client.chat.completions.create(
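The hunks above replace the buffer-then-send pattern (accumulating the completion in complete_content and sending it once with cl.Message(...).send()) with token streaming into an existing Chainlit message via msg.stream_token(). A minimal sketch of that streaming pattern follows; it assumes a msg created and sent earlier in main() and a Groq call made with stream=True, neither of which appears in these hunks, and the model name and setup lines are illustrative rather than the author's exact code.

# Minimal sketch of the streaming pattern this commit switches to.
# Assumptions (not shown in the diff): "msg" is a Chainlit message created and
# sent before the loop, and the Groq request uses stream=True so that chunks
# expose .choices[0].delta.content. The model id below is illustrative.
import chainlit as cl
from groq import Groq

groq_client = Groq()  # reads GROQ_API_KEY from the environment


@cl.on_message
async def main(message: cl.Message):
    msg = cl.Message(content="")
    await msg.send()  # empty message that tokens will be streamed into

    completion = groq_client.chat.completions.create(
        model="llama3-70b-8192",  # illustrative model id
        messages=[{"role": "user", "content": message.content}],
        stream=True,
    )

    for chunk in completion:
        token = chunk.choices[0].delta.content
        if token is not None:  # the final chunk carries no content
            await msg.stream_token(token)

    await msg.update()  # finalize the streamed message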