Update chain_app.py
chain_app.py  +28 -13
@@ -1322,21 +1322,36 @@ async def main(message: cl.Message):
         # ).send()
 
     elif chat_profile == 'Llama-3.1-405B':
-
-        "
-
-
-
-
-
-
+        completion = groq_client.chat.completions.create(
+            model="llama-3.1-405b-reasoning",
+            messages=[
+                {
+                    "role": "user",
+                    "content": message.content
+                }
+            ],
+            temperature=1,
+            max_tokens=1024,
+            top_p=1,
             stream=True,
-
-
-
-
-
+            stop=None,
+        )
+
+        complete_content = ""
+
+        # Iterate over each chunk
+        for chunk in completion:
+            # Retrieve the content from the current chunk
+            content = chunk.choices[0].delta.content
+
+            # Check if the content is not None before concatenating it
+            if content is not None:
+                complete_content += content
+
+        # Send the concatenated content as a message
+        await cl.Message(content=complete_content).send()
 
+
     elif chat_profile == 'Llama-3.1-70B':
         completion = groq_client.chat.completions.create(
             model="llama-3.1-70b-versatile",