seawolf2357
committed on
Commit • 55aa389 • 1 Parent(s): 76e110d
Update app.py
app.py
CHANGED
@@ -49,15 +49,14 @@ async def generate_response(user_input, history, system_message, max_tokens, tem
         messages.append({"role": "assistant", "content": val[1]})
     messages.append({"role": "user", "content": user_input})
 
-    # The function is synchronous, so await is not used
     try:
-        response = hf_client.chat_completion(messages, max_tokens=max_tokens, stream=False, temperature=temperature, top_p=top_p)
-
+        response = await hf_client.chat_completion(messages, max_tokens=max_tokens, stream=False, temperature=temperature, top_p=top_p)
+        # Access the correct attribute to extract the response content
+        return response.choices[0].text  # use 'text' instead of 'delta', or whichever attribute the API docs specify
     except Exception as e:
         logging.error(f"An error occurred: {e}")
         return "An error occurred. Please try again."
 
-
 # Create and run the Discord bot instance
 discord_client = MyClient(intents=intents)
 discord_client.run(os.getenv('DISCORD_TOKEN'))
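For reference, the added `await` only makes sense if `hf_client` is an asynchronous client. Below is a minimal, hypothetical sketch (not the repository's actual app.py) of how such a call is commonly wired up with huggingface_hub's AsyncInferenceClient; the model id, token variable, and sampling values are placeholder assumptions, and for a non-streaming chat_completion the reply text is normally read from choices[0].message.content.

# Hypothetical sketch, assuming hf_client is a huggingface_hub.AsyncInferenceClient;
# model id, token source, and sampling values are placeholders, not the repo's settings.
import asyncio
import logging
import os

from huggingface_hub import AsyncInferenceClient

hf_client = AsyncInferenceClient(
    model="mistralai/Mistral-7B-Instruct-v0.2",   # placeholder model id
    token=os.getenv("HF_TOKEN"),                  # placeholder token variable
)

async def generate_response(user_input: str) -> str:
    messages = [{"role": "user", "content": user_input}]
    try:
        # Awaiting works because AsyncInferenceClient.chat_completion is a coroutine.
        response = await hf_client.chat_completion(
            messages,
            max_tokens=512,
            stream=False,
            temperature=0.7,
            top_p=0.9,
        )
        # Non-streaming responses expose the reply under choices[0].message.content.
        return response.choices[0].message.content
    except Exception as e:
        logging.error(f"An error occurred: {e}")
        return "An error occurred. Please try again."

if __name__ == "__main__":
    print(asyncio.run(generate_response("Hello!")))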