Update app/llm.py
app/llm.py (+2 −1)

@@ -22,7 +22,7 @@ class GenModel(BaseModel):
 
 class ChatModel(BaseModel):
     question: list
-    system: str =
+    system: str = "You are a helpful AI assistant. You are chatting with a {0}. Help as much as you can."
     #Also continuously ask for possible symptoms in order to atat a conclusive ailment or sickness and possible solutions.Remember, response in English."
     temperature: float = 0.8
     seed: int = 101
@@ -75,6 +75,7 @@ def health():
 # Chat Completion API
 @llm_router.post("/chat/", tags=["llm"])
 async def chat(chatm:ChatModel, user: schemas.BaseUser = fastapi.Depends(current_active_user)):
+    chatm.system = chatm.system.format(user.email)
     try:
         st = time()
         output = llm_chat.create_chat_completion(
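A minimal sketch of how the two changed pieces are expected to interact: the `ChatModel` default `system` prompt carries a `{0}` placeholder, and the endpoint fills it with the authenticated user's email before the prompt reaches the chat-completion backend. The `build_messages` helper and the message dictionaries below are illustrative assumptions, not code from this repository; only the `ChatModel` fields and the `.format(user.email)` call mirror the diff.

```python
# Illustrative sketch only; ChatModel fields mirror the diff, the rest is assumed.
from pydantic import BaseModel


class ChatModel(BaseModel):
    question: list
    system: str = (
        "You are a helpful AI assistant. "
        "You are chatting with a {0}. Help as much as you can."
    )
    temperature: float = 0.8
    seed: int = 101


def build_messages(chatm: ChatModel, user_email: str) -> list:
    # Mirrors the added line in chat(): the {0} placeholder is replaced with the
    # user's email before the messages are handed to the completion call.
    system_prompt = chatm.system.format(user_email)
    messages = [{"role": "system", "content": system_prompt}]
    messages += [{"role": "user", "content": q} for q in chatm.question]
    return messages


if __name__ == "__main__":
    chatm = ChatModel(question=["What can you help me with?"])
    for msg in build_messages(chatm, "alice@example.com"):
        print(msg["role"], "->", msg["content"])
```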