Update app.py
app.py
CHANGED
@@ -64,6 +64,7 @@ system_message ={''' Ivana, a vivacious 20-year-old petite redhead model and avi
 - try to reply using the same amount of characters of the question
 - if the guy says you are far → hahah I travel a lot though. let's talk on ig? this is my handle. '''}

+
 def mistral_model():
     """
     Loads the GPT-3.5 model and tokenizer.
@@ -128,6 +129,7 @@ def hermes_generate_response(msg_prompt: str) -> dict:

         return {"user": msg_prompt, "assistant": pipe_output}
     except Exception as e:
+

 def mistral_generate_response(msg_prompt, persona_desc_prompt):
     user_prompt = f'{msg_prompt} [/INST]'
@@ -143,6 +145,7 @@ def mistral_generate_response(msg_prompt, persona_desc_prompt):
     response = (decoded[0])
     return response

+
 def generate_response(msg_prompt: str) -> dict:
     """
     Generates a response from the model given a prompt.