Update app.py
app.py
CHANGED
@@ -50,7 +50,7 @@ class ModelManager:
     def load_model(self, model_config):
         if model_config['name'] not in self.models:
             try:
-                # Use
+                # Corrected line: Use model_path instead of repo_id and filename
                 self.models[model_config['name']] = Llama.from_pretrained(model_path=model_config['repo_id'])
             except Exception as e:
                 print(f"Error loading model {model_config['name']}: {e}")
@@ -111,7 +111,7 @@ async def process_message(message):
     formatted_response = ""
     for model, response in unique_responses.items():
         formatted_response += f"**{model}:**\n{response}\n\n"
-
+
     curl_command = f"""
     curl -X POST -H "Content-Type: application/json" \\
         -d '{{"message": "{message}"}}' \\