Update librechat.yaml
Browse files — librechat.yaml (+4, −24)
librechat.yaml
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
# Configuration version (required)
|
2 |
-version: 1.0.
|
3 |
|
4 |
# Cache settings: Set to true to enable caching
|
5 |
cache: true
|
@@ -17,7 +17,7 @@ endpoints:
|
|
17 |
# Models configuration
|
18 |
models:
|
19 |
# List of default models to use. At least one value is required.
|
20 |
-        default: ["mistral-
|
21 |
# Fetch option: Set to true to fetch models from API.
|
22 |
fetch: true # Defaults to false.
|
23 |
|
@@ -46,28 +46,8 @@ endpoints:
|
|
46 |
|
47 |
# Add additional parameters to the request. Default params will be overwritten.
|
48 |
addParams:
|
49 |
-        safe_prompt:
|
50 |
|
51 |
# Drop Default params parameters from the request. See default params in guide linked below.
|
52 |
# NOTE: For Mistral, it is necessary to drop the following parameters or you will encounter a 422 Error:
|
53 |
-      dropParams: ["stop", "user", "frequency_penalty", "presence_penalty"]
|
54 |
-
|
55 |
-    # OpenRouter.ai Example
|
56 |
-    - name: "OpenRouter"
|
57 |
-      # For `apiKey` and `baseURL`, you can use environment variables that you define.
|
58 |
-      # recommended environment variables:
|
59 |
-      # Known issue: you should not use `OPENROUTER_API_KEY` as it will then override the `openAI` endpoint to use OpenRouter as well.
|
60 |
-      apiKey: "${OPENROUTER_KEY}"
|
61 |
-      baseURL: "https://openrouter.ai/api/v1"
|
62 |
-      models:
|
63 |
-        default: ["gpt-3.5-turbo"]
|
64 |
-        fetch: true
|
65 |
-      titleConvo: true
|
66 |
-      titleModel: "gpt-3.5-turbo"
|
67 |
-      summarize: false
|
68 |
-      summaryModel: "gpt-3.5-turbo"
|
69 |
-      forcePrompt: false
|
70 |
-      modelDisplayLabel: "OpenRouter"
|
71 |
-
|
72 |
-# See the Custom Configuration Guide for more information:
|
73 |
-# https://docs.librechat.ai/install/configuration/custom_config.html
|
|
|
1 |
# Configuration version (required)
|
2 |
+version: 1.0.2
|
3 |
|
4 |
# Cache settings: Set to true to enable caching
|
5 |
cache: true
|
|
|
17 |
# Models configuration
|
18 |
models:
|
19 |
# List of default models to use. At least one value is required.
|
20 |
+        default: ["mistral-large-latest", "mistral-medium-latest", "mistral-small-latest"]
|
21 |
# Fetch option: Set to true to fetch models from API.
|
22 |
fetch: true # Defaults to false.
|
23 |
|
|
|
46 |
|
47 |
# Add additional parameters to the request. Default params will be overwritten.
|
48 |
addParams:
|
49 |
+        safe_prompt: true # This field is specific to Mistral AI: https://docs.mistral.ai/api/
|
50 |
|
51 |
# Drop Default params parameters from the request. See default params in guide linked below.
|
52 |
# NOTE: For Mistral, it is necessary to drop the following parameters or you will encounter a 422 Error:
|
53 |
+      dropParams: ["stop", "user", "frequency_penalty", "presence_penalty"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|