# Source: Hugging Face file-viewer header (not part of the YAML config itself)
# LibreChat / librechat.yaml — uploaded by mrbesher, commit c0f72fc (verified), 3.27 kB
# Configuration version (required)
# Quoted so no YAML parser can ever mistake it for a number.
version: "1.1.5"

# Cache settings: set to true to enable caching
cache: true

# Registration: which social login providers are offered, and which
# email domains are allowed to register accounts.
registration:
  socialLogins: ["github", "google", "discord", "openid", "facebook"]
  allowedDomains:
    - "gmail.com"
# Definition of custom endpoints.
# Each entry under `custom` is an OpenAI-compatible provider; `models.default`
# lists the models shown in the UI, and `models.fetch` controls whether the
# model list is also fetched live from the provider's API.
endpoints:
  # assistants:
  #   disableBuilder: false # Disable Assistants Builder Interface by setting to `true`
  #   pollIntervalMs: 750 # Polling interval for checking assistant updates
  #   timeoutMs: 180000 # Timeout for assistant operations
  #   # Should only be one or the other, either `supportedIds` or `excludedIds`
  #   supportedIds: ["asst_supportedAssistantId1", "asst_supportedAssistantId2"]
  #   # excludedIds: ["asst_excludedAssistantId"]
  custom:
    # OpenAI
    - name: "openai"
      apiKey: "user_provided"
      baseURL: "https://api.openai.com/v1/"
      models:
        default:
          - "gpt-4o-mini"
        fetch: true
      titleConvo: true
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "openai"

    # Groq
    - name: "groq"
      apiKey: "user_provided"
      baseURL: "https://api.groq.com/openai/v1/"
      models:
        default:
          - "gemma-7b-it"
        fetch: true
      titleConvo: true
      titleModel: "mixtral-8x7b-32768"
      summarize: false
      summaryModel: "mixtral-8x7b-32768"
      forcePrompt: false
      modelDisplayLabel: "groq"

    # Mistral AI API
    - name: "Mistral"
      apiKey: "user_provided"
      baseURL: "https://api.mistral.ai/v1"
      models:
        default:
          - "mistral-small-latest"
          - "mistral-medium-latest"
          - "mistral-large-latest"
        fetch: false
      titleConvo: true
      titleMethod: "completion"
      titleModel: "open-mistral-7b"
      summarize: false
      summaryModel: "open-mistral-7b"
      forcePrompt: false
      modelDisplayLabel: "Mistral"
      # Mistral rejects these OpenAI-style parameters, so strip them from requests.
      dropParams: ["stop", "user", "frequency_penalty", "presence_penalty"]

    # Perplexity
    - name: "Perplexity"
      apiKey: "user_provided"
      baseURL: "https://api.perplexity.ai/"
      models:
        default:
          - "mistral-7b-instruct"
          - "sonar-small-chat"
          - "sonar-small-online"
          - "sonar-medium-chat"
          - "sonar-medium-online"
        fetch: false # fetching list of models is not supported
      titleConvo: true
      titleModel: "sonar-medium-chat"
      summarize: false
      summaryModel: "sonar-medium-chat"
      forcePrompt: false
      dropParams: ["stop", "frequency_penalty"]
      modelDisplayLabel: "Perplexity"

    # OpenRouter
    - name: "OpenRouter"
      # Known issue: you should not use `OPENROUTER_API_KEY` as it will then override the `openAI` endpoint to use OpenRouter as well.
      apiKey: "user_provided"
      baseURL: "https://openrouter.ai/api/v1"
      models:
        default:
          - "meta-llama/llama-3-8b-instruct"
        fetch: true
      titleConvo: true
      titleModel: "meta-llama/llama-3-8b-instruct"
      # Recommended: Drop the stop parameter from the request as Openrouter models use a variety of stop tokens.
      dropParams: ["stop"]
      modelDisplayLabel: "OpenRouter"