qitongwei committed (verified)
Commit 0946fe4 · 1 Parent(s): edf9e4c

Update librechat.yaml

Files changed (1): librechat.yaml (+17 -18)
librechat.yaml CHANGED
@@ -85,24 +85,23 @@ endpoints:
       iconURL: https://cdn-icons-png.flaticon.com/128/1240/1240979.png
 
 modelSpecs:
-  # ... other modelSpecs fields
-  addedEndpoints:
-    - google
-  - name: "Gemini"
-    label: "Gemini"
-    default: false
-    description: "Gemini 2.5 Pro"#iconURL: "https://example.com/icon.png"
-    preset:
-      endpoint: "google"
-      model: "gemini-2.5-pro-exp-03-25"
-      maxContextTokens: 35000 # Maximum context tokens
-      max_tokens: 16000 # Maximum output tokens
-      temperature: 1
-      promptCache: true
-      modelLabel: "Gemini"
-      greeting: |
-        Gemini 2.5 Pro access.
-      promptPrefix: some_cool_prompt
+  enforce: true
+  prioritize: true
+  list:
+    - name: "Gemini"
+      label: "Gemini"
+      default: false
+      description: "Gemini 2.5 Pro"#iconURL: "https://example.com/icon.png"
+      preset:
+        endpoint: "google"
+        model: "gemini-2.5-pro-exp-03-25"
+        maxContextTokens: 35000 # Maximum context tokens
+        max_tokens: 16000 # Maximum output tokens
+        temperature: 1
+        modelLabel: "Gemini"
+        greeting: |
+          Gemini 2.5 Pro access.
+        promptPrefix: some_cool_prompt
 
 # See the Custom Configuration Guide for more information:
 # https://docs.librechat.ai/install/configuration/custom_config.html
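
For reference, a minimal sketch of the resulting modelSpecs block. The commented field semantics are assumptions taken from the Custom Configuration Guide linked above (enforce, prioritize, and list are the documented modelSpecs keys), not from this commit itself:

modelSpecs:
  enforce: true       # assumed per the guide: limit users to the specs defined in `list`
  prioritize: true    # assumed per the guide: apply the listed spec's preset by default
  list:
    - name: "Gemini"
      label: "Gemini"
      preset:
        endpoint: "google"
        model: "gemini-2.5-pro-exp-03-25"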