Update config.yaml
config.yaml CHANGED (+8 -36)
@@ -1,44 +1,16 @@
-model_list:
-  - model_name:
-    litellm_params: # all params accepted by litellm.completion() - https://docs.litellm.ai/docs/completion/input
-      model: azure/gpt-turbo-small-eu ### MODEL NAME sent to `litellm.completion()` ###
-      api_base: https://my-endpoint-europe-berri-992.openai.azure.com/
-      api_key: "os.environ/AZURE_API_KEY_EU" # does os.getenv("AZURE_API_KEY_EU")
-      rpm: 6 # [OPTIONAL] Rate limit for this deployment: in requests per minute (rpm)
-  - model_name: bedrock-claude-v1
+model_list:
+  - model_name: gemini-pro
     litellm_params:
-      model:
-
-
-
-      api_base: https://my-endpoint-canada-berri992.openai.azure.com/
-      api_key: "os.environ/AZURE_API_KEY_CA"
-      rpm: 6
-  - model_name: anthropic-claude
-    litellm_params:
-      model: bedrock/anthropic.claude-instant-v1
-      ### [OPTIONAL] SET AWS REGION ###
-      aws_region_name: us-east-1
-  - model_name: vllm-models
-    litellm_params:
-      model: openai/facebook/opt-125m # the `openai/` prefix tells litellm it's openai compatible
-      api_base: http://0.0.0.0:4000/v1
-      api_key: none
-      rpm: 1440
-    model_info:
-      version: 2
+      model: gemini/gemini-1.5-pro
+      api_key: os.environ/GEMINI_API_KEY
+
+
 
-  # Use this if you want to make requests to `claude-3-haiku-20240307`,`claude-3-opus-20240229`,`claude-2.1` without defining them on the config.yaml
-  # Default models
-  # Works for ALL Providers and needs the default provider credentials in .env
-  - model_name: "*"
-    litellm_params:
-      model: "*"
 
 litellm_settings: # module level litellm settings - https://github.com/BerriAI/litellm/blob/main/litellm/__init__.py
   drop_params: True
-
+
 
 general_settings:
   master_key: sk-1234 # [OPTIONAL] Only use this if you want to require all calls to contain this key (Authorization: Bearer sk-1234)
+
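After this change the proxy exposes a single gemini-pro deployment routed to gemini/gemini-1.5-pro. A minimal usage sketch, assuming the proxy is started with `litellm --config config.yaml` on its default port 4000 and that GEMINI_API_KEY is set in the environment; the host, port, and prompt below are illustrative and not part of this commit:

# Minimal sketch: call the proxy's OpenAI-compatible endpoint using the
# `gemini-pro` model_name defined in the updated config.yaml.
# Assumes the proxy is running locally on the default port 4000.
import openai

client = openai.OpenAI(
    api_key="sk-1234",               # master_key from general_settings
    base_url="http://0.0.0.0:4000",  # proxy address (assumed default)
)

response = client.chat.completions.create(
    model="gemini-pro",  # routed by the proxy to gemini/gemini-1.5-pro
    messages=[{"role": "user", "content": "Hello from the proxy"}],
)
print(response.choices[0].message.content)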