update
- Dockerfile +1 -1
- config.yaml +4 -4
Dockerfile (CHANGED)
@@ -13,7 +13,7 @@ RUN pip3 install 'litellm[proxy]'
 #RUN mkdir -p /.ollama
 #RUN chmod -R 777 /.ollama
 
-WORKDIR /.ollama
+#WORKDIR /.ollama
 
 # Copy the entry point script
 COPY config.yaml /config.yaml
config.yaml (CHANGED)
@@ -13,10 +13,10 @@ model_list:
 #bos_token: "<s>"
 #eos_token: "</s>"
 #max_tokens: 4096
-
-
-
-
+#- model_name: xinference-llama-3-instruct
+#  litellm_params:
+#    model: xinference/llama-3-instruct
+#    api_base: https://zhengr-xinference.hf.space/v1
 
 litellm_settings: # module level litellm settings - https://github.com/BerriAI/litellm/blob/main/litellm/__init__.py
   drop_params: True
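
For context, if the commented-out entry were later enabled, the model_list section would read roughly as follows. This is a sketch assuming litellm's standard proxy config layout; the exact indentation is not fixed by the commit, since the new lines are commented out:

model_list:
  - model_name: xinference-llama-3-instruct     # name clients use when calling the proxy
    litellm_params:
      model: xinference/llama-3-instruct        # upstream model identifier (xinference provider)
      api_base: https://zhengr-xinference.hf.space/v1   # endpoint requests are forwarded to

litellm_settings:
  drop_params: True   # drop request params the target provider does not support

Requests sent to the proxy under the name xinference-llama-3-instruct would then be routed to the api_base above.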