Merge pull request #66 from joshuasundance-swca/mistral
Files changed:
- README.md (+1 -0)
- langchain-streamlit-demo/defaults.py (+1 -0)
README.md CHANGED

@@ -53,6 +53,7 @@ This `README` was originally written by [Claude 2](https://www.anthropic.com/ind
 - `meta-llama/Llama-2-13b-chat-hf`
 - `meta-llama/Llama-2-70b-chat-hf`
 - `codellama/CodeLlama-34b-Instruct-hf`
+- `mistralai/Mistral-7B-Instruct-v0.1`
 - [Azure OpenAI Service](https://azure.microsoft.com/en-us/products/ai-services/openai-service/)
 - `[configurable]`
 - Streaming output of assistant responses
langchain-streamlit-demo/defaults.py CHANGED

@@ -11,6 +11,7 @@ MODEL_DICT = {
     "meta-llama/Llama-2-13b-chat-hf": "Anyscale Endpoints",
     "meta-llama/Llama-2-70b-chat-hf": "Anyscale Endpoints",
     "codellama/CodeLlama-34b-Instruct-hf": "Anyscale Endpoints",
+    "mistralai/Mistral-7B-Instruct-v0.1": "Anyscale Endpoints",
     "Azure OpenAI": "Azure OpenAI",
 }
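For context, `MODEL_DICT` in `defaults.py` maps each selectable model name to the provider that serves it, so adding the Mistral entry is all that is needed to expose the model in the app. The snippet below is a minimal sketch of how such a mapping can be used to route a chosen model to its provider; the `provider_for` helper is hypothetical, not code from this repo, and the dict contents simply mirror the diff above.

```python
# Sketch only: MODEL_DICT contents mirror the diff in defaults.py;
# provider_for is a hypothetical helper for illustration.
MODEL_DICT = {
    "meta-llama/Llama-2-13b-chat-hf": "Anyscale Endpoints",
    "meta-llama/Llama-2-70b-chat-hf": "Anyscale Endpoints",
    "codellama/CodeLlama-34b-Instruct-hf": "Anyscale Endpoints",
    "mistralai/Mistral-7B-Instruct-v0.1": "Anyscale Endpoints",  # added in this PR
    "Azure OpenAI": "Azure OpenAI",
}

def provider_for(model_name: str) -> str:
    """Look up which provider serves the given model name."""
    try:
        return MODEL_DICT[model_name]
    except KeyError:
        raise ValueError(f"Unknown model: {model_name}") from None

# The newly added Mistral model resolves to Anyscale Endpoints.
assert provider_for("mistralai/Mistral-7B-Instruct-v0.1") == "Anyscale Endpoints"
```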