Spaces:
Sleeping
Sleeping
more models
Browse files — minimal-example.py (+5, −2)
minimal-example.py
CHANGED
|
@@ -26,9 +26,12 @@ chatgpt4_llm = ChatOpenAI(model="gpt-4", temperature=0, api_key=st.secrets["OPEN
|
|
| 26 |
# Requires ollama server running locally
|
| 27 |
from langchain_community.llms import Ollama
|
| 28 |
## # from langchain_community.llms import ChatOllama
|
| 29 |
-
ollama_llm = Ollama(model="duckdb-nsql", temperature=0)
|
| 30 |
|
| 31 |
-
models = {"
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
with st.sidebar:
|
| 33 |
choice = st.radio("Select an LLM:", models)
|
| 34 |
llm = models[choice]
|
|
|
|
| 26 |
# Requires ollama server running locally
|
| 27 |
from langchain_community.llms import Ollama
|
| 28 |
## # from langchain_community.llms import ChatOllama
|
|
|
|
| 29 |
|
| 30 |
+
models = {"duckdb-nsql": Ollama(model="duckdb-nsql", temperature=0),
|
| 31 |
+
"sqlcoder": Ollama(model="sqlcoder", temperature=0),
|
| 32 |
+
"gemma": Ollama(model="gemma", temperature=0),
|
| 33 |
+
"chatgpt3.5": chatgpt_llm,
|
| 34 |
+
"chatgpt4": chatgpt4_llm}
|
| 35 |
with st.sidebar:
|
| 36 |
choice = st.radio("Select an LLM:", models)
|
| 37 |
llm = models[choice]
|