Spaces:
Sleeping
Sleeping
models note
Browse files — pages/3_🕮_Docs_Demo.py (+3 −1)
pages/3_🕮_Docs_Demo.py
CHANGED
@@ -42,7 +42,7 @@ llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
|
|
42 |
# Setup LLM and QA chain
|
43 |
|
44 |
models = {"chatgpt3.5": ChatOpenAI(model="gpt-3.5-turbo", temperature=0, api_key=st.secrets["OPENAI_API_KEY"], streaming=True),
|
45 |
-
|
46 |
"phi3": Ollama(model="phi3", temperature=0),
|
47 |
"duckdb-nsql": Ollama(model="duckdb-nsql", temperature=0),
|
48 |
"command-r-plus": Ollama(model="command-r-plus", temperature=0),
|
@@ -56,6 +56,8 @@ models = {"chatgpt3.5": ChatOpenAI(model="gpt-3.5-turbo", temperature=0, api_key
|
|
56 |
|
57 |
|
58 |
with st.sidebar:
|
|
|
|
|
59 |
choice = st.radio("Select an LLM:", models)
|
60 |
llm = models[choice]
|
61 |
|
|
|
42 |
# Setup LLM and QA chain
|
43 |
|
44 |
models = {"chatgpt3.5": ChatOpenAI(model="gpt-3.5-turbo", temperature=0, api_key=st.secrets["OPENAI_API_KEY"], streaming=True),
|
45 |
+
# "chatgpt4": ChatOpenAI(model="gpt-4", temperature=0, api_key=st.secrets["OPENAI_API_KEY"]),
|
46 |
"phi3": Ollama(model="phi3", temperature=0),
|
47 |
"duckdb-nsql": Ollama(model="duckdb-nsql", temperature=0),
|
48 |
"command-r-plus": Ollama(model="command-r-plus", temperature=0),
|
|
|
56 |
|
57 |
|
58 |
with st.sidebar:
|
59 |
+
|
60 |
+
"Non-ChatGPT models assume you are running the app locally with ollama installed."
|
61 |
choice = st.radio("Select an LLM:", models)
|
62 |
llm = models[choice]
|
63 |
|