Fixed llm model name
app.py
CHANGED
@@ -57,7 +57,6 @@ else:
     )

     with st.sidebar:
-        model_name = st.selectbox("**Model**", options=["llama-3.1-70b-versatile","gemma2-9b-it","gemma-7b-it","llama-3.2-3b-preview", "llama3-70b-8192", "mixtral-8x7b-32768"])
         temp = st.slider("**Temperature**", min_value=0.0, max_value=1.0, step=0.001)
         n_docs = st.number_input("**Number of retireved documents**", min_value=0, max_value=10, value=5, step=1)

@@ -67,7 +66,7 @@ else:

     retriever = retriever(n_docs=n_docs)
     # Create Chain
-    chain = get_expression_chain(retriever,
+    chain = get_expression_chain(retriever,"llama-3.3-70b-versatile",temp)

     for msg in st.session_state.langchain_messages:
         avatar = "🦜" if msg.type == "ai" else None
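For context on what the hardcoded value feeds into: get_expression_chain is defined elsewhere in this Space and is not part of the diff, so the sketch below is only an assumption of how such a chain is typically wired with langchain_groq. It shows how the model name and temperature passed at the call site above could end up on a ChatGroq instance inside an LCEL pipeline; the prompt text, argument order, and internals are hypothetical.

# Hypothetical sketch of get_expression_chain; the real definition is not
# shown in this commit. Assumes a Groq-backed LCEL chain via langchain_groq.
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_groq import ChatGroq


def get_expression_chain(retriever, model_name, temp):
    # Prompt that stuffs the retrieved documents into the system message.
    prompt = ChatPromptTemplate.from_messages([
        ("system", "Answer the question using only this context:\n{context}"),
        ("human", "{question}"),
    ])
    # The model name passed from app.py ("llama-3.3-70b-versatile" after this
    # commit) and the sidebar temperature configure the Groq chat model.
    llm = ChatGroq(model=model_name, temperature=temp)
    # Retriever fills {context}, the raw user input fills {question}.
    return (
        {"context": retriever, "question": RunnablePassthrough()}
        | prompt
        | llm
        | StrOutputParser()
    )

Under that reading, the net effect of the commit is that the model is no longer user-selectable in the sidebar: it is pinned to llama-3.3-70b-versatile (presumably because the older names in the removed selectbox were retired), while temperature and the number of retrieved documents remain configurable.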