Update app.py
app.py CHANGED
@@ -27,6 +27,11 @@ class LLM_Langchain():
         model_name_visibility = False
 
         self.model_name = return_sum_models(model_parent)
+        checkpoint = st.sidebar.selectbox(
+            label = "Model used",
+            options = [self.model_name],
+            help="Model used to predict",
+        )
 
         self.max_new_tokens = st.sidebar.slider(
             label="Token Length",
@@ -92,7 +97,7 @@ class LLM_Langchain():
         for message in st.session_state.messages:
             with st.chat_message(message.get('role')):
                 st.write(message.get("content"))
-        text = st.chat_input(disabled=
+        text = st.chat_input(disabled=False)
 
         if text:
             st.session_state.messages.append(
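For context, a minimal standalone sketch of the pattern this commit arrives at: a single-option st.sidebar.selectbox that surfaces the active checkpoint, plus an always-enabled st.chat_input driving the chat loop. It assumes Streamlit >= 1.24 (st.chat_message / st.chat_input); the placeholder model_name value and the user-echo block are illustrative, not taken from the repo.

# Sketch only: stand-in names, not the repo's actual code.
import streamlit as st

model_name = "example/model"  # stand-in for return_sum_models(model_parent)

# Sidebar model selector as added in the first hunk: a single-option
# selectbox that shows which checkpoint the app is using.
checkpoint = st.sidebar.selectbox(
    label="Model used",
    options=[model_name],
    help="Model used to predict",
)

# Replay the chat history on every rerun, then collect the next prompt.
# The second hunk pins disabled=False so the input box stays active.
if "messages" not in st.session_state:
    st.session_state.messages = []

for message in st.session_state.messages:
    with st.chat_message(message.get("role")):
        st.write(message.get("content"))

text = st.chat_input(disabled=False)
if text:
    st.session_state.messages.append({"role": "user", "content": text})
    with st.chat_message("user"):
        st.write(text)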