added logging
app.py CHANGED
@@ -1,5 +1,6 @@
 import os
 from typing import Literal
+import logging
 import streamlit as st
 
 from langchain.embeddings import HuggingFaceInstructEmbeddings
@@ -98,6 +99,8 @@ def load_chain(model: Literal["openai", "GPT-NeoXT-Chat-Base-20B"] ="openai"):
         k=8,
         return_source_documents=True,
     )
+
+    logging.debug(f"Loaded chain with {model}.")
 
     return chain
 
@@ -178,6 +181,7 @@ if ask:
     with st.spinner("Um... excuse me but... this can take about a minute for your first question because some stuff have to be downloaded 🥺👉🏻👈🏻"):
         try:
             answer, pages, extract = get_answer(question=user_input, model=choice)
+            logging.debug(f"Answer successfully generated using {choice}.")
         except:
             if choice=="togethercomputer/GPT-NeoXT-Chat-Base-20B":
                 st.write("The model probably timed out :(")
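Note that Python's root logger defaults to the WARNING level, so the logging.debug(...) calls added in this diff stay silent unless logging is configured somewhere in app.py. A minimal sketch of such a configuration, assuming console output is wanted and that no other handler has been set up (this call is not part of the commit shown above):

import logging

# Assumption: no prior logging configuration exists; basicConfig does nothing
# if handlers are already attached to the root logger.
logging.basicConfig(
    level=logging.DEBUG,  # default root level is WARNING, which hides debug records
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)

logging.debug("Loaded chain with openai.")  # with the config above, this appears on stderr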