Update app.py
app.py CHANGED
@@ -19,7 +19,6 @@ def init_embedding_model():
@st.cache_resource
def init_groq_client():
    return Groq(api_key=os.environ.get("GROQ_API_KEY"))
-
class RAGSystem:
    def __init__(self):
        self.embedding_model = init_embedding_model()
@@ -28,6 +27,7 @@ class RAGSystem:

    def load_knowledge_base(self) -> List[Dict]:
        """Load and preprocess knowledge base into a list of documents"""
+        # Your existing knowledge base dictionary
        kb = {
            "spalling": [
                {
@@ -296,6 +296,19 @@ class RAGSystem:

        return index

+    def create_vector_store(self):
+        """Create FAISS vector store from knowledge base"""
+        # Generate embeddings for all documents
+        texts = [doc["text"] for doc in self.knowledge_base]
+        embeddings = self.embedding_model.encode(texts)
+
+        # Initialize FAISS index
+        dimension = embeddings.shape[1]
+        index = faiss.IndexFlatL2(dimension)
+        index.add(np.array(embeddings).astype('float32'))
+
+        return index
+
    def get_relevant_context(self, query: str, k: int = 3) -> str:
        """Retrieve relevant context based on query"""
        # Generate query embedding
@@ -308,17 +321,14 @@
        context = "\n\n".join([self.knowledge_base[i]["text"] for i in I[0]])
        return context

-def get_groq_response(query: str, context: str) -> str:
+def get_groq_response(query: str, context: str) -> str:
    """Get response from Groq LLM"""
    client = init_groq_client()
    try:
        prompt = f"""Based on the following context about construction defects, please answer the question.
-
Context:
{context}
-
Question: {query}
-
Please provide a detailed and specific answer based on the given context."""

        response = client.chat.completions.create(
@@ -338,7 +348,7 @@ Please provide a detailed and specific answer based on the given context."""
    except Exception as e:
        return f"Error: {str(e)}"

-
+def main():
    st.set_page_config(
        page_title="Construction Defect RAG Analyzer",
        page_icon="🏗️",
@@ -384,25 +394,28 @@ def main():

    with col1:
        st.subheader("Image Analysis")
-        #
+        # Image analysis placeholder
        st.info("Image analysis results would appear here")

    with col2:
        st.subheader("AI Assistant Response")
-        #
-
-
-
-
-
-
+        if user_query:  # Only show response if there's a query
+            # Get relevant context from knowledge base
+            context = st.session_state.rag_system.get_relevant_context(user_query)
+
+            # Get response from Groq
+            with st.spinner("Getting AI response..."):
+                response = get_groq_response(user_query, context)
+                st.write(response)
+
    # Display knowledge base sections
    if st.checkbox("Show Knowledge Base"):
-
-
-
-
-
+        st.subheader("Available Knowledge Base")
+        kb_data = st.session_state.rag_system.knowledge_base
+        for doc in kb_data:
+            category = doc["metadata"]["category"]
+            with st.expander(category.title()):
+                st.text(doc["text"])

if __name__ == "__main__":
    main()
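Two names used in the UI hunk, user_query and st.session_state.rag_system, are defined outside the lines shown here. A common Streamlit pattern for that setup, given purely as an assumption about the surrounding code in main() (streamlit is already imported as st and RAGSystem is the class defined above):

# Hypothetical setup earlier in main(); not part of this commit
if "rag_system" not in st.session_state:
    st.session_state.rag_system = RAGSystem()  # build embeddings and the FAISS index once per session

user_query = st.text_input("Ask about a construction defect")  # illustrative widget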