Shakir60 committed
Commit 80585bc · verified · 1 Parent(s): 374fb53

Create app.py

Files changed (1)
  1. app.py +165 -0
app.py ADDED
@@ -0,0 +1,165 @@
+ import streamlit as st
+ import os
+ from groq import Groq
+ from transformers import ViTForImageClassification, ViTImageProcessor
+ from sentence_transformers import SentenceTransformer
+ from PIL import Image
+ import torch
+ import numpy as np
+ from typing import List, Dict
+ import faiss
+ import json
+
+ # Initialize sentence transformer for embeddings
+ @st.cache_resource
+ def init_embedding_model():
+     return SentenceTransformer('all-MiniLM-L6-v2')
+
+ # Initialize Groq client
+ @st.cache_resource
+ def init_groq_client():
+     return Groq(api_key=os.environ.get("GROQ_API_KEY"))
+
+ class RAGSystem:
+     def __init__(self):
+         self.embedding_model = init_embedding_model()
+         self.knowledge_base = self.load_knowledge_base()
+         self.vector_store = self.create_vector_store()
+
+     def load_knowledge_base(self) -> List[Dict]:
+         """Load and preprocess knowledge base into a list of documents"""
+         kb = {
+             "spalling": [
+                 {
+                     "severity": "Critical",
+                     "description": "Severe concrete spalling with exposed reinforcement and section loss",
+                     "repair_method": ["Install temporary support", "Remove deteriorated concrete", "Clean and treat reinforcement"],
+                     "estimated_cost": "Very High ($15,000+)",
+                     "timeframe": "3-4 weeks",
+                     "location": "Primary structural elements",
+                     "required_expertise": "Structural Engineer + Specialist Contractor",
+                     "immediate_action": "Evacuate area, install temporary support, prevent access",
+                     "prevention": "Regular inspections, waterproofing, chloride protection"
+                 },
+                 # Add other knowledge base entries...
+             ]
+         }
+
+         # Convert nested knowledge base into flat documents
+         documents = []
+         for category, items in kb.items():
+             for item in items:
+                 # Create a text representation of the document
+                 doc_text = f"Category: {category}\n"
+                 for key, value in item.items():
+                     if isinstance(value, list):
+                         doc_text += f"{key}: {', '.join(value)}\n"
+                     else:
+                         doc_text += f"{key}: {value}\n"
+                 documents.append({
+                     "text": doc_text,
+                     "metadata": {"category": category}
+                 })
+
+         return documents
+
+     def create_vector_store(self):
+         """Create FAISS vector store from knowledge base"""
+         # Generate embeddings for all documents
+         texts = [doc["text"] for doc in self.knowledge_base]
+         embeddings = self.embedding_model.encode(texts)
+
+         # Initialize FAISS index
+         dimension = embeddings.shape[1]
+         index = faiss.IndexFlatL2(dimension)
+         index.add(np.array(embeddings).astype('float32'))
+
+         return index
+
+     def get_relevant_context(self, query: str, k: int = 3) -> str:
+         """Retrieve relevant context based on query"""
+         # Generate query embedding
+         query_embedding = self.embedding_model.encode([query])
+
+         # Search for similar documents
+         D, I = self.vector_store.search(np.array(query_embedding).astype('float32'), k)
+
+         # Combine relevant documents into context
+         context = "\n\n".join([self.knowledge_base[i]["text"] for i in I[0] if i != -1])  # FAISS pads results with -1 when k exceeds the index size
+         return context
+
+ def get_groq_response(query: str, context: str) -> str:
+     """Get response from Groq LLM"""
+     client = init_groq_client()
+     try:
+         prompt = f"""Based on the following context about construction defects, please answer the question.
+
+ Context:
+ {context}
+
+ Question: {query}
+
+ Please provide a detailed and specific answer based on the given context."""
+
+         response = client.chat.completions.create(
+             messages=[
+                 {
+                     "role": "system",
+                     "content": "You are a construction defect analysis expert. Provide detailed, accurate answers based on the given context."
+                 },
+                 {
+                     "role": "user",
+                     "content": prompt
+                 }
+             ],
+             model="llama-3.3-70b-versatile",
+         )
+         return response.choices[0].message.content
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ def main():
+     st.set_page_config(
+         page_title="Construction Defect RAG Analyzer",
+         page_icon="🏗️",
+         layout="wide"
+     )
+
+     st.title("🏗️ Construction Defect RAG Analyzer")
+
+     # Initialize RAG system
+     if 'rag_system' not in st.session_state:
+         st.session_state.rag_system = RAGSystem()
+
+     # File upload for image analysis
+     uploaded_file = st.file_uploader(
+         "Upload a construction image",
+         type=['jpg', 'jpeg', 'png']
+     )
+
+     # Query input
+     user_query = st.text_input("Ask a question about construction defects:")
+
+     if user_query:
+         with st.spinner("Processing query..."):
+             # Get relevant context using RAG
+             context = st.session_state.rag_system.get_relevant_context(user_query)
+
+             # Debug view of retrieved context
+             if st.checkbox("Show retrieved context"):
+                 st.subheader("Retrieved Context")
+                 st.text(context)
+
+             # Get response from Groq
+             st.subheader("AI Assistant Response")
+             response = get_groq_response(user_query, context)
+             st.write(response)
+
+     if uploaded_file:
+         image = Image.open(uploaded_file)
+         st.image(image, caption="Uploaded Image")
+
+         # Your existing image analysis code here...
+
+ if __name__ == "__main__":
+     main()
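
The retrieval path can be exercised without launching the Streamlit UI by importing the module directly. Below is a minimal smoke-test sketch, not part of this commit: it assumes app.py is importable from the working directory, that streamlit, groq, transformers, sentence-transformers, faiss-cpu, torch, and pillow are installed, and that GROQ_API_KEY is exported before the Groq call is uncommented.

# smoke_test.py — hypothetical helper, not included in this commit
from app import RAGSystem, get_groq_response

rag = RAGSystem()  # builds the FAISS index over the in-file knowledge base
query = "How should severe concrete spalling be repaired?"

# Retrieve the nearest knowledge-base documents for the query
context = rag.get_relevant_context(query)
print(context)

# Needs GROQ_API_KEY in the environment; Streamlit may warn about
# cache_resource being used outside a running app, which is harmless here.
# print(get_groq_response(query, context))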