mgbam committed on
Commit
652f56f
·
verified ·
1 Parent(s): be6f117

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -9
app.py CHANGED
@@ -31,6 +31,7 @@ class ResearchConfig:
31
  CHUNK_SIZE = 512
32
  CHUNK_OVERLAP = 64
33
  MAX_CONCURRENT_REQUESTS = 5
 
34
  DOCUMENT_MAP = {
35
  "Research Report: Results of a New AI Model Improving Image Recognition Accuracy to 98%":
36
  "CV-Transformer Hybrid Architecture",
@@ -61,14 +62,14 @@ if not ResearchConfig.DEEPSEEK_API_KEY:
61
  st.stop()
62
 
63
  # ------------------------------
64
- # Quantum Document Processing
65
  # ------------------------------
66
  class QuantumDocumentManager:
67
  def __init__(self):
68
  self.client = chromadb.PersistentClient(path=ResearchConfig.CHROMA_PATH)
69
  self.embeddings = OpenAIEmbeddings(
70
  model="text-embedding-3-large",
71
- model_kwargs={"dimensions": 1536}
72
  )
73
 
74
  def create_collection(self, documents: List[str], collection_name: str) -> Chroma:
@@ -185,12 +186,9 @@ class CognitiveProcessor:
185
  return {"error": str(e)}
186
 
187
  def _consensus_check(self, results: List[Dict]) -> Dict:
188
- # Implement consensus algorithm
189
  valid = [r for r in results if "error" not in r]
190
  if not valid:
191
  return {"error": "All API requests failed"}
192
-
193
- # Select longest valid response
194
  return max(valid, key=lambda x: len(x.get('choices', [{}])[0].get('message', {}).get('content', '')))
195
 
196
  # ------------------------------
@@ -208,14 +206,12 @@ class ResearchWorkflow:
208
  metadata: Dict[str, Any]
209
 
210
  def _build_workflow(self):
211
- # Define workflow nodes
212
  self.workflow.add_node("ingest", self.ingest_query)
213
  self.workflow.add_node("retrieve", self.retrieve_documents)
214
  self.workflow.add_node("analyze", self.analyze_content)
215
  self.workflow.add_node("validate", self.validate_output)
216
  self.workflow.add_node("refine", self.refine_results)
217
 
218
- # Configure workflow edges
219
  self.workflow.set_entry_point("ingest")
220
  self.workflow.add_edge("ingest", "retrieve")
221
  self.workflow.add_edge("retrieve", "analyze")
@@ -227,7 +223,6 @@ class ResearchWorkflow:
227
  self.workflow.add_edge("validate", END)
228
  self.workflow.add_edge("refine", "retrieve")
229
 
230
- # Compile workflow
231
  self.app = self.workflow.compile()
232
 
233
  def ingest_query(self, state: AgentState) -> Dict:
@@ -389,7 +384,7 @@ class ResearchInterface:
389
 
390
  st.subheader("Analysis Metrics")
391
  st.metric("Vector Collections", 2)
392
- st.metric("Embedding Dimensions", 1536)
393
 
394
  def _build_main_interface(self):
395
  st.title("🧠 NeuroResearch AI")
 
31
  CHUNK_SIZE = 512
32
  CHUNK_OVERLAP = 64
33
  MAX_CONCURRENT_REQUESTS = 5
34
+ EMBEDDING_DIMENSIONS = 1536 # New config parameter
35
  DOCUMENT_MAP = {
36
  "Research Report: Results of a New AI Model Improving Image Recognition Accuracy to 98%":
37
  "CV-Transformer Hybrid Architecture",
 
62
  st.stop()
63
 
64
  # ------------------------------
65
+ # Quantum Document Processing (Corrected Embeddings)
66
  # ------------------------------
67
  class QuantumDocumentManager:
68
  def __init__(self):
69
  self.client = chromadb.PersistentClient(path=ResearchConfig.CHROMA_PATH)
70
  self.embeddings = OpenAIEmbeddings(
71
  model="text-embedding-3-large",
72
+ dimensions=ResearchConfig.EMBEDDING_DIMENSIONS # Correct parameter usage
73
  )
74
 
75
  def create_collection(self, documents: List[str], collection_name: str) -> Chroma:
 
186
  return {"error": str(e)}
187
 
188
  def _consensus_check(self, results: List[Dict]) -> Dict:
 
189
  valid = [r for r in results if "error" not in r]
190
  if not valid:
191
  return {"error": "All API requests failed"}
 
 
192
  return max(valid, key=lambda x: len(x.get('choices', [{}])[0].get('message', {}).get('content', '')))
193
 
194
  # ------------------------------
 
206
  metadata: Dict[str, Any]
207
 
208
  def _build_workflow(self):
 
209
  self.workflow.add_node("ingest", self.ingest_query)
210
  self.workflow.add_node("retrieve", self.retrieve_documents)
211
  self.workflow.add_node("analyze", self.analyze_content)
212
  self.workflow.add_node("validate", self.validate_output)
213
  self.workflow.add_node("refine", self.refine_results)
214
 
 
215
  self.workflow.set_entry_point("ingest")
216
  self.workflow.add_edge("ingest", "retrieve")
217
  self.workflow.add_edge("retrieve", "analyze")
 
223
  self.workflow.add_edge("validate", END)
224
  self.workflow.add_edge("refine", "retrieve")
225
 
 
226
  self.app = self.workflow.compile()
227
 
228
  def ingest_query(self, state: AgentState) -> Dict:
 
384
 
385
  st.subheader("Analysis Metrics")
386
  st.metric("Vector Collections", 2)
387
+ st.metric("Embedding Dimensions", ResearchConfig.EMBEDDING_DIMENSIONS)
388
 
389
  def _build_main_interface(self):
390
  st.title("🧠 NeuroResearch AI")