Hemang Thakur committed
Commit 85a4a41 · 1 Parent(s): 85f093d

a lot of changes

Files changed (27)
  1. frontend/package-lock.json +0 -0
  2. frontend/package.json +10 -2
  3. frontend/src/App.js +36 -5
  4. frontend/src/Components/AiComponents/{Evaluate.css → ChatComponents/Evaluate.css} +6 -0
  5. frontend/src/Components/AiComponents/{Evaluate.js → ChatComponents/Evaluate.js} +45 -14
  6. frontend/src/Components/AiComponents/{FormSection.js → ChatComponents/FormSection.js} +0 -0
  7. frontend/src/Components/AiComponents/{Graph.css → ChatComponents/Graph.css} +0 -0
  8. frontend/src/Components/AiComponents/{Graph.js → ChatComponents/Graph.js} +5 -2
  9. frontend/src/Components/AiComponents/{LeftSideBar.js → ChatComponents/LeftSideBar.js} +0 -0
  10. frontend/src/Components/AiComponents/{LeftSidebar.css → ChatComponents/LeftSidebar.css} +0 -0
  11. frontend/src/Components/AiComponents/{RightSidebar.css → ChatComponents/RightSidebar.css} +11 -2
  12. frontend/src/Components/AiComponents/{RightSidebar.js → ChatComponents/RightSidebar.js} +6 -2
  13. frontend/src/Components/AiComponents/{Sources.css → ChatComponents/Sources.css} +0 -0
  14. frontend/src/Components/AiComponents/{Sources.js → ChatComponents/Sources.js} +0 -0
  15. frontend/src/Components/AiComponents/ChatComponents/Streaming.css +120 -0
  16. frontend/src/Components/AiComponents/ChatComponents/Streaming.js +82 -0
  17. frontend/src/Components/AiComponents/ChatWindow.css +0 -12
  18. frontend/src/Components/AiComponents/ChatWindow.js +20 -11
  19. frontend/src/Components/AiPage.css +4 -7
  20. frontend/src/Components/AiPage.js +85 -22
  21. frontend/src/Components/IntialSetting.css +1 -1
  22. frontend/src/Components/IntialSetting.js +32 -120
  23. main.py +49 -17
  24. src/crawl/crawler.py +3 -7
  25. src/helpers/helper.py +21 -1
  26. src/query_processing/query_processor.py +10 -10
  27. src/reasoning/reasoner.py +4 -6
frontend/package-lock.json CHANGED
The diff for this file is too large to render. See raw diff
 
frontend/package.json CHANGED
@@ -3,7 +3,7 @@
   "version": "0.1.0",
   "private": true,
   "dependencies": {
-    "@emotion/react": "^11.14.0",
+    "@emotion/react": "^11.14.0",
     "@emotion/styled": "^11.14.0",
     "@fortawesome/fontawesome-free": "^6.7.2",
     "@google/generative-ai": "^0.21.0",
@@ -11,12 +11,20 @@
     "@mui/material": "^6.4.3",
     "@mui/styled-engine-sc": "^6.4.2",
     "cra-template": "1.2.0",
+    "katex": "^0.16.4",
     "react": "^19.0.0",
     "react-dom": "^19.0.0",
     "react-icons": "^5.4.0",
-    "react-markdown": "^9.0.3",
+    "react-markdown": "^9.1.0",
     "react-router-dom": "^7.1.3",
     "react-scripts": "5.0.1",
+    "react-syntax-highlighter": "^15.5.0",
+    "rehype-highlight": "^7.0.2",
+    "rehype-katex": "^6.0.2",
+    "rehype-raw": "^6.1.1",
+    "rehype-sanitize": "^5.0.1",
+    "remark-gfm": "^3.0.1",
+    "remark-math": "^5.1.1",
     "styled-components": "^6.1.14",
     "web-vitals": "^4.2.4"
   },
frontend/src/App.js CHANGED
@@ -1,6 +1,8 @@
-import React, { useState, useEffect } from 'react';
+import React, { useState } from 'react';
 import { BrowserRouter, Routes, Route } from 'react-router-dom';
 import CircularProgress from '@mui/material/CircularProgress';
+import Snackbar from '@mui/material/Snackbar';
+import Alert from '@mui/material/Alert';
 import logo from './Icons/settings-2.svg';
 import './App.css';
 import IntialSetting from './Components/IntialSetting.js';
@@ -20,13 +22,28 @@ function App() {
 function Home() {
   const [showSettings, setShowSettings] = useState(false);
   const [initializing, setInitializing] = useState(false);
+  // Snackbar state
+  const [snackbar, setSnackbar] = useState({
+    open: false,
+    message: "",
+    severity: "success",
+  });
 
-  // This callback is passed to IntialSetting and called when the user clicks Save.
-  // It changes the underlying header content to the initializing state.
   const handleInitializationStart = () => {
     setInitializing(true);
   };
 
+  // Function to open the snackbar
+  const openSnackbar = (message, severity = "success") => {
+    setSnackbar({ open: true, message, severity });
+  };
+
+  // Function to close the snackbar
+  const closeSnackbar = (event, reason) => {
+    if (reason === 'clickaway') return;
+    setSnackbar(prev => ({ ...prev, open: false }));
+  };
+
   return (
     <div className="App">
       <header className="App-header">
@@ -48,17 +65,31 @@ function Home() {
         </>
       )}
 
-      {/* Always render the settings modal if showSettings is true */}
+      {/* InitialSetting */}
       {showSettings && (
         <IntialSetting
           trigger={showSettings}
           setTrigger={setShowSettings}
           onInitializationStart={handleInitializationStart}
+          openSnackbar={openSnackbar}
+          closeSnackbar={closeSnackbar}
         />
       )}
       </header>
+
+      {/* Render the Snackbar*/}
+      <Snackbar
+        open={snackbar.open}
+        autoHideDuration={snackbar.severity === 'success' ? 3000 : null}
+        onClose={closeSnackbar}
+        anchorOrigin={{ vertical: 'top', horizontal: 'center' }}
+      >
+        <Alert onClose={closeSnackbar} severity={snackbar.severity} variant="filled" sx={{ width: '100%' }}>
+          {snackbar.message}
+        </Alert>
+      </Snackbar>
     </div>
   );
 }
 
-export default App;
+export default App;
frontend/src/Components/AiComponents/{Evaluate.css → ChatComponents/Evaluate.css} RENAMED
@@ -86,6 +86,12 @@
   color: #ddd !important;
 }
 
+/* No metrics message */
+.no-metrics-message {
+  text-align: center;
+  color: red;
+}
+
 /* Spinner styling */
 .custom-spinner {
   width: 1.35rem;
frontend/src/Components/AiComponents/{Evaluate.js → ChatComponents/Evaluate.js} RENAMED
@@ -16,20 +16,9 @@ const MenuProps = {
   PaperProps: {
     className: 'evaluate-menu',
   },
+  disableScrollLock: true
 };
 
-const names = [
-  "Bias",
-  "Toxicity",
-  "Answer Correctness",
-  "Summarization",
-  "Faithfulness",
-  "Hallucination",
-  "Answer Relevancy",
-  "Contextual Relevancy",
-  "Contextual Recall"
-];
-
 function getStyles(name, selectedNames, theme) {
   return {
     fontWeight: selectedNames.includes(name.toLowerCase())
@@ -45,6 +34,32 @@ export default function MultipleSelectChip({ evaluation }) {
   const [evaluationResult, setEvaluationResult] = React.useState("");
   const [isEvaluating, setIsEvaluating] = React.useState(false);
   const [localLoading, setLocalLoading] = React.useState(false);
+  const [noMetricsError, setNoMetricsError] = React.useState("");
+  const [metricOptions, setMetricOptions] = React.useState([]);
+
+  React.useEffect(() => {
+    // If 'contents' is undefined in the payload
+    if (evaluation && evaluation.contents === undefined) {
+      setMetricOptions([
+        "Bias",
+        "Toxicity",
+        "Summarization",
+        "Answer Correctness",
+      ]);
+    } else {
+      // Else, all except "Answer Correctness"
+      setMetricOptions([
+        "Bias",
+        "Toxicity",
+        "Summarization",
+        "Faithfulness",
+        "Hallucination",
+        "Answer Relevancy",
+        "Contextual Relevancy",
+        "Contextual Recall",
+      ]);
+    }
+  }, [evaluation]);
 
   // Reset the form fields
   React.useEffect(() => {
@@ -53,6 +68,7 @@ export default function MultipleSelectChip({ evaluation }) {
     setSelectedMetrics([]);
     setEvaluationResult("");
     setLocalLoading(true);
+    setNoMetricsError("");
 
     // Simulate a loading delay
     const timer = setTimeout(() => {
@@ -66,6 +82,7 @@ export default function MultipleSelectChip({ evaluation }) {
     const metrics = typeof value === 'string' ? value.split(',') : value;
     setPersonName(metrics);
     setSelectedMetrics(metrics);
+    setNoMetricsError("");
   };
 
   const handleDelete = (chipToDelete) => {
@@ -83,6 +100,14 @@ export default function MultipleSelectChip({ evaluation }) {
   const handleEvaluateClick = async () => {
     // Clear previous evaluation result immediately.
     setEvaluationResult("");
+
+    // Check if no metrics selected
+    if (selectedMetrics.length === 0) {
+      setNoMetricsError("No metrics selected");
+      return;
+    }
+
+    setNoMetricsError("");
     setIsEvaluating(true);
 
     const payload = { ...evaluation, metrics: selectedMetrics };
@@ -130,7 +155,7 @@ export default function MultipleSelectChip({ evaluation }) {
 
   // Finds the matching display name for a metric.
   const getDisplayName = (lowerValue) => {
-    const found = names.find(n => n.toLowerCase() === lowerValue);
+    const found = metricOptions.find(n => n.toLowerCase() === lowerValue);
     return found ? found : lowerValue;
   };
 
@@ -172,7 +197,7 @@ export default function MultipleSelectChip({ evaluation }) {
           )}
           MenuProps={MenuProps}
         >
-          {names.map((name) => (
+          {metricOptions.map((name) => (
             <MenuItem
               key={name}
               value={name.toLowerCase()} // underlying value is lowercase
@@ -194,6 +219,12 @@ export default function MultipleSelectChip({ evaluation }) {
         </Button>
       </Box>
 
+      {noMetricsError && (
+        <Box className="no-metrics-message">
+          {noMetricsError}
+        </Box>
+      )}
+
       {isEvaluating && (
         <Box mt={1} display="flex" alignItems="center">
           <Box className="custom-spinner" />
frontend/src/Components/AiComponents/{FormSection.js → ChatComponents/FormSection.js} RENAMED
File without changes
frontend/src/Components/AiComponents/{Graph.css → ChatComponents/Graph.css} RENAMED
File without changes
frontend/src/Components/AiComponents/{Graph.js → ChatComponents/Graph.js} RENAMED
@@ -2,19 +2,21 @@ import React, { useState, useEffect } from 'react';
 import { FaTimes } from 'react-icons/fa';
 import './Graph.css';
 
-export default function Graph({ open, onClose, onError }) {
+export default function Graph({ open, onClose, payload, onError }) {
   const [graphHtml, setGraphHtml] = useState("");
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState("");
 
   useEffect(() => {
+    // if (open && payload) {
     if (open) {
       setLoading(true);
       setError("");
       fetch("/action/graph", {
         method: "POST",
         headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({})
+        body: JSON.stringify()
+        // body: JSON.stringify(payload)
       })
       .then(res => res.json())
       .then(data => {
@@ -37,6 +39,7 @@ export default function Graph({ open, onClose, onError }) {
       });
     }
   }, [open, onError]);
+  // }, [open, payload, onError]);
 
   if (!open) return null;
 
frontend/src/Components/AiComponents/{LeftSideBar.js → ChatComponents/LeftSideBar.js} RENAMED
File without changes
frontend/src/Components/AiComponents/{LeftSidebar.css → ChatComponents/LeftSidebar.css} RENAMED
File without changes
frontend/src/Components/AiComponents/{RightSidebar.css → ChatComponents/RightSidebar.css} RENAMED
@@ -7,6 +7,8 @@
 
 /* Main sidebar container */
 .right-side-bar {
+  display: flex;
+  flex-direction: column;
   position: fixed;
   top: 0;
   right: 0;
@@ -14,11 +16,16 @@
   background-color: var(--sidebar-background); /* Keep background uniform */
   color: var(--text-light);
   box-shadow: -2px 0 8px rgba(0, 0, 0, 0.5);
-  transition: transform var(--transition-speed);
+  transition: width 0.4s ease;
   overflow-y: auto;
   z-index: 1000;
 }
 
+/* Sidebar resizing */
+.right-side-bar.resizing {
+  transition: none;
+}
+
 /* When the sidebar is closed */
 .right-side-bar.closed {
   width: 0;
@@ -59,6 +66,8 @@
 .sidebar-content {
   padding: 16px;
   background: transparent;
+  overflow-x: hidden;
+  overflow-y: auto;
 }
 
 /* Also clear any default marker via the pseudo-element */
@@ -126,4 +135,4 @@
 @keyframes spin {
   from { transform: rotate(0deg); }
   to { transform: rotate(360deg); }
-}
+}
frontend/src/Components/AiComponents/{RightSidebar.js → ChatComponents/RightSidebar.js} RENAMED
@@ -1,4 +1,4 @@
-import React from 'react';
+import React, { useRef } from 'react';
 import { FaTimes, FaCheck, FaSpinner } from 'react-icons/fa';
 import { BsChevronLeft } from 'react-icons/bs';
 import CircularProgress from '@mui/material/CircularProgress';
@@ -22,10 +22,12 @@ function RightSidebar({
 }) {
   const minWidth = 200;
   const maxWidth = 450;
+  const sidebarRef = useRef(null);
 
   // Called when the user starts resizing the sidebar.
   const startResize = (e) => {
     e.preventDefault();
+    sidebarRef.current.classList.add("resizing"); // Add the "resizing" class to the sidebar when resizing
     document.addEventListener("mousemove", resizeSidebar);
     document.addEventListener("mouseup", stopResize);
   };
@@ -38,6 +40,7 @@
   };
 
   const stopResize = () => {
+    sidebarRef.current.classList.remove("resizing"); // Remove the "resizing" class from the sidebar when resizing stops
     document.removeEventListener("mousemove", resizeSidebar);
     document.removeEventListener("mouseup", stopResize);
   };
@@ -72,8 +75,9 @@
   return (
     <>
       <nav
+        ref={sidebarRef}
         className={`right-side-bar ${isOpen ? "open" : "closed"}`}
-        style={{ width: isOpen ? rightSidebarWidth : undefined }}
+        style={{ width: isOpen ? rightSidebarWidth : 0 }}
       >
         <div className="sidebar-header">
           <h3>
frontend/src/Components/AiComponents/{Sources.css → ChatComponents/Sources.css} RENAMED
File without changes
frontend/src/Components/AiComponents/{Sources.js → ChatComponents/Sources.js} RENAMED
File without changes
frontend/src/Components/AiComponents/ChatComponents/Streaming.css ADDED
@@ -0,0 +1,120 @@
+.streaming-content {
+  font-family: inherit;
+  line-height: 2rem;
+  white-space: pre-wrap;
+  word-wrap: break-word;
+  margin: 0;
+  padding: 0;
+}
+
+/* Reset margin/padding for all descendants */
+.streaming-content * {
+  margin: 0;
+  padding: 0;
+}
+
+/* Top-level elements */
+.streaming-content > * {
+  margin-top: 0.5rem;
+  margin-bottom: 0.5rem;
+}
+
+/* VERY FIRST element in an AI answer */
+.streaming-content > *:first-child {
+  margin-top: 0 !important;
+}
+
+/* Headings */
+.streaming-content h1,
+.streaming-content h2,
+.streaming-content h3,
+.streaming-content h4,
+.streaming-content h5,
+.streaming-content h6 {
+  margin-top: 1rem;
+  margin-bottom: 0.75rem;
+}
+
+/* If heading is the very first element */
+.streaming-content > h1:first-child,
+.streaming-content > h2:first-child,
+.streaming-content > h3:first-child,
+.streaming-content > h4:first-child,
+.streaming-content > h5:first-child,
+.streaming-content > h6:first-child {
+  margin-top: 0 !important;
+}
+
+/* Paragraphs */
+.streaming-content p {
+  margin-top: 0.25rem;
+  margin-bottom: 0.25rem;
+}
+
+/* Lists */
+.streaming-content ul,
+.streaming-content ol {
+  margin-top: 0.25rem;
+  margin-bottom: 0.25rem;
+  padding-left: 1.25rem;
+  white-space: normal;
+}
+
+.streaming-content li {
+  margin-bottom: 0.25rem;
+}
+
+.streaming-content li ul,
+.streaming-content li ol {
+  margin-top: 0.15rem;
+  margin-bottom: 0.15rem;
+}
+
+/* Code Blocks */
+.code-block-container {
+  margin: 0.5rem 0;
+  border-radius: 4px;
+  background-color: #2b2b2b;
+  overflow: hidden;
+}
+
+.code-block-header {
+  background-color: #1e1e1e;
+  color: #ffffff;
+  padding: 0.5rem;
+  font-size: 0.85rem;
+  font-weight: bold;
+}
+
+/* Table Container */
+.table-container {
+  margin: 0.5rem 0;
+  width: 100%;
+  overflow-x: auto;
+  border: 1px solid #ddd;
+  border-radius: 4px;
+}
+
+.table-container th,
+.table-container td {
+  border: 1px solid #ddd;
+  padding: 0.5rem;
+}
+
+/* Markdown Links */
+.markdown-link {
+  color: #1a73e8;
+  text-decoration: none;
+}
+.markdown-link:hover {
+  text-decoration: underline;
+}
+
+/* Blockquotes */
+.markdown-blockquote {
+  border-left: 4px solid #ccc;
+  padding-left: 0.75rem;
+  margin: 0.5rem 0;
+  color: #555;
+  font-style: italic;
+}
frontend/src/Components/AiComponents/ChatComponents/Streaming.js ADDED
@@ -0,0 +1,82 @@
+import React, { useEffect, useRef } from 'react';
+import ReactMarkdown from 'react-markdown';
+import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
+import { atomDark } from 'react-syntax-highlighter/dist/esm/styles/prism';
+import remarkGfm from 'remark-gfm';
+import rehypeRaw from 'rehype-raw';
+import './Streaming.css';
+
+// Streaming component for rendering markdown content
+const Streaming = ({ content, isStreaming, onContentRef }) => {
+  const contentRef = useRef(null);
+
+  useEffect(() => {
+    if (contentRef.current && onContentRef) {
+      onContentRef(contentRef.current);
+    }
+  }, [content, onContentRef]);
+
+  const displayContent = isStreaming ? `${content}▌` : (content || '');
+
+  return (
+    <div className="streaming-content" ref={contentRef}>
+      <ReactMarkdown
+        remarkPlugins={[remarkGfm]}
+        rehypePlugins={[rehypeRaw]}
+        components={{
+          code({node, inline, className, children, ...props}) {
+            const match = /language-(\w+)/.exec(className || '');
+            return !inline ? (
+              <div className="code-block-container">
+                <div className="code-block-header">
+                  <span>{match ? match[1] : 'code'}</span>
+                </div>
+                <SyntaxHighlighter
+                  style={atomDark}
+                  language={match ? match[1] : 'text'}
+                  PreTag="div"
+                  {...props}
+                >
+                  {String(children).replace(/\n$/, '')}
+                </SyntaxHighlighter>
+              </div>
+            ) : (
+              <code className={className} {...props}>
+                {children}
+              </code>
+            );
+          },
+          table({node, ...props}) {
+            return (
+              <div className="table-container">
+                <table {...props} />
+              </div>
+            );
+          },
+          a({node, children, href, ...props}) {
+            return (
+              <a
+                href={href}
+                target="_blank"
+                rel="noopener noreferrer"
+                className="markdown-link"
+                {...props}
+              >
+                {children}
+              </a>
+            );
+          },
+          blockquote({node, ...props}) {
+            return (
+              <blockquote className="markdown-blockquote" {...props} />
+            );
+          }
+        }}
+      >
+        {displayContent}
+      </ReactMarkdown>
+    </div>
+  );
+};
+
+export default Streaming;
frontend/src/Components/AiComponents/ChatWindow.css CHANGED
@@ -122,11 +122,6 @@
 .answer {
   margin: 0;
   line-height: 1.85;
-}
-
-.markdown {
-  margin: -1rem 0 -0.8rem 0;
-  line-height: 2rem;
   white-space: pre-wrap;
 }
 
@@ -161,13 +156,6 @@
   filter: brightness(0.65);
 }
 
-/* .post-icons .copy-icon:active img,
-.post-icons .evaluate-icon:active img,
-.post-icons .sources-icon:active img,
-.post-icons .graph-icon:active img {
-  filter: brightness(0.35);
-} */
-
 /* Tooltip styling */
 .tooltip {
   position: absolute;
frontend/src/Components/AiComponents/ChatWindow.js CHANGED
@@ -1,11 +1,11 @@
-import React, { useRef, useEffect, useState } from 'react';
+import React, { useRef, useState, useEffect } from 'react';
 import Box from '@mui/material/Box';
 import Snackbar from '@mui/material/Snackbar';
 import Slide from '@mui/material/Slide';
 import IconButton from '@mui/material/IconButton';
 import { FaTimes } from 'react-icons/fa';
-import ReactMarkdown from 'react-markdown';
-import GraphDialog from './Graph';
+import GraphDialog from './ChatComponents/Graph';
+import Streaming from './ChatComponents/Streaming';
 import './ChatWindow.css';
 
 import bot from '../../Icons/bot.png';
@@ -23,6 +23,7 @@ function SlideTransition(props) {
 function ChatWindow({
   blockId,
   userMessage,
+  tokenChunks,
   aiAnswer,
   thinkingTime,
   thoughtLabel,
@@ -60,12 +61,13 @@
     setSnackbarOpen(false);
   };
 
-  // Determine if any tokens (partial or full answer) have been received.
-  const hasTokens = aiAnswer && aiAnswer.length > 0;
+  // Combine partial chunks (tokenChunks) if present; else fall back to the aiAnswer string.
+  const combinedAnswer = (tokenChunks && tokenChunks.length > 0)
+    ? tokenChunks.join("")
+    : aiAnswer;
+  const hasTokens = combinedAnswer && combinedAnswer.length > 0;
   // Assume streaming is in progress if thinkingTime is not set.
   const isStreaming = thinkingTime === null || thinkingTime === undefined;
-  // Append a trailing cursor if streaming.
-  const displayAnswer = hasTokens ? (isStreaming ? aiAnswer + "▌" : aiAnswer) : "";
 
   // Helper to render the thought label.
   const renderThoughtLabel = () => {
@@ -94,6 +96,11 @@
     prevTasksRef.current = tasks;
   }, [tasks, blockId, openRightSidebar]);
 
+  // Handle getting the reference to the content for copy functionality
+  const handleContentRef = (ref) => {
+    answerRef.current = ref;
+  };
+
   return (
     <>
       { !hasTokens ? (
@@ -178,10 +185,12 @@
               <img src={bot} alt="bot icon" />
             </div>
             <div className="message-bubble bot-bubble">
-              <div className="answer" ref={answerRef}>
-                <ReactMarkdown className="markdown">
-                  {displayAnswer}
-                </ReactMarkdown>
+              <div className="answer">
+                <Streaming
+                  content={combinedAnswer}
+                  isStreaming={isStreaming}
+                  onContentRef={handleContentRef}
+                />
               </div>
             </div>
             <div className="post-icons">
frontend/src/Components/AiPage.css CHANGED
@@ -24,7 +24,6 @@ html, body {
   display: flex;
   min-height: 100vh;
   position: relative;
-  overflow-x: hidden;
   overflow-y: auto;
 }
 
@@ -38,7 +37,7 @@ html, body {
   padding: 2rem;
   transition: 0.1s;
   width: 99%;
-  max-width: 800px;
+  max-width: 900px;
   margin: 0 auto;
 }
 
@@ -52,8 +51,7 @@ html, body {
 }
 
 .search-area {
-  width: 99%;
-  max-width: 800px;
+  width: 83%;
 }
 
 .search-bar {
@@ -146,8 +144,7 @@ button.send-btn.stop-btn:hover {
   bottom: 1.5rem;
   left: 50%;
   transform: translateX(-50%);
-  width: 100%;
-  max-width: 800px;
+  width: 48%;
   background-color: #21212f;
   border-radius: 0.35rem;
 }
@@ -247,4 +244,4 @@ button.send-btn.stop-btn:hover {
     margin: 0;
     padding: 1rem;
   }
-}
+}
frontend/src/Components/AiPage.js CHANGED
@@ -1,9 +1,12 @@
-import React, { useState, useEffect, useRef } from 'react';
-import './AiPage.css';
+import React, { useState, useEffect, useRef, useCallback, useMemo } from 'react';
+import { flushSync } from 'react-dom';
+import Snackbar from '@mui/material/Snackbar';
+import Alert from '@mui/material/Alert';
 import { FaCog, FaPaperPlane, FaStop } from 'react-icons/fa';
 import IntialSetting from './IntialSetting';
 import ChatWindow from './AiComponents/ChatWindow';
-import RightSidebar from './AiComponents/RightSidebar';
+import RightSidebar from './AiComponents/ChatComponents/RightSidebar';
+import './AiPage.css';
 
 function AiPage() {
   // Sidebar and other states
@@ -29,6 +32,24 @@ function AiPage() {
   const [activeBlockId, setActiveBlockId] = useState(null);
   const activeEventSourceRef = useRef(null);
 
+  // Snackbar state
+  const [snackbar, setSnackbar] = useState({
+    open: false,
+    message: "",
+    severity: "success",
+  });
+
+  // Function to open the snackbar
+  const openSnackbar = (message, severity = "success") => {
+    setSnackbar({ open: true, message, severity });
+  };
+
+  // Function to close the snackbar
+  const closeSnackbar = (event, reason) => {
+    if (reason === 'clickaway') return;
+    setSnackbar(prev => ({ ...prev, open: false }));
+  };
+
   useEffect(() => {
     localStorage.setItem("rightSidebarState", isRightSidebarOpen);
   }, [isRightSidebarOpen]);
@@ -37,6 +58,7 @@ function AiPage() {
     document.documentElement.style.setProperty('--right-sidebar-width', rightSidebarWidth + 'px');
   }, [rightSidebarWidth]);
 
+  // Dynamically increase height of chat input field based on newlines entered
  useEffect(() => {
     if (textAreaRef.current) {
       if (!defaultChatHeight) {
@@ -68,14 +90,16 @@ function AiPage() {
   }, [searchText, defaultChatHeight]);
 
   const handleOpenRightSidebar = (content, chatBlockId = null) => {
-    if (chatBlockId) {
-      setSelectedChatBlockId(chatBlockId);
-    }
-    setSidebarContent(content ? content : "default");
-    setRightSidebarOpen(true);
+    flushSync(() => {
+      if (chatBlockId) {
+        setSelectedChatBlockId(chatBlockId);
+      }
+      setSidebarContent(content ? content : "default");
+      setRightSidebarOpen(true);
+    });
   };
 
-  const handleEvaluationError = (blockId, errorMsg) => {
+  const handleEvaluationError = useCallback((blockId, errorMsg) => {
     setChatBlocks(prev =>
       prev.map(block =>
         block.id === blockId
@@ -83,7 +107,7 @@
           : block
       )
     );
-  };
+  }, []);
 
   // Initiate the SSE
   const initiateSSE = (query, blockId) => {
@@ -93,12 +117,25 @@
     activeEventSourceRef.current = eventSource;
 
     eventSource.addEventListener("token", (e) => {
-      setChatBlocks(prev => prev.map(block =>
-        block.id === blockId
-          ? { ...block, aiAnswer: block.aiAnswer + e.data }
-          : block
-      ));
-    });
+      const { chunk, index } = JSON.parse(e.data);
+      console.log("[SSE token chunk]", JSON.stringify(chunk));
+      console.log("[SSE token index]", JSON.stringify(index));
+
+      setChatBlocks(prevBlocks => {
+        return prevBlocks.map(block => {
+          if (block.id === blockId) {
+            const newTokenArray = block.tokenChunks ? [...block.tokenChunks] : [];
+            newTokenArray[index] = chunk;
+
+            return {
+              ...block,
+              tokenChunks: newTokenArray
+            };
+          }
+          return block;
+        });
+      });
+    });
 
     eventSource.addEventListener("final_message", (e) => {
       const endTime = Date.now();
@@ -148,7 +185,13 @@
       console.error("Error from SSE:", e.data);
       setChatBlocks(prev => prev.map(block =>
         block.id === blockId
-          ? { ...block, isError: true, errorMessage: e.data, aiAnswer: "" }
+          ? {
+              ...block,
+              isError: true,
+              errorMessage: e.data,
+              aiAnswer: "",
+              tasks: []
+            }
           : block
       ));
       eventSource.close();
@@ -228,6 +271,7 @@
       {
         id: blockId,
         userMessage: searchText,
+        tokenChunks: [],
        aiAnswer: "",
         thinkingTime: null,
         thoughtLabel: "",
@@ -299,6 +343,16 @@
     ? selectedBlock.actions.find(a => a.name === "evaluate")
     : null;
 
+  // Memoized evaluation object
+  const evaluation = useMemo(() => {
+    if (!evaluateAction) return null;
+    return {
+      ...evaluateAction.payload,
+      blockId: selectedBlock?.id,
+      onError: handleEvaluationError,
+    };
+  }, [evaluateAction, selectedBlock?.id, handleEvaluationError]);
+
   return (
     <div
       className="app-container"
@@ -324,11 +378,7 @@
           if (!source || !source.link) return;
           window.open(source.link, '_blank');
         }}
-        evaluation={
-          evaluateAction
-            ? { ...evaluateAction.payload, blockId: selectedBlock?.id, onError: handleEvaluationError }
-            : null
-        }
+        evaluation={evaluation}
       />
     </div>
   )}
@@ -342,6 +392,7 @@
         key={block.id}
         blockId={block.id}
         userMessage={block.userMessage}
+        tokenChunks={block.tokenChunks}
         aiAnswer={block.aiAnswer}
         thinkingTime={block.thinkingTime}
         thoughtLabel={block.thoughtLabel}
@@ -430,8 +481,20 @@
           trigger={true}
           setTrigger={() => setShowSettingsModal(false)}
           fromAiPage={true}
+          openSnackbar={openSnackbar}
+          closeSnackbar={closeSnackbar}
         />
       )}
+      <Snackbar
+        open={snackbar.open}
+        autoHideDuration={snackbar.severity === 'success' ? 3000 : null}
+        onClose={closeSnackbar}
+        anchorOrigin={{ vertical: 'top', horizontal: 'center' }}
+      >
+        <Alert onClose={closeSnackbar} severity={snackbar.severity} variant="filled" sx={{ width: '100%' }}>
+          {snackbar.message}
+        </Alert>
+      </Snackbar>
     </div>
   );
 }
frontend/src/Components/IntialSetting.css CHANGED
@@ -171,4 +171,4 @@ input, select, textarea {
     width: 90%;
     max-height: 75vh; /* Adjust height for smaller screens */
   }
-}
+}
frontend/src/Components/IntialSetting.js CHANGED
@@ -5,32 +5,21 @@ import Slider from '@mui/material/Slider';
 import Stack from '@mui/material/Stack';
 import Button from '@mui/material/Button';
 import IconButton from '@mui/material/IconButton';
-import Snackbar from '@mui/material/Snackbar';
-import Alert from '@mui/material/Alert';
 import './IntialSetting.css';
 import { FaTimes, FaEye, FaEyeSlash } from 'react-icons/fa';
 
 function IntialSetting(props) {
-  // Controlled states for Model Provider and sliders
+  // State variables for form controls
   const [selectedProvider, setSelectedProvider] = useState("OpenAI");
   const [modelTemperature, setModelTemperature] = useState(0.0);
   const [modelTopP, setModelTopP] = useState(1.0);
   const [showPassword, setShowPassword] = useState(false);
 
-  // Snackbar state for notifications
-  const [snackbar, setSnackbar] = useState({
-    open: false,
-    message: "",
-    severity: "success", // "success", "error", "info", "warning"
-  });
-
-  // Ref for the form element
+  // Ref for the form and navigation hook
   const formRef = useRef(null);
-
-  // React Router hook to navigate programmatically
   const navigate = useNavigate();
 
-  // Define model options
+  // Model options for different providers
   const modelOptions = {
     OpenAI: {
       "GPT-4 Turbo": "gpt-4-turbo",
@@ -60,7 +49,7 @@ function IntialSetting(props) {
     },
   };
 
-  // Reset handler: resets both the form (uncontrolled fields) and controlled states
+  // Reset form and state variables
   const handleReset = (e) => {
     e.preventDefault();
     if (formRef.current) {
@@ -71,98 +60,66 @@ function IntialSetting(props) {
     setModelTopP(1.0);
   };
 
-  // Snackbar close handler
-  const handleSnackbarClose = (event, reason) => {
-    if (reason === 'clickaway') return;
-    setSnackbar((prev) => ({ ...prev, open: false }));
-  };
-
-  // Save handler: validates the form, shows notifications, calls the parent's callback
-  // to update the underlying page (spinner/initializing text), sends the API request, and
-  // navigates to /AiPage when the backend returns success.
+  // Handle form submission and save settings
   const handleSave = async (e) => {
     e.preventDefault();
     const form = formRef.current;
 
-    // Retrieve values from form fields using their name attribute
+    // Retrieve form values
     const modelProvider = form.elements["model-provider"].value;
     const modelName = form.elements["model-name"].value;
     const modelAPIKeys = form.elements["model-api"].value;
     const braveAPIKey = form.elements["brave-api"].value;
     const proxyList = form.elements["proxy-list"].value;
-    // const neo4jURL = form.elements["neo4j-url"].value;
-    // const neo4jUsername = form.elements["neo4j-username"].value;
-    // const neo4jPassword = form.elements["neo4j-password"].value;
 
-    // Validate required fields and collect missing field names
+    // Check for missing required fields
     const missingFields = [];
     if (!modelProvider || modelProvider.trim() === "") missingFields.push("Model Provider");
     if (!modelName || modelName.trim() === "") missingFields.push("Model Name");
     if (!modelAPIKeys || modelAPIKeys.trim() === "") missingFields.push("Model API Key");
    if (!braveAPIKey || braveAPIKey.trim() === "") missingFields.push("Brave Search API Key");
-    // if (!neo4jURL || neo4jURL.trim() === "") missingFields.push("Neo4j URL");
-    // if (!neo4jUsername || neo4jUsername.trim() === "") missingFields.push("Neo4j Username");
-    // if (!neo4jPassword || neo4jPassword.trim() === "") missingFields.push("Neo4j Password");
 
-    // If any required fields are missing, show an error notification
     if (missingFields.length > 0) {
-      setSnackbar({
-        open: true,
-        message: "Please fill in the following required fields: " + missingFields.join(", "),
-        severity: "error",
-      });
+      props.openSnackbar(
+        "Please fill in the following required fields: " + missingFields.join(", "),
+        "error"
+      );
      return;
     }
 
-    // Build the JSON payload
+    // Build payload for backend
    const payload = {
       "Model_Provider": modelProvider.toLowerCase(),
       "Model_Name": modelName,
       "Model_API_Keys": modelAPIKeys,
       "Brave_Search_API_Key": braveAPIKey,
-      // "Neo4j_URL": neo4jURL,
-      // "Neo4j_Username": neo4jUsername,
-      // "Neo4j_Password": neo4jPassword,
       "Model_Temperature": modelTemperature,
       "Model_Top_P": modelTopP,
     };
 
-    // Include Proxy List if provided
     if (proxyList && proxyList.trim() !== "") {
       payload["Proxy_List"] = proxyList;
     }
 
-    // If opened from AiPage, show "Re-applying settings..." info notification with spinner
+    // Show appropriate notification based on context
    if (props.fromAiPage) {
-      setSnackbar({
-        open: true,
-        message: (
-          <Box mt={1} display="flex" alignItems="center">
-            <Box className="re-applying-settings-custom-spinner" />
-            <Box ml={1} className="re-applying-settings-text">
-              <span>
-                Re-applying settings. This may take a few minutes...
-              </span>
-            </Box>
+      props.openSnackbar(
+        <Box mt={1} display="flex" alignItems="center">
+          <Box className="re-applying-settings-custom-spinner" />
+          <Box ml={1} className="re-applying-settings-text">
+            <span>Re-applying settings. This may take a few minutes...</span>
          </Box>
-        ),
-        severity: "info",
-      });
+        </Box>,
+        "info"
+      );
    } else {
-      // Original immediate success notification if opened from Home/App
-      setSnackbar({
-        open: true,
-        message: "Settings saved successfully!",
-        severity: "success",
-      });
-
-      // Call the parent's callback to change the underlying page's content (spinner/text)
+      props.openSnackbar("Settings saved successfully!", "success");
      if (props.onInitializationStart) {
         props.onInitializationStart();
       }
     }
 
-    // Send the payload to the backend
+    // Send settings to backend
    try {
       const response = await fetch("/settings", {
         method: "POST",
@@ -174,44 +131,24 @@ function IntialSetting(props) {
 
       if (response.ok) {
         const data = await response.json();
-        // When the backend returns {"success": true}, navigate to /AiPage
        if (data.success === true) {
-          // If from AiPage, show the final "Settings saved successfully!" success notification
          if (props.fromAiPage) {
-            setSnackbar({
-              open: true,
-              message: "Settings saved successfully!",
-              severity: "success",
-            });
+            props.openSnackbar("Settings saved successfully!", "success");
          }
           navigate("/AiPage");
         } else {
-          // If the response is OK but success is not true
-          setSnackbar({
-            open: true,
-            message: "Error saving settings. Please try again.",
-            severity: "error",
-          });
+          props.openSnackbar("Error saving settings. Please try again.", "error");
        }
       } else {
-        // If response is not OK, display error notification
-        setSnackbar({
-          open: true,
-          message: "Error saving settings. Please try again.",
-          severity: "error",
-        });
+        props.openSnackbar("Error saving settings. Please try again.", "error");
      }
     } catch (error) {
       console.error("Error saving settings:", error);
-      // Show error notification
-      setSnackbar({
-        open: true,
-        message: "Error saving settings. Please try again.",
-        severity: "error",
-      });
+      props.openSnackbar("Error saving settings. Please try again.", "error");
    }
   };
 
+  // Render the settings modal
  return props.trigger ? (
     <div className="showSetting" onClick={() => props.setTrigger(false)}>
       <div className="showSetting-inner" onClick={(e) => e.stopPropagation()}>
@@ -220,8 +157,6 @@ function IntialSetting(props) {
           <FaTimes />
         </button>
         <form ref={formRef}>
-
-          {/* Model Provider Selection */}
           <div className="form-group">
             <label htmlFor="model-provider">Model Provider</label>
             <select
@@ -237,8 +172,6 @@ function IntialSetting(props) {
             ))}
             </select>
           </div>
-
-          {/* Model Name Selection */}
           <div className="form-group">
             <label htmlFor="model-name">Model Name</label>
             <select id="model-name" name="model-name">
@@ -251,8 +184,6 @@ function IntialSetting(props) {
             )}
             </select>
           </div>
-
-          {/* API Key Inputs */}
           <div className="form-group">
             <label htmlFor="model-api">Model API Key</label>
             <textarea
@@ -270,8 +201,6 @@ function IntialSetting(props) {
               placeholder="Enter API Key"
             />
           </div>
-
-          {/* Proxy List */}
           <div className="form-group">
             <label htmlFor="proxy-list">Proxy List</label>
             <textarea
@@ -280,8 +209,7 @@ function IntialSetting(props) {
               placeholder="Enter proxies, one per line"
             ></textarea>
           </div>
-
-          {/* Neo4j Configuration */}
+          {/* Commented Neo4j configuration fields */}
          {/* <div className="form-group">
             <label htmlFor="neo4j-url">Neo4j URL</label>
             <input
@@ -313,17 +241,15 @@ function IntialSetting(props) {
                 onClick={() => setShowPassword(prev => !prev)}
                 className="password-toggle"
                 sx={{
-                  color: "white", // Change the color of the icon
-                  p: 0, // Remove internal padding
-                  m: 0 // Remove any margin
+                  color: "white",
+                  p: 0,
+                  m: 0
                 }}
               >
                 {showPassword ? <FaEyeSlash /> : <FaEye />}
               </IconButton>
             </div>
           </div> */}
-
-          {/* Model Temperature and Top-P */}
          <div className="form-group">
             <div className="sliders-container">
               <div className="slider-item">
@@ -354,8 +280,6 @@ function IntialSetting(props) {
               </div>
             </div>
           </div>
-
-          {/* Buttons */}
          <Stack direction="row" spacing={2} sx={{ justifyContent: 'flex-end' }}>
             <Button
               type="button"
@@ -376,18 +300,6 @@ function IntialSetting(props) {
             </Button>
           </Stack>
         </form>
-
-        {/* Notifications */}
-        <Snackbar
-          open={snackbar.open}
-          autoHideDuration={snackbar.severity === 'success' ? 3000 : null}
-          onClose={handleSnackbarClose}
-          anchorOrigin={{ vertical: 'top', horizontal: 'center' }}
-        >
-          <Alert onClose={handleSnackbarClose} severity={snackbar.severity} variant="filled" sx={{ width: '100%' }}>
-            {snackbar.message}
-          </Alert>
-        </Snackbar>
        {props.children}
       </div>
     </div>
main.py CHANGED
@@ -18,8 +18,6 @@ from google.api_core.exceptions import ResourceExhausted
 logger = logging.getLogger()
 logger.setLevel(logging.INFO)
 
-ENV_FILE_PATH = os.getenv("WRITABLE_DIR", "/tmp") + "/.env"
-
 CONTEXT_LENGTH = 128000
 BUFFER = 10000
 MAX_TOKENS_ALLOWED = CONTEXT_LENGTH - BUFFER
@@ -36,12 +34,21 @@ def format_error_sse(event_type: str, data: str) -> str:
     sse_message += "\n"
     return sse_message
 
 # Initialize the components
 def initialize_components():
-    load_dotenv(ENV_FILE_PATH, override=True)
 
     from src.search.search_engine import SearchEngine
     from src.query_processing.query_processor import QueryProcessor
     from src.rag.graph_rag import GraphRAG
     from src.evaluation.evaluator import Evaluator
     from src.reasoning.reasoner import Reasoner
@@ -54,6 +61,7 @@ def initialize_components():
     SESSION_STORE['search_engine'] = SearchEngine()
     SESSION_STORE['query_processor'] = QueryProcessor()
     SESSION_STORE['crawler'] = CustomCrawler(max_concurrent_requests=1000)
     SESSION_STORE['graph_rag'] = GraphRAG(num_workers=os.cpu_count() * 2)
     SESSION_STORE['evaluator'] = Evaluator()
     SESSION_STORE['reasoner'] = Reasoner()
@@ -74,11 +82,13 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
 
     user_query = re.sub(r'category:.*', '', user_query, flags=re.IGNORECASE).strip()
 
-    if cat_lower == "internal knowledge base":
         response = ""
         async for chunk in state["reasoner"].reason(user_query):
             response += chunk
-            await sse_queue.put(("token", chunk))
 
         await sse_queue.put(("final_message", response))
         SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
@@ -90,7 +100,7 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
 
         await sse_queue.put(("complete", "done"))
 
-    elif cat_lower == "simple external lookup":
         await sse_queue.put(("step", "Searching..."))
 
         optimized_query = await state['search_engine'].generate_optimized_query(user_query)
@@ -126,9 +136,11 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
             await sse_queue.put(("sources_read", len(search_contents)))
 
             response = ""
             async for chunk in state["reasoner"].reason(user_query, contents):
                 response += chunk
-                await sse_queue.put(("token", chunk))
 
             await sse_queue.put(("final_message", response))
             SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
@@ -146,7 +158,7 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
         else:
             await sse_queue.put(("error", "No results found."))
 
-    elif cat_lower == "complex moderate decomposition":
         current_search_results = []
         current_search_contents = []
 
@@ -202,7 +214,11 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
                 await sse_queue.put(("task", (sub_query, "FAILED")))
                 return ""
 
-        tasks = [sub_query_task(sub_query) for sub_query in sub_queries]
         results = await asyncio.gather(*tasks)
         end = time.time()
 
@@ -234,14 +250,16 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
             await sse_queue.put(("sources_read", len(current_search_contents)))
 
             response = ""
             is_first_chunk = True
             async for chunk in state['reasoner'].reason(user_query, contents):
                 if is_first_chunk:
                     await sse_queue.put(("step", f"Thought and searched for {int(end - start)} seconds"))
                     is_first_chunk = False
 
                 response += chunk
-                await sse_queue.put(("token", chunk))
 
             await sse_queue.put(("final_message", response))
             SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
@@ -262,7 +280,7 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
         else:
             await sse_queue.put(("error", "No results found."))
 
-    elif cat_lower == "complex advanced decomposition":
         current_search_results = []
         current_search_contents = []
 
@@ -314,7 +332,11 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
 
                 sub_sub_queries, _ = await state['query_processor'].decompose_query(sub_query)
 
-                tasks = [sub_sub_query_task(sub_sub_query) for sub_sub_query in sub_sub_queries]
                 results = await asyncio.gather(*tasks)
 
                 if any(result.strip() for result in results):
@@ -328,7 +350,11 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
                 await sse_queue.put(("task", (sub_query, "FAILED")))
                 return []
 
-        tasks = [sub_query_task(sub_query) for sub_query in sub_queries]
         results = await asyncio.gather(*tasks)
         end = time.time()
 
@@ -366,14 +392,16 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
             await sse_queue.put(("sources_read", len(current_search_contents)))
 
             response = ""
             is_first_chunk = True
             async for chunk in state['reasoner'].reason(user_query, contents):
                 if is_first_chunk:
                     await sse_queue.put(("step", f"Thought and searched for {int(end - start)} seconds"))
                     is_first_chunk = False
 
                 response += chunk
-                await sse_queue.put(("token", chunk))
 
             await sse_queue.put(("final_message", response))
             SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
@@ -394,7 +422,7 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
         else:
             await sse_queue.put(("error", "No results found."))
 
-    elif cat_lower == "extensive research dynamic structuring":
         current_search_results = []
         current_search_contents = []
 
@@ -454,6 +482,7 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
454
  state['graph_rag'].set_on_event_callback(on_event_callback)
455
 
456
  start = time.time()
 
457
  await state['graph_rag'].process_graph(
458
  user_query,
459
  similarity_threshold=0.8,
@@ -489,14 +518,16 @@ async def process_query(user_query: str, sse_queue: asyncio.Queue):
489
  await sse_queue.put(("sources_read", len(current_search_contents)))
490
 
491
  response = ""
 
492
  is_first_chunk = True
493
  async for chunk in state['reasoner'].reason(user_query, answer):
494
  if is_first_chunk:
495
  await sse_queue.put(("step", f"Thought and searched for {int(end - start)} seconds"))
496
  is_first_chunk = False
497
 
 
498
  response += chunk
499
- await sse_queue.put(("token", chunk))
500
 
501
  await sse_queue.put(("final_message", response))
502
  SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
@@ -677,7 +708,7 @@ async def sse_message(request: Request, user_message: str):
677
 
678
  async def event_generator():
679
  # Build the prompt
680
- context = state["chat_history"][-5:]
681
  if context:
682
  prompt = \
683
  f"""This is the previous context of the conversation:
@@ -705,6 +736,7 @@ Current Query:
705
  yield f"event: final_message\ndata: {data}\n\n"
706
 
707
  elif event_type == "error":
 
708
  yield format_error_sse("error", data)
709
 
710
  elif event_type == "step":
 
18
  logger = logging.getLogger()
19
  logger.setLevel(logging.INFO)
20
 
 
 
21
  CONTEXT_LENGTH = 128000
22
  BUFFER = 10000
23
  MAX_TOKENS_ALLOWED = CONTEXT_LENGTH - BUFFER
 
34
  sse_message += "\n"
35
  return sse_message
36
 
37
+ # Cancel the in-flight processing task on error (plain helper, not a FastAPI route)
38
+ def stop_on_error():
39
+ state = SESSION_STORE
40
+
41
+ if "process_task" in state:
42
+ state["process_task"].cancel()
43
+ del state["process_task"]
44
+
45
  # Initialize the components
46
  def initialize_components():
47
+ load_dotenv(override=True)
48
 
49
  from src.search.search_engine import SearchEngine
50
  from src.query_processing.query_processor import QueryProcessor
51
+ # from src.rag.neo4j_graphrag import Neo4jGraphRAG
52
  from src.rag.graph_rag import GraphRAG
53
  from src.evaluation.evaluator import Evaluator
54
  from src.reasoning.reasoner import Reasoner
 
61
  SESSION_STORE['search_engine'] = SearchEngine()
62
  SESSION_STORE['query_processor'] = QueryProcessor()
63
  SESSION_STORE['crawler'] = CustomCrawler(max_concurrent_requests=1000)
64
+ # SESSION_STORE['graph_rag'] = Neo4jGraphRAG(num_workers=os.cpu_count() * 2)
65
  SESSION_STORE['graph_rag'] = GraphRAG(num_workers=os.cpu_count() * 2)
66
  SESSION_STORE['evaluator'] = Evaluator()
67
  SESSION_STORE['reasoner'] = Reasoner()
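
Editor's note: the new `stop_on_error` helper above cancels the background `process_task` kept in the session store, and the SSE generator later calls it when an `error` event is produced. Below is a minimal, self-contained sketch of that cancellation pattern; `SESSION_STORE` and `long_running_job` here are stand-ins, not the application's real objects.

```python
import asyncio

SESSION_STORE = {}  # stand-in for the app's module-level session dict

async def long_running_job():
    try:
        while True:                      # placeholder for the real processing loop
            await asyncio.sleep(0.05)
    except asyncio.CancelledError:
        print("job cancelled, cleaning up")
        raise                            # re-raise so the task ends up cancelled

def stop_on_error():
    # Same idea as the helper in the diff: drop the stored reference and cancel it
    task = SESSION_STORE.pop("process_task", None)
    if task is not None:
        task.cancel()

async def main():
    SESSION_STORE["process_task"] = asyncio.create_task(long_running_job())
    await asyncio.sleep(0.1)             # let the job run briefly
    stop_on_error()                      # e.g. triggered by an SSE "error" event
    await asyncio.sleep(0.1)             # give the cancellation time to propagate

asyncio.run(main())
```

Popping the reference before cancelling also guards against a second error event trying to cancel the same task twice.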
 
82
 
83
  user_query = re.sub(r'category:.*', '', user_query, flags=re.IGNORECASE).strip()
84
 
85
+ if cat_lower == "basic":
86
  response = ""
87
+ chunk_counter = 1
88
  async for chunk in state["reasoner"].reason(user_query):
89
+ await sse_queue.put(("token", json.dumps({"chunk": chunk, "index": chunk_counter})))
90
  response += chunk
91
+ chunk_counter += 1
92
 
93
  await sse_queue.put(("final_message", response))
94
  SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
 
100
 
101
  await sse_queue.put(("complete", "done"))
102
 
103
+ elif cat_lower == "advanced":
104
  await sse_queue.put(("step", "Searching..."))
105
 
106
  optimized_query = await state['search_engine'].generate_optimized_query(user_query)
 
136
  await sse_queue.put(("sources_read", len(search_contents)))
137
 
138
  response = ""
139
+ chunk_counter = 1
140
  async for chunk in state["reasoner"].reason(user_query, contents):
141
+ await sse_queue.put(("token", json.dumps({"chunk": chunk, "index": chunk_counter})))
142
  response += chunk
143
+ chunk_counter += 1
144
 
145
  await sse_queue.put(("final_message", response))
146
  SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
 
158
  else:
159
  await sse_queue.put(("error", "No results found."))
160
 
161
+ elif cat_lower == "pro":
162
  current_search_results = []
163
  current_search_contents = []
164
 
 
214
  await sse_queue.put(("task", (sub_query, "FAILED")))
215
  return ""
216
 
217
+ tasks = []
218
+ if len(sub_queries) > 1 and sub_queries[0] != user_query:
219
+ for sub_query in sub_queries:
220
+ tasks.append(sub_query_task(sub_query))
221
+
222
  results = await asyncio.gather(*tasks)
223
  end = time.time()
224
 
 
250
  await sse_queue.put(("sources_read", len(current_search_contents)))
251
 
252
  response = ""
253
+ chunk_counter = 1
254
  is_first_chunk = True
255
  async for chunk in state['reasoner'].reason(user_query, contents):
256
  if is_first_chunk:
257
  await sse_queue.put(("step", f"Thought and searched for {int(end - start)} seconds"))
258
  is_first_chunk = False
259
 
260
+ await sse_queue.put(("token", json.dumps({"chunk": chunk, "index": chunk_counter})))
261
  response += chunk
262
+ chunk_counter += 1
263
 
264
  await sse_queue.put(("final_message", response))
265
  SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
 
280
  else:
281
  await sse_queue.put(("error", "No results found."))
282
 
283
+ elif cat_lower == "super":
284
  current_search_results = []
285
  current_search_contents = []
286
 
 
332
 
333
  sub_sub_queries, _ = await state['query_processor'].decompose_query(sub_query)
334
 
335
+ tasks = []
336
+ if len(sub_sub_queries) > 1 and sub_sub_queries[0] != user_query:
337
+ for sub_sub_query in sub_sub_queries:
338
+ tasks.append(sub_sub_query_task(sub_sub_query))
339
+
340
  results = await asyncio.gather(*tasks)
341
 
342
  if any(result.strip() for result in results):
 
350
  await sse_queue.put(("task", (sub_query, "FAILED")))
351
  return []
352
 
353
+ tasks = []
354
+ if len(sub_queries) > 1 and sub_queries[0] != user_query:
355
+ for sub_query in sub_queries:
356
+ tasks.append(sub_query_task(sub_query))
357
+
358
  results = await asyncio.gather(*tasks)
359
  end = time.time()
360
 
 
392
  await sse_queue.put(("sources_read", len(current_search_contents)))
393
 
394
  response = ""
395
+ chunk_counter = 1
396
  is_first_chunk = True
397
  async for chunk in state['reasoner'].reason(user_query, contents):
398
  if is_first_chunk:
399
  await sse_queue.put(("step", f"Thought and searched for {int(end - start)} seconds"))
400
  is_first_chunk = False
401
 
402
+ await sse_queue.put(("token", json.dumps({"chunk": chunk, "index": chunk_counter})))
403
  response += chunk
404
+ chunk_counter += 1
405
 
406
  await sse_queue.put(("final_message", response))
407
  SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
 
422
  else:
423
  await sse_queue.put(("error", "No results found."))
424
 
425
+ elif cat_lower == "ultra":
426
  current_search_results = []
427
  current_search_contents = []
428
 
 
482
  state['graph_rag'].set_on_event_callback(on_event_callback)
483
 
484
  start = time.time()
485
+ # state['graph_rag'].initialize_schema()
486
  await state['graph_rag'].process_graph(
487
  user_query,
488
  similarity_threshold=0.8,
 
518
  await sse_queue.put(("sources_read", len(current_search_contents)))
519
 
520
  response = ""
521
+ chunk_counter = 1
522
  is_first_chunk = True
523
  async for chunk in state['reasoner'].reason(user_query, answer):
524
  if is_first_chunk:
525
  await sse_queue.put(("step", f"Thought and searched for {int(end - start)} seconds"))
526
  is_first_chunk = False
527
 
528
+ await sse_queue.put(("token", json.dumps({"chunk": chunk, "index": chunk_counter})))
529
  response += chunk
530
+ chunk_counter += 1
531
 
532
  await sse_queue.put(("final_message", response))
533
  SESSION_STORE["chat_history"].append({"query": user_query, "response": response})
 
708
 
709
  async def event_generator():
710
  # Build the prompt
711
+ context = state["chat_history"][-3:]
712
  if context:
713
  prompt = \
714
  f"""This is the previous context of the conversation:
 
736
  yield f"event: final_message\ndata: {data}\n\n"
737
 
738
  elif event_type == "error":
739
+ stop_on_error()
740
  yield format_error_sse("error", data)
741
 
742
  elif event_type == "step":
src/crawl/crawler.py CHANGED
@@ -778,7 +778,7 @@ Query:"""
778
  self,
779
  url: str,
780
  query: Optional[str] = None,
781
- timeout: float = 10.0,
782
  return_type: str = "markdown",
783
  rotate_headers: bool = True,
784
  proxies: Optional[List[str]] = None,
@@ -813,12 +813,8 @@ Query:"""
813
  headers = self.get_headers() if rotate_headers else {}
814
  proxy = self.get_proxy(proxies) if proxies else None
815
 
816
- # Split timeout into connection and processing phases
817
- timeout_config = aiohttp.ClientTimeout(
818
- total=None, # No total timeout
819
- connect=timeout, # Connection timeout
820
- sock_read=timeout # Socket read timeout
821
- )
822
 
823
  try:
824
  # Use provided session if available
 
778
  self,
779
  url: str,
780
  query: Optional[str] = None,
781
+ timeout: float = 5.0,
782
  return_type: str = "markdown",
783
  rotate_headers: bool = True,
784
  proxies: Optional[List[str]] = None,
 
813
  headers = self.get_headers() if rotate_headers else {}
814
  proxy = self.get_proxy(proxies) if proxies else None
815
 
816
+ # Single total timeout covering the whole request (connect + read)
817
+ timeout_config = aiohttp.ClientTimeout(total=timeout)
 
 
 
 
818
 
819
  try:
820
  # Use provided session if available
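
Editor's note: the crawler now applies one overall 5-second budget via `total` instead of separate connect and socket-read limits, so slow-but-steady responses are cut off once the budget is spent. A minimal sketch of the difference, using aiohttp's public `ClientTimeout` options, is below; the URL is a placeholder.

```python
import asyncio
import aiohttp

async def fetch(url: str, timeout: float = 5.0) -> str:
    # New behaviour: one budget for DNS + connect + request + body read
    timeout_config = aiohttp.ClientTimeout(total=timeout)

    # Old behaviour, kept for comparison: no total cap, per-phase limits only
    # timeout_config = aiohttp.ClientTimeout(total=None, connect=timeout, sock_read=timeout)

    async with aiohttp.ClientSession(timeout=timeout_config) as session:
        async with session.get(url) as resp:
            return await resp.text()

if __name__ == "__main__":
    print(asyncio.run(fetch("https://example.com"))[:200])
```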
src/helpers/helper.py CHANGED
@@ -5,7 +5,7 @@ import torch
5
  import transformers
6
  from langchain.text_splitter import RecursiveCharacterTextSplitter, TokenTextSplitter
7
 
8
- ENV_FILE_PATH = os.path.join(os.getenv("WRITABLE_DIR", "/tmp"), ".env")
9
 
10
  def remove_markdown(text: str) -> str:
11
  # Remove code block format type and the code block itself
@@ -48,6 +48,26 @@ def remove_markdown(text: str) -> str:
48
 
49
  return text.strip()
50
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51
  def clear_gpu_memory():
52
  # Clear GPU memory and cache if available
53
  if torch.cuda.is_available():
 
5
  import transformers
6
  from langchain.text_splitter import RecursiveCharacterTextSplitter, TokenTextSplitter
7
 
8
+ ENV_FILE_PATH = os.path.join(os.path.dirname(__file__), "../../.env").replace("\\", "/")
9
 
10
  def remove_markdown(text: str) -> str:
11
  # Remove code block format type and the code block itself
 
48
 
49
  return text.strip()
50
 
51
+ def remove_outer_markdown_block(chunk, _acc={"b":""}):
52
+ _acc["b"] += chunk
53
+ p = re.compile(r'```markdown\s*\n(.*?)\n?```', re.DOTALL|re.IGNORECASE)
54
+ o = []
55
+
56
+ while True:
57
+ m = p.search(_acc["b"])
58
+ if not m:
59
+ break
60
+
61
+ s,e = m.span()
62
+ o.append(_acc["b"][:s]+m.group(1))
63
+ _acc["b"] = _acc["b"][e:]
64
+
65
+ if '```markdown' not in _acc["b"].lower():
66
+ o.append(_acc["b"])
67
+ _acc["b"] = ""
68
+
69
+ return "".join(o)
70
+
71
  def clear_gpu_memory():
72
  # Clear GPU memory and cache if available
73
  if torch.cuda.is_available():
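
Editor's note: `remove_outer_markdown_block` strips an enclosing "markdown" code fence from a streamed response, buffering partial chunks in the `_acc` mutable default argument; because that dict lives at module level, the buffer is shared by every caller in the process, which is worth keeping in mind. A short usage sketch with synthetic chunks follows; it assumes the snippet is run from the repository root so the helper is importable.

```python
# Feed streamed chunks through the helper; the outer fence is removed once the
# closing backticks arrive, and earlier calls simply return an empty string.
from src.helpers.helper import remove_outer_markdown_block

chunks = ["```markdown\n# Title\n", "Some **bold** text.\n", "```"]

cleaned = "".join(remove_outer_markdown_block(c) for c in chunks)
print(cleaned)  # prints "# Title" and "Some **bold** text." without the fence
```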
src/query_processing/query_processor.py CHANGED
@@ -16,11 +16,11 @@ class QueryProcessor:
16
  async def classify_query(self, query, *, llm):
17
  template = \
18
  """You are an expert at classifying queries. Your task is to classify the given user query (descriptions are provided below) into one of the following categories:
19
- 1. Internal Knowledge Base -> The query is a simple question that can be answered without external internet resources. It is the simplest category.
20
- 2. Simple External Lookup -> The query requires up to 5 external internet resource references for a quick fact-check before answering. It is a moderate category.
21
- 3. Complex Moderate Decomposition -> The query needs complex reasoning and up to 20 external internet resource references to answer accurately. It is a complex category.
22
- 4. Complex Advanced Decomposition -> The query requires very complex reasoning, dynamic sub-query generation, and up to 50 external internet resource references to answer accurately. It is a very complex category.
23
- 5. Extensive Research Dynamic Structuring -> The query requires extensive research, extremely complex reasoning, and up to 500 external internet resource references to answer accurately. It is the most complex category.
24
 
25
  Rules:
26
  1. Only classify the query into one of the categories provided above.
@@ -28,7 +28,7 @@ Rules:
28
  3. If the query contains "category: <category_name>" in it, then the query should be classified into the category that is mentioned in the query.
29
 
30
  Examples:
31
- 1. Internal Knowledge Base
32
  - "What is the capital of France?"
33
  - "How many continents are there in the world?"
34
  - "What is the chemical formula for water?"
@@ -37,7 +37,7 @@ Examples:
37
  - "What is Newton's first law of motion?"
38
  - "What is the sum of the angles in a triangle?"
39
 
40
- 2. Simple External Lookup
41
  - "What is the current market price of gold?"
42
  - "What is the current population of Japan?"
43
  - "Who won the Nobel Peace Prize in 2023?"
@@ -45,7 +45,7 @@ Examples:
45
  - "How many countries are in the European Union?"
46
  - "What are the top five best-selling novels of 2024?"
47
 
48
- 3. Complex Moderate Decomposition
49
  - "Compare the political structures of ancient Greece and ancient Rome"
50
  - "How do climate change and deforestation contribute to the decline in biodiversity?"
51
  - "What are the major differences in healthcare systems between the U.S., Canada, and the U.K.?"
@@ -53,7 +53,7 @@ Examples:
53
  - "What are the economic and social impacts of artificial intelligence on the job market?"
54
  - "What are the pros and cons of genetically modified organisms in agriculture?"
55
 
56
- 4. Complex Advanced Decomposition
57
  - "What are the long-term psychological and societal effects of internet addiction on teenagers?"
58
  - "How do various world religions view the concept of life after death, and what cultural practices reflect these beliefs?"
59
  - "What are the implications of quantum computing on encryption technologies, and how might they evolve in the next decade?"
@@ -61,7 +61,7 @@ Examples:
61
  - "What are the top five electric SUVs available in the market as of October 2024 for under $60,000, and how do they compare in terms of range, features, and performance?"
62
  - "What are the major obstacles to achieving carbon neutrality in heavy industries like steel and cement? What are the potential solutions?"
63
 
64
- 5. Extensive Research Dynamic Structuring
65
  - "Create a comprehensive study on the economic, cultural, and environmental impacts of megacities around the world, including case studies from at least five different continents."
66
  - "Conduct a detailed comparative analysis of various renewable energy policies across G20 countries, evaluating their effectiveness, challenges, and future projections."
67
  - "Research the evolution of space exploration programs in the last 60 years, highlighting the key milestones, technological advancements, and geopolitical factors that shaped them."
 
16
  async def classify_query(self, query, *, llm):
17
  template = \
18
  """You are an expert at classifying queries. Your task is to classify the given user query (descriptions are provided below) into one of the following categories:
19
+ 1. Basic -> The query is a simple question that can be answered without external internet resources. It is the simplest category.
20
+ 2. Advanced -> The query requires up to 5 external internet resource references for a quick fact-check before answering. It is a moderate category.
21
+ 3. Pro -> The query needs complex reasoning and up to 20 external internet resource references to answer accurately. It is a complex category.
22
+ 4. Super -> The query requires very complex reasoning, dynamic sub-query generation, and up to 50 external internet resource references to answer accurately. It is a very complex category.
23
+ 5. Ultra -> The query requires extensive research, extremely complex reasoning, and up to 500 external internet resource references to answer accurately. It is the most complex category.
24
 
25
  Rules:
26
  1. Only classify the query into one of the categories provided above.
 
28
  3. If the query contains "category: <category_name>" in it, then the query should be classified into the category that is mentioned in the query.
29
 
30
  Examples:
31
+ 1. Basic
32
  - "What is the capital of France?"
33
  - "How many continents are there in the world?"
34
  - "What is the chemical formula for water?"
 
37
  - "What is Newton's first law of motion?"
38
  - "What is the sum of the angles in a triangle?"
39
 
40
+ 2. Advanced
41
  - "What is the current market price of gold?"
42
  - "What is the current population of Japan?"
43
  - "Who won the Nobel Peace Prize in 2023?"
 
45
  - "How many countries are in the European Union?"
46
  - "What are the top five best-selling novels of 2024?"
47
 
48
+ 3. Pro
49
  - "Compare the political structures of ancient Greece and ancient Rome"
50
  - "How do climate change and deforestation contribute to the decline in biodiversity?"
51
  - "What are the major differences in healthcare systems between the U.S., Canada, and the U.K.?"
 
53
  - "What are the economic and social impacts of artificial intelligence on the job market?"
54
  - "What are the pros and cons of genetically modified organisms in agriculture?"
55
 
56
+ 4. Super
57
  - "What are the long-term psychological and societal effects of internet addiction on teenagers?"
58
  - "How do various world religions view the concept of life after death, and what cultural practices reflect these beliefs?"
59
  - "What are the implications of quantum computing on encryption technologies, and how might they evolve in the next decade?"
 
61
  - "What are the top five electric SUVs available in the market as of October 2024 for under $60,000, and how do they compare in terms of range, features, and performance?"
62
  - "What are the major obstacles to achieving carbon neutrality in heavy industries like steel and cement? What are the potential solutions?"
63
 
64
+ 5. Ultra
65
  - "Create a comprehensive study on the economic, cultural, and environmental impacts of megacities around the world, including case studies from at least five different continents."
66
  - "Conduct a detailed comparative analysis of various renewable energy policies across G20 countries, evaluating their effectiveness, challenges, and future projections."
67
  - "Research the evolution of space exploration programs in the last 60 years, highlighting the key milestones, technological advancements, and geopolitical factors that shaped them."
src/reasoning/reasoner.py CHANGED
@@ -1,7 +1,6 @@
1
  from langchain.prompts import ChatPromptTemplate
2
  from langchain_core.prompts import ChatPromptTemplate
3
- from src.utils.api_key_manager import APIKeyManager
4
- from src.utils.api_key_manager import with_api_manager
5
  from src.query_processing.late_chunking.late_chunker import LateChunker
6
 
7
  class Reasoner:
@@ -23,7 +22,7 @@ class Reasoner:
23
  Your task is to reason about the given user query and provide an answer.
24
 
25
  Rules:
26
- 1. Your response should only be the answer in plain text without any formatting.
27
  2. You must use proper reasoning and logic to answer the query for your internal use but do not show your reasoning process in the response.
28
 
29
  Query:
@@ -36,7 +35,7 @@ Query:
36
  Given the user query and the relevant context, your task is to reason and provide an answer.
37
 
38
  Rules:
39
- 1. Your response should only be the answer in plain text without any formatting.
40
  2. You must use proper reasoning and logic to answer the query for your internal use but do not show your reasoning process in the response.
41
  3. You must not mention the context/documents provided to you in the response. Make it sound like you are the one who is answering the query.
42
 
@@ -116,5 +115,4 @@ if __name__ == "__main__":
116
  rotate_proxy=False,
117
  return_html=True
118
  ))
119
- print(contents)
120
-
 
1
  from langchain.prompts import ChatPromptTemplate
2
  from langchain_core.prompts import ChatPromptTemplate
3
+ from src.utils.api_key_manager import APIKeyManager, with_api_manager
 
4
  from src.query_processing.late_chunking.late_chunker import LateChunker
5
 
6
  class Reasoner:
 
22
  Your task is to reason about the given user query and provide an answer.
23
 
24
  Rules:
25
+ 1. Your response should only be the answer in valid markdown format.
26
  2. You must use proper reasoning and logic to answer the query for your internal use but do not show your reasoning process in the response.
27
 
28
  Query:
 
35
  Given the user query and the relevant context, your task is to reason and provide an answer.
36
 
37
  Rules:
38
+ 1. Your response should only be the answer in valid markdown format.
39
  2. You must use proper reasoning and logic to answer the query for your internal use but do not show your reasoning process in the response.
40
  3. You must not mention the context/documents provided to you in the response. Make it sound like you are the one who is answering the query.
41
 
 
115
  rotate_proxy=False,
116
  return_html=True
117
  ))
118
+ print(contents)
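
Editor's note: the reasoner prompts now ask for the answer "in valid markdown format" rather than plain text, which is why the streamed output may arrive fenced and why the frontend gains markdown and KaTeX renderers. Below is a trimmed-down sketch of how such a rule sits in a LangChain chat prompt; it is not the project's full template, and the query text is only an example.

```python
from langchain_core.prompts import ChatPromptTemplate

# Reduced template carrying the same markdown-output rule as the updated prompts
template = """You are a helpful assistant.
Your task is to reason about the given user query and provide an answer.

Rules:
1. Your response should only be the answer in valid markdown format.
2. Reason internally, but do not show your reasoning process in the response.

Query:
{query}"""

prompt = ChatPromptTemplate.from_template(template)
print(prompt.format_messages(query="Explain binary search in two sentences."))
```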