Spaces:
Runtime error
Runtime error
Salvatore Rossitto
committed on
Commit
·
b7a9171
1
Parent(s):
735f5b7
...
Browse files
- RBotReloaded.py +6 -4
- agent_llama_ui.py +6 -5
- requirements.txt +4 -3
RBotReloaded.py
CHANGED
@@ -116,7 +116,7 @@ class StorageRetrievalLLM:
|
|
116 |
# Load pages
|
117 |
loader = DirectoryLoader(stored_pages_folder, glob="**/*.pdf", loader_cls=PyMuPDFLoader)
|
118 |
documents = loader.load()
|
119 |
-
|
120 |
# Split into chunks
|
121 |
text_splitter = RecursiveCharacterTextSplitter(chunk_size=EMBD_CHUNK_SIZE, chunk_overlap=100)
|
122 |
documents = text_splitter.split_documents(documents)
|
@@ -175,8 +175,10 @@ class StorageRetrievalLLM:
|
|
175 |
|
176 |
# Load file
|
177 |
file_path = doc if os.path.exists(doc) else os.path.join("data", doc)
|
178 |
-
loader = DirectoryLoader(file_path, glob="**/*.pdf", loader_cls=PyMuPDFLoader)
|
179 |
-
documents = loader.load()
|
|
|
|
|
180 |
|
181 |
# Split and add
|
182 |
splitter = RecursiveCharacterTextSplitter()
|
@@ -385,7 +387,7 @@ class SmartAgent:
|
|
385 |
# Create agent
|
386 |
self.smartAgent = self.create_smart_agent()
|
387 |
|
388 |
-
print("Smart Agent Initialized")
|
389 |
|
390 |
def reset_context(self):
|
391 |
self.chat_history.clear()
|
|
|
116 |
# Load pages
|
117 |
loader = DirectoryLoader(stored_pages_folder, glob="**/*.pdf", loader_cls=PyMuPDFLoader)
|
118 |
documents = loader.load()
|
119 |
+
|
120 |
# Split into chunks
|
121 |
text_splitter = RecursiveCharacterTextSplitter(chunk_size=EMBD_CHUNK_SIZE, chunk_overlap=100)
|
122 |
documents = text_splitter.split_documents(documents)
|
|
|
175 |
|
176 |
# Load file
|
177 |
file_path = doc if os.path.exists(doc) else os.path.join("data", doc)
|
178 |
+
# loader = DirectoryLoader(file_path, glob="**/*.pdf", loader_cls=PyMuPDFLoader)
|
179 |
+
# documents = loader.load()
|
180 |
+
loader = TextLoader(path=file_path, loader_cls=PyMuPDFLoader)
|
181 |
+
documents = loader.load()
|
182 |
|
183 |
# Split and add
|
184 |
splitter = RecursiveCharacterTextSplitter()
|
|
|
387 |
# Create agent
|
388 |
self.smartAgent = self.create_smart_agent()
|
389 |
|
390 |
+
print(f"Smart Agent Initialized - CUDA Support:{torch.cuda.is_available()}")
|
391 |
|
392 |
def reset_context(self):
|
393 |
self.chat_history.clear()
|
agent_llama_ui.py
CHANGED
@@ -18,8 +18,8 @@ load_dotenv()
|
|
18 |
default_model = "mistral-7b-instruct-v0.1.Q4_K_M.gguf"
|
19 |
default_context = 8192
|
20 |
default_load_type = "Auto"
|
21 |
-
default_iterations =
|
22 |
-
default_temperature = 0.
|
23 |
default_topp = 0.95
|
24 |
|
25 |
@st.cache_resource
|
@@ -212,10 +212,11 @@ def render_simple_chat():
|
|
212 |
generated_files = get_generated_files()
|
213 |
st.sidebar.subheader("Generated Files")
|
214 |
for file_path in generated_files:
|
215 |
-
|
|
|
216 |
st.write("---")
|
217 |
-
st.markdown(f"[{file_name}]({file_path})", unsafe_allow_html=True)
|
218 |
-
st.image(file_path)
|
219 |
|
220 |
i = 0
|
221 |
for m in history():
|
|
|
18 |
default_model = "mistral-7b-instruct-v0.1.Q4_K_M.gguf"
|
19 |
default_context = 8192
|
20 |
default_load_type = "Auto"
|
21 |
+
default_iterations = 3
|
22 |
+
default_temperature = 0.2
|
23 |
default_topp = 0.95
|
24 |
|
25 |
@st.cache_resource
|
|
|
212 |
generated_files = get_generated_files()
|
213 |
st.sidebar.subheader("Generated Files")
|
214 |
for file_path in generated_files:
|
215 |
+
file_path = file_path.replace("\\", "/")
|
216 |
+
file_name = file_path.split("/")[-1]
|
217 |
st.write("---")
|
218 |
+
st.markdown(f"[{file_name}](/{file_path})", unsafe_allow_html=True)
|
219 |
+
st.image(file_path, use_column_width=True)
|
220 |
|
221 |
i = 0
|
222 |
for m in history():
|
requirements.txt
CHANGED
@@ -1,6 +1,7 @@
|
|
1 |
-
|
2 |
-
|
3 |
-
torchvision
|
|
|
4 |
accelerate
|
5 |
aiohttp
|
6 |
anyio
|
|
|
1 |
+
numpy>=1.24.1
|
2 |
+
torch --index-url https://download.pytorch.org/whl/cu118
|
3 |
+
torchvision --index-url https://download.pytorch.org/whl/cu118
|
4 |
+
torchaudio --index-url https://download.pytorch.org/whl/cu118
|
5 |
accelerate
|
6 |
aiohttp
|
7 |
anyio
|