Spaces:
redfernstech
/
Runtime error

Srinivasulu kethanaboina committed on
Commit
4ca2037
·
verified ·
1 Parent(s): e7b9b0f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -31
app.py CHANGED
@@ -1,23 +1,11 @@
1
  from dotenv import load_dotenv
2
  import gradio as gr
3
  import os
 
4
  from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
5
  from llama_index.llms.huggingface import HuggingFaceInferenceAPI
6
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
7
- from sentence_transformers import SentenceTransformer
8
- import csv
9
- import os
10
- import csv
11
- import os
12
- from datasets import Dataset, DatasetDict
13
-
14
- PERSIST_DIR = "history" # Replace with your actual directory path
15
- CSV_FILE = os.path.join(PERSIST_DIR, "chat_history.csv")
16
-
17
- # Assuming current_chat_history is managed within a Dataset or DatasetDict
18
- current_chat_history = Dataset({"query": [], "response": []})
19
-
20
-
21
 
22
  # Load environment variables
23
  load_dotenv()
@@ -36,29 +24,28 @@ Settings.embed_model = HuggingFaceEmbedding(
36
  )
37
 
38
  # Define the directory for persistent storage and data
39
- PERSIST_DIR = "db"
40
- PDF_DIRECTORY = 'data' # Changed to the directory containing PDFs
41
 
42
  # Ensure directories exist
43
- os.makedirs(PDF_DIRECTORY, exist_ok=True)
44
  os.makedirs(PERSIST_DIR, exist_ok=True)
45
 
46
  # Variable to store current chat conversation
47
- current_chat_history = []
 
48
 
49
  def data_ingestion_from_directory():
50
  # Use SimpleDirectoryReader on the directory containing the PDF files
 
51
  documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
52
  storage_context = StorageContext.from_defaults()
53
  index = VectorStoreIndex.from_documents(documents)
54
  index.storage_context.persist(persist_dir=PERSIST_DIR)
55
 
 
56
  def handle_query(query):
57
  global current_chat_history
58
 
59
- # Ensure the directory exists or create it
60
- os.makedirs(PERSIST_DIR, exist_ok=True)
61
-
62
  chat_text_qa_msgs = [
63
  (
64
  "user",
@@ -95,8 +82,7 @@ def handle_query(query):
95
  response = "Sorry, I couldn't find an answer."
96
 
97
  # Update current chat history
98
- current_chat_history["query"].append(query)
99
- current_chat_history["response"].append(response)
100
 
101
  # Save chat history to CSV
102
  with open(CSV_FILE, 'a', newline='', encoding='utf-8') as file:
@@ -105,14 +91,7 @@ def handle_query(query):
105
 
106
  return response
107
 
108
- # Example usage: Process PDF ingestion from directory
109
- print("Processing PDF ingestion from directory:", PDF_DIRECTORY)
110
- data_ingestion_from_directory()
111
 
112
- # Define the function to handle predictions
113
- """def predict(message,history):
114
- response = handle_query(message)
115
- return response"""
116
  def predict(message, history):
117
  # Your logo HTML code
118
  logo_html = '''
@@ -167,9 +146,11 @@ footer {
167
  }
168
  label.svelte-1b6s6s {display: none}
169
  '''
 
 
170
  gr.ChatInterface(predict,
171
  css=css,
172
  description="FernAI",
173
  clear_btn=None, undo_btn=None, retry_btn=None,
174
  examples=['Tell me about Redfernstech?', 'Services in Redfernstech?']
175
- ).launch(share = False)
 
1
  from dotenv import load_dotenv
2
  import gradio as gr
3
  import os
4
+ import csv
5
  from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
6
  from llama_index.llms.huggingface import HuggingFaceInferenceAPI
7
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
8
+ from datasets import Dataset
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
  # Load environment variables
11
  load_dotenv()
 
24
  )
25
 
26
  # Define the directory for persistent storage and data
27
+ PERSIST_DIR = "history" # Replace with your actual directory path
28
+ CSV_FILE = os.path.join(PERSIST_DIR, "chat_history.csv")
29
 
30
  # Ensure directories exist
 
31
  os.makedirs(PERSIST_DIR, exist_ok=True)
32
 
33
  # Variable to store current chat conversation
34
+ current_chat_history = Dataset.from_dict({"query": [], "response": []})
35
+
36
 
37
  def data_ingestion_from_directory():
38
  # Use SimpleDirectoryReader on the directory containing the PDF files
39
+ PDF_DIRECTORY = 'data' # Replace with the directory containing your PDFs
40
  documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
41
  storage_context = StorageContext.from_defaults()
42
  index = VectorStoreIndex.from_documents(documents)
43
  index.storage_context.persist(persist_dir=PERSIST_DIR)
44
 
45
+
46
  def handle_query(query):
47
  global current_chat_history
48
 
 
 
 
49
  chat_text_qa_msgs = [
50
  (
51
  "user",
 
82
  response = "Sorry, I couldn't find an answer."
83
 
84
  # Update current chat history
85
+ current_chat_history = current_chat_history.concat(Dataset.from_dict({"query": [query], "response": [response]}))
 
86
 
87
  # Save chat history to CSV
88
  with open(CSV_FILE, 'a', newline='', encoding='utf-8') as file:
 
91
 
92
  return response
93
 
 
 
 
94
 
 
 
 
 
95
  def predict(message, history):
96
  # Your logo HTML code
97
  logo_html = '''
 
146
  }
147
  label.svelte-1b6s6s {display: none}
148
  '''
149
+
150
+ # Launch Gradio interface
151
  gr.ChatInterface(predict,
152
  css=css,
153
  description="FernAI",
154
  clear_btn=None, undo_btn=None, retry_btn=None,
155
  examples=['Tell me about Redfernstech?', 'Services in Redfernstech?']
156
+ ).launch(share=False)