anasmkh committed on
Commit
c1ca5a1
·
verified ·
1 Parent(s): 7bcf19f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -34
app.py CHANGED
@@ -4,16 +4,16 @@ from getpass import getpass
4
  openai_api_key = os.getenv('OPENAI_API_KEY')
5
  openai_api_key = openai_api_key
6
 
 
7
  from llama_index.llms.openai import OpenAI
8
  from llama_index.embeddings.openai import OpenAIEmbedding
9
  from llama_index.core import Settings
10
 
11
- Settings.llm = OpenAI(model="gpt-3.5-turbo", temperature=0.4)
12
  Settings.embed_model = OpenAIEmbedding(model="text-embedding-ada-002")
13
 
14
  from llama_index.core import SimpleDirectoryReader
15
 
16
- # Load initial documents
17
  documents = SimpleDirectoryReader("new_file").load_data()
18
 
19
  from llama_index.core import VectorStoreIndex, StorageContext
@@ -25,7 +25,7 @@ client = qdrant_client.QdrantClient(
25
  )
26
 
27
  vector_store = QdrantVectorStore(
28
- collection_name="paper",
29
  client=client,
30
  enable_hybrid=True,
31
  batch_size=20,
@@ -51,40 +51,49 @@ chat_engine = index.as_chat_engine(
51
  memory=memory,
52
  system_prompt=(
53
  """You are an AI assistant who answers the user questions,
54
- use the schema fields to generate appropriate and valid json queries"""
55
  ),
56
  )
57
 
58
- import gradio as gr
59
 
 
 
 
 
 
 
 
 
 
60
  def chat_with_ai(user_input, chat_history):
 
 
 
 
 
 
 
 
61
  response = chat_engine.chat(user_input)
62
  references = response.source_nodes
63
- ref, pages = [], []
64
  for i in range(len(references)):
65
- if references[i].metadata['file_name'] not in ref:
66
- ref.append(references[i].metadata['file_name'])
67
- complete_response = str(response) + "\n\n"
68
- if ref != [] or pages != []:
69
- chat_history.append((user_input, complete_response))
70
- ref = []
71
- elif ref == [] or pages == []:
72
- chat_history.append((user_input, str(response)))
 
73
 
74
  return chat_history, ""
75
 
76
  def clear_history():
77
  return [], ""
78
 
79
- def upload_file(file):
80
- # Save the uploaded file to the "new_file" directory
81
- if not os.path.exists("new_file"):
82
- os.makedirs("new_file")
83
- file_path = os.path.join("new_file", file.name)
84
- with open(file_path, "wb") as f:
85
- f.write(file.read())
86
- return f"File {file.name} uploaded successfully!"
87
-
88
  def gradio_chatbot():
89
  with gr.Blocks() as demo:
90
  gr.Markdown("# Chat Interface for LlamaIndex")
@@ -95,24 +104,15 @@ def gradio_chatbot():
95
  )
96
 
97
  submit_button = gr.Button("Send")
98
- btn_clear = gr.Button("Delete Context")
99
-
100
- # Add a file upload component
101
- file_upload = gr.File(label="Upload a file")
102
 
103
- # Add a button to handle file upload
104
- upload_button = gr.Button("Upload File")
105
 
106
  chat_history = gr.State([])
107
 
108
- # Define the file upload action
109
- upload_button.click(upload_file, inputs=file_upload, outputs=user_input)
110
-
111
- # Define the chat interaction
112
  submit_button.click(chat_with_ai, inputs=[user_input, chat_history], outputs=[chatbot, user_input])
113
 
114
  user_input.submit(chat_with_ai, inputs=[user_input, chat_history], outputs=[chatbot, user_input])
115
- btn_clear.click(fn=clear_history, outputs=[chatbot, user_input])
116
 
117
  return demo
118
 
 
4
  openai_api_key = os.getenv('OPENAI_API_KEY')
5
  openai_api_key = openai_api_key
6
 
7
+
8
  from llama_index.llms.openai import OpenAI
9
  from llama_index.embeddings.openai import OpenAIEmbedding
10
  from llama_index.core import Settings
11
 
12
+ Settings.llm = OpenAI(model="gpt-3.5-turbo",temperature=0.4)
13
  Settings.embed_model = OpenAIEmbedding(model="text-embedding-ada-002")
14
 
15
  from llama_index.core import SimpleDirectoryReader
16
 
 
17
  documents = SimpleDirectoryReader("new_file").load_data()
18
 
19
  from llama_index.core import VectorStoreIndex, StorageContext
 
25
  )
26
 
27
  vector_store = QdrantVectorStore(
28
+ collection_name = "paper",
29
  client=client,
30
  enable_hybrid=True,
31
  batch_size=20,
 
51
  memory=memory,
52
  system_prompt=(
53
  """You are an AI assistant who answers the user questions,
54
+ use the schema fields to generate appropriate and valid json queries"""
55
  ),
56
  )
57
 
58
+ # def is_greeting(user_input):
59
 
60
+ # greetings = ["hello", "hi", "hey", "good morning", "good afternoon", "good evening", "greetings"]
61
+ # user_input_lower = user_input.lower().strip()
62
+ # return any(greet in user_input_lower for greet in greetings)
63
+ # def is_bye(user_input):
64
+
65
+ # greetings = ["thanks", "thanks you", "thanks a lot", "good answer", "good bye", "bye bye"]
66
+ # user_input_lower = user_input.lower().strip()
67
+ # return any(greet in user_input_lower for greet in greetings)
68
+ import gradio as gr
69
def chat_with_ai(user_input, chat_history):
    """Run one chat turn through the global chat engine.

    Sends user_input to chat_engine, appends the (question, answer) pair to
    chat_history, and returns (chat_history, "") — the empty string is wired
    to the Gradio textbox output so the input box is cleared after sending.

    Args:
        user_input: the user's message (str).
        chat_history: list of (user, assistant) tuples; mutated in place.

    Returns:
        (chat_history, "") for the Gradio chatbot and textbox outputs.
    """
    response = chat_engine.chat(user_input)

    # Collect the distinct source file names cited by the retriever.
    # .get() skips nodes without 'file_name' instead of raising KeyError.
    sources = []
    for node in response.source_nodes:
        file_name = node.metadata.get('file_name')
        if file_name and file_name not in sources:
            sources.append(file_name)

    # Original behavior: when sources were found the answer got a trailing
    # "\n\n" (a hook for appending references); otherwise the plain answer.
    # The original's `elif ref==[] or pages==[]` branch was the only other
    # reachable path, so this single append is equivalent.
    if sources:
        chat_history.append((user_input, str(response) + "\n\n"))
    else:
        chat_history.append((user_input, str(response)))

    return chat_history, ""
93
 
94
def clear_history():
    """Reset the conversation UI: empty chat history plus a blank input box."""
    cleared_state = ([], "")
    return cleared_state
96
 
 
 
 
 
 
 
 
 
 
97
  def gradio_chatbot():
98
  with gr.Blocks() as demo:
99
  gr.Markdown("# Chat Interface for LlamaIndex")
 
104
  )
105
 
106
  submit_button = gr.Button("Send")
107
+ btn_clear = gr.Button("Delete Context")
 
 
 
108
 
 
 
109
 
110
  chat_history = gr.State([])
111
 
 
 
 
 
112
  submit_button.click(chat_with_ai, inputs=[user_input, chat_history], outputs=[chatbot, user_input])
113
 
114
  user_input.submit(chat_with_ai, inputs=[user_input, chat_history], outputs=[chatbot, user_input])
115
+ btn_clear.click(fn=clear_history, outputs=[chatbot, user_input])
116
 
117
  return demo
118