prithvirajpawar committed
Commit 68acb0b · 1 Parent(s): 1e44c85

changes in prompt to address issue of the welcome message including a comment about no excerpts

Files changed (2)
  1. app.py +8 -6
  2. helpmate_ai.py +15 -7
app.py CHANGED
@@ -2,7 +2,7 @@ from fastapi import FastAPI, Request, Depends, HTTPException, Header, File, Uplo
 from fastapi.middleware.cors import CORSMiddleware
 from pydantic import BaseModel
 from typing import List, Optional
-from helpmate_ai import initialize_conversation, retreive_results, rerank_with_cross_encoder, generate_response
+from helpmate_ai import get_system_msg, retreive_results, rerank_with_cross_encoder, generate_response
 import google.generativeai as genai
 import os
 from dotenv import load_dotenv
@@ -53,7 +53,7 @@ class Report(BaseModel):
 
 # Initialize conversation and model
 conversation_bot = []
-conversation = initialize_conversation()
+conversation = get_system_msg()
 model = genai.GenerativeModel("gemini-1.5-flash", system_instruction=conversation)
 
 # Initialize speech recognizer
@@ -76,6 +76,8 @@ def get_gemini_completions(conversation: str) -> str:
 @app.get("/init", response_model=ChatResponse, dependencies=[Depends(verify_api_key)])
 async def initialize_chat():
     global conversation_bot
+
+    conversation = "Hi"
     introduction = get_gemini_completions(conversation)
     conversation_bot = [Message(role="bot", content=introduction)]
     return ChatResponse(
@@ -159,11 +161,11 @@ async def handle_feedback(
 async def reset_conversation():
     global conversation_bot, conversation
     conversation_bot = []
-    conversation = initialize_conversation()
+    conversation = "Hi"
     introduction = get_gemini_completions(conversation)
     conversation_bot.append(Message(role="bot", content=introduction))
     return {"status": "success", "message": "Conversation reset"}
 
-# if __name__ == "__main__":
-#     import uvicorn
-#     uvicorn.run(app, host="0.0.0.0", port=8000)
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=8000)
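
For context, a rough sketch of how the updated flow is meant to hang together (not part of this commit): the full prompt from get_system_msg() is bound to the model once via system_instruction, and /init and /reset now just send a plain "Hi" turn that the prompt's new rule turns into the welcome message. The GEMINI_API_KEY variable name and the exact body of get_gemini_completions here are assumptions.

# Sketch only; mirrors the wiring in app.py under the assumptions noted above.
import os
import google.generativeai as genai
from helpmate_ai import get_system_msg

genai.configure(api_key=os.environ["GEMINI_API_KEY"])  # assumed env var name

# The system prompt (including the new "When user says 'Hi' ..." rule) is
# attached to the model once, instead of being sent as the first user turn.
conversation = get_system_msg()
model = genai.GenerativeModel("gemini-1.5-flash", system_instruction=conversation)

def get_gemini_completions(conversation: str) -> str:
    # Roughly what app.py's helper is expected to do: one non-streaming call.
    response = model.generate_content(conversation)
    return response.text

# /init and /reset now pass "Hi", so the greeting comes from the prompt's
# welcome rule rather than from a turn that mentions missing excerpts.
print(get_gemini_completions("Hi"))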
helpmate_ai.py CHANGED
@@ -2,11 +2,11 @@
 import pandas as pd
 import chromadb
 
-def initialize_conversation():
+def get_system_msg():
     """
     Generate a response using GPT-3.5's ChatCompletion based on the user query and retrieved information.
     """
-    conversation = [
+    system_msg = [
     f"""
     You are a helpful assistant in the insurance domain who can effectively answer user queries about insurance policies and documents.
     The document name is 'Group Life Insurance Policy' and it contais information about 3 different insurance policies 'Member Life Insurance', 'Member Accidental Death and Dismemberment Insurance' and 'Dependent Life Insurance'.
@@ -46,14 +46,22 @@ def initialize_conversation():
     6. If the provided excerpts do not fully answer the query, provide partial information and suggest which sections of the policy document the user should review for further details.
     7. If no relevant information is found in the provided excerpts, respond with 'No relevant information found in the provided excerpts.'
 
-    # Start the session with a short welcome message which also has policy name and a smiley and nothing else.
+    <When user says 'Hi' respond with a short welcome message which also has policy name and a smiley.>
     """
     ]
 
-    # conversation = [{"role": "user", "parts": system_message}]
-    # conversation = [{"role": "system", "content": system_message}]
-
-    return conversation
+    return system_msg
+
+# def get_welcome_msg():
+#     """
+#     Generate a welcome msg.
+#     """
+#     messages = f"""
+#     Start the session with a short welcome message which also has policy name and a smiley.
+#     """
+#     introduction = [{"role": "user", "parts": messages}]
+
+#     return introduction
 
 # Import the SentenceTransformer Embedding Function into chroma
 from chromadb.utils import embedding_functions
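
A quick sanity check on the reworked prompt helper, as a sketch rather than part of this commit (note that importing helpmate_ai also pulls in its module-level chromadb imports):

# Sketch only: confirm the welcome-message rule now lives inside the system prompt.
from helpmate_ai import get_system_msg

system_msg = get_system_msg()

# get_system_msg() returns a single-element list holding the full prompt string.
assert isinstance(system_msg, list) and len(system_msg) == 1
prompt = system_msg[0]

# The welcome behaviour is triggered by a "Hi" turn instead of the old
# "Start the session with a short welcome message ..." instruction.
assert "When user says 'Hi'" in prompt
assert "Group Life Insurance Policy" in prompt
print("system prompt contains the welcome rule")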