bachephysicdun committed
Commit 14d48df · 1 Parent(s): 9c7476c

implement history stream

Files changed (4)
  1. app/callbacks.py +3 -1
  2. app/chains.py +2 -1
  3. app/main.py +42 -13
  4. app/prompts.py +30 -6
app/callbacks.py CHANGED
@@ -15,7 +15,9 @@ class LogResponseCallback(BaseCallbackHandler):
         """Run when llm ends running."""
         # TODO: The function on_llm_end is going to be called when the LLM stops sending
         # the response. Use the crud.add_message function to capture that response.
-        raise NotImplemented
+        print(outputs)
+        message = schemas.MessageBase(message=outputs.get('text'), type='AI')
+        crud.add_message(self.db, message=message, username=self.user_request.username)
 
     def on_llm_start(
         self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
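
The new on_llm_end body builds a schemas.MessageBase and hands it to crud.add_message, neither of which appears in this diff. A minimal sketch of the schema those calls seem to assume — a Pydantic model with just the two fields used here; any further fields or the exact crud signature are not confirmed by this commit:

# Hypothetical sketch, not part of this commit: the schema the callback constructs.
from pydantic import BaseModel

class MessageBase(BaseModel):
    message: str  # the text of one chat turn (outputs.get('text') above)
    type: str     # 'AI' here, 'User' when main.py logs the incoming question

crud.add_message(self.db, message=message, username=self.user_request.username) would then persist that record against the requesting user; the actual columns live in models.py and crud.py.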
app/chains.py CHANGED
@@ -9,6 +9,7 @@ import schemas
 from prompts import (
     raw_prompt,
     raw_prompt_formatted,
+    history_prompt_formatted,
     format_context,
     tokenizer
 )
@@ -34,7 +35,7 @@ simple_chain = (raw_prompt | llm).with_types(input_type=schemas.UserQuestion)
 formatted_chain = (raw_prompt_formatted | llm).with_types(input_type=schemas.UserQuestion)
 
 # # TODO: use history_prompt_formatted and HistoryInput to create the history_chain
-# history_chain = None
+history_chain = (history_prompt_formatted | llm).with_types(input_type=schemas.HistoryInput)
 
 # # TODO: Let's construct the standalone_chain by piping standalone_prompt_formatted with the LLM
 # standalone_chain = None
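
history_chain is typed with schemas.HistoryInput, which this commit does not show. Judging by the {chat_history} and {question} placeholders in history_prompt (see app/prompts.py below), it is presumably a Pydantic model along these lines; the field names are inferred, not confirmed:

# Hypothetical sketch, not part of this commit: the input type bound to history_chain.
from pydantic import BaseModel

class HistoryInput(BaseModel):
    chat_history: str  # formatted transcript that fills the {chat_history} placeholder
    question: str      # follow-up question that fills the {question} placeholder

At runtime the chain is still a prompt-into-LLM runnable, so something like history_chain.invoke({"chat_history": "...", "question": "..."}) fills both placeholders before the LLM is called; .with_types only annotates the expected input for the serving layer.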
app/main.py CHANGED
@@ -10,18 +10,32 @@ from typing import List
 from sqlalchemy.orm import Session
 
 import schemas
-from chains import simple_chain, formatted_chain
-import crud, models, schemas
+from chains import simple_chain, formatted_chain, history_chain
+import crud, models, schemas, prompts
 from database import SessionLocal, engine
 from callbacks import LogResponseCallback
 
-
+# models.Base comes from SQLAlchemy's declarative_base() in database.py.
+# It acts as the base class for all ORM models (defined in models.py).
+# .metadata.create_all(): tells SQLAlchemy to create all the tables defined
+# in the models module if they don't already exist in the database.
+# -> metadata is a catalog of all the tables and other schema constructs in your database.
+# -> create_all() creates all the tables that don't exist yet in the database.
+# -> bind=engine specifies which database engine to use for this operation.
 models.Base.metadata.create_all(bind=engine)
 
 app = FastAPI()
 
 def get_db():
+    """This is a dependency function used to create and provide a
+    database session to various endpoints in the FastAPI app.
+    """
+    # A new SQLAlchemy session is created using the SessionLocal session factory.
+    # This session will be used for database transactions.
     db = SessionLocal()
+
+    # This pattern ensures that each request gets its own database session and that
+    # the session is properly closed when the request is finished, preventing resource leaks.
     try:
         yield db
     finally:
@@ -71,16 +85,31 @@ async def formatted_stream(request: Request):
     return EventSourceResponse(generate_stream(user_question, formatted_chain))
 
 
-# @app.post("/history/stream")
-# async def history_stream(request: Request, db: Session = Depends(get_db)):
-# # TODO: Let's implement the "/history/stream" endpoint. The endpoint should follow those steps:
-# # - The endpoint receives the request
-# # - The request is parsed into a user request
-# # - The user request is used to pull the chat history of the user
-# # - We add as part of the user history the current question by using add_message.
-# # - We create an instance of HistoryInput by using format_chat_history.
-# # - We use the history input within the history chain.
-# raise NotImplemented
+@app.post("/history/stream")
+async def history_stream(request: Request, db: Session = Depends(get_db)):
+    # The "/history/stream" endpoint follows these steps:
+    # - The endpoint receives the request
+    data = await request.json()
+
+    # - The request is parsed into a user request
+    user_request = schemas.UserRequest(**data['input'])
+
+    # - The user request is used to pull the chat history of the user
+    chat_history = crud.get_user_chat_history(db=db, username=user_request.username)
+
+    # - We add as part of the user history the current question by using add_message.
+    message = schemas.MessageBase(message=user_request.question, type='User')
+    crud.add_message(db, message=message, username=user_request.username)
+
+    # - We create an instance of HistoryInput by using format_chat_history.
+    history_input = schemas.HistoryInput(question=user_request.question, chat_history=prompts.format_chat_history(chat_history))
+
+    # - We use the history input within the history chain.
+    return EventSourceResponse(generate_stream(
+        history_input, history_chain, [LogResponseCallback(user_request, db)]
+    ))
+
+
 
 
 # @app.post("/rag/stream")
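
For reference, the request body the new endpoint expects can be read off the handler: request.json() must contain an "input" object whose fields construct schemas.UserRequest, i.e. at least username and question. A hypothetical client-side sketch (host, port, and example values are assumptions, not part of this commit):

# Hypothetical client sketch, not part of this commit.
# Payload shape inferred from `schemas.UserRequest(**data['input'])` in the handler above.
import requests

payload = {"input": {"username": "alice", "question": "What did we talk about yesterday?"}}

with requests.post(
    "http://localhost:8000/history/stream",  # assumed host/port
    json=payload,
    stream=True,                             # the endpoint streams server-sent events
) as response:
    for line in response.iter_lines():
        if line:
            print(line.decode())             # raw SSE lines; exact format depends on generate_stream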
app/prompts.py CHANGED
@@ -36,7 +36,14 @@ def format_prompt(prompt) -> PromptTemplate:
 def format_chat_history(messages: List[models.Message]):
     # TODO: implement format_chat_history to format
     # the list of Message into a text of chat history.
-    raise NotImplemented
+
+    return '\n'.join([
+        '[{}] {}: {}'.format(
+            message.timestamp.strftime("%Y-%m-%d %H:%M:%S"),
+            message.type,
+            message.message
+        ) for message in messages
+    ])
 
 
 def format_context(docs: List[str]):
@@ -46,11 +53,20 @@ def format_context(docs: List[str]):
     # like of strings and returns the context as one string.
     raise NotImplemented
 
-raw_prompt = "{question}"
+prompt = "{question}"
 
 # TODO: Create the history_prompt prompt that will capture the question and the conversation history.
 # The history_prompt needs a {chat_history} placeholder and a {question} placeholder.
-history_prompt: str = None
+history_prompt: str = """
+Given the following conversation, provide a helpful answer to the follow up question.
+
+Chat History:
+{chat_history}
+
+Follow Up Question: {question}
+
+helpful answer:
+"""
 
 # TODO: Create the standalone_prompt prompt that will capture the question and the chat history
 # to generate a standalone question. It needs a {chat_history} placeholder and a {question} placeholder,
@@ -61,11 +77,19 @@ standalone_prompt: str = None
 rag_prompt: str = None
 
 # TODO: create raw_prompt_formatted by using format_prompt
-raw_prompt_formatted = format_prompt(raw_prompt)
-raw_prompt = PromptTemplate.from_template(raw_prompt)
+#raw_prompt_formatted = format_prompt(raw_prompt)
+#raw_prompt = PromptTemplate.from_template(raw_prompt)
+
+# i) raw prompt
+raw_prompt = PromptTemplate.from_template(prompt)
+
+# ii) formatted prompt
+raw_prompt_formatted = format_prompt(prompt)
+
 
 # TODO: use format_prompt to create history_prompt_formatted
-history_prompt_formatted: PromptTemplate = None
+history_prompt_formatted = format_prompt(history_prompt)
+
 # TODO: use format_prompt to create standalone_prompt_formatted
 standalone_prompt_formatted: PromptTemplate = None
 # TODO: use format_prompt to create rag_prompt_formatted
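
To make the new format_chat_history concrete, here is a hypothetical usage sketch; the stand-in objects only mimic the timestamp, type, and message attributes the function reads (models.Message itself is defined elsewhere in the repo):

# Hypothetical usage sketch, not part of this commit.
from datetime import datetime
from types import SimpleNamespace

from prompts import format_chat_history  # assumes the app/ directory is on sys.path

history = [
    SimpleNamespace(timestamp=datetime(2024, 1, 1, 9, 0, 0), type="User", message="Hi!"),
    SimpleNamespace(timestamp=datetime(2024, 1, 1, 9, 0, 5), type="AI", message="Hello! How can I help?"),
]

print(format_chat_history(history))
# [2024-01-01 09:00:00] User: Hi!
# [2024-01-01 09:00:05] AI: Hello! How can I help?

This formatted transcript is what ends up in the {chat_history} slot of history_prompt once main.py wraps it in HistoryInput.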