Stéphanie Kamgnia Wonkap committed
Commit 99f4972 · 1 Parent(s): 0b0ce2d

fixing invoke

Files changed (1): src/generator.py (+2 −2)
src/generator.py CHANGED
@@ -18,7 +18,7 @@ def promt_template():
     prompt = ChatPromptTemplate.from_messages(
         [
             ("system", prompt_in_chat_format),
-            ("human", "{query}")
+            ("human", "{input}")
         ])
     #RAG_PROMPT_TEMPLATE = tokenizer.apply_chat_template(
     #prompt_in_chat_format, tokenize=False, add_generation_prompt=True)
@@ -41,6 +41,6 @@ def answer_with_rag(
     #print(final_prompt)
     # Redact an answer
     print("=> Generating answer...")
-    response = retrieval_chain.invoke(query)
+    response = retrieval_chain.invoke({'input': query})

     return response['answer'], response["context"]