Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -139,13 +139,26 @@ Answer concisely:"""
|
|
139 |
# Create a prompt object using the template
|
140 |
prompt = ChatPromptTemplate.from_template(template)
|
141 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
142 |
# Function to generate a response using the prompt and the context
|
143 |
def generate_response_with_prompt(context, question):
|
144 |
-
|
145 |
context=context,
|
146 |
question=question
|
147 |
)
|
148 |
-
|
|
|
|
|
|
|
|
|
|
|
149 |
|
150 |
# Define the function to generate a hybrid response using Neo4j and other retrieval methods
|
151 |
def retriever(question: str):
|
|
|
139 |
# Create a prompt object using the template
|
140 |
prompt = ChatPromptTemplate.from_template(template)
|
141 |
|
142 |
+
# Function to generate a response using the prompt and the context
|
143 |
+
#def generate_response_with_prompt(context, question):
|
144 |
+
#response = prompt.format(
|
145 |
+
#context=context,
|
146 |
+
#question=question
|
147 |
+
#)
|
148 |
+
#return response
|
149 |
+
|
150 |
# Function to generate a response using the prompt and the context
def generate_response_with_prompt(context, question):
    """Format the shared prompt template with the retrieved context and the
    user's question, then ask the chat model for an answer.

    Parameters
    ----------
    context : str
        Retrieved supporting text that grounds the answer.
    question : str
        The user's question.

    Returns
    -------
    str
        The model's answer text with surrounding whitespace stripped.

    Raises
    ------
    KeyError
        If the ``OPENAI_API_KEY`` environment variable is not set.
    """
    # Fill the module-level ChatPromptTemplate with the two template slots.
    formatted_prompt = prompt.format(
        context=context,
        question=question,
    )
    # temperature=0 keeps the answers deterministic for a retrieval-QA flow.
    llm = ChatOpenAI(temperature=0, api_key=os.environ['OPENAI_API_KEY'])
    # Use .invoke() rather than calling the model object directly:
    # BaseChatModel.__call__ is deprecated (and removed in recent LangChain
    # releases) and expects a list of messages, so llm(formatted_prompt) with
    # a bare string can raise at runtime. .invoke() accepts a plain string
    # and coerces it into a HumanMessage.
    response = llm.invoke(formatted_prompt)
    return response.content.strip()
|
160 |
+
|
161 |
+
|
162 |
|
163 |
# Define the function to generate a hybrid response using Neo4j and other retrieval methods
|
164 |
def retriever(question: str):
|