Update app.py
Browse files
app.py
CHANGED
@@ -38,16 +38,19 @@ def get_relevant_context(pdf_text, query, num_contexts=3):
|
|
38 |
relevant_context = " ".join([pdf_text_chunks[i] for i in top_indices])
|
39 |
return relevant_context
|
40 |
|
41 |
-
# Function to generate a response from GPT-
|
42 |
def generate_response(context, question):
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
|
|
|
|
|
|
47 |
max_tokens=200,
|
48 |
temperature=0.7,
|
49 |
)
|
50 |
-
return response
|
51 |
|
52 |
# Function to handle irrelevant questions
|
53 |
def is_irrelevant_question(question):
|
@@ -85,7 +88,7 @@ def main():
|
|
85 |
# Get the most relevant context from the document
|
86 |
relevant_context = get_relevant_context(pdf_text, question)
|
87 |
|
88 |
-
# Generate the response from GPT-
|
89 |
answer = generate_response(relevant_context, question)
|
90 |
|
91 |
# Display the answer
|
|
|
38 |
relevant_context = " ".join([pdf_text_chunks[i] for i in top_indices])
|
39 |
return relevant_context
|
40 |
|
41 |
# Function to generate a response from GPT-4 chat model
def generate_response(context, question):
    """Ask the chat model a question grounded in the given document context.

    Args:
        context: Text pulled from the PDF that is relevant to the question.
        question: The user's question.

    Returns:
        The model's answer as a stripped string.
    """
    # Combine the retrieved context and the user's question into one prompt.
    user_prompt = f"Context: {context}\nQuestion: {question}"
    chat_history = [
        {"role": "system", "content": "You are a helpful assistant expert on GPT-4."},
        {"role": "user", "content": user_prompt},
    ]
    completion = openai.ChatCompletion.create(
        model="gpt-4o-mini",  # Use the GPT-4 chat model
        messages=chat_history,
        max_tokens=200,
        temperature=0.7,
    )
    # Return only the text of the first (and only) completion choice.
    answer = completion['choices'][0]['message']['content']
    return answer.strip()
|
54 |
|
55 |
# Function to handle irrelevant questions
|
56 |
def is_irrelevant_question(question):
|
|
|
88 |
# Get the most relevant context from the document
|
89 |
relevant_context = get_relevant_context(pdf_text, question)
|
90 |
|
91 |
+
# Generate the response from GPT-4 chat model
|
92 |
answer = generate_response(relevant_context, question)
|
93 |
|
94 |
# Display the answer
|