Spaces:
Sleeping
Sleeping
revised app.py
Browse files
app.py
CHANGED
@@ -80,6 +80,17 @@ def ensure_complete_sentences(text):
|
|
80 |
return ' '.join(sentences).strip()
|
81 |
return text # Return as is if no complete sentence is found
|
82 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
83 |
# Initialize the LLM using ChatGroq with GROQ's API
|
84 |
def initialize_llm(model, temperature, max_tokens):
|
85 |
try:
|
@@ -154,8 +165,11 @@ def create_rag_pipeline(file_paths, model, temperature, max_tokens):
|
|
154 |
logger.error(f"Error creating RAG pipeline: {e}")
|
155 |
return None, f"Error creating RAG pipeline: {e}"
|
156 |
|
157 |
-
# Function to answer questions with post-processing
|
158 |
def answer_question(file_paths, model, temperature, max_tokens, question):
|
|
|
|
|
|
|
159 |
rag_chain, message = create_rag_pipeline(file_paths, model, temperature, max_tokens)
|
160 |
if rag_chain is None:
|
161 |
return message
|
|
|
80 |
return ' '.join(sentences).strip()
|
81 |
return text # Return as is if no complete sentence is found
|
82 |
|
83 |
+
# Function to check if input is valid
def is_valid_input(text):
    """
    Check whether the input text is meaningful.

    Text is considered meaningful when it contains at least one
    alphabetic character. The check is Unicode-aware via str.isalpha(),
    so non-Latin scripts (e.g. Cyrillic, Greek) are accepted — the
    previous ASCII-only regex [A-Za-z] wrongly rejected valid
    non-English input.

    Args:
        text: Candidate input string; may be None, empty, or
            whitespace-only.

    Returns:
        bool: True if the text contains at least one letter,
        False otherwise.
    """
    # Guard: None, empty string, or whitespace-only input is invalid.
    if not text or not text.strip():
        return False
    # str.isalpha() is Unicode-aware, unlike the old [A-Za-z] regex.
    return any(ch.isalpha() for ch in text)
|
93 |
+
|
94 |
# Initialize the LLM using ChatGroq with GROQ's API
|
95 |
def initialize_llm(model, temperature, max_tokens):
|
96 |
try:
|
|
|
165 |
logger.error(f"Error creating RAG pipeline: {e}")
|
166 |
return None, f"Error creating RAG pipeline: {e}"
|
167 |
|
168 |
+
# Function to answer questions with input validation and post-processing
|
169 |
def answer_question(file_paths, model, temperature, max_tokens, question):
|
170 |
+
if not is_valid_input(question):
|
171 |
+
return "Please provide a valid question or input containing meaningful text."
|
172 |
+
|
173 |
rag_chain, message = create_rag_pipeline(file_paths, model, temperature, max_tokens)
|
174 |
if rag_chain is None:
|
175 |
return message
|