muhammadahmedrayyan
committed on
Commit
•
c9a5b96
1
Parent(s):
e035f8f
Update app.py
Browse files
app.py
CHANGED
@@ -13,9 +13,9 @@ st.set_page_config(
|
|
13 |
|
14 |
# Load Hugging Face models and tokenizer for text generation
|
15 |
@st.cache_resource
|
16 |
-
def load_model(
|
17 |
-
tokenizer = AutoTokenizer.from_pretrained(
|
18 |
-
model = AutoModelForCausalLM.from_pretrained(
|
19 |
return tokenizer, model
|
20 |
|
21 |
# Load Hugging Face models and tokenizer for document question answering
|
@@ -27,8 +27,8 @@ def load_document_model():
|
|
27 |
|
28 |
# Function to create a text generation pipeline
|
29 |
@st.cache_resource
|
30 |
-
def create_pipeline(
|
31 |
-
tokenizer, model = load_model(
|
32 |
return pipeline("text-generation", model=model, tokenizer=tokenizer)
|
33 |
|
34 |
# Function to create a document question answering pipeline
|
@@ -48,6 +48,10 @@ def read_pdf(file):
|
|
48 |
except Exception as e:
|
49 |
return f"Error reading PDF: {e}"
|
50 |
|
|
|
|
|
|
|
|
|
51 |
# Custom CSS for styling
|
52 |
st.markdown(
|
53 |
"""
|
@@ -145,9 +149,6 @@ with col2:
|
|
145 |
with st.spinner("Generating response..."):
|
146 |
try:
|
147 |
if model_selection == "Disease Analysis":
|
148 |
-
# Adjusted model name to match working configurations
|
149 |
-
pipe = create_pipeline("harishussain12/Disease_Managment")
|
150 |
-
|
151 |
context = ""
|
152 |
if uploaded_file is not None:
|
153 |
file_content = read_pdf(uploaded_file)
|
@@ -157,14 +158,10 @@ with col2:
|
|
157 |
context = file_content
|
158 |
|
159 |
query_input = search_input + (f"\n\nContext:\n{context}" if context else "")
|
160 |
-
|
161 |
-
|
162 |
-
response = pipe(query_input, max_length=200, num_return_sequences=1)
|
163 |
st.markdown(f"### Response:\n{response[0]['generated_text']}")
|
164 |
|
165 |
elif model_selection == "Document Analysis":
|
166 |
-
pipe = create_document_pipeline()
|
167 |
-
|
168 |
context = ""
|
169 |
if uploaded_file is not None:
|
170 |
file_content = read_pdf(uploaded_file)
|
@@ -174,7 +171,7 @@ with col2:
|
|
174 |
context = file_content
|
175 |
|
176 |
if search_input and context:
|
177 |
-
result =
|
178 |
st.markdown(f"### Answer:\n{result['answer']}")
|
179 |
|
180 |
except Exception as e:
|
|
|
13 |
|
14 |
# Load Hugging Face models and tokenizer for text generation
@st.cache_resource
def load_model(model_name="harishussain12/Disease_Managment"):
    """Load and cache the causal-LM tokenizer and model pair.

    Args:
        model_name: Hugging Face model repo id. Defaults to the
            disease-management model this app was built around, so
            existing callers (``load_model()``) are unaffected.

    Returns:
        A ``(tokenizer, model)`` tuple ready to feed into a
        ``text-generation`` pipeline.
    """
    # The repo id was hard-coded twice before; a single defaulted
    # parameter removes the duplication and lets other models be tried.
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model
|
20 |
|
21 |
# Load Hugging Face models and tokenizer for document question answering
|
|
|
27 |
|
28 |
# Function to create a text generation pipeline
@st.cache_resource
def create_pipeline():
    """Build and cache a text-generation pipeline on top of the cached model.

    Returns:
        A Hugging Face ``text-generation`` pipeline wired to the
        tokenizer/model pair produced by ``load_model``.
    """
    tok, mdl = load_model()
    text_gen = pipeline("text-generation", model=mdl, tokenizer=tok)
    return text_gen
|
33 |
|
34 |
# Function to create a document question answering pipeline
|
|
|
48 |
except Exception as e:
|
49 |
return f"Error reading PDF: {e}"
|
50 |
|
51 |
+
# Eagerly build (and cache, via st.cache_resource) both inference
# pipelines once at app start-up so request handling doesn't pay the
# model-loading cost.
document_pipeline = create_document_pipeline()
text_pipeline = create_pipeline()
|
54 |
+
|
55 |
# Custom CSS for styling
|
56 |
st.markdown(
|
57 |
"""
|
|
|
149 |
with st.spinner("Generating response..."):
|
150 |
try:
|
151 |
if model_selection == "Disease Analysis":
|
|
|
|
|
|
|
152 |
context = ""
|
153 |
if uploaded_file is not None:
|
154 |
file_content = read_pdf(uploaded_file)
|
|
|
158 |
context = file_content
|
159 |
|
160 |
query_input = search_input + (f"\n\nContext:\n{context}" if context else "")
|
161 |
+
response = text_pipeline(query_input, max_length=200, num_return_sequences=1)
|
|
|
|
|
162 |
st.markdown(f"### Response:\n{response[0]['generated_text']}")
|
163 |
|
164 |
elif model_selection == "Document Analysis":
|
|
|
|
|
165 |
context = ""
|
166 |
if uploaded_file is not None:
|
167 |
file_content = read_pdf(uploaded_file)
|
|
|
171 |
context = file_content
|
172 |
|
173 |
if search_input and context:
|
174 |
+
result = document_pipeline({"question": search_input, "context": context})
|
175 |
st.markdown(f"### Answer:\n{result['answer']}")
|
176 |
|
177 |
except Exception as e:
|