import streamlit as st
from streamlit_option_menu import option_menu
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
from PyPDF2 import PdfReader

# Configure page settings (set_page_config must be the first Streamlit command)
st.set_page_config(
    page_title="Disease Analysis GPT",
    layout="wide",
    initial_sidebar_state="expanded"
)

# Load Hugging Face models and tokenizer for text generation
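# st.cache_resource keeps a single tokenizer/model instance in memory across reruns and sessions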
@st.cache_resource
def load_model():
    tokenizer = AutoTokenizer.from_pretrained("harishussain12/Disease_Managment")
    model = AutoModelForCausalLM.from_pretrained("harishussain12/Disease_Managment")
    return tokenizer, model

# Function to create a text generation pipeline
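# Also cached so the pipeline is constructed only once per process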
@st.cache_resource
def create_pipeline():
    tokenizer, model = load_model()
    return pipeline("text-generation", model=model, tokenizer=tokenizer)

# Function to extract text from a PDF file
def read_pdf(file):
    try:
        reader = PdfReader(file)
        text = ""
        for page in reader.pages:
            # extract_text() can return None for pages with no extractable text
            text += page.extract_text() or ""
        return text
    except Exception as e:
        return f"Error reading PDF: {e}"

# Load the text-generation pipeline (the model is downloaded on first run, then served from cache)
text_pipeline = create_pipeline()

# Custom CSS for styling
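# .stButton targets Streamlit's built-in buttons; .search-box and .info-box are for custom HTML blocks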
st.markdown(
    """
    <style>
    body {
        font-family: 'Arial', sans-serif;
    }
    .stButton button {
        background-color: #0b2545;
        color: white;
        border: none;
        border-radius: 25px;
        padding: 8px 20px;
        font-size: 14px;
        font-weight: bold;
        cursor: pointer;
    }
    .stButton button:hover {
        background-color: #0a1b35;
    }
    .search-box {
        border-radius: 20px;
        border: 1px solid #ccc;
        padding: 10px;
        width: 100%;
        font-size: 16px;
        background-color: #ffffff;
    }
    .info-box {
        background-color: #f8f9fa;
        border-left: 5px solid #0b2545;
        padding: 15px;
        border-radius: 5px;
        font-size: 14px;
    }
    </style>
    """,
    unsafe_allow_html=True
)

# Sidebar
with st.sidebar:
    new_chat_button = st.button("New Chat", key="new_chat", help="Start a new chat to ask a different question.")
    if new_chat_button:
        st.session_state.clear()  # Clear session state to simulate a new chat

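    # Navigation menu (streamlit-option-menu), styled to match the dark sidebar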
    selected = option_menu(
        menu_title=None,
        options=[" Home", " Discover"],
        icons=["house", "search"],
        menu_icon="cast",
        default_index=0,
        styles={
            "container": {"padding": "0!important", "background-color": "#3e4a5b"},
            "icon": {"color": "#ffffff", "font-size": "16px"},
            "nav-link": {
                "font-size": "15px", 
                "text-align": "left", 
                "margin": "0px", 
                "color": "#ffffff",
                "font-weight": "bold",
                "padding": "10px 20px",
            },
            "nav-link-selected": {"background-color": "#0b2545", "color": "white"},
        }
    )

# Main content
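# A 1:2:1 column split keeps the content centred on wide layouts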
col1, col2, col3 = st.columns([1, 2, 1])

with col2:
    st.markdown("<h1 style='text-align: center;'>Disease Analysis GPT</h1>", unsafe_allow_html=True)
    st.markdown("<h3 style='text-align: center;'>What do you want to know?</h3>", unsafe_allow_html=True)

    # Model selection (Document Analysis is listed but not supported in this version)
    model_selection = st.selectbox(
        "Select a model", 
        options=["Disease Analysis", "Document Analysis"], 
        index=0
    )

    # If the user selects Document Analysis, show an error and prompt them to switch to Disease Analysis
    if model_selection == "Document Analysis":
        st.error("Please switch to 'Disease Analysis' model for generating responses. Document Analysis is not available in this version.")

    # Search box (label is hidden but kept non-empty to avoid Streamlit's empty-label warning)
    search_input = st.text_input(
        "Question",
        placeholder="Type your question here...",
        label_visibility="collapsed",
        help="Ask anything related to disease management."
    )

    # File upload below search box
    uploaded_file = st.file_uploader("Upload a PDF file", type="pdf", help="Attach relevant files or documents to your query.")

    if search_input:
        with st.spinner("Generating response..."):
            try:
                if model_selection == "Disease Analysis":
                    context = ""
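                    # If a PDF was uploaded, its extracted text is appended to the prompt as context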
                    if uploaded_file is not None:
                        file_content = read_pdf(uploaded_file)
                        # read_pdf returns an error message string on failure
                        if file_content.startswith("Error reading PDF"):
                            st.error(file_content)
                        else:
                            context = file_content

                    query_input = search_input + (f"\n\nContext:\n{context}" if context else "")
                    # max_new_tokens bounds only the generated text; max_length would also count
                    # the prompt tokens, which a long PDF context could exceed
                    response = text_pipeline(query_input, max_new_tokens=200, num_return_sequences=1)
                    st.markdown(f"### Response:\n{response[0]['generated_text']}")

            except Exception as e:
                st.error(f"Error generating response: {str(e)}")