moazzamdev committed on
Commit b827b4c · 1 Parent(s): a4129a8

Update page2.py

Files changed (1)
  1. page2.py +14 -8
page2.py CHANGED
@@ -5,6 +5,11 @@ from streamlit_chat import message
 from PIL import Image
 import base64
 import io
+from IPython.display import display
+from IPython.display import Markdown
+
+import pathlib
+import textwrap
 from langchain.chains import LLMChain
 from langchain.prompts import PromptTemplate
 from langchain.memory import ConversationBufferMemory
@@ -13,7 +18,6 @@ from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
 # Streamlit app
 def image():
 
-
     def process_image(uploaded_file):
         # Display the uploaded image
         image = Image.open(uploaded_file)
@@ -32,12 +36,13 @@ def image():
     llm = ChatGoogleGenerativeAI(model="gemini-pro-vision", google_api_key=apiKey)
 
     image_url = None  # Initialize image_url outside the if statement
+    with st.sidebar:
+        uploaded_file = st.file_uploader("Choose an image...", type=["jpg", "jpeg", "png"])
+        if uploaded_file is not None:
+            image_url = process_image(uploaded_file)
 
-    uploaded_file = st.file_uploader("Choose an image...", type=["jpg", "jpeg", "png"])
-    if uploaded_file is not None:
-        image_url = process_image(uploaded_file)
-
-
+    with st.chat_message("assistant"):
+        st.write("Hello 👋, upload an image and ask questions related to it!")
     if 'messages' not in st.session_state:
         st.session_state['messages'] = []
 
@@ -63,8 +68,9 @@ def image():
                 "content": prompt
            }
        )
-        response = llm.invoke([message])
-        text_output = response.content
+        with st.spinner('Generating...'):
+            response = llm.invoke([message])
+            text_output = response.content
 
        with st.chat_message("assistant").markdown(text_output):
            st.session_state.messages.append(
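
Note: the commit moves the file uploader into the sidebar, greets the user in the chat area, and wraps the Gemini call in st.spinner. The diff does not show how the `message` passed to `llm.invoke([message])` is built; the sketch below is one plausible way to assemble a multimodal message for gemini-pro-vision with langchain-google-genai, assuming `process_image` yields a base64 data URL. The helper name `build_vision_message` and the data-URL encoding are assumptions for illustration, not part of the commit.

# Minimal sketch (not from the commit): constructing the multimodal HumanMessage
# that llm.invoke([...]) would consume for gemini-pro-vision.
import base64
import io

from PIL import Image
from langchain_core.messages import HumanMessage


def build_vision_message(prompt: str, uploaded_file) -> HumanMessage:
    # Encode the uploaded image as a base64 data URL (assumed format; the
    # commit's process_image() body is not shown in full in this diff).
    image = Image.open(uploaded_file)
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")
    data_url = "data:image/png;base64," + base64.b64encode(buffer.getvalue()).decode()

    # gemini-pro-vision accepts mixed text and image_url content parts.
    return HumanMessage(content=[
        {"type": "text", "text": prompt},
        {"type": "image_url", "image_url": data_url},
    ])


# Hypothetical usage, mirroring the updated page2.py flow:
#   llm = ChatGoogleGenerativeAI(model="gemini-pro-vision", google_api_key=apiKey)
#   response = llm.invoke([build_vision_message(prompt, uploaded_file)])
#   text_output = response.content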