getapi committed on
Commit
5c14a47
·
1 Parent(s): 799a9b8

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +87 -0
app.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from PIL import Image
2
+ import io
3
+ import os
4
+ import streamlit as st
5
+ import google.generativeai as genai
6
+
7
+ # import google.ai.generativelanguage as glm
8
+
9
+
10
# Disable every Gemini safety filter so responses are never blocked by
# category. Each entry follows the google-generativeai safety-settings shape.
safety_settings = [
    {"category": category, "threshold": "BLOCK_NONE"}
    for category in (
        "HARM_CATEGORY_HARASSMENT",
        "HARM_CATEGORY_HATE_SPEECH",
        "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        "HARM_CATEGORY_DANGEROUS_CONTENT",
    )
]
28
+
29
# Sidebar UI: app title, Gemini API configuration, and an optional image
# upload used to switch the chat into vision mode.
with st.sidebar:
    st.title("Gemini Pro")

    # The API key is read from Streamlit secrets (.streamlit/secrets.toml).
    genai.configure(api_key=st.secrets["api_key"])

    uploaded_image = st.file_uploader(
        "upload image",
        label_visibility="collapsed",
        accept_multiple_files=False,
        type=["png", "jpg"],
    )
    if uploaded_image is not None:
        # NOTE: image_bytes is only bound at module scope when an image was
        # uploaded; the chat handler further down probes globals() for it.
        image_bytes = uploaded_image.read()
47
+
48
+
49
def get_response(messages, model="gemini-pro"):
    """Request a streaming Gemini completion for *messages*.

    Args:
        messages: Content accepted by ``GenerativeModel.generate_content`` —
            either chat-style dicts or a list of text/image parts.
        model: Name of the Gemini model to use.

    Returns:
        The streaming response iterator (``stream=True``), yielding chunks
        with a ``.text`` attribute.
    """
    # Use a distinct local name instead of shadowing the `model` parameter.
    client = genai.GenerativeModel(model)
    return client.generate_content(
        messages,
        stream=True,
        safety_settings=safety_settings,
    )
53
+
54
+
55
# Persist the chat history across Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state["messages"] = []
messages = st.session_state["messages"]

# Replay stored history. Each entry is {"role": ..., "parts": [text]}.
# Bug fix: the original unpacked `item.values()`, which silently depends on
# the dicts' key insertion order — explicit key access is order-independent.
for item in messages:
    role = item["role"]
    text = item["parts"][0]
    if role == "user":
        st.chat_message("user").markdown(text)
    elif role == "model":
        # Gemini's "model" role maps to Streamlit's "assistant" avatar.
        st.chat_message("assistant").markdown(text)
66
+
67
# Input box for the next user turn.
chat_message = st.chat_input("Спроси что-нибудь!")

if chat_message:
    st.chat_message("user").markdown(chat_message)
    # Placeholder that is progressively overwritten while streaming.
    res_area = st.chat_message("assistant").empty()

    # Bug fix: record the user turn in the session history. Previously only
    # model turns were appended, so the replay loop above (which branches on
    # role == "user") never showed past user messages after a rerun.
    messages.append({"role": "user", "parts": [chat_message]})

    if "image_bytes" in globals():
        # An image was uploaded in the sidebar: send [text, image] to the
        # vision model instead of the plain text model.
        vision_message = [chat_message, Image.open(io.BytesIO(image_bytes))]
        res = get_response(vision_message, model="gemini-pro-vision")
    else:
        res = get_response([{"role": "user", "parts": [chat_message]}])

    # Stream the response chunks into the placeholder as they arrive.
    res_text = ""
    for chunk in res:
        res_text += chunk.text
        res_area.markdown(res_text)

    # Store the completed model turn for the replay loop.
    messages.append({"role": "model", "parts": [res_text]})
87
+