NotASI committed on
Commit a05cc1e · 1 Parent(s): c75a093

Update Nightly

Files changed (2)
  1. Tabs/Gemini_Chabot_Nightly.py +130 -37
  2. app.py +5 -2
Tabs/Gemini_Chabot_Nightly.py CHANGED
@@ -1,3 +1,93 @@
+# import os
+# import google.generativeai as genai
+# from google.generativeai.types import HarmCategory, HarmBlockThreshold
+# import gradio as gr
+# from dotenv import load_dotenv
+
+# load_dotenv()
+
+# GEMINI_API_KEY_NIGHTLY = os.getenv("GEMINI_API_KEY_NIGHTLY")
+# model_name = "gemini-1.5-flash"
+
+# TITLE_NIGHTLY = """<h1 align="center">🎮Chat with Gemini 1.5🔥 -- Nightly</h1>"""
+# NOTICE_NIGHTLY = """
+# Notices 📜:
+# - This app is still in development (extreme unstable)
+# - Some features may not work as expected
+# - Currently the chatbot only support text and images
+# """
+# ERROR_NIGHTLY = """
+# Known errors ⚠️:
+# - Error when submit messages from uploading files.
+# """
+
+# def upload_to_gemini(path, mime_type=None):
+#     file = genai.upload_file(path, mime_type=mime_type)
+#     print(f"Uploaded file '{file.display_name}' as: {file.uri}")
+#     return file
+
+# def transform_history(history):
+#     new_history = []
+#     for user_msg, model_msg in history:
+#         new_history.append({"role": "user", "parts": [{"text": user_msg}]})
+#         new_history.append({"role": "model", "parts": [{"text": model_msg}]})
+#     return new_history
+
+# def chatbot_stable(message, history):
+#     message_text = message["text"]
+#     message_files = message["files"]
+#     print("Message text:", message_text)
+#     print("Message files:", message_files)
+#     if message_files:
+#         image_uris = [upload_to_gemini(file_path["path"]) for file_path in message_files]
+#         message_content = [{"text": message_text}] + image_uris
+#     else:
+#         message_content = {"text": message_text}
+#     genai.configure(api_key=GEMINI_API_KEY_NIGHTLY)
+#     model = genai.GenerativeModel(
+#         model_name,
+#         safety_settings={
+#             HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+#             HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+#             HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+#             HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+#         },
+#         generation_config={
+#             "temperature": 1,
+#             "top_p": 0.95,
+#             "top_k": 64,
+#             "max_output_tokens": 8192,
+#             "response_mime_type": "text/plain",
+#         }
+#     )
+
+#     global chat
+#     chat = model.start_chat(history=[])
+#     chat.history = transform_history(history)
+#     response = chat.send_message(message_content, stream=True)
+#     response.resolve()
+
+#     return response.text
+
+# gemini_chatbot_interface_stable = gr.Chatbot(
+#     height=500,
+#     likeable=True,
+#     avatar_images=(
+#         None,
+#         "https://media.roboflow.com/spaces/gemini-icon.png"
+#     ),
+#     show_copy_button=True,
+#     show_share_button=True,
+#     render_markdown=True
+# )
+
+# gemini_chatbot_stable = gr.ChatInterface(
+#     fn=chatbot_stable,
+#     chatbot=gemini_chatbot_interface_stable,
+#     title="Gemini 1.5 Chatbot",
+#     multimodal=True
+# )
+
 import os
 import google.generativeai as genai
 from google.generativeai.types import HarmCategory, HarmBlockThreshold
@@ -21,56 +111,53 @@ Known errors ⚠️:
 - Error when submit messages from uploading files.
 """
 
-def upload_to_gemini(path, mime_type=None):
-    file = genai.upload_file(path, mime_type=mime_type)
-    print(f"Uploaded file '{file.display_name}' as: {file.uri}")
-    return file
+genai.configure(api_key=GEMINI_API_KEY_NIGHTLY)
+model = genai.GenerativeModel(
+    model_name,
+    safety_settings={
+        HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+        HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+        HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
+    },
+    generation_config={
+        "temperature": 1,
+        "top_p": 0.95,
+        "top_k": 64,
+        "max_output_tokens": 8192,
+        "response_mime_type": "text/plain",
+    }
+)
+chat = model.start_chat(history=[])
+
+def clear_chat_history():
+    chat.history = []
 
 def transform_history(history):
     new_history = []
     for user_msg, model_msg in history:
-        new_history.append({"role": "user", "parts": [{"text": user_msg}]})
-        new_history.append({"role": "model", "parts": [{"text": model_msg}]})
+        new_history.append({"role": "user", "parts": [user_msg]})
+        new_history.append({"role": "model", "parts": [model_msg]})
     return new_history
 
-def chatbot_stable(message, history):
+def chatbot_nightly(message, history):
     message_text = message["text"]
     message_files = message["files"]
     print("Message text:", message_text)
     print("Message files:", message_files)
     if message_files:
-        image_uris = [upload_to_gemini(file_path["path"]) for file_path in message_files]
-        message_content = [{"text": message_text}] + image_uris
+        image_uris = [genai.upload_file(path=file_path["path"]) for file_path in message_files]
+        message_content = [message_text] + image_uris
     else:
-        message_content = {"text": message_text}
-    genai.configure(api_key=GEMINI_API_KEY_NIGHTLY)
-    model = genai.GenerativeModel(
-        model_name,
-        safety_settings={
-            HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
-            HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_ONLY_HIGH,
-            HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
-            HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
-        },
-        generation_config={
-            "temperature": 1,
-            "top_p": 0.95,
-            "top_k": 64,
-            "max_output_tokens": 8192,
-            "response_mime_type": "text/plain",
-        }
-    )
-
-    global chat
-    chat = model.start_chat(history=[])
-    chat.history = transform_history(history)
+        message_content = [message_text]
+
     response = chat.send_message(message_content, stream=True)
     response.resolve()
 
     return response.text
 
-gemini_chatbot_interface_stable = gr.Chatbot(
-    height=500,
+gemini_chatbot_interface_nightly = gr.Chatbot(
+    height=400,
     likeable=True,
     avatar_images=(
         None,
@@ -81,9 +168,15 @@ gemini_chatbot_interface_stable = gr.Chatbot(
     render_markdown=True
 )
 
-gemini_chatbot_stable = gr.ChatInterface(
-    fn=chatbot_stable,
-    chatbot=gemini_chatbot_interface_stable,
+clear_chat_button = gr.ClearButton(
+    components=[gemini_chatbot_interface_nightly],
+    value="🗑️ Clear"
+)
+
+gemini_chatbot_nightly = gr.ChatInterface(
+    fn=chatbot_nightly,
+    chatbot=gemini_chatbot_interface_nightly,
     title="Gemini 1.5 Chatbot",
-    multimodal=True
+    multimodal=True,
+    clear_btn=clear_chat_button
 )
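In short, the nightly tab now configures genai, builds the GenerativeModel, and opens the chat session once at module scope, exposes clear_chat_history() to reset that session, calls genai.upload_file directly instead of the old upload_to_gemini wrapper, and passes plain strings (rather than {"text": ...} dicts) as history parts and message content. The following is a minimal standalone sketch of that flow, not part of the commit, using a placeholder API key and a hypothetical local image path:

import os
import google.generativeai as genai

# Placeholder configuration; the real module reads GEMINI_API_KEY_NIGHTLY from .env
genai.configure(api_key=os.getenv("GEMINI_API_KEY_NIGHTLY", "YOUR_API_KEY"))
model = genai.GenerativeModel("gemini-1.5-flash")
chat = model.start_chat(history=[])

# History parts are now plain strings instead of {"text": ...} dicts
chat.history = [
    {"role": "user", "parts": ["Hello"]},
    {"role": "model", "parts": ["Hi, how can I help?"]},
]

# A multimodal turn: upload a (hypothetical) local file and send it with the text
uploaded = genai.upload_file(path="example.png")
response = chat.send_message(["What is in this image?", uploaded], stream=True)
response.resolve()
print(response.text)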
app.py CHANGED
@@ -11,7 +11,7 @@ import gradio as gr
 from PIL import Image
 from dotenv import load_dotenv
 from google.generativeai.types import HarmCategory, HarmBlockThreshold
-from Tabs.Gemini_Chabot_Nightly import TITLE_NIGHTLY, NOTICE_NIGHTLY, ERROR_NIGHTLY, gemini_chatbot_stable
+from Tabs.Gemini_Chabot_Nightly import gemini_chatbot_nightly, clear_chat_button, clear_chat_history, TITLE_NIGHTLY, NOTICE_NIGHTLY, ERROR_NIGHTLY
 
 load_dotenv()
 
@@ -225,7 +225,10 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
         with gr.Row():
             gr.Markdown(NOTICE_NIGHTLY)
             gr.Markdown(ERROR_NIGHTLY)
-        gemini_chatbot_stable.render()
+        gemini_chatbot_nightly.render()
+        clear_chat_button.click(
+            fn=clear_chat_history
+        )
     # ============================== Nightly - END ==============================
 
 demo.queue().launch(debug=True, show_error=True)
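The intent of this wiring appears to be that clicking the nightly tab's clear button does two things at once: the gr.ClearButton empties the rendered Chatbot component it was given, while the attached click handler calls clear_chat_history() to reset the module-level genai chat session, so the model's context is dropped along with the visible transcript.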