Update app.py
app.py CHANGED
@@ -1,5 +1,27 @@
 import gradio as gr
-
-
-
-
+from typing import List
+from langchain_google_genai import GoogleGenerativeAIEmbeddings
+import google.generativeai as genai
+from langchain_community.vectorstores import FAISS
+from langchain_google_genai import ChatGoogleGenerativeAI
+
+genai.configure(api_key="AIzaSyD2o8vjePJb6z8vT_PVe82lVWMD3_cBL0g")
+
+
+def predict(message :str ,history,topic : str = "OIC") -> str:
+    model = genai.GenerativeModel("gemini-pro")
+    his = []
+    for i,j in history:
+        his.extend([
+            {"role": "user", "parts": i},
+            {"role": "model", "parts": j},
+        ])
+    chat = model.start_chat(
+        history=his
+    )
+    response = chat.send_message(message)
+    return response.text
+iface = gr.Interface(fn = predict,inputs = ["text","list","text"],outputs = "text")
+iface.launch()
+
+
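For reference, below is a minimal sketch (not part of the commit) of how the same Gemini-backed chat handler could be wired with gr.ChatInterface, which passes message and history to the callback automatically, and with the API key read from an environment variable instead of being hard-coded. The GOOGLE_API_KEY variable name is an assumption for illustration; the commit's unused topic parameter and the LangChain/FAISS imports are omitted here because predict does not use them yet.

import os
import google.generativeai as genai
import gradio as gr

# Assumed environment variable name; set it to your Gemini API key.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

def predict(message: str, history) -> str:
    model = genai.GenerativeModel("gemini-pro")
    # Convert Gradio's [(user, assistant), ...] history into the
    # {"role", "parts"} records that start_chat() expects.
    past = []
    for user_msg, model_msg in history:
        past.extend([
            {"role": "user", "parts": user_msg},
            {"role": "model", "parts": model_msg},
        ])
    chat = model.start_chat(history=past)
    return chat.send_message(message).text

gr.ChatInterface(fn=predict).launch()

Using gr.ChatInterface avoids mapping the conversation history onto a generic "list" input of gr.Interface, and keeping the key out of the source prevents it from being published with the Space.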