import re
from typing import List

import gradio as gr
import google.generativeai as genai
# The RAG-related imports below (embeddings, FAISS, chat model) are not used in this snippet.
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI
from langchain_community.vectorstores import FAISS

# Note: avoid hardcoding a real API key in shared code; loading it from an environment variable is safer.
genai.configure(api_key="AIzaSyD2o8vjePJb6z8vT_PVe82lVWMD3_cBL0g")
def format_gemini_response(text):
    # Convert Gemini's Markdown-style output into HTML for the Gradio HTML component.
    bold_pattern = r"\*\*(.*?)\*\*"
    italic_pattern = r"\*(.*?)\*"
    code_pattern = r"```(.*?)```"

    # Newlines are replaced first so the single-line regexes below can also match
    # fenced code blocks that originally spanned multiple lines.
    text = text.replace("\n", "<br>")
    formatted_text = re.sub(code_pattern, r"<pre><code>\1</code></pre>", text)
    formatted_text = re.sub(bold_pattern, r"<b>\1</b>", formatted_text)
    formatted_text = re.sub(italic_pattern, r"<i>\1</i>", formatted_text)
    return formatted_text
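# For example (hypothetical input):
#   format_gemini_response("**Hi** *there*\n```print(1)```")
# returns:
#   "<b>Hi</b> <i>there</i><br><pre><code>print(1)</code></pre>"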
def predict(message: str, chat_his: List[List[str]], d: dict) -> str:
    if not message.strip():
        return "Error: Message cannot be empty.", chat_his, d

    model = genai.GenerativeModel("gemini-pro")

    # Replay the stored (user, model) pairs in the history format expected by start_chat;
    # each "parts" entry is a list of strings.
    his = []
    for user_msg, model_msg in chat_his:
        his.extend([
            {"role": "user", "parts": [user_msg]},
            {"role": "model", "parts": [model_msg]},
        ])

    chat = model.start_chat(history=his)
    response = chat.send_message(message)

    # Update chat history
    chat_his.append((message, response.text))
    return format_gemini_response(response.text), chat_his, d
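# Quick local sanity check (hypothetical call; assumes the API key above is valid):
#   reply_html, history, state = predict("Hello!", [], {})
#   print(reply_html)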
# Wire a text box plus two State components (chat history and the spare dict)
# so the interface matches predict's three inputs and three outputs.
iface = gr.Interface(
    fn=predict,
    inputs=[gr.Textbox(label="Message"), gr.State([]), gr.State({})],
    outputs=[gr.HTML(), gr.State(), gr.State()],  # HTML output so the converted tags render properly
)

iface.launch(share=True)
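# share=True prints a temporary public gradio.live URL in addition to the local one.
# After two turns the chat-history state would look something like (hypothetical replies):
#   [("Hi", "Hello! How can I help?"), ("What is 2+2?", "4")]
# and each pair is replayed into Gemini's history on the next call.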