import gradio as gr
import google.generativeai as genai
import PyPDF2
import os
from dotenv import load_dotenv
# Load the Gemini API key from the environment (e.g. a local .env file)
load_dotenv(override=True)
api_key = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=api_key)
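# Assumption: GEMINI_API_KEY comes either from a .env file next to this script or,
# on Hugging Face Spaces, from a repository secret exposed as an environment
# variable; os.getenv() picks it up in both cases.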
# Initialize the Gemini model; the Turkish system instruction tells it to answer
# from the uploaded documents and to reply in Turkish unless asked otherwise
model = genai.GenerativeModel(
    model_name="gemini-1.5-flash",
    system_instruction="Sen bir belge sohbet asistanısın. Sorulara sana verilen belgeler içindeki bilgileri tarayarak yanıt vermelisin. Aksi belirtilmediği sürece yanıtları Türkçe olarak ver.",
)
def extract_text_from_pdf(pdf_files):
    """Return the extracted text of each uploaded PDF as a list of strings."""
    extracted_texts = []
    for pdf_file in pdf_files:
        # With type="filepath" Gradio passes plain path strings; fall back to
        # .name for older file-object style values
        path = pdf_file if isinstance(pdf_file, str) else pdf_file.name
        with open(path, "rb") as f:
            pdf_reader = PyPDF2.PdfReader(f)
            extracted_text = ""
            for page in pdf_reader.pages:
                text = page.extract_text()
                if text:
                    extracted_text += text
        extracted_texts.append(extracted_text)
    return extracted_texts
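# Illustrative call (hypothetical file names, not from the app itself):
#   extract_text_from_pdf(["manual.pdf", "report.pdf"])
#   -> ["<all text of manual.pdf>", "<all text of report.pdf>"]
# Pages where PyPDF2 cannot extract text (e.g. scanned images) contribute nothing.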
def chat_with_documents(history, pdf_files):
    """Answer the latest user message using the text of the uploaded PDFs."""
    if not history:
        return history
    extracted_texts = extract_text_from_pdf(pdf_files)
    user_input = history[-1]["content"]
    # The question and each document's text are sent as separate content parts
    prompt = [user_input] + extracted_texts
    response = model.generate_content(prompt)
    history.append({"role": "assistant", "content": response.text})
    return history
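# Sketch of the request shape (placeholders, not real data): generate_content accepts
# a list of string parts, so the call above effectively sends
#   ["<user question>", "<text of PDF 1>", "<text of PDF 2>", ...]
# as a single prompt; very large PDFs may exceed the model's context window.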
with gr.Blocks() as demo:
    gr.Markdown("# Belgelerinizle Sohbet Edin (Gemini API kullanarak)")
    with gr.Accordion("Belgeler", open=False):
        docs = gr.File(
            label="Belgeleri Yükleyin",
            file_count="multiple",
            type="filepath",
        )
    chatbot = gr.Chatbot(type="messages")
    msg = gr.Textbox(label="Mesajınız")
    clear = gr.Button("Temizle")
    def on_user_message(user_message, history):
        # Append the user's message to the chat history and clear the textbox
        history = history or []
        history.append({"role": "user", "content": user_message})
        return "", history
    def on_generate_response(history, docs_value):
        # Ask for documents first if none have been uploaded yet
        if not docs_value:
            history.append({"role": "assistant", "content": "Lütfen önce belge(leri) yükleyin."})
            return history
        if history:
            history = chat_with_documents(history, docs_value)
        return history
    # Two-step flow: echo the user message immediately, then generate the reply
    msg.submit(on_user_message, [msg, chatbot], [msg, chatbot]).then(
        on_generate_response, [chatbot, docs], chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()