import gradio as gr
from langchain_community.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import RetrievalQA
from langchain_community.llms import HuggingFacePipeline
from transformers import pipeline

def qa_from_pdf(pdf_path, question):
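    """Answer a question about a PDF with a small RAG pipeline (the index is rebuilt on every call)."""
    # Load the PDF; each page becomes one LangChain Document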
    loader = PyPDFLoader(pdf_path)
    pages = loader.load()

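    # Split the pages into overlapping ~500-character chunks for retrieval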
    splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)
    documents = splitter.split_documents(pages)

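    # Embed the chunks with a sentence-transformers model and index them in FAISS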
    embedding_model = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')
    db = FAISS.from_documents(documents, embedding_model)

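    # Very small GPT-2 checkpoint used as the generator; answer quality is limited, this is only a demo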
    hf_pipeline = pipeline('text-generation', model='sshleifer/tiny-gpt2', max_new_tokens=100)

    llm = HuggingFacePipeline(pipeline=hf_pipeline)

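    # RetrievalQA chain: retrieve the most relevant chunks from FAISS and pass them to the LLM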
    qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=db.as_retriever())
    answer = qa_chain.run(question)
    return answer

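# Gradio UI: a PDF upload plus a question box, wired to qa_from_pdf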
iface = gr.Interface(
    fn=qa_from_pdf,
    inputs=[
        gr.File(label="Upload a PDF file", file_types=[".pdf"], type="filepath"),
        gr.Textbox(label="Type your question")
    ],
    outputs="text",
    title="📄 RAG Demo: Question Answering over a PDF",
    description="Upload a PDF and ask questions about it. A demo powered by a small language model."
)

if __name__ == "__main__":
    iface.launch()