File size: 1,942 Bytes
f6a53a3
c534f6f
f6a53a3
 
 
c534f6f
 
 
 
 
f6a53a3
 
 
c534f6f
f6a53a3
 
c534f6f
 
 
 
 
 
 
 
22db157
cf43e0d
22db157
cf43e0d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22db157
c534f6f
f6a53a3
c534f6f
f6a53a3
c534f6f
f6a53a3
 
 
c534f6f
f6a53a3
 
c534f6f
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import os

import gradio as gr
import openai
# NOTE(review): `langchain.embeddings.HuggingFaceEmbeddings` is deprecated in newer
# LangChain releases in favor of `langchain_community.embeddings` (which this file
# already uses for vectorstores) — confirm installed version before migrating.
from langchain.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

# SECURITY: the API key was previously hardcoded in source (a leaked credential).
# Read it from the environment instead; the old key must be revoked/rotated.
openai.api_key = os.environ.get("OPENAI_API_KEY", "")

# Load the sentence-embedding model used for similarity search.
model_name = "intfloat/e5-small"
embedding_model = HuggingFaceEmbeddings(model_name=model_name)

# Load the persisted ChromaDB vector store from disk.
persist_directory = "./docs/chroma/"
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding_model)

# Define RAG function
def rag_pipeline(question):
    """Retrieve relevant documents for *question* and generate an AI answer.

    Returns a 2-tuple (answer, retrieved_docs_text) matching the two output
    Textboxes declared in the Gradio interface. The original version built a
    prompt with literal ``\\n`` characters, never called the model, and never
    returned anything — the UI would always show empty outputs.
    """
    # Top-5 nearest documents from the Chroma store.
    retrieved_docs = vectordb.similarity_search(question, k=5)
    context = " ".join(doc.page_content for doc in retrieved_docs)

    # Delegate prompt construction and the GPT-4 call to generate_response.
    answer = generate_response(question, context)

    # Show the retrieved passages separated by blank lines for readability.
    docs_text = "\n\n".join(doc.page_content for doc in retrieved_docs)
    return answer, docs_text
import openai

def generate_response(question, context):
    """Generate AI response using OpenAI GPT-4.

    Builds a single user prompt embedding *context* and *question*, sends it
    to GPT-4 via the (legacy, pre-1.0) ``openai.ChatCompletion`` API, and
    returns the stripped text of the first choice.
    """
    prompt = f"Context: {context}\n\nQuestion: {question}"
    conversation = [
        {"role": "system", "content": "You are an AI assistant helping with question answering."},
        {"role": "user", "content": prompt},
    ]

    completion = openai.ChatCompletion.create(
        model="gpt-4",
        messages=conversation,
        max_tokens=300,
        temperature=0.7,
    )

    answer = completion['choices'][0]['message']['content']
    return answer.strip()

    

# Gradio UI: one question box in, two text boxes out (answer + retrieved docs).
question_box = gr.Textbox(label="Enter your question")
answer_box = gr.Textbox(label="Generated Response")
documents_box = gr.Textbox(label="Retrieved Documents")

iface = gr.Interface(
    fn=rag_pipeline,
    inputs=question_box,
    outputs=[answer_box, documents_box],
    title="RAG-Based Question Answering System",
    description="Enter a question and retrieve relevant documents with AI-generated response.",
)

# Launch Gradio app only when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()