import os
import json
import gradio as gr
from transformers import pipeline

# Step 1: Load JSON Dataset
def load_dataset(folder_path):
    data = []
    for filename in os.listdir(folder_path):
        if filename.endswith(".json"):
            with open(os.path.join(folder_path, filename), "r", encoding="utf-8") as file:
                entries = json.load(file)
                # Each file is assumed to hold a list of entries; wrap a lone object so a
                # single-dict file is not flattened into its keys by extend()
                data.extend(entries if isinstance(entries, list) else [entries])
    return data
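
# Hypothetical example of the assumed dataset layout (e.g. a file dataset/courses.json);
# each entry only needs a "content" field for the lookup in query_chatbot below:
# [
#     {"content": "The MSc Data Science program requires an IELTS score of 6.5 overall."},
#     {"content": "Student visa applicants must show proof of funds for the first year."}
# ]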

# Step 2: Initialize OpenAI or Hugging Face Model
model = pipeline("question-answering", model="deepset/roberta-base-squad2")  # Replace with your preferred model
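# For a single question/context pair the pipeline returns a dict like
# {"score": 0.83, "start": 12, "end": 27, "answer": "6.5"} (values shown are illustrative);
# only the "answer" field is used below.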

# Step 3: Query Handler
def query_chatbot(query, name, email, contact):
    # Load the dataset on every call (simple, but consider caching it for larger datasets)
    dataset = load_dataset("dataset/")

    # Retrieve relevant entries with a naive substring match, then run extractive QA on each
    responses = []
    for entry in dataset:
        context = entry.get("content", "")  # Extract relevant content from the JSON file
        if query.lower() in context.lower():
            response = model(question=query, context=context)
            responses.append(response["answer"])

    # Compile the result; fall back to a default message if no entry matched the query
    if not responses:
        responses = ["Sorry, no matching information was found in the dataset. Please try rephrasing your question."]

    response = (
        f"Hello {name}!\n\n"
        f"Based on your query: '{query}', here are some relevant insights:\n\n"
        + "\n".join(responses[:3])  # Limit to the top 3 answers
    )
    
    # Create a profile (Optional enhancement)
    profile = {
        "name": name,
        "email": email,
        "contact": contact,
        "query": query,
        "responses": responses,
    }

    # Optionally, email this profile or persist it for follow-up (see the save_profile sketch below).
    return response
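
# Optional helper (a minimal sketch, not part of the original flow): append each profile
# to a local JSONL file so enquiries can be reviewed later. The "profiles.jsonl" path is
# an assumption; swap in email delivery or a database as needed.
def save_profile(profile, path="profiles.jsonl"):
    with open(path, "a", encoding="utf-8") as f:
        f.write(json.dumps(profile) + "\n")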

# Step 4: Gradio Interface
def chatbot_ui():
    with gr.Blocks() as app:
        gr.Markdown("# πŸŽ“ Education Consultant Chatbot")
        
        with gr.Row():
            with gr.Column():
                query_input = gr.Textbox(label="Your Query", placeholder="Ask about courses, visas, or programs...")
                name_input = gr.Textbox(label="Name", placeholder="Your Full Name")
                email_input = gr.Textbox(label="Email", placeholder="Your Email Address")
                contact_input = gr.Textbox(label="Contact (Optional)", placeholder="Your Contact Number")
                submit_btn = gr.Button("Submit")
            
            with gr.Column():
                output_text = gr.Textbox(label="Chatbot Response")
        
        submit_btn.click(
            query_chatbot,
            inputs=[query_input, name_input, email_input, contact_input],
            outputs=output_text,
        )

    return app

# Step 5: Launch App
if __name__ == "__main__":
    chatbot = chatbot_ui()
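    # launch() also accepts share=True if a temporary public URL is needed (standard Gradio option)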
    chatbot.launch()