import html
import json
import os

import gradio as gr
from openai import OpenAI

# Groq exposes an OpenAI-compatible API, so the official OpenAI client
# (openai>=1.0) works against it with a custom base URL. The legacy
# module-level openai.api_key / openai.api_base setup was removed in
# openai 1.0. Requires the GROQ_API_KEY environment variable to be set.
client = OpenAI(
    api_key=os.getenv("GROQ_API_KEY"),
    base_url="https://api.groq.com/openai/v1",
)

# File to store conversation history
CONVERSATION_FILE = "conversation_history.json"
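# History is persisted as a JSON list of [user_message, bot_response] pairs.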

# Function to load conversation history
def load_history():
    if not os.path.exists(CONVERSATION_FILE):
        # Create the file with an empty list as default content
        with open(CONVERSATION_FILE, "w") as file:
            json.dump([], file)
    try:
        with open(CONVERSATION_FILE, "r") as file:
            return json.load(file)
    except json.JSONDecodeError:
        return []

# Function to save conversation history
def save_history(history):
    try:
        with open(CONVERSATION_FILE, "w") as file:
            json.dump(history, file, indent=4)
    except Exception as e:
        print(f"Error saving history: {e}")

# Function to clear conversation history
def clear_conversation_history():
    try:
        with open(CONVERSATION_FILE, "w") as file:
            json.dump([], file)
        return "Conversation history cleared successfully.", ""
    except Exception as e:
        return f"Error clearing history: {e}", ""

# Function to get a response from the LLM
def get_groq_response(message, history=None):
    history = history or []  # avoid a shared mutable default argument
    try:
        messages = (
            [{"role": "system", "content": "Give precise answers."}]
            + history
            + [{"role": "user", "content": message}]
        )
        response = client.chat.completions.create(
            model="llama-3.1-70b-versatile",
            messages=messages,
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"Error: {e}"

# Chatbot function
def chatbot(user_input, history):
    # Load conversation history
    conversation_history = history or load_history()

    # Format history for the LLM: each stored (user, bot) pair becomes a
    # user message followed by the matching assistant message, in order.
    formatted_history = []
    for user_msg, bot_msg in conversation_history:
        formatted_history.append({"role": "user", "content": user_msg})
        formatted_history.append({"role": "assistant", "content": bot_msg})

    # Get bot response
    bot_response = get_groq_response(user_input, formatted_history)

    # Update history with the new exchange
    conversation_history.append((user_input, bot_response))

    # Save the updated history
    save_history(conversation_history)

    # Format for HTML display, escaping the text so markup inside a
    # message cannot break the page
    display_html = "<br>".join(
        f"<div><b>User:</b> {html.escape(user)}</div><div><b>Bot:</b> {html.escape(bot)}</div><br>"
        for user, bot in conversation_history
    )

    return conversation_history, display_html, ""  # Clear the user input field

# Gradio Interface
with gr.Blocks() as demo:
    gr.Markdown("# Chatbot with Enhanced Formatting and Selectable Chat History")
    
    chat_display = gr.HTML(label="Conversation")
    user_input = gr.Textbox(label="Type your message here:")
    clear_button = gr.Button("Clear History")
    system_message = gr.Textbox(label="System Message", interactive=False)

    history_state = gr.State(load_history())
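    # Note: gr.State is seeded from disk once, when the app is built; each
    # browser session then works with its own copy of the history.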

    # Chat interaction
    user_input.submit(chatbot, inputs=[user_input, history_state], outputs=[history_state, chat_display, user_input])
    
    # Clear history button action
    clear_button.click(clear_conversation_history, inputs=None, outputs=[system_message, chat_display])
    clear_button.click(lambda: [], outputs=history_state)  # Reset the history state

# Launch the app
if __name__ == "__main__":
    demo.launch()
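
# To run locally (assuming the file is saved as app.py, with the `gradio`
# and `openai` packages installed and GROQ_API_KEY exported):
#   python app.py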