from dotenv import load_dotenv
from openai import OpenAI
from gradio.chat_interface import ChatInterface
from gradio.components import Chatbot, Textbox
from gradio.themes import colors
from gradio.themes.soft import Soft

from RAG_class import RAG_1177

load_dotenv()
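# RAG_1177 (from RAG_class.py) provides the retrieval layer used below:
# system_prompt(), relevant_question(), rag_user_prompt(), example_questions and get_description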
rag = RAG_1177()

# Gradio UI components: input textbox, chat window, and a red-accented Soft theme
textbox = Textbox(placeholder="Skriv din fråga här...", scale=4)
chatbot = Chatbot(
    placeholder="<strong>Tips:</strong> Var så specifik som möjligt och använd gärna exempelfrågorna nedanför.",
    scale=3,
    height=250,
    show_copy_button=True,
    label="1177 chatbot",
)
new_primary_color = colors.red
new_secondary_color = colors.red
my_custom_theme = Soft(
    primary_hue=new_primary_color,
    secondary_hue=new_secondary_color
)

def predict(message, history):
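    """Build the OpenAI message list from the Gradio chat history and stream back the model's answer."""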
    
    # start the OpenAI message list with the system prompt from the RAG layer
    history_openai_format = [{"role": "system", "content": rag.system_prompt()}]

    # convert Gradio's (user, assistant) history pairs into OpenAI chat messages
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})

    # relevance gate: off-topic questions get a refusal prompt instead of retrieved context
    if rag.relevant_question(message) == "NEJ":
        user_prompt = "Denna fråga är helt irrelevant och håller sig inte till ämnet. Ge inga referenser."
    else:
        # rag_user_prompt builds the grounded prompt; the 3 appears to control how many passages are retrieved
        user_prompt = rag.rag_user_prompt(message, 3)

    history_openai_format.append({"role": "user", "content": user_prompt})
 
    # OpenAI() picks up OPENAI_API_KEY from the environment (loaded by load_dotenv above)
    client = OpenAI()
    response = client.chat.completions.create(
        model='gpt-3.5-turbo',
        messages=history_openai_format,
        temperature=0.4,
        stream=True
    )
    # stream the reply: yield the accumulated text so Gradio updates the chat bubble incrementally
    partial_message = ""
    for chunk in response:
        if chunk.choices[0].delta.content is not None:
            partial_message += chunk.choices[0].delta.content
            yield partial_message

    # emit the complete (possibly empty) answer one final time
    yield partial_message

def main():
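    """Launch the Gradio ChatInterface with Swedish labels, example questions and the custom theme."""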
    ChatInterface(
        predict,
        textbox=textbox,
        chatbot=chatbot,
        title="Välkommen till 1177 AI-chatbot!🔍",
        theme=my_custom_theme,
        submit_btn="Skicka",
        retry_btn="🔄 Försök igen",
        undo_btn="↩️ Ångra",
        clear_btn="🗑️ Rensa",
        examples=rag.example_questions,
        cache_examples=False,
        description=rag.get_description,
    ).launch(share=False)

if __name__ == "__main__":
    main()