debisoft committed
Commit 89a989f · 1 Parent(s): 009b166

Combine chat

Files changed (1)
  1. main.py +78 -2
main.py CHANGED
@@ -1,10 +1,20 @@
 from fasthtml.common import *
+from openai import OpenAI  # openai==1.2.0
+from dotenv import load_dotenv, find_dotenv
+_ = load_dotenv(find_dotenv())
 
-app = FastHTML()
+upstage_token = os.getenv('UPSTAGE_TOKEN')
+
+# Set up the app, including daisyui and tailwind for the chat component
+hdrs = (picolink, Script(src="https://cdn.tailwindcss.com"),
+        Link(rel="stylesheet", href="https://cdn.jsdelivr.net/npm/daisyui@4.11.1/dist/full.min.css"))
+app = FastHTML(hdrs=hdrs, cls="p-4 max-w-lg mx-auto")
+
+#app = FastHTML()
 rt = app.route
 
 style="""
-#mapid { height: 640px; }
+#mapid { height: 480px; }
 """
 
 js = """
@@ -31,4 +41,70 @@ def get():
         Script(js)
     )
 
+
+
+
+
+client = OpenAI(
+    api_key=upstage_token,
+    base_url="https://api.upstage.ai/v1/solar"
+)
+
+sp = "You are a helpful and concise assistant."
+
+def get_completion(prompt, model="solar-1-mini-chat"):
+    messages = [{"role": "user", "content": prompt}]
+    response = client.chat.completions.create(
+        model=model,
+        messages=messages,
+        temperature=0,  # this is the degree of randomness of the model's output
+    )
+    return response.choices[0].message.content
+
+def get_completion_from_messages(messages, model="solar-1-mini-chat", temperature=0):
+    response = client.chat.completions.create(
+        model=model,
+        messages=messages,
+        temperature=temperature,  # this is the degree of randomness of the model's output
+    )
+    return response.choices[0].message.content
+
+# Chat message component (renders a chat bubble)
+def ChatMessage(msg, user):
+    bubble_class = "chat-bubble-primary" if user else "chat-bubble-secondary"
+    chat_class = "chat-end" if user else "chat-start"
+    return Div(cls=f"chat {chat_class}")(
+        Div('user' if user else 'assistant', cls="chat-header"),
+        Div(msg, cls=f"chat-bubble {bubble_class}"),
+        Hidden(msg, name="messages")
+    )
+
+# The input field for the user message. Also used to clear the
+# input field after sending a message via an OOB swap
+def ChatInput():
+    return Input(name='msg', id='msg-input', placeholder="Type a message",
+                 cls="input input-bordered w-full", hx_swap_oob='true')
+
+# The main screen
+@app.get
+def index():
+    page = Form(hx_post=send, hx_target="#chatlist", hx_swap="beforeend")(
+        Div(id="chatlist", cls="chat-box h-[73vh] overflow-y-auto"),
+        Div(cls="flex space-x-2 mt-2")(
+            Group(ChatInput(), Button("Send", cls="btn btn-primary"))
+        )
+    )
+    return Titled('Chatbot Demo', page)
+
+# Handle the form submission
+@app.post
+def send(msg: str, messages: list[str] = None):
+    if not messages: messages = []
+    messages.append(msg.rstrip())
+    print(messages[0])
+    r = get_completion(messages[0])  # get response from chat model
+    return (ChatMessage(msg, True),          # The user's message
+            ChatMessage(r.rstrip(), False),  # The chatbot's response
+            ChatInput())                     # And clear the input field via an OOB swap
+
 serve()
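
The commit wires only get_completion into the send handler; get_completion_from_messages and the system prompt sp are defined but not yet used. Below is a minimal usage sketch of both helpers, assuming main.py is importable and UPSTAGE_TOKEN is available to load_dotenv/os.getenv; the prompts are illustrative and not part of the commit.

# Hypothetical usage sketch (not part of this commit)
from main import get_completion, get_completion_from_messages, sp

# Single turn: get_completion wraps the prompt in a one-message conversation.
print(get_completion("Summarize what the Solar chat endpoint does."))

# Multi turn: get_completion_from_messages accepts a full OpenAI-style message list,
# so the sp system prompt and earlier turns can be included explicitly.
history = [
    {"role": "system", "content": sp},
    {"role": "user", "content": "Hi, who are you?"},
]
print(get_completion_from_messages(history, temperature=0))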
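
Because every ChatMessage also emits Hidden(msg, name="messages"), each form post carries the running transcript, which send receives as the messages list, while the extra ChatInput() returned from send clears the text box via an out-of-band swap. As committed, send forwards only messages[0] to get_completion; the sketch below is a hypothetical variant (not what this commit does) showing how the unused helpers could consume the whole history.

# Hypothetical variant of send (not part of this commit): build an alternating
# user/assistant transcript (the hidden fields hold bubbles in order, user first)
# and pass it to get_completion_from_messages together with the sp system prompt.
@app.post
def send_full_history(msg: str, messages: list[str] = None):
    if not messages: messages = []
    messages.append(msg.rstrip())
    convo = [{"role": "system", "content": sp}]
    for i, m in enumerate(messages):
        convo.append({"role": "user" if i % 2 == 0 else "assistant", "content": m})
    r = get_completion_from_messages(convo)
    return (ChatMessage(msg, True),          # the user's message
            ChatMessage(r.rstrip(), False),  # the model's reply
            ChatInput())                     # clear the input via OOB swap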