terry-li-hm committed
Commit 1c88374 · 1 Parent(s): 35a5de6

Migrate files

Files changed (4)
  1. app.py +111 -67
  2. public/favicon.svg +1 -0
  3. public/logo_dark.svg +1 -0
  4. public/logo_light.svg +1 -0
app.py CHANGED
@@ -1,74 +1,118 @@
- # You can find this code for Chainlit python streaming here (https://docs.chainlit.io/concepts/streaming/python)
-
- # OpenAI Chat completion
-
- import openai #importing openai for API usage
- import chainlit as cl #importing chainlit for our app
- from chainlit.input_widget import Select, Switch, Slider #importing chainlit settings selection tools
- from chainlit.prompt import Prompt, PromptMessage #importing prompt tools
- from chainlit.playground.providers import ChatOpenAI #importing ChatOpenAI tools
-
- # You only need the api key inserted here if it's not in your .env file
- #openai.api_key = "YOUR_API_KEY"
-
- # ChatOpenAI Templates
- system_template = """You are a helpful assistant who always speaks in a pleasant tone!
- """
-
- user_template = """{input}
- Think through your response step by step.
- """
-
- @cl.on_chat_start # marks a function that will be executed at the start of a user session
- async def start_chat():
-     settings = {
-         "model": "gpt-3.5-turbo",
-         "temperature": 0,
-         "max_tokens": 500,
-         "top_p": 1,
-         "frequency_penalty": 0,
-         "presence_penalty": 0,
-     }
-
-     cl.user_session.set("settings", settings)
-
- @cl.on_message # marks a function that should be run each time the chatbot receives a message from a user
- async def main(message: str):
-
-     settings = cl.user_session.get("settings")
-
-     prompt = Prompt(
-         provider=ChatOpenAI.id,
-         messages=[
-             PromptMessage(
-                 role="system",
-                 template=system_template,
-                 formatted=system_template,
              ),
-             PromptMessage(
-                 role="user",
-                 template=user_template,
-                 formatted=user_template.format(input=message),
-             )
-         ],
-         inputs = {"input" : message},
-         settings=settings
    )

-     print([m.to_openai() for m in prompt.messages])

-     msg = cl.Message(content="")

-     # Call OpenAI
-     async for stream_resp in await openai.ChatCompletion.acreate(
-         messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
-     ):
-         token = stream_resp.choices[0]["delta"].get("content", "")
-         await msg.stream_token(token)

-     # Update the prompt object with the completion
-     prompt.completion = msg.content
-     msg.prompt = prompt

-     # Send and close the message stream
-     await msg.send()
+ import os
+
+ import chainlit as cl
+ import openai
+ from chainlit.input_widget import Select, Slider, Switch
+ from langchain.chat_models import ChatOpenAI
+ from llama_index import (
+     LLMPredictor,
+     ServiceContext,
+     StorageContext,
+     TrafilaturaWebReader,
+     VectorStoreIndex,
+     load_index_from_storage,
+ )
+ from llama_index.callbacks.base import CallbackManager
+ from llama_index.llms import ChatMessage, HuggingFaceLLM, MessageRole, OpenAI
+
+
+ def get_api_key():
+     api_key = os.getenv("OPENAI_API_KEY")
+     if api_key is None:
+         print("OPENAI_API_KEY missing from environment variables")
+         api_key = input("Please enter your OPENAI_API_KEY: ")
+     return api_key
+
+
+ openai.api_key = get_api_key()
+
+
+ def load_index():
+     try:
+         storage_context = StorageContext.from_defaults(persist_dir="./storage")
+         index = load_index_from_storage(storage_context)
+     except FileNotFoundError:
+         print("Storage file not found. Loading from web.")
+         documents = TrafilaturaWebReader().load_data(["https://bit.ly/45BncJA"])
+         index = VectorStoreIndex.from_documents(documents)
+         index.storage_context.persist()
+     return index
+
+
+ index = load_index()
+
+ welcome_msg = (
+     "Hi there! I’m your China Life chatbot, specialising in answering "
+     "[frequently asked questions](https://bit.ly/45BncJA). "
+     "How may I assist you today? "
+     "Feel free to ask questions like, "
+     "“Is there any action required after receiving the policy?” or "
+     "“Can I settle using a demand draft?”"
+ )
+
+
+ @cl.on_chat_start
+ async def start():
+     chat_profile = cl.user_session.get("chat_profile")
+     msg = cl.Message(content="")
+     for token in list(welcome_msg):
+         await cl.sleep(0.01)
+         await msg.stream_token(token)
+
+     await msg.send()
+
+     settings = await cl.ChatSettings(
+         [
+             Select(
+                 id="Model",
+                 label="Model",
+                 values=["gpt-3.5-turbo", "gpt-4"],
+                 initial_index=1,
              ),
+             Slider(
+                 id="Temperature",
+                 label="Temperature",
+                 initial=0,
+                 min=0,
+                 max=2,
+                 step=0.1,
+             ),
+         ]
+     ).send()
+     await setup_query_engine(settings)
+
+
+ @cl.on_settings_update
+ async def setup_query_engine(settings):
+     print("on_settings_update", settings)
+
+     llm = OpenAI(model=settings["Model"], temperature=settings["Temperature"])
+
+     service_context = ServiceContext.from_defaults(
+         llm=llm, callback_manager=CallbackManager([cl.LlamaIndexCallbackHandler()])
    )

+     query_engine = index.as_query_engine(
+         service_context=service_context,
+         streaming=True,
+     )

+     cl.user_session.set("query_engine", query_engine)


+ @cl.on_message
+ async def main(message: cl.Message):
+     query_engine = cl.user_session.get("query_engine")

+     if query_engine is None:
+         await start()
+         query_engine = cl.user_session.get("query_engine")
+
+     if query_engine:
+         query_result = await cl.make_async(query_engine.query)(message.content)
+         response_message = cl.Message(content=query_result.response_txt or "")
+
+         for token in query_result.response_gen:
+             await response_message.stream_token(token=token)
+
+         await response_message.send()
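
The rewritten app.py lazily builds a LlamaIndex vector index over the linked FAQ page and persists it under ./storage on first run. A minimal sketch of pre-building that cache ahead of deployment, reusing only the calls already present in load_index(); the script name and the explicit persist_dir are assumptions, not part of this commit:

```python
# build_index.py (hypothetical one-off script): warm the ./storage cache that
# load_index() in app.py expects. Requires OPENAI_API_KEY, since
# VectorStoreIndex.from_documents uses OpenAI embeddings by default.
from llama_index import TrafilaturaWebReader, VectorStoreIndex

documents = TrafilaturaWebReader().load_data(["https://bit.ly/45BncJA"])
index = VectorStoreIndex.from_documents(documents)
index.storage_context.persist(persist_dir="./storage")  # same dir StorageContext.from_defaults reads
```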
public/favicon.svg ADDED
public/logo_dark.svg ADDED
public/logo_light.svg ADDED
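
For a quick check of the streaming path that the @cl.on_message handler relies on, the query engine can also be exercised outside Chainlit. A sketch assuming `index` has been loaded as in app.py and OPENAI_API_KEY is set; the sample question is taken from the welcome message:

```python
# Hypothetical REPL snippet: stream a response token by token, consuming the
# same generator (response_gen) that app.py forwards to cl.Message.stream_token.
query_engine = index.as_query_engine(streaming=True)
streaming_response = query_engine.query("Can I settle using a demand draft?")
for token in streaming_response.response_gen:
    print(token, end="", flush=True)
```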