XThomasBU committed on
Commit
9d29527
·
verified ·
1 Parent(s): f936dae

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -40
app.py CHANGED
@@ -1,50 +1,58 @@
1
- import asyncio
2
-
3
- import chainlit as cl
4
-
5
- from chain import Chain
6
  import os
7
 
8
- from typing import Dict, Optional
 
 
 
 
 
 
 
9
  import chainlit as cl
10
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
@cl.oauth_callback
def oauth_callback(
    provider_id: str,
    token: str,
    raw_user_data: Dict[str, str],
    default_user: cl.User,
) -> Optional[cl.User]:
    """Authorize every OAuth login.

    Chainlit calls this after the provider authenticates a user; returning
    the default user unchanged means no extra authorization rules are applied.
    """
    return default_user
20
 
21
@cl.on_chat_start
async def start_chat():
    """Greet the user when a new chat session starts."""
    # Debug output: the authenticated session user and the OAuth client id.
    session_user = cl.user_session.get("user")
    print(session_user)
    print(os.getenv("OAUTH_CLIENT_ID"))
    print('HERE-------------------------------------------------')

    # Fresh chain with no parent message; send the opening prompt.
    greeting_chain = Chain(None)
    await greeting_chain.text("I will count to 5. How many concurrent times should I count?")
 
29
 
30
 
31
@cl.on_message
async def on_message(message: str, message_id: str):
    """Count to 5 concurrently, as many times as the user asks (capped at 10)."""
    chain = Chain(message_id)

    # Guard clause: the input must be an integer.
    try:
        count = int(message)
    except ValueError:
        await chain.text_stream("Sorry, that doesn't look like an integer to me.", final=True)
        return

    # Guard clause: keep the amount of concurrent counters bounded.
    if count > 10:
        await chain.text_stream("Whoa, let's try a smaller number. (Max 10.)", final=True)
        return

    await chain.text("Alright, here we go:")
    # Kick off all counters at once and wait until every one has finished.
    await asyncio.gather(
        *(chain.text_stream("1 2 3 4 5", delay=1, name=f"Counter {i + 1}") for i in range(count))
    )
    await chain.text_stream("Okay, I'm done counting now.", final=True)
 
 
 
 
 
 
1
  import os
2
 
3
+ from langchain.llms.huggingface_hub import HuggingFaceHub
4
+ from langchain.prompts import ChatPromptTemplate
5
+ from langchain.schema import StrOutputParser
6
+ from langchain.schema.runnable import Runnable
7
+ from langchain.schema.runnable.config import RunnableConfig
8
+
9
+ from chainlit.playground.config import add_llm_provider
10
+ from chainlit.playground.providers.langchain import LangchainGenericProvider
11
  import chainlit as cl
12
 
13
# Configure the Hugging Face Hub model shared by the app and the playground.
llm = HuggingFaceHub(
    repo_id="google/flan-t5-xxl",
    model_kwargs={"max_length": 500},
    huggingfacehub_api_token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
)

# Register the model with Chainlit's prompt playground.
add_llm_provider(
    LangchainGenericProvider(
        # The provider id must equal the Langchain _llm_type so lookups match.
        id=llm._llm_type,
        # Display name only; shown in the playground UI.
        name="HuggingFaceHub",
        # A correctly configured Langchain LLM instance.
        llm=llm,
        # Set to True only for message-based (chat) models.
        is_chat=False,
    )
)
33
 
 
 
 
 
 
 
 
 
34
 
35
@cl.on_chat_start
async def on_chat_start():
    """Build the question-answering chain and stash it in the user session."""
    # Single-turn prompt: forward the raw user question to the model.
    qa_prompt = ChatPromptTemplate.from_messages([("human", "{question}")])
    chain = qa_prompt | llm | StrOutputParser()
    cl.user_session.set("runnable", chain)
44
 
45
 
46
@cl.on_message
async def on_message(message: cl.Message):
    """Stream the model's answer to the user's message, token by token."""
    chain: Runnable = cl.user_session.get("runnable")

    # Start from an empty message and append tokens as they arrive.
    reply = cl.Message(content="")
    token_stream = chain.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    )
    async for token in token_stream:
        await reply.stream_token(token)

    await reply.send()