### Import Section ###
"""
IMPORTS HERE
"""
# Example Imports (adjust based on actual needs)
import chainlit as cl
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
from typing import cast
### Global Section ###
"""
GLOBAL CODE HERE
"""
# Initialize a language model or chain globally
llm = ChatOpenAI(temperature=0.9)
conversation_chain = ConversationChain(llm=llm)
# Any global variables like API keys, configurations, etc.
# API_KEY = "your_api_key_here"
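# Sketch (assumption, not part of the original template): keep the key out of
# source control by exporting it in the environment; ChatOpenAI falls back to
# the OPENAI_API_KEY variable when no key is passed explicitly.
# import os
# API_KEY = os.environ.get("OPENAI_API_KEY")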
### On Chat Start (Session Start) Section ###
@cl.on_chat_start
async def on_chat_start():
    """ SESSION SPECIFIC CODE HERE """
    # Build the prompt | model | parser pipeline that on_message streams from
    # and keep it in the per-user session under the "runnable" key.
    prompt = ChatPromptTemplate.from_template("{question}")
    cl.user_session.set("runnable", prompt | llm | StrOutputParser())
    await cl.Message(content="Welcome! How can I assist you today?").send()
### Rename Chains ###
@cl.author_rename
def rename(orig_author: str):
    # Map internal author names to friendlier display names in the UI.
    if orig_author == "user":
        return "You"
    elif orig_author == "system":
        return "Assistant"
    return orig_author
### On Message Section ###
@cl.on_message
async def on_message(message: cl.Message):
    # Retrieve the pipeline stored at session start and stream its output token by token.
    runnable = cast(Runnable, cl.user_session.get("runnable"))
    msg = cl.Message(content="")
    async for chunk in runnable.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk)
    await msg.send()
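# To try the template locally (assuming this file is saved as app.py), run:
#   chainlit run app.py -w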