from langchain_openai.chat_models import ChatOpenAI
from langchain.schema import HumanMessage, SystemMessage, AIMessage
import streamlit as st
from dotenv import load_dotenv
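
# Read OPENAI_API_KEY (and any other settings) from a local .env file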
load_dotenv()

st.set_page_config(
    page_title="My Chat GPT",
    page_icon="🧊",
    layout="wide",
    initial_sidebar_state="expanded",
    menu_items={
        'Get Help': 'https://www.extremelycoolapp.com/help',
        'Report a bug': "https://www.extremelycoolapp.com/bug",
        'About': "# This is a header. This is an *extremely* cool app!"
    }
)

st.header('My Chat GPT')

# gpt-3.5-turbo with a small max_tokens budget so replies stay short
chatModel = ChatOpenAI(temperature=0.8, model="gpt-3.5-turbo", max_tokens=30)
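
# Keep the chat history in st.session_state so it survives Streamlit reruns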
if "sessionMessages" not in st.session_state:
print("No Session message")
st.session_state.sessionMessages = [
SystemMessage("You are a comedian who answers to query in comedy")
]
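
# Append the user's message, ask the model, then store and return its reply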
def load_answers(question):
    st.session_state.sessionMessages.append(HumanMessage(content=question))
    result = chatModel.invoke(st.session_state.sessionMessages)
    st.session_state.sessionMessages.append(AIMessage(content=result.content))
    return result.content

def get_text():
    text = st.text_input("You: ", key="input")
    return text
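
# Wire up the UI; Streamlit reruns this script on every interaction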
user_input = get_text()
submit = st.button("Submit")

if submit:
    response = load_answers(user_input)
    st.subheader("Answer:")
    st.write(response)