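# app.py: a minimal Streamlit chatbot UI backed by a local llama3 model served through Ollama.
# Run with: streamlit run app.py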
import streamlit as st
from langchain_community.llms import Ollama
import time
st.title('*ChatBot clone*')
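# assumes a local Ollama server is running and the model has been pulled (`ollama pull llama3`)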
llm = Ollama(model='llama3:latest')
def response_generator(prompt):
    # query the model once, then yield the reply word by word to mimic streaming
    response = llm.invoke(prompt, stop=['<|eot_id|>'])
    for word in response.split():
        yield word + " "
        time.sleep(0.05)
# init chat history
if "messages" not in st.session_state:
st.session_state.messages = []
# display chat history
for message in st.session_state.messages:
    with st.chat_message(message['role']):
        st.markdown(message['content'])
# accept user input
if prompt := st.chat_input("What is up?"):
    # add user message to chat history
    st.session_state.messages.append({'role': 'user', 'content': prompt})
    # display user message
    with st.chat_message('user'):
        st.markdown(prompt)
    # stream and display the assistant response
    with st.chat_message('assistant'):
        response = st.write_stream(response_generator(prompt))
    # add assistant response to chat history
    st.session_state.messages.append({'role': 'assistant', 'content': response})