import time

import streamlit as st
from langchain_community.llms import Ollama
st.title('*ChatBot clone*')
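# chat model served by a local Ollama instance (assumes the llama3 model is available locally)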
llm = Ollama(model='llama3:latest')
def response_generator(prompt):
    # query the model once, then yield the reply word by word to simulate streaming;
    # '<|eot_id|>' is Llama 3's end-of-turn token, used here as a stop sequence
    response = llm.invoke(prompt, stop=['<|eot_id|>'])
    for word in response.split():
        yield word + " "
        time.sleep(0.05)
# init chat history
if "messages" not in st.session_state:
    st.session_state.messages = []
# display chat history
for message in st.session_state.messages:
    with st.chat_message(message['role']):
        st.markdown(message['content'])
# accept user input
if prompt := st.chat_input("What is up?"):
    # add user message to chat history
    st.session_state.messages.append({'role': 'user', 'content': prompt})
    # display user message
    with st.chat_message('user'):
        st.markdown(prompt)
    # display assistant response, streamed word by word
    with st.chat_message('assistant'):
        response = st.write_stream(response_generator(prompt))
    # add assistant response to chat history
    st.session_state.messages.append({'role': 'assistant', 'content': response})
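# To try this locally (assumptions: an Ollama server is running on its default port and
# the llama3 model has been pulled, e.g. with `ollama pull llama3`), save this file,
# e.g. as app.py, and launch it with `streamlit run app.py`.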