import streamlit as st
from transformers import pipeline

# Configure Streamlit page settings
st.set_page_config(
    page_title="Digital Ink",
    layout="centered",
)
# Generation settings passed to the pipeline on every call
generation_args = {
    "max_new_tokens": 1000,     # upper bound on the length of the generated reply
    "return_full_text": False,  # return only the newly generated text, not the prompt
    "num_beams": 5,             # beam search width
    "do_sample": True,          # sample tokens instead of decoding greedily
    "top_k": 60,                # restrict sampling to the 60 most likely tokens
}
# Initialize the model pipeline
chat_pipeline = pipeline("text-generation", model="microsoft/Phi-3-mini-128k-instruct")
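# Note (optional, not part of the original app): Streamlit reruns this script on every
# interaction, so the model above is reloaded each time. Wrapping the load in a helper
# cached with st.cache_resource keeps it in memory across reruns, e.g.:
#
#     @st.cache_resource
#     def load_pipeline():
#         return pipeline("text-generation", model="microsoft/Phi-3-mini-128k-instruct")
#
#     chat_pipeline = load_pipeline()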
# Streamlit app
st.title("Digital Ink")

# Initialize the chat history
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Display the chat history
for message in st.session_state.chat_history:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# System prompt used for every generation request (kept out of the displayed history)
SYSTEM_PROMPT = (
    "You are a helpful assistant named Digital Ink. Your purpose is to provide "
    "creative, engaging, and effective marketing content. You can introduce yourself "
    "as follows: I'm Digital Ink, a marketing content generation model. I'm designed "
    "to assist you in creating engaging and effective marketing content, such as "
    "blog posts, social media posts, and product descriptions."
)

# User input
user_input = st.chat_input("Ask Digital Ink..")
if user_input:
    # Add the user message to the chat history and display it
    st.session_state.chat_history.append({"role": "user", "content": user_input})
    st.chat_message("user").markdown(user_input)

    # Build the prompt: the system message followed by the full conversation so far
    messages = [{"role": "system", "content": SYSTEM_PROMPT}] + st.session_state.chat_history

    # Generate the assistant response
    response = chat_pipeline(messages, **generation_args)[0]["generated_text"]

    # Add the assistant response to the chat history and display it
    st.session_state.chat_history.append({"role": "assistant", "content": response})
    with st.chat_message("assistant"):
        st.markdown(response)
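# To run locally (assuming this file is saved as app.py and streamlit, transformers,
# and a backend such as torch are installed):
#     streamlit run app.py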