import streamlit as st
from transformers import Conversation, pipeline

# Page configuration must be the first Streamlit call in the script
st.set_page_config(page_title="Conversational Model Demo", page_icon="🤖")
st.header("Conversational Model Demo")

# Load the conversational pipeline once and cache it across reruns.
# Note: the "conversational" task requires a transformers release that
# still ships ConversationalPipeline (it was deprecated in recent versions).
@st.cache_resource
def load_pipeline():
    return pipeline("conversational")

conversational_pipeline = load_pipeline()

# Input for user message
user_message = st.text_input("You:", "")

if st.button("Send") and user_message:
    # The conversational pipeline expects a Conversation object,
    # not a raw list of role/content dicts
    conversation = Conversation(user_message)

    # Run the model; it appends its reply to the Conversation,
    # and the latest reply is available via generated_responses
    conversation = conversational_pipeline(conversation)
    model_response = conversation.generated_responses[-1]

    # Display the model's response
    st.text_area("Model:", model_response, height=100)
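
# To try this locally (assuming the script is saved as app.py; the filename
# is arbitrary), launch it with Streamlit's CLI:
#   streamlit run app.py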