# CTP_CONTEST / app.py
# Author: HassanDataSci — "Update app.py" (commit 848db2f, verified)
# NOTE: Hugging Face file-viewer chrome ("raw / history / blame", byte count)
# was captured with this file; converted to comments so the module parses.
import streamlit as st
from transformers import Conversation, pipeline
@st.cache_resource
def load_model():
    """Build and return the BlenderBot conversational pipeline.

    Decorated with ``st.cache_resource`` so the model is loaded once per
    server process and reused across Streamlit script reruns.
    """
    model_name = "facebook/blenderbot-400M-distill"
    return pipeline("conversational", model=model_name)
# --- Streamlit UI ----------------------------------------------------------

# Initialize the cached model once per process.
generator = load_model()

st.title("Donald Trump Style Text Generator")
st.write("Enter a prompt, and the model will respond in a manner inspired by Donald Trump.")

# Input from the user
user_input = st.text_input("Enter your prompt:")

if user_input:
    # Prefix to encourage Trump-inspired responses
    trump_prompt = f"In the style of Donald Trump: {user_input}"

    # BUG FIX: the "conversational" pipeline expects a Conversation object,
    # not a raw string — passing a plain str raises a ValueError. The
    # pipeline appends its reply to the conversation's generated_responses.
    conversation = generator(Conversation(trump_prompt))

    # Use the newest reply ([-1]) and guard against an empty response list
    # instead of an unchecked [0] index that would crash the app.
    if conversation.generated_responses:
        st.write("Response:", conversation.generated_responses[-1])
    else:
        st.warning("The model did not produce a response. Please try again.")