llm_test1 / app.py
import os

import streamlit as st
from transformers import pipeline
# Load the token from Hugging Face secrets
HUGGINGFACE_TOKEN = os.environ.get("hf_token")  # Must match the secret name configured for this app
if HUGGINGFACE_TOKEN is None:
    print("Warning: hf_token is not set; loading gated or private models may fail.")  # Debug aid that avoids printing the secret itself
# Set up the text generation pipeline with the token
# (`token` replaces the deprecated `use_auth_token` argument in recent transformers releases)
pipe = pipeline("text-generation", model="mistralai/Mistral-7B-v0.1",
                token=HUGGINGFACE_TOKEN)
# Streamlit application
st.title("Text Generation with Hugging Face")
# User input
user_input = st.text_input("You: ", "Who are you?")
if st.button("Generate Response"):
    if user_input:
        # Generation kwargs such as max_new_tokens can be passed here to control response length
        response = pipe(user_input)
        # The text-generation pipeline returns a list of dicts: [{"generated_text": ...}]
        generated_text = response[0]["generated_text"]
        st.text_area("Bot:", generated_text, height=200)
    else:
        st.warning("Please enter a message.")
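# To try the app outside of a hosted Space (an assumption about how it might be used),
# set the hf_token environment variable and start it with: streamlit run app.py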