# llm_test1 / app.py — Streamlit text-generation demo (Hugging Face Spaces).
# Author: sachitksh123; commit a28aa09 ("Update app.py"), ~1.12 kB.
import streamlit as st
import requests
from transformers import pipeline
import urllib3
import os
# Silence the warning produced by the unverified-HTTPS session created below.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# NOTE(review): this session is never actually used — `pipeline()` loads the
# model locally and does not accept a requests session (see below). Disabling
# certificate verification is also unsafe; consider deleting both lines.
session = requests.Session()
session.verify = False  # Disable SSL verification

# Hugging Face access token for the gated Llama 3 weights. `None` when the
# "hf_token" secret is not set; loading a gated model will then fail.
HUGGINGFACE_TOKEN = os.environ.get("hf_token")

# Text-generation pipeline. Fixes vs. the original:
#  * `use_auth_token` is deprecated in transformers — use `token=` instead.
#  * `request_session` is not a valid `pipeline()` argument and raises a
#    TypeError at load time; it has been removed.
pipe = pipeline(
    "text-generation",
    model="meta-llama/Meta-Llama-3-8B",
    token=HUGGINGFACE_TOKEN,
)
# --- Streamlit UI ---
st.title("Text Generation with Hugging Face")

# Single-turn prompt box with a default question.
user_input = st.text_input("You: ", "Who are you?")

if st.button("Generate Response"):
    # Guard against an empty prompt before invoking the model.
    if user_input:
        # Meta-Llama-3-8B is a *base* (non-instruct) model, so send the raw
        # prompt string. Chat-style `messages` input would make
        # `generated_text` a list of message dicts instead of a string,
        # breaking the `st.text_area` call below.
        response = pipe(user_input)
        generated_text = response[0]["generated_text"]
        st.text_area("Bot:", generated_text, height=200)
    else:
        st.warning("Please enter a message.")