# llm_test1 / app.py
# Source: Hugging Face Space by sachitksh123 ("Create app.py", commit 76586ef, 943 bytes)
import streamlit as st
import requests
from transformers import pipeline
import urllib3
# --- SSL workaround -------------------------------------------------------
# Suppress urllib3 warnings for unverified HTTPS requests.
# NOTE(security): disabling certificate verification exposes traffic to
# man-in-the-middle attacks; only acceptable behind a trusted proxy — confirm
# this is actually required in the deployment environment.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Create a custom session with verification disabled.
# NOTE(review): `transformers.pipeline` does not accept a `request_session`
# argument, so this session was never used by the pipeline; it is kept only
# for any direct `requests` calls.
session = requests.Session()
session.verify = False  # Disable SSL verification


@st.cache_resource(show_spinner="Loading model...")
def _load_pipeline():
    """Build the text-generation pipeline once per Streamlit server process.

    `st.cache_resource` prevents the ~8B-parameter model from being reloaded
    on every widget interaction/rerun. `token=True` replaces the deprecated
    `use_auth_token=True` and reads the locally stored Hugging Face token.
    """
    return pipeline(
        "text-generation",
        model="meta-llama/Llama-3.1-8B-Instruct",
        token=True,
    )


# Set up the text generation pipeline (cached across reruns)
pipe = _load_pipeline()
# --- Streamlit application -------------------------------------------------
st.title("Text Generation with Hugging Face")

# User input (defaults to a sample prompt)
user_input = st.text_input("You: ", "Who are you?")

if st.button("Generate Response"):
    if user_input:
        # Chat-style input: a list of {"role", "content"} messages.
        messages = [{"role": "user", "content": user_input}]
        # Without `max_new_tokens` the pipeline's default generation length
        # truncates replies after a few tokens.
        response = pipe(messages, max_new_tokens=256)
        generated_text = response[0]['generated_text']
        # For chat input, `generated_text` is the whole conversation (a list
        # of role/content dicts), not a string — the original code passed that
        # list straight to st.text_area. Extract the assistant's final reply.
        if isinstance(generated_text, list):
            generated_text = generated_text[-1]["content"]
        st.text_area("Bot:", generated_text, height=200)
    else:
        st.warning("Please enter a message.")