# am_con/app.py
import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from threading import Thread
# Model Initialization
model_id = "rasyosef/Llama-3.2-180M-Amharic-Instruct"
st.title("Llama 3.2 180M Amharic Chatbot Demo")
st.write("""
This chatbot was created using [Llama-3.2-180M-Amharic-Instruct](https://huggingface.co/rasyosef/Llama-3.2-180M-Amharic-Instruct),
a fine-tuned version of the 180-million-parameter Llama 3.2 Amharic transformer model.
""")
# Load the tokenizer and model
@st.cache_resource
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id)
    llama_pipeline = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        pad_token_id=tokenizer.pad_token_id,
        eos_token_id=tokenizer.eos_token_id
    )
    return tokenizer, llama_pipeline
tokenizer, llama_pipeline = load_model()
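# Note: Instruct models are usually fine-tuned on a chat template, so wrapping the raw user
# text with tokenizer.apply_chat_template before generation typically matches the training
# format better than passing the bare string. A minimal sketch, assuming the tokenizer ships
# a chat template; the helper name and single-turn message layout are illustrative and are
# not wired into the UI below:
def build_chat_prompt(user_message):
    messages = [{"role": "user", "content": user_message}]
    return tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)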
# Generate text
def generate_response(prompt, max_new_tokens):
    response = llama_pipeline(
        prompt,
        max_new_tokens=max_new_tokens,
        repetition_penalty=1.15,
        return_full_text=False  # return only the newly generated text, not the echoed prompt
    )
    return response[0]['generated_text']
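# The otherwise-unused Thread import above suggests token streaming was intended. A hedged
# sketch of how that could look with transformers' TextIteratorStreamer; the function name
# and wiring are assumptions, and the non-streaming generate_response above is what the UI
# actually calls:
from transformers import TextIteratorStreamer

def generate_response_stream(prompt, max_new_tokens):
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    thread = Thread(
        target=llama_pipeline,
        kwargs=dict(
            text_inputs=prompt,
            max_new_tokens=max_new_tokens,
            repetition_penalty=1.15,
            streamer=streamer,
        ),
    )
    thread.start()
    # Yield partial text as it is produced; st.write_stream can consume this generator.
    for new_text in streamer:
        yield new_text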
# Sidebar: Configuration
st.sidebar.header("Chatbot Configuration")
max_tokens = st.sidebar.slider("Maximum new tokens", min_value=8, max_value=256, value=64, help="Larger values result in longer responses.")
# Examples
examples = [
    "ሰላም፣ እንዴት ነህ?",  # Hello, how are you?
    "የኢትዮጵያ ዋና ከተማ ስም ምንድን ነው?",  # What is the name of Ethiopia's capital city?
    "የኢትዮጵያ የመጨረሻው ንጉስ ማን ነበሩ?",  # Who was the last king of Ethiopia?
    "የአማርኛ ግጥም ፃፍልኝ",  # Write me an Amharic poem
    "ተረት ንገረኝ\n\nጅብና አንበሳ",  # Tell me a story: the hyena and the lion
    "አንድ አስቂኝ ቀልድ ንገረኝ",  # Tell me a funny joke
    "የፈረንሳይ ዋና ከተማ ስም ምንድን ነው?",  # What is the name of France's capital city?
    "አሁን የአሜሪካ ፕሬዚዳንት ማን ነው?",  # Who is the current president of the United States?
]
st.subheader("Chat with the Amharic Chatbot")
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
# Example selector
example = st.selectbox("Choose an example:", ["Type your own message"] + examples)
# User Input
user_input = st.text_input("Your message:", value=example if example != "Type your own message" else "", placeholder="Type your message here...")
if st.button("Send"):
    if user_input:
        # Generate response
        with st.spinner("Generating response..."):
            response = generate_response(user_input, max_tokens)
        st.session_state.chat_history.append((user_input, response))
# Display Chat History
st.write("### Chat History")
for i, (user_msg, bot_response) in enumerate(st.session_state.chat_history):
st.write(f"**User {i+1}:** {user_msg}")
st.write(f"**Bot:** {bot_response}")