Adityadn's picture
Update app.py
5fac3fd verified
raw
history blame
1.07 kB
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the pre-trained LLaMA model and tokenizer
# NOTE(review): "facebook/llama-7b" does not appear to be a public Hugging Face
# Hub repo id (LLaMA checkpoints are gated and published under other
# namespaces, e.g. "huggyllama/llama-7b" or "meta-llama/..."); confirm the
# intended checkpoint — as written, from_pretrained will likely 404 at startup.
# Loading happens at module import time, so the Gradio app only starts once
# both downloads complete.
tokenizer = AutoTokenizer.from_pretrained("facebook/llama-7b")
model = AutoModelForCausalLM.from_pretrained("facebook/llama-7b")
# Function to generate keywords from input text
def generate_keywords(text):
    """Generate ad keywords for *text* with the module-level LLaMA model.

    Parameters
    ----------
    text : str
        Free-form prompt, e.g. "Generate ad keywords for wireless headphones".

    Returns
    -------
    str
        The decoded model output (prompt included, special tokens stripped),
        with surrounding whitespace removed.
    """
    # Tokenize the prompt into a PyTorch tensor of input ids.
    inputs = tokenizer.encode(text, return_tensors="pt")
    # max_length=50 caps the TOTAL sequence (prompt + generated tokens).
    # NOTE(review): top_k / top_p only take effect with do_sample=True; under
    # the default greedy/beam decoding transformers ignores them (and warns).
    # Left as-is to keep output deterministic — confirm intended decoding mode.
    outputs = model.generate(
        inputs,
        max_length=50,
        num_return_sequences=1,
        no_repeat_ngram_size=2,
        top_k=50,
        top_p=0.95,
    )
    # Decode the single returned sequence back to text.
    keywords = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return keywords.strip()
# Gradio interface: one text prompt in, generated keywords out.
# live=True re-runs generation as the user types (heavy for a 7B model).
prompt_box = gr.Textbox(
    label="Enter Prompt",
    placeholder="E.g., Generate ad keywords for wireless headphones",
)
keywords_box = gr.Textbox(label="Generated Keywords")
iface = gr.Interface(
    fn=generate_keywords,
    inputs=prompt_box,
    outputs=keywords_box,
    live=True,
)
# Start the local web server for the demo.
iface.launch()