import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
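
# Load the GPT-2 based book/article recommendation model and its tokenizer once at startup.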
model_name = "AventIQ-AI/gpt2-book-article-recommendation"
device = "cuda" if torch.cuda.is_available() else "cpu"
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_name)


def recommend_titles(alphabet, num_recommendations=5):
    """Generate book/article title recommendations based on an input letter."""
    input_text = alphabet.strip()
    if not input_text:
        return "⚠️ Please enter a valid letter."

    input_ids = tokenizer.encode(input_text, return_tensors="pt").to(device)

    with torch.no_grad():
        outputs = model.generate(
            input_ids,
            max_length=15,
            num_return_sequences=int(num_recommendations),  # the slider may pass a float
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token; reuse EOS to avoid the generate() warning
        )

    # Decode each sampled sequence and return one title per line for the output Textbox.
    return "\n".join(tokenizer.decode(output, skip_special_tokens=True) for output in outputs)


example_inputs = ["A", "B", "C", "D", "E"]


with gr.Blocks() as demo:
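    # UI layout: a letter textbox and a count slider, a trigger button, an output box, and example shortcuts.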
    gr.Markdown("## 📚 AI-Powered Book & Article Recommendation")
    gr.Markdown("Enter a letter, and the AI will suggest relevant book or article titles!")

    with gr.Row():
        alphabet_input = gr.Textbox(label="🔤 Enter a Letter:", placeholder="Example: A")
        num_recommendations = gr.Slider(minimum=1, maximum=10, value=5, step=1, label="Number of Recommendations")

    recommend_button = gr.Button("🔍 Get Recommendations")
    output_text = gr.Textbox(label="📖 Recommended Titles:", lines=6)

    gr.Markdown("### 🎯 Example Inputs")
    example_buttons = [gr.Button(example) for example in example_inputs]
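
    # Each example button fills the textbox with its own label; binding btn.value as a
    # default argument captures the letter at lambda-definition time.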
    for btn in example_buttons:
        btn.click(fn=lambda letter=btn.value: letter, outputs=alphabet_input)

    recommend_button.click(recommend_titles, inputs=[alphabet_input, num_recommendations], outputs=output_text)

demo.launch()