import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load the custom chatbot model from the Hugging Face Hub
model_name = "pp3232133/pp3232133-distilgpt2-wikitext2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
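# Optional: distilgpt2-style tokenizers ship without a pad token, so generate()
# may warn about padding/attention masks. A common workaround (an assumption here,
# not part of the original app) is to reuse the end-of-sequence token as padding:
# tokenizer.pad_token = tokenizer.eos_token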
# Function handling input and output for the Gradio interface
def chatbot_interface(input_text):
    # Encode the user's message, generate a continuation, and decode it back to text
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    chatbot_output = model.generate(input_ids, max_length=100)[0]
    response = tokenizer.decode(chatbot_output, skip_special_tokens=True)
    return response
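
# A minimal sketch of an alternative generation setup, assuming sampled (rather
# than greedy) responses are wanted; these are standard transformers generate()
# arguments, and this variant is not wired into the interface below.
def chatbot_interface_sampling(input_text):
    inputs = tokenizer(input_text, return_tensors="pt")
    output_ids = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=100,
        do_sample=True,
        top_p=0.9,
        temperature=0.7,
        pad_token_id=tokenizer.eos_token_id,
    )[0]
    return tokenizer.decode(output_ids, skip_special_tokens=True)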
# Gradio interface for the chatbot
iface = gr.Interface(
    fn=chatbot_interface,
    inputs="text",
    outputs="text",
    title="Chatbot",
    description="Custom chatbot based on your Hugging Face model. Start typing to chat with the bot.",
    theme="compact"
)
# Launch the interface
iface.launch()