KiKi-GPT / app.py
from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr
import torch

# Load the DialoGPT model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

def chat_with_kiki_gpt(user_input):
    # Encode the user input and generate the model's reply
    input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors='pt')
    chat_history_ids = model.generate(input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens, i.e. everything after the prompt
    chat_output = tokenizer.decode(chat_history_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True)
    return f"KIKI-GPT: {chat_output}"

# Gradio user interface
interface = gr.Interface(
    fn=chat_with_kiki_gpt,
    inputs=gr.Textbox(lines=5, placeholder="Type your message to KIKI-GPT here..."),
    outputs=gr.Textbox(),
    title="KIKI-GPT",
    description="Welcome to KIKI-GPT - a project on Hugging Face Spaces using Microsoft's DialoGPT. One of the fastest and best performing models for robotics! Created by Keyvan Hardani. For inquiries, contact: [email protected].",
    live=True,
)

interface.launch()