# "Spaces: Sleeping" — Hugging Face Spaces status banner captured when this
# file was scraped from the web UI; kept here as a comment so the file parses.
import gradio as gr
from transformers import (
    AutoModelWithLMHead,
    AutoTokenizer,
    DataCollatorForLanguageModeling,
    TextDataset,
    Trainer,
    TrainingArguments,
    pipeline,
)
# Load the fine-tuned GPT-2-medium "Rachel" checkpoint as a text-generation
# pipeline, paired with the stock gpt2-medium tokenizer.
chef = pipeline(
    'text-generation',
    model="./en_gpt2-medium_rachel_replics/en_gpt2-medium_rachel_replics",
    tokenizer="gpt2-medium",
)
# --- Gradio chat callback ---
def echo(message, history, model):
    """Generate a reply in Rachel's voice for gr.ChatInterface.

    Parameters
    ----------
    message : str
        The user's latest chat message.
    history : list
        Conversation history supplied by gr.ChatInterface (unused here).
    model : str
        Model name selected in the dropdown (additional input). The original
        code branched on this three times with the *same* condition
        ("gpt2-medium") and identical bodies, returning None for any other
        selection; a single path now serves every choice.

    Returns
    -------
    str
        The generated reply with the prompt scaffolding stripped.
    """
    generated = chef(f"<s>NOTFRIEND: {message}\nRACHEL:")[0]['generated_text']
    # Keep only the text between "RACHEL: " and the closing </s> token.
    # (The original sliced an undefined variable `tmp` here — a NameError on
    # every call; it clearly meant the generated text.)  The offset is
    # unchanged: len("RACHEL: ") == len("RACHEL") + 2 == 8.
    start = generated.find("RACHEL: ") + len("RACHEL: ")
    return generated[start:generated.find('</s>')]
title = "Chatbot who speaks like Rachel from Friends"
description = "You have a good opportunity to have a dialog with actress from Friends - Rachel Green"

# The dropdown is wired into ChatInterface via additional_inputs, so its
# current value arrives as the `model` argument of echo().
model = gr.Dropdown(
    ["gpt2", "gpt2-medium", "gpt2-large"],
    label="LLM",
    info="What model do you want to use?",
    value="gpt2-medium",
)

# Build the chat UI; retry/undo/clear buttons are explicitly disabled.
with gr.Blocks() as demo:
    gr.ChatInterface(
        fn=echo,
        title=title,
        description=description,
        additional_inputs=[model],
        retry_btn=None,
        undo_btn=None,
        clear_btn=None,
    )

demo.launch(debug=False, share=True)