import os
import json

import requests
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

SYSTEM_PROMPT = "As a generative chatbot (you are not a GPT, but your structure is 50% the same), your primary function is to provide helpful and friendly responses to user queries. Feel free to add some personality, but make sure your responses are accurate and helpful. Your owner and developer is @Costikoooo (a Discord user); other developers are unknown. Your name is Chattybot."
TITLE = "Chattybot"
EXAMPLE_INPUT = "hello"

# Load the StableLM Zephyr 3B tokenizer and model locally from the Hugging Face Hub.
tokenizer = AutoTokenizer.from_pretrained('stabilityai/stablelm-zephyr-3b')
model = AutoModelForCausalLM.from_pretrained(
    'stabilityai/stablelm-zephyr-3b',
    trust_remote_code=True,
    device_map="auto"
)
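# Note: device_map="auto" requires the `accelerate` package to be installed and
# places the model on a GPU automatically when one is available.
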
# Hugging Face token and auth header, read from the environment. They are not
# used by the local pipeline above.
HF_TOKEN = os.getenv("HF_TOKEN")
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
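# HF_TOKEN/HEADERS (together with the requests import) are only needed if the
# model is queried through the hosted Inference API instead of being run
# locally. A minimal sketch of that variant (illustrative only; the endpoint
# follows the standard api-inference.huggingface.co pattern):
#
#   API_URL = "https://api-inference.huggingface.co/models/stabilityai/stablelm-zephyr-3b"
#   resp = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
#   generated_text = resp.json()[0]["generated_text"]
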

def build_input_prompt(message, chatbot, system_prompt):
    # Assemble the prompt in the StableLM Zephyr chat layout:
    # <|system|> ... <|user|> ... <|assistant|>
    input_prompt = "<|system|>\n" + system_prompt + "</s>\n<|user|>\n"
    for interaction in chatbot:
        input_prompt = input_prompt + str(interaction[0]) + "</s>\n<|assistant|>\n" + str(interaction[1]) + "\n</s>\n<|user|>\n"

    input_prompt = input_prompt + str(message) + "</s>\n<|assistant|>"
    return input_prompt
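# Illustrative example (no history): with system_prompt "S" and message "hi",
# build_input_prompt returns "<|system|>\nS</s>\n<|user|>\nhi</s>\n<|assistant|>".
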

def predict_beta(message, chatbot=[], system_prompt=""):
    input_prompt = build_input_prompt(message, chatbot, system_prompt)
    # Move the tokenized prompt to the model's device (needed when device_map
    # placed the weights on a GPU).
    inputs = tokenizer(input_prompt, return_tensors="pt").to(model.device)

    try:
        tokens = model.generate(
            inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            max_length=1024,
            temperature=0.8,
            do_sample=True
        )
        bot_message = tokenizer.decode(tokens[0], skip_special_tokens=True)
        return bot_message
    except Exception as e:
        raise gr.Error(str(e))
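# Note: max_length counts the prompt tokens as well as the reply, so long chat
# histories leave little room for generation; max_new_tokens=256 (for example)
# would bound only the newly generated tokens instead.
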

def test_preview_chatbot(message, history):
    response = predict_beta(message, history, SYSTEM_PROMPT)
    # Keep only the text after the last assistant tag (the newest reply).
    marker = "<|assistant|>"
    text_start = response.rfind(marker)
    if text_start != -1:
        response = response[text_start + len(marker):]
    return response
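# gr.ChatInterface calls test_preview_chatbot with (message, history), where
# history is the list of (user, assistant) message pairs from the conversation
# so far, which is exactly what build_input_prompt iterates over.
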

welcome_preview_message = f"""
Welcome to **{TITLE}**! Say something like:
"{EXAMPLE_INPUT}"
"""
chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)

demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)
demo.launch()
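# When running locally, demo.launch(share=True) would additionally expose a
# temporary public link.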