Spaces:
Sleeping
Sleeping
File size: 1,680 Bytes
8681516 3ebab61 f0e8cfd 880159c 3ebab61 880159c a1f0658 880159c a1f0658 880159c a1f0658 71096b5 f0fc310 1944b47 a1f0658 880159c a1f0658 f0e8cfd 3e48917 f0e8cfd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 |
import gradio as gr
import os
from smolagents import HfApiModel
# Hugging Face Inference API client for the Mixtral instruct model.
# The API token is read from the HF_TOKEN environment variable
# (os.environ.get returns None if unset — the call itself won't fail here).
model = HfApiModel(model_id="mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.environ.get("HF_TOKEN"))
# System message prepended to every conversation sent to the model (see chat()).
# Uses the structured content format: a list of typed content parts per message.
system_data = [
{
"role":"system",
"content":[
{
"type":"text",
"text": "You are a doctor who specializes on helping patients with addiction issues"
}
]
}
]
def get_user_data(prompt: str):
    """Wrap a raw user prompt in the structured chat-message format.

    Returns a one-element list containing a user-role message whose
    content is a single typed text part.
    """
    message = {
        "role": "user",
        "content": [{"type": "text", "text": prompt}],
    }
    return [message]
def get_history(history):
    """Convert Gradio tuple-style chat history into structured messages.

    Gradio's classic ChatInterface passes history as a list of
    (user_message, bot_message) pairs. Each pair is expanded into two
    message dicts — one "user" role, one "assistant" role — each with a
    single typed text content part, preserving conversation order.

    Fix: removed the leftover debug print() that dumped the whole
    converted history to stdout on every chat turn.
    """
    mod_history = []
    for user_message, bot_message in history:
        mod_history.append({
            "role": "user",
            "content": [{"type": "text", "text": user_message}],
        })
        mod_history.append({
            "role": "assistant",
            "content": [{"type": "text", "text": bot_message}],
        })
    return mod_history
def chat(prompt, history):
    """Gradio chat callback: build the full message list and query the model.

    Concatenates the system message, the converted prior history, and the
    new user prompt, sends them to the model, and returns the reply text.
    """
    messages = system_data + get_history(history) + get_user_data(prompt)
    reply = model(messages)
    return reply.content
# Build the chat UI: ChatInterface wires chat() as the response function,
# with a Soft theme and a custom title/description; .queue() enables
# request queueing so concurrent users are handled sequentially.
demo = gr.ChatInterface(chat, chatbot=gr.Chatbot(),title="ArunGPT",theme = gr.themes.Soft(), description="Hello this is chatbot is created for only educational purpose and is powered by mistral 8x 7b model").queue()
# Start the Gradio server (blocks; on Spaces this serves the app).
demo.launch()
|