import gradio as gr
import os
from smolagents import HfApiModel

# Hugging Face Inference API model; HF_TOKEN must be available in the environment.
model = HfApiModel(
    model_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
    token=os.environ.get("HF_TOKEN"),
)

# System message that sets the assistant's persona.
system_data = [
    {
        "role": "system",
        "content": [
            {
                "type": "text",
                "text": "You are a doctor who specializes in helping patients with addiction issues",
            }
        ],
    }
]


def get_user_data(prompt: str):
    """Wrap the user's prompt in the message format expected by the model."""
    return [
        {
            "role": "user",
            "content": [{"type": "text", "text": prompt}],
        }
    ]


def get_history(history):
    """Convert Gradio's message-style chat history into the model's message format."""
    mod_history = []
    print(history)  # debug: inspect the raw history passed in by Gradio
    for message in history:
        mod_history.append(
            {
                "role": message["role"],
                "content": [{"type": "text", "text": message["content"]}],
            }
        )
    return mod_history


def chat(prompt, history):
    """Send the system prompt, prior turns, and the new user prompt to the model."""
    return model(system_data + get_history(history) + get_user_data(prompt)).content


demo = gr.ChatInterface(
    chat,
    type="messages",
    title="ArunGPT",
    theme=gr.themes.Soft(),
    description=(
        "Hello! This chatbot was created for educational purposes only and is "
        "powered by the Mixtral 8x7B model."
    ),
).queue()
demo.launch()
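
# --- Usage note (not part of the original Space; a minimal sketch of how it runs) ---
# Assuming the file is saved as app.py, gradio and smolagents are installed, and an
# HF_TOKEN with Inference API access is exported in the environment:
#   export HF_TOKEN=<your token>
#   python app.py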