# Hugging Face Spaces page header (scraped): "Spaces — Sleeping"
import os

import gradio as gr
from smolagents import HfApiModel

# Chat model backing the Gradio app. The HF API token is read from the
# environment (HF_TOKEN); if unset, token is None and unauthenticated
# access is attempted.
model = HfApiModel(
    model_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
    token=os.environ.get("HF_TOKEN"),
)
# System prompt, pre-formatted in the message shape the model expects:
# a list of {"role", "content"} dicts whose content is a list of typed parts.
system_data = [
    {
        "role": "system",
        "content": [
            {
                "type": "text",
                "text": "You are a doctor who specializes on helping patients with addiction issues",
            }
        ],
    }
]
def get_user_data(prompt: str) -> list[dict]:
    """Wrap a raw user prompt in the model's message format.

    Args:
        prompt: The user's input text.

    Returns:
        A single-element list containing a user-role message whose
        content is a list with one typed text part.
    """
    return [
        {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": prompt,
                }
            ],
        }
    ]
def get_history(history: list[dict]) -> list[dict]:
    """Convert Gradio chat history into the model's message format.

    Gradio's ``type="messages"`` history stores content as a plain string;
    the model expects content as a list of typed parts, so each entry's
    content is re-wrapped as ``[{"type": "text", "text": ...}]``.

    Args:
        history: List of ``{"role": ..., "content": str}`` dicts from Gradio.

    Returns:
        The same messages with content converted to the typed-part format.
    """
    # NOTE(review): a leftover debug print(history) was removed here.
    return [
        {
            "role": message["role"],
            "content": [
                {
                    "type": "text",
                    "text": message["content"],
                }
            ],
        }
        for message in history
    ]
def chat(prompt: str, history: list[dict]) -> str:
    """Gradio ChatInterface callback: answer *prompt* given *history*.

    Builds the full message list (system prompt + converted history +
    new user message), sends it to the model, and returns the reply text.
    """
    messages = system_data + get_history(history) + get_user_data(prompt)
    return model(messages).content
# Build and launch the chat UI. type="messages" makes Gradio pass history
# as a list of {"role", "content"} dicts, which get_history() expects.
demo = gr.ChatInterface(
    chat,
    type="messages",
    title="ArunGPT",
    theme=gr.themes.Soft(),
    # Fixed grammar of the user-facing description.
    description=(
        "Hello, this chatbot is created for educational purposes only "
        "and is powered by the Mistral 8x7B model"
    ),
).queue()
demo.launch()