Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,20 +3,38 @@ from langchain_core.output_parsers import StrOutputParser
|
|
3 |
from langchain_core.prompts import ChatPromptTemplate
|
4 |
import gradio as gr
|
5 |
import os
|
|
|
6 |
|
7 |
|
8 |
prompt = ChatPromptTemplate.from_messages([("system", "You are a helpful AI assistant named Arun."), ("user", "{input}")])
|
9 |
|
10 |
llm = ChatNVIDIA(model="mistralai/mixtral-8x7b-instruct-v0.1")
|
11 |
chain = prompt | llm | StrOutputParser()
|
12 |
-
|
13 |
|
14 |
def chat(prompt, history):
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
|
21 |
demo = gr.ChatInterface(chat, title="ArunGPT",theme = gr.themes.Soft(), description="Hello this is chatbot is created for only educational purpose and is powered by mistral 8x 7b model").queue()
|
22 |
|
|
|
3 |
from langchain_core.prompts import ChatPromptTemplate
|
4 |
import gradio as gr
|
5 |
import os
|
6 |
+
from smolagents import HfApiModel
|
7 |
|
8 |
|
9 |
# Prompt template for the original LangChain pipeline (see `chain` below).
prompt = ChatPromptTemplate.from_messages([("system", "You are a helpful AI assistant named Arun."), ("user", "{input}")])

# NOTE(review): `llm` and `chain` are not referenced by `chat()` below,
# which calls `model` instead — this LangChain pipeline looks like dead
# code left over from the previous revision; confirm before removing.
llm = ChatNVIDIA(model="mistralai/mixtral-8x7b-instruct-v0.1")
chain = prompt | llm | StrOutputParser()

# Hugging Face Inference API client actually used by `chat()`.
# Reads the HF_TOKEN secret from the environment (None if unset).
model = HfApiModel(model_id="mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.environ.get("HF_TOKEN"))
|
14 |
|
15 |
def chat(prompt, history):
    """Answer one chat turn via the HF Inference API model.

    Fix: the original ignored ``history`` entirely, so the bot had no
    memory of earlier turns; prior turns are now replayed into the
    request payload before the new message.

    Parameters
    ----------
    prompt : str
        The user's latest message (supplied by gr.ChatInterface).
    history : list
        Prior turns from gr.ChatInterface.

    Returns
    -------
    str
        The assistant's reply text (``.content`` of the model response).
    """

    def _text_message(role, text):
        # The chat payload nests the text under a typed content part.
        return {"role": role, "content": [{"type": "text", "text": text}]}

    data = [
        _text_message(
            "system",
            "You are a doctor who specializes on helping patients with addiction issues",
        )
    ]
    # Replay earlier turns so the model sees the conversation so far.
    # NOTE(review): assumes gr.ChatInterface delivers history as
    # (user, assistant) pairs (the pre-`type="messages"` default) — confirm
    # against the installed gradio version.
    for user_msg, bot_msg in history or []:
        if user_msg:
            data.append(_text_message("user", user_msg))
        if bot_msg:
            data.append(_text_message("assistant", bot_msg))
    data.append(_text_message("user", prompt))
    return model(data).content
|
38 |
|
39 |
# Build the Gradio chat UI around `chat` and enable request queuing
# (queue() serializes concurrent users through the single model client).
demo = gr.ChatInterface(chat, title="ArunGPT",theme = gr.themes.Soft(), description="Hello this is chatbot is created for only educational purpose and is powered by mistral 8x 7b model").queue()
|
40 |
|