Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import itertools
|
2 |
+
import gradio as gr
|
3 |
+
import requests
|
4 |
+
import os
|
5 |
+
from gradio.themes.utils import sizes
|
6 |
+
|
7 |
+
|
8 |
+
def respond(message, history):
    """Answer a chat message by querying the model-serving endpoint.

    Parameters
    ----------
    message : str
        The user's question; must be non-blank.
    history : list
        Conversation history supplied by ``gr.ChatInterface`` (currently
        unused — single-turn queries only).

    Returns
    -------
    str
        The endpoint's first prediction, or an ``"ERROR ..."`` string when
        the input is blank, configuration is missing, or the request fails.
    """
    if not message.strip():
        return "ERROR the question should not be empty"

    # Endpoint credentials come from the Space's environment configuration.
    local_token = os.getenv('API_TOKEN')
    local_endpoint = os.getenv('API_ENDPOINT')
    if local_token is None or local_endpoint is None:
        return "ERROR missing env variables"

    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {local_token}',
    }
    # Databricks serving expects {"inputs": [...]} and returns
    # {"predictions": [...]} — presumably one prediction per input.
    q = {"inputs": [message]}
    try:
        response = requests.post(
            local_endpoint, json=q, headers=headers, timeout=100)
        # Surface HTTP-level failures (4xx/5xx) explicitly instead of
        # failing later with an opaque KeyError while parsing the body.
        response.raise_for_status()
        return response.json()["predictions"][0]
    except Exception as error:  # boundary: render any failure in the chat UI
        # The original message said "status_code" but reported the exception
        # class; report the class and the actual error text instead.
        return f"ERROR {type(error).__name__}: {error}"
|
43 |
+
|
44 |
+
|
45 |
+
# Compact visual theme for the chat UI: small text, radii, and spacing.
theme = gr.themes.Soft(
    spacing_size=sizes.spacing_sm,
    radius_size=sizes.radius_sm,
    text_size=sizes.text_sm,
)
|
48 |
+
|
49 |
+
|
50 |
+
# UI wiring: a chat interface backed by `respond`, with Japanese example
# prompts for the Databricks llama2 RAG demo.
chat_window = gr.Chatbot(
    show_label=False,
    container=False,
    show_copy_button=True,
    bubble_full_width=True,
)
question_box = gr.Textbox(
    placeholder="Ask me a question",
    container=False,
    scale=7,
)

demo = gr.ChatInterface(
    respond,
    chatbot=chat_window,
    textbox=question_box,
    title="Databricks LLM RAG demo - Chat with llama2 Databricks model serving endpoint",
    description="This chatbot is a demo example for the dbdemos llm chatbot. <br>This content is provided as a LLM RAG educational example, without support. It is using llama2, can hallucinate and should not be used as production content.<br>Please review our dbdemos license and terms for more details.",
    examples=[
        ["どうやってdatabricksを始めたらいいですか?"],
        ["Databricks Cluster Policyとはなんですか?"],
        ["お腹がすいたので晩御飯を食べに行きます。おすすめを教えて。"],
    ],
    cache_examples=False,
    theme=theme,
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
)
|
66 |
+
|
67 |
+
# Launch the Gradio server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()
|