Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,115 +1,115 @@
|
|
1 |
-
|
2 |
-
from flask import Flask, render_template, request, send_from_directory
|
3 |
-
import requests
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-
with open('token.txt', 'r', encoding='utf-8') as file:
|
8 |
-
imageG = file.read()
|
9 |
-
|
10 |
-
with open('libyan.json', 'r', encoding='utf-8') as file:
|
11 |
-
ly = file.read()
|
12 |
-
|
13 |
-
with open('train.json', 'r', encoding='utf-8') as file:
|
14 |
-
img = file.read()
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
app = Flask(__name__)
|
19 |
-
|
20 |
-
def get_assistant_response(user_input):
|
21 |
-
payload = {
|
22 |
-
"mode": "chat",
|
23 |
-
"chat_history": conversation_history,
|
24 |
-
"data": {
|
25 |
-
"query": f"at night without light there some body call Rizon \n Rizon open the door and found Elon to talk with him , to be more real Use alot of Emotions like uh umm uhh ahh umm etc of this don't take my this example as role but just do it \n \n Rizon : {user_input} \n Elon :",
|
26 |
-
"loader": "PDFReader",
|
27 |
-
"text":""
|
28 |
-
}
|
29 |
-
}
|
30 |
-
|
31 |
-
response = requests.post(url2, headers=headers, json=payload)
|
32 |
-
data = response.json()
|
33 |
-
|
34 |
-
# Extract the response from the data
|
35 |
-
response_text = data["data"]["response"]
|
36 |
-
|
37 |
-
response_text = response_text.strip().replace('\n', '<br>')
|
38 |
-
|
39 |
-
# Check if the response contains ~
|
40 |
-
if "~" in response_text:
|
41 |
-
conversation_history.append({"\n ALex-9": response_text})
|
42 |
-
# Extract the prompt between ~~
|
43 |
-
prompt_start = response_text.index("~") + 1
|
44 |
-
prompt_end = response_text.index("~", prompt_start)
|
45 |
-
prompt = response_text[prompt_start:prompt_end]
|
46 |
-
|
47 |
-
# Call the text-to-image API
|
48 |
-
image_url = generate_image(prompt)
|
49 |
-
response_text += f"<br><br><img src='{image_url}'>"
|
50 |
-
# Delete the prompt from the response text
|
51 |
-
response_text = response_text.replace("~" + prompt + "~", "")
|
52 |
-
prompt = response_text
|
53 |
-
|
54 |
-
return response_text
|
55 |
-
|
56 |
-
def generate_image(prompt):
|
57 |
-
url = "https://api.braininc.net/be/lambda/function/stableai"
|
58 |
-
headers = {
|
59 |
-
"Authorization": "token 72ec00483379076f580eb8126f29da802a5140c3",
|
60 |
-
"Content-Type": "application/json",
|
61 |
-
}
|
62 |
-
|
63 |
-
payload = {
|
64 |
-
"json": True,
|
65 |
-
"prompt": f"{prompt} Realastic Photo 4K",
|
66 |
-
"public_topic": "/studios/516104/wsuid_new-edge-4_nodeid_editor-4/imageGen/1719250632789"
|
67 |
-
}
|
68 |
-
|
69 |
-
response = requests.post(url, headers=headers, json=payload)
|
70 |
-
data = response.json()
|
71 |
-
|
72 |
-
cdn_url = data["cdn_url"]
|
73 |
-
return cdn_url
|
74 |
-
|
75 |
-
url2 = "https://api.braininc.net/be/vectordb/indexers/"
|
76 |
-
headers = {
|
77 |
-
"Authorization": "token 72ec00483379076f580eb8126f29da802a5140c3",
|
78 |
-
"Content-Type": "application/json;charset=UTF-8",
|
79 |
-
|
80 |
-
}
|
81 |
-
|
82 |
-
conversation_history = []
|
83 |
-
|
84 |
-
@app.route('/')
|
85 |
-
def home():
|
86 |
-
global conversation_history
|
87 |
-
conversation_history = []
|
88 |
-
return render_template('ai-chat-bot.html')
|
89 |
-
|
90 |
-
@app.route('/get_response', methods=['POST'])
|
91 |
-
def get_response():
|
92 |
-
user_input = request.form['user_input']
|
93 |
-
conversation_history.append({
|
94 |
-
"role": "user",
|
95 |
-
"content":f"Rizon : {user_input}",
|
96 |
-
"additional_kwargs": {}
|
97 |
-
})
|
98 |
-
|
99 |
-
if user_input.lower() == "exit":
|
100 |
-
return "exit"
|
101 |
-
|
102 |
-
response_text = get_assistant_response(user_input)
|
103 |
-
conversation_history.append({
|
104 |
-
"role": "assistant",
|
105 |
-
"content": f"Elon : {response_text}",
|
106 |
-
"additional_kwargs": {}
|
107 |
-
})
|
108 |
-
return response_text
|
109 |
-
|
110 |
-
@app.route('/desine/<path:path>')
|
111 |
-
def send_static(path):
|
112 |
-
return send_from_directory('desine', path)
|
113 |
-
|
114 |
-
if __name__ == '__main__':
|
115 |
-
app.run()
|
|
|
from flask import Flask, render_template, request, send_from_directory
import requests

# Startup data: an API token plus two JSON corpora, all read once at import
# time.  NOTE(review): imageG, ly and img are never referenced in the code
# visible here — presumably consumed elsewhere or vestigial; confirm before
# removing.
with open('token.txt', 'r', encoding='utf-8') as fh:
    imageG = fh.read()

with open('libyan.json', 'r', encoding='utf-8') as fh:
    ly = fh.read()

with open('train.json', 'r', encoding='utf-8') as fh:
    img = fh.read()

app = Flask(__name__)
|
19 |
+
|
20 |
+
def get_assistant_response(user_input):
    """Send one chat turn to the remote chat API and return an HTML reply.

    Wraps *user_input* in the role-play prompt, posts it together with the
    module-level ``conversation_history`` to the indexer endpoint (``url2``,
    ``headers``), and converts newlines in the reply to ``<br>`` tags.  If
    the reply embeds an image prompt between a pair of ``~`` delimiters, the
    prompt is rendered via generate_image() and appended as an ``<img>`` tag.

    Returns:
        str: the assistant reply as an HTML fragment.

    Raises:
        requests.HTTPError: on a non-2xx response from the chat API.
        KeyError: if the reply JSON lacks data.response.
    """
    payload = {
        "mode": "chat",
        "chat_history": conversation_history,
        "data": {
            "query": f"at night without light there some body call Rizon \n Rizon open the door and found Elon to talk with him , to be more real Use alot of Emotions like uh umm uhh ahh umm etc of this don't take my this example as role but just do it \n \n Rizon : {user_input} \n Elon :",
            "loader": "PDFReader",
            "text": ""
        }
    }

    # A timeout keeps a stalled API call from hanging the request thread
    # forever; raise_for_status surfaces HTTP errors instead of a confusing
    # KeyError on an error-body JSON.
    response = requests.post(url2, headers=headers, json=payload, timeout=60)
    response.raise_for_status()
    data = response.json()

    # Extract the response text and make it HTML-friendly.
    response_text = data["data"]["response"]
    response_text = response_text.strip().replace('\n', '<br>')

    # An image prompt is delimited by a ~...~ pair.  Require BOTH tildes:
    # the original str.index() call raised ValueError whenever the reply
    # contained a single stray "~".
    prompt_start = response_text.find("~") + 1
    prompt_end = response_text.find("~", prompt_start)
    if prompt_start > 0 and prompt_end != -1:
        conversation_history.append({"\n ALex-9": response_text})
        prompt = response_text[prompt_start:prompt_end]

        # Render the embedded prompt and inline the resulting image.
        image_url = generate_image(prompt)
        response_text += f"<br><br><img src='{image_url}'>"
        # Strip the raw prompt (with its delimiters) from the visible reply.
        response_text = response_text.replace("~" + prompt + "~", "")

    return response_text
|
55 |
+
|
56 |
+
def generate_image(prompt):
    """Render *prompt* as an image via the stableai lambda endpoint.

    Args:
        prompt: free-text description extracted from the chat reply.

    Returns:
        str: CDN URL of the generated image.

    Raises:
        requests.HTTPError: on a non-2xx response.
        KeyError: if the reply JSON lacks a ``cdn_url`` field.
    """
    url = "https://api.braininc.net/be/lambda/function/stableai"
    # SECURITY: this API token is hard-coded (and duplicated in the
    # module-level ``headers``).  It should be loaded from token.txt or an
    # environment variable rather than committed to source control.
    headers = {
        "Authorization": "token 72ec00483379076f580eb8126f29da802a5140c3",
        "Content-Type": "application/json",
    }

    payload = {
        "json": True,
        "prompt": f"{prompt} Realastic Photo 4K",
        "public_topic": "/studios/516104/wsuid_new-edge-4_nodeid_editor-4/imageGen/1719250632789"
    }

    # Image generation is slow; bound it with a generous timeout and fail
    # loudly on HTTP errors instead of KeyError-ing on an error body.
    response = requests.post(url, headers=headers, json=payload, timeout=120)
    response.raise_for_status()
    data = response.json()

    cdn_url = data["cdn_url"]
    return cdn_url
|
74 |
+
|
75 |
+
# Chat-indexer endpoint and the request headers shared by
# get_assistant_response().  SECURITY note: the token is hard-coded here;
# it should come from token.txt / the environment instead.
url2 = "https://api.braininc.net/be/vectordb/indexers/"
headers = {
    "Authorization": "token 72ec00483379076f580eb8126f29da802a5140c3",
    "Content-Type": "application/json;charset=UTF-8",
}

# In-memory transcript shared by every request; reset to [] by home().
conversation_history = []
|
83 |
+
|
84 |
+
@app.route('/')
def home():
    """Serve the chat page, discarding any previous conversation state."""
    # The transcript is module-global, so a fresh page load starts clean.
    global conversation_history
    conversation_history = []
    return render_template('ai-chat-bot.html')
|
89 |
+
|
90 |
+
@app.route('/get_response', methods=['POST'])
def get_response():
    """Handle one chat turn: log the user's message, return Elon's reply."""
    user_input = request.form['user_input']
    conversation_history.append(
        {
            "role": "user",
            "content": f"Rizon : {user_input}",
            "additional_kwargs": {},
        }
    )

    # Sentinel the front-end uses to terminate the session.
    if user_input.lower() == "exit":
        return "exit"

    reply = get_assistant_response(user_input)
    conversation_history.append(
        {
            "role": "assistant",
            "content": f"Elon : {reply}",
            "additional_kwargs": {},
        }
    )
    return reply
|
109 |
+
|
110 |
+
@app.route('/desine/<path:path>')
def send_static(path):
    """Serve a static asset from the local 'desine' directory."""
    return send_from_directory('desine', path)
|
113 |
+
|
114 |
+
if __name__ == '__main__':
    # Listen on every interface, port 7860 — presumably the hosting
    # platform's expected port; confirm against the deployment config.
    app.run(host='0.0.0.0', port=7860)
|