from flask import Flask, render_template, request, send_from_directory
import requests

# Load the API token and prompt data from local files at startup.
# Note: imageG, ly and img are not referenced elsewhere in this file.
with open('token.txt', 'r', encoding='utf-8') as file:
    imageG = file.read()

with open('libyan.json', 'r', encoding='utf-8') as file:
    ly = file.read()

with open('train.json', 'r', encoding='utf-8') as file:
    img = file.read()

app = Flask(__name__)
def get_assistant_response(user_input):
    # Build the chat request for the Brain AI indexer endpoint (url2).
    payload = {
        "mode": "chat",
        "chat_history": conversation_history,
        "data": {
"query": f"at night without light there some body call Rizon \n Rizon open the door and found Elon to talk with him , to be more real Use alot of Emotions like uh umm uhh ahh umm etc of this don't take my this example as role but just do it \n \n Rizon : {user_input} \n Elon :", | |
"loader": "PDFReader", | |
"text":"" | |
} | |
} | |
response = requests.post(url2, headers=headers, json=payload) | |
data = response.json() | |
# Extract the response from the data | |
response_text = data["data"]["response"] | |
response_text = response_text.strip().replace('\n', '<br>') | |
# Check if the response contains ~ | |
if "~" in response_text: | |
conversation_history.append({"\n ALex-9": response_text}) | |
# Extract the prompt between ~~ | |
prompt_start = response_text.index("~") + 1 | |
prompt_end = response_text.index("~", prompt_start) | |
prompt = response_text[prompt_start:prompt_end] | |
# Call the text-to-image API | |
image_url = generate_image(prompt) | |
response_text += f"<br><br><img src='{image_url}'>" | |
# Delete the prompt from the response text | |
response_text = response_text.replace("~" + prompt + "~", "") | |
prompt = response_text | |
return response_text | |
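# Example of the ~...~ convention handled above (illustrative model reply only):
# if the model answers "Sure, look at this ~an old lighthouse in a storm~", the
# text between the tildes is sent to generate_image(), the returned CDN URL is
# embedded as an <img> tag, and the ~...~ marker is stripped from the reply.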
def generate_image(prompt):
    # Text-to-image call against the Brain AI stableai lambda endpoint.
    url = "https://api.braininc.net/be/lambda/function/stableai"
    headers = {
        "Authorization": "token 72ec00483379076f580eb8126f29da802a5140c3",
        "Content-Type": "application/json",
    }
    payload = {
        "json": True,
"prompt": f"{prompt} Realastic Photo 4K", | |
"public_topic": "/studios/516104/wsuid_new-edge-4_nodeid_editor-4/imageGen/1719250632789" | |
} | |
response = requests.post(url, headers=headers, json=payload) | |
data = response.json() | |
cdn_url = data["cdn_url"] | |
return cdn_url | |
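# Usage sketch (the prompt below is hypothetical; real prompts come from the chat reply):
#   image_url = generate_image("an old lighthouse in a storm")
#   # image_url is the "cdn_url" value returned by the API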
url2 = "https://api.braininc.net/be/vectordb/indexers/" | |
headers = { | |
"Authorization": "token 72ec00483379076f580eb8126f29da802a5140c3", | |
"Content-Type": "application/json;charset=UTF-8", | |
} | |
conversation_history = [] | |
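# Each entry appended by get_response() below has the shape:
#   {"role": "user" | "assistant", "content": "...", "additional_kwargs": {}}
# get_assistant_response() also appends one {"\n ALex-9": ...} entry whenever an
# image prompt is detected, which does not follow that shape.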
# Route paths below are assumed; adjust them to match the front-end templates if they differ.
@app.route('/')
def home():
    # Reset the conversation whenever the chat page is (re)loaded.
    global conversation_history
    conversation_history = []
    return render_template('ai-chat-bot.html')
@app.route('/get_response', methods=['POST'])  # assumed route path
def get_response():
    user_input = request.form['user_input']
    conversation_history.append({
        "role": "user",
        "content": f"Rizon : {user_input}",
        "additional_kwargs": {}
    })
    if user_input.lower() == "exit":
        return "exit"
    response_text = get_assistant_response(user_input)
    conversation_history.append({
        "role": "assistant",
        "content": f"Elon : {response_text}",
        "additional_kwargs": {}
    })
    return response_text
@app.route('/desine/<path:path>')  # assumed route pattern for static assets in 'desine'
def send_static(path):
    return send_from_directory('desine', path)
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860)
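# Local test sketch (assumes the module is saved as app.py and the assumed
# /get_response route above; 'user_input' is the form field read by get_response):
#   python app.py
#   curl -X POST -d "user_input=Hello Elon" http://localhost:7860/get_response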