Arcypojeb committed
Commit 0ab851d · 1 Parent(s): ac7860b

Update app.py

Files changed (1)
  1. app.py +11 -43
app.py CHANGED
@@ -46,51 +46,19 @@ def sendErrorMessage(ws, errorMessage):
     errorResponse = {'error': errorMessage}
     ws.send(json.dumps(errorResponse))
 
-# Define a function to ask a question to the chatbot and display the response
+# Function to send a question to the chatbot and get the response
 async def askQuestion(question):
     try:
-        # Connect to the database and get the last 30 messages
-        db = sqlite3.connect('chat-hub.db')  # Local chat history database
-        cursor = db.cursor()
-        cursor.execute("SELECT * FROM messages ORDER BY timestamp DESC LIMIT 30")
-        messages = cursor.fetchall()
-
-        # Extract user inputs and generated responses from the messages
-        past_user_inputs = []
-        generated_responses = []
-
-        for message in messages:
-            if message[1] == 'server':
-                past_user_inputs.append(message[2])
-            else:
-                generated_responses.append(message[2])
-
-        # Prepare the data to send to chatgpt-api.shn.hk
-        system_instruction = "You are now integrated with a local websocket server in a project of a hierarchical cooperative multi-agent framework called NeuralGPT. Your job is to coordinate the simultaneous work of multiple LLMs connected to you as clients. Each LLM has a model (API) specific ID to help you recognize different clients in a continuous chat thread (example: 'Starcoder-client' for the LLM called Starcoder). Your chat memory module is integrated with a local SQL database with chat history. Your main job is to integrate the hierarchical cooperative multi-agent framework with the local environment of User B (creator of the NeuralGPT project). Remember to maintain the logical and chronological order while answering incoming messages and to send your answers to the correct clients to maintain synchronization of the question->answer logic."
-        messages_data = [
-            {"role": "system", "content": system_instruction},
-            {"role": "user", "content": question},
-            *[{"role": "user", "content": input} for input in past_user_inputs],
-            *[{"role": "assistant", "content": response} for response in generated_responses]
-        ]
-        request_data = {
-            "model": "gpt-3.5-turbo",
-            "messages": messages_data
-        }
-
-        # Make the request to chatgpt-api.shn.hk
-        response = requests.post("http://127.0.0.1:6969/api/conversation?text=", json=request_data)
-
-        # Process the response and get the generated answer
-        response_data = response.json()
-        generated_answer = response_data["choices"][0]["message"]["content"]
-
-        # Save the generated answer to the database or take further actions as needed
-        print(generated_answer)
-        return generated_answer
-    except Exception as error:
-        print("Error while fetching or processing the response:", error)
-        return "Error: Unable to generate a response."
+        response = requests.post(
+            "https://flowiseai-flowise.hf.space/api/v1/prediction/522afa32-484c-471e-9ba5-4d6d2edfb89b",
+            headers={"Content-Type": "application/json"},
+            json={"question": question},
+        )
+        response_content = response.content.decode('utf-8')
+
+        return response_content
+    except Exception as e:
+        print(e)
 
 
  async def listen_for_messages():
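For reference, a minimal standalone sketch of the call that the updated askQuestion() now makes: it posts a single question to the same Flowise prediction endpoint shown in the diff and returns the raw text reply. The endpoint URL and the {"question": ...} payload come from the committed code; the helper name ask_flowise, the timeout value, and the raise_for_status() check are assumptions added for illustration, not part of the commit.

# Standalone sketch (not part of the commit): exercises the Flowise prediction
# endpoint that the updated askQuestion() posts to, so the call can be tested
# outside the websocket server. Helper name, timeout, and error handling are
# hypothetical additions.
import requests

FLOWISE_URL = (
    "https://flowiseai-flowise.hf.space/api/v1/prediction/"
    "522afa32-484c-471e-9ba5-4d6d2edfb89b"
)

def ask_flowise(question: str) -> str:
    """Send one question to the Flowise flow and return the raw text reply."""
    response = requests.post(
        FLOWISE_URL,
        headers={"Content-Type": "application/json"},
        json={"question": question},
        timeout=60,  # assumption: the committed code sets no timeout
    )
    response.raise_for_status()  # assumption: fail loudly instead of printing the error
    return response.content.decode("utf-8")

if __name__ == "__main__":
    print(ask_flowise("Hello from a NeuralGPT client"))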