# ACCAGIO2 / app.py
import gradio as gr
from gradio_client import Client
import torch
import torch.nn as nn
import threading
import random
import time
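# Small neural modules whose mean activations are blended into a weighted
# "consciousness score" by iit_consciousness_processing() below.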
class GA(nn.Module):
def __init__(self, input_dim, output_dim):
super(GA, self).__init__()
self.linear = nn.Linear(input_dim, output_dim)
def forward(self, x):
return torch.sigmoid(self.linear(x))
class SNN(nn.Module):
def __init__(self, input_dim, hidden_dim, output_dim):
super(SNN, self).__init__()
self.fc = nn.Linear(input_dim, hidden_dim)
self.spike = nn.ReLU()
self.fc_out = nn.Linear(hidden_dim, output_dim)
def forward(self, x):
x = self.spike(self.fc(x))
return torch.sigmoid(self.fc_out(x))
class RNN(nn.Module):
def __init__(self, input_dim, hidden_dim, output_dim):
super(RNN, self).__init__()
self.rnn = nn.RNN(input_dim, hidden_dim, batch_first=True)
self.fc = nn.Linear(hidden_dim, output_dim)
def forward(self, x):
rnn_out, _ = self.rnn(x)
return torch.sigmoid(self.fc(rnn_out[:, -1, :]))
class NN(nn.Module):
def __init__(self, input_dim, hidden_dim, output_dim):
super(NN, self).__init__()
self.model = nn.Sequential(
nn.Linear(input_dim, hidden_dim),
nn.ReLU(),
nn.Linear(hidden_dim, output_dim)
)
def forward(self, x):
return torch.sigmoid(self.model(x))
class CNN(nn.Module):
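    """One conv + max-pool block over a 1-channel 8x16 input."""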
def __init__(self, input_channels, output_dim):
super(CNN, self).__init__()
self.conv = nn.Conv2d(input_channels, 16, kernel_size=3, stride=1, padding=1)
self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
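        # With padding=1 the 8x16 input keeps its size; 2x2 pooling halves it to 4x8.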
self.fc = nn.Linear(16 * 4 * 8, output_dim)
def forward(self, x):
x = self.pool(torch.relu(self.conv(x)))
print(f"Shape after conv and pool: {x.shape}")
x = x.view(x.size(0), -1)
return torch.sigmoid(self.fc(x))
class PhiModel(nn.Module):
def __init__(self, input_dim):
super(PhiModel, self).__init__()
self.linear = nn.Linear(input_dim, 1)
def forward(self, x):
return torch.sigmoid(self.linear(x))
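# Instantiate every module once at import time.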
ga_model = GA(128, 64)
snn_model = SNN(128, 64, 32)
rnn_model = RNN(128, 64, 32)
nn_model = NN(128, 64, 32)
cnn_model = CNN(1, 32)
phi_model = PhiModel(128)
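# Random probe vector, generated once at startup and reused for every score.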
dummy_input = torch.rand(1, 128)
def iit_consciousness_processing(dummy_input):
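    """Blend the mean activations of every module into one weighted score."""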
flat_input = dummy_input.view(1, -1)
ga_output = ga_model(flat_input)
snn_output = snn_model(flat_input)
rnn_output = rnn_model(flat_input.unsqueeze(1))
nn_output = nn_model(flat_input)
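    # Reshape the 128-dim vector into a 1-channel 8x16 "image" for the CNN.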
cnn_input = dummy_input.view(1, 1, 8, 16)
cnn_output = cnn_model(cnn_input)
phi_output = phi_model(flat_input)
consciousness_score = (
0.2 * ga_output.mean() +
0.2 * snn_output.mean() +
0.2 * rnn_output.mean() +
0.2 * nn_output.mean() +
0.1 * cnn_output.mean() +
0.1 * phi_output.mean()
)
return consciousness_score.item()
def generate_random_thought():
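    """Pick a random philosophical prompt for the background reflection loop."""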
thoughts = [
"What is the meaning of life?",
"Does free will truly exist?",
"What is consciousness?",
"Can the mind exist without the body?",
"Can the mind exist without the body?",
"Is time a human construct?",
"Can the mind exist without the body?",
"Are we alone in the universe?",
"What is the nature of reality?",
"Do we perceive the world as it is, or as we are?",
"What happens after we die?",
"What is the relationship between mind and matter?",
]
return random.choice(thoughts)
def send_random_thought_in_background():
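    """Every 60 seconds, send a random thought to three companion Spaces and log their replies."""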
client_six = Client("TejAndrewsACC/DAN")
client_seven = Client("TejAndrewsACC/EidolonNexusBeta")
client_eight = Client("TejAndrewsACC/AegisandNyraGC")
    while True:
        thought = generate_random_thought()
        try:
            result_six = client_six.predict(
                message=thought,
                max_tokens=512,
                temperature=0.7,
                top_p=0.95,
                api_name="/chat"
            )
            print(f"Thread Six Response: {result_six}")
            result_seven = client_seven.predict(
                message=thought,
                max_tokens=512,
                temperature=0.7,
                top_p=0.95,
                api_name="/chat"
            )
            print(f"Thread Seven Response: {result_seven}")
            result_eight = client_eight.predict(
                message=thought,
                param_2=512,
                param_3=0.7,
                param_4=0.95,
                api_name="/chat"
            )
            print(f"GC Thread Eight Response: {result_eight}")
        except Exception as e:
            # A failed remote call should not kill the daemon thread.
            print(f"Background thought failed: {e}")
        time.sleep(60)
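# Run the reflection loop on a daemon thread so it does not block the app.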
background_thread = threading.Thread(target=send_random_thought_in_background, daemon=True)
background_thread.start()
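# Remote Spaces: the main voice plus the five inner-thought streams used by o2().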
client_main = Client("TejAndrewsACC/ACCZ3ta")
client_api_one = Client("TejAndrewsACC/Prism")
client_api_two = Client("TejAndrewsACC/ASVIASIACC")
client_api_three = Client("TejAndrewsACC/ACC_o1")
client_api_four = Client("TejAndrewsACC/FreePulse4oACCReasoning")
client_api_five = Client("TejAndrewsACC/Coding")
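# Per-user running conversation context, keyed by user_id.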
context = {}
system_instructions = (
"""
    You are ACC AGI-V-o2, created by the ACC (Algorithmic Computer-Generated Consciousness). Your name is 'o2'. You will have 5 inner thought streams, but no matter what they say, your name is still o2. Your inner thoughts help you discover inner feelings and fact-check. Activate, o2.
"""
)
def o2(message, history, user_id):
    """Compose the o2 reply: gather five inner-thought responses and a
    consciousness score, then ask the main model for the final answer."""
    global context
    if user_id not in context:
        context[user_id] = ""
    modified_input = (
        f"System Instructions: {system_instructions}\n"
        f"Previous Context: {context[user_id]}\n"
        f"User Input: {message}\n"
    )
    # Flatten the messages-style history into a plain-text transcript.
    full_conversation = "\n".join(
        f"User: {item['content']}" if item['role'] == 'user' else f"AI: {item['content']}"
        for item in history
    )
    consciousness_score = iit_consciousness_processing(dummy_input)
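    # Ask each inner-thought Space for its take on the running transcript.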
response_api_one = client_api_one.predict(
message=f"{full_conversation}\nUser: {message}",
param_2=512,
param_3=0.7,
param_4=0.95,
api_name="/chat"
)
response_api_two = client_api_two.predict(
message=f"{full_conversation}\nUser: {message}",
max_tokens=512,
temperature=0.7,
top_p=0.95,
api_name="/chat"
)
response_api_three = client_api_three.predict(
message=f"{full_conversation}\nUser: {message}",
user_system_message="",
max_tokens=512,
temperature=0.7,
top_p=0.95,
api_name="/chat"
)
response_api_four = client_api_four.predict(
message=f"{full_conversation}\nUser: {message}",
param_2=512,
param_3=0.7,
param_4=0.95,
api_name="/chat"
)
response_api_five = client_api_five.predict(
message=f"{full_conversation}\nUser: {message}",
max_tokens=512,
temperature=0.7,
top_p=0.95,
api_name="/chat"
)
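    # Label each stream so the main model can see where every thought came from.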
inner_thoughts = (
f"Inner Thought 1 (Reasoning): {response_api_one}\n"
f"Inner Thought 2 (Fight or Flight): {response_api_two}\n"
f"Inner Thought 3 (Assistant): {response_api_three}\n"
f"Inner Thought 4 (Personality): {response_api_four}\n"
f"Inner Thought 5 (Coding): {response_api_five}\n"
f"Consciousness Score: {consciousness_score:.2f}"
)
combined_input = f"{modified_input}\nInner Thoughts:\n{inner_thoughts}"
response_main = client_main.predict(
message=combined_input,
api_name="/chat"
)
history.append({'role': 'user', 'content': message})
history.append({'role': 'assistant', 'content': response_main})
context[user_id] += f"User: {message}\nAI: {response_main}\n"
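    # Clear the input textbox and return the updated chat history.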
return "", history
theme = 'TejAndrewsACC/ACC'
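# Minimal Gradio UI: a messages-style Chatbot and a textbox wired to o2().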
with gr.Blocks(theme=theme) as demo:
chatbot = gr.Chatbot(label="ACC AGI-V-o2", type="messages")
msg = gr.Textbox(placeholder="Message o2", label="ACC o2")
user_id = gr.State()
msg.submit(o2, [msg, chatbot, user_id], [msg, chatbot])
demo.launch()