HIM-self / src /core /consciousness_model.py
TeleologyHI
up
c227032
raw
history blame contribute delete
870 Bytes
from typing import Dict, Any
import torch
import torch.nn as nn
import torch.nn.functional as F
class ConsciousnessModel(nn.Module):
    """Three-layer MLP projecting input embeddings down to a small output vector.

    Architecture: Linear -> ReLU -> Dropout -> Linear -> ReLU -> Dropout -> Linear.
    All widths come from `config`, defaulting to 768 -> 128 -> 64 -> 32 with
    dropout probability 0.1.
    """

    def __init__(self, config: Dict[str, Any]):
        """Build the layers from `config` (missing keys fall back to defaults)."""
        super().__init__()
        # Layer widths, read once up front for clarity.
        in_features = config.get('input_dim', 768)
        first_width = config.get('hidden_dim1', 128)
        second_width = config.get('hidden_dim2', 64)
        out_features = config.get('output_dim', 32)
        # Attribute names are load-bearing: checkpoints key the state_dict on them.
        self.self_awareness = nn.Linear(in_features, first_width)
        self.meta_cognitive = nn.Linear(first_width, second_width)
        self.phenomenal = nn.Linear(second_width, out_features)
        self.dropout = nn.Dropout(config.get('dropout', 0.1))

    def forward(self, x):
        """Run the MLP; dropout is only active in training mode."""
        hidden = self.dropout(F.relu(self.self_awareness(x)))
        hidden = self.dropout(F.relu(self.meta_cognitive(hidden)))
        return self.phenomenal(hidden)