HIM-self/src/core/sparse_activation.py
Takk8IS
Initial HIM implementation
fbebf66
raw
history blame
970 Bytes
import torch
import torch.nn as nn
from typing import Dict, Tuple, List
import numpy as np
class SparseActivationManager:
def __init__(self, sparsity_threshold: float = 0.95):
    """Set up the sparse-activation manager's state.

    Args:
        sparsity_threshold: Threshold used when deciding which activations
            to keep (default 0.95). NOTE(review): presumably the fraction of
            units suppressed by the generated mask — confirm against the
            mask-generation helper elsewhere in this class.
    """
    # Delegate object for pattern analysis; PatternAnalyzer is defined
    # elsewhere in the project.
    self.pattern_analyzer = PatternAnalyzer()
    # Record of past activations; only initialized here — appended to
    # by code outside this view.
    self.activation_history: list = []
    # Stored verbatim for later use by the masking helpers.
    self.sparsity_threshold = sparsity_threshold
def compute_pattern(self, input_tensor: torch.Tensor) -> torch.Tensor:
    """Return a sparsified version of *input_tensor*.

    Three-stage pipeline, each stage delegated to a private helper
    defined elsewhere in this class:
      1. score each element's importance,
      2. turn the scores into an activation mask,
      3. apply that mask to the original input.
    """
    scores = self._compute_importance_scores(input_tensor)
    mask = self._generate_activation_mask(scores)
    return self._apply_sparse_activation(input_tensor, mask)
def _compute_importance_scores(self, input_tensor: torch.Tensor) -> torch.Tensor:
    """Fuse two importance signals for *input_tensor* into one score tensor.

    Gathers an attention-based signal and a gradient-based signal (both
    computed by private helpers defined elsewhere in this class), then
    merges them via ``_combine_importance_metrics``.
    """
    attn_signal = self._calculate_attention_weights(input_tensor)
    grad_signal = self._compute_gradient_information(input_tensor)
    return self._combine_importance_metrics(attn_signal, grad_signal)