import torch

# Iteratively smooth an attention map: each pixel keeps (1 - scale) of its own
# value and takes scale of the mean of the pixels in a window of half-width
# `size` around it, for at most `iters` passes or until the map stops changing.
def smooth_attention(attention: torch.Tensor, iters: int = 1000, threshold: float = 0.1, scale: float = 0.2, size: int = 3):
    
    # drop singleton dimensions and work on a copy so the input is left untouched
    attention = attention.squeeze().clone()
    
    # run at most `iters` smoothing passes
    for _ in range(iters):
        
        # initialize the difference
        difference = torch.full(attention.shape, torch.inf)
        
        # iterate over the pixels of the attention
        for i in range(attention.shape[0]):
            
            for j in range(attention.shape[1]):
                
                # current pixel value as a Python float, so the difference
                # below is measured against the value before the update
                pixel = attention[i, j].item()
                
                # mean of the window around the pixel, clipped at the borders of the map
                mean = attention[max(0, i - size): min(attention.shape[0], i + size + 1), max(0, j - size): min(attention.shape[1], j + size + 1)].mean()
                
                # blend the pixel with the mean of its neighbourhood
                attention[i, j] = (1 - scale) * pixel + scale * mean
                
                # record how far the pixel was from its neighbourhood mean
                difference[i, j] = abs(pixel - mean)
        
        # stop early once every pixel is within `threshold` of its neighbourhood mean
        if (difference < threshold).all():
            break
    
    # add back a trailing singleton dimension
    attention = attention.unsqueeze(-1)
    
    # return the attention
    return attention
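

# Minimal usage sketch, assuming a hypothetical 32 x 32 single-channel attention
# map; the sizes and parameter values below are illustrative, not prescribed.
if __name__ == "__main__":
    dummy_attention = torch.rand(32, 32, 1)

    smoothed = smooth_attention(dummy_attention, iters=10, threshold=0.01)

    # squeeze() removed the trailing dimension and unsqueeze(-1) put it back
    print(smoothed.shape)  # torch.Size([32, 32, 1])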