xilluill committed
Commit ca71d6b · 1 Parent(s): 39cb3b9

update attention mask

Files changed (1): models/kv_edit.py (+1 −1)
models/kv_edit.py CHANGED
@@ -71,7 +71,7 @@ class only_Flux(torch.nn.Module): # only contains the initialization function
 
 
         # attention_mask[background_token_indices.unsqueeze(1).expand(-1, seq_len), :] = False
-        attention_mask[background_token_indices.unsqueeze(1), mask_token_indices] = True # attend to the mask region
+        # attention_mask[background_token_indices.unsqueeze(1), mask_token_indices] = True # attend to the mask region
         attention_mask[background_token_indices.unsqueeze(1), text_indices] = True # attend to the text
         attention_mask[background_token_indices.unsqueeze(1), background_token_indices] = True # attend to the background region
 
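
After this change, background tokens attend only to the text tokens and to other background tokens; attention from the background into the masked (edit) region is disabled. Below is a minimal, self-contained sketch of the indexing pattern used here; seq_len and the three index tensors (text_indices, mask_token_indices, background_token_indices) are illustrative stand-ins, not values taken from the repository, where they would be derived from the tokenized prompt and the edit mask.

import torch

# Illustrative sequence length and index sets partitioning the sequence.
seq_len = 8
text_indices = torch.tensor([0, 1])
mask_token_indices = torch.tensor([2, 3, 4])
background_token_indices = torch.tensor([5, 6, 7])

attention_mask = torch.zeros(seq_len, seq_len, dtype=torch.bool)

# Background tokens attend to the text and to each other; the line that
# also let them attend to mask_token_indices is commented out by this commit.
attention_mask[background_token_indices.unsqueeze(1), text_indices] = True
attention_mask[background_token_indices.unsqueeze(1), background_token_indices] = True

print(attention_mask.int())

The .unsqueeze(1) turns the background row indices into a column vector, so advanced indexing broadcasts it against the column index tensor and sets every (background row, allowed column) pair in a single assignment.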