update attention mask
models/kv_edit.py (+1 -1)
@@ -71,7 +71,7 @@ class only_Flux(torch.nn.Module):  # contains only the initialization functions
 
 
         # attention_mask[background_token_indices.unsqueeze(1).expand(-1, seq_len), :] = False
-        attention_mask[background_token_indices.unsqueeze(1), mask_token_indices] = True  # attend to the masked region
+        # attention_mask[background_token_indices.unsqueeze(1), mask_token_indices] = True  # attend to the masked region
         attention_mask[background_token_indices.unsqueeze(1), text_indices] = True  # attend to the text
         attention_mask[background_token_indices.unsqueeze(1), background_token_indices] = True  # attend to the background region
 
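For reference, here is a minimal, self-contained sketch of the broadcast-indexing pattern these lines rely on. The token counts and index layout below are hypothetical, not taken from the repository; the point is only that the net effect of this commit is that background tokens keep attending to the text tokens and to each other, but no longer to the masked (edited) region.

import torch

# Hypothetical layout: 10 tokens total, split into text / mask / background.
seq_len = 10
attention_mask = torch.zeros(seq_len, seq_len, dtype=torch.bool)

text_indices = torch.arange(0, 3)                         # tokens 0-2: text prompt
mask_token_indices = torch.tensor([3, 4])                 # tokens 3-4: masked (edited) region
background_token_indices = torch.tensor([5, 6, 7, 8, 9])  # tokens 5-9: background region

# background_token_indices.unsqueeze(1) has shape (B, 1); paired with a
# 1-D index tensor of shape (T,), advanced indexing broadcasts to (B, T),
# so each assignment fills a whole background-to-target block of the mask.
attention_mask[background_token_indices.unsqueeze(1), text_indices] = True              # attend to text
attention_mask[background_token_indices.unsqueeze(1), background_token_indices] = True  # attend to background

# With the background -> masked-region line commented out (this commit),
# those entries stay False:
assert not attention_mask[background_token_indices.unsqueeze(1), mask_token_indices].any()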