Amitz244 committed on
Commit
bd52cb4
·
verified ·
1 Parent(s): 1ef7b3a

Upload modeling.py

Browse files
Files changed (1) hide show
  1. modeling.py +53 -0
modeling.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import torch.nn as nn
3
+ from transformers import CLIPModel
4
+ from peft import LoraConfig, get_peft_model
5
+
6
class MLP(nn.Module):
    """Three-layer perceptron head.

    Maps ``input_dim`` features through two hidden layers to
    ``output_dim`` logits (768 -> 512 -> 256 -> 8 by default), with a
    ReLU and a shared dropout applied after each hidden layer.
    """

    def __init__(self, input_dim=768, hidden_dim1=512, hidden_dim2=256,
                 output_dim=8, dropout_rate=0.5):
        super(MLP, self).__init__()
        # Attribute names are part of the state_dict contract; keep them.
        self.fc1 = nn.Linear(input_dim, hidden_dim1)
        self.relu1 = nn.ReLU()
        # One dropout module reused after both hidden layers.
        self.dropout = nn.Dropout(dropout_rate)
        self.fc2 = nn.Linear(hidden_dim1, hidden_dim2)
        self.relu2 = nn.ReLU()
        self.fc3 = nn.Linear(hidden_dim2, output_dim)

    def forward(self, x):
        """Return raw logits for a batch of feature vectors."""
        hidden = self.dropout(self.relu1(self.fc1(x)))
        hidden = self.dropout(self.relu2(self.fc2(hidden)))
        return self.fc3(hidden)
25
+
26
class clip_lora_model(nn.Module):
    """CLIP ViT-L/14 vision encoder fine-tuned with LoRA adapters.

    Wraps the pretrained ``openai/clip-vit-large-patch14`` vision tower
    with LoRA on the attention projections, keeps CLIP's visual
    projection frozen, and feeds the projected image embedding into a
    trainable :class:`MLP` head that emits ``output_dim`` logits.
    """

    def __init__(self, input_dim=768, hidden_dim1=512, hidden_dim2=256,
                 output_dim=8, dropout_rate=0.5, r=16, lora_alpha=8):
        super(clip_lora_model, self).__init__()
        self.output_dim = output_dim
        # Trainable classification head on top of the projected embedding.
        self.mlp = MLP(input_dim, hidden_dim1, hidden_dim2, output_dim, dropout_rate)

        clip = CLIPModel.from_pretrained('openai/clip-vit-large-patch14')

        # Freeze CLIP's visual projection: only LoRA adapters + MLP train.
        self.proj = clip.visual_projection
        for weight in self.proj.parameters():
            weight.requires_grad = False

        # LoRA only on the attention query/key/value projections.
        lora_cfg = LoraConfig(
            r=int(r),
            lora_alpha=lora_alpha,
            target_modules=["k_proj", "v_proj", "q_proj"],
            lora_dropout=0.1,
            bias="none",
        )
        self.model = get_peft_model(clip.vision_model, lora_cfg)

    def forward(self, x):
        """Return MLP logits for a batch of preprocessed images ``x``."""
        # Index 1 of the vision-model output is the pooled image
        # embedding (presumably pooler_output — confirm against the
        # transformers CLIPVisionModel return ordering).
        pooled = self.model(x)[1]
        return self.mlp(self.proj(pooled))