from typing import Optional

import torch.nn.functional as F
from torch import nn
|
|
class LoRALinearLayer(nn.Module):
    """
    A linear LoRA layer: a low-rank down projection followed by an up projection,
    optionally rescaled by network_alpha / rank.
    """

    def __init__(self, in_features, out_features, rank=4, network_alpha=None, device=None, dtype=None):
        super().__init__()

        self.down = nn.Linear(in_features, rank, bias=False, device=device, dtype=dtype)
        self.up = nn.Linear(rank, out_features, bias=False, device=device, dtype=dtype)

        # network_alpha rescales the LoRA update by network_alpha / rank, so the strength
        # of the update does not have to be retuned when the rank changes.
        self.network_alpha = network_alpha
        self.rank = rank

        # With the up projection zero-initialized, the LoRA branch starts as a no-op
        # and only contributes once it has been trained.
        nn.init.normal_(self.down.weight, std=1 / rank)
        nn.init.zeros_(self.up.weight)

    def forward(self, hidden_states):
        # Run the low-rank branch in the LoRA weights' dtype, then cast back.
        orig_dtype = hidden_states.dtype
        dtype = self.down.weight.dtype

        down_hidden_states = self.down(hidden_states.to(dtype))
        up_hidden_states = self.up(down_hidden_states)

        if self.network_alpha is not None:
            up_hidden_states *= self.network_alpha / self.rank

        return up_hidden_states.to(orig_dtype)
|
|
class LoRAConv2dLayer(nn.Module):
    """
    A convolutional LoRA layer: a low-rank down convolution followed by a 1x1 up
    convolution, optionally rescaled by network_alpha / rank.
    """

    def __init__(
        self, in_features, out_features, rank=4, kernel_size=(1, 1), stride=(1, 1), padding=0, network_alpha=None
    ):
        super().__init__()

        # The down conv mirrors the kernel size, stride, and padding of the convolution
        # being adapted; the 1x1 up conv maps back to the full channel count, so the LoRA
        # branch produces the same spatial shape as the base convolution.
        self.down = nn.Conv2d(in_features, rank, kernel_size=kernel_size, stride=stride, padding=padding, bias=False)
        self.up = nn.Conv2d(rank, out_features, kernel_size=(1, 1), stride=(1, 1), bias=False)

        self.network_alpha = network_alpha
        self.rank = rank

        # Zero-initializing the up conv makes the LoRA branch a no-op until trained.
        nn.init.normal_(self.down.weight, std=1 / rank)
        nn.init.zeros_(self.up.weight)

    def forward(self, hidden_states):
        orig_dtype = hidden_states.dtype
        dtype = self.down.weight.dtype

        down_hidden_states = self.down(hidden_states.to(dtype))
        up_hidden_states = self.up(down_hidden_states)

        if self.network_alpha is not None:
            up_hidden_states *= self.network_alpha / self.rank

        return up_hidden_states.to(orig_dtype)
|
|
class LoRACompatibleConv(nn.Conv2d):
    """
    A convolutional layer that can be used with LoRA.
    """

    def __init__(self, *args, lora_layer: Optional[LoRAConv2dLayer] = None, **kwargs):
        super().__init__(*args, **kwargs)
        self.lora_layer = lora_layer

    def set_lora_layer(self, lora_layer: Optional[LoRAConv2dLayer]):
        self.lora_layer = lora_layer

    def forward(self, x):
        if self.lora_layer is None:
            # No adapter attached: run a plain functional convolution with this layer's
            # own weight, bias, and configuration.
            return F.conv2d(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
        else:
            # Base convolution plus the low-rank LoRA update.
            return super().forward(x) + self.lora_layer(x)
|
|
class LoRACompatibleLinear(nn.Linear):
    """
    A Linear layer that can be used with LoRA.
    """

    def __init__(self, *args, lora_layer: Optional[LoRALinearLayer] = None, **kwargs):
        super().__init__(*args, **kwargs)
        self.lora_layer = lora_layer

    def set_lora_layer(self, lora_layer: Optional[LoRALinearLayer]):
        self.lora_layer = lora_layer

    def forward(self, x):
        if self.lora_layer is None:
            return super().forward(x)
        else:
            # Base linear projection plus the low-rank LoRA update.
            return super().forward(x) + self.lora_layer(x)
|
|
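# Illustrative smoke test only, not part of the classes above: a minimal sketch that
# assumes a working torch installation and exercises the linear and conv paths end to
# end. Because the up projections are zero-initialized, a freshly attached LoRA layer
# should leave the wrapped layer's output unchanged.
if __name__ == "__main__":
    import torch

    # Linear path: wrap a base linear layer, then attach a LoRA adapter.
    linear = LoRACompatibleLinear(32, 64)
    x = torch.randn(2, 32)
    base_out = linear(x)
    linear.set_lora_layer(LoRALinearLayer(32, 64, rank=4, network_alpha=4))
    assert torch.allclose(linear(x), base_out)  # LoRA branch starts as a no-op

    # Conv path: the LoRA down conv mirrors the base conv's kernel size and padding.
    conv = LoRACompatibleConv(8, 16, kernel_size=3, padding=1)
    img = torch.randn(2, 8, 20, 20)
    base_out = conv(img)
    conv.set_lora_layer(LoRAConv2dLayer(8, 16, rank=4, kernel_size=(3, 3), padding=1))
    assert torch.allclose(conv(img), base_out)

    print("LoRA layers attach cleanly and start as no-ops.")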