henry000 committed on
Commit 478977c · 2 Parent(s): 25d3c1c 3186e72

🔀 [Merge] branch 'MODEL' into TEST

Files changed (3)
  1. config/model/v7-base.yaml +1 -0
  2. model/module.py +26 -4
  3. model/yolo.py +14 -7
config/model/v7-base.yaml CHANGED
@@ -241,3 +241,4 @@ model:
         - [36,75, 76,55, 72,146] # P4/16
         - [142,110, 192,243, 459,401] # P5/32
       source: [102, 103, 104]
+      output: True
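The new output: True key flags this detection head (the layer fed by sources 102, 103, 104) as a network output that YOLO.forward now collects; see the yolo.py diff further down. As a minimal sketch, assuming an in-memory layer spec and that the flagged layer is the IDetect head (the real spec comes from config/model/v7-base.yaml via load_model_cfg), the flag is read with a False default:

# Hypothetical layer spec for illustration only.
layer_spec = {"IDetect": {"source": [102, 103, 104], "output": True}}

layer_type, layer_info = next(iter(layer_spec.items()))
source = layer_info.get("source", -1)
output = layer_info.get("output", False)  # stays False for layers without the key
assert (layer_type, source, output) == ("IDetect", [102, 103, 104], True)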
model/module.py CHANGED
@@ -11,10 +11,10 @@ class Conv(nn.Module):
         out_channels,
         kernel_size,
         stride=1,
-        padding=0,
+        padding=None,
         dilation=1,
         groups=1,
-        act=nn.ReLU(),
+        act=nn.SiLU(),
         bias=False,
         auto_padding=True,
         padding_mode="zeros",
@@ -48,10 +48,10 @@ class Conv(nn.Module):
 # RepVGG
 class RepConv(nn.Module):
     # https://github.com/DingXiaoH/RepVGG
-    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, groups=1, act=nn.ReLU()):
+    def __init__(self, in_channels, out_channels, kernel_size=3, padding=None, stride=1, groups=1, act=nn.SiLU(), deploy=False):

         super().__init__()
-
+        self.deploy = deploy
         self.conv1 = Conv(in_channels, out_channels, kernel_size, stride, groups=groups, act=False)
         self.conv2 = Conv(in_channels, out_channels, 1, stride, groups=groups, act=False)
         self.act = act if isinstance(act, nn.Module) else nn.Identity()
@@ -64,6 +64,28 @@ class RepConv(nn.Module):

     # to be implement
     # def fuse_convs(self):
+    def fuse_conv_bn(self, conv, bn):
+
+        std = (bn.running_var + bn.eps).sqrt()
+        bias = bn.bias - bn.running_mean * bn.weight / std
+
+        t = (bn.weight / std).reshape(-1, 1, 1, 1)
+        weights = conv.weight * t
+
+        bn = nn.Identity()
+        conv = nn.Conv2d(in_channels = conv.in_channels,
+                         out_channels = conv.out_channels,
+                         kernel_size = conv.kernel_size,
+                         stride=conv.stride,
+                         padding = conv.padding,
+                         dilation = conv.dilation,
+                         groups = conv.groups,
+                         bias = True,
+                         padding_mode = conv.padding_mode)
+
+        conv.weight = torch.nn.Parameter(weights)
+        conv.bias = torch.nn.Parameter(bias)
+        return conv


 # ResNet
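For reference, the batch-norm folding that the new RepConv.fuse_conv_bn performs can be checked numerically: scaling the conv weights by gamma / sqrt(running_var + eps) and moving the remaining shift into a bias reproduces conv followed by BatchNorm in eval mode. The snippet below is a self-contained sketch written for this review, not code from the commit, and it assumes the conv has no bias of its own (Conv layers here are built with bias=False):

import torch
import torch.nn as nn

def fold_bn_into_conv(conv: nn.Conv2d, bn: nn.BatchNorm2d) -> nn.Conv2d:
    # Same arithmetic as RepConv.fuse_conv_bn in this commit.
    std = (bn.running_var + bn.eps).sqrt()
    bias = bn.bias - bn.running_mean * bn.weight / std
    weights = conv.weight * (bn.weight / std).reshape(-1, 1, 1, 1)
    fused = nn.Conv2d(conv.in_channels, conv.out_channels, conv.kernel_size,
                      stride=conv.stride, padding=conv.padding, dilation=conv.dilation,
                      groups=conv.groups, bias=True, padding_mode=conv.padding_mode)
    fused.weight = nn.Parameter(weights)
    fused.bias = nn.Parameter(bias)
    return fused

conv = nn.Conv2d(8, 16, 3, padding=1, bias=False)
bn = nn.BatchNorm2d(16).eval()               # eval(): use running statistics
with torch.no_grad():
    bn.running_mean.uniform_(-1, 1)          # give BN non-trivial statistics
    bn.running_var.uniform_(0.5, 1.5)
    bn.weight.uniform_(0.5, 1.5)
    bn.bias.uniform_(-1, 1)
    x = torch.randn(2, 8, 32, 32)
    assert torch.allclose(bn(conv(x)), fold_bn_into_conv(conv, bn)(x), atol=1e-5)

This folding is what would let RepConv's training-time branches collapse into a single convolution at inference, which is presumably what the new deploy flag is preparing for.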
model/yolo.py CHANGED
@@ -1,10 +1,12 @@
-import torch.nn as nn
+import inspect
+from typing import Any, Dict, List, Union
+
 import torch
+import torch.nn as nn
 from loguru import logger
-from typing import Dict, Any, List, Union
-import inspect
-from utils.tools import load_model_cfg
+
 from model import module
+from utils.tools import load_model_cfg


 def get_layer_map():
@@ -46,6 +48,7 @@ class YOLO(nn.Module):
                 layer_type, layer_info = next(iter(layer_spec.items()))
                 layer_args = layer_info.get("args", {})
                 source = layer_info.get("source", -1)
+                output = layer_info.get("output", False)

                 if isinstance(source, str):
                     source = layer_indices_by_tag[source]
@@ -55,7 +58,7 @@ class YOLO(nn.Module):
                     layer_args["nc"] = self.nc
                     layer_args["ch"] = [output_dim[idx] for idx in source]

-                layer = self.create_layer(layer_type, source, **layer_args)
+                layer = self.create_layer(layer_type, source, output, **layer_args)
                 model_list.append(layer)

                 if "tags" in layer_info:
@@ -69,6 +72,7 @@ class YOLO(nn.Module):

     def forward(self, x):
         y = [x]
+        output = []
         for layer in self.model:
             if isinstance(layer.source, list):
                 model_input = [y[idx] for idx in layer.source]
@@ -76,7 +80,9 @@ class YOLO(nn.Module):
                 model_input = y[layer.source]
             x = layer(model_input)
             y.append(x)
-        return x
+            if layer.output:
+                output.append(x)
+        return output

     def get_out_channels(self, layer_type: str, layer_args: dict, output_dim: list, source: Union[int, list]):
         if "Conv" in layer_type:
@@ -88,10 +94,11 @@ class YOLO(nn.Module):
         if layer_type == "IDetect":
            return None

-    def create_layer(self, layer_type: str, source: Union[int, list], **kwargs):
+    def create_layer(self, layer_type: str, source: Union[int, list], output=False, **kwargs):
         if layer_type in self.layer_map:
             layer = self.layer_map[layer_type](**kwargs)
             layer.source = source
+            layer.output = output
             return layer
         else:
             raise ValueError(f"Unsupported layer type: {layer_type}")
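A behavioral note on the forward change: it previously returned the last layer's tensor, whereas it now returns a list holding the tensor of every layer whose spec sets output: True, so anything downstream that consumed the single returned tensor now has to take it from that list. A rough, hypothetical sketch of the new contract with two stand-in Conv2d layers (not the real YOLO module list; list-valued sources are omitted for brevity):

import torch
import torch.nn as nn

# Stand-in layers; create_layer in yolo.py attaches .source and .output the same way.
layers = nn.ModuleList([nn.Conv2d(3, 8, 3, padding=1), nn.Conv2d(8, 8, 3, padding=1)])
for idx, layer in enumerate(layers):
    layer.source = -1                       # previous tensor, the default source
    layer.output = idx == len(layers) - 1   # only the last layer is flagged here

def forward(x):
    y, output = [x], []
    for layer in layers:
        x = layer(y[layer.source])          # single int source only in this sketch
        y.append(x)
        if layer.output:
            output.append(x)
    return output

outs = forward(torch.randn(1, 3, 32, 32))
assert isinstance(outs, list) and len(outs) == 1
assert outs[0].shape == (1, 8, 32, 32)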