from typing import Dict, Optional

from torch import Tensor, nn
from transformers import PreTrainedModel

from .configuration_dptdepth import DPTDepthConfig
from .models import DPTDepthModel as DPTDepth


class DPTDepthModel(PreTrainedModel):
    """
    The line that sets the config_class is not mandatory,
    unless you want to register your model with the auto classes
    """

    config_class = DPTDepthConfig
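    # Registration with the auto classes is only needed for ``AutoModel``-style
    # loading; a minimal sketch (the "dptdepth" model_type string is an
    # assumption and must match ``DPTDepthConfig.model_type``):
    #
    #   from transformers import AutoConfig, AutoModelForDepthEstimation
    #   AutoConfig.register("dptdepth", DPTDepthConfig)
    #   AutoModelForDepthEstimation.register(DPTDepthConfig, DPTDepthModel)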

    def __init__(self, config: DPTDepthConfig):
        super().__init__(config)
        # Underlying DPT network, built with its default arguments
        # (the ``config`` values are not forwarded to it here).
        self.model = DPTDepth()
        # L1 loss between predicted and ground-truth depth maps.
        self.loss = nn.L1Loss()

    """
    You can have your model return anything you want, 
    but returning a dictionary with the loss included when labels are passed, 
    will make your model directly usable inside the Trainer class. 
    Using another output format is fine as long as you are planning on 
    using your own training loop or another library for training.
    """

    def forward(self, rgbs: Tensor, gts: Optional[Tensor] = None) -> Dict[str, Tensor]:
        logits = self.model(rgbs)
        if gts is not None:
            loss = self.loss(logits, gts)
            return {"loss": loss, "logits": logits}
        return {"logits": logits}