# File size: 3,908 Bytes
# 9f13819
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# NOTE(review): a run of stray pasted line numbers (1-97) — an extraction
# artifact, not code — was removed here.
import torch
import unittest
import math
import re
import warnings
from dataclasses import asdict, dataclass, field
from enum import Enum
from typing import List, Optional, Tuple, Union
import itertools
import copy

import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers.pytorch_utils import Conv1D
from .gating import GATING_TO_MODEL_MAPPING

from ..import_utils import is_bnb_4bit_available, is_bnb_available
from ..utils import (
    COMMON_LAYERS_PATTERN,
    TRANSFORMERS_MODELS_TO_LORA_TARGET_MODULES_MAPPING,
    ModulesToSaveWrapper,
    PeftConfig,
    PeftType,
    _freeze_adapter,
    _get_submodules,
    transpose,
)

if is_bnb_available():
    import bitsandbytes as bnb
from moelora import *

class TestMoELoRA(unittest.TestCase):
    """Shape-preservation tests for a MoELoRA layer.

    Each test feeds a random ``(10, 20, 30)`` tensor through ``forward`` and
    asserts the output keeps the input's shape.

    NOTE(review): this module re-defines ``TestMoELoRA`` further down with a
    superset of these tests, so this copy is shadowed at import time —
    consider deleting one of the two definitions. The premature
    ``unittest.main()`` call that used to follow this class (fused with a
    stray ``import torch``, which made the file a syntax error) has been
    removed; the real entry point at the bottom of the file remains.
    """

    def setUp(self):
        # ``MoELoRA`` is provided by the wildcard ``from moelora import *``
        # at the top of the file. Presumably a no-argument construction
        # yields a usable layer — TODO confirm against the moelora module.
        self.model = MoELoRA()

    def test_forward_no_adapters(self):
        """forward() with no active adapter preserves the input shape."""
        x = torch.randn(10, 20, 30)
        output = self.model.forward(x)
        self.assertEqual(output.shape, (10, 20, 30))

    def test_forward_with_adapters(self):
        """forward() with an active adapter preserves the input shape."""
        x = torch.randn(10, 20, 30)
        self.model.active_adapter = 'adapter1'
        output = self.model.forward(x)
        self.assertEqual(output.shape, (10, 20, 30))

    def test_forward_with_global_user_embeds(self):
        """forward() with global user embeddings preserves the input shape."""
        x = torch.randn(10, 20, 30)
        self.model.active_adapter = 'adapter1'
        # Presumably one (batch, dim) embedding tensor per batch — TODO confirm.
        self.model.global_user_embeds = [torch.randn(10, 30)]
        output = self.model.forward(x)
        self.assertEqual(output.shape, (10, 20, 30))
import unittest

class TestMoELoRA(unittest.TestCase):
    """Shape-preservation tests for the MoELoRA layer.

    Every case pushes a random ``(10, 20, 30)`` batch through ``forward``
    and verifies the output comes back with the same shape, across the
    adapter / global-user-embedding configurations exercised below.
    """

    # Shared input shape for all cases (torch.Size compares equal to a tuple).
    _SHAPE = (10, 20, 30)

    def setUp(self):
        # ``MoELoRA`` is provided by the wildcard import at the top of the file.
        self.model = MoELoRA()

    def _forward_random_batch(self):
        """Push one random batch through the model and return the result."""
        batch = torch.randn(*self._SHAPE)
        return self.model.forward(batch)

    def test_forward_no_adapters(self):
        """Without any active adapter the input shape is preserved."""
        out = self._forward_random_batch()
        self.assertEqual(out.shape, self._SHAPE)

    def test_forward_with_adapters(self):
        """With an active adapter the input shape is preserved."""
        self.model.active_adapter = 'adapter1'
        out = self._forward_random_batch()
        self.assertEqual(out.shape, self._SHAPE)

    def test_forward_with_global_user_embeds(self):
        """Batch-matched (10, 30) user embeddings leave the shape unchanged."""
        self.model.active_adapter = 'adapter1'
        self.model.global_user_embeds = [torch.randn(10, 30)]
        out = self._forward_random_batch()
        self.assertEqual(out.shape, self._SHAPE)

    def test_forward_with_global_user_embeds_exception(self):
        """Mismatched (5, 30) user embeddings still yield the input shape."""
        self.model.active_adapter = 'adapter1'
        self.model.global_user_embeds = [torch.randn(5, 30)]
        out = self._forward_random_batch()
        self.assertEqual(out.shape, self._SHAPE)

    def test_forward_no_global_user_embeds(self):
        """An empty global_user_embeds list leaves the output shape unchanged."""
        self.model.active_adapter = 'adapter1'
        self.model.global_user_embeds = []
        out = self._forward_random_batch()
        self.assertEqual(out.shape, self._SHAPE)


if __name__ == '__main__':
    unittest.main()