Upload AutoEncoder
Browse files
- model.safetensors +1 -1
- modeling_autoencoder.py +5 -1
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
-
oid sha256:
|
3 |
size 133840
|
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:416ef0961369dfbc2337037fa0a017862a9d07f7fba1bce8e0a1fd09af91b433
|
3 |
size 133840
|
modeling_autoencoder.py
CHANGED
@@ -100,6 +100,9 @@ class AutoEncoderConfig(PretrainedConfig):
|
|
100 |
embed: bool = False,
|
101 |
vocab_size: int|bool = False,
|
102 |
max_position: int|bool = False,
|
|
|
|
|
|
|
103 |
**kwargs
|
104 |
):
|
105 |
super().__init__(**kwargs)
|
@@ -228,7 +231,8 @@ class AutoEncoder(PreTrainedModel):
|
|
228 |
if config.embed:
|
229 |
# Word Embeddings
|
230 |
self.word_embeddings = nn.Embedding(config.vocab_size,
|
231 |
-
config.input_dim,
|
|
|
232 |
# Positional Embeddings
|
233 |
self.position_embeddings = nn.Embedding(config.max_position,
|
234 |
config.input_dim,)
|
|
|
100 |
embed: bool = False,
|
101 |
vocab_size: int|bool = False,
|
102 |
max_position: int|bool = False,
|
103 |
+
pad_token_id: int = 0,
|
104 |
+
bos_token_id: int = 1,
|
105 |
+
eos_token_id: int = 2,
|
106 |
**kwargs
|
107 |
):
|
108 |
super().__init__(**kwargs)
|
|
|
231 |
if config.embed:
|
232 |
# Word Embeddings
|
233 |
self.word_embeddings = nn.Embedding(config.vocab_size,
|
234 |
+
config.input_dim,
|
235 |
+
config.pad_token_id,)
|
236 |
# Positional Embeddings
|
237 |
self.position_embeddings = nn.Embedding(config.max_position,
|
238 |
config.input_dim,)
|