Update llama_customized_models.py
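Adds temporary debug prints to LlamaForCausalLMWithNumericalEmbedding.forward so that input_ids, the token embeddings, the raw property values, the computed numerical embeddings, and the embedding matrix after in-place replacement can be inspected at runtime.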
llama_customized_models.py CHANGED
```diff
@@ -129,17 +129,22 @@ class LlamaForCausalLMWithNumericalEmbedding(LlamaForCausalLM):
         b, l = input_ids.size()
         assert len(properties) == b, "The number of properties should be equal to the batch size."
         assert len(properties_index) == b, "The number of properties_index should be equal to the batch size."
-
+
+        print(input_ids, "input_ids")
         embeddings = self.model.embed_tokens(input_ids)
+        print(embeddings, "embeddings")
 
         for i, (props, props_index, embeds) in enumerate(zip(properties, properties_index, embeddings)):
             assert len(props) == len(props_index), "The number of properties should be equal to the number of properties_index."
             props = torch.tensor(props, device=embeds.device, dtype=torch.float32).unsqueeze(1)
+            print(props, "props")
             num_embeds = self.numerical_embedding(props)
+            print(num_embeds, "num_embeds")
             if len(props_index) > 0:
                 assert embeddings[i, props_index, :].shape == num_embeds.shape, "The shape of the embeddings and the numerical embeddings should be the same."
                 embeddings[i, props_index, :] = num_embeds
+                print(embeddings, "embedding_after")
-
+
         return super().forward(
             input_ids=None,
             attention_mask=attention_mask,
```
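For context, here is a minimal sketch of the class this diff patches, reconstructed from the visible forward() body. Only the lines shown in the diff are confirmed; the numerical_embedding definition (a single Linear layer from one scalar to hidden_size), the full forward signature, and the inputs_embeds pass-through to the parent forward are assumptions.

```python
import torch
import torch.nn as nn
from transformers import LlamaForCausalLM


class LlamaForCausalLMWithNumericalEmbedding(LlamaForCausalLM):
    def __init__(self, config):
        super().__init__(config)
        # Assumed: projects one raw float property to a hidden_size vector.
        self.numerical_embedding = nn.Linear(1, config.hidden_size)

    def forward(self, input_ids=None, attention_mask=None,
                properties=None, properties_index=None, **kwargs):
        b, l = input_ids.size()
        assert len(properties) == b
        assert len(properties_index) == b

        # Look up ordinary token embeddings, then overwrite the rows at
        # properties_index with embeddings computed from the raw numbers.
        embeddings = self.model.embed_tokens(input_ids)
        for i, (props, props_index, embeds) in enumerate(
                zip(properties, properties_index, embeddings)):
            assert len(props) == len(props_index)
            props = torch.tensor(props, device=embeds.device,
                                 dtype=torch.float32).unsqueeze(1)  # (n, 1)
            num_embeds = self.numerical_embedding(props)            # (n, hidden)
            if len(props_index) > 0:
                embeddings[i, props_index, :] = num_embeds

        # Pass the edited embedding matrix instead of token ids. The call in
        # the diff is truncated after attention_mask, so inputs_embeds here
        # is an assumption about what follows.
        return super().forward(
            input_ids=None,
            attention_mask=attention_mask,
            inputs_embeds=embeddings,
            **kwargs,
        )
```

Usage-wise, properties[i] would carry the raw float values for sequence i and properties_index[i] the token positions whose embeddings get overwritten, so a single-sequence batch with two numbers might look like this (token positions and values are made up for illustration):

```python
# Hypothetical call; placeholder-token positions 4 and 9 are illustrative.
out = model(
    input_ids=input_ids,          # (1, seq_len) with placeholder tokens
    attention_mask=attention_mask,
    properties=[[0.35, 1.72]],    # two raw float properties
    properties_index=[[4, 9]],    # positions of their placeholder tokens
)
```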