"""Word transformation function using an encoder layer and an attention mechanism."""

import numpy as np
from matrices import construir_matriz_Q, generar_k_transpuesta_y_v
from attention import encoder_layer, calcular_pesos_proyeccion, scaled_dot_product_attention_2D

def transform_word(word):
    """Map each character of `word` to a single "significant number"."""
    significant_numbers = []
    for char in word:
        # Build the query matrix for this character.
        Q = construir_matriz_Q(char)
        # Derive the transposed key matrix and the value matrix from Q.
        K_transpose, V = generar_k_transpuesta_y_v(Q, char)
        # Apply scaled dot-product attention to get an initial output.
        output = scaled_dot_product_attention_2D(Q, K_transpose, V)
        # Compute projection weights from the attention output.
        Wq, Wk, Wv = calcular_pesos_proyeccion(output)
        # Run the encoder layer with those projection weights.
        encoder_output = encoder_layer(Q, K_transpose, V, Wq, Wk, Wv)
        # Collapse the encoder output to a scalar: the sum of row-wise maxima.
        significant_number = np.sum(np.max(encoder_output, axis=1))
        significant_numbers.append(significant_number)
    return significant_numbers
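

# A minimal usage sketch, not part of the original file: it assumes
# matrices.py and attention.py sit alongside this module and that
# transform_word accepts any plain string.
if __name__ == "__main__":
    word = "hola"
    numbers = transform_word(word)
    for char, number in zip(word, numbers):
        print(f"{char!r} -> {number}")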