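# Configuration for a decoder-only Transformer language model (~152M parameters).
# The ALL_CAPS names below are model-size hyperparameters (gin-style macros).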
NUM_EMBEDDINGS = 1024

# Number of parameters = 152M
NUM_LAYERS
EMBED_DIM
NUM_HEADS
HEAD_DIM
MLP_DIM
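
# Per-layer attention settings: heads, attention window, Transformer-XL style
# recurrence, relative positions, dropout, and external-memory retrieval.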
transformer_layer.TransformerLayerGenerate:
  num_heads
  head_size
  window_length
  use_long_xl_architecture
  max_unrolled_windows
  relative_position_type
  use_causal_mask
  attn_dropout_rate
  memory_num_neighbors
  dtype
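
# Decoder stack: depth, token embeddings, final layernorm/MLP, and which layer
# indices use recurrence and the external memory.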
decoder_stack.DecoderStackGenerate:
  num_layers
  embedding_size
  embedding_stddev
  layer_factory
  dstack_window_length
  use_absolute_positions
  use_final_layernorm
  final_dropout_rate
  final_mlp_factory
  recurrent_layer_indices
  memory_factory
  memory_layer_indices
  dtype
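
# Decoder-only language model wrapper: binds the decoder stack to the task.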
models.DecoderOnlyLanguageModelGenerate:
  num_heads
  head_size
  task_config
  decoder_factory
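
# Training loop: the model definition to train.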
training_loop.Trainer:
  model_definition
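
# A config in this form is typically consumed via gin-config, e.g. (sketch; the
# exact entry point and file name depend on the codebase):
#
#   import gin
#   gin.parse_config_file("decoder_only_lm.gin")  # hypothetical file name
#   trainer = training_loop.Trainer()  # remaining arguments come from the gin bindings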