{
  "architectures": [
    "Isoformer"
  ],
  "enformer_attn_dim_key": 64,
  "enformer_attn_dropout": 0.05,
  "enformer_depth": 11,
  "enformer_dim": 1536,
  "enformer_dim_divisible_by": 128,
  "enformer_dropout_rate": 0.4,
  "enformer_heads": 8,
  "enformer_num_downsamples": 7,
  "enformer_output_heads": {
    "human": 5313,
    "mouse": 1643
  },
  "enformer_pos_dropout": 0.01,
  "enformer_target_length": -1,
  "enformer_use_checkpointing": false,
  "enformer_use_convnext": false,
  "enformer_use_tf_gamma": false,
  "esm_add_bias_fnn": true,
  "esm_attention_probs_dropout_prob": 0.0,
  "esm_emb_layer_norm_before": false,
  "esm_hidden_dropout_prob": 0.0,
  "esm_hidden_size": 640,
  "esm_intermediate_size": 2560,
  "esm_mask_token_id": 32,
  "esm_max_position_embeddings": 1026,
  "esm_num_attention_heads": 20,
  "esm_num_hidden_layers": 30,
  "esm_pad_token_id": 1,
  "esm_position_embedding_type": "rotary",
  "esm_tie_word_embeddings": false,
  "esm_token_dropout": true,
  "esm_use_cache": false,
  "esm_vocab_size": 33,
  "model_type": "isoformer",
  "nt_add_bias_fnn": false,
  "nt_attention_probs_dropout_prob": 0.0,
  "nt_emb_layer_norm_before": false,
  "nt_hidden_dropout_prob": 0.0,
  "nt_hidden_size": 768,
  "nt_intermediate_size": 3072,
  "nt_mask_token_id": 2,
  "nt_max_position_embeddings": 2050,
  "nt_num_attention_heads": 16,
  "nt_num_hidden_layers": 24,
  "nt_pad_token_id": 1,
  "nt_position_embedding_type": "rotary",
  "nt_tie_word_embeddings": false,
  "nt_token_dropout": false,
  "nt_use_cache": false,
  "nt_vocab_size": 4107,
  "num_heads_omics_cross_attention": 8,
  "num_protein_tokens_per_seq": 1200,
  "num_tokens_per_seq_nuctf": 196608,
  "num_tokens_per_seq_nuctf_rna": 2048,
  "torch_dtype": "float32",
  "transformers_version": "4.29.2"
}
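A minimal sketch of consuming this config, assuming it is saved as config.json
next to the script. The file name and the derived bin arithmetic (each Enformer
downsample halving the sequence length) are assumptions based on standard
Enformer conventions, not statements from the config itself.

import json

# Load the Isoformer config shown above (path is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

# Assumption: each of the 7 conv downsamples halves the DNA sequence,
# so the Enformer branch bins 2**7 = 128 bp per output position.
bp_per_bin = 2 ** cfg["enformer_num_downsamples"]

# 196608 bp input window // 128 bp per bin = 1536 genomic bins.
n_bins = cfg["num_tokens_per_seq_nuctf"] // bp_per_bin

print(f"{bp_per_bin} bp per bin, {n_bins} bins across the input window")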