{ "architectures": [ "AutoModelForSeq2SeqLM" ], "model_type": "fsmt", "activation_dropout": 0.0, "activation_function": "relu", "attention_dropout": 0.0, "d_model": 256, "dropout": 0.3, "init_std": 0.02, "max_position_embeddings": 1024, "num_hidden_layers": 2, "src_vocab_size": 1000, "tgt_vocab_size": 1000, "langs": [ "src", "trg" ], "encoder_attention_heads": 4, "encoder_ffn_dim": 1024, "encoder_layerdrop": 0, "encoder_layers": 2, "decoder_attention_heads": 8, "decoder_ffn_dim": 1024, "decoder_layerdrop": 0, "decoder_layers": 4, "bos_token_id": 0, "pad_token_id": 1, "eos_token_id": 2, "unk_token_id": 3, "is_encoder_decoder": true, "scale_embedding": true, "tie_word_embeddings": true, "num_beams": 5, "early_stopping": false, "length_penalty": 1.0 }