{ "_name_or_path": "hf-internal-testing/tiny-random-t5", "bos_token_id": 0, "classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "eos_token_id": 1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": { "auto_cast": "matmul", "auto_cast_type": "bf16", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "dynamic_batch_size": false, "inline_weights_to_neff": false, "input_names": [ "decoder_input_ids", "decoder_attention_mask", "encoder_hidden_states", "attention_mask", "beam_idx", "beam_scores" ], "model_type": "t5-decoder", "optlevel": "2", "output_attentions": true, "output_hidden_states": true, "output_names": [ "next_token_scores", "next_tokens", "next_indices", "past.0.self.key", "past.1.self.key", "past.2.self.key", "past.3.self.key", "past.4.self.key", "past.0.self.value", "past.1.self.value", "past.2.self.value", "past.3.self.value", "past.4.self.value", "past.0.cross.key", "past.1.cross.key", "past.2.cross.key", "past.3.cross.key", "past.4.cross.key", "past.0.cross.value", "past.1.cross.value", "past.2.cross.value", "past.3.cross.value", "past.4.cross.value", "decoder_hidden_state.0", "decoder_hidden_state.1", "decoder_hidden_state.2", "decoder_hidden_state.3", "decoder_hidden_state.4", "decoder_hidden_state.5", "decoder_attention.0", "decoder_attention.1", "decoder_attention.2", "decoder_attention.3", "decoder_attention.4", "cross_attention.0", "cross_attention.1", "cross_attention.2", "cross_attention.3", "cross_attention.4" ], "static_batch_size": 1, "static_num_beams": 4, "static_sequence_length": 18, "tensor_parallel_size": 1 }, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "pad_token_id": 0, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "text2text-generation", "torchscript": true, "transformers_version": "4.43.2", "use_cache": true, "vocab_size": 1103 }