{
  "torch_dtype": "float32",
  "architectures": [
    "xLSTMForCausalLM"
  ],
  "transformers_version": "4.44.0",
  "_xlstm_config": {
    "num_blocks": 2,
    "embedding_dim": 64,
    "mlstm_block": {
      "mlstm": {
        "num_heads": 1
      }
    },
    "slstm_block": {
      "slstm": {
        "num_heads": 1
      }
    },
    "slstm_at": [
      1
    ],
    "context_length": 256,
    "vocab_size": 32000
  },
  "vocab_size": 32000,
  "embedding_dim": 64,
  "context_length": 256,
  "model_type": "xlstm",
  "auto_map": {
    "AutoConfig": "configuration_xlstm.xLSTMConfig",
    "AutoModelForCausalLM": "modeling_xlstm.xLSTMForCausalLM",
    "AutoModel": "modeling_xlstm.xLSTMModel"
  }
}