{ "architectures": [ "DIT" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_dit.DITConfig", "AutoModelForMaskedLM": "modeling_dit.DIT" }, "hidden_size": 1024, "max_seq_len": 512, "model_type": "dit", "num_attention_heads": 16, "num_hidden_layers": 24, "p_uniform": 0.0, "t_eps": 0.0001, "timestep_cond_dim": 128, "torch_dtype": "float32", "transformers_version": "4.49.0", "vocab_size": 50258 }