{
  "_class_name": "Transformer2DModel",
  "_diffusers_version": "0.19.3",
  "activation_fn": "geglu",
  "attention_bias": true,
  "attention_head_dim": 16,
  "cross_attention_dim": null,
  "dropout": 0.1,
  "in_channels": 3,
  "norm_elementwise_affine": true,
  "norm_num_groups": 32,
  "norm_type": "ada_norm_zero",
  "num_attention_heads": 4,
  "num_embeds_ada_norm": 100,
  "num_layers": 1,
  "num_vector_embeds": null,
  "only_cross_attention": false,
  "out_channels": 3,
  "patch_size": 16,
  "sample_size": 256,
  "upcast_attention": false,
  "use_linear_projection": false
}
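
For reference, a minimal sketch (not part of this repository) of how a config like this can be turned into a model with the diffusers library. The local file name "config.json" and the dummy inputs below are assumptions for illustration only.

import json
import torch
from diffusers import Transformer2DModel

# Load the configuration dictionary; from_config ignores the bookkeeping
# keys (_class_name, _diffusers_version) automatically.
with open("config.json") as f:
    config = json.load(f)

model = Transformer2DModel.from_config(config)

# norm_type "ada_norm_zero" conditions each block on a timestep and a
# class label (num_embeds_ada_norm = 100 classes in this config).
sample = torch.randn(1, 3, 256, 256)   # (batch, in_channels, sample_size, sample_size)
timestep = torch.tensor([10])
class_labels = torch.tensor([0])

out = model(sample, timestep=timestep, class_labels=class_labels).sample
print(out.shape)                        # (1, 3, 256, 256) given out_channels = 3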