{
  "_class_name": "PriorTransformer",
  "_diffusers_version": "0.17.0.dev0",
  "additional_embeddings": 0,
  "attention_head_dim": 64,
  "clip_embedding_dim": 768,
  "dropout": 0.0,
  "embedding_dim": 1024,
  "num_attention_heads": 16,
  "num_embeddings": 1024,
  "num_layers": 24,
  "out_dim": 2048,
  "time_embed_dim": 4096,
  "time_embed_act_fn": "gelu"
}
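
A configuration like this is normally consumed through diffusers' config machinery rather than parsed by hand. The sketch below is an illustration, not taken from the source: it instantiates a randomly initialized `PriorTransformer` from the dictionary above via `from_config`. The local file name `config.json` is an assumption; for trained weights you would instead call `PriorTransformer.from_pretrained(...)` on the repository that ships this file.

```python
import json

from diffusers import PriorTransformer

# A minimal sketch, assuming the JSON shown above has been saved locally
# as "config.json" (the file name is an assumption, not from the source).
with open("config.json") as f:
    config = json.load(f)

# from_config builds a randomly initialized PriorTransformer with this
# architecture; config keys the installed diffusers version does not
# recognize are ignored with a warning rather than raising an error.
prior = PriorTransformer.from_config(config)

print(prior.config)                                # echoes the values above
print(sum(p.numel() for p in prior.parameters()))  # total parameter count
```

For a trained model, the usual path is `PriorTransformer.from_pretrained(repo_id, subfolder="prior")` against the checkpoint repository containing this `config.json`, which loads the configuration together with the matching weights.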