yiyixuxu committed
Commit: aba565c
1 Parent(s): 46ce9fc

update prior

Browse files:
- prior/config.json +9 -5
- prior/diffusion_pytorch_model.bin +2 -2
prior/config.json CHANGED
@@ -1,15 +1,19 @@
 {
   "_class_name": "PriorTransformer",
-  "_diffusers_version": "0.
+  "_diffusers_version": "0.18.0.dev0",
+  "added_emb_type": null,
   "additional_embeddings": 0,
   "attention_head_dim": 64,
-  "
+  "clip_embed_dim": 2048,
   "dropout": 0.0,
   "embedding_dim": 1024,
+  "embedding_proj_dim": 768,
+  "embedding_proj_norm_type": null,
+  "encoder_hid_proj_type": null,
+  "norm_in_type": "layer",
   "num_attention_heads": 16,
   "num_embeddings": 1024,
   "num_layers": 24,
-  "
-  "time_embed_dim": 4096
-  "time_embed_act_fn": "gelu"
+  "time_embed_act_fn": "gelu",
+  "time_embed_dim": 4096
 }
prior/diffusion_pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:df21cb49c7f10eb02f6ce485a59c86601d03707b80d715335f0be6be89b1226e
+size 1262937295
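
The keys added in the new prior/config.json (added_emb_type, clip_embed_dim, embedding_proj_dim, embedding_proj_norm_type, encoder_hid_proj_type, norm_in_type, time_embed_act_fn, time_embed_dim) correspond to constructor arguments of diffusers' PriorTransformer, so the updated weights load directly with diffusers >= 0.18. A minimal loading sketch, assuming a placeholder repository id since this page does not show the repo name:

    # Minimal sketch: load the updated prior from the "prior" subfolder of the Hub repo.
    # "<repo-id>" is a placeholder for the repository this commit belongs to.
    import torch
    from diffusers import PriorTransformer

    prior = PriorTransformer.from_pretrained(
        "<repo-id>",           # placeholder repository id (not shown on this page)
        subfolder="prior",     # the folder updated by this commit
        torch_dtype=torch.float32,
    )
    print(prior.config.clip_embed_dim)  # 2048, per the new config.json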