hma/data/mar_ckpt/langtable/config.json
{
"Diffusion": true,
"S": 1024,
"T": 12,
"action_contrastive_loss": false,
"action_domains": [
"language_table"
],
"action_loss_weight": 0.5,
"action_network": "conconcat+modulatecat",
"action_stats": [
[
[
0.00014842326345387846,
-0.0005635050474666059
],
[
0.030163198709487915,
0.042305462062358856
]
]
],
"action_token_size": 64,
"arch": "STTransformerDecoder",
"attn_drop": 0.1,
"attn_dropout": 0.1,
"buffer_size": 64,
"d_action": 28,
"d_actions": [
2
],
"d_model": 256,
"dataloader_apply_corruption": false,
"dataloader_apply_mask": true,
"dataloader_mask_ratio_min": 0.1,
"diffloss_d": 4,
"diffloss_w": 1024,
"diffusion_batch_mul": 1,
"dim": 512,
"drop_action_ratio": 0.0,
"factored_vocab_size": 512,
"grad_checkpointing": false,
"image_vocab_size": null,
"init_actions": true,
"jointly_predict_actions": false,
"jointly_predict_states": true,
"label_drop_prob": 0.5,
"mask_ratio_min": 0.7,
"maskgit_steps": 16,
"max_corrupt_rate": 0.2,
"mlp_bias": false,
"mlp_drop": 0.05,
"mlp_ratio": 4.0,
"non_mlm_ratio": 0.2,
"num_factored_vocabs": 2,
"num_heads": 8,
"num_layers": 32,
"num_prompt_frames": 4,
"num_sampling_steps": "100",
"patch_size": 2,
"predict_unmask": false,
"proj_bias": true,
"proj_dropout": 0.1,
"qk_norm": false,
"qkv_bias": true,
"random_dummy_action": true,
"shared_action_mlps": true,
"use_actions": true,
"use_mup": false,
"vae_embed_dim": 4,
"vae_stride": 1
}
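
For reference, here is a minimal sketch of how a config like this might be loaded and its `action_stats` applied for action normalization. It assumes, based on the shapes in the file (`d_actions: [2]` and two pairs of two values), that each `action_stats` entry holds `[means, stds]` for the matching `action_domains` entry; the file path and the `normalize_action` helper are hypothetical, not part of the HMA codebase.

```python
import json

# Hypothetical local path; adjust to wherever the checkpoint files live.
CONFIG_PATH = "data/mar_ckpt/langtable/config.json"

with open(CONFIG_PATH) as f:
    cfg = json.load(f)

# Assumption: action_stats[i] == [per-dim means, per-dim stds] for
# action_domains[i], with dimensionality given by d_actions[i]
# (here: 2-D actions for the "language_table" domain).
domain_idx = cfg["action_domains"].index("language_table")
means, stds = cfg["action_stats"][domain_idx]

def normalize_action(action):
    """Z-score an action vector using the stored per-dimension stats."""
    return [(a - m) / s for a, m, s in zip(action, means, stds)]

print(normalize_action([0.01, -0.02]))
```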