{ "experiment": { "tokenizer_checkpoint": "titok_bl128_vae_c16.bin", "output_dir": "titok_bl128_vae_c16" }, "model": { "vq_model": { "quantize_mode": "vae", "token_size": 16, "vit_enc_model_size": "base", "vit_dec_model_size": "large", "vit_enc_patch_size": 16, "vit_dec_patch_size": 16, "num_latent_tokens": 128, "finetune_decoder": false, "is_legacy": false } }, "dataset": { "preprocessing": { "crop_size": 256 } } }