{
"vision_config": {
"patch_size": 16,
"width": 768,
"layers": 12,
"heads": 12,
"mlp_ratio": 4.0,
"output_dim": 1024,
"ls_init_value": null,
"drop_path": 0.0,
"image_size": 224,
"use_abs_posemb": true,
"use_cls_token": true,
"use_rope2d": true,
"pool_type": "attn",
"attn_pooler_heads": 8,
"use_ln_pre": true,
"use_ln_post": true
},
"text_config": {
"context_length": 32,
"width": 1024,
"heads": 16,
"layers": 24,
"output_dim": 1024,
"mlp_ratio": 4.0,
"vocab_size": 49408
}
}