Models
GitHub
Discord
Turbo
Sign in
Download
Models
Download
GitHub
Discord
Sign in
srizon
/
pixie
:latest
3,541
Downloads
Updated
1 year ago
Pixie is a combined model powered by dolphin-llama3 and llava that can break complex problems into smaller pieces and find the best solutions using its own pattern. Not only text-based, it can read images as well.
Pixie is a combined model powered by dolphin-llama3 and llava that can break complex problems into smaller pieces and find the best solutions using its own pattern. Not only text-based, it can read images as well.
Cancel
vision
pixie:latest
...
/
projector
72d6f08a42f6 · 624MB
Metadata
general.architecture
clip
clip
general.file_type
F16
F16
clip.has_llava_projector
true
true
clip.has_text_encoder
false
false
clip.has_vision_encoder
true
true
clip.projector_type
mlp
mlp
clip.use_gelu
false
false
clip.vision.attention.head_count
16
16
clip.vision.attention.layer_norm_epsilon
1e-05
1e-05
clip.vision.block_count
23
23
clip.vision.embedding_length
1024
1024
clip.vision.feed_forward_length
4096
4096
clip.vision.image_mean
[0.48145467, 0.4578275, 0.40821072]
[0.48145467, 0.4578275, 0.40821072]
clip.vision.image_size
336
336
clip.vision.image_std
[0.26862955, 0.2613026, 0.2757771]
[0.26862955, 0.2613026, 0.2757771]
clip.vision.patch_size
14
14
clip.vision.projection_dim
768
768
Tensor
Name
Type
Shape
mm.0.bias
F32
F32
[4096]
mm.0.weight
F16
F16
[1024, 4096]
mm.2.bias
F32
F32
[4096]
mm.2.weight
F16
F16
[4096, 4096]
v.blk.0
v.blk.0.attn_k.bias
F32
F32
[1024]
v.blk.0.attn_k.weight
F16
F16
[1024, 1024]
v.blk.0.attn_out.bias
F32
F32
[1024]
v.blk.0.attn_out.weight
F16
F16
[1024, 1024]
v.blk.0.attn_q.bias
F32
F32
[1024]
v.blk.0.attn_q.weight
F16
F16
[1024, 1024]
v.blk.0.attn_v.bias
F32
F32
[1024]
v.blk.0.attn_v.weight
F16
F16
[1024, 1024]
v.blk.0.ffn_down.bias
F32
F32
[4096]
v.blk.0.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.0.ffn_up.bias
F32
F32
[1024]
v.blk.0.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.0.ln1.bias
F32
F32
[1024]
v.blk.0.ln1.weight
F32
F32
[1024]
v.blk.0.ln2.bias
F32
F32
[1024]
v.blk.0.ln2.weight
F32
F32
[1024]
v.blk.1
v.blk.1.attn_k.bias
F32
F32
[1024]
v.blk.1.attn_k.weight
F16
F16
[1024, 1024]
v.blk.1.attn_out.bias
F32
F32
[1024]
v.blk.1.attn_out.weight
F16
F16
[1024, 1024]
v.blk.1.attn_q.bias
F32
F32
[1024]
v.blk.1.attn_q.weight
F16
F16
[1024, 1024]
v.blk.1.attn_v.bias
F32
F32
[1024]
v.blk.1.attn_v.weight
F16
F16
[1024, 1024]
v.blk.1.ffn_down.bias
F32
F32
[4096]
v.blk.1.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.1.ffn_up.bias
F32
F32
[1024]
v.blk.1.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.1.ln1.bias
F32
F32
[1024]
v.blk.1.ln1.weight
F32
F32
[1024]
v.blk.1.ln2.bias
F32
F32
[1024]
v.blk.1.ln2.weight
F32
F32
[1024]
v.blk.2
v.blk.2.attn_k.bias
F32
F32
[1024]
v.blk.2.attn_k.weight
F16
F16
[1024, 1024]
v.blk.2.attn_out.bias
F32
F32
[1024]
v.blk.2.attn_out.weight
F16
F16
[1024, 1024]
v.blk.2.attn_q.bias
F32
F32
[1024]
v.blk.2.attn_q.weight
F16
F16
[1024, 1024]
v.blk.2.attn_v.bias
F32
F32
[1024]
v.blk.2.attn_v.weight
F16
F16
[1024, 1024]
v.blk.2.ffn_down.bias
F32
F32
[4096]
v.blk.2.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.2.ffn_up.bias
F32
F32
[1024]
v.blk.2.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.2.ln1.bias
F32
F32
[1024]
v.blk.2.ln1.weight
F32
F32
[1024]
v.blk.2.ln2.bias
F32
F32
[1024]
v.blk.2.ln2.weight
F32
F32
[1024]
v.blk.3
v.blk.3.attn_k.bias
F32
F32
[1024]
v.blk.3.attn_k.weight
F16
F16
[1024, 1024]
v.blk.3.attn_out.bias
F32
F32
[1024]
v.blk.3.attn_out.weight
F16
F16
[1024, 1024]
v.blk.3.attn_q.bias
F32
F32
[1024]
v.blk.3.attn_q.weight
F16
F16
[1024, 1024]
v.blk.3.attn_v.bias
F32
F32
[1024]
v.blk.3.attn_v.weight
F16
F16
[1024, 1024]
v.blk.3.ffn_down.bias
F32
F32
[4096]
v.blk.3.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.3.ffn_up.bias
F32
F32
[1024]
v.blk.3.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.3.ln1.bias
F32
F32
[1024]
v.blk.3.ln1.weight
F32
F32
[1024]
v.blk.3.ln2.bias
F32
F32
[1024]
v.blk.3.ln2.weight
F32
F32
[1024]
v.blk.4
v.blk.4.attn_k.bias
F32
F32
[1024]
v.blk.4.attn_k.weight
F16
F16
[1024, 1024]
v.blk.4.attn_out.bias
F32
F32
[1024]
v.blk.4.attn_out.weight
F16
F16
[1024, 1024]
v.blk.4.attn_q.bias
F32
F32
[1024]
v.blk.4.attn_q.weight
F16
F16
[1024, 1024]
v.blk.4.attn_v.bias
F32
F32
[1024]
v.blk.4.attn_v.weight
F16
F16
[1024, 1024]
v.blk.4.ffn_down.bias
F32
F32
[4096]
v.blk.4.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.4.ffn_up.bias
F32
F32
[1024]
v.blk.4.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.4.ln1.bias
F32
F32
[1024]
v.blk.4.ln1.weight
F32
F32
[1024]
v.blk.4.ln2.bias
F32
F32
[1024]
v.blk.4.ln2.weight
F32
F32
[1024]
v.blk.5
v.blk.5.attn_k.bias
F32
F32
[1024]
v.blk.5.attn_k.weight
F16
F16
[1024, 1024]
v.blk.5.attn_out.bias
F32
F32
[1024]
v.blk.5.attn_out.weight
F16
F16
[1024, 1024]
v.blk.5.attn_q.bias
F32
F32
[1024]
v.blk.5.attn_q.weight
F16
F16
[1024, 1024]
v.blk.5.attn_v.bias
F32
F32
[1024]
v.blk.5.attn_v.weight
F16
F16
[1024, 1024]
v.blk.5.ffn_down.bias
F32
F32
[4096]
v.blk.5.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.5.ffn_up.bias
F32
F32
[1024]
v.blk.5.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.5.ln1.bias
F32
F32
[1024]
v.blk.5.ln1.weight
F32
F32
[1024]
v.blk.5.ln2.bias
F32
F32
[1024]
v.blk.5.ln2.weight
F32
F32
[1024]
v.blk.6
v.blk.6.attn_k.bias
F32
F32
[1024]
v.blk.6.attn_k.weight
F16
F16
[1024, 1024]
v.blk.6.attn_out.bias
F32
F32
[1024]
v.blk.6.attn_out.weight
F16
F16
[1024, 1024]
v.blk.6.attn_q.bias
F32
F32
[1024]
v.blk.6.attn_q.weight
F16
F16
[1024, 1024]
v.blk.6.attn_v.bias
F32
F32
[1024]
v.blk.6.attn_v.weight
F16
F16
[1024, 1024]
v.blk.6.ffn_down.bias
F32
F32
[4096]
v.blk.6.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.6.ffn_up.bias
F32
F32
[1024]
v.blk.6.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.6.ln1.bias
F32
F32
[1024]
v.blk.6.ln1.weight
F32
F32
[1024]
v.blk.6.ln2.bias
F32
F32
[1024]
v.blk.6.ln2.weight
F32
F32
[1024]
v.blk.7
v.blk.7.attn_k.bias
F32
F32
[1024]
v.blk.7.attn_k.weight
F16
F16
[1024, 1024]
v.blk.7.attn_out.bias
F32
F32
[1024]
v.blk.7.attn_out.weight
F16
F16
[1024, 1024]
v.blk.7.attn_q.bias
F32
F32
[1024]
v.blk.7.attn_q.weight
F16
F16
[1024, 1024]
v.blk.7.attn_v.bias
F32
F32
[1024]
v.blk.7.attn_v.weight
F16
F16
[1024, 1024]
v.blk.7.ffn_down.bias
F32
F32
[4096]
v.blk.7.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.7.ffn_up.bias
F32
F32
[1024]
v.blk.7.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.7.ln1.bias
F32
F32
[1024]
v.blk.7.ln1.weight
F32
F32
[1024]
v.blk.7.ln2.bias
F32
F32
[1024]
v.blk.7.ln2.weight
F32
F32
[1024]
v.blk.8
v.blk.8.attn_k.bias
F32
F32
[1024]
v.blk.8.attn_k.weight
F16
F16
[1024, 1024]
v.blk.8.attn_out.bias
F32
F32
[1024]
v.blk.8.attn_out.weight
F16
F16
[1024, 1024]
v.blk.8.attn_q.bias
F32
F32
[1024]
v.blk.8.attn_q.weight
F16
F16
[1024, 1024]
v.blk.8.attn_v.bias
F32
F32
[1024]
v.blk.8.attn_v.weight
F16
F16
[1024, 1024]
v.blk.8.ffn_down.bias
F32
F32
[4096]
v.blk.8.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.8.ffn_up.bias
F32
F32
[1024]
v.blk.8.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.8.ln1.bias
F32
F32
[1024]
v.blk.8.ln1.weight
F32
F32
[1024]
v.blk.8.ln2.bias
F32
F32
[1024]
v.blk.8.ln2.weight
F32
F32
[1024]
v.blk.9
v.blk.9.attn_k.bias
F32
F32
[1024]
v.blk.9.attn_k.weight
F16
F16
[1024, 1024]
v.blk.9.attn_out.bias
F32
F32
[1024]
v.blk.9.attn_out.weight
F16
F16
[1024, 1024]
v.blk.9.attn_q.bias
F32
F32
[1024]
v.blk.9.attn_q.weight
F16
F16
[1024, 1024]
v.blk.9.attn_v.bias
F32
F32
[1024]
v.blk.9.attn_v.weight
F16
F16
[1024, 1024]
v.blk.9.ffn_down.bias
F32
F32
[4096]
v.blk.9.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.9.ffn_up.bias
F32
F32
[1024]
v.blk.9.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.9.ln1.bias
F32
F32
[1024]
v.blk.9.ln1.weight
F32
F32
[1024]
v.blk.9.ln2.bias
F32
F32
[1024]
v.blk.9.ln2.weight
F32
F32
[1024]
v.blk.10
v.blk.10.attn_k.bias
F32
F32
[1024]
v.blk.10.attn_k.weight
F16
F16
[1024, 1024]
v.blk.10.attn_out.bias
F32
F32
[1024]
v.blk.10.attn_out.weight
F16
F16
[1024, 1024]
v.blk.10.attn_q.bias
F32
F32
[1024]
v.blk.10.attn_q.weight
F16
F16
[1024, 1024]
v.blk.10.attn_v.bias
F32
F32
[1024]
v.blk.10.attn_v.weight
F16
F16
[1024, 1024]
v.blk.10.ffn_down.bias
F32
F32
[4096]
v.blk.10.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.10.ffn_up.bias
F32
F32
[1024]
v.blk.10.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.10.ln1.bias
F32
F32
[1024]
v.blk.10.ln1.weight
F32
F32
[1024]
v.blk.10.ln2.bias
F32
F32
[1024]
v.blk.10.ln2.weight
F32
F32
[1024]
v.blk.11
v.blk.11.attn_k.bias
F32
F32
[1024]
v.blk.11.attn_k.weight
F16
F16
[1024, 1024]
v.blk.11.attn_out.bias
F32
F32
[1024]
v.blk.11.attn_out.weight
F16
F16
[1024, 1024]
v.blk.11.attn_q.bias
F32
F32
[1024]
v.blk.11.attn_q.weight
F16
F16
[1024, 1024]
v.blk.11.attn_v.bias
F32
F32
[1024]
v.blk.11.attn_v.weight
F16
F16
[1024, 1024]
v.blk.11.ffn_down.bias
F32
F32
[4096]
v.blk.11.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.11.ffn_up.bias
F32
F32
[1024]
v.blk.11.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.11.ln1.bias
F32
F32
[1024]
v.blk.11.ln1.weight
F32
F32
[1024]
v.blk.11.ln2.bias
F32
F32
[1024]
v.blk.11.ln2.weight
F32
F32
[1024]
v.blk.12
v.blk.12.attn_k.bias
F32
F32
[1024]
v.blk.12.attn_k.weight
F16
F16
[1024, 1024]
v.blk.12.attn_out.bias
F32
F32
[1024]
v.blk.12.attn_out.weight
F16
F16
[1024, 1024]
v.blk.12.attn_q.bias
F32
F32
[1024]
v.blk.12.attn_q.weight
F16
F16
[1024, 1024]
v.blk.12.attn_v.bias
F32
F32
[1024]
v.blk.12.attn_v.weight
F16
F16
[1024, 1024]
v.blk.12.ffn_down.bias
F32
F32
[4096]
v.blk.12.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.12.ffn_up.bias
F32
F32
[1024]
v.blk.12.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.12.ln1.bias
F32
F32
[1024]
v.blk.12.ln1.weight
F32
F32
[1024]
v.blk.12.ln2.bias
F32
F32
[1024]
v.blk.12.ln2.weight
F32
F32
[1024]
v.blk.13
v.blk.13.attn_k.bias
F32
F32
[1024]
v.blk.13.attn_k.weight
F16
F16
[1024, 1024]
v.blk.13.attn_out.bias
F32
F32
[1024]
v.blk.13.attn_out.weight
F16
F16
[1024, 1024]
v.blk.13.attn_q.bias
F32
F32
[1024]
v.blk.13.attn_q.weight
F16
F16
[1024, 1024]
v.blk.13.attn_v.bias
F32
F32
[1024]
v.blk.13.attn_v.weight
F16
F16
[1024, 1024]
v.blk.13.ffn_down.bias
F32
F32
[4096]
v.blk.13.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.13.ffn_up.bias
F32
F32
[1024]
v.blk.13.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.13.ln1.bias
F32
F32
[1024]
v.blk.13.ln1.weight
F32
F32
[1024]
v.blk.13.ln2.bias
F32
F32
[1024]
v.blk.13.ln2.weight
F32
F32
[1024]
v.blk.14
v.blk.14.attn_k.bias
F32
F32
[1024]
v.blk.14.attn_k.weight
F16
F16
[1024, 1024]
v.blk.14.attn_out.bias
F32
F32
[1024]
v.blk.14.attn_out.weight
F16
F16
[1024, 1024]
v.blk.14.attn_q.bias
F32
F32
[1024]
v.blk.14.attn_q.weight
F16
F16
[1024, 1024]
v.blk.14.attn_v.bias
F32
F32
[1024]
v.blk.14.attn_v.weight
F16
F16
[1024, 1024]
v.blk.14.ffn_down.bias
F32
F32
[4096]
v.blk.14.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.14.ffn_up.bias
F32
F32
[1024]
v.blk.14.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.14.ln1.bias
F32
F32
[1024]
v.blk.14.ln1.weight
F32
F32
[1024]
v.blk.14.ln2.bias
F32
F32
[1024]
v.blk.14.ln2.weight
F32
F32
[1024]
v.blk.15
v.blk.15.attn_k.bias
F32
F32
[1024]
v.blk.15.attn_k.weight
F16
F16
[1024, 1024]
v.blk.15.attn_out.bias
F32
F32
[1024]
v.blk.15.attn_out.weight
F16
F16
[1024, 1024]
v.blk.15.attn_q.bias
F32
F32
[1024]
v.blk.15.attn_q.weight
F16
F16
[1024, 1024]
v.blk.15.attn_v.bias
F32
F32
[1024]
v.blk.15.attn_v.weight
F16
F16
[1024, 1024]
v.blk.15.ffn_down.bias
F32
F32
[4096]
v.blk.15.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.15.ffn_up.bias
F32
F32
[1024]
v.blk.15.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.15.ln1.bias
F32
F32
[1024]
v.blk.15.ln1.weight
F32
F32
[1024]
v.blk.15.ln2.bias
F32
F32
[1024]
v.blk.15.ln2.weight
F32
F32
[1024]
v.blk.16
v.blk.16.attn_k.bias
F32
F32
[1024]
v.blk.16.attn_k.weight
F16
F16
[1024, 1024]
v.blk.16.attn_out.bias
F32
F32
[1024]
v.blk.16.attn_out.weight
F16
F16
[1024, 1024]
v.blk.16.attn_q.bias
F32
F32
[1024]
v.blk.16.attn_q.weight
F16
F16
[1024, 1024]
v.blk.16.attn_v.bias
F32
F32
[1024]
v.blk.16.attn_v.weight
F16
F16
[1024, 1024]
v.blk.16.ffn_down.bias
F32
F32
[4096]
v.blk.16.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.16.ffn_up.bias
F32
F32
[1024]
v.blk.16.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.16.ln1.bias
F32
F32
[1024]
v.blk.16.ln1.weight
F32
F32
[1024]
v.blk.16.ln2.bias
F32
F32
[1024]
v.blk.16.ln2.weight
F32
F32
[1024]
v.blk.17
v.blk.17.attn_k.bias
F32
F32
[1024]
v.blk.17.attn_k.weight
F16
F16
[1024, 1024]
v.blk.17.attn_out.bias
F32
F32
[1024]
v.blk.17.attn_out.weight
F16
F16
[1024, 1024]
v.blk.17.attn_q.bias
F32
F32
[1024]
v.blk.17.attn_q.weight
F16
F16
[1024, 1024]
v.blk.17.attn_v.bias
F32
F32
[1024]
v.blk.17.attn_v.weight
F16
F16
[1024, 1024]
v.blk.17.ffn_down.bias
F32
F32
[4096]
v.blk.17.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.17.ffn_up.bias
F32
F32
[1024]
v.blk.17.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.17.ln1.bias
F32
F32
[1024]
v.blk.17.ln1.weight
F32
F32
[1024]
v.blk.17.ln2.bias
F32
F32
[1024]
v.blk.17.ln2.weight
F32
F32
[1024]
v.blk.18
v.blk.18.attn_k.bias
F32
F32
[1024]
v.blk.18.attn_k.weight
F16
F16
[1024, 1024]
v.blk.18.attn_out.bias
F32
F32
[1024]
v.blk.18.attn_out.weight
F16
F16
[1024, 1024]
v.blk.18.attn_q.bias
F32
F32
[1024]
v.blk.18.attn_q.weight
F16
F16
[1024, 1024]
v.blk.18.attn_v.bias
F32
F32
[1024]
v.blk.18.attn_v.weight
F16
F16
[1024, 1024]
v.blk.18.ffn_down.bias
F32
F32
[4096]
v.blk.18.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.18.ffn_up.bias
F32
F32
[1024]
v.blk.18.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.18.ln1.bias
F32
F32
[1024]
v.blk.18.ln1.weight
F32
F32
[1024]
v.blk.18.ln2.bias
F32
F32
[1024]
v.blk.18.ln2.weight
F32
F32
[1024]
v.blk.19
v.blk.19.attn_k.bias
F32
F32
[1024]
v.blk.19.attn_k.weight
F16
F16
[1024, 1024]
v.blk.19.attn_out.bias
F32
F32
[1024]
v.blk.19.attn_out.weight
F16
F16
[1024, 1024]
v.blk.19.attn_q.bias
F32
F32
[1024]
v.blk.19.attn_q.weight
F16
F16
[1024, 1024]
v.blk.19.attn_v.bias
F32
F32
[1024]
v.blk.19.attn_v.weight
F16
F16
[1024, 1024]
v.blk.19.ffn_down.bias
F32
F32
[4096]
v.blk.19.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.19.ffn_up.bias
F32
F32
[1024]
v.blk.19.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.19.ln1.bias
F32
F32
[1024]
v.blk.19.ln1.weight
F32
F32
[1024]
v.blk.19.ln2.bias
F32
F32
[1024]
v.blk.19.ln2.weight
F32
F32
[1024]
v.blk.20
v.blk.20.attn_k.bias
F32
F32
[1024]
v.blk.20.attn_k.weight
F16
F16
[1024, 1024]
v.blk.20.attn_out.bias
F32
F32
[1024]
v.blk.20.attn_out.weight
F16
F16
[1024, 1024]
v.blk.20.attn_q.bias
F32
F32
[1024]
v.blk.20.attn_q.weight
F16
F16
[1024, 1024]
v.blk.20.attn_v.bias
F32
F32
[1024]
v.blk.20.attn_v.weight
F16
F16
[1024, 1024]
v.blk.20.ffn_down.bias
F32
F32
[4096]
v.blk.20.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.20.ffn_up.bias
F32
F32
[1024]
v.blk.20.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.20.ln1.bias
F32
F32
[1024]
v.blk.20.ln1.weight
F32
F32
[1024]
v.blk.20.ln2.bias
F32
F32
[1024]
v.blk.20.ln2.weight
F32
F32
[1024]
v.blk.21
v.blk.21.attn_k.bias
F32
F32
[1024]
v.blk.21.attn_k.weight
F16
F16
[1024, 1024]
v.blk.21.attn_out.bias
F32
F32
[1024]
v.blk.21.attn_out.weight
F16
F16
[1024, 1024]
v.blk.21.attn_q.bias
F32
F32
[1024]
v.blk.21.attn_q.weight
F16
F16
[1024, 1024]
v.blk.21.attn_v.bias
F32
F32
[1024]
v.blk.21.attn_v.weight
F16
F16
[1024, 1024]
v.blk.21.ffn_down.bias
F32
F32
[4096]
v.blk.21.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.21.ffn_up.bias
F32
F32
[1024]
v.blk.21.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.21.ln1.bias
F32
F32
[1024]
v.blk.21.ln1.weight
F32
F32
[1024]
v.blk.21.ln2.bias
F32
F32
[1024]
v.blk.21.ln2.weight
F32
F32
[1024]
v.blk.22
v.blk.22.attn_k.bias
F32
F32
[1024]
v.blk.22.attn_k.weight
F16
F16
[1024, 1024]
v.blk.22.attn_out.bias
F32
F32
[1024]
v.blk.22.attn_out.weight
F16
F16
[1024, 1024]
v.blk.22.attn_q.bias
F32
F32
[1024]
v.blk.22.attn_q.weight
F16
F16
[1024, 1024]
v.blk.22.attn_v.bias
F32
F32
[1024]
v.blk.22.attn_v.weight
F16
F16
[1024, 1024]
v.blk.22.ffn_down.bias
F32
F32
[4096]
v.blk.22.ffn_down.weight
F16
F16
[1024, 4096]
v.blk.22.ffn_up.bias
F32
F32
[1024]
v.blk.22.ffn_up.weight
F16
F16
[4096, 1024]
v.blk.22.ln1.bias
F32
F32
[1024]
v.blk.22.ln1.weight
F32
F32
[1024]
v.blk.22.ln2.bias
F32
F32
[1024]
v.blk.22.ln2.weight
F32
F32
[1024]
v.class_embd
F32
F32
[1024]
v.patch_embd.weight
F16
F16
[14, 14, 3, 1024]
v.position_embd.weight
F16
F16
[1024, 577]
v.pre_ln.bias
F32
F32
[1024]
v.pre_ln.weight
F32
F32
[1024]