nchapman/dolphin3.0-qwen2.5:3b
671 Downloads · Updated 8 months ago
Dolphin 3.0 is designed to be the ultimate general-purpose local model, enabling coding, math, agentic workflows, function calling, and general use cases.
Available sizes: 0.5b · 1.5b · 3b
dolphin3.0-qwen2.5:3b
model · 0cb1908c5f44 · 1.9GB
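
For context, a minimal usage sketch with the Ollama Python client is shown below. It assumes a local Ollama server, the ollama package (pip install ollama), and that this tag has already been pulled; the prompt is illustrative only.

# Minimal sketch, not an official example: chat with this tag through a local
# Ollama server. Assumes `pip install ollama` and a prior
# `ollama pull nchapman/dolphin3.0-qwen2.5:3b`.
import ollama

response = ollama.chat(
    model="nchapman/dolphin3.0-qwen2.5:3b",
    messages=[{"role": "user", "content": "Summarize quicksort in one sentence."}],
)
print(response["message"]["content"])
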
Metadata
general.architecture                      qwen2
general.file_type                         Q4_K_M
qwen2.attention.head_count                16
qwen2.attention.head_count_kv             2
qwen2.attention.layer_norm_rms_epsilon    1e-06
qwen2.block_count                         36
qwen2.context_length                      32768
qwen2.embedding_length                    2048
qwen2.feed_forward_length                 11008
qwen2.rope.freq_base                      1e+06
tokenizer.ggml.add_bos_token              false
tokenizer.ggml.bos_token_id               151643
tokenizer.ggml.eos_token_id               151645
tokenizer.ggml.merges                     [Ġ Ġ, ĠĠ ĠĠ, i n, Ġ t, ĠĠĠĠ ĠĠĠĠ, ...]
tokenizer.ggml.model                      gpt2
tokenizer.ggml.padding_token_id           151643
tokenizer.ggml.pre                        qwen2
tokenizer.ggml.token_type                 [1, 1, 1, 1, 1, ...]
tokenizer.ggml.tokens                     [!, ", #, $, %, ...]
quantize.imatrix.chunks_count             128
quantize.imatrix.dataset                  /training_dir/calibration_datav3.txt
quantize.imatrix.entries_count            252
quantize.imatrix.file                     /models_out/Dolphin3.0-Qwen2.5-3b-GGUF/Dolphin3.0-Qwen2.5-3b.imatrix
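
The keys above live in the GGUF header of the model blob. Below is a minimal sketch for listing them locally with the gguf Python package (published from the llama.cpp repository); the file name is a placeholder for wherever the blob is stored on disk.

# Minimal sketch, assuming `pip install gguf`; the path is a placeholder.
from gguf import GGUFReader

reader = GGUFReader("dolphin3.0-qwen2.5-3b-Q4_K_M.gguf")
for name, field in reader.fields.items():
    # Each field records its GGUF value type(s); raw values live in field.parts.
    print(name, [t.name for t in field.types])
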
Tensor
Name                        Type    Shape
token_embd.weight           Q6_K    [2048, 151936]
blk.0.attn_k.bias           F32     [256]
blk.0.attn_k.weight         Q4_K    [2048, 256]
blk.0.attn_norm.weight      F32     [2048]
blk.0.attn_output.weight    Q4_K    [2048, 2048]
blk.0.attn_q.bias           F32     [2048]
blk.0.attn_q.weight         Q4_K    [2048, 2048]
blk.0.attn_v.bias           F32     [256]
blk.0.attn_v.weight         Q6_K    [2048, 256]
blk.0.ffn_down.weight       Q6_K    [11008, 2048]
blk.0.ffn_gate.weight       Q4_K    [2048, 11008]
blk.0.ffn_norm.weight       F32     [2048]
blk.0.ffn_up.weight         Q4_K    [2048, 11008]
blk.1.attn_k.bias           F32     [256]
blk.1.attn_k.weight         Q4_K    [2048, 256]
blk.1.attn_norm.weight      F32     [2048]
blk.1.attn_output.weight    Q4_K    [2048, 2048]
blk.1.attn_q.bias           F32     [2048]
blk.1.attn_q.weight         Q4_K    [2048, 2048]
blk.1.attn_v.bias           F32     [256]
blk.1.attn_v.weight         Q6_K    [2048, 256]
blk.1.ffn_down.weight       Q6_K    [11008, 2048]
blk.1.ffn_gate.weight       Q4_K    [2048, 11008]
blk.1.ffn_norm.weight       F32     [2048]
blk.1.ffn_up.weight         Q4_K    [2048, 11008]
blk.2.attn_k.bias           F32     [256]
blk.2.attn_k.weight         Q4_K    [2048, 256]
blk.2.attn_norm.weight      F32     [2048]
blk.2.attn_output.weight    Q4_K    [2048, 2048]
blk.2.attn_q.bias           F32     [2048]
blk.2.attn_q.weight         Q4_K    [2048, 2048]
blk.2.attn_v.bias           F32     [256]
blk.2.attn_v.weight         Q6_K    [2048, 256]
blk.2.ffn_down.weight       Q6_K    [11008, 2048]
blk.2.ffn_gate.weight       Q4_K    [2048, 11008]
blk.2.ffn_norm.weight       F32     [2048]
blk.2.ffn_up.weight         Q4_K    [2048, 11008]
blk.3.attn_k.bias           F32     [256]
blk.3.attn_k.weight         Q4_K    [2048, 256]
blk.3.attn_norm.weight      F32     [2048]
blk.3.attn_output.weight    Q4_K    [2048, 2048]
blk.3.attn_q.bias           F32     [2048]
blk.3.attn_q.weight         Q4_K    [2048, 2048]
blk.3.attn_v.bias           F32     [256]
blk.3.attn_v.weight         Q6_K    [2048, 256]
blk.3.ffn_down.weight       Q6_K    [11008, 2048]
blk.3.ffn_gate.weight       Q4_K    [2048, 11008]
blk.3.ffn_norm.weight       F32     [2048]
blk.3.ffn_up.weight         Q4_K    [2048, 11008]
blk.4.attn_k.bias           F32     [256]
blk.4.attn_k.weight         Q4_K    [2048, 256]
blk.4.attn_norm.weight      F32     [2048]
blk.4.attn_output.weight    Q4_K    [2048, 2048]
blk.4.attn_q.bias           F32     [2048]
blk.4.attn_q.weight         Q4_K    [2048, 2048]
blk.4.attn_v.bias           F32     [256]
blk.4.attn_v.weight         Q4_K    [2048, 256]
blk.4.ffn_down.weight       Q4_K    [11008, 2048]
blk.4.ffn_gate.weight       Q4_K    [2048, 11008]
blk.4.ffn_norm.weight       F32     [2048]
blk.4.ffn_up.weight         Q4_K    [2048, 11008]
blk.5.attn_k.bias           F32     [256]
blk.5.attn_k.weight         Q4_K    [2048, 256]
blk.5.attn_norm.weight      F32     [2048]
blk.5.attn_output.weight    Q4_K    [2048, 2048]
blk.5.attn_q.bias           F32     [2048]
blk.5.attn_q.weight         Q4_K    [2048, 2048]
blk.5.attn_v.bias           F32     [256]
blk.5.attn_v.weight         Q4_K    [2048, 256]
blk.5.ffn_down.weight       Q4_K    [11008, 2048]
blk.5.ffn_gate.weight       Q4_K    [2048, 11008]
blk.5.ffn_norm.weight       F32     [2048]
blk.5.ffn_up.weight         Q4_K    [2048, 11008]
blk.6.attn_k.bias           F32     [256]
blk.6.attn_k.weight         Q4_K    [2048, 256]
blk.6.attn_norm.weight      F32     [2048]
blk.6.attn_output.weight    Q4_K    [2048, 2048]
blk.6.attn_q.bias           F32     [2048]
blk.6.attn_q.weight         Q4_K    [2048, 2048]
blk.6.attn_v.bias           F32     [256]
blk.6.attn_v.weight         Q6_K    [2048, 256]
blk.6.ffn_down.weight       Q6_K    [11008, 2048]
blk.6.ffn_gate.weight       Q4_K    [2048, 11008]
blk.6.ffn_norm.weight       F32     [2048]
blk.6.ffn_up.weight         Q4_K    [2048, 11008]
blk.7.attn_k.bias           F32     [256]
blk.7.attn_k.weight         Q4_K    [2048, 256]
blk.7.attn_norm.weight      F32     [2048]
blk.7.attn_output.weight    Q4_K    [2048, 2048]
blk.7.attn_q.bias           F32     [2048]
blk.7.attn_q.weight         Q4_K    [2048, 2048]
blk.7.attn_v.bias           F32     [256]
blk.7.attn_v.weight         Q4_K    [2048, 256]
blk.7.ffn_down.weight       Q4_K    [11008, 2048]
blk.7.ffn_gate.weight       Q4_K    [2048, 11008]
blk.7.ffn_norm.weight       F32     [2048]
blk.7.ffn_up.weight         Q4_K    [2048, 11008]
blk.8.attn_k.bias           F32     [256]
blk.8.attn_k.weight         Q4_K    [2048, 256]
blk.8.attn_norm.weight      F32     [2048]
blk.8.attn_output.weight    Q4_K    [2048, 2048]
blk.8.attn_q.bias           F32     [2048]
blk.8.attn_q.weight         Q4_K    [2048, 2048]
blk.8.attn_v.bias           F32     [256]
blk.8.attn_v.weight         Q4_K    [2048, 256]
blk.8.ffn_down.weight       Q4_K    [11008, 2048]
blk.8.ffn_gate.weight       Q4_K    [2048, 11008]
blk.8.ffn_norm.weight       F32     [2048]
blk.8.ffn_up.weight         Q4_K    [2048, 11008]
blk.9.attn_k.bias           F32     [256]
blk.9.attn_k.weight         Q4_K    [2048, 256]
blk.9.attn_norm.weight      F32     [2048]
blk.9.attn_output.weight    Q4_K    [2048, 2048]
blk.9.attn_q.bias           F32     [2048]
blk.9.attn_q.weight         Q4_K    [2048, 2048]
blk.9.attn_v.bias           F32     [256]
blk.9.attn_v.weight         Q6_K    [2048, 256]
blk.9.ffn_down.weight       Q6_K    [11008, 2048]
blk.9.ffn_gate.weight       Q4_K    [2048, 11008]
blk.9.ffn_norm.weight       F32     [2048]
blk.9.ffn_up.weight         Q4_K    [2048, 11008]
blk.10.attn_k.bias          F32     [256]
blk.10.attn_k.weight        Q4_K    [2048, 256]
blk.10.attn_norm.weight     F32     [2048]
blk.10.attn_output.weight   Q4_K    [2048, 2048]
blk.10.attn_q.bias          F32     [2048]
blk.10.attn_q.weight        Q4_K    [2048, 2048]
blk.10.attn_v.bias          F32     [256]
blk.10.attn_v.weight        Q4_K    [2048, 256]
blk.10.ffn_down.weight      Q4_K    [11008, 2048]
blk.10.ffn_gate.weight      Q4_K    [2048, 11008]
blk.10.ffn_norm.weight      F32     [2048]
blk.10.ffn_up.weight        Q4_K    [2048, 11008]
blk.11.attn_k.bias          F32     [256]
blk.11.attn_k.weight        Q4_K    [2048, 256]
blk.11.attn_norm.weight     F32     [2048]
blk.11.attn_output.weight   Q4_K    [2048, 2048]
blk.11.attn_q.bias          F32     [2048]
blk.11.attn_q.weight        Q4_K    [2048, 2048]
blk.11.attn_v.bias          F32     [256]
blk.11.attn_v.weight        Q4_K    [2048, 256]
blk.11.ffn_down.weight      Q4_K    [11008, 2048]
blk.11.ffn_gate.weight      Q4_K    [2048, 11008]
blk.11.ffn_norm.weight      F32     [2048]
blk.11.ffn_up.weight        Q4_K    [2048, 11008]
blk.12.attn_k.bias          F32     [256]
blk.12.attn_k.weight        Q4_K    [2048, 256]
blk.12.attn_norm.weight     F32     [2048]
blk.12.attn_output.weight   Q4_K    [2048, 2048]
blk.12.attn_q.bias          F32     [2048]
blk.12.attn_q.weight        Q4_K    [2048, 2048]
blk.12.attn_v.bias          F32     [256]
blk.12.attn_v.weight        Q6_K    [2048, 256]
blk.12.ffn_down.weight      Q6_K    [11008, 2048]
blk.12.ffn_gate.weight      Q4_K    [2048, 11008]
blk.12.ffn_norm.weight      F32     [2048]
blk.12.ffn_up.weight        Q4_K    [2048, 11008]
blk.13.attn_k.bias          F32     [256]
blk.13.attn_k.weight        Q4_K    [2048, 256]
blk.13.attn_norm.weight     F32     [2048]
blk.13.attn_output.weight   Q4_K    [2048, 2048]
blk.13.attn_q.bias          F32     [2048]
blk.13.attn_q.weight        Q4_K    [2048, 2048]
blk.13.attn_v.bias          F32     [256]
blk.13.attn_v.weight        Q4_K    [2048, 256]
blk.13.ffn_down.weight      Q4_K    [11008, 2048]
blk.13.ffn_gate.weight      Q4_K    [2048, 11008]
blk.13.ffn_norm.weight      F32     [2048]
blk.13.ffn_up.weight        Q4_K    [2048, 11008]
blk.14.attn_k.bias          F32     [256]
blk.14.attn_k.weight        Q4_K    [2048, 256]
blk.14.attn_norm.weight     F32     [2048]
blk.14.attn_output.weight   Q4_K    [2048, 2048]
blk.14.attn_q.bias          F32     [2048]
blk.14.attn_q.weight        Q4_K    [2048, 2048]
blk.14.attn_v.bias          F32     [256]
blk.14.attn_v.weight        Q4_K    [2048, 256]
blk.14.ffn_down.weight      Q4_K    [11008, 2048]
blk.14.ffn_gate.weight      Q4_K    [2048, 11008]
blk.14.ffn_norm.weight      F32     [2048]
blk.14.ffn_up.weight        Q4_K    [2048, 11008]
blk.15.attn_k.bias          F32     [256]
blk.15.attn_k.weight        Q4_K    [2048, 256]
blk.15.attn_norm.weight     F32     [2048]
blk.15.attn_output.weight   Q4_K    [2048, 2048]
blk.15.attn_q.bias          F32     [2048]
blk.15.attn_q.weight        Q4_K    [2048, 2048]
blk.15.attn_v.bias          F32     [256]
blk.15.attn_v.weight        Q6_K    [2048, 256]
blk.15.ffn_down.weight      Q6_K    [11008, 2048]
blk.15.ffn_gate.weight      Q4_K    [2048, 11008]
blk.15.ffn_norm.weight      F32     [2048]
blk.15.ffn_up.weight        Q4_K    [2048, 11008]
blk.16.attn_k.bias          F32     [256]
blk.16.attn_k.weight        Q4_K    [2048, 256]
blk.16.attn_norm.weight     F32     [2048]
blk.16.attn_output.weight   Q4_K    [2048, 2048]
blk.16.attn_q.bias          F32     [2048]
blk.16.attn_q.weight        Q4_K    [2048, 2048]
blk.16.attn_v.bias          F32     [256]
blk.16.attn_v.weight        Q4_K    [2048, 256]
blk.16.ffn_down.weight      Q4_K    [11008, 2048]
blk.16.ffn_gate.weight      Q4_K    [2048, 11008]
blk.16.ffn_norm.weight      F32     [2048]
blk.16.ffn_up.weight        Q4_K    [2048, 11008]
blk.17.attn_k.bias          F32     [256]
blk.17.attn_k.weight        Q4_K    [2048, 256]
blk.17.attn_norm.weight     F32     [2048]
blk.17.attn_output.weight   Q4_K    [2048, 2048]
blk.17.attn_q.bias          F32     [2048]
blk.17.attn_q.weight        Q4_K    [2048, 2048]
blk.17.attn_v.bias          F32     [256]
blk.17.attn_v.weight        Q4_K    [2048, 256]
blk.17.ffn_down.weight      Q4_K    [11008, 2048]
blk.17.ffn_gate.weight      Q4_K    [2048, 11008]
blk.17.ffn_norm.weight      F32     [2048]
blk.17.ffn_up.weight        Q4_K    [2048, 11008]
blk.18.attn_k.bias          F32     [256]
blk.18.attn_k.weight        Q4_K    [2048, 256]
blk.18.attn_norm.weight     F32     [2048]
blk.18.attn_output.weight   Q4_K    [2048, 2048]
blk.18.attn_q.bias          F32     [2048]
blk.18.attn_q.weight        Q4_K    [2048, 2048]
blk.18.attn_v.bias          F32     [256]
blk.18.attn_v.weight        Q6_K    [2048, 256]
blk.18.ffn_down.weight      Q6_K    [11008, 2048]
blk.18.ffn_gate.weight      Q4_K    [2048, 11008]
blk.18.ffn_norm.weight      F32     [2048]
blk.18.ffn_up.weight        Q4_K    [2048, 11008]
blk.19.attn_k.bias          F32     [256]
blk.19.attn_k.weight        Q4_K    [2048, 256]
blk.19.attn_norm.weight     F32     [2048]
blk.19.attn_output.weight   Q4_K    [2048, 2048]
blk.19.attn_q.bias          F32     [2048]
blk.19.attn_q.weight        Q4_K    [2048, 2048]
blk.19.attn_v.bias          F32     [256]
blk.19.attn_v.weight        Q4_K    [2048, 256]
blk.19.ffn_down.weight      Q4_K    [11008, 2048]
blk.19.ffn_gate.weight      Q4_K    [2048, 11008]
blk.19.ffn_norm.weight      F32     [2048]
blk.19.ffn_up.weight        Q4_K    [2048, 11008]
blk.20.attn_k.bias          F32     [256]
blk.20.attn_k.weight        Q4_K    [2048, 256]
blk.20.attn_norm.weight     F32     [2048]
blk.20.attn_output.weight   Q4_K    [2048, 2048]
blk.20.attn_q.bias          F32     [2048]
blk.20.attn_q.weight        Q4_K    [2048, 2048]
blk.20.attn_v.bias          F32     [256]
blk.20.attn_v.weight        Q4_K    [2048, 256]
blk.20.ffn_down.weight      Q4_K    [11008, 2048]
blk.20.ffn_gate.weight      Q4_K    [2048, 11008]
blk.20.ffn_norm.weight      F32     [2048]
blk.20.ffn_up.weight        Q4_K    [2048, 11008]
blk.21.attn_k.bias          F32     [256]
blk.21.attn_k.weight        Q4_K    [2048, 256]
blk.21.attn_norm.weight     F32     [2048]
blk.21.attn_output.weight   Q4_K    [2048, 2048]
blk.21.attn_q.bias          F32     [2048]
blk.21.attn_q.weight        Q4_K    [2048, 2048]
blk.21.attn_v.bias          F32     [256]
blk.21.attn_v.weight        Q6_K    [2048, 256]
blk.21.ffn_down.weight      Q6_K    [11008, 2048]
blk.21.ffn_gate.weight      Q4_K    [2048, 11008]
blk.21.ffn_norm.weight      F32     [2048]
blk.21.ffn_up.weight        Q4_K    [2048, 11008]
blk.22.attn_k.bias          F32     [256]
blk.22.attn_k.weight        Q4_K    [2048, 256]
blk.22.attn_norm.weight     F32     [2048]
blk.22.attn_output.weight   Q4_K    [2048, 2048]
blk.22.attn_q.bias          F32     [2048]
blk.22.attn_q.weight        Q4_K    [2048, 2048]
blk.22.attn_v.bias          F32     [256]
blk.22.attn_v.weight        Q4_K    [2048, 256]
blk.22.ffn_down.weight      Q4_K    [11008, 2048]
blk.22.ffn_gate.weight      Q4_K    [2048, 11008]
blk.22.ffn_norm.weight      F32     [2048]
blk.22.ffn_up.weight        Q4_K    [2048, 11008]
blk.23.attn_k.bias          F32     [256]
blk.23.attn_k.weight        Q4_K    [2048, 256]
blk.23.attn_norm.weight     F32     [2048]
blk.23.attn_output.weight   Q4_K    [2048, 2048]
blk.23.attn_q.bias          F32     [2048]
blk.23.attn_q.weight        Q4_K    [2048, 2048]
blk.23.attn_v.bias          F32     [256]
blk.23.attn_v.weight        Q4_K    [2048, 256]
blk.23.ffn_down.weight      Q4_K    [11008, 2048]
blk.23.ffn_gate.weight      Q4_K    [2048, 11008]
blk.23.ffn_norm.weight      F32     [2048]
blk.23.ffn_up.weight        Q4_K    [2048, 11008]
blk.24.attn_k.bias          F32     [256]
blk.24.attn_k.weight        Q4_K    [2048, 256]
blk.24.attn_norm.weight     F32     [2048]
blk.24.attn_output.weight   Q4_K    [2048, 2048]
blk.24.attn_q.bias          F32     [2048]
blk.24.attn_q.weight        Q4_K    [2048, 2048]
blk.24.attn_v.bias          F32     [256]
blk.24.attn_v.weight        Q6_K    [2048, 256]
blk.24.ffn_down.weight      Q6_K    [11008, 2048]
blk.24.ffn_gate.weight      Q4_K    [2048, 11008]
blk.24.ffn_norm.weight      F32     [2048]
blk.24.ffn_up.weight        Q4_K    [2048, 11008]
blk.25.attn_k.bias          F32     [256]
blk.25.attn_k.weight        Q4_K    [2048, 256]
blk.25.attn_norm.weight     F32     [2048]
blk.25.attn_output.weight   Q4_K    [2048, 2048]
blk.25.attn_q.bias          F32     [2048]
blk.25.attn_q.weight        Q4_K    [2048, 2048]
blk.25.attn_v.bias          F32     [256]
blk.25.attn_v.weight        Q4_K    [2048, 256]
blk.25.ffn_down.weight      Q4_K    [11008, 2048]
blk.25.ffn_gate.weight      Q4_K    [2048, 11008]
blk.25.ffn_norm.weight      F32     [2048]
blk.25.ffn_up.weight        Q4_K    [2048, 11008]
blk.26.attn_k.bias          F32     [256]
blk.26.attn_k.weight        Q4_K    [2048, 256]
blk.26.attn_norm.weight     F32     [2048]
blk.26.attn_output.weight   Q4_K    [2048, 2048]
blk.26.attn_q.bias          F32     [2048]
blk.26.attn_q.weight        Q4_K    [2048, 2048]
blk.26.attn_v.bias          F32     [256]
blk.26.attn_v.weight        Q4_K    [2048, 256]
blk.26.ffn_down.weight      Q4_K    [11008, 2048]
blk.26.ffn_gate.weight      Q4_K    [2048, 11008]
blk.26.ffn_norm.weight      F32     [2048]
blk.26.ffn_up.weight        Q4_K    [2048, 11008]
blk.27.attn_k.bias          F32     [256]
blk.27.attn_k.weight        Q4_K    [2048, 256]
blk.27.attn_norm.weight     F32     [2048]
blk.27.attn_output.weight   Q4_K    [2048, 2048]
blk.27.attn_q.bias          F32     [2048]
blk.27.attn_q.weight        Q4_K    [2048, 2048]
blk.27.attn_v.bias          F32     [256]
blk.27.attn_v.weight        Q6_K    [2048, 256]
blk.27.ffn_down.weight      Q6_K    [11008, 2048]
blk.27.ffn_gate.weight      Q4_K    [2048, 11008]
blk.27.ffn_norm.weight      F32     [2048]
blk.27.ffn_up.weight        Q4_K    [2048, 11008]
blk.28.attn_k.bias          F32     [256]
blk.28.attn_k.weight        Q4_K    [2048, 256]
blk.28.attn_norm.weight     F32     [2048]
blk.28.attn_output.weight   Q4_K    [2048, 2048]
blk.28.attn_q.bias          F32     [2048]
blk.28.attn_q.weight        Q4_K    [2048, 2048]
blk.28.attn_v.bias          F32     [256]
blk.28.attn_v.weight        Q4_K    [2048, 256]
blk.28.ffn_down.weight      Q4_K    [11008, 2048]
blk.28.ffn_gate.weight      Q4_K    [2048, 11008]
blk.28.ffn_norm.weight      F32     [2048]
blk.28.ffn_up.weight        Q4_K    [2048, 11008]
blk.29.attn_k.bias          F32     [256]
blk.29.attn_k.weight        Q4_K    [2048, 256]
blk.29.attn_norm.weight     F32     [2048]
blk.29.attn_output.weight   Q4_K    [2048, 2048]
blk.29.attn_q.bias          F32     [2048]
blk.29.attn_q.weight        Q4_K    [2048, 2048]
blk.29.attn_v.bias          F32     [256]
blk.29.attn_v.weight        Q4_K    [2048, 256]
blk.29.ffn_down.weight      Q4_K    [11008, 2048]
blk.29.ffn_gate.weight      Q4_K    [2048, 11008]
blk.29.ffn_norm.weight      F32     [2048]
blk.29.ffn_up.weight        Q4_K    [2048, 11008]
blk.30.attn_k.bias          F32     [256]
blk.30.attn_k.weight        Q4_K    [2048, 256]
blk.30.attn_norm.weight     F32     [2048]
blk.30.attn_output.weight   Q4_K    [2048, 2048]
blk.30.attn_q.bias          F32     [2048]
blk.30.attn_q.weight        Q4_K    [2048, 2048]
blk.30.attn_v.bias          F32     [256]
blk.30.attn_v.weight        Q6_K    [2048, 256]
blk.30.ffn_down.weight      Q6_K    [11008, 2048]
blk.30.ffn_gate.weight      Q4_K    [2048, 11008]
blk.30.ffn_norm.weight      F32     [2048]
blk.30.ffn_up.weight        Q4_K    [2048, 11008]
blk.31.attn_k.bias          F32     [256]
blk.31.attn_k.weight        Q4_K    [2048, 256]
blk.31.attn_norm.weight     F32     [2048]
blk.31.attn_output.weight   Q4_K    [2048, 2048]
blk.31.attn_q.bias          F32     [2048]
blk.31.attn_q.weight        Q4_K    [2048, 2048]
blk.31.attn_v.bias          F32     [256]
blk.31.attn_v.weight        Q6_K    [2048, 256]
blk.31.ffn_down.weight      Q6_K    [11008, 2048]
blk.31.ffn_gate.weight      Q4_K    [2048, 11008]
blk.31.ffn_norm.weight      F32     [2048]
blk.31.ffn_up.weight        Q4_K    [2048, 11008]
blk.32.attn_k.bias          F32     [256]
blk.32.attn_k.weight        Q4_K    [2048, 256]
blk.32.attn_norm.weight     F32     [2048]
blk.32.attn_output.weight   Q4_K    [2048, 2048]
blk.32.attn_q.bias          F32     [2048]
blk.32.attn_q.weight        Q4_K    [2048, 2048]
blk.32.attn_v.bias          F32     [256]
blk.32.attn_v.weight        Q6_K    [2048, 256]
blk.32.ffn_down.weight      Q6_K    [11008, 2048]
blk.32.ffn_gate.weight      Q4_K    [2048, 11008]
blk.32.ffn_norm.weight      F32     [2048]
blk.32.ffn_up.weight        Q4_K    [2048, 11008]
blk.33.attn_k.bias          F32     [256]
blk.33.attn_k.weight        Q4_K    [2048, 256]
blk.33.attn_norm.weight     F32     [2048]
blk.33.attn_output.weight   Q4_K    [2048, 2048]
blk.33.attn_q.bias          F32     [2048]
blk.33.attn_q.weight        Q4_K    [2048, 2048]
blk.33.attn_v.bias          F32     [256]
blk.33.attn_v.weight        Q6_K    [2048, 256]
blk.33.ffn_down.weight      Q6_K    [11008, 2048]
blk.33.ffn_gate.weight      Q4_K    [2048, 11008]
blk.33.ffn_norm.weight      F32     [2048]
blk.33.ffn_up.weight        Q4_K    [2048, 11008]
blk.34.attn_k.bias          F32     [256]
blk.34.attn_k.weight        Q4_K    [2048, 256]
blk.34.attn_norm.weight     F32     [2048]
blk.34.attn_output.weight   Q4_K    [2048, 2048]
blk.34.attn_q.bias          F32     [2048]
blk.34.attn_q.weight        Q4_K    [2048, 2048]
blk.34.attn_v.bias          F32     [256]
blk.34.attn_v.weight        Q6_K    [2048, 256]
blk.34.ffn_down.weight      Q6_K    [11008, 2048]
blk.34.ffn_gate.weight      Q4_K    [2048, 11008]
blk.34.ffn_norm.weight      F32     [2048]
blk.34.ffn_up.weight        Q4_K    [2048, 11008]
blk.35.attn_k.bias          F32     [256]
blk.35.attn_k.weight        Q4_K    [2048, 256]
blk.35.attn_norm.weight     F32     [2048]
blk.35.attn_output.weight   Q4_K    [2048, 2048]
blk.35.attn_q.bias          F32     [2048]
blk.35.attn_q.weight        Q4_K    [2048, 2048]
blk.35.attn_v.bias          F32     [256]
blk.35.attn_v.weight        Q6_K    [2048, 256]
blk.35.ffn_down.weight      Q6_K    [11008, 2048]
blk.35.ffn_gate.weight      Q4_K    [2048, 11008]
blk.35.ffn_norm.weight      F32     [2048]
blk.35.ffn_up.weight        Q4_K    [2048, 11008]
output_norm.weight          F32     [2048]
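
The tensor listing can be reproduced from the same GGUF file. A minimal sketch follows, under the same assumptions as the metadata example (gguf package installed, placeholder file name):

# Minimal sketch, assuming `pip install gguf`; the path is a placeholder.
from gguf import GGUFReader

reader = GGUFReader("dolphin3.0-qwen2.5-3b-Q4_K_M.gguf")
for tensor in reader.tensors:
    # Print each tensor's name, quantization type (e.g. Q4_K, Q6_K, F32), and shape.
    print(tensor.name, tensor.tensor_type.name, tensor.shape.tolist())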