devstral-small-2:latest

22 Downloads · Updated 2 hours ago

A 24B model that excels at using tools to explore codebases, edit multiple files, and power software engineering agents.

vision · tools · cloud · 24b

devstral-small-2:latest / model · c580819bed79 · 15GB
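
Once the tag is pulled locally (for example with "ollama pull devstral-small-2:latest"), it can be exercised through Ollama's REST API. Below is a minimal sketch assuming a default Ollama install listening on localhost:11434; the prompt is illustrative only.

```python
# Minimal sketch: query a locally pulled devstral-small-2:latest through the
# Ollama REST API (default address http://localhost:11434). The prompt below is
# illustrative only; any coding or agentic task can be substituted.
import requests

resp = requests.post(
    "http://localhost:11434/api/generate",
    json={
        "model": "devstral-small-2:latest",
        "prompt": "Outline the steps you would take to explore an unfamiliar repository.",
        "stream": False,  # return one JSON object instead of a token stream
    },
    timeout=600,
)
resp.raise_for_status()
print(resp.json()["response"])
```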
Metadata

general.architecture  mistral3
general.file_type  Q4_K_M
mistral3.attention.head_count  32
mistral3.attention.head_count_kv  8
mistral3.attention.key_length  128
mistral3.attention.layer_norm_rms_epsilon  1e-05
mistral3.attention.value_length  128
mistral3.block_count  40
mistral3.context_length  393216
mistral3.embedding_length  5120
mistral3.feed_forward_length  32768
mistral3.image_token_index  10
mistral3.mm.projector_bias  false
mistral3.mm.projector_hidden_act  gelu
mistral3.rope.dimension_count  128
mistral3.rope.freq_base  1e+08
mistral3.rope.scaling.beta_fast  32
mistral3.rope.scaling.beta_slow  1
mistral3.rope.scaling.factor  48
mistral3.rope.scaling.mscale  1
mistral3.rope.scaling.mscale_all_dim  1
mistral3.rope.scaling.original_context_length  8192
mistral3.rope.scaling.type  yarn
mistral3.rope.scaling_beta  0.1
mistral3.spatial_merge_size  2
mistral3.vision.attention.head_count  16
mistral3.vision.attention.key_length  64
mistral3.vision.block_count  24
mistral3.vision.embedding_length  1024
mistral3.vision.feed_forward_length  4096
mistral3.vision.image_size  1540
mistral3.vision.num_channels  3
mistral3.vision.patch_size  14
mistral3.vision.rope.freq_base  10000
mistral3.vocab_size  131072
tokenizer.ggml.add_bos_token  true
tokenizer.ggml.add_eos_token  false
tokenizer.ggml.add_padding_token  false
tokenizer.ggml.add_unknown_token  false
tokenizer.ggml.bos_token_id  1
tokenizer.ggml.eos_token_id  2
tokenizer.ggml.merges  [Ġ Ġ, Ġ t, e r, i n, Ġ ĠĠĠ, ...]
tokenizer.ggml.model  gpt2
tokenizer.ggml.padding_token_id  11
tokenizer.ggml.pre  default
tokenizer.ggml.scores  [0, 1, 2, 3, 4, ...]
tokenizer.ggml.token_type  [3, 3, 3, 3, 3, ...]
tokenizer.ggml.tokens  [<unk>, <s>, </s>, [INST], [/INST], ...]
tokenizer.ggml.unknown_token_id  0
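
The RoPE entries are internally consistent: with YaRN scaling, the effective context window is the original pre-training context multiplied by the scaling factor, and 8192 × 48 = 393216 matches mistral3.context_length. A quick check using only the values quoted above:

```python
# Values copied from the metadata table above.
original_context_length = 8192  # mistral3.rope.scaling.original_context_length
yarn_factor = 48                # mistral3.rope.scaling.factor
context_length = 393216         # mistral3.context_length

# YaRN stretches the original context window by the scaling factor.
assert original_context_length * yarn_factor == context_length
print(original_context_length * yarn_factor)  # 393216
```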
Tensor

Name  Type  Shape
token_embd.weight  Q4_K  [5120, 131072]
blk.0
blk.0.attn_k.weight  Q4_K  [5120, 1024]
blk.0.attn_norm.weight  F32  [5120]
blk.0.attn_output.weight  Q4_K  [4096, 5120]
blk.0.attn_q.weight  Q4_K  [5120, 4096]
blk.0.attn_v.weight  Q6_K  [5120, 1024]
blk.0.ffn_down.weight  Q6_K  [32768, 5120]
blk.0.ffn_gate.weight  Q4_K  [5120, 32768]
blk.0.ffn_norm.weight  F32  [5120]
blk.0.ffn_up.weight  Q4_K  [5120, 32768]
blk.1
blk.1.attn_k.weight  Q4_K  [5120, 1024]
blk.1.attn_norm.weight  F32  [5120]
blk.1.attn_output.weight  Q4_K  [4096, 5120]
blk.1.attn_q.weight  Q4_K  [5120, 4096]
blk.1.attn_v.weight  Q6_K  [5120, 1024]
blk.1.ffn_down.weight  Q6_K  [32768, 5120]
blk.1.ffn_gate.weight  Q4_K  [5120, 32768]
blk.1.ffn_norm.weight  F32  [5120]
blk.1.ffn_up.weight  Q4_K  [5120, 32768]
blk.2
blk.2.attn_k.weight  Q4_K  [5120, 1024]
blk.2.attn_norm.weight  F32  [5120]
blk.2.attn_output.weight  Q4_K  [4096, 5120]
blk.2.attn_q.weight  Q4_K  [5120, 4096]
blk.2.attn_v.weight  Q6_K  [5120, 1024]
blk.2.ffn_down.weight  Q6_K  [32768, 5120]
blk.2.ffn_gate.weight  Q4_K  [5120, 32768]
blk.2.ffn_norm.weight  F32  [5120]
blk.2.ffn_up.weight  Q4_K  [5120, 32768]
blk.3
blk.3.attn_k.weight  Q4_K  [5120, 1024]
blk.3.attn_norm.weight  F32  [5120]
blk.3.attn_output.weight  Q4_K  [4096, 5120]
blk.3.attn_q.weight  Q4_K  [5120, 4096]
blk.3.attn_v.weight  Q6_K  [5120, 1024]
blk.3.ffn_down.weight  Q6_K  [32768, 5120]
blk.3.ffn_gate.weight  Q4_K  [5120, 32768]
blk.3.ffn_norm.weight  F32  [5120]
blk.3.ffn_up.weight  Q4_K  [5120, 32768]
blk.4
blk.4.attn_k.weight  Q4_K  [5120, 1024]
blk.4.attn_norm.weight  F32  [5120]
blk.4.attn_output.weight  Q4_K  [4096, 5120]
blk.4.attn_q.weight  Q4_K  [5120, 4096]
blk.4.attn_v.weight  Q6_K  [5120, 1024]
blk.4.ffn_down.weight  Q6_K  [32768, 5120]
blk.4.ffn_gate.weight  Q4_K  [5120, 32768]
blk.4.ffn_norm.weight  F32  [5120]
blk.4.ffn_up.weight  Q4_K  [5120, 32768]
blk.5
blk.5.attn_k.weight  Q4_K  [5120, 1024]
blk.5.attn_norm.weight  F32  [5120]
blk.5.attn_output.weight  Q4_K  [4096, 5120]
blk.5.attn_q.weight  Q4_K  [5120, 4096]
blk.5.attn_v.weight  Q6_K  [5120, 1024]
blk.5.ffn_down.weight  Q4_K  [32768, 5120]
blk.5.ffn_gate.weight  Q4_K  [5120, 32768]
blk.5.ffn_norm.weight  F32  [5120]
blk.5.ffn_up.weight  Q4_K  [5120, 32768]
blk.6
blk.6.attn_k.weight  Q4_K  [5120, 1024]
blk.6.attn_norm.weight  F32  [5120]
blk.6.attn_output.weight  Q4_K  [4096, 5120]
blk.6.attn_q.weight  Q4_K  [5120, 4096]
blk.6.attn_v.weight  Q6_K  [5120, 1024]
blk.6.ffn_down.weight  Q4_K  [32768, 5120]
blk.6.ffn_gate.weight  Q4_K  [5120, 32768]
blk.6.ffn_norm.weight  F32  [5120]
blk.6.ffn_up.weight  Q4_K  [5120, 32768]
blk.7
blk.7.attn_k.weight  Q4_K  [5120, 1024]
blk.7.attn_norm.weight  F32  [5120]
blk.7.attn_output.weight  Q4_K  [4096, 5120]
blk.7.attn_q.weight  Q4_K  [5120, 4096]
blk.7.attn_v.weight  Q6_K  [5120, 1024]
blk.7.ffn_down.weight  Q6_K  [32768, 5120]
blk.7.ffn_gate.weight  Q4_K  [5120, 32768]
blk.7.ffn_norm.weight  F32  [5120]
blk.7.ffn_up.weight  Q4_K  [5120, 32768]
blk.8
blk.8.attn_k.weight  Q4_K  [5120, 1024]
blk.8.attn_norm.weight  F32  [5120]
blk.8.attn_output.weight  Q4_K  [4096, 5120]
blk.8.attn_q.weight  Q4_K  [5120, 4096]
blk.8.attn_v.weight  Q4_K  [5120, 1024]
blk.8.ffn_down.weight  Q4_K  [32768, 5120]
blk.8.ffn_gate.weight  Q4_K  [5120, 32768]
blk.8.ffn_norm.weight  F32  [5120]
blk.8.ffn_up.weight  Q4_K  [5120, 32768]
blk.9
blk.9.attn_k.weight  Q4_K  [5120, 1024]
blk.9.attn_norm.weight  F32  [5120]
blk.9.attn_output.weight  Q4_K  [4096, 5120]
blk.9.attn_q.weight  Q4_K  [5120, 4096]
blk.9.attn_v.weight  Q4_K  [5120, 1024]
blk.9.ffn_down.weight  Q4_K  [32768, 5120]
blk.9.ffn_gate.weight  Q4_K  [5120, 32768]
blk.9.ffn_norm.weight  F32  [5120]
blk.9.ffn_up.weight  Q4_K  [5120, 32768]
blk.10
blk.10.attn_k.weight  Q4_K  [5120, 1024]
blk.10.attn_norm.weight  F32  [5120]
blk.10.attn_output.weight  Q4_K  [4096, 5120]
blk.10.attn_q.weight  Q4_K  [5120, 4096]
blk.10.attn_v.weight  Q6_K  [5120, 1024]
blk.10.ffn_down.weight  Q6_K  [32768, 5120]
blk.10.ffn_gate.weight  Q4_K  [5120, 32768]
blk.10.ffn_norm.weight  F32  [5120]
blk.10.ffn_up.weight  Q4_K  [5120, 32768]
blk.11
blk.11.attn_k.weight  Q4_K  [5120, 1024]
blk.11.attn_norm.weight  F32  [5120]
blk.11.attn_output.weight  Q4_K  [4096, 5120]
blk.11.attn_q.weight  Q4_K  [5120, 4096]
blk.11.attn_v.weight  Q4_K  [5120, 1024]
blk.11.ffn_down.weight  Q4_K  [32768, 5120]
blk.11.ffn_gate.weight  Q4_K  [5120, 32768]
blk.11.ffn_norm.weight  F32  [5120]
blk.11.ffn_up.weight  Q4_K  [5120, 32768]
blk.12
blk.12.attn_k.weight  Q4_K  [5120, 1024]
blk.12.attn_norm.weight  F32  [5120]
blk.12.attn_output.weight  Q4_K  [4096, 5120]
blk.12.attn_q.weight  Q4_K  [5120, 4096]
blk.12.attn_v.weight  Q4_K  [5120, 1024]
blk.12.ffn_down.weight  Q4_K  [32768, 5120]
blk.12.ffn_gate.weight  Q4_K  [5120, 32768]
blk.12.ffn_norm.weight  F32  [5120]
blk.12.ffn_up.weight  Q4_K  [5120, 32768]
blk.13
blk.13.attn_k.weight  Q4_K  [5120, 1024]
blk.13.attn_norm.weight  F32  [5120]
blk.13.attn_output.weight  Q4_K  [4096, 5120]
blk.13.attn_q.weight  Q4_K  [5120, 4096]
blk.13.attn_v.weight  Q6_K  [5120, 1024]
blk.13.ffn_down.weight  Q6_K  [32768, 5120]
blk.13.ffn_gate.weight  Q4_K  [5120, 32768]
blk.13.ffn_norm.weight  F32  [5120]
blk.13.ffn_up.weight  Q4_K  [5120, 32768]
blk.14
blk.14.attn_k.weight  Q4_K  [5120, 1024]
blk.14.attn_norm.weight  F32  [5120]
blk.14.attn_output.weight  Q4_K  [4096, 5120]
blk.14.attn_q.weight  Q4_K  [5120, 4096]
blk.14.attn_v.weight  Q4_K  [5120, 1024]
blk.14.ffn_down.weight  Q4_K  [32768, 5120]
blk.14.ffn_gate.weight  Q4_K  [5120, 32768]
blk.14.ffn_norm.weight  F32  [5120]
blk.14.ffn_up.weight  Q4_K  [5120, 32768]
blk.15
blk.15.attn_k.weight  Q4_K  [5120, 1024]
blk.15.attn_norm.weight  F32  [5120]
blk.15.attn_output.weight  Q4_K  [4096, 5120]
blk.15.attn_q.weight  Q4_K  [5120, 4096]
blk.15.attn_v.weight  Q4_K  [5120, 1024]
blk.15.ffn_down.weight  Q4_K  [32768, 5120]
blk.15.ffn_gate.weight  Q4_K  [5120, 32768]
blk.15.ffn_norm.weight  F32  [5120]
blk.15.ffn_up.weight  Q4_K  [5120, 32768]
blk.16
blk.16.attn_k.weight  Q4_K  [5120, 1024]
blk.16.attn_norm.weight  F32  [5120]
blk.16.attn_output.weight  Q4_K  [4096, 5120]
blk.16.attn_q.weight  Q4_K  [5120, 4096]
blk.16.attn_v.weight  Q6_K  [5120, 1024]
blk.16.ffn_down.weight  Q6_K  [32768, 5120]
blk.16.ffn_gate.weight  Q4_K  [5120, 32768]
blk.16.ffn_norm.weight  F32  [5120]
blk.16.ffn_up.weight  Q4_K  [5120, 32768]
blk.17
blk.17.attn_k.weight  Q4_K  [5120, 1024]
blk.17.attn_norm.weight  F32  [5120]
blk.17.attn_output.weight  Q4_K  [4096, 5120]
blk.17.attn_q.weight  Q4_K  [5120, 4096]
blk.17.attn_v.weight  Q4_K  [5120, 1024]
blk.17.ffn_down.weight  Q4_K  [32768, 5120]
blk.17.ffn_gate.weight  Q4_K  [5120, 32768]
blk.17.ffn_norm.weight  F32  [5120]
blk.17.ffn_up.weight  Q4_K  [5120, 32768]
blk.18
blk.18.attn_k.weight  Q4_K  [5120, 1024]
blk.18.attn_norm.weight  F32  [5120]
blk.18.attn_output.weight  Q4_K  [4096, 5120]
blk.18.attn_q.weight  Q4_K  [5120, 4096]
blk.18.attn_v.weight  Q4_K  [5120, 1024]
blk.18.ffn_down.weight  Q4_K  [32768, 5120]
blk.18.ffn_gate.weight  Q4_K  [5120, 32768]
blk.18.ffn_norm.weight  F32  [5120]
blk.18.ffn_up.weight  Q4_K  [5120, 32768]
blk.19
blk.19.attn_k.weight  Q4_K  [5120, 1024]
blk.19.attn_norm.weight  F32  [5120]
blk.19.attn_output.weight  Q4_K  [4096, 5120]
blk.19.attn_q.weight  Q4_K  [5120, 4096]
blk.19.attn_v.weight  Q6_K  [5120, 1024]
blk.19.ffn_down.weight  Q6_K  [32768, 5120]
blk.19.ffn_gate.weight  Q4_K  [5120, 32768]
blk.19.ffn_norm.weight  F32  [5120]
blk.19.ffn_up.weight  Q4_K  [5120, 32768]
blk.20
blk.20.attn_k.weight  Q4_K  [5120, 1024]
blk.20.attn_norm.weight  F32  [5120]
blk.20.attn_output.weight  Q4_K  [4096, 5120]
blk.20.attn_q.weight  Q4_K  [5120, 4096]
blk.20.attn_v.weight  Q4_K  [5120, 1024]
blk.20.ffn_down.weight  Q4_K  [32768, 5120]
blk.20.ffn_gate.weight  Q4_K  [5120, 32768]
blk.20.ffn_norm.weight  F32  [5120]
blk.20.ffn_up.weight  Q4_K  [5120, 32768]
blk.21
blk.21.attn_k.weight  Q4_K  [5120, 1024]
blk.21.attn_norm.weight  F32  [5120]
blk.21.attn_output.weight  Q4_K  [4096, 5120]
blk.21.attn_q.weight  Q4_K  [5120, 4096]
blk.21.attn_v.weight  Q4_K  [5120, 1024]
blk.21.ffn_down.weight  Q4_K  [32768, 5120]
blk.21.ffn_gate.weight  Q4_K  [5120, 32768]
blk.21.ffn_norm.weight  F32  [5120]
blk.21.ffn_up.weight  Q4_K  [5120, 32768]
blk.22
blk.22.attn_k.weight  Q4_K  [5120, 1024]
blk.22.attn_norm.weight  F32  [5120]
blk.22.attn_output.weight  Q4_K  [4096, 5120]
blk.22.attn_q.weight  Q4_K  [5120, 4096]
blk.22.attn_v.weight  Q6_K  [5120, 1024]
blk.22.ffn_down.weight  Q6_K  [32768, 5120]
blk.22.ffn_gate.weight  Q4_K  [5120, 32768]
blk.22.ffn_norm.weight  F32  [5120]
blk.22.ffn_up.weight  Q4_K  [5120, 32768]
blk.23
blk.23.attn_k.weight  Q4_K  [5120, 1024]
blk.23.attn_norm.weight  F32  [5120]
blk.23.attn_output.weight  Q4_K  [4096, 5120]
blk.23.attn_q.weight  Q4_K  [5120, 4096]
blk.23.attn_v.weight  Q4_K  [5120, 1024]
blk.23.ffn_down.weight  Q4_K  [32768, 5120]
blk.23.ffn_gate.weight  Q4_K  [5120, 32768]
blk.23.ffn_norm.weight  F32  [5120]
blk.23.ffn_up.weight  Q4_K  [5120, 32768]
blk.24
blk.24.attn_k.weight  Q4_K  [5120, 1024]
blk.24.attn_norm.weight  F32  [5120]
blk.24.attn_output.weight  Q4_K  [4096, 5120]
blk.24.attn_q.weight  Q4_K  [5120, 4096]
blk.24.attn_v.weight  Q4_K  [5120, 1024]
blk.24.ffn_down.weight  Q4_K  [32768, 5120]
blk.24.ffn_gate.weight  Q4_K  [5120, 32768]
blk.24.ffn_norm.weight  F32  [5120]
blk.24.ffn_up.weight  Q4_K  [5120, 32768]
blk.25
blk.25.attn_k.weight  Q4_K  [5120, 1024]
blk.25.attn_norm.weight  F32  [5120]
blk.25.attn_output.weight  Q4_K  [4096, 5120]
blk.25.attn_q.weight  Q4_K  [5120, 4096]
blk.25.attn_v.weight  Q6_K  [5120, 1024]
blk.25.ffn_down.weight  Q6_K  [32768, 5120]
blk.25.ffn_gate.weight  Q4_K  [5120, 32768]
blk.25.ffn_norm.weight  F32  [5120]
blk.25.ffn_up.weight  Q4_K  [5120, 32768]
blk.26
blk.26.attn_k.weight  Q4_K  [5120, 1024]
blk.26.attn_norm.weight  F32  [5120]
blk.26.attn_output.weight  Q4_K  [4096, 5120]
blk.26.attn_q.weight  Q4_K  [5120, 4096]
blk.26.attn_v.weight  Q4_K  [5120, 1024]
blk.26.ffn_down.weight  Q4_K  [32768, 5120]
blk.26.ffn_gate.weight  Q4_K  [5120, 32768]
blk.26.ffn_norm.weight  F32  [5120]
blk.26.ffn_up.weight  Q4_K  [5120, 32768]
blk.27
blk.27.attn_k.weight  Q4_K  [5120, 1024]
blk.27.attn_norm.weight  F32  [5120]
blk.27.attn_output.weight  Q4_K  [4096, 5120]
blk.27.attn_q.weight  Q4_K  [5120, 4096]
blk.27.attn_v.weight  Q4_K  [5120, 1024]
blk.27.ffn_down.weight  Q4_K  [32768, 5120]
blk.27.ffn_gate.weight  Q4_K  [5120, 32768]
blk.27.ffn_norm.weight  F32  [5120]
blk.27.ffn_up.weight  Q4_K  [5120, 32768]
blk.28
blk.28.attn_k.weight  Q4_K  [5120, 1024]
blk.28.attn_norm.weight  F32  [5120]
blk.28.attn_output.weight  Q4_K  [4096, 5120]
blk.28.attn_q.weight  Q4_K  [5120, 4096]
blk.28.attn_v.weight  Q6_K  [5120, 1024]
blk.28.ffn_down.weight  Q6_K  [32768, 5120]
blk.28.ffn_gate.weight  Q4_K  [5120, 32768]
blk.28.ffn_norm.weight  F32  [5120]
blk.28.ffn_up.weight  Q4_K  [5120, 32768]
blk.29
blk.29.attn_k.weight  Q4_K  [5120, 1024]
blk.29.attn_norm.weight  F32  [5120]
blk.29.attn_output.weight  Q4_K  [4096, 5120]
blk.29.attn_q.weight  Q4_K  [5120, 4096]
blk.29.attn_v.weight  Q4_K  [5120, 1024]
blk.29.ffn_down.weight  Q4_K  [32768, 5120]
blk.29.ffn_gate.weight  Q4_K  [5120, 32768]
blk.29.ffn_norm.weight  F32  [5120]
blk.29.ffn_up.weight  Q4_K  [5120, 32768]
blk.30
blk.30.attn_k.weight  Q4_K  [5120, 1024]
blk.30.attn_norm.weight  F32  [5120]
blk.30.attn_output.weight  Q4_K  [4096, 5120]
blk.30.attn_q.weight  Q4_K  [5120, 4096]
blk.30.attn_v.weight  Q4_K  [5120, 1024]
blk.30.ffn_down.weight  Q4_K  [32768, 5120]
blk.30.ffn_gate.weight  Q4_K  [5120, 32768]
blk.30.ffn_norm.weight  F32  [5120]
blk.30.ffn_up.weight  Q4_K  [5120, 32768]
blk.31
blk.31.attn_k.weight  Q4_K  [5120, 1024]
blk.31.attn_norm.weight  F32  [5120]
blk.31.attn_output.weight  Q4_K  [4096, 5120]
blk.31.attn_q.weight  Q4_K  [5120, 4096]
blk.31.attn_v.weight  Q6_K  [5120, 1024]
blk.31.ffn_down.weight  Q6_K  [32768, 5120]
blk.31.ffn_gate.weight  Q4_K  [5120, 32768]
blk.31.ffn_norm.weight  F32  [5120]
blk.31.ffn_up.weight  Q4_K  [5120, 32768]
blk.32
blk.32.attn_k.weight  Q4_K  [5120, 1024]
blk.32.attn_norm.weight  F32  [5120]
blk.32.attn_output.weight  Q4_K  [4096, 5120]
blk.32.attn_q.weight  Q4_K  [5120, 4096]
blk.32.attn_v.weight  Q4_K  [5120, 1024]
blk.32.ffn_down.weight  Q4_K  [32768, 5120]
blk.32.ffn_gate.weight  Q4_K  [5120, 32768]
blk.32.ffn_norm.weight  F32  [5120]
blk.32.ffn_up.weight  Q4_K  [5120, 32768]
blk.33
blk.33.attn_k.weight  Q4_K  [5120, 1024]
blk.33.attn_norm.weight  F32  [5120]
blk.33.attn_output.weight  Q4_K  [4096, 5120]
blk.33.attn_q.weight  Q4_K  [5120, 4096]
blk.33.attn_v.weight  Q4_K  [5120, 1024]
blk.33.ffn_down.weight  Q4_K  [32768, 5120]
blk.33.ffn_gate.weight  Q4_K  [5120, 32768]
blk.33.ffn_norm.weight  F32  [5120]
blk.33.ffn_up.weight  Q4_K  [5120, 32768]
blk.34
blk.34.attn_k.weight  Q4_K  [5120, 1024]
blk.34.attn_norm.weight  F32  [5120]
blk.34.attn_output.weight  Q4_K  [4096, 5120]
blk.34.attn_q.weight  Q4_K  [5120, 4096]
blk.34.attn_v.weight  Q6_K  [5120, 1024]
blk.34.ffn_down.weight  Q6_K  [32768, 5120]
blk.34.ffn_gate.weight  Q4_K  [5120, 32768]
blk.34.ffn_norm.weight  F32  [5120]
blk.34.ffn_up.weight  Q4_K  [5120, 32768]
blk.35
blk.35.attn_k.weight  Q4_K  [5120, 1024]
blk.35.attn_norm.weight  F32  [5120]
blk.35.attn_output.weight  Q4_K  [4096, 5120]
blk.35.attn_q.weight  Q4_K  [5120, 4096]
blk.35.attn_v.weight  Q4_K  [5120, 1024]
blk.35.ffn_down.weight  Q6_K  [32768, 5120]
blk.35.ffn_gate.weight  Q4_K  [5120, 32768]
blk.35.ffn_norm.weight  F32  [5120]
blk.35.ffn_up.weight  Q4_K  [5120, 32768]
blk.36
blk.36.attn_k.weight  Q4_K  [5120, 1024]
blk.36.attn_norm.weight  F32  [5120]
blk.36.attn_output.weight  Q4_K  [4096, 5120]
blk.36.attn_q.weight  Q4_K  [5120, 4096]
blk.36.attn_v.weight  Q4_K  [5120, 1024]
blk.36.ffn_down.weight  Q6_K  [32768, 5120]
blk.36.ffn_gate.weight  Q4_K  [5120, 32768]
blk.36.ffn_norm.weight  F32  [5120]
blk.36.ffn_up.weight  Q4_K  [5120, 32768]
blk.37
blk.37.attn_k.weight  Q4_K  [5120, 1024]
blk.37.attn_norm.weight  F32  [5120]
blk.37.attn_output.weight  Q4_K  [4096, 5120]
blk.37.attn_q.weight  Q4_K  [5120, 4096]
blk.37.attn_v.weight  Q6_K  [5120, 1024]
blk.37.ffn_down.weight  Q6_K  [32768, 5120]
blk.37.ffn_gate.weight  Q4_K  [5120, 32768]
blk.37.ffn_norm.weight  F32  [5120]
blk.37.ffn_up.weight  Q4_K  [5120, 32768]
blk.38
blk.38.attn_k.weight  Q4_K  [5120, 1024]
blk.38.attn_norm.weight  F32  [5120]
blk.38.attn_output.weight  Q4_K  [4096, 5120]
blk.38.attn_q.weight  Q4_K  [5120, 4096]
blk.38.attn_v.weight  Q4_K  [5120, 1024]
blk.38.ffn_down.weight  Q6_K  [32768, 5120]
blk.38.ffn_gate.weight  Q4_K  [5120, 32768]
blk.38.ffn_norm.weight  F32  [5120]
blk.38.ffn_up.weight  Q4_K  [5120, 32768]
blk.39
blk.39.attn_k.weight  Q4_K  [5120, 1024]
blk.39.attn_norm.weight  F32  [5120]
blk.39.attn_output.weight  Q4_K  [4096, 5120]
blk.39.attn_q.weight  Q4_K  [5120, 4096]
blk.39.attn_v.weight  Q4_K  [5120, 1024]
blk.39.ffn_down.weight  Q6_K  [32768, 5120]
blk.39.ffn_gate.weight  Q4_K  [5120, 32768]
blk.39.ffn_norm.weight  F32  [5120]
blk.39.ffn_up.weight  Q4_K  [5120, 32768]
mm.linear_1.weight  BF16  [1024, 5120]
mm.linear_2.weight  BF16  [5120, 5120]
mm.norm.weight  F32  [1024]
mm.patch_merger.merging_layer.weight  BF16  [4096, 1024]
output.weight  Q6_K  [5120, 131072]
v.blk.0
v.blk.0.attn_k.weight  F16  [1024, 1024]
v.blk.0.attn_norm.weight  F32  [1024]
v.blk.0.attn_output.weight  F16  [1024, 1024]
v.blk.0.attn_q.weight  F16  [1024, 1024]
v.blk.0.attn_v.weight  Q6_K  [1024, 1024]
v.blk.0.ffn_down.weight  F16  [4096, 1024]
v.blk.0.ffn_gate.weight  F16  [1024, 4096]
v.blk.0.ffn_norm.weight  F32  [1024]
v.blk.0.ffn_up.weight  F16  [1024, 4096]
v.blk.1
v.blk.1.attn_k.weight  F16  [1024, 1024]
v.blk.1.attn_norm.weight  F32  [1024]
v.blk.1.attn_output.weight  F16  [1024, 1024]
v.blk.1.attn_q.weight  F16  [1024, 1024]
v.blk.1.attn_v.weight  Q4_K  [1024, 1024]
v.blk.1.ffn_down.weight  F16  [4096, 1024]
v.blk.1.ffn_gate.weight  F16  [1024, 4096]
v.blk.1.ffn_norm.weight  F32  [1024]
v.blk.1.ffn_up.weight  F16  [1024, 4096]
v.blk.2
v.blk.2.attn_k.weight  F16  [1024, 1024]
v.blk.2.attn_norm.weight  F32  [1024]
v.blk.2.attn_output.weight  F16  [1024, 1024]
v.blk.2.attn_q.weight  F16  [1024, 1024]
v.blk.2.attn_v.weight  Q6_K  [1024, 1024]
v.blk.2.ffn_down.weight  F16  [4096, 1024]
v.blk.2.ffn_gate.weight  F16  [1024, 4096]
v.blk.2.ffn_norm.weight  F32  [1024]
v.blk.2.ffn_up.weight  F16  [1024, 4096]
v.blk.3
v.blk.3.attn_k.weight  F16  [1024, 1024]
v.blk.3.attn_norm.weight  F32  [1024]
v.blk.3.attn_output.weight  F16  [1024, 1024]
v.blk.3.attn_q.weight  F16  [1024, 1024]
v.blk.3.attn_v.weight  Q6_K  [1024, 1024]
v.blk.3.ffn_down.weight  F16  [4096, 1024]
v.blk.3.ffn_gate.weight  F16  [1024, 4096]
v.blk.3.ffn_norm.weight  F32  [1024]
v.blk.3.ffn_up.weight  F16  [1024, 4096]
v.blk.4
v.blk.4.attn_k.weight  F16  [1024, 1024]
v.blk.4.attn_norm.weight  F32  [1024]
v.blk.4.attn_output.weight  F16  [1024, 1024]
v.blk.4.attn_q.weight  F16  [1024, 1024]
v.blk.4.attn_v.weight  Q6_K  [1024, 1024]
v.blk.4.ffn_down.weight  F16  [4096, 1024]
v.blk.4.ffn_gate.weight  F16  [1024, 4096]
v.blk.4.ffn_norm.weight  F32  [1024]
v.blk.4.ffn_up.weight  F16  [1024, 4096]
v.blk.5
v.blk.5.attn_k.weight  F16  [1024, 1024]
v.blk.5.attn_norm.weight  F32  [1024]
v.blk.5.attn_output.weight  F16  [1024, 1024]
v.blk.5.attn_q.weight  F16  [1024, 1024]
v.blk.5.attn_v.weight  Q6_K  [1024, 1024]
v.blk.5.ffn_down.weight  F16  [4096, 1024]
v.blk.5.ffn_gate.weight  F16  [1024, 4096]
v.blk.5.ffn_norm.weight  F32  [1024]
v.blk.5.ffn_up.weight  F16  [1024, 4096]
v.blk.6
v.blk.6.attn_k.weight  F16  [1024, 1024]
v.blk.6.attn_norm.weight  F32  [1024]
v.blk.6.attn_output.weight  F16  [1024, 1024]
v.blk.6.attn_q.weight  F16  [1024, 1024]
v.blk.6.attn_v.weight  Q6_K  [1024, 1024]
v.blk.6.ffn_down.weight  F16  [4096, 1024]
v.blk.6.ffn_gate.weight  F16  [1024, 4096]
v.blk.6.ffn_norm.weight  F32  [1024]
v.blk.6.ffn_up.weight  F16  [1024, 4096]
v.blk.7
v.blk.7.attn_k.weight  F16  [1024, 1024]
v.blk.7.attn_norm.weight  F32  [1024]
v.blk.7.attn_output.weight  F16  [1024, 1024]
v.blk.7.attn_q.weight  F16  [1024, 1024]
v.blk.7.attn_v.weight  Q6_K  [1024, 1024]
v.blk.7.ffn_down.weight  F16  [4096, 1024]
v.blk.7.ffn_gate.weight  F16  [1024, 4096]
v.blk.7.ffn_norm.weight  F32  [1024]
v.blk.7.ffn_up.weight  F16  [1024, 4096]
v.blk.8
v.blk.8.attn_k.weight  F16  [1024, 1024]
v.blk.8.attn_norm.weight  F32  [1024]
v.blk.8.attn_output.weight  F16  [1024, 1024]
v.blk.8.attn_q.weight  F16  [1024, 1024]
v.blk.8.attn_v.weight  Q6_K  [1024, 1024]
v.blk.8.ffn_down.weight  F16  [4096, 1024]
v.blk.8.ffn_gate.weight  F16  [1024, 4096]
v.blk.8.ffn_norm.weight  F32  [1024]
v.blk.8.ffn_up.weight  F16  [1024, 4096]
v.blk.9
v.blk.9.attn_k.weight  F16  [1024, 1024]
v.blk.9.attn_norm.weight  F32  [1024]
v.blk.9.attn_output.weight  F16  [1024, 1024]
v.blk.9.attn_q.weight  F16  [1024, 1024]
v.blk.9.attn_v.weight  Q6_K  [1024, 1024]
v.blk.9.ffn_down.weight  F16  [4096, 1024]
v.blk.9.ffn_gate.weight  F16  [1024, 4096]
v.blk.9.ffn_norm.weight  F32  [1024]
v.blk.9.ffn_up.weight  F16  [1024, 4096]
v.blk.10
v.blk.10.attn_k.weight  F16  [1024, 1024]
v.blk.10.attn_norm.weight  F32  [1024]
v.blk.10.attn_output.weight  F16  [1024, 1024]
v.blk.10.attn_q.weight  F16  [1024, 1024]
v.blk.10.attn_v.weight  Q4_K  [1024, 1024]
v.blk.10.ffn_down.weight  F16  [4096, 1024]
v.blk.10.ffn_gate.weight  F16  [1024, 4096]
v.blk.10.ffn_norm.weight  F32  [1024]
v.blk.10.ffn_up.weight  F16  [1024, 4096]
v.blk.11
v.blk.11.attn_k.weight  F16  [1024, 1024]
v.blk.11.attn_norm.weight  F32  [1024]
v.blk.11.attn_output.weight  F16  [1024, 1024]
v.blk.11.attn_q.weight  F16  [1024, 1024]
v.blk.11.attn_v.weight  Q6_K  [1024, 1024]
v.blk.11.ffn_down.weight  F16  [4096, 1024]
v.blk.11.ffn_gate.weight  F16  [1024, 4096]
v.blk.11.ffn_norm.weight  F32  [1024]
v.blk.11.ffn_up.weight  F16  [1024, 4096]
v.blk.12
v.blk.12.attn_k.weight  F16  [1024, 1024]
v.blk.12.attn_norm.weight  F32  [1024]
v.blk.12.attn_output.weight  F16  [1024, 1024]
v.blk.12.attn_q.weight  F16  [1024, 1024]
v.blk.12.attn_v.weight  Q4_K  [1024, 1024]
v.blk.12.ffn_down.weight  F16  [4096, 1024]
v.blk.12.ffn_gate.weight  F16  [1024, 4096]
v.blk.12.ffn_norm.weight  F32  [1024]
v.blk.12.ffn_up.weight  F16  [1024, 4096]
v.blk.13
v.blk.13.attn_k.weight  F16  [1024, 1024]
v.blk.13.attn_norm.weight  F32  [1024]
v.blk.13.attn_output.weight  F16  [1024, 1024]
v.blk.13.attn_q.weight  F16  [1024, 1024]
v.blk.13.attn_v.weight  Q4_K  [1024, 1024]
v.blk.13.ffn_down.weight  F16  [4096, 1024]
v.blk.13.ffn_gate.weight  F16  [1024, 4096]
v.blk.13.ffn_norm.weight  F32  [1024]
v.blk.13.ffn_up.weight  F16  [1024, 4096]
v.blk.14
v.blk.14.attn_k.weight  F16  [1024, 1024]
v.blk.14.attn_norm.weight  F32  [1024]
v.blk.14.attn_output.weight  F16  [1024, 1024]
v.blk.14.attn_q.weight  F16  [1024, 1024]
v.blk.14.attn_v.weight  Q6_K  [1024, 1024]
v.blk.14.ffn_down.weight  F16  [4096, 1024]
v.blk.14.ffn_gate.weight  F16  [1024, 4096]
v.blk.14.ffn_norm.weight  F32  [1024]
v.blk.14.ffn_up.weight  F16  [1024, 4096]
v.blk.15
v.blk.15.attn_k.weight  F16  [1024, 1024]
v.blk.15.attn_norm.weight  F32  [1024]
v.blk.15.attn_output.weight  F16  [1024, 1024]
v.blk.15.attn_q.weight  F16  [1024, 1024]
v.blk.15.attn_v.weight  Q4_K  [1024, 1024]
v.blk.15.ffn_down.weight  F16  [4096, 1024]
v.blk.15.ffn_gate.weight  F16  [1024, 4096]
v.blk.15.ffn_norm.weight  F32  [1024]
v.blk.15.ffn_up.weight  F16  [1024, 4096]
v.blk.16
v.blk.16.attn_k.weight  F16  [1024, 1024]
v.blk.16.attn_norm.weight  F32  [1024]
v.blk.16.attn_output.weight  F16  [1024, 1024]
v.blk.16.attn_q.weight  F16  [1024, 1024]
v.blk.16.attn_v.weight  Q4_K  [1024, 1024]
v.blk.16.ffn_down.weight  F16  [4096, 1024]
v.blk.16.ffn_gate.weight  F16  [1024, 4096]
v.blk.16.ffn_norm.weight  F32  [1024]
v.blk.16.ffn_up.weight  F16  [1024, 4096]
v.blk.17
v.blk.17.attn_k.weight  F16  [1024, 1024]
v.blk.17.attn_norm.weight  F32  [1024]
v.blk.17.attn_output.weight  F16  [1024, 1024]
v.blk.17.attn_q.weight  F16  [1024, 1024]
v.blk.17.attn_v.weight  Q6_K  [1024, 1024]
v.blk.17.ffn_down.weight  F16  [4096, 1024]
v.blk.17.ffn_gate.weight  F16  [1024, 4096]
v.blk.17.ffn_norm.weight  F32  [1024]
v.blk.17.ffn_up.weight  F16  [1024, 4096]
v.blk.18
v.blk.18.attn_k.weight  F16  [1024, 1024]
v.blk.18.attn_norm.weight  F32  [1024]
v.blk.18.attn_output.weight  F16  [1024, 1024]
v.blk.18.attn_q.weight  F16  [1024, 1024]
v.blk.18.attn_v.weight  Q4_K  [1024, 1024]
v.blk.18.ffn_down.weight  F16  [4096, 1024]
v.blk.18.ffn_gate.weight  F16  [1024, 4096]
v.blk.18.ffn_norm.weight  F32  [1024]
v.blk.18.ffn_up.weight  F16  [1024, 4096]
v.blk.19
v.blk.19.attn_k.weight  F16  [1024, 1024]
v.blk.19.attn_norm.weight  F32  [1024]
v.blk.19.attn_output.weight  F16  [1024, 1024]
v.blk.19.attn_q.weight  F16  [1024, 1024]
v.blk.19.attn_v.weight  Q4_K  [1024, 1024]
v.blk.19.ffn_down.weight  F16  [4096, 1024]
v.blk.19.ffn_gate.weight  F16  [1024, 4096]
v.blk.19.ffn_norm.weight  F32  [1024]
v.blk.19.ffn_up.weight  F16  [1024, 4096]
v.blk.20
v.blk.20.attn_k.weight  F16  [1024, 1024]
v.blk.20.attn_norm.weight  F32  [1024]
v.blk.20.attn_output.weight  F16  [1024, 1024]
v.blk.20.attn_q.weight  F16  [1024, 1024]
v.blk.20.attn_v.weight  Q4_K  [1024, 1024]
v.blk.20.ffn_down.weight  F16  [4096, 1024]
v.blk.20.ffn_gate.weight  F16  [1024, 4096]
v.blk.20.ffn_norm.weight  F32  [1024]
v.blk.20.ffn_up.weight  F16  [1024, 4096]
v.blk.21
v.blk.21.attn_k.weight  F16  [1024, 1024]
v.blk.21.attn_norm.weight  F32  [1024]
v.blk.21.attn_output.weight  F16  [1024, 1024]
v.blk.21.attn_q.weight  F16  [1024, 1024]
v.blk.21.attn_v.weight  Q4_K  [1024, 1024]
v.blk.21.ffn_down.weight  F16  [4096, 1024]
v.blk.21.ffn_gate.weight  F16  [1024, 4096]
v.blk.21.ffn_norm.weight  F32  [1024]
v.blk.21.ffn_up.weight  F16  [1024, 4096]
v.blk.22
v.blk.22.attn_k.weight  F16  [1024, 1024]
v.blk.22.attn_norm.weight  F32  [1024]
v.blk.22.attn_output.weight  F16  [1024, 1024]
v.blk.22.attn_q.weight  F16  [1024, 1024]
v.blk.22.attn_v.weight  Q6_K  [1024, 1024]
v.blk.22.ffn_down.weight  F16  [4096, 1024]
v.blk.22.ffn_gate.weight  F16  [1024, 4096]
v.blk.22.ffn_norm.weight  F32  [1024]
v.blk.22.ffn_up.weight  F16  [1024, 4096]
v.blk.23
v.blk.23.attn_k.weight  F16  [1024, 1024]
v.blk.23.attn_norm.weight  F32  [1024]
v.blk.23.attn_output.weight  F16  [1024, 1024]
v.blk.23.attn_q.weight  F16  [1024, 1024]
v.blk.23.attn_v.weight  Q6_K  [1024, 1024]
v.blk.23.ffn_down.weight  F16  [4096, 1024]
v.blk.23.ffn_gate.weight  F16  [1024, 4096]
v.blk.23.ffn_norm.weight  F32  [1024]
v.blk.23.ffn_up.weight  F16  [1024, 4096]
v.encoder_norm.weight  F32  [1024]
v.patch_conv.weight  F16  [14, 14, 3, 1024]
output_norm.weight  F32  [5120]
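
The per-block attention shapes line up with the metadata: 32 query heads and 8 key/value heads, each of width 128, project the 5120-wide hidden state to 4096 query columns and 1024 key/value columns (grouped-query attention with 4 query heads per KV head). A small sanity check using only numbers quoted above:

```python
# Values copied from the Metadata and Tensor sections above.
embedding_length = 5120  # mistral3.embedding_length (input width of each projection)
head_count = 32          # mistral3.attention.head_count
head_count_kv = 8        # mistral3.attention.head_count_kv
head_dim = 128           # mistral3.attention.key_length / value_length

q_width = head_count * head_dim      # 4096 -> blk.N.attn_q.weight: [5120, 4096]
kv_width = head_count_kv * head_dim  # 1024 -> blk.N.attn_k/attn_v.weight: [5120, 1024]
queries_per_kv_head = head_count // head_count_kv  # 4

assert (q_width, kv_width, queries_per_kv_head) == (4096, 1024, 4)
print(q_width, kv_width, queries_per_kv_head)
```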