model : support MiniCPM-V 4.5 (#15575)

This commit is contained in:
tc-mb
2025-08-26 16:05:55 +08:00
committed by GitHub
parent 39842a7f73
commit c4e9239064
5 changed files with 61 additions and 2 deletions

View File

@@ -6,7 +6,7 @@ Download [MiniCPM-V-4](https://huggingface.co/openbmb/MiniCPM-V-4) PyTorch model
### Build llama.cpp
Readme modification time: 20250731
If there are differences in usage, please refer to the official build [documentation](https://github.com/ggerganov/llama.cpp/blob/master/docs/build.md)

View File

@@ -0,0 +1,47 @@
## MiniCPM-V 4.5
### Prepare models and code
Download the [MiniCPM-V-4_5](https://huggingface.co/openbmb/MiniCPM-V-4_5) PyTorch model from Hugging Face into the "MiniCPM-V-4_5" folder.
### Build llama.cpp
Readme modification time: 20250826
If there are differences in usage, please refer to the official build [documentation](https://github.com/ggerganov/llama.cpp/blob/master/docs/build.md)
Clone llama.cpp:
```bash
git clone https://github.com/ggerganov/llama.cpp
cd llama.cpp
```
Build llama.cpp using `CMake`:
```bash
cmake -B build
cmake --build build --config Release
```
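This produces a CPU build by default (the Metal backend is enabled automatically on Apple Silicon). As a minimal sketch of a GPU build, assuming a CUDA toolkit is installed, the CUDA backend can be enabled with an extra CMake flag; see the build documentation linked above for the full list of backend options:
```bash
# optional: GPU build with the CUDA backend (assumes a CUDA toolkit is installed)
cmake -B build -DGGML_CUDA=ON
cmake --build build --config Release
```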
### Usage of MiniCPM-V 4.5
Convert the PyTorch model to gguf files (you can also download our pre-converted [gguf](https://huggingface.co/openbmb/MiniCPM-V-4_5-gguf) files):
```bash
python ./tools/mtmd/legacy-models/minicpmv-surgery.py -m ../MiniCPM-V-4_5
python ./tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py -m ../MiniCPM-V-4_5 --minicpmv-projector ../MiniCPM-V-4_5/minicpmv.projector --output-dir ../MiniCPM-V-4_5/ --minicpmv_version 6
python ./convert_hf_to_gguf.py ../MiniCPM-V-4_5/model
# quantize int4 version
./build/bin/llama-quantize ../MiniCPM-V-4_5/model/ggml-model-f16.gguf ../MiniCPM-V-4_5/model/ggml-model-Q4_K_M.gguf Q4_K_M
```
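If the conversion and quantization finish without errors, the files used in the commands below should exist at the following paths (a sketch based on the paths in this README; exact file names can vary with converter versions):
```bash
# expected outputs of the conversion steps above
ls ../MiniCPM-V-4_5/mmproj-model-f16.gguf         # vision projector, passed via --mmproj
ls ../MiniCPM-V-4_5/model/ggml-model-f16.gguf     # converted f16 language model
ls ../MiniCPM-V-4_5/model/ggml-model-Q4_K_M.gguf  # Q4_K_M quantized language model
```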
Inference on Linux or macOS:
```bash
# run in single-turn mode
./build/bin/llama-mtmd-cli -m ../MiniCPM-V-4_5/model/ggml-model-f16.gguf --mmproj ../MiniCPM-V-4_5/mmproj-model-f16.gguf -c 4096 --temp 0.7 --top-p 0.8 --top-k 100 --repeat-penalty 1.05 --image xx.jpg -p "What is in the image?"
# run in conversation mode
./build/bin/llama-mtmd-cli -m ../MiniCPM-V-4_5/model/ggml-model-Q4_K_M.gguf --mmproj ../MiniCPM-V-4_5/mmproj-model-f16.gguf
```
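In conversation mode no `--image` argument is passed; images are loaded from within the interactive session instead (for example via an `/image` command in recent builds; this is an assumption, so check the help output of your build for the exact commands):
```bash
# list the supported flags and interactive commands for your build
./build/bin/llama-mtmd-cli --help
```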

View File

@@ -2202,6 +2202,8 @@ struct clip_model_loader {
    hparams.minicpmv_query_num = 64;
} else if (hparams.minicpmv_version == 5) {
    hparams.minicpmv_query_num = 64;
} else if (hparams.minicpmv_version == 6) {
    hparams.minicpmv_query_num = 64;
} else {
    hparams.minicpmv_query_num = 96;
}
@@ -3685,6 +3687,9 @@ int clip_n_output_tokens(const struct clip_ctx * ctx, struct clip_image_f32 * im
} else if (params.minicpmv_version == 5) {
    // MiniCPM-V 4.0
    n_patches = 64;
} else if (params.minicpmv_version == 6) {
    // MiniCPM-V 4.5
    n_patches = 64;
} else {
    GGML_ABORT("Unknown minicpmv version");
}

View File

@@ -607,6 +607,9 @@ else:
elif minicpmv_version == 5:
    emb_dim = 2560
    block_count = 27
elif minicpmv_version == 6:
    emb_dim = 4096
    block_count = 27

default_vision_config = {
    "hidden_size": 1152,
@@ -630,6 +633,10 @@ elif minicpmv_version == 5:
default_vision_config["model_type"] = "siglip_vision_model" default_vision_config["model_type"] = "siglip_vision_model"
vision_config = SiglipVisionConfig(**default_vision_config) vision_config = SiglipVisionConfig(**default_vision_config)
model = SiglipVisionTransformer(vision_config) model = SiglipVisionTransformer(vision_config)
elif minicpmv_version == 6:
default_vision_config["model_type"] = "siglip_vision_model"
vision_config = SiglipVisionConfig(**default_vision_config)
model = SiglipVisionTransformer(vision_config)
processor = None processor = None
# if model.attn_pool is not None: # if model.attn_pool is not None:

View File

@@ -207,7 +207,7 @@ struct mtmd_context {
    tok_row_end_trail = false; // no trailing end-of-row token
    ov_img_first = true;
} else if (minicpmv_version == 3 || minicpmv_version == 4 || minicpmv_version == 5 || minicpmv_version == 6) {
    // minicpmv 2.6 format:
    // <image> (overview) </image><slice> (slice) </slice><slice> (slice) </slice>\n ...
    slice_tmpl = MTMD_SLICE_TMPL_MINICPMV_2_6;