llama.cpp commit 46d9caa27a (mirror of https://github.com/ggml-org/llama.cpp.git)

This commit adds a new target to the Makefile for converting models that are multimodal. This target converts the original model and also creates the mmproj GGUF model.

The motivation for this change is that for multimodal models, for example those that contain a vision encoder, we will often want to upload both the quantized model and the vision encoder model to HuggingFace.

Example usage:
```console
$ make causal-convert-mm-model MODEL_PATH=~/work/ai/models/gemma-3-4b-it-qat-q4_0-unquantized/
...
The environment variable CONVERTED_MODEL can be set to this path using:
export CONVERTED_MODEL=/home/danbev/work/ai/llama.cpp/models/gemma-3-4b-it-qat-q4_0-unquantized.gguf
The mmproj model was created in /home/danbev/work/ai/llama.cpp/models/mmproj-gemma-3-4b-it-qat-q4_0-unquantized.gguf
```

The converted original model can then be quantized, and after that both the quantized model and the mmproj file can be uploaded to HuggingFace.

Refs: https://huggingface.co/ggml-org/gemma-3-4b-it-qat-GGUF/tree/main
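As a rough follow-up to the quantization and upload step mentioned above, a minimal sketch; the file names, quantization type, and repository id are assumptions and not part of the commit:

```console
# Quantize the converted f16 model with the llama-quantize tool from llama.cpp
# (file names below are hypothetical examples)
$ ./build/bin/llama-quantize \
    models/gemma-3-4b-it-qat-q4_0-unquantized.gguf \
    models/gemma-3-4b-it-qat-Q4_0.gguf Q4_0

# Upload both the quantized model and the mmproj file to HuggingFace
# (<user> is a placeholder repository owner)
$ huggingface-cli upload <user>/gemma-3-4b-it-qat-GGUF models/gemma-3-4b-it-qat-Q4_0.gguf
$ huggingface-cli upload <user>/gemma-3-4b-it-qat-GGUF models/mmproj-gemma-3-4b-it-qat-q4_0-unquantized.gguf
```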
		
			
				
	
	
		
Executable Bash file, 47 lines, 1.3 KiB:
```bash
#!/bin/bash

set -e

# Parse command line arguments. The only recognized option is --mmproj,
# which is forwarded to the conversion script to also produce the mmproj GGUF.
MMPROJ=""
while [[ $# -gt 0 ]]; do
    case $1 in
        --mmproj)
            MMPROJ="--mmproj"
            shift
            ;;
        *)
            shift
            ;;
    esac
done

# Settings, overridable via environment variables (MODEL_PATH must be set).
MODEL_NAME="${MODEL_NAME:-$(basename "$MODEL_PATH")}"
OUTPUT_DIR="${OUTPUT_DIR:-../../models}"
TYPE="${OUTTYPE:-f16}"
METADATA_OVERRIDE="${METADATA_OVERRIDE:-}"
CONVERTED_MODEL="${OUTPUT_DIR}/${MODEL_NAME}.gguf"

echo "Model path: ${MODEL_PATH}"
echo "Model name: ${MODEL_NAME}"
echo "Data  type: ${TYPE}"
echo "Converted model path: ${CONVERTED_MODEL}"
echo "Metadata override: ${METADATA_OVERRIDE}"

# Build the conversion command, appending optional arguments only when set.
CMD_ARGS=("python" "../../convert_hf_to_gguf.py" "--verbose")
CMD_ARGS+=("${MODEL_PATH}")
CMD_ARGS+=("--outfile" "${CONVERTED_MODEL}")
CMD_ARGS+=("--outtype" "${TYPE}")
[[ -n "$METADATA_OVERRIDE" ]] && CMD_ARGS+=("--metadata" "${METADATA_OVERRIDE}")
[[ -n "$MMPROJ" ]] && CMD_ARGS+=("${MMPROJ}")

"${CMD_ARGS[@]}"

echo ""
echo "The environment variable CONVERTED_MODEL can be set to this path using:"
echo "export CONVERTED_MODEL=$(realpath "${CONVERTED_MODEL}")"
if [[ -n "$MMPROJ" ]]; then
    mmproj_file="${OUTPUT_DIR}/mmproj-$(basename "${CONVERTED_MODEL}")"
    echo "The mmproj model was created in $(realpath "$mmproj_file")"
fi
```
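For reference, the script can also be invoked directly rather than through the Makefile target. A minimal sketch, assuming the script is saved as convert-model.sh; the filename and model path are placeholders:

```console
# MODEL_PATH must point at the original (unconverted) model directory;
# OUTTYPE, OUTPUT_DIR, MODEL_NAME and METADATA_OVERRIDE are optional overrides.
$ MODEL_PATH=~/work/ai/models/gemma-3-4b-it-qat-q4_0-unquantized \
  OUTTYPE=f16 \
  ./convert-model.sh --mmproj
```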