Mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-10-30 08:42:00 +00:00)
	convert : fix Qwen3-Embedding pre-tokenizer hash (#15030)
Author: Douglas Hanley
convert_hf_to_gguf.py

@@ -702,6 +702,9 @@ class TextModel(ModelBase):
         if chkhsh == "81212dc7cdb7e0c1074ca62c5aeab0d43c9f52b8a737be7b12a777c953027890":
             # ref: https://huggingface.co/moonshotai/Kimi-K2-Base
             res = "kimi-k2"
+        if chkhsh == "d4540891389ea895b53b399da6ac824becc30f2fba0e9ddbb98f92e55ca0e97c":
+            # ref: https://huggingface.co/Qwen/Qwen3-Embedding-0.6B
+            res = "qwen2"
         if chkhsh == "0ef9807a4087ebef797fc749390439009c3b9eda9ad1a097abbe738f486c01e5":
             # ref: https://huggingface.co/meta-llama/Meta-Llama-3-8B
             res = "llama-bpe"
@@ -849,9 +852,6 @@ class TextModel(ModelBase):
         if chkhsh == "2085e1638f6c377a0aa4ead21b27bb4cb941bf800df86ed391011769c1758dfb":
             # ref: https://huggingface.co/LGAI-EXAONE/EXAONE-4.0-32B
             res = "exaone4"
-        if chkhsh == "d4540891389ea895b53b399da6ac824becc30f2fba0e9ddbb98f92e55ca0e97c":
-            # ref: https://huggingface.co/Qwen/Qwen3-Embedding-8B
-            res = "qwen2"
 
         if res is None:
             logger.warning("\n")

convert_hf_to_gguf_update.py

@@ -147,6 +147,7 @@ pre_computed_hashes = [
     {"name": "falcon-h1", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/tiiuae/Falcon-H1-7B-Base", "chkhsh": "3eda48b4c4dc7de733d1a8b3e3b4a85243dbbf704da2ee9d42c6beced8897896"},
     {"name": "falcon-h1", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/tiiuae/Falcon-H1-34B-Base", "chkhsh": "48f8e02c0359c0bbdd82f26909171fac1c18a457bb47573ed1fe3bbb2c1cfd4b"},
     {"name": "kimi-k2",   "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/moonshotai/Kimi-K2-Base",   "chkhsh": "81212dc7cdb7e0c1074ca62c5aeab0d43c9f52b8a737be7b12a777c953027890"},
+    {"name": "qwen2",     "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/Qwen/Qwen3-Embedding-0.6B", "chkhsh": "d4540891389ea895b53b399da6ac824becc30f2fba0e9ddbb98f92e55ca0e97c"},
 ]
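For context on what the diff touches: each chkhsh is a checksum of the token ids a Hugging Face tokenizer produces for a fixed test string, and the checksum is then mapped to a known pre-tokenizer name. The sketch below illustrates that mechanism only; the CHKTXT constant and compute_chkhsh helper are stand-ins rather than the upstream names or the exact upstream test text, so the digest it prints will not match the chkhsh values shown above. The fix works because Qwen3-Embedding-0.6B and Qwen3-Embedding-8B ship the same Qwen2-style BPE tokenizer, so a single hash entry covers both; the commit only changes which repo the entry references (and where the entry sits in the dispatch).

```python
# Minimal sketch of reproducing a pre-tokenizer checksum, assuming the
# sha256-over-token-ids scheme used by the convert/update scripts.
# CHKTXT is an illustrative stand-in, NOT the upstream test string, so the
# printed digest will differ from the chkhsh values in the diff.
from hashlib import sha256

from transformers import AutoTokenizer


CHKTXT = "example text with numbers 3.14, 'quotes', tabs\t, newlines\n and emoji 🚀"


def compute_chkhsh(repo: str) -> str:
    tok = AutoTokenizer.from_pretrained(repo)      # fetch the HF tokenizer
    ids = tok.encode(CHKTXT)                       # pre-tokenizer + BPE output
    return sha256(str(ids).encode()).hexdigest()   # hash of the id list's repr


if __name__ == "__main__":
    # The 0.6B and 8B embedding checkpoints use the same tokenizer, so either
    # repo would yield the same digest under this scheme.
    print(compute_chkhsh("Qwen/Qwen3-Embedding-0.6B"))
```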