mirror of
				https://github.com/ggml-org/llama.cpp.git
				synced 2025-11-03 09:22:01 +00:00 
			
		
		
		
	* check whether platform is s390x; if yes, do not import immintrin.h
	* support s390x big endian
	* support --bigendian option for s390x
	  1. verified with baichuan7b-chat with float 16 on s390x
	  2. verified with baichuan7b-chat
	  3. verified with chinese-alpaca-2-13b-f16
	* update format based on editor-config checker result
	* Update convert-baichuan-hf-to-gguf.py
	* 1. check in ggml.c if endianness does not match
	  2. update GGUF version
	  3. change get_pack_prefix to property
	  4. update information log
	* always use "GGUF" as beginning of GGUF file
	* Compare "GGUF" with file header char by char
	  1. Set GGUF_MAGIC to "GGUF" string instead of int value
	  2. Compare "GGUF" char by char to ensure its byte order
	  3. Move bytes swap code from convert.py to gguf.py write_tensor_data
	---------
	Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>
		
			
				
	
	
		
			30 lines
		
	
	
		
			697 B
		
	
	
	
		
			TOML
		
	
	
	
	
	
			
		
		
	
	
			30 lines
		
	
	
		
			697 B
		
	
	
	
		
			TOML
		
	
	
	
	
	
# Package metadata for the `gguf` Python package (Poetry format).
[tool.poetry]
name = "gguf"
version = "0.4.5"
description = "Write ML models in GGUF for GGML"
authors = ["GGML <ggml@ggml.ai>"]
# Ship the package plus the py.typed marker so type checkers see the
# package as typed (PEP 561).
packages = [
    { include = "gguf" },
    { include = "gguf/py.typed" },
]
readme = "README.md"
homepage = "https://ggml.ai"
repository = "https://github.com/ggerganov/llama.cpp"
keywords = ["ggml", "gguf", "llama.cpp"]
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
# Runtime dependencies. `python` declares the supported interpreter range.
[tool.poetry.dependencies]
python = ">=3.8"
numpy = ">=1.17"
# Development-only dependencies (test tooling).
# NOTE(review): this is Poetry's legacy dev-dependencies table; newer Poetry
# versions prefer [tool.poetry.group.dev.dependencies] — kept as-is for
# compatibility with older Poetry installs.
[tool.poetry.dev-dependencies]
pytest = "^5.2"
# PEP 517 build backend declaration; poetry-core builds the wheel/sdist.
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"