	constants.py : add layer norm eps
Author: klosax
@@ -27,6 +27,8 @@ KEY_ATTENTION_HEAD_COUNT         = "{llm}.attention.head_count"
 KEY_ATTENTION_HEAD_COUNT_KV      = "{llm}.attention.head_count_kv"
 KEY_ATTENTION_MAX_ALIBI_BIAS     = "{llm}.attention.max_alibi_bias"
 KEY_ATTENTION_CLAMP_KQV          = "{llm}.attention.clamp_kqv"
+KEY_ATTENTION_LAYERNORM_EPS      = "{llm}.attention.layer_norm_epsilon"
+KEY_ATTENTION_LAYERNORM_RMS_EPS  = "{llm}.attention.layer_norm_rms_epsilon"
 
 # RoPE
 KEY_ROPE_DIMENSION_COUNT         = "{llm}.rope.dimension_count"
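
The two added keys separate the epsilon used by standard LayerNorm from the one used by RMSNorm, since an architecture typically uses one or the other. Below is a minimal sketch (not part of the commit) of how such keys are meant to be consumed: the "{llm}" placeholder is filled with the model architecture name before the key is written as GGUF metadata. The constants are copied from the diff above; the architecture name, epsilon value, and metadata dict are illustrative stand-ins, not the gguf-py writer API.

# Constants as added by this commit
KEY_ATTENTION_LAYERNORM_EPS     = "{llm}.attention.layer_norm_epsilon"
KEY_ATTENTION_LAYERNORM_RMS_EPS = "{llm}.attention.layer_norm_rms_epsilon"

arch = "llama"  # hypothetical architecture name

# LLaMA-style models use RMSNorm, so their epsilon goes under the RMS key;
# models with standard LayerNorm would use KEY_ATTENTION_LAYERNORM_EPS instead.
key = KEY_ATTENTION_LAYERNORM_RMS_EPS.format(llm=arch)
print(key)  # -> llama.attention.layer_norm_rms_epsilon

# Stand-in for writing the key-value pair into a GGUF file;
# 1e-5 is a typical RMSNorm epsilon, shown only for illustration.
metadata = {key: 1e-5}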