Mirror of https://github.com/ggml-org/llama.cpp.git
	Handle null rope scaling value (#2793)
Nigel Bosch
@@ -170,7 +170,8 @@ class Params:
         f_norm_eps       = config["rms_norm_eps"]
         f_rope_freq_base = config["rope_theta"] if "rope_theta" in config else None
 
-        if "rope_scaling" in config and config["rope_scaling"].get("type") == "linear":
+        rope_scaling = config.get("rope_scaling")
+        if isinstance(rope_scaling, dict) and rope_scaling.get("type") == "linear":
             f_rope_scale = config["rope_scaling"].get("factor")
         else:
             f_rope_scale = None
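In a Hugging Face style config.json, "rope_scaling": null is a legal value, so the old '"rope_scaling" in config' test passed while the subsequent .get("type") call then failed on None. Below is a minimal standalone sketch of the guarded lookup; the helper name read_rope_scale and the sample configs are illustrative and not part of the commit.

from typing import Optional


def read_rope_scale(config: dict) -> Optional[float]:
    # Guarded lookup: isinstance() rejects both a missing key (.get returns
    # None) and an explicit "rope_scaling": null in the config.
    rope_scaling = config.get("rope_scaling")
    if isinstance(rope_scaling, dict) and rope_scaling.get("type") == "linear":
        return rope_scaling.get("factor")
    return None


# The explicit-null case is the one the old `in config` check let through.
assert read_rope_scale({"rope_scaling": None}) is None
assert read_rope_scale({}) is None
assert read_rope_scale({"rope_scaling": {"type": "linear", "factor": 2.0}}) == 2.0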