	ggml-metal: fix yarn rope (#3937)
Author: Xiao-Yong Jin
@@ -1403,7 +1403,8 @@ void ggml_metal_graph_compute(
                             const int n_past     = ((int32_t *) dst->op_params)[0];
                             const int n_dims     = ((int32_t *) dst->op_params)[1];
                             const int mode       = ((int32_t *) dst->op_params)[2];
-                            const int n_orig_ctx = ((int32_t *) dst->op_params)[3];
+                            // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal
+                            const int n_orig_ctx = ((int32_t *) dst->op_params)[4];
 
                             float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow;
                             memcpy(&freq_base,   (int32_t *) dst->op_params +  5, sizeof(float));
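For context, the Metal backend reads the RoPE operator's parameters from the flat dst->op_params array packed by the CPU-side graph builder. Before this change it read n_orig_ctx from index 3, which actually holds n_ctx (used only by GLM RoPE, which is unimplemented in Metal), so YaRN scaling was computed against the wrong context length. Below is a minimal sketch of the layout implied by the hunk above; the enum names and the read_rope_params helper are illustrative assumptions, not ggml identifiers — only the indices come from the diff.

// Layout of dst->op_params for RoPE as read by the dispatch code above.
// The enum and helper are an illustrative sketch, not ggml's own identifiers.
#include <stdint.h>
#include <string.h>

enum rope_param_index {
    ROPE_P_N_PAST     = 0, // number of past tokens
    ROPE_P_N_DIMS     = 1, // number of rotary dimensions
    ROPE_P_MODE       = 2, // RoPE mode flags
    ROPE_P_N_CTX      = 3, // n_ctx, used only by GLM RoPE (unimplemented in Metal)
    ROPE_P_N_ORIG_CTX = 4, // original training context length, needed by YaRN
    ROPE_P_FREQ_BASE  = 5, // first of the floats: freq_base, freq_scale,
                           // ext_factor, attn_factor, beta_fast, beta_slow
};

// Reads the two parameters touched by this fix: taking n_orig_ctx from
// index 4 (instead of index 3) keeps YaRN from scaling against n_ctx.
static void read_rope_params(const int32_t * op_params,
                             int * n_orig_ctx, float * freq_base) {
    *n_orig_ctx = op_params[ROPE_P_N_ORIG_CTX];
    memcpy(freq_base, op_params + ROPE_P_FREQ_BASE, sizeof(float));
}

Skipping index 3 brings the Metal dispatch in line with the layout that the packing side uses, where index 3 holds n_ctx and index 4 holds n_orig_ctx, as the added comment in the diff notes.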