mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-10-31 08:51:55 +00:00)
	change default temperature of OAI compat API from 0 to 1 (#7226)
* change default temperature of OAI compat API from 0 to 1
* make tests explicitly send temperature to OAI API
@@ -371,7 +371,7 @@ static json oaicompat_completion_params_parse(
     llama_params["presence_penalty"]  = json_value(body,   "presence_penalty",  0.0);
     llama_params["seed"]              = json_value(body,   "seed",              LLAMA_DEFAULT_SEED);
     llama_params["stream"]            = json_value(body,   "stream",            false);
-    llama_params["temperature"]       = json_value(body,   "temperature",       0.0);
+    llama_params["temperature"]       = json_value(body,   "temperature",       1.0);
     llama_params["top_p"]             = json_value(body,   "top_p",             1.0);
 
     // Apply chat template to the list of messages
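The diff only changes the fallback used when a request omits "temperature"; a client that sends the field explicitly (as the updated tests now do) is unaffected. Below is a minimal, self-contained sketch of that lookup behaviour, assuming nlohmann::json (which the server code uses for its `json` type) and a hypothetical json_value_or helper standing in for the server's json_value:

#include <iostream>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

// Hypothetical stand-in for the server's json_value() helper:
// return the field if the client sent it, otherwise the default.
static double json_value_or(const json & body, const char * key, double def) {
    return body.contains(key) ? body.at(key).get<double>() : def;
}

int main() {
    const json omitted       = json::parse(R"({"messages": []})");
    const json explicit_zero = json::parse(R"({"messages": [], "temperature": 0.0})");

    // After this commit, an omitted temperature falls back to 1.0 ...
    std::cout << json_value_or(omitted, "temperature", 1.0) << "\n";        // prints 1
    // ... while an explicitly requested 0.0 still yields greedy-style sampling.
    std::cout << json_value_or(explicit_zero, "temperature", 1.0) << "\n";  // prints 0
    return 0;
}

The 1.0 fallback also matches the default that the upstream OpenAI chat completions API documents, so clients relying on that documented default see consistent sampling behaviour.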
Benjamin Findley