Mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-10-31 08:51:55 +00:00)

	scripts : exit compare-llama-bench.py gracefully when there's nothing to compare (#13451)
scripts/compare-llama-bench.py
@@ -318,7 +318,7 @@ else:
 
     show = []
     # Show CPU and/or GPU by default even if the hardware for all results is the same:
-    if "n_gpu_layers" not in properties_different:
+    if rows_full and "n_gpu_layers" not in properties_different:
         ngl = int(rows_full[0][KEY_PROPERTIES.index("n_gpu_layers")])
 
         if ngl != 99 and "cpu_info" not in properties_different:
@@ -338,6 +338,10 @@ else:
             pass
     rows_show = get_rows(show)
 
+if not rows_show:
+    logger.error(f"No comparable data was found between {name_baseline} and {name_compare}.\n")
+    sys.exit(1)
+
 table = []
 for row in rows_show:
     n_prompt = int(row[-5])
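The sketch below is a minimal, standalone illustration of the guard pattern the diff introduces, not the actual compare-llama-bench.py script: if filtering leaves no comparable rows, report the problem and exit with a non-zero status instead of failing later (for example, rows_full[0] raising an IndexError or an empty comparison table being printed). The names rows_show, name_baseline, name_compare, and logger mirror identifiers from the diff; the compare() wrapper and the example inputs are hypothetical.

import logging
import sys

logger = logging.getLogger("compare-llama-bench")
logging.basicConfig(level=logging.INFO)


def compare(rows_show: list, name_baseline: str, name_compare: str) -> None:
    # Mirrors the check added in the diff: bail out gracefully when there is
    # nothing to compare between the baseline run and the comparison run.
    if not rows_show:
        logger.error(f"No comparable data was found between {name_baseline} and {name_compare}.\n")
        sys.exit(1)

    # ... build and print the comparison table here ...
    for row in rows_show:
        print(row)


if __name__ == "__main__":
    # Hypothetical inputs: an empty result set triggers the graceful exit.
    compare([], "master", "my-branch")

Exiting with an explicit error message and status 1 makes the failure mode clear to both users and CI scripts, whereas the pre-patch behavior presumably surfaced as a crash or an empty, confusing table when the two benchmark runs shared no comparable configurations.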
Sigbjørn Skjæret