fix KLD percentile output (#15999)

In `llama-perplexity`, when using `--kl-divergence`, the KL divergence statistics output mistakenly displays the 99th percentile twice. This change fixes that and correctly displays the 90th percentile as originally intended (presumably).
This commit is contained in:
ddh0
2025-09-15 02:54:57 -05:00
committed by GitHub
parent b8e09f08b9
commit a68f31edd7

View File

@@ -1931,7 +1931,7 @@ static void kl_divergence(llama_context * ctx, const common_params & params) {
    LOG("Maximum KLD: %10.6f\n", kld_values.back());
    LOG("99.9%% KLD: %10.6f\n", percentile(kld_values, 0.999f));
    LOG("99.0%% KLD: %10.6f\n", percentile(kld_values, 0.990f));
-   LOG("99.0%% KLD: %10.6f\n", percentile(kld_values, 0.990f));
+   LOG("90.0%% KLD: %10.6f\n", percentile(kld_values, 0.900f));
    LOG("Median KLD: %10.6f\n", kld_median);
    LOG("10.0%% KLD: %10.6f\n", percentile(kld_values, 0.100f));
    LOG(" 5.0%% KLD: %10.6f\n", percentile(kld_values, 0.050f));