perplexity : show more kl-divergence data (#16321)

Adds additional percentile data to the output of `llama-perplexity --kl-divergence`:
- Added the 95.0% percentile (mirroring the existing 5.0% percentile)
- Added the 0.1% percentile (mirroring the existing 99.9% percentile)
Author: ddh0
Date: 2025-09-29 01:30:45 -05:00 (committed by GitHub)
Parent: a4a0aa5ea2
Commit: 3ffd0fae47


@@ -1931,11 +1931,13 @@ static void kl_divergence(llama_context * ctx, const common_params & params) {
LOG("Maximum KLD: %10.6f\n", kld_values.back());
LOG("99.9%% KLD: %10.6f\n", percentile(kld_values, 0.999f));
LOG("99.0%% KLD: %10.6f\n", percentile(kld_values, 0.990f));
LOG("95.0%% KLD: %10.6f\n", percentile(kld_values, 0.950f));
LOG("90.0%% KLD: %10.6f\n", percentile(kld_values, 0.900f));
LOG("Median KLD: %10.6f\n", kld_median);
LOG("10.0%% KLD: %10.6f\n", percentile(kld_values, 0.100f));
LOG(" 5.0%% KLD: %10.6f\n", percentile(kld_values, 0.050f));
LOG(" 1.0%% KLD: %10.6f\n", percentile(kld_values, 0.010f));
LOG(" 0.1%% KLD: %10.6f\n", percentile(kld_values, 0.001f));
LOG("Minimum KLD: %10.6f\n", kld_values.front());
LOG("\n");