batch : add optional parameter for sequential equal split (#14511)

ggml-ci
This commit is contained in:
Georgi Gerganov
2025-07-04 09:08:59 +03:00
committed by GitHub
parent 7b50f7c025
commit 67d1ef23c6
5 changed files with 26 additions and 5 deletions

View File

@@ -140,7 +140,7 @@ llama_memory_context_ptr llama_kv_cache_unified_iswa::init_batch(llama_batch_all
std::vector<llama_ubatch> ubatches;
while (true) {
-        auto ubatch = balloc.split_equal(n_ubatch);
+        auto ubatch = balloc.split_equal(n_ubatch, false);
if (ubatch.n_tokens == 0) {
break;