Mirror of https://github.com/ggml-org/llama.cpp.git, synced 2026-03-17 16:44:07 +00:00.
server : warn swa-full is not supported for non-SWA models (#20291)
This commit is contained in:
@@ -729,6 +729,13 @@ private:
            }
        }

        // swa_full only applies to models with SWA (sliding-window attention)
        // layers; force it off and warn so the user knows the flag had no effect.
        if (llama_model_n_swa(model) == 0) {
            if (params_base.swa_full) {
                params_base.swa_full = false;
                SRV_WRN("%s\n", "swa_full is not supported by this model, it will be disabled");
            }
        }

        // Necessary similarity of prompt for slot selection
        slot_prompt_similarity = params_base.slot_prompt_similarity;
||||
Reference in New Issue
Block a user