From d5165e8f2e8c765bfe7a0bf7ea6efed89940f08d Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sun, 10 May 2026 21:49:58 +0300
Subject: [PATCH] llama-eval : require --grader-model or --model when using
 --grader-type llm

Assisted-by: llama.cpp:local pi
---
 examples/llama-eval/llama-eval.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/examples/llama-eval/llama-eval.py b/examples/llama-eval/llama-eval.py
index 23f7359c95..2d6365f2d0 100755
--- a/examples/llama-eval/llama-eval.py
+++ b/examples/llama-eval/llama-eval.py
@@ -1357,6 +1357,9 @@ def main():
         grader_server_url = args.grader_server if args.grader_server else server_configs[0].url
         grader_model_name = args.grader_model if args.grader_model else args.model
 
+        if args.grader_type == "llm" and not grader_model_name:
+            print("Error: --grader-type llm requires --grader-model or --model")
+            sys.exit(1)
         grader = Grader(
             grader_type=args.grader_type,
             grader_script=args.grader_script,
@@ -1372,6 +1375,9 @@ def main():
         grader_server_url = args.grader_server if args.grader_server else server_configs[0].url
         grader_model_name = args.grader_model if args.grader_model else args.model
 
+        if args.grader_type == "llm" and not grader_model_name:
+            print("Error: --grader-type llm requires --grader-model or --model")
+            sys.exit(1)
         grader = Grader(
             grader_type=args.grader_type,