Mirror of https://github.com/ggml-org/llama.cpp.git, synced 2026-05-11 19:44:06 +00:00.
examples: use HF_HUB_OFFLINE to avoid HF Hub warnings
This commit is contained in:
@@ -15,7 +15,7 @@ from tqdm import tqdm
|
||||
# Route Hugging Face dataset downloads to a per-user local cache and keep the
# Hub client quiet: disable telemetry and force offline mode so the example
# never hits the network (and never prints Hub connectivity warnings).
cache_dir = Path.home() / ".cache" / "huggingface" / "datasets"
cache_dir.mkdir(parents=True, exist_ok=True)

# NOTE: these must be set before any `datasets` / `huggingface_hub` import
# reads them — presumably this runs at module top level; confirm placement.
os.environ["HF_DATASETS_CACHE"] = str(cache_dir)
os.environ["HF_HUB_DISABLE_TELEMETRY"] = "1"
os.environ["HF_HUB_OFFLINE"] = "1"
|
||||
|
||||
GRADER_PATTERNS = {
|
||||
"aime": r'\boxed{(\d+)}|\b(\d+)\b',
|
||||
|
||||
Reference in New Issue
Block a user