Files
llama.cpp/gguf-py/pyproject.toml
Georgi Gerganov a290ce6266 gguf-py : bump version to 0.19.0 (#22664)
* gguf-py : bump version to 0.19.0

* bump poetry

---------

Co-authored-by: Sigbjørn Skjæret <sigbjorn.skjaeret@scala.com>
2026-05-06 14:46:14 +02:00

46 lines
1.3 KiB
TOML

[project]
name = "gguf"
version = "0.19.0"
description = "Read and write ML models in GGUF for GGML"
keywords = ["ggml", "gguf", "llama.cpp"]
# `classifiers` is dynamic: poetry-core generates the final list from the
# static entries kept under [tool.poetry] below. PEP 621 forbids declaring a
# field both statically in [project] and in `dynamic`, so the list must not
# also appear here.
dynamic = ["classifiers"]
readme = "README.md"
authors = [{name = "GGML", email = "ggml@ggml.ai"}]
requires-python = ">=3.10"
dependencies = [
    "numpy (>=1.17)",
    "tqdm (>=4.27)",
    "pyyaml (>=5.1)",
    "requests (>=2.25)",
]

[project.urls]
homepage = "https://ggml.ai"
repository = "https://github.com/ggml-org/llama.cpp"

[project.scripts]
gguf-convert-endian = "gguf.scripts.gguf_convert_endian:main"
gguf-dump = "gguf.scripts.gguf_dump:main"
gguf-set-metadata = "gguf.scripts.gguf_set_metadata:main"
gguf-new-metadata = "gguf.scripts.gguf_new_metadata:main"
gguf-editor-gui = "gguf.scripts.gguf_editor_gui:main"

[project.optional-dependencies]
# Literal-quoted on purpose: the environment marker embeds double quotes.
gui = ['PySide6 (>=6.9,<7.0) ; python_version >= "3.9" and python_version < "3.14"']

[tool.poetry]
packages = [
    {include = "gguf"},
    # Ship the PEP 561 marker so type checkers treat the package as typed.
    {include = "gguf/py.typed"},
]
# Static classifiers live here (not in [project]) because `classifiers` is
# declared dynamic above; poetry-core merges these with auto-generated ones.
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]

[tool.poetry.dependencies]
python = ">=3.10"

# NOTE(review): [tool.poetry.dev-dependencies] is deprecated in favour of
# [tool.poetry.group.dev.dependencies]; kept as-is to stay compatible with
# older Poetry versions — confirm before migrating.
[tool.poetry.dev-dependencies]
pytest = "^5.2"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"