[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "llamafactory"
dynamic = ["version"]
description = "Unified Efficient Fine-Tuning of 100+ LLMs"
readme = "README.md"
license = "Apache-2.0"
requires-python = ">=3.11.0"
authors = [
    { name = "hiyouga", email = "hiyouga@buaa.edu.cn" }
]
keywords = [
    "AI",
    "LLM",
    "GPT",
    "ChatGPT",
    "Llama",
    "Transformer",
    "DeepSeek",
    "Pytorch"
]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "Intended Audience :: Education",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Topic :: Scientific/Engineering :: Artificial Intelligence"
]
dependencies = [
    # core deep-learning stack (version-pinned to known-compatible ranges)
    "torch>=2.4.0",
    "torchvision>=0.19.0",
    "torchaudio>=2.4.0",
    "transformers>=4.51.0,<=5.0.0,!=4.52.0,!=4.57.0",
    "datasets>=2.16.0,<=4.0.0",
    "accelerate>=1.3.0,<=1.11.0",
    "peft>=0.18.0,<=0.18.1",
    "trl>=0.18.0,<=0.24.0",
    "torchdata>=0.10.0,<=0.11.0",
    # web UI and plotting
    "gradio>=4.38.0,<=5.50.0",
    "matplotlib>=3.7.0",
    "tyro<0.9.0",
    # numerical / scientific helpers
    "einops",
    "numpy",
    "pandas",
    "scipy",
    # tokenization and model hub I/O
    "sentencepiece",
    "tiktoken",
    "modelscope",
    "hf-transfer",
    "safetensors",
    # media decoding, CLI, and configuration handling
    "av>=10.0.0,<=16.0.0",
    "fire",
    "omegaconf",
    "packaging",
    "protobuf",
    "pyyaml",
    "pydantic",
    # OpenAI-style API server
    "uvicorn",
    "fastapi",
    "sse-starlette",
]

# Console entry points: `lmf` is a short alias for `llamafactory-cli`.
[project.scripts]
llamafactory-cli = "llamafactory.cli:main"
lmf = "llamafactory.cli:main"

[project.urls]
Homepage = "https://github.com/hiyouga/LLaMA-Factory"
Repository = "https://github.com/hiyouga/LLaMA-Factory"

# Package lives under src/ layout; only ship the llamafactory package in wheels.
[tool.hatch.build.targets.wheel]
packages = ["src/llamafactory"]

# Version is read from the VERSION constant in extras/env.py (see `dynamic`
# in [project]). Literal string avoids double-escaping the regex quotes.
[tool.hatch.version]
path = "src/llamafactory/extras/env.py"
pattern = 'VERSION = "(?P<version>[^"]+)"'

[tool.ruff]
target-version = "py311"
line-length = 119
indent-width = 4

[tool.ruff.lint]
ignore = [
    "C408",   # unnecessary dict/list/tuple call
    "C901",   # function is too complex
    "E501",   # line too long (handled by line-length/formatter)
    "E731",   # lambda assignment
    "E741",   # ambiguous variable name
    "UP007",  # use X | Y for type unions
    "UP045",  # use X | None for optional
    # D1xx: missing-docstring rules are intentionally not enforced
    "D100",
    "D101",
    "D102",
    "D103",
    "D104",
    "D105",
    "D107",
]
extend-select = [
    "C",       # complexity / comprehensions
    "E",       # pycodestyle errors
    "F",       # pyflakes
    "I",       # isort
    "W",       # pycodestyle warnings
    "UP",      # pyupgrade
    "D",       # pydocstyle
    "PT009",   # use plain assert, not unittest-style
    "RUF022",  # __all__ is sorted
]

[tool.ruff.lint.isort]
lines-after-imports = 2
known-first-party = ["llamafactory"]
known-third-party = [
    "accelerate",
    "datasets",
    "gradio",
    "numpy",
    "peft",
    "torch",
    "transformers",
    "trl",
]

[tool.ruff.lint.pydocstyle]
convention = "google"

[tool.ruff.format]
quote-style = "double"
indent-style = "space"
docstring-code-format = true
skip-magic-trailing-comma = false
line-ending = "auto"