LLMs-from-scratch/pyproject.toml
Sebastian Raschka 458f2d9b67
Test dependencies with Python 3.13 (#843)
* Custom python 3.13 entry in pyproject.toml

* amend

* update

* update

* update

* Update pyproject.toml

* Update pyproject.toml

* Update pyproject.toml

* update
2025-09-27 08:38:07 -05:00

60 lines
1.6 KiB
TOML

# PEP 517/518 build configuration: build sdists/wheels with the
# standard setuptools backend.
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"
# PEP 621 package metadata for the `llms_from_scratch` PyPI package.
[project]
name = "llms-from-scratch"
version = "1.0.18"
description = "Implement a ChatGPT-like LLM in PyTorch from scratch, step by step"
readme = "README.md"
requires-python = ">=3.10,<3.14"
dependencies = [
    # Python 3.13 needs a newer torch line than the older interpreters below.
    "torch>=2.6; python_version >= '3.13'",
    # NOTE(review): the markers below originally stopped at
    # python_version < '3.12', which left Python 3.12 (allowed by
    # requires-python) with no torch requirement at all. Widened to
    # < '3.13' so every supported interpreter resolves a torch pin.
    # Intel-macOS keeps an upper bound of < 2.6 — presumably because newer
    # torch wheels are unavailable for darwin/x86_64; confirm before raising.
    "torch>=2.2.2,<2.6; sys_platform == 'darwin' and platform_machine == 'x86_64' and python_version < '3.13'",
    "torch>=2.2.2; sys_platform == 'darwin' and platform_machine == 'arm64' and python_version < '3.13'",
    "torch>=2.2.2; sys_platform == 'linux' and python_version < '3.13'",
    "torch>=2.2.2; sys_platform == 'win32' and python_version < '3.13'",
    # Intel-macOS is pinned to an older tensorflow minimum than the other
    # platforms (same wheel-availability caveat as torch above).
    "tensorflow>=2.16.2; sys_platform == 'darwin' and platform_machine == 'x86_64'",
    "tensorflow>=2.18.0; sys_platform == 'darwin' and platform_machine == 'arm64'",
    "tensorflow>=2.18.0; sys_platform == 'linux'",
    "tensorflow>=2.18.0; sys_platform == 'win32'",
    "jupyterlab>=4.0",
    "tiktoken>=0.5.1",
    "matplotlib>=3.7.1",
    "tqdm>=4.66.1",
    "numpy>=1.26",
    "pandas>=2.2.1",
    "pip>=25.0.1",
    "pytest>=8.3.5",
]
# When working inside the uv workspace, resolve this package from the
# local workspace member instead of PyPI.
[tool.uv.sources]
llms-from-scratch = { workspace = true }
# Development-only tooling (PEP 735 dependency group), not installed with
# the package itself. Kept alphabetically sorted.
[dependency-groups]
dev = [
    "build>=1.2.2.post1",
    "safetensors>=0.6.2",
    "tokenizers>=0.22.0",
    "twine>=6.1.0",
]
# Ruff formatter/linter settings shared by all checks.
[tool.ruff]
# Allow lines up to 140 characters before flagging them.
line-length = 140
[tool.ruff.lint]
# Don't lint anything inside the virtual environment.
exclude = [".venv"]
# Rules deliberately relaxed for this codebase:
#   C406 unnecessary dict literal        E226 missing whitespace around arithmetic op
#   E402 import not at top of file       E702/E703 semicolon usage
#   E722 bare `except`                   E731 lambda assigned to a name
#   E741 ambiguous variable name (l, O, I)
ignore = [
"C406", "E226", "E402", "E702", "E703",
"E722", "E731", "E741"
]
# `llms_from_scratch` PyPI package
# The importable package sources live under pkg/ rather than the repo
# root, so both the package dir mapping and auto-discovery point there.
[tool.setuptools]
package-dir = {"" = "pkg"}
[tool.setuptools.packages.find]
where = ["pkg"]