# nanochat pyproject.toml
[project]
name = "nanochat"
version = "0.1.0"
description = "the minimal full-stack ChatGPT clone"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "datasets>=4.0.0",
    "fastapi>=0.117.1",
    "files-to-prompt>=0.6",
    "numpy==1.26.4",
    "psutil>=7.1.0",
    "regex>=2025.9.1",
    "tiktoken>=0.11.0",
    "tokenizers>=0.22.0",
    "torch>=2.8.0",
    "uvicorn>=0.36.0",
    "wandb>=0.21.3",
]
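
# With uv (configured via the [tool.uv.*] sections below), these dependencies
# are typically installed with `uv sync`; `uv pip install -e .` is an
# alternative. (Example commands only; the repo's actual workflow may differ.)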
[build-system]
requires = ["maturin>=1.7,<2.0"]
build-backend = "maturin"
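
# maturin is the build backend, so installing the package also compiles the
# Rust tokenizer crate configured under [tool.maturin] below.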
# Target torch at the CUDA 12.8 wheel index (cu128)
[tool.uv.sources]
torch = [
    { index = "pytorch-cu128" },
]
[[tool.uv.index]]
name = "pytorch-cu128"
url = "https://download.pytorch.org/whl/cu128"
explicit = true
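
# `explicit = true` restricts this index to packages that opt into it through
# [tool.uv.sources] (torch above); all other dependencies still resolve from PyPI.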
[tool.maturin]
module-name = "rustbpe"
bindings = "pyo3"
python-source = "."
manifest-path = "rustbpe/Cargo.toml"
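
# The compiled pyo3 extension is importable as `rustbpe` (module-name above).
# For local iteration it can be rebuilt in place with e.g. `maturin develop`
# (assumed workflow, not documented in this file).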
[dependency-groups]
dev = [
    "maturin>=1.9.4",
    "pytest>=8.0.0",
]
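
# Dev tooling: the maturin CLI for building rustbpe and pytest for the test
# suite. With uv, e.g. `uv sync --group dev` installs this group (assumed
# invocation).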
[tool.pytest.ini_options]
markers = [
    "slow: marks tests as slow (deselect with '-m \"not slow\"')",
]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
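
# Run the tests with e.g. `uv run pytest`; slow tests can be deselected with
# `pytest -m "not slow"`, per the marker above.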