# (Bring in everything you listed, with safer caps)

# PyTorch stack (match minor versions!)
torch>=2.2,<2.4
torchvision>=0.17,<0.19
torchaudio>=2.2,<2.4

# Transformers & NLP
transformers>=4.41,<4.46
datasets>=2.19,<2.21
accelerate>=0.28,<0.31
peft>=0.11,<0.13
tokenizers>=0.15  # usually pulled by transformers, but explicit helps
safetensors>=0.4.3
huggingface_hub>=0.24,<0.26
# If you ever use T5/ALBERT etc., add:
# sentencepiece>=0.1.99

# Vision models
timm>=0.9.7,<1.0
opencv-python-headless>=4.8
Pillow>=10.0,<11

# Optimization / tracking
optuna>=3.5,<4
wandb>=0.16.6,<0.18

# Eval / viz
scikit-learn>=1.3,<1.6
matplotlib>=3.8,<3.9
seaborn>=0.13,<0.14
tqdm>=4.66,<5

# Data processing
pandas>=2.2,<2.3
numpy>=1.26,<2.2
pyyaml>=6.0
scipy>=1.11,<1.14  # sklearn relies on it; make it explicit to avoid surprises

# Optional deployment (FastAPI)
fastapi>=0.110,<0.114
pydantic>=2.5,<3
uvicorn>=0.27,<0.31
python-multipart>=0.0.6

# Optional perf (`json` is stdlib, not a pip package — the fast JSON lib is `orjson`):
orjson>=3.9

# Linting & testing
pytest>=7.4,<9
pytest-cov>=4.1,<5
pre-commit>=3.5,<4
flake8>=6.1,<7
# Optional modern linter:
ruff>=0.4,<0.7