# This file was autogenerated by uv via the following command:
#    uv pip compile pyproject.toml -o requirements.txt
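#
# The entries annotated "via django-ai-chat (pyproject.toml)" are the
# project's direct dependencies; everything else is resolved transitively.
# A minimal sketch of typical usage, assuming uv is installed and this file
# sits next to pyproject.toml:
#
#   # re-resolve the pins after editing pyproject.toml
#   uv pip compile pyproject.toml -o requirements.txt
#   # make the active virtual environment match the pinned set exactly
#   uv pip sync requirements.txt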
accelerate==1.12.0
    # via django-ai-chat (pyproject.toml)
annotated-types==0.7.0
    # via pydantic
asgiref==3.11.0
    # via django
certifi==2025.11.12
    # via requests
charset-normalizer==3.4.4
    # via requests
django==6.0
    # via
    #   django-ai-chat (pyproject.toml)
    #   django-ninja
django-ninja==1.5.1
    # via django-ai-chat (pyproject.toml)
filelock==3.20.0
    # via
    #   huggingface-hub
    #   torch
    #   transformers
fsspec==2025.12.0
    # via
    #   huggingface-hub
    #   torch
hf-xet==1.2.0
    # via huggingface-hub
huggingface-hub==0.36.0
    # via
    #   accelerate
    #   tokenizers
    #   transformers
idna==3.11
    # via requests
jinja2==3.1.6
    # via torch
markupsafe==3.0.3
    # via jinja2
mpmath==1.3.0
    # via sympy
networkx==3.6
    # via torch
numpy==2.3.5
    # via
    #   accelerate
    #   transformers
nvidia-cublas-cu12==12.8.4.1
    # via
    #   nvidia-cudnn-cu12
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cuda-cupti-cu12==12.8.90
    # via torch
nvidia-cuda-nvrtc-cu12==12.8.93
    # via torch
nvidia-cuda-runtime-cu12==12.8.90
    # via torch
nvidia-cudnn-cu12==9.10.2.21
    # via torch
nvidia-cufft-cu12==11.3.3.83
    # via torch
nvidia-cufile-cu12==1.13.1.3
    # via torch
nvidia-curand-cu12==10.3.9.90
    # via torch
nvidia-cusolver-cu12==11.7.3.90
    # via torch
nvidia-cusparse-cu12==12.5.8.93
    # via
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cusparselt-cu12==0.7.1
    # via torch
nvidia-nccl-cu12==2.27.5
    # via torch
nvidia-nvjitlink-cu12==12.8.93
    # via
    #   nvidia-cufft-cu12
    #   nvidia-cusolver-cu12
    #   nvidia-cusparse-cu12
    #   torch
nvidia-nvshmem-cu12==3.3.20
    # via torch
nvidia-nvtx-cu12==12.8.90
    # via torch
packaging==25.0
    # via
    #   accelerate
    #   huggingface-hub
    #   transformers
protobuf==6.33.2
    # via django-ai-chat (pyproject.toml)
psutil==7.1.3
    # via accelerate
pydantic==2.12.5
    # via django-ninja
pydantic-core==2.41.5
    # via pydantic
pyyaml==6.0.3
    # via
    #   accelerate
    #   huggingface-hub
    #   transformers
regex==2025.11.3
    # via transformers
requests==2.32.5
    # via
    #   huggingface-hub
    #   transformers
safetensors==0.7.0
    # via
    #   accelerate
    #   transformers
sentencepiece==0.2.1
    # via django-ai-chat (pyproject.toml)
setuptools==80.9.0
    # via torch
sqlparse==0.5.4
    # via django
sympy==1.14.0
    # via torch
tokenizers==0.22.1
    # via transformers
torch==2.9.1
    # via
    #   accelerate
    #   django-ai-chat (pyproject.toml)
tqdm==4.67.1
    # via
    #   huggingface-hub
    #   transformers
transformers==4.57.3
    # via django-ai-chat (pyproject.toml)
triton==3.5.1
    # via torch
typing-extensions==4.15.0
    # via
    #   huggingface-hub
    #   pydantic
    #   pydantic-core
    #   torch
    #   typing-inspection
typing-inspection==0.4.2
    # via pydantic
urllib3==2.6.0
    # via requests