FROM python:3.9-slim

# Set working directory
WORKDIR /code

# Copy requirements first (better caching)
COPY ./requirements.txt /code/requirements.txt

# Install dependencies
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
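
# Note: requirements.txt is assumed to list at least fastapi, uvicorn, transformers,
# and a model backend such as torch (its contents are not shown here)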

# Set the Hugging Face cache directory inside the container
ENV TRANSFORMERS_CACHE=/code/hf_cache

# Create the cache directory
RUN mkdir -p /code/hf_cache

# Pre-download the model at build time (no download or cache surprises at runtime),
# then open up permissions so a non-root runtime user can still write to the cache
RUN python -c "from transformers import pipeline; pipeline('text-classification', model='knkarthick/Action_Items')" \
    && chmod -R 777 /code/hf_cache

# Copy application code
COPY ./app.py /code/app.py

# Expose port
EXPOSE 7860

# Start the FastAPI server
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
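
The image expects an `app.py` next to the Dockerfile that exposes a FastAPI instance named `app` (that is what `uvicorn app:app` points at). Here is a minimal sketch of what that file could look like; the `/classify` route and the `TextIn` request model are illustrative assumptions, not part of the original setup:

```python
# app.py -- minimal sketch; route name and request schema are assumptions
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import pipeline

app = FastAPI()

# Loads from the cache populated at build time (TRANSFORMERS_CACHE=/code/hf_cache),
# so no model download happens when the container starts
classifier = pipeline("text-classification", model="knkarthick/Action_Items")

class TextIn(BaseModel):
    text: str

@app.post("/classify")
def classify(payload: TextIn):
    # The pipeline returns a list like [{"label": "...", "score": 0.99}]
    return classifier(payload.text)
```

With that in place, something like `docker build -t action-items .` followed by `docker run -p 7860:7860 action-items` should serve the API on port 7860.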