# syntax=docker/dockerfile:1
# (the heredoc RUN below needs BuildKit's Dockerfile 1.4+ frontend)
FROM python:3.11-slim

# Keep the Hugging Face cache inside the image and writable at runtime.
ENV HF_HOME=/app/hf_cache \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

RUN apt-get update \
    && apt-get install -y --no-install-recommends git \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

RUN mkdir -p /app/hf_cache && chmod -R 777 /app/hf_cache

# Pre-download both models at build time so the container doesn't hit the Hub
# on startup; "|| true" keeps the build going if the download fails, in which
# case the app falls back to downloading at runtime.
RUN python - <<'PY' || true
from transformers import AutoTokenizer, AutoModelForSequenceClassification

PRIMARY = "cardiffnlp/twitter-roberta-base-sentiment"           # stable 3-class
FALLBACK = "distilbert-base-uncased-finetuned-sst-2-english"    # binary

for mid in (PRIMARY, FALLBACK):
    try:
        AutoTokenizer.from_pretrained(mid)
        AutoModelForSequenceClassification.from_pretrained(mid)
        print("Cached:", mid)
    except Exception as e:
        print("Cache failed for", mid, "->", e)
PY

COPY app.py .

EXPOSE 7860

CMD bash -lc "uvicorn app:app --host 0.0.0.0 --port ${PORT:-7860}"
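The image copies an `app.py` and serves it with `uvicorn app:app`, so that module must expose an ASGI application named `app`. A minimal sketch under that assumption follows; the `/predict` route and its request schema are illustrative rather than the project's actual API, and it presumes `requirements.txt` pulls in `fastapi`, `uvicorn`, `transformers`, and `torch`.

```python
# app.py -- minimal sketch of the server the Dockerfile expects.
# Assumptions: requirements.txt provides fastapi, uvicorn, transformers, torch;
# the /predict route and its request schema are illustrative, not a fixed API.
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import pipeline

# Same model IDs the Dockerfile pre-caches into /app/hf_cache (HF_HOME).
PRIMARY = "cardiffnlp/twitter-roberta-base-sentiment"
FALLBACK = "distilbert-base-uncased-finetuned-sst-2-english"


def load_classifier():
    """Try the 3-class model first, then fall back to the binary SST-2 model."""
    last_err = None
    for mid in (PRIMARY, FALLBACK):
        try:
            return pipeline("sentiment-analysis", model=mid)
        except Exception as e:  # cache miss, network failure, etc.
            print("Load failed for", mid, "->", e)
            last_err = e
    raise RuntimeError(f"No sentiment model could be loaded: {last_err}")


classifier = load_classifier()
app = FastAPI()  # must be named "app" to match CMD "uvicorn app:app"


class PredictRequest(BaseModel):
    text: str


@app.post("/predict")
def predict(req: PredictRequest):
    # pipeline() returns a list of {"label": ..., "score": ...} dicts.
    result = classifier(req.text)[0]
    return {"label": result["label"], "score": float(result["score"])}
```

Because the models were cached under `/app/hf_cache` at build time and `HF_HOME` points there, the `pipeline(...)` calls load from disk on startup instead of downloading from the Hub.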