```dockerfile
FROM python:3.10-slim

WORKDIR /app

RUN apt-get update && apt-get install -y git curl && rm -rf /var/lib/apt/lists/*

COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir -r /app/requirements.txt

COPY app.py /app/app.py
COPY model_handler.py /app/model_handler.py

ARG HF_MODEL_REPO=Jaja-09/authorcheck-model

# Download model snapshot from HF model repo
RUN python -c "from huggingface_hub import snapshot_download; snapshot_download(repo_id='${HF_MODEL_REPO}', local_dir='/app/model')"

# Use writable caches inside /app and pre-download NLTK + sentiment model
ENV NLTK_DATA=/app/nltk_data
ENV TRANSFORMERS_CACHE=/app/hf_cache
RUN mkdir -p /app/nltk_data /app/hf_cache && \
    python -c "import nltk; nltk.download('punkt', download_dir='/app/nltk_data', quiet=True); nltk.download('punkt_tab', download_dir='/app/nltk_data', quiet=True)" && \
    python -c "from transformers import AutoTokenizer, AutoModelForSequenceClassification; m='distilbert-base-uncased-finetuned-sst-2-english'; AutoTokenizer.from_pretrained(m, cache_dir='/app/hf_cache'); AutoModelForSequenceClassification.from_pretrained(m, cache_dir='/app/hf_cache')"

EXPOSE 7860

CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
#
```
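The image copies `app.py` and `model_handler.py` and starts uvicorn on port 7860, the port Hugging Face Spaces routes traffic to. Those application files are not shown here, so the sketch below is only an assumption of what a minimal `app.py` could look like: it loads the fine-tuned model baked into `/app/model` at build time and exposes one prediction endpoint. The `/predict` route, the request schema, and the omission of `model_handler.py` are all hypothetical, not taken from the actual repo.

```python
# Minimal sketch of a possible app.py (assumption -- the real app.py in the
# Space may be structured differently and likely delegates to model_handler.py).
from fastapi import FastAPI
from pydantic import BaseModel
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Model snapshot placed here at build time by snapshot_download in the Dockerfile.
MODEL_DIR = "/app/model"

tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_DIR)
model.eval()

app = FastAPI()


class TextIn(BaseModel):
    text: str


@app.post("/predict")  # hypothetical endpoint name
def predict(payload: TextIn):
    # Tokenize the input and run a single forward pass; no gradients at inference.
    inputs = tokenizer(payload.text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        logits = model(**inputs).logits
    probs = torch.softmax(logits, dim=-1).squeeze().tolist()
    return {"probabilities": probs}
```

Built locally with `docker build -t authorcheck .`, the container can be smoke-tested with `docker run -p 7860:7860 authorcheck` and a POST request to `http://localhost:7860/predict`, mirroring how the Space serves requests once deployed.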