FROM python:3.11-slim

# Install system dependencies as root (no sudo!)
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates curl && rm -rf /var/lib/apt/lists/*

# Create non-root user
RUN useradd -m -u 1000 user
ENV HOME=/home/user
WORKDIR $HOME/app

# Mode switch: IS_LOCAL=true targets the local MedAlpaca model, false (default) uses the cloud APIs
ARG IS_LOCAL=false
ENV IS_LOCAL=${IS_LOCAL}
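#
# Example builds (image tag is illustrative; HF_TOKEN is only needed if the
# local model requires authentication):
#   docker build -t app .
#   docker build --build-arg IS_LOCAL=true --build-arg HF_TOKEN=<token> -t app .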

# Install Python dependencies based on mode (better layer caching)
COPY --chown=user requirements.txt .
COPY --chown=user requirements-dev.txt .
RUN pip install --upgrade pip && \
    if [ "$IS_LOCAL" = "true" ]; then \
        echo "Installing LOCAL mode dependencies (MedAlpaca-13b)"; \
        pip install --no-cache-dir -r requirements-dev.txt; \
    else \
        echo "Installing CLOUD mode dependencies (NVIDIA/Gemini APIs)"; \
        pip install --no-cache-dir -r requirements.txt; \
    fi

# Copy the application
COPY --chown=user . .

# Download Vietnamese translation model (always needed for fallback)
RUN python vi/download.py

# Hugging Face cache setup
ENV HF_HOME="$HOME/.cache/huggingface"
ENV SENTENCE_TRANSFORMERS_HOME="$HOME/.cache/huggingface/sentence-transformers"
ENV MEDGEMMA_HOME="$HOME/.cache/huggingface"

# Prepare runtime dirs and hand ownership of the app and HF cache to the non-root user
RUN mkdir -p $HOME/app/logs $HOME/app/cache $HOME/app/cache/hf $HOME/app/cache/outputs $HOME/app/data $HOME/.cache/huggingface && \
    chown -R user:user $HOME/app $HOME/.cache

# Download models based on mode. HF_TOKEN is declared as a build arg so the
# snapshot_download call below can authenticate if the model requires it.
ARG HF_TOKEN
RUN if [ "$IS_LOCAL" = "true" ]; then \
        echo "Downloading MedAlpaca-13b model for local mode..."; \
        python -c "from huggingface_hub import snapshot_download; import os; snapshot_download('medalpaca/medalpaca-13b', token=os.getenv('HF_TOKEN'), cache_dir='$HOME/.cache/huggingface')"; \
        chown -R user:user $HOME/.cache/huggingface; \
    else \
        echo "Cloud mode: only the Vietnamese translation model is baked into the image"; \
    fi

USER user

EXPOSE 7860
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]
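
# Example run (illustrative tag; publishes the app port on the host):
#   docker run --rm -p 7860:7860 app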