# Dockerfile for Distributed Llama Controller (Raspberry Pi)
# This variant can download models and start the API server

FROM arm64v8/debian:bookworm-slim

# Install dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    g++ \
    make \
    git \
    python3 \
    python3-pip \
    curl \
    wget \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app

# Copy source code
COPY src/ ./src/
COPY Makefile ./
COPY launch.py ./

# Copy scripts
COPY scripts/download-model.sh /app/download-model.sh
COPY scripts/entrypoint-controller.sh /app/entrypoint.sh

# Make scripts executable
RUN chmod +x /app/download-model.sh /app/entrypoint.sh

# Build the applications
RUN make dllama && make dllama-api

# Create models directory for volume mount
RUN mkdir -p /app/models

# Expose the default API port
EXPOSE 9999

# Use the entrypoint script
ENTRYPOINT ["/app/entrypoint.sh"]
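
# Example usage (a sketch only; the image tag "dllama-controller" and the host
# models path are assumptions, not values defined in this Dockerfile):
#
#   docker build -t dllama-controller .
#   docker run --rm -p 9999:9999 \
#     -v "$(pwd)/models:/app/models" \
#     dllama-controller
#
# Mounting /app/models as a volume lets downloaded model files persist across
# container restarts. Any arguments given after the image name are passed to
# /app/entrypoint.sh, which (per the header comment) downloads models and
# starts the API server on the exposed port 9999.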