dllama/Dockerfile.worker

# Dockerfile for Distributed Llama Worker (Raspberry Pi)
# This variant runs as a worker node and connects to a controller
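
# Build example (a sketch, not part of the upstream docs): it assumes the build
# context is the repository root and this file sits at dllama/Dockerfile.worker,
# so that src/ and the Makefile are available to COPY; adjust paths to your layout.
#   docker build -f dllama/Dockerfile.worker -t distributed-llama-worker .
# On a non-ARM host you would typically cross-build for arm64, e.g.:
#   docker buildx build --platform linux/arm64 -f dllama/Dockerfile.worker -t distributed-llama-worker .
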
FROM arm64v8/debian:bookworm-slim

# Install dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    g++ \
    make \
    && rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app

# Copy source code
COPY src/ ./src/
COPY Makefile ./

# Build only the dllama binary (run in worker mode by the entrypoint)
RUN make dllama

# Create entrypoint script
COPY <<EOF /app/entrypoint.sh
#!/bin/bash

# Default values
PORT=9999
NTHREADS=4

# Parse command line arguments
while [[ \$# -gt 0 ]]; do
  case \$1 in
    --port)
      PORT="\$2"
      shift 2
      ;;
    --nthreads)
      NTHREADS="\$2"
      shift 2
      ;;
    --help)
      echo "Usage: docker run distributed-llama-worker [OPTIONS]"
      echo ""
      echo "Options:"
      echo "  --port <port>       Worker port (default: 9999)"
      echo "  --nthreads <n>      Number of threads (default: 4)"
      echo ""
      echo "Example:"
      echo "  docker run -p 9999:9999 distributed-llama-worker --port 9999 --nthreads 4"
      exit 0
      ;;
    *)
      echo "Unknown option: \$1"
      exit 1
      ;;
  esac
done

# Build the command
CMD="./dllama worker --port \$PORT --nthreads \$NTHREADS"
echo "Starting worker with command:"
echo "\$CMD"
echo ""

exec \$CMD
EOF

RUN chmod +x /app/entrypoint.sh

# Expose the default worker port
EXPOSE 9999

# Use the entrypoint script
ENTRYPOINT ["/app/entrypoint.sh"]
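
# Run example (a sketch; image tag and addresses are placeholders):
#   docker run -d -p 9999:9999 distributed-llama-worker --port 9999 --nthreads 4
# The controller node then points at this worker with the distributed-llama CLI,
# e.g. (flags assumed from the upstream project, verify against your version):
#   ./dllama inference --model <model> --tokenizer <tokenizer> --nthreads 4 --workers <worker-ip>:9999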