-
Notifications
You must be signed in to change notification settings - Fork 1
/
docker-compose.yml
57 lines (55 loc) · 1.8 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
services:
  # LLM inference container: built locally, runs with all host NVIDIA GPUs
  # reserved, and serves the model mounted from ./models.
  llm-container:
    build:
      # Build context and Dockerfile for the LLM container.
      context: .
      dockerfile: Dockerfile.llm-cont
    # Run as a non-root user (UID:GID 1000:1000) so files written to the
    # bind-mounted volume are owned by the host user.
    user: "1000:1000"
    deploy:
      resources:
        reservations:
          devices:
            # Reserve every available NVIDIA GPU for this service.
            - driver: nvidia
              count: all
              capabilities:
                - gpu
    volumes:
      # Mount the local models directory into the container's /models.
      - ./models:/models
    networks:
      - llm-network
    # Share the host IPC namespace (commonly required by inference runtimes
    # for shared-memory transport — verify against the image's docs).
    ipc: host
    environment:
      # Default model path; override by exporting MODEL_NAME on the host.
      - MODEL_NAME=${MODEL_NAME:-/models/gemma-2-2b-it}
    # FIX: original read `${MODEL_NAME-:-/models/...}` — the dash was misplaced.
    # The Compose default-value interpolation syntax is `${VAR:-default}`,
    # matching the environment entry above.
    command: --model ${MODEL_NAME:-/models/gemma-2-2b-it}

  # Caddy reverse proxy fronting the LLM container.
  caddy:
    container_name: caddy-llm-container
    # Run as the same non-root user as the LLM service.
    user: "1000:1000"
    build:
      # Build context and Dockerfile for the Caddy container.
      context: .
      dockerfile: Dockerfile.caddy
    environment:
      - LLM_CONTAINER_PORT=${LLM_CONTAINER_PORT:-3334}
    ports:
      # host:container mapping, default 3334:3334. Quoted: unquoted
      # colon-separated values can hit YAML implicit-typing traps.
      - "${LLM_CONTAINER_PORT:-3334}:${LLM_CONTAINER_PORT:-3334}"
    networks:
      - llm-network
    depends_on:
      # Start Caddy only after the LLM container has been started.
      - llm-container

networks:
  # Dedicated bridge network connecting Caddy and the LLM container.
  llm-network:
    driver: bridge