# docker-compose.yml
services:
  extractor:
    build:
      context: ./extractor
      dockerfile: Dockerfile
    volumes:
      - ./extractor:/extractor
      - cargo-cache:/usr/local/cargo/registry
      - ${TRAINING_DATA_PATH}:/training_data
      - ${SHARED_STORAGE_PATH}:/shared_storage
    working_dir: /extractor
    environment:
      - RUST_LOG=debug
      - REDIS_URL=${REDIS_URL}
      - REDIS_PORT=${REDIS_PORT}
      - RABBITMQ_URL=amqp://${RABBITMQ_USER}:${RABBITMQ_PASS}@rabbitmq:${RABBITMQ_PORT}
      - EXTRACTOR_PORT=${EXTRACTOR_PORT}
      - EXTRACTOR_URL=${EXTRACTOR_URL}
      - RABBITMQ_EXTRACTOR_QUEUE=${RABBITMQ_EXTRACTOR_QUEUE}
      - SHARED_STORAGE_PATH=/shared_storage
      - TRAINING_DATA_PATH=/training_data
      - OLLAMA_BASE_URL=${OLLAMA_HOST}:${OLLAMA_PORT} # Ollama base URL (host:port)
      - OLLAMA_BASE_PORT=${OLLAMA_PORT} # Ollama port
      - OLLAMA_BASE_HOST=${OLLAMA_HOST} # Ollama host
    depends_on:
      - redis
      - rabbitmq
      - ollama # Ensure Ollama starts before this service (see the note below)
    command: ["cargo", "run"]
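
# Note: short-form depends_on (as used above) only controls start order; it does
# not wait for RabbitMQ or Ollama to report healthy. A long-form sketch that
# gates on the healthchecks defined for rabbitmq and ollama further down
# (an illustration, not part of the original file):
#
#   depends_on:
#     redis:
#       condition: service_started
#     rabbitmq:
#       condition: service_healthy
#     ollama:
#       condition: service_healthy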

  api:
    build:
      context: ./api
      dockerfile: Dockerfile
    ports:
      - "${API_PORT}:${API_PORT}"
    volumes:
      - ./api:/app
      - /app/node_modules
      - ${SHARED_STORAGE_PATH}:/shared_storage
    environment:
      - NODE_ENV=development
      - REDIS_URL=${REDIS_URL}
      - REDIS_PORT=${REDIS_PORT}
      - RABBITMQ_URL=amqp://${RABBITMQ_USER}:${RABBITMQ_PASS}@rabbitmq:${RABBITMQ_PORT}
      - EXTRACTOR_PORT=${EXTRACTOR_PORT}
      - EXTRACTOR_URL=${EXTRACTOR_URL}
      - SHARED_STORAGE_PATH=/shared_storage
      - PROTO_PATH=/app/proto
      - API_PORT=${API_PORT}
      - RABBITMQ_EXTRACTOR_QUEUE=${RABBITMQ_EXTRACTOR_QUEUE}
      - OLLAMA_BASE_URL=${OLLAMA_HOST}:${OLLAMA_PORT} # Ollama base URL (host:port)
      - OLLAMA_BASE_PORT=${OLLAMA_PORT} # Ollama port
      - OLLAMA_BASE_HOST=${OLLAMA_HOST} # Ollama host
    depends_on:
      - redis
      - rabbitmq
      - extractor
      - ollama # Ensure Ollama starts before this service
    command: ["npm", "run", "dev"]

  redis:
    image: redis:7-alpine
    ports:
      - "${REDIS_PORT}:${REDIS_PORT}"
    volumes:
      - redis-data:/data
    command: redis-server --appendonly yes

  rabbitmq:
    image: rabbitmq:3-management
    ports:
      - "${RABBITMQ_PORT}:5672" # AMQP port
      - "${RABBITMQ_UI_PORT}:15672" # Management UI port
    volumes:
      - rabbitmq-data:/var/lib/rabbitmq
    environment:
      - RABBITMQ_DEFAULT_USER=${RABBITMQ_USER}
      - RABBITMQ_DEFAULT_PASS=${RABBITMQ_PASS}
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "check_port_connectivity"]
      interval: 30s
      timeout: 10s
      retries: 5

  ollama:
    image: ollama/ollama
    container_name: ollama-pdfz
    ports:
      - "${OLLAMA_PORT}:11434" # Expose Ollama API
    volumes:
      - ollama_data:/root/.ollama # Persist downloaded models
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:11434/"]
      interval: 30s
      timeout: 10s
      retries: 3

volumes:
  cargo-cache:
  training_data:
  redis-data:
  shared_storage:
  rabbitmq-data:
  ollama_data:
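
# ---------------------------------------------------------------------------
# Illustrative .env sketch (an assumption, not part of the repository): the
# ${...} substitutions above are resolved by Docker Compose from the shell
# environment or an .env file next to this compose file. Every value below is
# a placeholder chosen for illustration, not a known project default.
#
#   TRAINING_DATA_PATH=./training_data
#   SHARED_STORAGE_PATH=./shared_storage
#   REDIS_URL=redis://redis
#   REDIS_PORT=6379
#   RABBITMQ_USER=guest
#   RABBITMQ_PASS=guest
#   RABBITMQ_PORT=5672
#   RABBITMQ_UI_PORT=15672
#   RABBITMQ_EXTRACTOR_QUEUE=extractor_queue
#   EXTRACTOR_PORT=50051
#   EXTRACTOR_URL=http://extractor:50051
#   API_PORT=3000
#   OLLAMA_HOST=http://ollama
#   OLLAMA_PORT=11434
# ---------------------------------------------------------------------------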