---
# Legal Document Crawler - Docker Compose Configuration
# Solr-based setup with monitoring capabilities
services:
  # Apache Solr Service - precreated core approach
  solr:
    image: solr:9.4.1
    container_name: legal-crawler-solr
    ports:
      - "8983:8983"
    environment:
      - SOLR_HEAP=2g
    volumes:
      # Named volume so the index survives container recreation.
      - solr_data:/var/solr
    # Create the "legal-documents" core on first start, then run Solr.
    command: ["solr-precreate", "legal-documents"]
    networks:
      - legal-crawler-network
    healthcheck:
      # Ping the core itself (not just the node) so "healthy" means the
      # core is actually loaded and queryable.
      test: ["CMD-SHELL", "curl -sf http://localhost:8983/solr/legal-documents/admin/ping || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 5
      # Allow time for first-start core creation before counting failures.
      start_period: 60s
# Legal Document Crawler Application
crawler-app:
build:
context: .
dockerfile: Dockerfile
container_name: legal-crawler-app
ports:
- "8080:8080"
- "8081:8081"
environment:
- SPRING_PROFILES_ACTIVE=docker,solr
- SOLR_URL=http://solr:8983/solr
- SOLR_COLLECTION=legal-documents
- CRAWLER_STORAGE_TYPE=solr
- JAVA_OPTS=-Xms512m -Xmx2g -XX:+UseG1GC
volumes:
- crawler_data:/app/data
- crawler_logs:/app/logs
depends_on:
solr:
condition: service_healthy
networks:
- legal-crawler-network
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8080/actuator/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 120s
# Nginx Reverse Proxy
nginx:
image: nginx:alpine
container_name: legal-crawler-nginx
ports:
- "8888:80"
volumes:
- ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
command: ["/bin/sh", "-c", "rm -f /etc/nginx/conf.d/default.conf && nginx -g 'daemon off;'"]
depends_on:
- crawler-app
- solr
networks:
- legal-crawler-network
restart: unless-stopped
# Monitoring with Prometheus
prometheus:
image: prom/prometheus:latest
container_name: legal-crawler-prometheus
ports:
- "9090:9090"
volumes:
- ./docker/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus_data:/prometheus
command:
- '--config.file=/etc/prometheus/prometheus.yml'
- '--storage.tsdb.path=/prometheus'
- '--web.console.libraries=/etc/prometheus/console_libraries'
- '--web.console.templates=/etc/prometheus/consoles'
networks:
- legal-crawler-network
profiles:
- monitoring
# Loki Log Aggregation
loki:
image: grafana/loki:latest
container_name: legal-crawler-loki
ports:
- "3100:3100"
volumes:
- ./docker/loki:/etc/loki
- loki_data:/loki
command: -config.file=/etc/loki/loki-config.yml
networks:
- legal-crawler-network
profiles:
- monitoring
# Promtail Log Collection
promtail:
image: grafana/promtail:latest
container_name: legal-crawler-promtail
volumes:
- ./docker/promtail:/etc/promtail
- crawler_logs:/app/logs:ro
command: -config.file=/etc/promtail/promtail-config.yml
networks:
- legal-crawler-network
depends_on:
- loki
profiles:
- monitoring
# Grafana Dashboard
grafana:
image: grafana/grafana:latest
container_name: legal-crawler-grafana
ports:
- "3000:3000"
environment:
- GF_SECURITY_ADMIN_PASSWORD=admin123
volumes:
- grafana_data:/var/lib/grafana
- ./docker/grafana/dashboards:/etc/grafana/provisioning/dashboards
- ./docker/grafana/datasources:/etc/grafana/provisioning/datasources
networks:
- legal-crawler-network
depends_on:
- loki
profiles:
- monitoring
# Named volumes: persist service state across container recreation.
volumes:
  solr_data:
    driver: local
  crawler_data:
    driver: local
  crawler_logs:
    driver: local
  prometheus_data:
    driver: local
  grafana_data:
    driver: local
  loki_data:
    driver: local

# Dedicated bridge network with a fixed subnet so all services resolve
# each other by service name.
networks:
  legal-crawler-network:
    driver: bridge
    ipam:
      config:
        - subnet: 172.20.0.0/16