-
-
Notifications
You must be signed in to change notification settings - Fork 32
Expand file tree
/
Copy pathDockerfile.unified
More file actions
284 lines (252 loc) · 10.8 KB
/
Dockerfile.unified
File metadata and controls
284 lines (252 loc) · 10.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
# Unified Dockerfile for SeerrBridge
# Includes: MySQL + Python Backend + Nuxt Frontend in a single container
# ==============================================================================
# Stage 1: Python Builder
# ==============================================================================
FROM python:3.10-slim AS python-builder
# Install build dependencies (toolchain for compiling native wheel extensions;
# this stage is discarded, so none of it reaches the final image)
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    gcc \
    g++ \
    && rm -rf /var/lib/apt/lists/*
# Build in a dedicated directory rather than the image root
WORKDIR /build
# Copy requirements and install Python dependencies with a BuildKit cache mount.
# --no-cache-dir is intentionally NOT used: it would defeat the cache mount,
# whose whole point is to reuse downloaded wheels across builds (the cache
# lives on the build host, never in an image layer).
# --user installs into /root/.local, which the final stage copies wholesale.
COPY requirements.txt .
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install --user -r requirements.txt
# ==============================================================================
# Stage 2: Node.js Builder (Frontend)
# ==============================================================================
FROM node:20-slim AS node-builder
WORKDIR /app/frontend
# Install build dependencies (toolchain node-gyp needs for native addons)
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 \
make \
g++ \
&& rm -rf /var/lib/apt/lists/*
# Copy package files first so the dependency layer stays cached until
# package*.json actually changes
COPY package*.json ./
# Install dependencies with cache mount (npm cache lives on the build host,
# not in a layer)
# NOTE(review): --omit=dev skips devDependencies before `npm run build`; the
# route-verification step below exercises the build, so this evidently works,
# but confirm all build tooling really lives in "dependencies".
RUN --mount=type=cache,target=/root/.npm \
npm ci --legacy-peer-deps --omit=dev
# Copy source code
COPY . .
# Set build environment variables
ENV NODE_ENV=production
ENV NUXT_HOST=0.0.0.0
ENV NUXT_PORT=3777
# Set database environment variables for build-time runtime config.
# The defaults below are placeholders baked into the Nuxt build.
# NOTE(review): ARG/ENV values are visible in `docker history`; never pass
# real credentials via --build-arg — confirm deployments override DB_PASSWORD
# at container runtime instead.
ARG DB_HOST=localhost
ARG DB_PORT=3306
ARG DB_NAME=seerrbridge
ARG DB_USER=seerrbridge
ARG DB_PASSWORD=seerrbridge
ARG SEERRBRIDGE_URL=http://localhost:8777
ENV DB_HOST=${DB_HOST}
ENV DB_PORT=${DB_PORT}
ENV DB_NAME=${DB_NAME}
ENV DB_USER=${DB_USER}
ENV DB_PASSWORD=${DB_PASSWORD}
ENV SEERRBRIDGE_URL=${SEERRBRIDGE_URL}
# Build the application (Nitro output lands in .output/)
RUN npm run build
# Verify critical routes were built (fail build if missing)
RUN echo "Verifying logs API routes in build output..." && \
if [ ! -d ".output/server/chunks/routes/api/logs" ]; then \
echo "❌ ERROR: logs/ directory not found in build output!" && \
echo "Available API routes:" && \
ls -la .output/server/chunks/routes/api/ | head -20 && \
exit 1; \
fi && \
MISSING=0 && \
for route in entries.get.mjs errors.get.mjs failures.get.mjs success.get.mjs critical.get.mjs; do \
if [ ! -f ".output/server/chunks/routes/api/logs/$route" ]; then \
echo "❌ Missing route: logs/$route"; \
MISSING=1; \
else \
echo "✅ Found: logs/$route"; \
fi; \
done && \
if [ "$MISSING" -eq 1 ]; then \
echo "❌ Build verification failed - missing required routes"; \
exit 1; \
fi && \
if [ ! -f ".output/server/chunks/routes/api/logs-statistics.get.mjs" ]; then \
echo "❌ Missing route: logs-statistics.get.mjs"; \
exit 1; \
else \
echo "✅ Found: logs-statistics.get.mjs"; \
fi && \
echo "✅ All logs API routes verified successfully" && \
echo "" && \
echo "Verifying collections API routes..." && \
if [ ! -f ".output/server/chunks/routes/api/collections.get.mjs" ]; then \
echo "❌ Missing route: collections.get.mjs"; \
exit 1; \
else \
echo "✅ Found: collections.get.mjs"; \
fi && \
if [ ! -d ".output/server/chunks/routes/api/collections" ]; then \
echo "❌ ERROR: collections/ directory not found in build output!"; \
exit 1; \
else \
echo "✅ Found: collections/ directory"; \
fi && \
if [ ! -f ".output/server/chunks/routes/api/seerr-collections.get.mjs" ]; then \
echo "❌ Missing route: seerr-collections.get.mjs"; \
exit 1; \
else \
echo "✅ Found: seerr-collections.get.mjs"; \
fi && \
if [ ! -d ".output/server/chunks/routes/api/seerr-collections" ]; then \
echo "❌ ERROR: seerr-collections/ directory not found in build output!"; \
exit 1; \
else \
echo "✅ Found: seerr-collections/ directory"; \
fi && \
echo "✅ All collections API routes verified successfully"
# ==============================================================================
# Stage 3: Final Unified Image
# ==============================================================================
FROM python:3.10-slim
# Install MariaDB (MySQL-compatible), Node.js, and all runtime dependencies.
# Single layer: update + install + cleanup together keeps the apt cache out of
# the image. The X11/NSS/GTK libraries are runtime deps for the headless
# Chrome installed in the next step; supervisor runs the multi-process setup.
# NOTE(review): the NodeSource setup script is piped from the network with no
# pinned version or checksum — each build may get a different Node 20.x;
# confirm this is acceptable or pin the nodejs package version.
RUN apt-get update && apt-get install -y --no-install-recommends \
# MariaDB server (MySQL-compatible)
mariadb-server \
mariadb-client \
# Node.js runtime
curl \
gnupg \
&& curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y --no-install-recommends nodejs \
# Runtime dependencies for Python/Chrome
wget \
unzip \
libnss3 \
libxss1 \
libasound2 \
fonts-liberation \
libappindicator3-1 \
libgbm-dev \
libgtk-3-0 \
libx11-xcb1 \
libxtst6 \
xdg-utils \
libglib2.0-0 \
libdrm2 \
libxrandr2 \
ca-certificates \
jq \
supervisor \
# Cleanup
&& rm -rf /var/lib/apt/lists/* \
&& apt-get clean
# Install browser and driver based on architecture.
# x86_64: Chrome for Testing + matching ChromeDriver from Google's
#         last-known-good Stable feed (browser and driver stay in sync).
# aarch64: no Google Chrome build exists, so Debian's chromium packages are
#          used and symlinked to the same paths.
# The feed JSON is fetched ONCE into /tmp/cft.json and queried locally, so the
# version, Chrome URL, and ChromeDriver URL are guaranteed to come from the
# same snapshot (the original fetched it three times, risking a mid-build feed
# update and silently-empty variables on HTTP errors; curl -fsS now fails the
# build loudly instead).
# NOTE(review): the aarch64 branch points apt at bullseye while python:3.10-slim
# is bookworm-based at time of review — confirm the mixed repos are intentional.
RUN arch=$(uname -m) && \
    if [ "$arch" = "x86_64" ]; then \
        PLATFORM="linux64" && \
        curl -fsS -o /tmp/cft.json \
            "https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json" && \
        CHROME_VERSION=$(jq -r '.channels.Stable.version' /tmp/cft.json) && \
        CHROME_URL=$(jq -r ".channels.Stable.downloads.chrome[] | select(.platform == \"$PLATFORM\") | .url" /tmp/cft.json) && \
        echo "Downloading Chrome version ${CHROME_VERSION} for $PLATFORM from: $CHROME_URL" && \
        wget -O /tmp/chrome-$PLATFORM.zip "$CHROME_URL" && \
        unzip /tmp/chrome-$PLATFORM.zip -d /opt/ && \
        mv /opt/chrome-$PLATFORM /opt/chrome && \
        ln -sf /opt/chrome/chrome /usr/bin/google-chrome && \
        chmod +x /usr/bin/google-chrome && \
        CHROMEDRIVER_URL=$(jq -r ".channels.Stable.downloads.chromedriver[] | select(.platform == \"$PLATFORM\") | .url" /tmp/cft.json) && \
        echo "Downloading ChromeDriver for $PLATFORM from: $CHROMEDRIVER_URL" && \
        wget -O /tmp/chromedriver-$PLATFORM.zip "$CHROMEDRIVER_URL" && \
        unzip /tmp/chromedriver-$PLATFORM.zip -d /usr/local/bin/ && \
        mv /usr/local/bin/chromedriver-$PLATFORM/chromedriver /usr/local/bin/chromedriver && \
        chmod +x /usr/local/bin/chromedriver && \
        rm -rf /tmp/cft.json /tmp/chrome-$PLATFORM.zip /tmp/chromedriver-$PLATFORM.zip /usr/local/bin/chromedriver-$PLATFORM; \
    elif [ "$arch" = "aarch64" ]; then \
        echo "deb http://deb.debian.org/debian bullseye main" > /etc/apt/sources.list && \
        echo "deb http://deb.debian.org/debian-security bullseye-security main" >> /etc/apt/sources.list && \
        apt-get update && \
        apt-get install -y --no-install-recommends chromium chromium-driver && \
        ln -sf /usr/bin/chromium /usr/bin/google-chrome && \
        ln -sf /usr/bin/chromium-driver /usr/local/bin/chromedriver && \
        rm -rf /var/lib/apt/lists/*; \
    else \
        echo "Unsupported architecture: $arch"; exit 1; \
    fi
# Copy Python packages from builder stage (pip --user target; PATH is extended
# to /root/.local/bin later so installed console scripts resolve)
COPY --from=python-builder /root/.local /root/.local
# Copy built Nuxt application and node_modules from builder stage
COPY --from=node-builder /app/frontend/.output /app/.output
COPY --from=node-builder /app/frontend/package*.json /app/
COPY --from=node-builder /app/frontend/node_modules /app/node_modules
WORKDIR /app
# Create necessary directories first: app log/data dirs plus MariaDB's data
# and socket dirs, handed to the mysql user created by the mariadb-server
# package installed above
RUN mkdir -p /app/logs /app/data /app/data-default /var/lib/mysql /var/run/mysqld && \
chown -R mysql:mysql /var/lib/mysql /var/run/mysqld && \
chmod 755 /var/lib/mysql /var/run/mysqld
# Explicitly copy essential data files (JSON only - images are now fetched from TMDB)
# Copy data directory BEFORE copying the rest of the application to ensure files are in build context
# These files should be committed to git (use Git LFS if too large)
# Note: This will fail if data/ directory doesn't exist, which is desired behavior
COPY data/ /tmp/data/
# Verify and copy critical data files (fail build if missing).
# Staged into /tmp/data first so each required file can be checked and copied
# individually with a clear per-file build-log message. The four named files
# are mandatory; any other *.json in data/ is copied best-effort (|| true),
# and everything is mirrored into /app/data-default as a pristine backup.
# NOTE(review): the [ ! -d /tmp/data ] guard is defense in depth — the COPY
# above already fails the build when data/ is absent, so this branch should
# be unreachable.
RUN echo "Verifying essential data files..." && \
if [ ! -d "/tmp/data" ]; then \
echo "❌ ERROR: data/ directory not found in build context"; \
echo " Ensure data/ directory exists and contains required JSON files"; \
exit 1; \
fi && \
MISSING=0 && \
for file in seerr-collections.json unified.json franchises.json regex-presets.json; do \
if [ -f "/tmp/data/$file" ]; then \
cp "/tmp/data/$file" "/app/data/$file" && \
echo "✅ Found: $file ($(du -h /app/data/$file | cut -f1))"; \
else \
echo "❌ ERROR: Missing critical data file: $file"; \
MISSING=1; \
fi; \
done && \
if [ "$MISSING" -eq 1 ]; then \
echo "❌ Build failed: Critical data files are missing from build context."; \
echo " Ensure data/*.json files are committed to git and not excluded by .dockerignore"; \
exit 1; \
fi && \
# Copy all other JSON files from data directory if they exist
cp /tmp/data/*.json /app/data/ 2>/dev/null || true && \
# Copy to data-default as backup
cp -r /app/data/*.json /app/data-default/ 2>/dev/null || true && \
chmod -R 755 /app/data /app/data-default && \
rm -rf /tmp/data && \
echo "✅ All essential data files verified successfully"
# Copy application code (after data files are verified).
# NOTE(review): COPY . . re-copies the context's data/ over the verified files
# in /app/data (same bytes, so harmless) and pulls in everything not excluded
# by .dockerignore — confirm .dockerignore exists and excludes .git,
# node_modules, logs, and any .env files.
COPY . .
# Copy MySQL initialization scripts (executed on first database init)
COPY mysql-init /docker-entrypoint-initdb.d/
# Copy supervisor configuration (process manager for mysql/backend/frontend)
COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
# Copy startup scripts
COPY scripts/start-unified.sh /usr/local/bin/start-unified.sh
COPY scripts/wait-for-mysql.sh /app/scripts/wait-for-mysql.sh
COPY scripts/wait-for-frontend.sh /app/scripts/wait-for-frontend.sh
COPY scripts/start-services-sequentially.sh /app/scripts/start-services-sequentially.sh
COPY scripts/ensure-db-user.sh /app/scripts/ensure-db-user.sh
# Mark all startup/helper scripts executable in one layer
RUN chmod +x /usr/local/bin/start-unified.sh /app/scripts/wait-for-mysql.sh /app/scripts/wait-for-frontend.sh /app/scripts/start-services-sequentially.sh /app/scripts/ensure-db-user.sh
# Runtime environment: Chrome/Selenium binary locations and Python setup.
# PATH gains /root/.local/bin so the pip --user console scripts resolve.
ENV CHROME_BIN=/usr/bin/google-chrome \
    CHROME_DRIVER_PATH=/usr/local/bin/chromedriver \
    RUNNING_IN_DOCKER=true \
    PYTHONPATH=/app \
    PATH=/root/.local/bin:$PATH
# Nuxt frontend runtime configuration.
ENV NODE_ENV=production \
    NUXT_HOST=0.0.0.0 \
    NUXT_PORT=3777
# Documented ports: 3306 (MySQL), 3777 (Nuxt frontend), 8777 (backend —
# matches SEERRBRIDGE_URL default), 8778 (purpose not visible here — confirm).
EXPOSE 3306 3777 8777 8778
# Healthy only when BOTH the backend status endpoint and the frontend health
# API answer; 120s start-period allows MariaDB + services to come up.
HEALTHCHECK --interval=30s --timeout=10s --start-period=120s --retries=3 \
CMD curl -f http://localhost:8777/status && curl -f http://localhost:3777/api/health || exit 1
# Start unified container
CMD ["/usr/local/bin/start-unified.sh"]