
Commit f9b74b7

refactor endpoints into routes for better organization
1 parent e056351 commit f9b74b7

7 files changed: 368 additions, 243 deletions

mimir-rag/src/github/utils.ts

Lines changed: 37 additions & 0 deletions
```diff
@@ -113,3 +113,40 @@ export function buildSourceUrl(owner: string, repo: string, branch: string, file
   const encodedPath = encodeRepoPath(filePath);
   return `https://github.com/${owner}/${repo}/blob/${encodeURIComponent(branch)}/${encodedPath}`;
 }
+
+// Webhook utilities
+import { createHmac, timingSafeEqual } from "node:crypto";
+
+export function getHeaderValue(header: string | string[] | undefined): string | undefined {
+  if (Array.isArray(header)) {
+    return header[0];
+  }
+  return header;
+}
+
+export function verifyGithubWebhookSignature(
+  secret: string,
+  signature: string | undefined,
+  rawBody?: Buffer
+): boolean {
+  if (!secret || !signature || !rawBody) {
+    return false;
+  }
+
+  const hmac = createHmac("sha256", secret);
+  hmac.update(rawBody);
+  const expected = `sha256=${hmac.digest("hex")}`;
+
+  const expectedBuffer = Buffer.from(expected);
+  const signatureBuffer = Buffer.from(signature);
+
+  if (expectedBuffer.length !== signatureBuffer.length) {
+    return false;
+  }
+
+  try {
+    return timingSafeEqual(expectedBuffer, signatureBuffer);
+  } catch {
+    return false;
+  }
+}
```
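
For reference, a minimal usage sketch of the new helpers (not part of the commit): it reproduces the `sha256=<hex>` digest GitHub sends in `X-Hub-Signature-256` and checks that a tampered signature is rejected. The secret, payload, and import path are made up for illustration.

```ts
import { createHmac } from "node:crypto";
import { getHeaderValue, verifyGithubWebhookSignature } from "./utils"; // path assumed relative to mimir-rag/src/github/

// Made-up secret and payload, for illustration only.
const secret = "example-webhook-secret";
const rawBody = Buffer.from(JSON.stringify({ repository: { full_name: "acme/repo" } }));

// GitHub signs the raw body with HMAC-SHA256 and prefixes the hex digest with "sha256=".
const signature = `sha256=${createHmac("sha256", secret).update(rawBody).digest("hex")}`;
const header = getHeaderValue(signature); // no-op for a plain string, picks the first value for arrays

console.log(verifyGithubWebhookSignature(secret, header, rawBody)); // true
console.log(verifyGithubWebhookSignature(secret, "sha256=deadbeef", rawBody)); // false
```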
Lines changed: 81 additions & 29 deletions
```diff
@@ -1,23 +1,17 @@
+import type { Request, Response } from "express";
 import type { Logger } from "pino";
-import { askAi } from "../query/askAi";
-import type { AppConfig } from "../config/types";
-import type { LLMClientBundle } from "../llm/types";
-import type { SupabaseVectorStore } from "../supabase/client";
-
-export interface StreamAskOptions {
-  question: string;
-  matchCount?: number;
-  similarityThreshold?: number;
-  systemPrompt?: string;
-}
+import { askAi } from "../../query/askAi";
+import type { AppConfig } from "../../config/types";
+import type { LLMClientBundle } from "../../llm/types";
+import type { SupabaseVectorStore } from "../../supabase/client";
 
-export interface StreamAskContext {
+export interface AskRouteContext {
   config: AppConfig;
   llm: LLMClientBundle;
   store: SupabaseVectorStore;
 }
 
-export function isStreamingRequest(req: any): boolean {
+function isStreamingRequest(req: Request): boolean {
   const acceptHeader = typeof req?.headers?.accept === "string" ? req.headers.accept.toLowerCase() : "";
   if (acceptHeader.includes("text/event-stream")) {
     return true;
@@ -39,12 +33,35 @@ export function isStreamingRequest(req: any): boolean {
   return normalize(req?.query?.stream) || normalize(req?.body?.stream);
 }
 
-export async function streamAskResponse(
-  req: any,
-  res: any,
-  context: StreamAskContext,
+function initializeEventStream(res: Response): void {
+  res.setHeader("Content-Type", "text/event-stream");
+  res.setHeader("Cache-Control", "no-cache, no-transform");
+  res.setHeader("Connection", "keep-alive");
+  if (typeof res.flushHeaders === "function") {
+    res.flushHeaders();
+  }
+}
+
+function pushStreamEvent(res: Response, event: string, payload: unknown): void {
+  if (res.writableEnded) {
+    return;
+  }
+
+  res.write(`event: ${event}\n`);
+  res.write(`data: ${JSON.stringify(payload)}\n\n`);
+}
+
+async function handleStreamingAsk(
+  req: Request,
+  res: Response,
+  context: AskRouteContext,
   logger: Logger,
-  options: StreamAskOptions
+  options: {
+    question: string;
+    matchCount?: number;
+    similarityThreshold?: number;
+    systemPrompt?: string;
+  }
 ): Promise<void> {
   initializeEventStream(res);
   const abortController = new AbortController();
@@ -104,20 +121,55 @@ export async function streamAskResponse(
   }
 }
 
-function initializeEventStream(res: any): void {
-  res.setHeader("Content-Type", "text/event-stream");
-  res.setHeader("Cache-Control", "no-cache, no-transform");
-  res.setHeader("Connection", "keep-alive");
-  if (typeof res.flushHeaders === "function") {
-    res.flushHeaders();
+export async function handleAskRequest(
+  req: Request,
+  res: Response,
+  context: AskRouteContext,
+  logger: Logger
+): Promise<void> {
+  const { question, matchCount, similarityThreshold, systemPrompt } = req.body ?? {};
+
+  if (typeof question !== "string" || question.trim().length === 0) {
+    res.status(400).json({
+      status: "error",
+      message: "Request body must include a non-empty 'question' field.",
+    });
+    return;
   }
-}
 
-function pushStreamEvent(res: any, event: string, payload: unknown): void {
-  if (res.writableEnded) {
+  if (isStreamingRequest(req)) {
+    await handleStreamingAsk(req, res, context, logger, {
+      question,
+      matchCount,
+      similarityThreshold,
+      systemPrompt,
+    });
     return;
   }
 
-  res.write(`event: ${event}\n`);
-  res.write(`data: ${JSON.stringify(payload)}\n\n`);
+  try {
+    const response = await askAi(
+      context.llm,
+      context.store,
+      {
+        question,
+        matchCount,
+        similarityThreshold,
+        systemPrompt,
+      },
+      {
+        logger,
+        config: context.config,
+      }
+    );
+
+    res.json({
+      status: "ok",
+      answer: response.answer,
+      sources: response.sources,
+    });
+  } catch (error) {
+    logger.error({ err: error }, "Ask endpoint failed.");
+    res.status(500).json({ status: "error", message: (error as Error).message });
+  }
 }
```
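
A possible wiring sketch for the refactored ask handler. The `/ask` path and the `registerAskRoute` helper are assumptions; the commit does not show the router setup, and this file's name is not visible in the extracted diff.

```ts
import express, { type Express } from "express";
import type { Logger } from "pino";
import { handleAskRequest, type AskRouteContext } from "./routes/ask"; // module path assumed

// Mounts the consolidated ask handler; it decides internally between SSE and plain JSON.
export function registerAskRoute(app: Express, context: AskRouteContext, logger: Logger): void {
  app.use(express.json());
  app.post("/ask", (req, res) => handleAskRequest(req, res, context, logger)); // "/ask" is an assumed path
}
```

Per `isStreamingRequest`, a client opts into the SSE branch by sending `Accept: text/event-stream` or a `stream` flag in the query string or body; otherwise `handleAskRequest` falls through to the single JSON response.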
Lines changed: 51 additions & 0 deletions
```diff
@@ -0,0 +1,51 @@
+import type { Request, Response } from "express";
+import type { Logger } from "pino";
+import type { IngestRouteContext } from "./ingest";
+import { triggerIngestion } from "./ingest";
+import { getHeaderValue, verifyGithubWebhookSignature } from "../../github/utils";
+
+export type RequestWithRawBody = Request & { rawBody?: Buffer };
+
+export interface GithubWebhookRouteContext extends IngestRouteContext {
+  githubWebhookSecret?: string;
+}
+
+export async function handleGithubWebhookRequest(
+  req: RequestWithRawBody,
+  res: Response,
+  context: GithubWebhookRouteContext,
+  logger: Logger
+): Promise<void> {
+  const secret = context.githubWebhookSecret;
+
+  if (!secret) {
+    res.status(501).json({
+      status: "error",
+      message: "GitHub webhook secret is not configured on the server.",
+    });
+    return;
+  }
+
+  const signatureHeader = getHeaderValue(req.headers["x-hub-signature-256"]);
+  const rawBody = req.rawBody;
+
+  if (!verifyGithubWebhookSignature(secret, signatureHeader, rawBody)) {
+    res.status(401).json({ status: "error", message: "Invalid GitHub signature." });
+    return;
+  }
+
+  const eventType = getHeaderValue(req.headers["x-github-event"]) ?? "unknown";
+
+  if (eventType === "ping") {
+    res.json({ status: "ok", message: "pong" });
+    return;
+  }
+
+  const repoName = req.body?.repository?.full_name ?? "unknown-repo";
+
+  await triggerIngestion(res, context, logger, `github-webhook:${repoName}:${eventType}`, {
+    busyStatus: 202,
+    busyState: "pending",
+    busyMessage: "Ingestion already running. Webhook acknowledged.",
+  });
+}
```
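
Because `verifyGithubWebhookSignature` compares against the exact bytes GitHub signed, this handler only works if something populates `req.rawBody`. One way to do that, sketched here with an assumed route path and helper name, is the `verify` hook of `express.json`:

```ts
import express, { type Express } from "express";
import type { Logger } from "pino";
import {
  handleGithubWebhookRequest,
  type GithubWebhookRouteContext,
  type RequestWithRawBody,
} from "./routes/githubWebhook"; // module path assumed; this file's name is not shown in the diff

export function registerGithubWebhookRoute(
  app: Express,
  buildContext: () => GithubWebhookRouteContext,
  logger: Logger
): void {
  app.post(
    "/webhooks/github", // assumed path
    // Capture the raw body alongside the parsed JSON so the signature check has the original bytes.
    express.json({
      verify: (req, _res, buf) => {
        (req as RequestWithRawBody).rawBody = Buffer.from(buf);
      },
    }),
    (req, res) => handleGithubWebhookRequest(req as RequestWithRawBody, res, buildContext(), logger)
  );
}
```

Note the behavior encoded in the handler: GitHub's `ping` event gets a `pong`, and a webhook that arrives while ingestion is running is acknowledged with 202/`pending` instead of the manual route's 409.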
Lines changed: 9 additions & 0 deletions
```diff
@@ -0,0 +1,9 @@
+import type { Request, Response } from "express";
+
+export interface HealthRouteContext {
+  ingestionBusy: boolean;
+}
+
+export function handleHealthRequest(_req: Request, res: Response, context: HealthRouteContext): void {
+  res.json({ status: "ok", ingestionBusy: context.ingestionBusy });
+}
```
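
A small wiring sketch; the helper name and the busy-flag getter are assumptions. Because `HealthRouteContext` carries a plain boolean, the context should be rebuilt per request so `/health` reports the current flag rather than a snapshot taken at startup.

```ts
import type { Express } from "express";
import { handleHealthRequest } from "./routes/health"; // module path assumed

export function registerHealthRoute(app: Express, isIngestionBusy: () => boolean): void {
  // Build the context inside the handler so each response reads the live busy flag.
  app.get("/health", (req, res) =>
    handleHealthRequest(req, res, { ingestionBusy: isIngestionBusy() })
  );
}
```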
Lines changed: 66 additions & 0 deletions
```diff
@@ -0,0 +1,66 @@
+import type { Request, Response } from "express";
+import type { Logger } from "pino";
+import type { AppConfig } from "../../config/types";
+import type { LLMClientBundle } from "../../llm/types";
+import type { SupabaseVectorStore } from "../../supabase/client";
+import { runIngestionPipeline } from "../../ingest/pipeline";
+
+export interface IngestRouteContext {
+  config: AppConfig;
+  llm: LLMClientBundle;
+  store: SupabaseVectorStore;
+  ingestionBusy: boolean;
+  setIngestionBusy: (busy: boolean) => void;
+}
+
+interface TriggerIngestionOptions {
+  busyStatus?: number;
+  busyMessage?: string;
+  busyState?: "error" | "pending";
+}
+
+async function triggerIngestion(
+  res: Response,
+  context: IngestRouteContext,
+  logger: Logger,
+  trigger: string,
+  options?: TriggerIngestionOptions
+): Promise<void> {
+  if (context.ingestionBusy) {
+    res.status(options?.busyStatus ?? 409).json({
+      status: options?.busyState ?? "error",
+      message: options?.busyMessage ?? "Ingestion already running.",
+    });
+    return;
+  }
+
+  context.setIngestionBusy(true);
+  const startedAt = Date.now();
+
+  try {
+    logger.info({ trigger }, "Starting ingestion.");
+    const result = await runIngestionPipeline(context.config, context.llm, context.store, logger);
+    res.json({
+      status: "ok",
+      trigger,
+      durationMs: Date.now() - startedAt,
+      stats: result.stats,
+    });
+  } catch (error) {
+    logger.error({ err: error, trigger }, "Ingestion failed.");
+    res.status(500).json({ status: "error", message: (error as Error).message });
+  } finally {
+    context.setIngestionBusy(false);
+  }
+}
+
+export async function handleIngestRequest(
+  _req: Request,
+  res: Response,
+  context: IngestRouteContext,
+  logger: Logger
+): Promise<void> {
+  await triggerIngestion(res, context, logger, "manual-request");
+}
+
+export { triggerIngestion };
```
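
The guard in `triggerIngestion` only works if `ingestionBusy` and `setIngestionBusy` point at shared server state. One way that might be wired, sketched with an assumed route path and helper name (the commit does not show the server bootstrap):

```ts
import type { Express } from "express";
import type { Logger } from "pino";
import { handleIngestRequest, type IngestRouteContext } from "./routes/ingest"; // module path assumed

export function registerIngestRoute(
  app: Express,
  base: Pick<IngestRouteContext, "config" | "llm" | "store">,
  logger: Logger
): void {
  // One mutable flag, shared by the manual ingest route and the webhook route,
  // exposed through the context so triggerIngestion can reject concurrent runs.
  let ingestionBusy = false;

  const buildContext = (): IngestRouteContext => ({
    ...base,
    ingestionBusy,
    setIngestionBusy: (busy) => {
      ingestionBusy = busy;
    },
  });

  // "/ingest" is an assumed path.
  app.post("/ingest", (req, res) => handleIngestRequest(req, res, buildContext(), logger));
}
```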
Lines changed: 78 additions & 0 deletions
```diff
@@ -0,0 +1,78 @@
+import type { Request, Response } from "express";
+import type { Logger } from "pino";
+import { askAi } from "../../query/askAi";
+import { createLLMClient } from "../../llm/factory";
+import type { AppConfig } from "../../config/types";
+import type { SupabaseVectorStore } from "../../supabase/client";
+
+export interface McpAskRouteContext {
+  config: AppConfig;
+  store: SupabaseVectorStore;
+}
+
+export async function handleMcpAskRequest(
+  req: Request,
+  res: Response,
+  context: McpAskRouteContext,
+  logger: Logger
+): Promise<void> {
+  const { question, matchCount, similarityThreshold, systemPrompt, provider, model, apiKey } = req.body ?? {};
+
+  if (typeof question !== "string" || question.trim().length === 0) {
+    res.status(400).json({
+      status: "error",
+      message: "Request body must include a non-empty 'question' field.",
+    });
+    return;
+  }
+
+  // Validate MCP parameters
+  if (!provider || !model || !apiKey) {
+    res.status(400).json({
+      status: "error",
+      message: "Request body must include 'provider', 'model', and 'apiKey' fields.",
+    });
+    return;
+  }
+
+  try {
+    // Create a temporary LLM client with the provided credentials
+    const mcpLlm = createLLMClient(
+      {
+        embedding: context.config.llm.embedding, // Use default embedding
+        chat: {
+          provider: provider as any,
+          model: model,
+          apiKey: apiKey,
+          temperature: context.config.llm.chat.temperature,
+          maxOutputTokens: context.config.llm.chat.maxOutputTokens,
+        },
+      },
+      logger
+    );
+
+    const response = await askAi(
+      mcpLlm,
+      context.store,
+      {
+        question,
+        matchCount,
+        similarityThreshold,
+        systemPrompt,
+      },
+      {
+        logger,
+        config: context.config,
+      }
+    );
+
+    res.json({
+      status: "ok",
+      answer: response.answer,
+      sources: response.sources,
+    });
+  } catch (error) {
+    logger.error({ err: error }, "MCP Ask endpoint failed.");
+    res.status(500).json({ status: "error", message: (error as Error).message });
+  }
+}
```
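
A rough client sketch of the request shape this handler expects. The URL path and the provider, model, and key values are placeholders, not values taken from the commit.

```ts
// Requires Node 18+ for the global fetch.
async function askViaMcp(): Promise<void> {
  const response = await fetch("http://localhost:3000/mcp/ask", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      question: "How does ingestion get triggered?",
      provider: "openai",          // placeholder provider
      model: "gpt-4o-mini",        // placeholder model
      apiKey: process.env.MCP_CALLER_API_KEY, // caller-supplied key, per the handler's validation
      matchCount: 8,
    }),
  });

  const payload = await response.json();
  if (payload.status !== "ok") {
    throw new Error(payload.message);
  }
  console.log(payload.answer, payload.sources);
}
```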
