-
Notifications
You must be signed in to change notification settings - Fork 13
issue127-skull-boneset #133
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
cadc1f4
0eae4ea
51c52d7
4898612
4df5ead
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Large diffs are not rendered by default.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -90,6 +90,7 @@ | |
| // console.log(`🚀 Server running on http://127.0.0.1:${PORT}`); | ||
| //}); | ||
|
|
||
| // boneset-api/server.js | ||
| // boneset-api/server.js | ||
| const express = require("express"); | ||
| const axios = require("axios"); | ||
|
|
@@ -103,7 +104,7 @@ const PORT = process.env.PORT || 8000; | |
|
|
||
| app.use(cors()); | ||
|
|
||
| // ---- Existing GitHub sources used only by /combined-data (unchanged) ---- | ||
| // ---- Existing GitHub sources used only by /combined-data (unchanged for Pelvis) ---- | ||
| const GITHUB_REPO = "https://raw.githubusercontent.com/oss-slu/DigitalBonesBox/data/DataPelvis/"; | ||
| const BONESET_JSON_URL = `${GITHUB_REPO}boneset/bony_pelvis.json`; | ||
| const BONES_DIR_URL = `${GITHUB_REPO}bones/`; | ||
|
|
@@ -120,7 +121,7 @@ const bonesetLimiter = rateLimit({ | |
| }); | ||
|
|
||
| // ---- Only allow bonesets we ship locally right now ---- | ||
| const ALLOWED_BONESETS = new Set(["bony_pelvis"]); | ||
| const ALLOWED_BONESETS = new Set(["bony_pelvis", "skull"]); | ||
|
|
||
| // ---- Helpers ---- | ||
| async function fetchJSON(url) { | ||
|
|
@@ -145,7 +146,7 @@ function safeDataPath(fileName) { | |
| return candidate; | ||
| } | ||
|
|
||
| // Tiny HTML escape (double-quotes everywhere for ESLint) | ||
| // Tiny HTML escape | ||
| function escapeHtml(str = "") { | ||
| return String(str).replace(/[&<>"']/g, (c) => ({ | ||
| "&": "&amp;", | ||
|
|
@@ -156,14 +157,23 @@ function escapeHtml(str = "") { | |
| })[c]); | ||
| } | ||
|
|
||
// ---- Load local final_* boneset JSON with a small cache ----
// Keyed by boneset id (e.g. "skull"); values are parsed JSON objects.
const bonesetCache = new Map();

/**
 * Load a local merged boneset JSON file (final_<bonesetId>.json), caching
 * the parsed result so repeated description lookups don't re-read the disk.
 * @param {string} bonesetId - boneset id, e.g. "bony_pelvis" or "skull"
 * @returns {Promise<object>} parsed boneset JSON
 * @throws if the file is missing or contains invalid JSON
 */
async function loadBoneset(bonesetId) {
  if (bonesetCache.has(bonesetId)) return bonesetCache.get(bonesetId);
  const file = safeDataPath(`final_${bonesetId}.json`);
  const raw = await fs.readFile(file, "utf8");
  const parsed = JSON.parse(raw);
  bonesetCache.set(bonesetId, parsed);
  return parsed;
}

// Load any local final_* boneset JSON by id (e.g., "skull").
// Delegates to loadBoneset so both call paths share one cache instead of
// duplicating the read-and-parse logic with no caching.
async function loadLocalBoneset(id) {
  return loadBoneset(id);
}
|
|
||
| function findNodeById(boneset, id) { | ||
|
|
@@ -183,64 +193,80 @@ app.get("/", (_req, res) => { | |
| res.json({ message: "Welcome to the Boneset API (GitHub-Integrated)" }); | ||
| }); | ||
|
|
||
| // Unchanged: used by the dropdowns in the current UI | ||
| // Unchanged pelvis aggregation + add Skull from local final_skull.json | ||
| app.get("/combined-data", async (_req, res) => { | ||
| try { | ||
| const bonesetData = await fetchJSON(BONESET_JSON_URL); | ||
| if (!bonesetData) return res.status(500).json({ error: "Failed to load boneset data" }); | ||
|
|
||
| const bonesets = [{ id: bonesetData.id, name: bonesetData.name }]; | ||
| const bonesets = []; | ||
| const bones = []; | ||
| const subbones = []; | ||
|
|
||
| for (const boneId of bonesetData.bones) { | ||
| // --- Bony Pelvis (from GitHub) --- | ||
| const pelvis = await fetchJSON(BONESET_JSON_URL); | ||
| if (!pelvis) return res.status(500).json({ error: "Failed to load pelvis data" }); | ||
| bonesets.push({ id: pelvis.id, name: pelvis.name }); | ||
|
|
||
| for (const boneId of pelvis.bones) { | ||
| const boneJsonUrl = `${BONES_DIR_URL}${boneId}.json`; | ||
| const boneData = await fetchJSON(boneJsonUrl); | ||
| if (boneData) { | ||
| bones.push({ id: boneData.id, name: boneData.name, boneset: bonesetData.id }); | ||
| bones.push({ id: boneData.id, name: boneData.name, boneset: pelvis.id }); | ||
| (boneData.subBones || []).forEach((subBoneId) => { | ||
| subbones.push({ id: subBoneId, name: subBoneId.replace(/_/g, " "), bone: boneData.id }); | ||
| }); | ||
| } | ||
| } | ||
|
|
||
| // --- Skull (from local final_skull.json) --- | ||
| try { | ||
| const skull = await loadBoneset("skull"); | ||
| bonesets.push({ id: skull.id, name: skull.name }); | ||
| for (const b of skull.bones || []) { | ||
| bones.push({ id: b.id, name: b.name, boneset: skull.id }); | ||
| for (const sb of b.subbones || []) { | ||
| subbones.push({ id: sb.id, name: sb.name, bone: b.id }); | ||
| } | ||
| } | ||
| } catch (e) { | ||
| console.warn("Skull load failed:", e.message); | ||
| } | ||
|
|
||
| res.json({ bonesets, bones, subbones }); | ||
| } catch (error) { | ||
| console.error("Error fetching combined data:", error.message); | ||
| res.status(500).json({ error: "Internal Server Error" }); | ||
| } | ||
| }); | ||
|
|
||
| // Serve description from the *selected* local merged JSON (supports pelvis & skull) | ||
| // Serve description from the local merged JSON (no SSRF) | ||
| app.get("/api/description", bonesetLimiter, async (req, res) => { | ||
| const boneId = String(req.query.boneId || ""); | ||
| const bonesetId = String(req.query.bonesetId || "bony_pelvis"); | ||
|
|
||
| // Basic allowlist-style validation | ||
| if (!/^[a-z0-9_]+$/.test(boneId)) { | ||
| if (!/^[a-z0-9_]+$/.test(boneId) || !ALLOWED_BONESETS.has(bonesetId)) { | ||
| return res.type("text/html").send(""); | ||
| } | ||
|
|
||
| try { | ||
| const set = await loadBoneset(); | ||
| const set = await loadLocalBoneset(bonesetId); // <- IMPORTANT | ||
|
||
| const node = findNodeById(set, boneId); | ||
| if (!node) return res.type("text/html").send(""); | ||
|
|
||
| const name = node.name || boneId.replace(/_/g, " "); | ||
| const lines = Array.isArray(node.description) ? node.description : []; | ||
|
|
||
| // HTMX expects an <li> list fragment | ||
| let html = `<li><strong>${escapeHtml(name)}</strong></li>`; | ||
| for (const line of lines) { | ||
| html += `<li>${escapeHtml(line)}</li>`; | ||
| } | ||
| for (const line of lines) html += `<li>${escapeHtml(line)}</li>`; | ||
| res.type("text/html").send(html); | ||
| } catch (err) { | ||
| console.error("description error:", err); | ||
| res.type("text/html").send("<li>Description not available.</li>"); | ||
| } | ||
| }); | ||
|
|
||
| // Safe path + allowlist + rate limit | ||
|
|
||
|
|
||
| // Safe path + allowlist + rate limit to fetch the full local JSON | ||
| app.get("/api/boneset/:bonesetId", bonesetLimiter, async (req, res) => { | ||
| const { bonesetId } = req.params; | ||
|
|
||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. /api/description renders lines twice: the code appends the description lines in a loop, then immediately appends them again in a second loop. You’ll get duplicated `<li>` items. |
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,5 @@ | ||
| { | ||
| "name": "Labels", | ||
| "id": "labels", | ||
| "description": [] | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,49 @@ | ||
| { | ||
| "slide": "slide10", | ||
| "images": [ | ||
| { | ||
| "rId": "rId8", | ||
| "extracted_name": "slide10_rId8.jpg" | ||
| } | ||
| ], | ||
| "annotations": [ | ||
| { | ||
| "text": "", | ||
| "position": { | ||
| "x": 2743200, | ||
| "y": 1299410, | ||
| "width": 5943600, | ||
| "height": 4952999 | ||
| } | ||
| }, | ||
| { | ||
| "text": "Anterior view ", | ||
| "position": { | ||
| "x": 5372100, | ||
| "y": 6252408, | ||
| "width": 685800, | ||
| "height": 215444 | ||
| }, | ||
| "link": null | ||
| }, | ||
| { | ||
| "text": "", | ||
| "position": { | ||
| "x": 3444876, | ||
| "y": 2397125, | ||
| "width": 4333875, | ||
| "height": 3282950 | ||
| } | ||
| }, | ||
| { | ||
| "text": "Mental tubercle", | ||
| "position": { | ||
| "x": 3581400, | ||
| "y": 5739596, | ||
| "width": 838200, | ||
| "height": 215444 | ||
| }, | ||
| "link": null | ||
| } | ||
| ] | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,19 @@ | ||
| { | ||
| "name": "Lateral view", | ||
| "id": "lateral_view", | ||
| "description": [ | ||
| "The lateral view can be divided into 3 sections:", | ||
| "Facial \u2013 anterior", | ||
| "Temporal \u2013 middle", | ||
| "Occipital \u2013 posterior", | ||
| "The temporal section is separated by the zygomatic arch into into a", | ||
| "temporal fossa", | ||
| "superiorly and an infratemporal fossa inferiorly.", | ||
| "The origin of the temporalis muscle attaches to the temporal fossa", | ||
| "The", | ||
| "pterion", | ||
| "is located in the temporal fossa and marks the junction where the frontal, parietal, temporal, and sphenoid bones meet.", | ||
| "The pterion is an important landmark because it overlies the anterior branch of the middle meningeal artery.", | ||
| "Damage to this area of the skull can cause a rupture of this artery" | ||
| ] | ||
| } |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The route re-declares bonesets and mixes old pelvis logic with the new flow. You initialize bonesets twice and start a loop on bonesetData.bones while also fetching pelvis again inside that loop. This will either throw or produce wrong results. Clean this to one clear flow: build pelvis, then skull, then return