-
Notifications
You must be signed in to change notification settings - Fork 13
issue127-skull-boneset #133
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
cadc1f4
0eae4ea
51c52d7
4898612
4df5ead
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Large diffs are not rendered by default.
Large diffs are not rendered by default.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,68 @@ | ||
| <!doctype html> | ||
| <html> | ||
| <head> | ||
| <meta charset="utf-8" /> | ||
| <title>Skull Viewer</title> | ||
| <style> | ||
| body { margin: 24px; background:#111; color:#eee; font-family: system-ui, sans-serif; } | ||
| .wrap { position: relative; display: inline-block; } | ||
| .ann { position: absolute; border: 2px solid; box-sizing: border-box; pointer-events: none; } | ||
| </style> | ||
| </head> | ||
| <body> | ||
| <h1>Skull (image + annotations)</h1> | ||
|
|
||
| <div class="wrap"> | ||
| <img id="img" style="max-width:100%;height:auto" alt="Skull" /> | ||
| <div id="layer" style="position:absolute; inset:0"></div> | ||
| </div> | ||
|
|
||
| <script> | ||
| // === NEW: talk to the Node server on :8000 when we're not already on it | ||
| const API = (location.port === '8000') ? '' : 'http://127.0.0.1:8000'; | ||
|
|
||
| // PowerPoint EMU slide size (16:9). If your deck is 4:3 use 9144000 x 6858000. | ||
| const EMU_W = 12192000, EMU_H = 6858000; | ||
|
|
||
// Overlay annotation rectangles on top of the rendered image.
// Coordinates may be PowerPoint EMUs (huge numbers) or plain pixels; any
// value above 50000 is treated as EMU and scaled to the displayed image size.
function placeBoxes(img, anns){
  const imgW = img.clientWidth;
  const imgH = img.clientHeight;
  const layer = document.getElementById('layer');
  layer.innerHTML = '';
  for (const ann of (anns || [])) {
    const pos = ann.position || {};
    const isEmu = Math.max(pos.x || 0, pos.y || 0, pos.width || 0, pos.height || 0) > 50000;
    const left = isEmu ? (pos.x / EMU_W) * imgW : pos.x;
    const top  = isEmu ? (pos.y / EMU_H) * imgH : pos.y;
    const boxW = pos.width  != null ? (isEmu ? (pos.width  / EMU_W) * imgW : pos.width)  : 0;
    const boxH = pos.height != null ? (isEmu ? (pos.height / EMU_H) * imgH : pos.height) : 0;
    const box = document.createElement('div');
    box.className = 'ann';
    box.style.left = left + 'px';
    box.style.top = top + 'px';
    if (boxW) box.style.width = boxW + 'px';
    if (boxH) box.style.height = boxH + 'px';
    layer.appendChild(box);
  }
}
|
|
||
(async ()=>{
  // Fetch the skull boneset from the Node server on :8000 (API prefix
  // resolves to '' when the page itself is served from :8000).
  const skull = await (await fetch(`${API}/api/boneset/skull`)).json();

  // Pick the first bone that has an image; fall back to the first bone.
  // Guard: the original crashed here when `bones` was missing or empty
  // (skull.bones.find throws on undefined; img.src became "…undefined").
  const node = (skull.bones || []).find(b => b.image_url) || (skull.bones || [])[0];
  if (!node || !node.image_url) {
    console.error('No displayable bone returned by /api/boneset/skull');
    return;
  }

  const img = document.getElementById('img');
  img.onload = () => placeBoxes(img, node.annotations || []);
  img.onerror = () => {
    // Fallback: try a subbone image if the primary image 404s.
    const alt = (node.subbones || []).find(s => s.image_url)?.image_url;
    if (alt && img.src !== `${API}${alt}`) img.src = `${API}${alt}`;
  };

  // node.image_url starts with /images/...; serve it from :8000.
  img.src = `${API}${node.image_url}`;

  // Re-place annotation boxes whenever the responsive image resizes.
  addEventListener('resize', () => placeBoxes(img, node.annotations || []));
})();
| </script> | ||
| </body> | ||
| </html> |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,95 +1,3 @@ | ||
| //const express = require("express"); | ||
| //const axios = require("axios"); | ||
| //const cors = require("cors"); | ||
| //const path = require('path'); // Added for consistency, though not strictly needed for this version | ||
| // | ||
| //const app = express(); | ||
| //const PORT = process.env.PORT || 8000; | ||
| // | ||
| //app.use(cors()); | ||
| // | ||
| //// --- Original GitHub URLs --- | ||
| //const GITHUB_REPO = "https://raw.githubusercontent.com/oss-slu/DigitalBonesBox/data/DataPelvis/"; | ||
| //const BONESET_JSON_URL = `${GITHUB_REPO}boneset/bony_pelvis.json`; | ||
| //const BONES_DIR_URL = `${GITHUB_REPO}bones/`; | ||
| // | ||
| //// Helper function to fetch JSON from GitHub | ||
| //async function fetchJSON(url) { | ||
| // try { | ||
| // const response = await axios.get(url); | ||
| // return response.data; | ||
| // } catch (error) { | ||
| // console.error(`Failed to fetch ${url}:`, error.message); | ||
| // return null; | ||
| // } | ||
| //} | ||
| // | ||
| //// Home route (fixes "Cannot GET /" issue) | ||
| //app.get("/", (req, res) => { | ||
| // res.json({ message: "Welcome to the Boneset API (GitHub-Integrated)" }); | ||
| //}); | ||
| // | ||
| //// --- Original Combined Data Endpoint --- | ||
| //// This endpoint still provides the main data for the dropdowns | ||
| //app.get("/combined-data", async (req, res) => { | ||
| // try { | ||
| // const bonesetData = await fetchJSON(BONESET_JSON_URL); | ||
| // if (!bonesetData) return res.status(500).json({ error: "Failed to load boneset data" }); | ||
| // | ||
| // const bonesets = [{ id: bonesetData.id, name: bonesetData.name }]; | ||
| // const bones = []; | ||
| // const subbones = []; | ||
| // | ||
| // for (const boneId of bonesetData.bones) { | ||
| // const boneJsonUrl = `${BONES_DIR_URL}${boneId}.json`; | ||
| // const boneData = await fetchJSON(boneJsonUrl); | ||
| // | ||
| // if (boneData) { | ||
| // bones.push({ id: boneData.id, name: boneData.name, boneset: bonesetData.id }); | ||
| // boneData.subBones.forEach(subBoneId => { | ||
| // subbones.push({ id: subBoneId, name: subBoneId.replace(/_/g, " "), bone: boneData.id }); | ||
| // }); | ||
| // } | ||
| // } | ||
| // | ||
| // res.json({ bonesets, bones, subbones }); | ||
| // | ||
| // } catch (error) { | ||
| // console.error("Error fetching combined data:", error.message); | ||
| // res.status(500).json({ error: "Internal Server Error" }); | ||
| // } | ||
| //}); | ||
| // | ||
| //// --- NEW HTMX ENDPOINT --- | ||
| //// This endpoint fetches a description and returns it as an HTML fragment | ||
| //app.get("/api/description/", async (req, res) => { // Path changed here | ||
| // const { boneId } = req.query; // Changed from req.params to req.query | ||
| // if (!boneId) { | ||
| // return res.send(''); // Send empty response if no boneId is provided | ||
| // } | ||
| // const GITHUB_DESC_URL = `https://raw.githubusercontent.com/oss-slu/DigitalBonesBox/data/DataPelvis/descriptions/${boneId}_description.json`; | ||
| // | ||
| // try { | ||
| // const response = await axios.get(GITHUB_DESC_URL); | ||
| // const descriptionData = response.data; | ||
| // | ||
| // let html = `<li><strong>${descriptionData.name}</strong></li>`; | ||
| // descriptionData.description.forEach(point => { | ||
| // html += `<li>${point}</li>`; | ||
| // }); | ||
| // res.send(html); | ||
| // | ||
| // } catch (error) { | ||
| // res.send('<li>Description not available.</li>'); | ||
| // } | ||
| //}); | ||
|
|
||
|
|
||
| // Start server | ||
| //app.listen(PORT, () => { | ||
| // console.log(`🚀 Server running on http://127.0.0.1:${PORT}`); | ||
| //}); | ||
|
|
||
| // boneset-api/server.js | ||
| const express = require("express"); | ||
| const axios = require("axios"); | ||
|
|
@@ -102,25 +10,31 @@ const app = express(); | |
| const PORT = process.env.PORT || 8000; | ||
|
|
||
| app.use(cors()); | ||
| app.use("/images", express.static(path.join(__dirname, "public/images"))); // local static images (useful in dev) | ||
|
|
||
// ---- GitHub sources (Pelvis + Skull) ----
const GITHUB_REPO = "https://raw.githubusercontent.com/oss-slu/DigitalBonesBox/data/DataPelvis/";
const BONESET_JSON_URL = `${GITHUB_REPO}boneset/bony_pelvis.json`;
const BONES_DIR_URL = `${GITHUB_REPO}bones/`;

// Skull is branch-aware so a single PR works now; flip SKULL_BRANCH to "data" later.
const SKULL_BRANCH = process.env.SKULL_BRANCH || "issue127-skull-boneset";
const GITHUB_REPO_SKULL = `https://raw.githubusercontent.com/oss-slu/DigitalBonesBox/${SKULL_BRANCH}/DataSkull/`;
const SKULL_JSON_URL = `${GITHUB_REPO_SKULL}boneset/skull.json`;

// ---- Local data dir (used for pelvis descriptions in dev) ----
const DATA_DIR = path.join(__dirname, "data");
|
|
||
// ---- Rate limiter for FS-backed endpoints ----
// The rendered diff interleaved the old and new option lines, leaving
// duplicate `windowMs`/`max` keys and two `const ALLOWED_BONESETS`
// declarations (a SyntaxError); this is the single, current version.
const bonesetLimiter = rateLimit({
  windowMs: 60 * 1000, // 1 minute window
  max: 60,             // 60 requests / min / IP
  standardHeaders: true,
  legacyHeaders: false,
});

// ---- Allowlist for /api/boneset/:bonesetId ----
const ALLOWED_BONESETS = new Set(["bony_pelvis", "skull"]);
|
|
||
| // ---- Helpers ---- | ||
| async function fetchJSON(url) { | ||
|
|
@@ -133,7 +47,6 @@ async function fetchJSON(url) { | |
| } | ||
| } | ||
|
|
||
| // Ensure any resolved path stays inside DATA_DIR | ||
| function safeDataPath(fileName) { | ||
| const base = path.resolve(DATA_DIR); | ||
| const candidate = path.resolve(DATA_DIR, fileName); | ||
|
|
@@ -145,7 +58,6 @@ function safeDataPath(fileName) { | |
| return candidate; | ||
| } | ||
|
|
||
| // Tiny HTML escape (double-quotes everywhere for ESLint) | ||
| function escapeHtml(str = "") { | ||
| return String(str).replace(/[&<>"']/g, (c) => ({ | ||
| "&": "&", | ||
|
|
@@ -156,14 +68,10 @@ function escapeHtml(str = "") { | |
| })[c]); | ||
| } | ||
|
|
||
// Load a merged boneset JSON from the local data dir (e.g. data/final_skull.json).
// Replaces the old cached `loadBoneset()`: no module-level cache, so edits to
// the data file are picked up without restarting the server.
// safeDataPath() confines the resolved path to DATA_DIR, so `id` cannot escape it.
async function loadLocalBoneset(id) {
  const file = safeDataPath(`final_${id}.json`);
  const raw = await fs.readFile(file, "utf8");
  return JSON.parse(raw);
}
|
|
||
| function findNodeById(boneset, id) { | ||
|
|
@@ -183,64 +91,79 @@ app.get("/", (_req, res) => { | |
| res.json({ message: "Welcome to the Boneset API (GitHub-Integrated)" }); | ||
| }); | ||
|
|
||
// Merged dropdown data: Pelvis from DataPelvis raw, Skull from branch-aware
// DataSkull raw. One clear flow — fetch both rosters, build `bonesets` once,
// expand pelvis bone files, append skull bones/subbones, respond.
app.get("/combined-data", async (_req, res) => {
  try {
    const [pelvis, skull] = await Promise.all([
      fetchJSON(BONESET_JSON_URL), // DataPelvis/boneset/bony_pelvis.json
      fetchJSON(SKULL_JSON_URL),   // DataSkull/boneset/skull.json (branch-aware)
    ]);
    if (!pelvis || !skull) return res.status(500).json({ error: "Failed to load data" });

    const bonesets = [
      { id: pelvis.id, name: pelvis.name },
      { id: skull.id, name: skull.name },
    ];
    const bones = [];
    const subbones = [];

    // Pelvis: each bone lives in its own JSON under DataPelvis/bones/.
    // Fetch them in parallel instead of one await per iteration.
    const pelvisBones = await Promise.all(
      (pelvis.bones || []).map((boneId) => fetchJSON(`${BONES_DIR_URL}${boneId}.json`))
    );
    for (const boneData of pelvisBones) {
      if (!boneData) continue; // fetchJSON returns null on failure; skip that bone
      bones.push({ id: boneData.id, name: boneData.name, boneset: pelvis.id });
      (boneData.subBones || []).forEach((subId) => {
        subbones.push({ id: subId, name: subId.replace(/_/g, " "), bone: boneData.id });
      });
    }

    // Skull: bones & subbones are already inlined in the master skull.json.
    for (const b of skull.bones || []) {
      bones.push({ id: b.id, name: b.name, boneset: skull.id });
      for (const sb of b.subbones || []) {
        subbones.push({ id: sb.id, name: sb.name, bone: b.id });
      }
    }

    res.json({ bonesets, bones, subbones });
  } catch (error) {
    console.error("Error fetching combined data:", error.message);
    res.status(500).json({ error: "Internal Server Error" });
  }
});
|
|
||
// Return description HTML (Skull fetched from GitHub; Pelvis from the local
// merged dev file). Responds with an HTMX-friendly <li> fragment; empty body
// when the ids fail validation or the bone is unknown.
app.get("/api/description", bonesetLimiter, async (req, res) => {
  const boneId = String(req.query.boneId || "");
  const bonesetId = String(req.query.bonesetId || "bony_pelvis");

  // Allowlist-style validation: bone ids are lowercase snake_case and the
  // boneset must be one we serve.
  if (!/^[a-z0-9_]+$/.test(boneId) || !ALLOWED_BONESETS.has(bonesetId)) {
    return res.type("text/html").send("");
  }

  try {
    const set = bonesetId === "skull"
      ? await fetchJSON(SKULL_JSON_URL)    // GitHub (no local dependency)
      : await loadLocalBoneset(bonesetId); // local file for pelvis in dev

    const node = findNodeById(set, boneId);
    if (!node) return res.type("text/html").send("");

    const name = node.name || boneId.replace(/_/g, " ");
    const lines = Array.isArray(node.description) ? node.description : [];

    // HTMX expects an <li> list fragment. Render each description line
    // exactly once — the previous version appended them in two consecutive
    // loops, duplicating every item.
    let html = `<li><strong>${escapeHtml(name)}</strong></li>`;
    for (const line of lines) html += `<li>${escapeHtml(line)}</li>`;
    res.type("text/html").send(html);
  } catch (err) {
    console.error("description error:", err);
    res.type("text/html").send("<li>Description not available.</li>");
  }
});
|
|
||
| // Safe path + allowlist + rate limit | ||
| // Dev helper: serve local merged JSONs (if present) | ||
| app.get("/api/boneset/:bonesetId", bonesetLimiter, async (req, res) => { | ||
| const { bonesetId } = req.params; | ||
|
|
||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. /api/description renders lines twice: the code appends the description lines in one loop, then immediately appends them again in a second loop, so every description produces duplicated (li) items. |
||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The route re-declares bonesets and mixes the old pelvis-only logic with the new flow: you initialize bonesets twice, and you loop over bonesetData.bones while also fetching pelvis again inside that loop. This will either throw or produce wrong results. Clean this up into one clear flow: build the pelvis entries, then the skull entries, then return the merged result.