-
Notifications
You must be signed in to change notification settings - Fork 13
Submitting #132
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Submitting #132
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -103,7 +103,6 @@ | |
|
|
||
| app.use(cors()); | ||
|
|
||
| // ---- Existing GitHub sources used only by /combined-data (unchanged) ---- | ||
| const GITHUB_REPO = "https://raw.githubusercontent.com/oss-slu/DigitalBonesBox/data/DataPelvis/"; | ||
| const BONESET_JSON_URL = `${GITHUB_REPO}boneset/bony_pelvis.json`; | ||
| const BONES_DIR_URL = `${GITHUB_REPO}bones/`; | ||
|
|
@@ -113,154 +112,202 @@ | |
|
|
||
| // ---- Simple rate limiter for FS-backed endpoints ---- | ||
| const bonesetLimiter = rateLimit({ | ||
| windowMs: 60 * 1000, // 1 minute | ||
| max: 60, // 60 requests / min / IP | ||
| standardHeaders: true, | ||
| legacyHeaders: false, | ||
| windowMs: 60 * 1000, // 1 minute | ||
| max: 60, // 60 requests / min / IP | ||
| standardHeaders: true, | ||
| legacyHeaders: false, | ||
| }); | ||
|
|
||
| // ---- Only allow bonesets we ship locally right now ---- | ||
| const ALLOWED_BONESETS = new Set(["bony_pelvis"]); | ||
|
|
||
| // ---- Helpers ---- | ||
| async function fetchJSON(url) { | ||
| try { | ||
| const response = await axios.get(url, { timeout: 10_000 }); | ||
| return response.data; | ||
| } catch (error) { | ||
| console.error(`Failed to fetch ${url}:`, error.message); | ||
| return null; | ||
| } | ||
| try { | ||
| const response = await axios.get(url, { timeout: 10_000 }); | ||
| return response.data; | ||
| } catch (error) { | ||
| console.error(`Failed to fetch ${url}:`, error.message); | ||
| return null; | ||
| } | ||
| } | ||
|
|
||
| // Ensure any resolved path stays inside DATA_DIR | ||
| function safeDataPath(fileName) { | ||
| const base = path.resolve(DATA_DIR); | ||
| const candidate = path.resolve(DATA_DIR, fileName); | ||
| if (!candidate.startsWith(base + path.sep)) { | ||
| const err = new Error("Invalid path"); | ||
| err.code = "EINVAL"; | ||
| throw err; | ||
| } | ||
| return candidate; | ||
| const base = path.resolve(DATA_DIR); | ||
| const candidate = path.resolve(DATA_DIR, fileName); | ||
| if (!candidate.startsWith(base + path.sep)) { | ||
| const err = new Error("Invalid path"); | ||
| err.code = "EINVAL"; | ||
| throw err; | ||
| } | ||
| return candidate; | ||
| } | ||
|
|
||
| // Tiny HTML escape (double-quotes everywhere for ESLint) | ||
| function escapeHtml(str = "") { | ||
| return String(str).replace(/[&<>"']/g, (c) => ({ | ||
| "&": "&", | ||
| "<": "<", | ||
| ">": ">", | ||
| "\"": """, | ||
| "'": "'", | ||
| })[c]); | ||
| return String(str).replace(/[&<>"']/g, (c) => ({ | ||
| "&": "&", | ||
| "<": "<", | ||
| ">": ">", | ||
| "\"": """, | ||
| "'": "'", | ||
| })[c]); | ||
| } | ||
|
|
||
| // Cache the merged boneset for fast description lookups | ||
| let cachedBoneset = null; | ||
| async function loadBoneset() { | ||
| if (cachedBoneset) return cachedBoneset; | ||
| const file = safeDataPath("final_bony_pelvis.json"); | ||
| const raw = await fs.readFile(file, "utf8"); | ||
| cachedBoneset = JSON.parse(raw); | ||
| return cachedBoneset; | ||
| if (cachedBoneset) return cachedBoneset; | ||
| const file = safeDataPath("final_bony_pelvis.json"); | ||
| const raw = await fs.readFile(file, "utf8"); | ||
|
||
| cachedBoneset = JSON.parse(raw); | ||
| return cachedBoneset; | ||
| } | ||
|
|
||
| function findNodeById(boneset, id) { | ||
| if (!boneset) return null; | ||
| for (const bone of boneset.bones || []) { | ||
| if (bone.id === id) return bone; | ||
| for (const sub of bone.subbones || []) { | ||
| if (sub.id === id) return sub; | ||
| if (!boneset) return null; | ||
| for (const bone of boneset.bones || []) { | ||
| if (bone.id === id) return bone; | ||
| for (const sub of bone.subbones || []) { | ||
| if (sub.id === id) return sub; | ||
| } | ||
| } | ||
| } | ||
| return null; | ||
| return null; | ||
| } | ||
|
|
||
| // ---- Routes ---- | ||
|
|
||
| app.get("/", (_req, res) => { | ||
| res.json({ message: "Welcome to the Boneset API (GitHub-Integrated)" }); | ||
| res.json({ message: "Welcome to the Boneset API (GitHub-Integrated)" }); | ||
| }); | ||
|
|
||
| // Unchanged: used by the dropdowns in the current UI | ||
| app.get("/combined-data", async (_req, res) => { | ||
| try { | ||
| const bonesetData = await fetchJSON(BONESET_JSON_URL); | ||
| if (!bonesetData) return res.status(500).json({ error: "Failed to load boneset data" }); | ||
|
|
||
| const bonesets = [{ id: bonesetData.id, name: bonesetData.name }]; | ||
| const bones = []; | ||
| const subbones = []; | ||
|
|
||
| for (const boneId of bonesetData.bones) { | ||
| const boneJsonUrl = `${BONES_DIR_URL}${boneId}.json`; | ||
| const boneData = await fetchJSON(boneJsonUrl); | ||
| if (boneData) { | ||
| bones.push({ id: boneData.id, name: boneData.name, boneset: bonesetData.id }); | ||
| (boneData.subBones || []).forEach((subBoneId) => { | ||
| subbones.push({ id: subBoneId, name: subBoneId.replace(/_/g, " "), bone: boneData.id }); | ||
| }); | ||
| } | ||
| } | ||
| try { | ||
| const bonesetData = await fetchJSON(BONESET_JSON_URL); | ||
| if (!bonesetData) return res.status(500).json({ error: "Failed to load boneset data" }); | ||
|
|
||
| res.json({ bonesets, bones, subbones }); | ||
| } catch (error) { | ||
| console.error("Error fetching combined data:", error.message); | ||
| res.status(500).json({ error: "Internal Server Error" }); | ||
| } | ||
| }); | ||
| const bonesets = [{ id: bonesetData.id, name: bonesetData.name }]; | ||
| const bones = []; | ||
| const subbones = []; | ||
|
|
||
| // Serve description from the local merged JSON (no SSRF) | ||
| app.get("/api/description", bonesetLimiter, async (req, res) => { | ||
| const boneId = String(req.query.boneId || ""); | ||
| for (const boneId of bonesetData.bones) { | ||
| const boneJsonUrl = `${BONES_DIR_URL}${boneId}.json`; | ||
| const boneData = await fetchJSON(boneJsonUrl); | ||
| if (boneData) { | ||
| bones.push({ id: boneData.id, name: boneData.name, boneset: bonesetData.id }); | ||
| (boneData.subBones || []).forEach((subBoneId) => { | ||
| subbones.push({ id: subBoneId, name: subBoneId.replace(/_/g, " "), bone: boneData.id }); | ||
| }); | ||
| } | ||
| } | ||
|
|
||
| // Basic allowlist-style validation | ||
| if (!/^[a-z0-9_]+$/.test(boneId)) { | ||
| return res.type("text/html").send(""); | ||
| } | ||
| res.json({ bonesets, bones, subbones }); | ||
| } catch (error) { | ||
| console.error("Error fetching combined data:", error.message); | ||
| res.status(500).json({ error: "Internal Server Error" }); | ||
| } | ||
| }); | ||
|
|
||
| try { | ||
| const set = await loadBoneset(); | ||
| const node = findNodeById(set, boneId); | ||
| if (!node) return res.type("text/html").send(""); | ||
| app.get("/api/description/", async (req, res) => { | ||
| const { boneId } = req.query; | ||
| if (!boneId) { | ||
| return res.send(" "); | ||
| } | ||
| const GITHUB_DESC_URL = `https://raw.githubusercontent.com/oss-slu/DigitalBonesBox/data/DataPelvis/descriptions/${boneId}_description.json`; | ||
|
|
||
| const name = node.name || boneId.replace(/_/g, " "); | ||
| const lines = Array.isArray(node.description) ? node.description : []; | ||
| try { | ||
| const response = await axios.get(GITHUB_DESC_URL); | ||
| const descriptionData = response.data; | ||
|
|
||
| // HTMX expects an <li> list fragment | ||
| let html = `<li><strong>${escapeHtml(name)}</strong></li>`; | ||
| for (const line of lines) { | ||
| html += `<li>${escapeHtml(line)}</li>`; | ||
| let html = `<li><strong>${descriptionData.name}</strong></li>`; | ||
| descriptionData.description.forEach(point => { | ||
| html += `<li>${point}</li>`; | ||
| }); | ||
| res.send(html); | ||
|
|
||
| } catch (error) { | ||
| res.send("<li>Description not available.</li>"); | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Help Notes |
||
| } | ||
| res.type("text/html").send(html); | ||
| } catch (err) { | ||
| console.error("description error:", err); | ||
| res.type("text/html").send("<li>Description not available.</li>"); | ||
| } | ||
| }); | ||
|
|
||
| // Safe path + allowlist + rate limit | ||
| app.get("/api/boneset/:bonesetId", bonesetLimiter, async (req, res) => { | ||
| const { bonesetId } = req.params; | ||
|
|
||
| if (!ALLOWED_BONESETS.has(bonesetId)) { | ||
| return res.status(404).json({ error: `Boneset '${bonesetId}' not found` }); | ||
| } | ||
|
|
||
| try { | ||
| const filePath = safeDataPath(`final_${bonesetId}.json`); | ||
| const raw = await fs.readFile(filePath, "utf8"); | ||
| res.type("application/json").send(raw); | ||
| } catch (err) { | ||
| if (err.code === "ENOENT") { | ||
| return res.status(404).json({ error: `Boneset '${bonesetId}' not found` }); | ||
| app.get("/api/search", async (req, res) => { | ||
| const query = req.query.q; | ||
|
|
||
| if (!query || query.trim() === "") { | ||
| return res.send("<li>Enter a search term</li>"); | ||
| } | ||
|
|
||
| const searchTerm = query.toLowerCase().trim(); | ||
|
|
||
| try { | ||
| const bonesetData = await fetchJSON(BONESET_JSON_URL); | ||
| if (!bonesetData) { | ||
| return res.send("<li>No data available</li>"); | ||
| } | ||
|
|
||
| const results = []; | ||
|
|
||
| // Search boneset name | ||
| if (bonesetData.name && bonesetData.name.toLowerCase().includes(searchTerm)) { | ||
| results.push({ | ||
| type: "boneset", | ||
| id: bonesetData.id, | ||
| name: bonesetData.name | ||
| }); | ||
| } | ||
|
|
||
| // Search through bones | ||
| if (bonesetData.bones) { | ||
| for (const boneId of bonesetData.bones) { | ||
| const boneData = await fetchJSON(`${BONES_DIR_URL}${boneId}.json`); | ||
|
|
||
| if (boneData) { | ||
| // Search bone name | ||
| if (boneData.name && boneData.name.toLowerCase().includes(searchTerm)) { | ||
| results.push({ | ||
| type: "bone", | ||
| id: boneData.id, | ||
| name: boneData.name | ||
| }); | ||
| } | ||
|
|
||
| // Search sub-bones | ||
| if (boneData.subBones) { | ||
| for (const subBoneId of boneData.subBones) { | ||
| const subBoneName = subBoneId.replace(/_/g, " "); | ||
| if (subBoneName.toLowerCase().includes(searchTerm)) { | ||
| results.push({ | ||
| type: "subbone", | ||
| id: subBoneId, | ||
| name: subBoneName, | ||
| parentBone: boneData.id | ||
| }); | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
|
|
||
| // Format results as HTML | ||
| if (results.length === 0) { | ||
| return res.send("<li>No results found</li>"); | ||
| } | ||
|
|
||
| let html = ""; | ||
| results.forEach(result => { | ||
| const escapedName = result.name.replace(/"/g, "&quot;").replace(/'/g, "&#39;"); | ||
| html += `<li class="search-result" data-type="${result.type}" data-id="${result.id}">${escapedName} <small>(${result.type})</small></li>`; | ||
| }); | ||
|
|
||
| res.send(html); | ||
|
|
||
| } catch (error) { | ||
| console.error("Search error:", error); | ||
| res.status(500).send("<li>Search error occurred</li>"); | ||
| } | ||
| console.error("Error reading boneset file:", err); | ||
| res.status(500).json({ error: "Internal Server Error" }); | ||
| } | ||
| }); | ||
|
|
||
| // Only one app.listen() at the very end | ||
| app.listen(PORT, () => { | ||
| console.log(`🚀 Server running on http://127.0.0.1:${PORT}`); | ||
| }); | ||
| console.log(`🚀 Server running on http://127.0.0.1:${PORT}`); | ||
| }); | ||
Check failure
Code scanning / CodeQL
Server-side request forgery Critical
Copilot Autofix
AI 5 months ago
To fix the SSRF vulnerability, restrict what values `boneId` from the query can take before using it to construct the URL. Accept `boneId` values from an explicit allow-list of valid bone IDs, or, if that's not available, use pattern validation that strictly matches permissible formats (e.g., letters, numbers, underscores only, and no traversal characters).
Validate `boneId` prior to constructing the URL (line 177), for example against the pattern /^[a-zA-Z0-9_]+$/. Additionally, escape or validate `boneId` in the error handling and/or returned data (although that is less crucial if the above restriction is applied).
You'll need to: validate `boneId` (e.g., using a regular expression) and reject invalid values before using `boneId`. All changes should be made within the boneset-api/server.js file, modifying the /api/description/ endpoint logic.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
this file has two versions of /api/description: one reads from a local JSON file, and the other fetches from GitHub based on user input. Having both will cause bugs and is unsafe. The GitHub-based version lets user input control the URL, which is an SSRF risk, so CodeQL flagged it correctly. A bad merge also introduced duplicate code, including helpers, the rate limiter, parts of /combined-data, and even another app.listen, along with extra braces and mixed blocks that can break the app. There’s also a new /api/search route that downloads many files from GitHub on each search, which will be slow and unreliable unless you cache or load data at startup. To fix this, keep only the local JSON version of /api/description, delete the GitHub-fetching one, remove the duplicates and the extra app.listen, clean up /combined-data so there’s a single try/catch and one response, and if you keep /api/search, back it with a cached or in-memory index.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Removed unsafe GitHub-based endpoint causing SSRF risk.
Kept only the secure local JSON version with strict input validation.
Deleted duplicate endpoints, helper functions, and rate limiters.
Fixed redundant try/catch blocks and extra app.listen().
Added startup-time cache for faster searches.
Reduced external API calls from many per search to just one at startup.
Simplified /combined-data logic and cleaned malformed JSON.
Standardized error handling across the app.