3 changes: 2 additions & 1 deletion src/js/msp.js
@@ -122,7 +122,8 @@ const MSP = {
this.state = this.decoder_states.DIRECTION_V2;
break;
default:
console.log(`Unknown protocol char ${String.fromCharCode(chunk)}`);
// Don't log unknown protocol characters: the data is binary and only clutters the console.
// console.log(`Unknown protocol char ${String.fromCharCode(chunk)}`);
this.state = this.decoder_states.IDLE;
}
break;
186 changes: 65 additions & 121 deletions src/js/msp/MSPHelper.js
@@ -2449,148 +2449,92 @@ MspHelper.prototype.setRawRx = function (channels) {
};

/**
* Send a request to read a block of data from the dataflash at the given address and pass that address and a dataview
* of the returned data to the given callback (or null for the data if an error occured).
* Send a request to read a block of data from the dataflash at the given address
* and pass that address, a DataView of the returned data, and bytesCompressed
* to the given callback.
*/
MspHelper.prototype.dataflashRead = function (address, blockSize, onDataCallback) {
let outData = [address & 0xff, (address >> 8) & 0xff, (address >> 16) & 0xff, (address >> 24) & 0xff];

outData = outData.concat([blockSize & 0xff, (blockSize >> 8) & 0xff]);
let outData = [
address & 0xff,
(address >> 8) & 0xff,
(address >> 16) & 0xff,
(address >> 24) & 0xff,
blockSize & 0xff,
(blockSize >> 8) & 0xff,
1, // allow compression
];
Comment on lines +2457 to +2465

🛠️ Refactor suggestion

🧩 Analysis chain

Gate the new request format by API version (avoid breaking older FCs).

You’ve changed MSP_DATAFLASH_READ to include blockSize and a compression flag. Some firmware builds expect only the 4‑byte address. Add an API guard and fall back to the legacy 4‑byte request for older targets.

-    let outData = [
-        address & 0xff,
-        (address >> 8) & 0xff,
-        (address >> 16) & 0xff,
-        (address >> 24) & 0xff,
-        blockSize & 0xff,
-        (blockSize >> 8) & 0xff,
-        1, // allow compression
-    ];
+    const supportsLenAndCompression = semver.gte(FC.CONFIG.apiVersion, API_VERSION_1_47); // confirm required version
+    let outData = [
+        address & 0xff,
+        (address >> 8) & 0xff,
+        (address >> 16) & 0xff,
+        (address >> 24) & 0xff,
+    ];
+    if (supportsLenAndCompression) {
+        outData.push(
+            blockSize & 0xff,
+            (blockSize >> 8) & 0xff,
+            1, // allow compression
+        );
+    }

If you’re unsure of the minimal API version that accepts (addr, len, compressFlag), please confirm before merging.


🏁 Script executed:

#!/bin/bash
set -euo pipefail

echo ">>> rg for MSP_DATAFLASH_READ"
rg -n --hidden "MSP_DATAFLASH_READ" || true

echo -e "\n>>> locate MSPHelper.js"
rg -n --hidden "MSPHelper.js" || true

echo -e "\n>>> print src/js/msp/MSPHelper.js around 2440-2475 if present"
if [ -f "src/js/msp/MSPHelper.js" ]; then
  sed -n '2438,2475p' src/js/msp/MSPHelper.js || true
else
  echo "file not found: src/js/msp/MSPHelper.js"
fi

echo -e "\n>>> search for API_VERSION_* and apiVersion"
rg -n --hidden "API_VERSION_" || true
rg -n --hidden "\bapiVersion\b" || true

echo -e "\n>>> search for FC.CONFIG"
rg -n --hidden "FC\.CONFIG" || true

echo -e "\n>>> search for semver.gte usage"
rg -n --hidden "semver\.gte" || true

echo -e "\n>>> search for dataflash references"
rg -n --hidden "dataflash|DATAFLASH" || true

Length of output: 148916


🏁 Script executed:

#!/bin/bash
set -euo pipefail
echo ">>> show MSPHelper.js 1408-1465"
sed -n '1408,1465p' src/js/msp/MSPHelper.js

Length of output: 2817


🏁 Script executed:

#!/bin/bash
set -euo pipefail
echo ">>> show test/js/msp/MSPHelper.test.js"
sed -n '1,240p' test/js/msp/MSPHelper.test.js || true

Length of output: 7050


Gate MSP_DATAFLASH_READ format behind an API-version check (avoid breaking older FCs).

New format adds blockSize + compression flag; older firmware can expect only the 4‑byte address — guard and fall back to the legacy 4‑byte request.

Location: src/js/msp/MSPHelper.js — dataflashRead (around lines 2456–2475)

-    let outData = [
-        address & 0xff,
-        (address >> 8) & 0xff,
-        (address >> 16) & 0xff,
-        (address >> 24) & 0xff,
-        blockSize & 0xff,
-        (blockSize >> 8) & 0xff,
-        1, // allow compression
-    ];
+    const supportsLenAndCompression = semver.gte(FC.CONFIG.apiVersion, API_VERSION_1_47); // confirm required version
+    let outData = [
+        address & 0xff,
+        (address >> 8) & 0xff,
+        (address >> 16) & 0xff,
+        (address >> 24) & 0xff,
+    ];
+    if (supportsLenAndCompression) {
+        outData.push(
+            blockSize & 0xff,
+            (blockSize >> 8) & 0xff,
+            1, // allow compression
+        );
+    }

Confirm the minimal API version that accepts (addr, len, compressFlag) before merging.

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In src/js/msp/MSPHelper.js around lines 2456–2475 the dataflashRead currently
always builds a 7‑byte request (addr + blockSize + compression flag) which will
break older flight controllers that expect only a 4‑byte address; change
dataflashRead to gate the new format behind an API/version check and fall back
to the legacy 4‑byte request when the FC reports an older API: detect the
MSP/API version available (or use the agreed minimal API constant — replace the
placeholder with the confirmed minimal version before merging), and if version
>= minimal use the current 7‑byte layout (address low->high, blockSize
low->high, compression flag) otherwise send only the 4 bytes of address; ensure
the code paths are explicit and documented with a TODO to confirm and set the
exact minimal API version.
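
Pulling the suggestion together, here is a self-contained sketch of the gated request builder. `buildDataflashReadRequest` is a hypothetical helper name; `semver`, `FC.CONFIG.apiVersion`, and `API_VERSION_1_47` are assumed to be available in MSPHelper.js, and 1.47 as the minimal API version accepting (addr, len, compressFlag) is an unconfirmed assumption.

// Sketch only — the API cutoff and helper name are assumptions, not the merged implementation.
function buildDataflashReadRequest(address, blockSize) {
    const outData = [
        address & 0xff,
        (address >> 8) & 0xff,
        (address >> 16) & 0xff,
        (address >> 24) & 0xff,
    ];

    if (semver.gte(FC.CONFIG.apiVersion, API_VERSION_1_47)) {
        // Newer firmware: append the requested length and the "allow compression" flag.
        outData.push(blockSize & 0xff, (blockSize >> 8) & 0xff, 1);
    }

    return outData;
}

dataflashRead would then pass the result straight to mspObj.send_message(MSPCodes.MSP_DATAFLASH_READ, ...), keeping the legacy 4-byte request for older targets.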


// Allow compression
outData = outData.concat([1]);
const mspObj = this.msp || (typeof MSP !== "undefined" ? MSP : null);
if (!mspObj) {
console.error("MSP object not found, cannot read dataflash.");
onDataCallback(address, null, 0);
return;
}

MSP.send_message(
mspObj.send_message(
MSPCodes.MSP_DATAFLASH_READ,
outData,
false,
function (response) {
if (!response.crcError) {
const chunkAddress = response.data.readU32();
let payloadView = null;
let bytesCompressed = 0;

if (response && response.data) {
const headerSize = 7;
const chunkAddress = response.data.readU32();
const dataSize = response.data.readU16();
const dataCompressionType = response.data.readU8();

// Verify that the address of the memory returned matches what the caller asked for and there was not a CRC error
if (chunkAddress == address) {
/* Strip that address off the front of the reply and deliver it separately so the caller doesn't have to
* figure out the reply format:
*/
if (dataCompressionType == 0) {
onDataCallback(
address,
new DataView(response.data.buffer, response.data.byteOffset + headerSize, dataSize),
);
} else if (dataCompressionType == 1) {
// Read compressed char count to avoid decoding stray bit sequences as bytes
const compressedCharCount = response.data.readU16();

// Compressed format uses 2 additional bytes as a pseudo-header to denote the number of uncompressed bytes
const compressedArray = new Uint8Array(
if (chunkAddress === address) {
try {
if (dataCompressionType === 0) {
payloadView = new DataView(
response.data.buffer,
response.data.byteOffset + headerSize,
dataSize,
);
bytesCompressed = dataSize; // treat uncompressed as same size
} else if (dataCompressionType === 1) {
const compressedCharCount = response.data.readU16();
const compressedArray = new Uint8Array(
response.data.buffer,
response.data.byteOffset + headerSize + 2,
dataSize - 2,
);
const decompressedArray = huffmanDecodeBuf(
compressedArray,
compressedCharCount,
defaultHuffmanTree,
defaultHuffmanLenIndex,
);
payloadView = new DataView(decompressedArray.buffer);
bytesCompressed = compressedCharCount;
}
Comment on lines +2498 to +2512

⚠️ Potential issue

bytesCompressed is wrong for compressed chunks (breaks compression stats).

For type 1 (Huffman), bytesCompressed should reflect on‑wire payload size, not the uncompressed char count. This skews the mean compression factor.

-                        } else if (dataCompressionType === 1) {
-                            const compressedCharCount = response.data.readU16();
+                        } else if (dataCompressionType === 1) {
+                            const uncompressedLen = response.data.readU16();
                             const compressedArray = new Uint8Array(
                                 response.data.buffer,
                                 response.data.byteOffset + headerSize + 2,
-                                dataSize - 2,
+                                dataSize - 2,
                             );
                             const decompressedArray = huffmanDecodeBuf(
                                 compressedArray,
-                                compressedCharCount,
+                                uncompressedLen,
                                 defaultHuffmanTree,
                                 defaultHuffmanLenIndex,
                             );
-                            payloadView = new DataView(decompressedArray.buffer);
-                            bytesCompressed = compressedCharCount;
+                            payloadView = new DataView(decompressedArray.buffer, 0, uncompressedLen);
+                            // Count the transmitted payload bytes (includes the 2-byte uncompressedLen header)
+                            bytesCompressed = dataSize;
                         }
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
const compressedCharCount = response.data.readU16();
const compressedArray = new Uint8Array(
response.data.buffer,
response.data.byteOffset + headerSize + 2,
dataSize - 2,
);
const decompressedArray = huffmanDecodeBuf(
compressedArray,
compressedCharCount,
defaultHuffmanTree,
defaultHuffmanLenIndex,
);
payloadView = new DataView(decompressedArray.buffer);
bytesCompressed = compressedCharCount;
}
const uncompressedLen = response.data.readU16();
const compressedArray = new Uint8Array(
response.data.buffer,
response.data.byteOffset + headerSize + 2,
dataSize - 2,
);
const decompressedArray = huffmanDecodeBuf(
compressedArray,
uncompressedLen,
defaultHuffmanTree,
defaultHuffmanLenIndex,
);
payloadView = new DataView(decompressedArray.buffer, 0, uncompressedLen);
// Count the transmitted payload bytes (includes the 2-byte uncompressedLen header)
bytesCompressed = dataSize;
}
🤖 Prompt for AI Agents
In src/js/msp/MSPHelper.js around lines 2498 to 2512, bytesCompressed is
incorrectly set to the uncompressed character count for Huffman-compressed
chunks; update the assignment so bytesCompressed reflects the on-wire compressed
payload size (use the compressedArray length or dataSize - 2) after constructing
compressedArray/decompressedArray so compression stats use the actual
transmitted byte count.
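
To make the impact on the stats concrete, a tiny illustrative sketch of the mean compression factor computed in onboard_logging.js; the byte counts below are invented for the example, not measurements.

// Illustrative only: suppose a 4096-byte chunk arrived as 1500 on-wire bytes.
const totalBytes = 4096;

// Counting the uncompressed char count as "compressed" bytes pins the factor near 1:
console.log((totalBytes / 4096).toFixed(2)); // "1.00"

// Counting the transmitted payload (dataSize) reports the actual gain:
console.log((totalBytes / 1500).toFixed(2)); // "2.73"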

} catch (e) {
console.warn("Decompression or read failed, delivering raw data anyway");
payloadView = new DataView(
response.data.buffer,
response.data.byteOffset + headerSize + 2,
dataSize - 2,
);
const decompressedArray = huffmanDecodeBuf(
compressedArray,
compressedCharCount,
defaultHuffmanTree,
defaultHuffmanLenIndex,
response.data.byteOffset + headerSize,
dataSize,
);

onDataCallback(address, new DataView(decompressedArray.buffer), dataSize);
bytesCompressed = dataSize;
}
} else {
// Report address error
console.log(`Expected address ${address} but received ${chunkAddress} - retrying`);
onDataCallback(address, null); // returning null to the callback forces a retry
console.log(`Expected address ${address} but received ${chunkAddress}`);
}
} else {
// Report crc error
console.log(`CRC error for address ${address} - retrying`);
onDataCallback(address, null); // returning null to the callback forces a retry
}
},
true,
);
};

MspHelper.prototype.sendServoConfigurations = function (onCompleteCallback) {
let nextFunction = send_next_servo_configuration;
onDataCallback(address, payloadView, bytesCompressed);

let servoIndex = 0;

if (FC.SERVO_CONFIG.length == 0) {
onCompleteCallback();
} else {
nextFunction();
}

function send_next_servo_configuration() {
const buffer = [];

// send one at a time, with index

const servoConfiguration = FC.SERVO_CONFIG[servoIndex];

buffer
.push8(servoIndex)
.push16(servoConfiguration.min)
.push16(servoConfiguration.max)
.push16(servoConfiguration.middle)
.push8(servoConfiguration.rate);

let out = servoConfiguration.indexOfChannelToForward;
if (out == undefined) {
out = 255; // Cleanflight defines "CHANNEL_FORWARDING_DISABLED" as "(uint8_t)0xFF"
}
buffer.push8(out).push32(servoConfiguration.reversedInputSources);

// prepare for next iteration
servoIndex++;
if (servoIndex == FC.SERVO_CONFIG.length) {
nextFunction = onCompleteCallback;
}

MSP.send_message(MSPCodes.MSP_SET_SERVO_CONFIGURATION, buffer, false, nextFunction);
}
};

MspHelper.prototype.sendModeRanges = function (onCompleteCallback) {
let nextFunction = send_next_mode_range;

let modeRangeIndex = 0;

if (FC.MODE_RANGES.length == 0) {
onCompleteCallback();
} else {
send_next_mode_range();
}

function send_next_mode_range() {
const modeRange = FC.MODE_RANGES[modeRangeIndex];
const buffer = [];

buffer
.push8(modeRangeIndex)
.push8(modeRange.id)
.push8(modeRange.auxChannelIndex)
.push8((modeRange.range.start - 900) / 25)
.push8((modeRange.range.end - 900) / 25);

const modeRangeExtra = FC.MODE_RANGES_EXTRA[modeRangeIndex];

buffer.push8(modeRangeExtra.modeLogic).push8(modeRangeExtra.linkedTo);

// prepare for next iteration
modeRangeIndex++;
if (modeRangeIndex == FC.MODE_RANGES.length) {
nextFunction = onCompleteCallback;
}
MSP.send_message(MSPCodes.MSP_SET_MODE_RANGE, buffer, false, nextFunction);
}
};
if (!response || response.crcError) {
console.log(`CRC error or missing data at address ${address} - delivering whatever we got`);
} else if (payloadView) {
console.log(`Block at ${address} received (${payloadView.byteLength} bytes)`);
}
},
true,
); // end of send_message
}; // end of dataflashRead

MspHelper.prototype.sendAdjustmentRanges = function (onCompleteCallback) {
let nextFunction = send_next_adjustment_range;
77 changes: 39 additions & 38 deletions src/js/tabs/onboard_logging.js
@@ -443,14 +443,16 @@ onboard_logging.initialize = function (callback) {
console.log(
`Received ${totalBytes} bytes in ${totalTime.toFixed(2)}s (${(totalBytes / totalTime / 1024).toFixed(
2,
)}kB / s) with block size ${self.blockSize}.`,
)} kB / s) with block size ${self.blockSize}.`,
);
if (!isNaN(totalBytesCompressed)) {

if (typeof totalBytesCompressed === "number" && totalBytesCompressed > 0) {
const meanCompressionFactor = totalBytes / totalBytesCompressed;
console.log(
"Compressed into",
totalBytesCompressed,
"bytes with mean compression factor of",
totalBytes / totalBytesCompressed,
meanCompressionFactor.toFixed(2),
);
}

@@ -490,48 +490,47 @@ onboard_logging.initialize = function (callback) {
show_saving_dialog();

function onChunkRead(chunkAddress, chunkDataView, bytesCompressed) {
if (chunkDataView !== null) {
// Did we receive any data?
if (chunkDataView.byteLength > 0) {
nextAddress += chunkDataView.byteLength;
if (isNaN(bytesCompressed) || isNaN(totalBytesCompressed)) {
totalBytesCompressed = null;
} else {
totalBytesCompressed += bytesCompressed;
}

$(".dataflash-saving progress").attr("value", (nextAddress / maxBytes) * 100);

const blob = new Blob([chunkDataView]);
FileSystem.writeChunck(openedFile, blob).then(() => {
if (saveCancelled || nextAddress >= maxBytes) {
if (saveCancelled) {
dismiss_saving_dialog();
} else {
mark_saving_dialog_done(startTime, nextAddress, totalBytesCompressed);
}
FileSystem.closeFile(openedFile);
} else {
if (!self.writeError) {
mspHelper.dataflashRead(nextAddress, self.blockSize, onChunkRead);
} else {
dismiss_saving_dialog();
FileSystem.closeFile(openedFile);
}
}
});
} else {
// A zero-byte block indicates end-of-file, so we're done
if (chunkDataView && chunkDataView.byteLength > 0) {
// Always write non-empty data, even if CRC mismatch
const blob = new Blob([chunkDataView]);
FileSystem.writeChunck(openedFile, blob);

nextAddress += chunkDataView.byteLength;

// Track total compressed bytes, if provided
if (typeof bytesCompressed === "number") {
if (totalBytesCompressed == null) totalBytesCompressed = 0; // initialize if previously unknown
totalBytesCompressed += bytesCompressed;
}

$(".dataflash-saving progress").attr("value", (nextAddress / maxBytes) * 100);

if (saveCancelled || nextAddress >= maxBytes) {
mark_saving_dialog_done(startTime, nextAddress, totalBytesCompressed);
FileSystem.closeFile(openedFile);
} else {
mspHelper.dataflashRead(nextAddress, self.blockSize, onChunkRead);
}
} else if (chunkDataView && chunkDataView.byteLength === 0) {
// Zero-length block → EOF
mark_saving_dialog_done(startTime, nextAddress, totalBytesCompressed);
FileSystem.closeFile(openedFile);
} else {
// There was an error with the received block (address didn't match the one we asked for), retry
mspHelper.dataflashRead(nextAddress, self.blockSize, onChunkRead);
// Null block → skip ahead (hard error)
console.warn(`Skipping null block at address ${nextAddress}`);
nextAddress += self.blockSize;

if (nextAddress >= maxBytes) {
mark_saving_dialog_done(startTime, nextAddress, totalBytesCompressed);
FileSystem.closeFile(openedFile);
} else {
mspHelper.dataflashRead(nextAddress, self.blockSize, onChunkRead);
}
}
}

const startTime = new Date().getTime();
const startTime = new Date().getTime(); // Start timestamp

// Fetch the initial block
FileSystem.openFile(fileWriter).then((file) => {
openedFile = file;