diff --git a/app/client/index.html b/app/client/index.html
index 22ef8e813f..47fdff3ccb 100644
--- a/app/client/index.html
+++ b/app/client/index.html
@@ -5,7 +5,7 @@
-
+
diff --git a/app/client/manifest.json b/app/client/manifest.json
index 9c3c01843c..9ecf5d8cb1 100644
--- a/app/client/manifest.json
+++ b/app/client/manifest.json
@@ -1,7 +1,7 @@
{
- "short_name": "Rowing Monitor",
- "name": "Open Rowing Monitor",
- "description": "A rowing monitor for rowing exercise machines",
+ "short_name": "ORM",
+ "name": "OpenRowingMonitor",
+ "description": "A rowing monitor for indoor rowing machines",
"icons": [
{
"src": "icon.png",
diff --git a/app/client/store/dashboardMetrics.js b/app/client/store/dashboardMetrics.js
index 648ea43780..de019120ec 100644
--- a/app/client/store/dashboardMetrics.js
+++ b/app/client/store/dashboardMetrics.js
@@ -48,13 +48,13 @@ export const DASHBOARD_METRICS = {
`
},
- totalStk: { displayName: 'Total strokes', size: 1, template: (metrics, config) => simpleMetricFactory(metrics?.totalNumberOfStrokes, 'stk', config?.guiConfigs?.showIcons ? iconPaddle : '') },
+ totalStk: { displayName: 'Total strokes', size: 1, template: (metrics, config) => simpleMetricFactory(metrics?.interval?.numberOfStrokes, 'stk', config?.guiConfigs?.showIcons ? iconPaddle : '') },
calories: {
displayName: 'Calories',
size: 1,
template: (metrics, config) => {
- const calories = metrics?.interval?.type === 'Calories' ? Math.max(metrics?.interval?.TargetCalories - metrics?.interval?.Calories, 0) : metrics?.totalCalories
+ const calories = metrics?.interval?.type === 'calories' ? Math.max(metrics?.interval?.calories?.toEnd, 0) : Math.max(metrics?.interval?.calories?.sinceStart, 0)
return simpleMetricFactory(formatNumber(calories ?? 0), 'kcal', config?.guiConfigs?.showIcons ? iconFire : '')
}
diff --git a/app/engine/Flywheel.js b/app/engine/Flywheel.js
index e2024e3c95..c30aa12b56 100644
--- a/app/engine/Flywheel.js
+++ b/app/engine/Flywheel.js
@@ -1,49 +1,72 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-
- This models the flywheel with all of its attributes, which we can also test for being powered
-
- All times and distances are defined as being before the beginning of the flank, as RowingEngine's metrics
- solely depend on times and angular positions before the flank (as they are to be certain to belong to a specific
- drive or recovery phase).
-
- Please note: The array contains a buffer of flankLenght measured currentDt's, BEFORE they are actually processed
-
- Please note2: This implements Linear regression to obtain the drag factor. We deliberatly DO NOT include the flank data
- as we don't know wether they will belong to a Drive or Recovery phase. So we include things which we know for certain that
- are part of a specific phase, i.e. dirtyDataPoints[flankLength], which will be eliminated from the flank
-
- The calculation of angular velocity and acceleration is based on Quadratic Regression, as the second derivative tends to be
- quite fragile when small errors are thrown in the mix. The math behind this approach can be found in https://physics.info/motion-equations/
- which is intended for simple linear motion, but the formula are identical when applied to angular distances, velocities and
- accelerations.
-*/
-
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This models the flywheel with all of its attributes, which we can also test for being powered
+ *
+ * All times and distances are defined as being before the beginning of the flank, as RowingEngine's metrics
+ * solely depend on times and angular positions before the flank (as they are to be certain to belong to a specific
+ * drive or recovery phase).
+ *
+ * The calculation of angular velocity and acceleration is based on regression analysis, as the second derivative tends to be
+ * quite fragile when small errors are thrown in the mix. The physics behind this approach can be found in https://physics.info/motion-equations/
+ * which is intended for simple linear motion, but the formula are identical when applied to angular distances, velocities and
+ * accelerations.
+ * @see {@link https://github.com/JaapvanEkris/openrowingmonitor/blob/main/docs/physics_openrowingmonitor.md#determining-the-angular-velocity-and-angular-acceleration-of-the-flywheel|this approach}
+ *
+ * Please note: The array contains a buffer of flankLength measured currentDt's, BEFORE they are actually processed
+ *
+ * Please note2: This implements Linear regression to obtain the drag factor. We deliberately DO NOT include the flank data
+ * as we don't know whether they will belong to a Drive or Recovery phase. So we include things which we know for certain that
+ * are part of a specific phase, i.e. dirtyDataPoints[flankLength], which will be eliminated from the flank
+ */
import loglevel from 'loglevel'
-import { createStreamFilter } from './utils/StreamFilter.js'
-import { createTSLinearSeries } from './utils/FullTSLinearSeries.js'
-import { createTSQuadraticSeries } from './utils/FullTSQuadraticSeries.js'
+import { createCyclicErrorFilter } from './utils/CyclicErrorFilter.js'
+import { createTSLinearSeries } from './utils/TSLinearSeries.js'
import { createWeighedSeries } from './utils/WeighedSeries.js'
+import { createMovingRegressor } from './utils/MovingWindowRegressor.js'
const log = loglevel.getLogger('RowingEngine')
+/**
+ * @param {object} rowerSettings - The rower settings configuration object
+ * @param {integer} rowerSettings.numOfImpulsesPerRevolution - Number of impulses per flywheel revolution
+ * @param {integer} rowerSettings.flankLength - Length of the flank used
+ * @param {float} rowerSettings.minimumRecoveryTime - Minimum time a recovery should last (seconds)
+ * @param {float} rowerSettings.maximumStrokeTimeBeforePause - Minimum time that has to pass after the last drive for a pause to kick in (seconds)
+ * @param {float} rowerSettings.flywheelInertia - Inertia of the flywheel
+ * @param {float} rowerSettings.dragFactor - (initial) Dragfactor
+ * @param {boolean} rowerSettings.autoAdjustDragFactor - Indicates if the Flywheel.js is allowed to automatically adjust dragfactor (false turns the filter off)
+ * @param {integer} rowerSettings.dragFactorSmoothing - Number of recoveries to be weighed in the current dragfactor
+ * @param {float} rowerSettings.minimumRecoverySlope - (initial) recovery slope
+ * @param {boolean} rowerSettings.autoAdjustRecoverySlope - Allow OpenRowingMonitor to adjust the recoverySlope based on the previous recoveries (and thus dragfactor)
+ * @param {float} rowerSettings.autoAdjustRecoverySlopeMargin - Margin to be maintained for the automatically adjusted recovery slope
+ * @param {float} rowerSettings.minimumStrokeQuality - Minimum Goodness Of Fit for a slope to be considered reliable for stroke detection
+ * @param {float} rowerSettings.sprocketRadius - Radius of the driving sprocket (centimeters)
+ * @param {float} rowerSettings.minimumForceBeforeStroke - Minimum force for the flywheel to be considered powered (Newton)
+ * @param {float} rowerSettings.systematicErrorAgressiveness - Agressiveness of the systematic error correction algorithm (0 turns the filter off)
+ * @param {float} rowerSettings.minimumTimeBetweenImpulses - minimum expected time between impulses (in seconds)
+ * @param {float} rowerSettings.maximumTimeBetweenImpulses - maximum expected time between impulses (in seconds)
+ */
export function createFlywheel (rowerSettings) {
const angularDisplacementPerImpulse = (2.0 * Math.PI) / rowerSettings.numOfImpulsesPerRevolution
const flankLength = rowerSettings.flankLength
const minimumDragFactorSamples = Math.floor(rowerSettings.minimumRecoveryTime / rowerSettings.maximumTimeBetweenImpulses)
const minimumAngularVelocity = angularDisplacementPerImpulse / rowerSettings.maximumTimeBetweenImpulses
const minimumTorqueBeforeStroke = rowerSettings.minimumForceBeforeStroke * (rowerSettings.sprocketRadius / 100)
- const currentDt = createStreamFilter(rowerSettings.smoothing, rowerSettings.maximumTimeBetweenImpulses)
+ const _angularDistance = createMovingRegressor(flankLength)
const _deltaTime = createTSLinearSeries(flankLength)
- const _angularDistance = createTSQuadraticSeries(flankLength)
const drag = createWeighedSeries(rowerSettings.dragFactorSmoothing, (rowerSettings.dragFactor / 1000000))
const recoveryDeltaTime = createTSLinearSeries()
+ const cyclicErrorFilter = createCyclicErrorFilter(rowerSettings, recoveryDeltaTime)
const strokedetectionMinimalGoodnessOfFit = rowerSettings.minimumStrokeQuality
const minimumRecoverySlope = createWeighedSeries(rowerSettings.dragFactorSmoothing, rowerSettings.minimumRecoverySlope)
- let _angularVelocityMatrix = []
- let _angularAccelerationMatrix = []
- let _deltaTimeBeforeFlank
+ let rawTime = 0
+ let rawNumberOfImpulses = 0
+ let totalTimeSpinning = 0
+ let totalNumberOfImpulses = 0
+ let _totalWork = 0
+ let _deltaTimeBeforeFlank = {}
let _angularVelocityAtBeginFlank
let _angularVelocityBeforeFlank
let _angularAccelerationAtBeginFlank
@@ -52,13 +75,13 @@ export function createFlywheel (rowerSettings) {
let _torqueBeforeFlank
let inRecoveryPhase
let maintainMetrics
- let totalNumberOfImpulses
- let totalTimeSpinning
- let currentCleanTime
- let currentRawTime
- let currentAngularDistance
reset()
+ /**
+   * @param {float} dataPoint - The length of the impulse (currentDt) in seconds
+   * @description This function is called from Rower.js each time the sensor detects an impulse. It transforms this (via the buffers) into a robust flywheel position, speed and acceleration.
+ * It also calculates dragfactor and provides the indicators for stroke detection.
+ */
/* eslint-disable max-statements -- we need to maintain a lot of metrics in the main loop, nothing we can do about that */
function pushValue (dataPoint) {
if (isNaN(dataPoint) || dataPoint < 0 || dataPoint > rowerSettings.maximumStrokeTimeBeforePause) {
@@ -83,82 +106,81 @@ export function createFlywheel (rowerSettings) {
}
}
- currentDt.push(dataPoint)
-
if (maintainMetrics && (_deltaTime.length() >= flankLength)) {
// If we maintain metrics, update the angular position, spinning time of the flywheel and the associated metrics,
// Also we nend feed the Drag calculation. We need to do this, BEFORE the array shifts, as the valueAtSeriesBeginvalue
// value before the shift is certain to be part of a specific rowing phase (i.e. Drive or Recovery), once the buffer is filled completely
totalNumberOfImpulses += 1
- _deltaTimeBeforeFlank = _deltaTime.Y.atSeriesBegin()
- totalTimeSpinning += _deltaTimeBeforeFlank
+
+ _deltaTimeBeforeFlank = cyclicErrorFilter.atSeriesBegin()
+ totalTimeSpinning += _deltaTimeBeforeFlank.clean
_angularVelocityBeforeFlank = _angularVelocityAtBeginFlank
_angularAccelerationBeforeFlank = _angularAccelerationAtBeginFlank
- _torqueBeforeFlank = _torqueAtBeginFlank
+      // As drag is recalculated at the beginning of the drive, we need to recalculate the torque
+ _torqueBeforeFlank = (rowerSettings.flywheelInertia * _angularAccelerationBeforeFlank + drag.weighedAverage() * Math.pow(_angularVelocityBeforeFlank, 2))
- // Feed the drag calculation, as we didn't reset the Semaphore in the previous cycle based on the current flank
if (inRecoveryPhase) {
- recoveryDeltaTime.push(totalTimeSpinning, _deltaTimeBeforeFlank)
+ // Feed the drag calculation, as we didn't reset the Semaphore in the previous cycle based on the current flank
+ recoveryDeltaTime.push(totalTimeSpinning, _deltaTimeBeforeFlank.clean, _deltaTimeBeforeFlank.goodnessOfFit)
+ // Feed the systematic error filter buffer
+ cyclicErrorFilter.recordRawDatapoint(totalNumberOfImpulses, totalTimeSpinning, _deltaTimeBeforeFlank.raw)
+ } else {
+ // Accumulate the energy total as we are in the drive phase
+ _totalWork += Math.max(_torqueBeforeFlank * angularDisplacementPerImpulse, 0)
+      // Process a value in the systematic error filter buffer. We need to do this slowly to prevent radical changes which might disturb the force curve etc.
+ cyclicErrorFilter.processNextRawDatapoint()
}
} else {
- _deltaTimeBeforeFlank = 0
+ _deltaTimeBeforeFlank.clean = 0
_angularVelocityBeforeFlank = 0
_angularAccelerationBeforeFlank = 0
_torqueBeforeFlank = 0
}
- // Let's feed the stroke detection algorithm
- // Please note that deltaTime MUST use dirty data to be ale to use the regression algorithms effictively (Otherwise the Goodness of Fit can't be used as a filter!)
- currentRawTime += currentDt.raw()
- currentAngularDistance += angularDisplacementPerImpulse
- _deltaTime.push(currentRawTime, currentDt.raw())
-
- // Next are the metrics that are needed for more advanced metrics, like the foce curve
- currentCleanTime += currentDt.clean()
- _angularDistance.push(currentCleanTime, currentAngularDistance)
-
- // Let's update the matrix and calculate the angular velocity and acceleration
- if (_angularVelocityMatrix.length >= flankLength) {
- // The angularVelocityMatrix has reached its maximum length
- _angularVelocityMatrix.shift()
- _angularAccelerationMatrix.shift()
- }
-
- // Let's make room for a new set of values for angular velocity and acceleration
- _angularVelocityMatrix[_angularVelocityMatrix.length] = createWeighedSeries(flankLength, 0)
- _angularAccelerationMatrix[_angularAccelerationMatrix.length] = createWeighedSeries(flankLength, 0)
-
- let i = 0
-
- while (i < _angularVelocityMatrix.length) {
- _angularVelocityMatrix[i].push(_angularDistance.firstDerivativeAtPosition(i), _angularDistance.goodnessOfFit())
- _angularAccelerationMatrix[i].push(_angularDistance.secondDerivativeAtPosition(i), _angularDistance.goodnessOfFit())
- i++
- }
+ const cleanCurrentDt = cyclicErrorFilter.applyFilter(dataPoint, totalNumberOfImpulses + flankLength)
+ rawTime += cleanCurrentDt.clean
+ rawNumberOfImpulses++
+ const currentAngularDistance = rawNumberOfImpulses * angularDisplacementPerImpulse
- _angularVelocityAtBeginFlank = _angularVelocityMatrix[0].weighedAverage()
- _angularAccelerationAtBeginFlank = _angularAccelerationMatrix[0].weighedAverage()
+ // Let's feed the stroke detection algorithm
+ _deltaTime.push(rawTime, cleanCurrentDt.clean, cleanCurrentDt.goodnessOfFit)
- // And finally calculate the torque
+    // Calculate the metrics that are needed for more advanced metrics, like the force curve
+ _angularDistance.push(rawTime, currentAngularDistance, cleanCurrentDt.goodnessOfFit)
+ _angularVelocityAtBeginFlank = _angularDistance.firstDerivative(0)
+ _angularAccelerationAtBeginFlank = _angularDistance.secondDerivative(0)
_torqueAtBeginFlank = (rowerSettings.flywheelInertia * _angularAccelerationAtBeginFlank + drag.weighedAverage() * Math.pow(_angularVelocityAtBeginFlank, 2))
}
/* eslint-enable max-statements */
+ /**
+ * @description Function to handle the start of a pause/stop based on a trigger from Rower.js
+ */
function maintainStateOnly () {
maintainMetrics = false
}
+ /**
+ * @description Function to handle the end of a pause/stop based on a trigger from Rower.js
+ */
function maintainStateAndMetrics () {
maintainMetrics = true
+ cyclicErrorFilter.resetFilterConfiguration()
}
+ /**
+ * @description Function to handle the start of the recovery phase based on a trigger from Rower.js
+ */
function markRecoveryPhaseStart () {
inRecoveryPhase = true
recoveryDeltaTime.reset()
+ cyclicErrorFilter.clearDatapointBuffer()
}
+ /**
+ * @description Function to handle the completion of the recovery phase based on a trigger from Rower.js
+ */
function markRecoveryPhaseCompleted () {
- // Completion of the recovery phase
inRecoveryPhase = false
// Calculation of the drag-factor
@@ -169,37 +191,56 @@ export function createFlywheel (rowerSettings) {
if (rowerSettings.autoAdjustRecoverySlope) {
// We are allowed to autoadjust stroke detection slope as well, so let's do that
minimumRecoverySlope.push((1 - rowerSettings.autoAdjustRecoverySlopeMargin) * recoveryDeltaTime.slope(), recoveryDeltaTime.goodnessOfFit())
- log.debug(`*** Calculated recovery slope: ${recoveryDeltaTime.slope().toFixed(6)}, Goodness of Fit: ${recoveryDeltaTime.goodnessOfFit().toFixed(4)}`)
+ log.trace(`*** Calculated recovery slope: ${recoveryDeltaTime.slope().toFixed(6)}, Goodness of Fit: ${recoveryDeltaTime.goodnessOfFit().toFixed(4)}`)
} else {
// We aren't allowed to adjust the slope, let's report the slope to help help the user configure it
- log.debug(`*** Calculated recovery slope: ${recoveryDeltaTime.slope().toFixed(6)}, Goodness of Fit: ${recoveryDeltaTime.goodnessOfFit().toFixed(4)}, not used as autoAdjustRecoverySlope isn't set to true`)
+ log.trace(`*** Calculated recovery slope: ${recoveryDeltaTime.slope().toFixed(6)}, Goodness of Fit: ${recoveryDeltaTime.goodnessOfFit().toFixed(4)}, not used as autoAdjustRecoverySlope isn't set to true`)
}
} else {
+ // As the drag calculation is considered unreliable, we must skip updating the systematic error filter that depends on it
if (!rowerSettings.autoAdjustDragFactor) {
// autoAdjustDampingConstant = false, thus the update is skipped, but let's log the dragfactor anyway
log.debug(`*** Calculated drag factor: ${(slopeToDrag(recoveryDeltaTime.slope()) * 1000000).toFixed(4)}, slope: ${recoveryDeltaTime.slope().toFixed(8)}, not used because autoAdjustDragFactor is not true`)
} else {
log.debug(`*** Calculated drag factor: ${(slopeToDrag(recoveryDeltaTime.slope()) * 1000000).toFixed(4)}, not used because reliability was too low. no. samples: ${recoveryDeltaTime.length()}, fit: ${recoveryDeltaTime.goodnessOfFit().toFixed(4)}`)
+ cyclicErrorFilter.clearDatapointBuffer()
}
}
}
+ /**
+ * @returns {float} the time the flywheel is spinning in seconds BEFORE the beginning of the flank
+ */
function spinningTime () {
- // This function returns the time the flywheel is spinning in seconds BEFORE the beginning of the flank
return totalTimeSpinning
}
+ /**
+ * @returns {float} the total energy produced onto the flywheel in Joules BEFORE the beginning of the flank
+ */
+ function totalWork () {
+ return Math.max(_totalWork, 0)
+ }
+
+ /**
+ * @returns {float} the current DeltaTime BEFORE the flank
+ */
function deltaTime () {
- return _deltaTimeBeforeFlank
+ return _deltaTimeBeforeFlank.clean
}
+ /**
+ * @returns {float} the absolute angular position of the flywheel in Radians BEFORE the beginning of the flank
+   * Please observe that the first datapoint should start at 0
+ */
function angularPosition () {
- // This function returns the absolute angular position of the flywheel in Radians BEFORE the beginning of the flank
- return totalNumberOfImpulses * angularDisplacementPerImpulse
+ return (Math.max(totalNumberOfImpulses, 0) * angularDisplacementPerImpulse)
}
+ /**
+ * @returns {float} the angular velocity of the flywheel in Radians/sec BEFORE the flank
+ */
function angularVelocity () {
- // This function returns the angular velocity of the flywheel in Radians/sec BEFORE the flank
if (maintainMetrics && (_deltaTime.length() >= flankLength)) {
return Math.max(0, _angularVelocityBeforeFlank)
} else {
@@ -207,8 +248,10 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {float} the angular acceleration of the flywheel in Radians/sec^2 BEFORE the flank
+ */
function angularAcceleration () {
- // This function returns the angular acceleration of the flywheel in Radians/sec^2 BEFORE the flanl
if (maintainMetrics && (_deltaTime.length() >= flankLength)) {
return _angularAccelerationBeforeFlank
} else {
@@ -216,6 +259,9 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {float} the torque on the flywheel in N/m BEFORE the flank
+ */
function torque () {
if (maintainMetrics && (_deltaTime.length() >= flankLength)) {
return _torqueBeforeFlank
@@ -224,14 +270,18 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {float} the current dragfactor of the flywheel
+ */
function dragFactor () {
- // This function returns the current dragfactor of the flywheel
return drag.weighedAverage()
}
+ /**
+ * @returns {boolean} whether the dragfactor is considered reliable, based on measurements instead of a default value
+ * We can't use reliable() as a filter on the dragFactor() function as Rower.js always needs some dragfactor for most calculations
+ */
function dragFactorIsReliable () {
- // This returns whether the dragfactor is considered reliable, based on measurements instead of a default value
- // We can't use reliable() as a filter on the dragFactor() function as Rower.js always needs some dragfactor for most calculations
if (rowerSettings.autoAdjustDragFactor) {
return drag.reliable()
} else {
@@ -239,11 +289,13 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+   * @returns {boolean} indicator if the flywheel is spinning down beyond a recovery phase indicating that the rower has stopped rowing
+ * We conclude this based on
+ * - The angular velocity at the begin of the flank is above the minimum angular velocity (dependent on maximumTimeBetweenImpulses)
+ * - The entire flank has a positive trend, i.e. the flywheel is decelerating consistent with the dragforce being present
+ */
function isDwelling () {
- // Check if the flywheel is spinning down beyond a recovery phase indicating that the rower has stopped rowing
- // We conclude this based on
- // * The angular velocity at the begin of the flank is above the minimum angular velocity (dependent on maximumTimeBetweenImpulses)
- // * The entire flank has a positive trend, i.e. the flywheel is decelerating consistent with the dragforce being present
if (_angularVelocityAtBeginFlank < minimumAngularVelocity && deltaTimeSlopeAbove(minimumRecoverySlope.weighedAverage())) {
return true
} else {
@@ -251,9 +303,11 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {boolean} indicator if the flywheel has reached its minimum speed, and that it isn't flywheel noise. We conclude this based on the first element in the flank
+ * as this angular velocity is created by all curves that are in that flank and having an acceleration in the rest of the flank
+ */
function isAboveMinimumSpeed () {
- // Check if the flywheel has reached its minimum speed, and that it isn't flywheel noise. We conclude this based on the first element in the flank
- // as this angular velocity is created by all curves that are in that flank and having an acceleration in the rest of the flank
if ((_angularVelocityAtBeginFlank >= minimumAngularVelocity) && (_deltaTime.Y.atSeriesBegin() <= rowerSettings.maximumTimeBetweenImpulses) && (_deltaTime.Y.atSeriesBegin() > rowerSettings.minimumTimeBetweenImpulses)) {
return true
} else {
@@ -261,9 +315,12 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {boolean} indicator if the flywheel is unpowered
+ * We consider the flywheel unpowered when there is an acceleration consistent with the drag being the only forces AND no torque being seen
+ * As in the first stroke drag is unreliable for automatic drag updating machines, torque can't be used when drag indicates it is unreliable for these machines
+ */
function isUnpowered () {
- // We consider the flywheel unpowered when there is an acceleration consistent with the drag being the only forces AND no torque being seen
- // As in the first stroke drag is unreliable for automatic drag updating machines, torque can't be used when drag indicates it is unreliable for these machines
if (deltaTimeSlopeAbove(minimumRecoverySlope.weighedAverage()) && (torqueAbsent() || (rowerSettings.autoAdjustDragFactor && !drag.reliable()))) {
return true
} else {
@@ -271,6 +328,9 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {boolean} indicator if the flywheel is powered
+ */
function isPowered () {
if (deltaTimeSlopeBelow(minimumRecoverySlope.weighedAverage()) && torquePresent()) {
return true
@@ -279,11 +339,15 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @param {float} threshold - Maximum slope
+ * @returns {boolean} indicator if the currentDt slope is below the specified slope
+ * This is a typical indication that the flywheel is accelerating. We use the slope of successive currentDt's
+ * A (more) negative slope indicates a powered flywheel. When set to 0, it determines whether the DeltaT's are decreasing
+ * When set to a value below 0, it will become more stringent. In automatic, a percentage of the current slope (i.e. dragfactor) is used
+   * Please note, as this acceleration isn't linear, _deltaTime.goodnessOfFit() will not be good by definition, so we need to omit it
+ */
function deltaTimeSlopeBelow (threshold) {
- // This is a typical indication that the flywheel is accelerating. We use the slope of successive currentDt's
- // A (more) negative slope indicates a powered flywheel. When set to 0, it determines whether the DeltaT's are decreasing
- // When set to a value below 0, it will become more stringent. In automatic, a percentage of the current slope (i.e. dragfactor) is used
- // Please note, as this acceleration isn't linear, _deltaTime.goodnessOfFit() will not be good by definition, so we need omit it
if (_deltaTime.slope() < threshold && _deltaTime.length() >= flankLength) {
return true
} else {
@@ -291,11 +355,15 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @param {float} threshold - Maximum slope
+ * @returns {boolean} indicator if the currentDt slope is above the specified slope
+   * This is a typical indication that the flywheel is decelerating. We use the slope of successive currentDt's
+   * A (more) positive slope indicates an unpowered flywheel. When set to 0, it determines whether the DeltaT's are increasing
+ * When set to a value below 0, it will become more stringent as it will detect a power inconsistent with the drag
+   * Typically, a percentage of the current slope (i.e. dragfactor) is used
+ */
function deltaTimeSlopeAbove (threshold) {
- // This is a typical indication that the flywheel is deccelerating. We use the slope of successive currentDt's
- // A (more) positive slope indicates a unpowered flywheel. When set to 0, it determines whether the DeltaT's are increasing
- // When set to a value below 0, it will become more stringent as it will detect a power inconsistent with the drag
- // Typically, a percentage of the current slope (i.e. dragfactor) is use
if (_deltaTime.slope() >= threshold && _deltaTime.goodnessOfFit() >= strokedetectionMinimalGoodnessOfFit && _deltaTime.length() >= flankLength) {
return true
} else {
@@ -303,8 +371,11 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {boolean} indicator if there is a torque present at the beginning of the flank above the minimum torque
+ * This is a typical indication that the flywheel is accelerating: the torque is above a certain threshold (so a force is present on the handle)
+ */
function torquePresent () {
- // This is a typical indication that the flywheel is accelerating: the torque is above a certain threshold (so a force is present on the handle)
if (_torqueAtBeginFlank >= minimumTorqueBeforeStroke) {
return true
} else {
@@ -312,11 +383,14 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @returns {boolean} indicator if there is a torque present at the beginning of the flank below the minimum torque
+ * This is a typical indication that the flywheel is decelerating: the torque is below a certain threshold (so a force is absent on the handle)
+ * We need to consider the situation rowerSettings.autoAdjustDragFactor && !drag.reliable() as a high default dragfactor (as set via config) blocks the
+ * detection of the first recovery based on Torque, and thus the calculation of the true dragfactor in that setting.
+   * This lets the recovery detection fall back onto slope-based stroke detection only for the first stroke (until drag is calculated reliably)
+ */
function torqueAbsent () {
- // This is a typical indication that the flywheel is decelerating: the torque is below a certain threshold (so a force is absent on the handle)
- // We need to consider the situation rowerSettings.autoAdjustDragFactor && !drag.reliable() as a high default dragfactor (as set via config) blocks the
- // detection of the first recovery based on Torque, and thus the calculation of the true dragfactor in that setting.
- // This let the recovery detection fall back onto slope-based stroke detection only for the first stroke (until drag is calculated reliably)
if (_torqueAtBeginFlank < minimumTorqueBeforeStroke) {
return true
} else {
@@ -324,29 +398,35 @@ export function createFlywheel (rowerSettings) {
}
}
+ /**
+ * @param {float} slope - Recovery slope to be converted
+ * @returns {float} Dragfactor to be used in all calculations
+ * @description Helper function to convert a recovery slope into a dragfactor
+ */
function slopeToDrag (slope) {
return ((slope * rowerSettings.flywheelInertia) / angularDisplacementPerImpulse)
}
+ /**
+ * @description This function is used for clearing all data, returning the flywheel.js to its initial state
+ */
function reset () {
maintainMetrics = false
inRecoveryPhase = false
+ rawTime = 0
+ rawNumberOfImpulses = 0
+ totalTimeSpinning = 0
+ totalNumberOfImpulses = -1
+ _totalWork = 0
drag.reset()
+ cyclicErrorFilter.reset()
+ cyclicErrorFilter.applyFilter(0, flankLength - 1)
recoveryDeltaTime.reset()
_deltaTime.reset()
_angularDistance.reset()
- totalNumberOfImpulses = 0
- totalTimeSpinning = 0
- currentCleanTime = 0
- currentRawTime = 0
- currentAngularDistance = 0
- _angularVelocityMatrix = null
- _angularVelocityMatrix = []
- _angularAccelerationMatrix = null
- _angularAccelerationMatrix = []
_deltaTime.push(0, 0)
_angularDistance.push(0, 0)
- _deltaTimeBeforeFlank = 0
+ _deltaTimeBeforeFlank.clean = 0
_angularVelocityBeforeFlank = 0
_angularAccelerationBeforeFlank = 0
_torqueAtBeginFlank = 0
@@ -360,6 +440,7 @@ export function createFlywheel (rowerSettings) {
markRecoveryPhaseStart,
markRecoveryPhaseCompleted,
spinningTime,
+ totalWork,
deltaTime,
angularPosition,
angularVelocity,
diff --git a/app/engine/Flywheel.test.js b/app/engine/Flywheel.test.js
index 0fa485eb45..d98494d350 100644
--- a/app/engine/Flywheel.test.js
+++ b/app/engine/Flywheel.test.js
@@ -1,7 +1,9 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file Tests of the Flywheel object
+ */
import { test } from 'uvu'
import * as assert from 'uvu/assert'
import { deepMerge } from '../tools/Helper.js'
@@ -21,7 +23,8 @@ const baseConfig = { // Based on Concept 2 settings, as this is the validation s
minimumTimeBetweenImpulses: 0.005,
maximumTimeBetweenImpulses: 0.020,
flankLength: 12,
- smoothing: 1,
+ systematicErrorAgressiveness: 0,
+ systematicErrorNumberOfDatapoints: 1,
minimumStrokeQuality: 0.36,
minimumForceBeforeStroke: 10,
minimumRecoverySlope: 0.00070,
@@ -33,7 +36,9 @@ const baseConfig = { // Based on Concept 2 settings, as this is the validation s
magicConstant: 2.8
}
-// Test behaviour for no datapoints
+/**
+ * @description Test behaviour for no datapoints
+ */
test('Correct Flywheel behaviour at initialisation', () => {
const flywheel = createFlywheel(baseConfig)
testDeltaTime(flywheel, 0)
@@ -48,13 +53,694 @@ test('Correct Flywheel behaviour at initialisation', () => {
testIsPowered(flywheel, false)
})
-// Test behaviour for one datapoint
+/**
+ * @todo Test behaviour for one datapoint
+ */
-// Test behaviour for perfect upgoing flank
+/**
+ * @todo Test behaviour for perfect upgoing flank
+ */
-// Test behaviour for perfect downgoing flank
+/**
+ * @todo Test behaviour for perfect downgoing flank
+ */
-// Test behaviour for perfect stroke
+/**
+ * @description Test of the integration of the underlying FullTSQuadraticEstimator object
+ * This uses the same data as the function y = 2 * x^2 + 4 * x
+ */
+test('Test of correct algorithmic integration of FullTSQuadraticEstimator and Flywheel object for quadratic function f(x) = 2 * x^2 + 4 * x', () => {
+ const testConfig = {
+ numOfImpulsesPerRevolution: 6,
+ sprocketRadius: 1,
+ maximumStrokeTimeBeforePause: 6.0,
+ dragFactor: 10,
+ autoAdjustDragFactor: false,
+ minimumDragQuality: 0.95,
+ dragFactorSmoothing: 3,
+ minimumTimeBetweenImpulses: 0,
+ maximumTimeBetweenImpulses: 1,
+ flankLength: 12,
+ systematicErrorAgressiveness: 0,
+ systematicErrorNumberOfDatapoints: 1,
+ minimumStrokeQuality: 0.36,
+ minimumForceBeforeStroke: 0,
+ minimumRecoverySlope: 0.00070,
+ autoAdjustRecoverySlope: false,
+ autoAdjustRecoverySlopeMargin: 0.15,
+ minimumDriveTime: 0.40,
+ minimumRecoveryTime: 0.90,
+ flywheelInertia: 0.1031,
+ magicConstant: 2.8
+ }
+ const flywheel = createFlywheel(testConfig) // Please note, Datapoint 0 is automatically added by this initialisation
+ flywheel.maintainStateAndMetrics()
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.234341433963188) // Datapoint 1
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.196461680094298) // Datapoint 2
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.172567188397595) // Datapoint 3
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.155718979643243) // Datapoint 4
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.143013206725950) // Datapoint 5
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.132987841748253) // Datapoint 6
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.124815090780014) // Datapoint 7
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.117986192571703) // Datapoint 8
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.112168841458569) // Datapoint 9
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.107135523306685) // Datapoint 10
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.102724506937187) // Datapoint 11
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.098817239158663) // Datapoint 12
+ testDeltaTime(flywheel, 0) // Values from Datapoint 0 are now passing through
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 4.000000000000004)
+ testAngularAcceleration(flywheel, 3.99999999999998)
+ flywheel.pushValue(0.095324565640171) // Datapoint 13
+ testDeltaTime(flywheel, 0.234341433963188) // Values from Datapoint 1 are now passing through
+ testSpinningTime(flywheel, 0.234341433963188)
+ testAngularPosition(flywheel, 1.0471975511965976)
+ testAngularVelocity(flywheel, 4.937365735852752)
+ testAngularAcceleration(flywheel, 3.999999999999979)
+ flywheel.pushValue(0.092177973027300) // Datapoint 14
+ testDeltaTime(flywheel, 0.196461680094298) // Values from Datapoint 2 are now passing through
+ testSpinningTime(flywheel, 0.430803114057486)
+ testAngularPosition(flywheel, 2.0943951023931953)
+ testAngularVelocity(flywheel, 5.723212456229939)
+ testAngularAcceleration(flywheel, 3.999999999999979)
+ flywheel.pushValue(0.089323823233014) // Datapoint 15
+ testDeltaTime(flywheel, 0.172567188397595) // Values from Datapoint 3 are now passing through
+ testSpinningTime(flywheel, 0.6033703024550809)
+ testAngularPosition(flywheel, 3.141592653589793)
+ testAngularVelocity(flywheel, 6.413481209820315)
+ testAngularAcceleration(flywheel, 3.9999999999999787)
+ flywheel.pushValue(0.086719441920360) // Datapoint 16
+ testDeltaTime(flywheel, 0.155718979643243) // Values from Datapoint 4 are now passing through
+ testSpinningTime(flywheel, 0.7590892820983239)
+ testAngularPosition(flywheel, 4.1887902047863905)
+ testAngularVelocity(flywheel, 7.036357128393282)
+ testAngularAcceleration(flywheel, 3.999999999999977)
+ flywheel.pushValue(0.084330395149166) // Datapoint 17
+ testDeltaTime(flywheel, 0.143013206725950) // Values from Datapoint 5 are now passing through
+ testSpinningTime(flywheel, 0.9021024888242739)
+ testAngularPosition(flywheel, 5.235987755982988)
+ testAngularVelocity(flywheel, 7.608409955297075)
+ testAngularAcceleration(flywheel, 3.999999999999975)
+ flywheel.pushValue(0.082128549835466) // Datapoint 18
+ testDeltaTime(flywheel, 0.132987841748253) // Values from Datapoint 6 are now passing through
+ testSpinningTime(flywheel, 1.035090330572527)
+ testAngularPosition(flywheel, 6.283185307179586)
+ testAngularVelocity(flywheel, 8.140361322290087)
+ testAngularAcceleration(flywheel, 3.9999999999999782)
+ flywheel.pushValue(0.080090664596669) // Datapoint 19
+ testDeltaTime(flywheel, 0.124815090780014) // Values from Datapoint 7 are now passing through
+ testSpinningTime(flywheel, 1.159905421352541)
+ testAngularPosition(flywheel, 7.330382858376184)
+ testAngularVelocity(flywheel, 8.639621685410138)
+ testAngularAcceleration(flywheel, 3.99999999999998)
+ flywheel.pushValue(0.078197347646078) // Datapoint 20
+ testDeltaTime(flywheel, 0.117986192571703) // Values from Datapoint 8 are now passing through
+ testSpinningTime(flywheel, 1.277891613924244)
+ testAngularPosition(flywheel, 8.377580409572781)
+ testAngularVelocity(flywheel, 9.111566455696952)
+ testAngularAcceleration(flywheel, 3.999999999999985)
+ flywheel.pushValue(0.076432273828253) // Datapoint 21
+ testDeltaTime(flywheel, 0.112168841458569) // Values from Datapoint 9 are now passing through
+ testSpinningTime(flywheel, 1.390060455382813)
+ testAngularPosition(flywheel, 9.42477796076938)
+ testAngularVelocity(flywheel, 9.560241821531228)
+ testAngularAcceleration(flywheel, 3.9999999999999845)
+ flywheel.pushValue(0.074781587915460) // Datapoint 22
+ testDeltaTime(flywheel, 0.107135523306685) // Values from Datapoint 10 are now passing through
+ testSpinningTime(flywheel, 1.4971959786894982)
+ testAngularPosition(flywheel, 10.471975511965976)
+ testAngularVelocity(flywheel, 9.98878391475797)
+ testAngularAcceleration(flywheel, 3.99999999999998)
+ flywheel.pushValue(0.073233443959153) // Datapoint 23
+ testDeltaTime(flywheel, 0.102724506937187) // Values from Datapoint 11 are now passing through
+ testSpinningTime(flywheel, 1.599920485626685)
+ testAngularPosition(flywheel, 11.519173063162574)
+ testAngularVelocity(flywheel, 10.399681942506724)
+ testAngularAcceleration(flywheel, 3.999999999999972)
+ flywheel.pushValue(0.071777645486524) // Datapoint 24
+ testDeltaTime(flywheel, 0.098817239158663) // Values from Datapoint 12 are now passing through
+ testSpinningTime(flywheel, 1.6987377247853481)
+ testAngularPosition(flywheel, 12.566370614359172)
+ testAngularVelocity(flywheel, 10.794950899141375)
+ testAngularAcceleration(flywheel, 3.99999999999996)
+ flywheel.pushValue(0.070405361445316) // Datapoint 25
+ testDeltaTime(flywheel, 0.095324565640171) // Values from Datapoint 13 are now passing through
+ testSpinningTime(flywheel, 1.794062290425519)
+ testAngularPosition(flywheel, 13.613568165555769)
+ testAngularVelocity(flywheel, 11.17624916170206)
+ testAngularAcceleration(flywheel, 3.9999999999999463)
+ flywheel.pushValue(0.069108899742145) // Datapoint 26
+ testDeltaTime(flywheel, 0.092177973027300) // Values from Datapoint 14 are now passing through
+ testSpinningTime(flywheel, 1.886240263452819)
+ testAngularPosition(flywheel, 14.660765716752367)
+ testAngularVelocity(flywheel, 11.544961053811264)
+ testAngularAcceleration(flywheel, 3.999999999999933)
+ flywheel.pushValue(0.067881525062373) // Datapoint 27
+ testDeltaTime(flywheel, 0.089323823233014) // Values from Datapoint 15 are now passing through
+ testSpinningTime(flywheel, 1.975564086685833)
+ testAngularPosition(flywheel, 15.707963267948964)
+ testAngularVelocity(flywheel, 11.902256346743307)
+ testAngularAcceleration(flywheel, 3.9999999999999245)
+ flywheel.pushValue(0.066717311088441) // Datapoint 28
+ testDeltaTime(flywheel, 0.086719441920360) // Values from Datapoint 16 are now passing through
+ testSpinningTime(flywheel, 2.062283528606193)
+ testAngularPosition(flywheel, 16.755160819145562)
+ testAngularVelocity(flywheel, 12.249134114424734)
+ testAngularAcceleration(flywheel, 3.9999999999999245)
+ flywheel.pushValue(0.065611019694526) // Datapoint 29
+ testDeltaTime(flywheel, 0.084330395149166) // Values from Datapoint 17 are now passing through
+ testSpinningTime(flywheel, 2.1466139237553588)
+ testAngularPosition(flywheel, 17.80235837034216)
+ testAngularVelocity(flywheel, 12.586455695021384)
+ testAngularAcceleration(flywheel, 3.9999999999999396)
+ flywheel.pushValue(0.064558001484125) // Datapoint 30
+ testDeltaTime(flywheel, 0.082128549835466) // Values from Datapoint 18 are now passing through
+ testSpinningTime(flywheel, 2.228742473590825)
+ testAngularPosition(flywheel, 18.84955592153876)
+ testAngularVelocity(flywheel, 12.914969894363232)
+ testAngularAcceleration(flywheel, 3.9999999999999574)
+ flywheel.pushValue(0.063554113352442) // Datapoint 31
+ testDeltaTime(flywheel, 0.080090664596669) // Values from Datapoint 19 are now passing through
+ testSpinningTime(flywheel, 2.308833138187494)
+ testAngularPosition(flywheel, 19.896753472735355)
+ testAngularVelocity(flywheel, 13.235332552749886)
+ testAngularAcceleration(flywheel, 3.9999999999999867)
+})
+
+/**
+ * @description Test of the integration of the underlying FullTSQuadraticEstimator object
+ * The data follows the function y = x^3 + 2 * x^2 + 4 * x
+ * To test if multiple quadratic regressions can decently approximate a cubic function
+ */
+test('Test of correct algorithmic integration of FullTSQuadraticEstimator and Flywheel object for cubic function f(x) = x^3 + 2 * x^2 + 4 * x', () => {
+ const testConfig = {
+ numOfImpulsesPerRevolution: 6,
+ sprocketRadius: 1,
+ maximumStrokeTimeBeforePause: 6.0,
+ dragFactor: 10,
+ autoAdjustDragFactor: false,
+ minimumDragQuality: 0.95,
+ dragFactorSmoothing: 3,
+ minimumTimeBetweenImpulses: 0,
+ maximumTimeBetweenImpulses: 1,
+ flankLength: 12,
+ systematicErrorAgressiveness: 0,
+ systematicErrorNumberOfDatapoints: 1,
+ minimumStrokeQuality: 0.36,
+ minimumForceBeforeStroke: 0,
+ minimumRecoverySlope: 0.00070,
+ autoAdjustRecoverySlope: false,
+ autoAdjustRecoverySlopeMargin: 0.15,
+ minimumDriveTime: 0.40,
+ minimumRecoveryTime: 0.90,
+ flywheelInertia: 0.1031,
+ magicConstant: 2.8
+ }
+ const flywheel = createFlywheel(testConfig) // Please note, Datapoint 0 is automatically added by this initialisation
+ flywheel.maintainStateAndMetrics()
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.231815755285445) // Datapoint 1
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.186170118209325) // Datapoint 2
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.155673811324399) // Datapoint 3
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.134264409859047) // Datapoint 4
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.118490308292909) // Datapoint 5
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.106396192260267) // Datapoint 6
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.096822693623239) // Datapoint 7
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.08904704613513) // Datapoint 8
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.08259777558252) // Datapoint 9
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.077155055952201) // Datapoint 10
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.072494552013330) // Datapoint 11
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.068454336759262) // Datapoint 12
+ testDeltaTime(flywheel, 0) // Values from Datapoint 0 are now passing through
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 3.1619218560691382) // Theoretical value: 4
+ testAngularAcceleration(flywheel, 7.251023549310239) // Theoretical value: 4
+ flywheel.pushValue(0.064914611722656) // Datapoint 13
+ testDeltaTime(flywheel, 0.231815755285445) // Values from Datapoint 1 are now passing through
+ testSpinningTime(flywheel, 0.231815755285445)
+ testAngularPosition(flywheel, 1.0471975511965976)
+ testAngularVelocity(flywheel, 4.7950174071704375) // Theoretical value: 5.088478654, error: -6,64%
+ testAngularAcceleration(flywheel, 7.324931550092107) // Theoretical value: 5.390894532, error: 38,46%
+ flywheel.pushValue(0.061784830519864) // Datapoint 14
+ testDeltaTime(flywheel, 0.186170118209325) // Values from Datapoint 2 are now passing through
+ testSpinningTime(flywheel, 0.41798587349477)
+ testAngularPosition(flywheel, 2.0943951023931953)
+ testAngularVelocity(flywheel, 6.098616558470422) // Theoretical value: 6.196080065, error: -2,14%
+ testAngularAcceleration(flywheel, 7.6561044943826655) // Theoretical value: 6.507915241, error: 18,21%
+ flywheel.pushValue(0.058995265576639) // Datapoint 15
+ testDeltaTime(flywheel, 0.155673811324399) // Values from Datapoint 3 are now passing through
+ testSpinningTime(flywheel, 0.5736596848191691)
+ testAngularPosition(flywheel, 3.141592653589793)
+ testAngularVelocity(flywheel, 7.261045146876231) // Theoretical value: 7.281895041, error: -0,79%
+ testAngularAcceleration(flywheel, 8.125127482273879) // Theoretical value: 7.441958109, error: 9,49%
+ flywheel.pushValue(0.056491331538715) // Datapoint 16
+ testDeltaTime(flywheel, 0.134264409859047) // Values from Datapoint 4 are now passing through
+ testSpinningTime(flywheel, 0.707924094678216)
+ testAngularPosition(flywheel, 4.1887902047863905)
+ testAngularVelocity(flywheel, 8.335452316712825) // Theoretical value: 8.33516595, error: -0,42%
+ testAngularAcceleration(flywheel, 8.591085532405152) // Theoretical value: 8.247544568, error: 4,32%
+ flywheel.pushValue(0.054229670373632) // Datapoint 17
+ testDeltaTime(flywheel, 0.118490308292909) // Values from Datapoint 5 are now passing through
+ testSpinningTime(flywheel, 0.826414402971125)
+ testAngularPosition(flywheel, 5.235987755982988)
+ testAngularVelocity(flywheel, 9.346198019520214) // Theoretical value: 9.354539908, error: -0,44%
+ testAngularAcceleration(flywheel, 9.058162877855903) // Theoretical value: 8.958486418, error: 1,06%
+ flywheel.pushValue(0.052175392433679) // Datapoint 18
+ testDeltaTime(flywheel, 0.106396192260267) // Values from Datapoint 6 are now passing through
+ testSpinningTime(flywheel, 0.932810595231392)
+ testAngularPosition(flywheel, 6.283185307179586)
+ testAngularVelocity(flywheel, 10.314972131734738) // Theoretical value: 10.3416492, error: -0,56%
+ testAngularAcceleration(flywheel, 9.531782371110172) // Theoretical value: 9.596863571, error: -0,95%
+ flywheel.pushValue(0.05030009417797) // Datapoint 19
+ testDeltaTime(flywheel, 0.096822693623239) // Values from Datapoint 7 are now passing through
+ testSpinningTime(flywheel, 1.029633288854631)
+ testAngularPosition(flywheel, 7.330382858376184)
+ testAngularVelocity(flywheel, 11.253026452431792) // Theoretical value: 11.29896728, error: -0,68%
+ testAngularAcceleration(flywheel, 10.006689891934712) // Theoretical value: 10.17779973, error: -2,15%
+ flywheel.pushValue(0.04858040892819) // Datapoint 20
+ testDeltaTime(flywheel, 0.08904704613513) // Values from Datapoint 8 are now passing through
+ testSpinningTime(flywheel, 1.118680334989761)
+ testAngularPosition(flywheel, 8.377580409572781)
+ testAngularVelocity(flywheel, 12.167114512288897) // Theoretical value: 12.22905842, error: -0,76%
+ testAngularAcceleration(flywheel, 10.479926499860289) // Theoretical value: 10.71208201, error: -2,78%
+ flywheel.pushValue(0.046996930546829) // Datapoint 21
+ testDeltaTime(flywheel, 0.08259777558252) // Values from Datapoint 9 are now passing through
+ testSpinningTime(flywheel, 1.201278110572281)
+ testAngularPosition(flywheel, 9.42477796076938)
+ testAngularVelocity(flywheel, 13.062289353874645) // Theoretical value: 13.13431974, error: -0,79%
+ testAngularAcceleration(flywheel, 10.945741904208647) // Theoretical value: 11.20766866, error: -3,03%
+ flywheel.pushValue(0.045533402601137) // Datapoint 22
+ testDeltaTime(flywheel, 0.077155055952201) // Values from Datapoint 10 are now passing through
+ testSpinningTime(flywheel, 1.278433166524482)
+ testAngularPosition(flywheel, 10.471975511965976)
+ testAngularVelocity(flywheel, 13.94075092506632) // Theoretical value: 14.01690675, error: -0,78%
+ testAngularAcceleration(flywheel, 11.403650671998298) // Theoretical value: 11.670599, error: -2,98%
+ flywheel.pushValue(0.044176099545603) // Datapoint 23
+ testDeltaTime(flywheel, 0.072494552013330) // Values from Datapoint 11 are now passing through
+ testSpinningTime(flywheel, 1.350927718537812)
+ testAngularPosition(flywheel, 11.519173063162574)
+ testAngularVelocity(flywheel, 14.80669498176648) // Theoretical value: 14.87872798, error: -0,69%
+ testAngularAcceleration(flywheel, 11.856689681955814) // Theoretical value: 12.10556631, error: -2,69%
+ flywheel.pushValue(0.042913348809906) // Datapoint 24
+ testDeltaTime(flywheel, 0.068454336759262) // Values from Datapoint 12 are now passing through
+ testSpinningTime(flywheel, 1.419382055297074)
+ testAngularPosition(flywheel, 12.566370614359172)
+ testAngularVelocity(flywheel, 15.659331443649155) // Theoretical value: 15.72146448, error: -0,57%
+ testAngularAcceleration(flywheel, 12.303309060000915) // Theoretical value: 12.51629233, error: -2,22%
+ flywheel.pushValue(0.041735157665124) // Datapoint 25
+ testDeltaTime(flywheel, 0.064914611722656) // Values from Datapoint 13 are now passing through, so we cleared all startup noise
+ testSpinningTime(flywheel, 1.484296667019730)
+ testAngularPosition(flywheel, 13.613568165555769)
+ testAngularVelocity(flywheel, 16.492736768968747) // Theoretical value: 16.54659646, error: -0,47%
+ testAngularAcceleration(flywheel, 12.721354618621062) // Theoretical value: 12.90578, error: -1,86%
+ flywheel.pushValue(0.040632918960300) // Datapoint 26
+ testDeltaTime(flywheel, 0.061784830519864) // Values from Datapoint 14 are now passing through
+ testSpinningTime(flywheel, 1.546081497539594)
+ testAngularPosition(flywheel, 14.660765716752367)
+ testAngularVelocity(flywheel, 17.307691210719753) // Theoretical value: 17.35542998, error: -0,40%
+ testAngularAcceleration(flywheel, 13.11397255097641) // Theoretical value: 13.27648899, error: -1,59%
+ flywheel.pushValue(0.039599176898486) // Datapoint 27
+ testDeltaTime(flywheel, 0.058995265576639) // Values from Datapoint 15 are now passing through
+ testSpinningTime(flywheel, 1.605076763116233)
+ testAngularPosition(flywheel, 15.707963267948964)
+ testAngularVelocity(flywheel, 18.10649398672465) // Theoretical value: 18.1491213, error: -0,34%
+ testAngularAcceleration(flywheel, 13.486098587071863) // Theoretical value: 13.63046058, error: -1,38%
+ flywheel.pushValue(0.038627438996519) // Datapoint 28
+ testDeltaTime(flywheel, 0.056491331538715) // Values from Datapoint 16 are now passing through
+ testSpinningTime(flywheel, 1.661568094654948)
+ testAngularPosition(flywheel, 16.755160819145562)
+ testAngularVelocity(flywheel, 18.890426542395847) // Theoretical value: 18.92869798, error: -0,29%
+ testAngularAcceleration(flywheel, 13.840428977171639) // Theoretical value: 13.96940857, error: -1,20%
+ flywheel.pushValue(0.037712023914259) // Datapoint 29
+ testDeltaTime(flywheel, 0.054229670373632) // Values from Datapoint 17 are now passing through
+ testSpinningTime(flywheel, 1.715797765028580)
+ testAngularPosition(flywheel, 17.80235837034216)
+ testAngularVelocity(flywheel, 19.660398675998614) // Theoretical value: 19.69507697, error: -0,26%
+ testAngularAcceleration(flywheel, 14.178743620219855) // Theoretical value: 14.29478659, error: -1,06%
+ flywheel.pushValue(0.036847937394809) // Datapoint 30
+ testDeltaTime(flywheel, 0.052175392433679) // Values from Datapoint 18 are now passing through
+ testSpinningTime(flywheel, 1.767973157462259)
+ testAngularPosition(flywheel, 18.84955592153876)
+ testAngularVelocity(flywheel, 20.41744737019342) // Theoretical value: 20.44907989, error: -0,23%
+ testAngularAcceleration(flywheel, 14.502790132816358) // Theoretical value: 14.60783894, error: -0,94%
+ flywheel.pushValue(0.036030770419579) // Datapoint 31
+ testDeltaTime(flywheel, 0.05030009417797) // Values from Datapoint 19 are now passing through
+ testSpinningTime(flywheel, 1.8182732516402291)
+ testAngularPosition(flywheel, 19.896753472735355)
+ testAngularVelocity(flywheel, 21.162376267362376) // Theoretical value: 21.19144586, error: -0,20%
+ testAngularAcceleration(flywheel, 14.813903373334538) // Theoretical value: 14.90963951, error: -0,83%
+})
+
+/**
+ * @description Test of the integration of the underlying FullTSQuadraticEstimator object
+ * The data follows the function y = x^3 + 2 * x^2 + 4 * x with a +/-0.0001 sec injected noise in currentDt
+ * To test if multiple quadratic regressions can decently approximate a cubic function with noise
+ * Please note: theoretical values are based on the perfect function (i.e. without noise)
+ */
+test('Test of correct algorithmic integration of FullTSQuadraticEstimator and Flywheel object for cubic function f(x) = x^3 + 2 * x^2 + 4 * x with +/- 0.0001 error', () => {
+ const testConfig = {
+ numOfImpulsesPerRevolution: 6,
+ sprocketRadius: 1,
+ maximumStrokeTimeBeforePause: 6.0,
+ dragFactor: 10,
+ autoAdjustDragFactor: false,
+ minimumDragQuality: 0.95,
+ dragFactorSmoothing: 3,
+ minimumTimeBetweenImpulses: 0,
+ maximumTimeBetweenImpulses: 1,
+ flankLength: 12,
+ systematicErrorAgressiveness: 0,
+ systematicErrorNumberOfDatapoints: 1,
+ minimumStrokeQuality: 0.36,
+ minimumForceBeforeStroke: 0,
+ minimumRecoverySlope: 0.00070,
+ autoAdjustRecoverySlope: false,
+ autoAdjustRecoverySlopeMargin: 0.15,
+ minimumDriveTime: 0.40,
+ minimumRecoveryTime: 0.90,
+ flywheelInertia: 0.1031,
+ magicConstant: 2.8
+ }
+ const flywheel = createFlywheel(testConfig) // Please note, Datapoint 0 is automatically added by this initialisation
+ flywheel.maintainStateAndMetrics()
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.231915755285445) // Datapoint 1
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.186070118209325) // Datapoint 2
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.155773811324398) // Datapoint 3
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.134164409859047) // Datapoint 4
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.118590308292909) // Datapoint 5
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.106296192260267) // Datapoint 6
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.096922693623239) // Datapoint 7
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.08894704613513) // Datapoint 8
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.08269777558252) // Datapoint 9
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.077055055952201) // Datapoint 10
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.07259455201333) // Datapoint 11
+ testDeltaTime(flywheel, 0)
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 0)
+ testAngularAcceleration(flywheel, 0)
+ flywheel.pushValue(0.068354336759262) // Datapoint 12
+ testDeltaTime(flywheel, 0) // Values from Datapoint 0 are now passing through
+ testSpinningTime(flywheel, 0)
+ testAngularPosition(flywheel, 0)
+ testAngularVelocity(flywheel, 3.1651252708296993) // Theoretical value: 4
+ testAngularAcceleration(flywheel, 7.2468812808500696) // Theoretical value: 4
+ flywheel.pushValue(0.065014611722656) // Datapoint 13
+ testDeltaTime(flywheel, 0.231915755285445) // Values from Datapoint 1 are now passing through
+ testSpinningTime(flywheel, 0.231915755285445)
+ testAngularPosition(flywheel, 1.0471975511965976)
+ testAngularVelocity(flywheel, 4.7981896825575205) // Theoretical value: 5.088478654, error: -6.58%
+ testAngularAcceleration(flywheel, 7.320784012928006) // Theoretical value: 5.390894532, error: 38.38%
+ flywheel.pushValue(0.061684830519864) // Datapoint 14
+ testDeltaTime(flywheel, 0.186070118209325) // Values from Datapoint 2 are now passing through
+ testSpinningTime(flywheel, 0.41798587349477)
+ testAngularPosition(flywheel, 2.0943951023931953)
+ testAngularVelocity(flywheel, 6.100352571838149) // Theoretical value: 6.196080065, error: -2.11%
+ testAngularAcceleration(flywheel, 7.650380140052492) // Theoretical value: 6.507915241, error: 18.14%
+ flywheel.pushValue(0.059095265576639) // Datapoint 15
+ testDeltaTime(flywheel, 0.155773811324398) // Values from Datapoint 3 are now passing through
+ testSpinningTime(flywheel, 0.573759684819168)
+ testAngularPosition(flywheel, 3.141592653589793)
+ testAngularVelocity(flywheel, 7.262664999379819) // Theoretical value: 7.281895041, error: -0.77%
+ testAngularAcceleration(flywheel, 8.117964017032822) // Theoretical value: 7.441958109, error: 9.40%
+ flywheel.pushValue(0.056391331538715) // Datapoint 16
+ testDeltaTime(flywheel, 0.134164409859047) // Values from Datapoint 4 are now passing through
+ testSpinningTime(flywheel, 0.7079240946782149)
+ testAngularPosition(flywheel, 4.1887902047863905)
+ testAngularVelocity(flywheel, 8.335671487914347) // Theoretical value: 8.33516595, error: -0.42%
+ testAngularAcceleration(flywheel, 8.58427221387106) // Theoretical value: 8.247544568, error: 4.24%
+ flywheel.pushValue(0.054329670373632) // Datapoint 17
+ testDeltaTime(flywheel, 0.118590308292909) // Values from Datapoint 5 are now passing through
+ testSpinningTime(flywheel, 0.8265144029711239)
+ testAngularPosition(flywheel, 5.235987755982988)
+ testAngularVelocity(flywheel, 9.347109926263196) // Theoretical value: 9.354539908, error: -0.44%
+ testAngularAcceleration(flywheel, 9.052626876076234) // Theoretical value: 8.958486418, error: 1.00%
+ flywheel.pushValue(0.052075392433679) // Datapoint 18
+ testDeltaTime(flywheel, 0.106296192260267) // Values from Datapoint 6 are now passing through
+ testSpinningTime(flywheel, 0.9328105952313909)
+ testAngularPosition(flywheel, 6.283185307179586)
+ testAngularVelocity(flywheel, 10.314706935144432) // Theoretical value: 10.3416492, error: -0.56%
+ testAngularAcceleration(flywheel, 9.52653469078407) // Theoretical value: 9.596863571, error: -1.00%
+ flywheel.pushValue(0.05040009417797) // Datapoint 19
+ testDeltaTime(flywheel, 0.096922693623239) // Values from Datapoint 7 are now passing through
+ testSpinningTime(flywheel, 1.0297332888546298)
+ testAngularPosition(flywheel, 7.330382858376184)
+ testAngularVelocity(flywheel, 11.253653421461035) // Theoretical value: 11.29896728, error: -0.67%
+ testAngularAcceleration(flywheel, 10.001358612662711) // Theoretical value: 10.17779973, error: -2.21%
+ flywheel.pushValue(0.04848040892819) // Datapoint 20
+ testDeltaTime(flywheel, 0.08894704613513) // Values from Datapoint 8 are now passing through
+ testSpinningTime(flywheel, 1.1186803349897598)
+ testAngularPosition(flywheel, 8.377580409572781)
+ testAngularVelocity(flywheel, 12.166767447463288) // Theoretical value: 12.22905842, error: -0.76%
+ testAngularAcceleration(flywheel, 10.47394441606818) // Theoretical value: 10.71208201, error: -2.84%
+ flywheel.pushValue(0.047096930546829) // Datapoint 21
+ testDeltaTime(flywheel, 0.08269777558252) // Values from Datapoint 9 are now passing through
+ testSpinningTime(flywheel, 1.2013781105722798)
+ testAngularPosition(flywheel, 9.42477796076938)
+ testAngularVelocity(flywheel, 13.062997567333893) // Theoretical value: 13.13431974, error: -0.79%
+ testAngularAcceleration(flywheel, 10.940063240068076) // Theoretical value: 11.20766866, error: -3.08%
+ flywheel.pushValue(0.045433402601137) // Datapoint 22
+ testDeltaTime(flywheel, 0.077055055952201) // Values from Datapoint 10 are now passing through
+ testSpinningTime(flywheel, 1.2784331665244808)
+ testAngularPosition(flywheel, 10.471975511965976)
+ testAngularVelocity(flywheel, 13.940480188006552) // Theoretical value: 14.01690675, error: -0.78%
+ testAngularAcceleration(flywheel, 11.397389413208364) // Theoretical value: 11.670599, error: -3.04%
+ flywheel.pushValue(0.044276099545603) // Datapoint 23
+ testDeltaTime(flywheel, 0.07259455201333) // Values from Datapoint 11 are now passing through
+ testSpinningTime(flywheel, 1.3510277185378108)
+ testAngularPosition(flywheel, 11.519173063162574)
+ testAngularVelocity(flywheel, 14.807840698982423) // Theoretical value: 14.87872798, error: -0.68%
+ testAngularAcceleration(flywheel, 11.848780564150369) // Theoretical value: 12.10556631, error: -2.76%
+ flywheel.pushValue(0.042813348809906) // Datapoint 24
+ testDeltaTime(flywheel, 0.068354336759262) // Values from Datapoint 12 are now passing through
+ testSpinningTime(flywheel, 1.4193820552970728)
+ testAngularPosition(flywheel, 12.566370614359172)
+ testAngularVelocity(flywheel, 15.65917726721796) // Theoretical value: 15.72146448, error: -0.57%
+ testAngularAcceleration(flywheel, 12.293943915780252) // Theoretical value: 12.51629233, error: -2.30%
+ flywheel.pushValue(0.041835157665124) // Datapoint 25
+ testDeltaTime(flywheel, 0.065014611722656) // Values from Datapoint 13 are now passing through, so we cleared all startup noise
+ testSpinningTime(flywheel, 1.4843966670197288)
+ testAngularPosition(flywheel, 13.613568165555769)
+ testAngularVelocity(flywheel, 16.49447250537608) // Theoretical value: 16.54659646, error: -0.46%
+ testAngularAcceleration(flywheel, 12.710407075508567) // Theoretical value: 12.90578, error: -1.95%
+ flywheel.pushValue(0.040532918960300) // Datapoint 26
+ testDeltaTime(flywheel, 0.061684830519864) // Values from Datapoint 14 are now passing through
+ testSpinningTime(flywheel, 1.546081497539593)
+ testAngularPosition(flywheel, 14.660765716752367)
+ testAngularVelocity(flywheel, 17.308891329044464) // Theoretical value: 17.35542998, error: -0.39%
+ testAngularAcceleration(flywheel, 13.100466914875906) // Theoretical value: 13.27648899, error: -1.70%
+ flywheel.pushValue(0.039699176898486) // Datapoint 27
+ testDeltaTime(flywheel, 0.059095265576639) // Values from Datapoint 15 are now passing through
+ testSpinningTime(flywheel, 1.605176763116232)
+ testAngularPosition(flywheel, 15.707963267948964)
+ testAngularVelocity(flywheel, 18.109702829774772) // Theoretical value: 18.1491213, error: -0.32%
+ testAngularAcceleration(flywheel, 13.469377816872242) // Theoretical value: 13.63046058, error: -1.51%
+ flywheel.pushValue(0.038527438996519) // Datapoint 28
+ testDeltaTime(flywheel, 0.056391331538715) // Values from Datapoint 16 are now passing through
+ testSpinningTime(flywheel, 1.661568094654947)
+ testAngularPosition(flywheel, 16.755160819145562)
+ testAngularVelocity(flywheel, 18.892749084779705) // Theoretical value: 18.92869798, error: -0.28%
+ testAngularAcceleration(flywheel, 13.819955339924142) // Theoretical value: 13.96940857, error: -1.35%
+ flywheel.pushValue(0.037812023914259) // Datapoint 29
+ testDeltaTime(flywheel, 0.054329670373632) // Values from Datapoint 17 are now passing through
+ testSpinningTime(flywheel, 1.715897765028579)
+ testAngularPosition(flywheel, 17.80235837034216)
+ testAngularVelocity(flywheel, 19.664430174199474) // Theoretical value: 19.69507697, error: -0.24%
+ testAngularAcceleration(flywheel, 14.154531841302834) // Theoretical value: 14.29478659, error: -1.23%
+ flywheel.pushValue(0.036747937394809) // Datapoint 30
+ testDeltaTime(flywheel, 0.052075392433679) // Values from Datapoint 18 are now passing through
+ testSpinningTime(flywheel, 1.767973157462258)
+ testAngularPosition(flywheel, 18.84955592153876)
+ testAngularVelocity(flywheel, 20.419916102229333) // Theoretical value: 20.44907989, error: -0.21%
+ testAngularAcceleration(flywheel, 14.474639639378996) // Theoretical value: 14.60783894, error: -1.13%
+ flywheel.pushValue(0.036130770419579) // Datapoint 31
+ testDeltaTime(flywheel, 0.05040009417797) // Values from Datapoint 19 are now passing through
+ testSpinningTime(flywheel, 1.818373251640228)
+ testAngularPosition(flywheel, 19.896753472735355)
+ testAngularVelocity(flywheel, 21.16654168342182) // Theoretical value: 21.19144586, error: -0.18%
+ testAngularAcceleration(flywheel, 14.782028789603949) // Theoretical value: 14.90963951, error: -1.05%
+})
+
+/**
+ * @todo Test behaviour with noise CEC filter active
+ */
+
+/**
+ * @description Test behaviour for a perfect, noise-free stroke
+ */
test('Correct Flywheel behaviour for a noisefree stroke', () => {
const flywheel = createFlywheel(baseConfig)
flywheel.maintainStateAndMetrics()
@@ -89,10 +775,10 @@ test('Correct Flywheel behaviour for a noisefree stroke', () => {
flywheel.pushValue(0.010386684)
testDeltaTime(flywheel, 0.011062297)
testSpinningTime(flywheel, 0.077918634)
- testAngularPosition(flywheel, 8.377580409572781)
- testAngularVelocity(flywheel, 94.77498684553687)
- testAngularAcceleration(flywheel, 28.980405331480235)
- testTorque(flywheel, 3.975932584148498)
+ testAngularPosition(flywheel, 7.330382858376184)
+ testAngularVelocity(flywheel, 94.88636656676766)
+ testAngularAcceleration(flywheel, 28.483961147946758)
+ testTorque(flywheel, 3.9270728759800413)
testDragFactor(flywheel, 0.00011)
testIsDwelling(flywheel, false)
testIsUnpowered(flywheel, false)
@@ -114,10 +800,10 @@ test('Correct Flywheel behaviour for a noisefree stroke', () => {
flywheel.pushValue(0.011209919)
testDeltaTime(flywheel, 0.010722165)
testSpinningTime(flywheel, 0.23894732900000007)
- testAngularPosition(flywheel, 24.085543677521745)
- testAngularVelocity(flywheel, 97.12541571421204)
- testAngularAcceleration(flywheel, -29.657604177526746)
- testTorque(flywheel, -2.0200308891605716)
+ testAngularPosition(flywheel, 23.03834612632515)
+ testAngularVelocity(flywheel, 97.06865123831865)
+ testAngularAcceleration(flywheel, -32.75873752642214)
+ testTorque(flywheel, -2.340970303119225)
testDragFactor(flywheel, 0.00011)
testIsDwelling(flywheel, false)
testIsUnpowered(flywheel, true)
@@ -139,27 +825,39 @@ test('Correct Flywheel behaviour for a noisefree stroke', () => {
flywheel.pushValue(0.021209919)
testDeltaTime(flywheel, 0.020722165)
testSpinningTime(flywheel, 0.43343548300000007)
- testAngularPosition(flywheel, 39.79350694547071)
- testAngularVelocity(flywheel, 50.85265548983507)
- testAngularAcceleration(flywheel, -159.89027501034317)
- testTorque(flywheel, -16.20022817082592)
+ testAngularPosition(flywheel, 38.746309394274114)
+ testAngularVelocity(flywheel, 50.975321595240146)
+ testAngularAcceleration(flywheel, -157.76768934416432)
+ testTorque(flywheel, -15.980015596092146)
testDragFactor(flywheel, 0.00011)
testIsDwelling(flywheel, true)
testIsUnpowered(flywheel, true)
testIsPowered(flywheel, false)
})
-// Test behaviour for noisy upgoing flank
+/**
+ * @todo Test behaviour for noisy upgoing flank
+ */
-// Test behaviour for noisy downgoing flank
+/**
+ * @todo Test behaviour for noisy downgoing flank
+ */
-// Test behaviour for noisy stroke
+/**
+ * @todo Test behaviour for noisy stroke
+ */
-// Test drag factor calculation
+/**
+ * @todo Test drag factor calculation
+ */
-// Test Dynamic stroke detection
+/**
+ * @todo Test Dynamic stroke detection
+ */
-// Test behaviour for not maintaining metrics
+/**
+ * @description Test behaviour for not maintaining metrics
+ */
test('Correct Flywheel behaviour at maintainStateOnly', () => {
const flywheel = createFlywheel(baseConfig)
flywheel.maintainStateAndMetrics()
@@ -230,6 +928,9 @@ test('Correct Flywheel behaviour at maintainStateOnly', () => {
testIsPowered(flywheel, false)
})
+/**
+ * @description Test behaviour for the WRX700
+ */
test('Correct Flywheel behaviour with a SportsTech WRX700', async () => {
const flywheel = createFlywheel(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700))
testSpinningTime(flywheel, 0)
@@ -239,14 +940,18 @@ test('Correct Flywheel behaviour with a SportsTech WRX700', async () => {
// Inject 16 strokes
await replayRowingSession(flywheel.pushValue, { filename: 'recordings/WRX700_2magnets.csv', realtime: false, loop: false })
+
testSpinningTime(flywheel, 46.302522627)
- testAngularPosition(flywheel, 741.4158662471912)
+ testAngularPosition(flywheel, 738.2742735936014)
testDragFactor(flywheel, (rowerProfiles.Sportstech_WRX700.dragFactor / 1000000))
})
+/**
+ * @description Test behaviour for the DKN R-320
+ */
test('Correct Flywheel behaviour with a DKN R-320', async () => {
const flywheel = createFlywheel(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.DKN_R320))
- testSpinningTime(flywheel, 0)
+
testAngularPosition(flywheel, 0)
testDragFactor(flywheel, (rowerProfiles.DKN_R320.dragFactor / 1000000))
flywheel.maintainStateAndMetrics()
@@ -255,11 +960,14 @@ test('Correct Flywheel behaviour with a DKN R-320', async () => {
await replayRowingSession(flywheel.pushValue, { filename: 'recordings/DKNR320.csv', realtime: false, loop: false })
testSpinningTime(flywheel, 22.249536391000003)
- testAngularPosition(flywheel, 496.37163926718733)
+ testAngularPosition(flywheel, 490.0884539600077)
// As dragfactor is static, it should remain the same
testDragFactor(flywheel, (rowerProfiles.DKN_R320.dragFactor / 1000000))
})
+/**
+ * @description Test behaviour for the NordicTrack RX800
+ */
test('Correct Flywheel behaviour with a NordicTrack RX800', async () => {
const flywheel = createFlywheel(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.NordicTrack_RX800))
testSpinningTime(flywheel, 0)
@@ -270,12 +978,15 @@ test('Correct Flywheel behaviour with a NordicTrack RX800', async () => {
// Inject 10 strokes
await replayRowingSession(flywheel.pushValue, { filename: 'recordings/RX800.csv', realtime: false, loop: false })
- testSpinningTime(flywheel, 22.612226401999987)
- testAngularPosition(flywheel, 1443.5618243245099)
+ testSpinningTime(flywheel, 22.721492397999985)
+ testAngularPosition(flywheel, 1448.2742133048946)
// As we don't detect strokes here (this is a function of Rower.js, the dragcalculation shouldn't be triggered
testDragFactor(flywheel, (rowerProfiles.NordicTrack_RX800.dragFactor / 1000000))
})
+/**
+ * @description Test behaviour for a full session on the SportsTech WRX700
+ */
test('Correct Flywheel behaviour with a full session on a SportsTech WRX700', async () => {
const flywheel = createFlywheel(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700))
testSpinningTime(flywheel, 0)
@@ -285,12 +996,16 @@ test('Correct Flywheel behaviour with a full session on a SportsTech WRX700', as
// Inject 846 strokes
await replayRowingSession(flywheel.pushValue, { filename: 'recordings/WRX700_2magnets_session.csv', realtime: false, loop: false })
+
testSpinningTime(flywheel, 2340.0100514160117)
- testAngularPosition(flywheel, 37325.26231730033)
+ testAngularPosition(flywheel, 37322.120724646746)
// The dragfactor should remain static
testDragFactor(flywheel, (rowerProfiles.Sportstech_WRX700.dragFactor / 1000000))
})
+/**
+ * @description Test behaviour for the C2 Model C
+ */
test('A full session for a Concept2 Model C should produce plausible results', async () => {
const flywheel = createFlywheel(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_Model_C))
testSpinningTime(flywheel, 0)
@@ -301,11 +1016,14 @@ test('A full session for a Concept2 Model C should produce plausible results', a
await replayRowingSession(flywheel.pushValue, { filename: 'recordings/Concept2_Model_C.csv', realtime: false, loop: false })
testSpinningTime(flywheel, 181.47141999999985)
- testAngularPosition(flywheel, 15636.753834467596)
+ testAngularPosition(flywheel, 15634.659439365203)
// As we don't detect strokes here (this is a function of Rower.js, the dragcalculation shouldn't be triggered
testDragFactor(flywheel, (rowerProfiles.Concept2_Model_C.dragFactor / 1000000))
})
+/**
+ * @description Test behaviour for the C2 RowErg
+ */
test('A full session for a Concept2 RowErg should produce plausible results', async () => {
const flywheel = createFlywheel(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_RowErg))
testSpinningTime(flywheel, 0)
@@ -316,12 +1034,14 @@ test('A full session for a Concept2 RowErg should produce plausible results', as
await replayRowingSession(flywheel.pushValue, { filename: 'recordings/Concept2_RowErg_Session_2000meters.csv', realtime: false, loop: false })
testSpinningTime(flywheel, 591.0432650000008)
- testAngularPosition(flywheel, 65961.92655232249)
+ testAngularPosition(flywheel, 65960.87935477128)
// As we don't detect strokes here (this is a function of Rower.js, the dragcalculation shouldn't be triggered
testDragFactor(flywheel, (rowerProfiles.Concept2_RowErg.dragFactor / 1000000))
})
-// Test behaviour after reset
+/**
+ * @todo Test behaviour after reset
+ */
function testDeltaTime (flywheel, expectedValue) {
assert.ok(flywheel.deltaTime() === expectedValue, `deltaTime should be ${expectedValue} sec at ${flywheel.spinningTime()} sec, is ${flywheel.deltaTime()}`)
diff --git a/app/engine/Rower.js b/app/engine/Rower.js
index 6857fb7f55..8f37cd68ee 100644
--- a/app/engine/Rower.js
+++ b/app/engine/Rower.js
@@ -1,15 +1,16 @@
'use strict'
/*
Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-
- The Rowing Engine models the physics of a real rowing boat.
- It takes impulses from the flywheel of a rowing machine and estimates
- parameters such as energy, stroke rates and movement.
-
- This implementation uses concepts that are described here:
- Physics of Rowing by Anu Dudhia: http://eodg.atm.ox.ac.uk/user/dudhia/rowing/physics
- Also Dave Vernooy has some good explanations here: https://dvernooy.github.io/projects/ergware
*/
+/**
+ * @file The Rowing Engine models the physics of a real rowing boat. It takes impulses from the flywheel of a rowing machine
+ * and calculates parameters such as work, stroke rates and linear movement.
+ *
+ * This implementation uses concepts that are described here:
+ * - @see {@link https://github.com/JaapvanEkris/openrowingmonitor/blob/main/docs/physics_openrowingmonitor.md#relevant-linear-metrics|the description of our underlying physics model}
+ * - @see {@link https://eodg.atm.ox.ac.uk/user/dudhia/rowing/physics/ergometer.html|Physics of Ergometers by Anu Dudhia}
+ * - @see {@link https://dvernooy.github.io/projects/ergware|Dave Vernooy's good explanation of the physics involved}
+ */
/* eslint-disable max-lines -- There is a lot of state machine dependent math going on here. Hard to keep short while maintaining readability */
import loglevel from 'loglevel'
import { createFlywheel } from './Flywheel.js'
@@ -29,6 +30,8 @@ export function createRower (rowerSettings) {
let _recoveryDuration
let drivePhaseStartTime = 0.0
let _driveDuration
+ let drivePhaseStartFlywheelWork = 0.0
+ let _driveFlywheelWork = 0.0
let drivePhaseStartAngularPosition = 0.0
let drivePhaseAngularDisplacement = 0.0
let _driveLinearDistance = 0.0
@@ -157,6 +160,8 @@ export function createRower (rowerSettings) {
// Here, we conclude the Drive Phase
// The FSM guarantees that we have a credible driveDuration and cycletime in normal operation, but NOT at the start
_driveDuration = flywheel.spinningTime() - drivePhaseStartTime
+ _driveFlywheelWork = flywheel.totalWork() - drivePhaseStartFlywheelWork
+ drivePhaseStartFlywheelWork = flywheel.totalWork()
drivePhaseAngularDisplacement = flywheel.angularPosition() - drivePhaseStartAngularPosition
_driveLength = drivePhaseAngularDisplacement * sprocketRadius
_driveLinearDistance = calculateLinearDistance(drivePhaseAngularDisplacement, _driveDuration)
@@ -253,6 +258,10 @@ export function createRower (rowerSettings) {
return flywheel.spinningTime()
}
+ function totalFlywheelWorkSinceStart () {
+ return flywheel.totalWork()
+ }
+
function driveLastStartTime () {
return drivePhaseStartTime
}
@@ -316,6 +325,14 @@ export function createRower (rowerSettings) {
}
}
+ function driveFlywheelWork () {
+ if (_driveDuration >= rowerSettings.minimumDriveTime) {
+ return _driveFlywheelWork
+ } else {
+ return undefined
+ }
+ }
+
function driveAverageHandleForce () {
if (_driveDuration >= rowerSettings.minimumDriveTime) {
return driveHandleForce.average()
@@ -410,6 +427,8 @@ export function createRower (rowerSettings) {
drivePhaseStartTime = 0.0
drivePhaseStartAngularPosition = 0.0
_driveDuration = 0.0
+ drivePhaseStartFlywheelWork = 0.0
+ _driveFlywheelWork = 0.0
drivePhaseAngularDisplacement = 0.0
_driveLinearDistance = 0.0
recoveryPhaseStartTime = 0.0
@@ -435,6 +454,7 @@ export function createRower (rowerSettings) {
driveLastStartTime,
totalMovingTimeSinceStart,
totalLinearDistanceSinceStart,
+ totalFlywheelWorkSinceStart,
cycleDuration,
cycleLinearDistance,
cycleLinearVelocity,
@@ -442,6 +462,7 @@ export function createRower (rowerSettings) {
driveDuration,
driveLinearDistance,
driveLength,
+ driveFlywheelWork,
driveAverageHandleForce,
drivePeakHandleForce,
driveHandleForceCurve,
diff --git a/app/engine/Rower.test.js b/app/engine/Rower.test.js
index 938d255988..3d0813594a 100644
--- a/app/engine/Rower.test.js
+++ b/app/engine/Rower.test.js
@@ -1,9 +1,8 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
/**
- * This test is a test of the Rower object, that tests wether this object fills all fields correctly, given one validated rower, (the
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This test is a test of the Rower object, that tests whether this object fills all fields correctly, given one validated rower, (the
* Concept2 RowErg) using a validated cycle of strokes. This thoroughly tests the raw physics of the translation of Angular physics
* to Linear physics. The combination with all possible known rowers is tested when testing the above function RowingStatistics, as
* these statistics are dependent on these settings as well.
@@ -27,7 +26,8 @@ const baseConfig = { // Based on Concept 2 settings, as this is the validation s
minimumTimeBetweenImpulses: 0.005,
maximumTimeBetweenImpulses: 0.017,
flankLength: 12,
- smoothing: 1,
+ systematicErrorAgressiveness: 0,
+ systematicErrorMaximumChange: 1,
minimumStrokeQuality: 0.36,
minimumForceBeforeStroke: 20, // Modification to standard settings to shorten test cases
minimumRecoverySlope: 0.00070,
@@ -39,7 +39,9 @@ const baseConfig = { // Based on Concept 2 settings, as this is the validation s
magicConstant: 2.8
}
-// Test behaviour for no datapoints
+/**
+ * @description Test behaviour for no datapoints
+ */
test('Correct rower behaviour at initialisation', () => {
const rower = createRower(baseConfig)
testStrokeState(rower, 'WaitingForDrive')
@@ -60,9 +62,13 @@ test('Correct rower behaviour at initialisation', () => {
testInstantHandlePower(rower, 0)
})
-// Test behaviour for one datapoint
+/**
+ * @todo Test behaviour for one datapoint
+ */
-// Test behaviour for three perfect identical strokes, including settingling behaviour of metrics
+/**
+ * @description Test behaviour for three perfect identical strokes, including settling behaviour of metrics
+ */
test('Test behaviour for three perfect identical strokes, including settingling behaviour of metrics', () => {
const rower = createRower(baseConfig)
testStrokeState(rower, 'WaitingForDrive')
@@ -103,7 +109,7 @@ test('Test behaviour for three perfect identical strokes, including settingling
rower.handleRotationImpulse(0.010386684)
testStrokeState(rower, 'Drive')
testTotalMovingTimeSinceStart(rower, 0.077918634)
- testTotalLinearDistanceSinceStart(rower, 0.2491943602992768)
+ testTotalLinearDistanceSinceStart(rower, 0.2135951659708087)
testTotalNumberOfStrokes(rower, 1)
testCycleDuration(rower, undefined) // still default value
testCycleLinearDistance(rower, undefined)
@@ -116,7 +122,7 @@ test('Test behaviour for three perfect identical strokes, including settingling
testDrivePeakHandleForce(rower, undefined)
testRecoveryDuration(rower, undefined)
testRecoveryDragFactor(rower, undefined)
- testInstantHandlePower(rower, 372.09477620281604)
+ testInstantHandlePower(rower, 367.97696436918955)
// Recovery initial stroke starts here
rower.handleRotationImpulse(0.010769)
rower.handleRotationImpulse(0.010707554)
@@ -135,17 +141,17 @@ test('Test behaviour for three perfect identical strokes, including settingling
rower.handleRotationImpulse(0.011209919)
testStrokeState(rower, 'Recovery')
testTotalMovingTimeSinceStart(rower, 0.23894732900000007)
- testTotalLinearDistanceSinceStart(rower, 0.7831822752262985)
+ testTotalLinearDistanceSinceStart(rower, 0.7475830808978304)
testTotalNumberOfStrokes(rower, 1)
testCycleDuration(rower, undefined)
testCycleLinearDistance(rower, undefined)
testCycleLinearVelocity(rower, undefined)
testCyclePower(rower, undefined)
testDriveDuration(rower, 0.143485717)
- testDriveLinearDistance(rower, 0.46278952627008546)
- testDriveLength(rower, 0.19058995431778075)
- testDriveAverageHandleForce(rower, 276.20193475035796)
- testDrivePeakHandleForce(rower, 325.1619554833936)
+ testDriveLinearDistance(rower, 0.4271903319416174)
+ testDriveLength(rower, 0.1759291886010284)
+ testDriveAverageHandleForce(rower, 276.6342676838739)
+ testDrivePeakHandleForce(rower, 332.99182222129025)
testRecoveryDuration(rower, undefined)
testRecoveryDragFactor(rower, undefined)
testInstantHandlePower(rower, 0)
@@ -171,20 +177,20 @@ test('Test behaviour for three perfect identical strokes, including settingling
rower.handleRotationImpulse(0.010386684)
testStrokeState(rower, 'Drive')
testTotalMovingTimeSinceStart(rower, 0.44915539800000004)
- testTotalLinearDistanceSinceStart(rower, 1.828822466846578)
+ testTotalLinearDistanceSinceStart(rower, 1.790756456114694)
testTotalNumberOfStrokes(rower, 2)
- testCycleDuration(rower, 0.34889498300000005)
- testCycleLinearDistance(rower, 1.3660329405764926)
- testCycleLinearVelocity(rower, 4.474643028948317)
- testCyclePower(rower, 250.86103806520188)
+ testCycleDuration(rower, 0.36002684500000004)
+ testCycleLinearDistance(rower, 1.3635661241730768)
+ testCycleLinearVelocity(rower, 4.328458575829895)
+ testCyclePower(rower, 227.0689899451657)
testDriveDuration(rower, 0.143485717)
- testDriveLinearDistance(rower, 0.43908201661387253)
- testDriveLength(rower, 0.19058995431778075)
- testDriveAverageHandleForce(rower, 236.59556700196183)
- testDrivePeakHandleForce(rower, 380.1396336099103)
- testRecoveryDuration(rower, 0.20540926600000003)
- testRecoveryDragFactor(rower, 283.12720365097886)
- testInstantHandlePower(rower, 504.63602120716615)
+ testDriveLinearDistance(rower, 0.3895903211923076)
+ testDriveLength(rower, 0.1759291886010284)
+ testDriveAverageHandleForce(rower, 236.92279327988305)
+ testDrivePeakHandleForce(rower, 378.60223820258005)
+ testRecoveryDuration(rower, 0.21654112800000003)
+ testRecoveryDragFactor(rower, 281.5961372923874)
+ testInstantHandlePower(rower, 502.7377823299629)
// Recovery second stroke starts here
rower.handleRotationImpulse(0.010769)
rower.handleRotationImpulse(0.010707554)
@@ -203,19 +209,19 @@ test('Test behaviour for three perfect identical strokes, including settingling
rower.handleRotationImpulse(0.011209919)
testStrokeState(rower, 'Recovery')
testTotalMovingTimeSinceStart(rower, 0.6101840930000001)
- testTotalLinearDistanceSinceStart(rower, 2.5606258278697)
+ testTotalLinearDistanceSinceStart(rower, 2.521238308350271)
testTotalNumberOfStrokes(rower, 2)
testCycleDuration(rower, 0.44526865700000007)
- testCycleLinearDistance(rower, 1.1708853776369939)
- testCycleLinearVelocity(rower, 4.492259872066099)
- testCyclePower(rower, 253.83566752220193)
- testDriveDuration(rower, 0.23985939100000003)
- testDriveLinearDistance(rower, 1.0733115961672441)
- testDriveLength(rower, 0.322536845768552)
- testDriveAverageHandleForce(rower, 285.0923064376231)
- testDrivePeakHandleForce(rower, 439.7407274840117)
- testRecoveryDuration(rower, 0.20540926600000003)
- testRecoveryDragFactor(rower, 283.12720365097886) // As we decelerate the flywheel quite fast, this is expected
+ testCycleLinearDistance(rower, 1.1200721734278847)
+ testCycleLinearVelocity(rower, 4.484147636986217)
+ testCyclePower(rower, 252.46300093932936)
+ testDriveDuration(rower, 0.22872752900000004)
+ testDriveLinearDistance(rower, 1.0226745931298076)
+ testDriveLength(rower, 0.3078760800517996)
+ testDriveAverageHandleForce(rower, 288.45140756259053)
+ testDrivePeakHandleForce(rower, 447.108514349131)
+ testRecoveryDuration(rower, 0.21654112800000003)
+ testRecoveryDragFactor(rower, 281.5961372923874)
testInstantHandlePower(rower, 0)
// Drive third stroke starts here
rower.handleRotationImpulse(0.011221636)
@@ -239,20 +245,20 @@ test('Test behaviour for three perfect identical strokes, including settingling
rower.handleRotationImpulse(0.010386684)
testStrokeState(rower, 'Drive')
testTotalMovingTimeSinceStart(rower, 0.8203921620000004)
- testTotalLinearDistanceSinceStart(rower, 3.4875767518323193)
+ testTotalLinearDistanceSinceStart(rower, 3.4465153211820017)
testTotalNumberOfStrokes(rower, 3)
- testCycleDuration(rower, 0.3379838680000002)
- testCycleLinearDistance(rower, 1.0245247054323694)
- testCycleLinearVelocity(rower, 4.4747508859834575)
- testCyclePower(rower, 250.8791788061379)
- testDriveDuration(rower, 0.23985939100000003)
- testDriveLinearDistance(rower, 0.5854426888184969)
- testDriveLength(rower, 0.322536845768552)
- testDriveAverageHandleForce(rower, 194.28476369698888)
- testDrivePeakHandleForce(rower, 380.1396336085015)
+ testCycleDuration(rower, 0.3268520060000002)
+ testCycleLinearDistance(rower, 1.0226745931298078)
+ testCycleLinearVelocity(rower, 4.469801860329268)
+ testCyclePower(rower, 250.04769019893303)
+ testDriveDuration(rower, 0.22872752900000004)
+ testDriveLinearDistance(rower, 0.5843854817884615)
+ testDriveLength(rower, 0.3078760800517996)
+ testDriveAverageHandleForce(rower, 192.2653879294337)
+ testDrivePeakHandleForce(rower, 378.6022382039591)
testRecoveryDuration(rower, 0.09812447700000015)
- testRecoveryDragFactor(rower, 283.12720365097886)
- testInstantHandlePower(rower, 504.63602120535336)
+ testRecoveryDragFactor(rower, 281.5961372923874)
+ testInstantHandlePower(rower, 502.73778233173203)
// Recovery third stroke starts here
rower.handleRotationImpulse(0.010769)
rower.handleRotationImpulse(0.010707554)
@@ -271,19 +277,19 @@ test('Test behaviour for three perfect identical strokes, including settingling
rower.handleRotationImpulse(0.011209919)
testStrokeState(rower, 'Recovery')
testTotalMovingTimeSinceStart(rower, 0.9814208570000005)
- testTotalLinearDistanceSinceStart(rower, 4.219380112855441)
+ testTotalLinearDistanceSinceStart(rower, 4.176997173417578)
testTotalNumberOfStrokes(rower, 3)
testCycleDuration(rower, 0.3712367640000004)
- testCycleLinearDistance(rower, 1.3172460498416183)
- testCycleLinearVelocity(rower, 4.46818431211662)
- testCyclePower(rower, 249.77632391313173)
+ testCycleLinearDistance(rower, 1.314867334024038)
+ testCycleLinearVelocity(rower, 4.4601155532842265)
+ testCyclePower(rower, 248.42560903379598)
testDriveDuration(rower, 0.27311228700000023)
- testDriveLinearDistance(rower, 1.2196722683718688)
- testDriveLength(rower, 0.3665191429188092)
- testDriveAverageHandleForce(rower, 254.91449219500532)
- testDrivePeakHandleForce(rower, 439.74072748282515)
+ testDriveLinearDistance(rower, 1.2174697537259611)
+ testDriveLength(rower, 0.36651914291880905)
+ testDriveAverageHandleForce(rower, 256.5447026931294)
+ testDrivePeakHandleForce(rower, 447.1085143512751)
testRecoveryDuration(rower, 0.09812447700000015)
- testRecoveryDragFactor(rower, 283.12720365097886)
+ testRecoveryDragFactor(rower, 281.5961372923874)
testInstantHandlePower(rower, 0)
// Dwelling state starts here
rower.handleRotationImpulse(0.020769)
@@ -304,41 +310,36 @@ test('Test behaviour for three perfect identical strokes, including settingling
testStrokeState(rower, 'WaitingForDrive')
testTotalMovingTimeSinceStart(rower, 1.1344792920000004)
testTotalNumberOfStrokes(rower, 3)
- testTotalLinearDistanceSinceStart(rower, 4.8536096924088135)
+ testTotalLinearDistanceSinceStart(rower, 4.810081445355078)
testCycleDuration(rower, 0.4476004410000002)
- testCycleLinearDistance(rower, 1.9514756293949902)
- testCycleLinearVelocity(rower, 4.359860828186694)
- testCyclePower(rower, 232.0469744651364)
+ testCycleLinearDistance(rower, 1.947951605961538)
+ testCycleLinearVelocity(rower, 4.35198768260717)
+ testCyclePower(rower, 230.79213454994928)
testDriveDuration(rower, 0.27311228700000023)
- testDriveLinearDistance(rower, 1.2196722683718688)
- testDriveLength(rower, 0.3665191429188092)
- testDriveAverageHandleForce(rower, 254.91449219500532)
- testDrivePeakHandleForce(rower, 439.74072748282515)
+ testDriveLinearDistance(rower, 1.2174697537259611)
+ testDriveLength(rower, 0.36651914291880905)
+ testDriveAverageHandleForce(rower, 256.5447026931294)
+ testDrivePeakHandleForce(rower, 447.1085143512751)
testRecoveryDuration(rower, 0.17448815399999995)
- testRecoveryDragFactor(rower, 283.12720365097886)
+ testRecoveryDragFactor(rower, 281.5961372923874)
testInstantHandlePower(rower, 0)
})
-// Test behaviour for noisy upgoing flank
-
-// Test behaviour for noisy downgoing flank
-
-// Test behaviour for noisy stroke
-
-// Test behaviour after reset
-
-// Test behaviour for one datapoint
-
-// Test behaviour for noisy stroke
-
-// Test drag factor calculation
-
-// Test Dynamic stroke detection
+/**
+ * @todo Test behaviour for noisy stroke
+ */
-// Test behaviour after reset
+/**
+ * @todo Test behaviour after reset
+ */
-// Test behaviour with real-life data
+/**
+ * @todo Test drag factor calculation
+ */
+/**
+ * @description Test behaviour for the Sportstech WRX700
+ */
test('sample data for Sportstech WRX700 should produce plausible results', async () => {
const rower = createRower(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700))
testTotalMovingTimeSinceStart(rower, 0)
@@ -349,12 +350,15 @@ test('sample data for Sportstech WRX700 should produce plausible results', async
await replayRowingSession(rower.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets.csv', realtime: false, loop: false })
testTotalMovingTimeSinceStart(rower, 46.302522627)
- testTotalLinearDistanceSinceStart(rower, 166.29596716416734)
+ testTotalLinearDistanceSinceStart(rower, 165.58832475070278)
testTotalNumberOfStrokes(rower, 16)
// As dragFactor is static, it should remain in place
testRecoveryDragFactor(rower, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the DKN R-320
+ */
test('sample data for DKN R-320 should produce plausible results', async () => {
const rower = createRower(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.DKN_R320))
testTotalMovingTimeSinceStart(rower, 0)
@@ -366,12 +370,15 @@ test('sample data for DKN R-320 should produce plausible results', async () => {
await replayRowingSession(rower.handleRotationImpulse, { filename: 'recordings/DKNR320.csv', realtime: false, loop: false })
testTotalMovingTimeSinceStart(rower, 21.701535821)
- testTotalLinearDistanceSinceStart(rower, 70.11298001986664)
+ testTotalLinearDistanceSinceStart(rower, 69.20242183779045)
testTotalNumberOfStrokes(rower, 10)
// As dragFactor is static, it should remain in place
testRecoveryDragFactor(rower, rowerProfiles.DKN_R320.dragFactor)
})
+/**
+ * @description Test behaviour for the NordicTrack RX800
+ */
test('sample data for NordicTrack RX800 should produce plausible results', async () => {
const rower = createRower(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.NordicTrack_RX800))
testTotalMovingTimeSinceStart(rower, 0)
@@ -381,13 +388,16 @@ test('sample data for NordicTrack RX800 should produce plausible results', async
await replayRowingSession(rower.handleRotationImpulse, { filename: 'recordings/RX800.csv', realtime: false, loop: false })
- testTotalMovingTimeSinceStart(rower, 22.259092749999997)
- testTotalLinearDistanceSinceStart(rower, 80.49260485116434)
+ testTotalMovingTimeSinceStart(rower, 22.368358745999995)
+ testTotalLinearDistanceSinceStart(rower, 80.8365747440095)
testTotalNumberOfStrokes(rower, 10)
// As dragFactor is dynamic, it should have changed
- testRecoveryDragFactor(rower, 491.1395313462149)
+ testRecoveryDragFactor(rower, 493.8082148322739)
})
+/**
+ * @description Test behaviour for the SportsTech WRX700 in a full session
+ */
test('A full session for SportsTech WRX700 should produce plausible results', async () => {
const rower = createRower(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700))
testTotalMovingTimeSinceStart(rower, 0)
@@ -398,12 +408,15 @@ test('A full session for SportsTech WRX700 should produce plausible results', as
await replayRowingSession(rower.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets_session.csv', realtime: false, loop: false })
testTotalMovingTimeSinceStart(rower, 2340.0100514160117)
- testTotalLinearDistanceSinceStart(rower, 8406.791871958883)
+ testTotalLinearDistanceSinceStart(rower, 8406.084229545408)
testTotalNumberOfStrokes(rower, 846)
// As dragFactor is static, it should remain in place
testRecoveryDragFactor(rower, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the C2 Model C
+ */
test('A full session for a Concept2 Model C should produce plausible results', async () => {
const rower = createRower(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_Model_C))
testTotalMovingTimeSinceStart(rower, 0)
@@ -414,12 +427,15 @@ test('A full session for a Concept2 Model C should produce plausible results', a
await replayRowingSession(rower.handleRotationImpulse, { filename: 'recordings/Concept2_Model_C.csv', realtime: false, loop: false })
testTotalMovingTimeSinceStart(rower, 181.47141999999985)
- testTotalLinearDistanceSinceStart(rower, 552.0863658667265)
- testTotalNumberOfStrokes(rower, 84)
+ testTotalLinearDistanceSinceStart(rower, 552.2056895088467)
+ testTotalNumberOfStrokes(rower, 83)
// As dragFactor isn't static, it should have changed
- testRecoveryDragFactor(rower, 123.82587294279575)
+ testRecoveryDragFactor(rower, 123.64632740545646)
})
+/**
+ * @description Test behaviour for the C2 RowErg
+ */
test('A full session for a Concept2 RowErg should produce plausible results', async () => {
const rower = createRower(deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_RowErg))
testTotalMovingTimeSinceStart(rower, 0)
@@ -429,11 +445,11 @@ test('A full session for a Concept2 RowErg should produce plausible results', as
await replayRowingSession(rower.handleRotationImpulse, { filename: 'recordings/Concept2_RowErg_Session_2000meters.csv', realtime: false, loop: false })
- testTotalMovingTimeSinceStart(rower, 590.111937)
- testTotalLinearDistanceSinceStart(rower, 2027.493082238415)
+ testTotalMovingTimeSinceStart(rower, 590.0231672202852)
+ testTotalLinearDistanceSinceStart(rower, 2027.8388877679706)
testTotalNumberOfStrokes(rower, 206)
// As dragFactor isn't static, it should have changed
- testRecoveryDragFactor(rower, 80.60573080009686)
+ testRecoveryDragFactor(rower, 80.70871681344696)
})
function testStrokeState (rower, expectedValue) {
diff --git a/app/engine/RowingStatistics.js b/app/engine/RowingStatistics.js
index 44ff977070..a443d0b0c6 100644
--- a/app/engine/RowingStatistics.js
+++ b/app/engine/RowingStatistics.js
@@ -3,10 +3,11 @@
Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
*/
/**
- * This Module creates a persistent, consistent and user presentable set of metrics.
+ * @file This Module creates a persistent, consistent and user presentable set of metrics.
+ * @see {@link https://github.com/JaapvanEkris/openrowingmonitor/blob/main/docs/Architecture.md#rowingstatisticsjs|the architecture description}
*/
import { createRower } from './Rower.js'
-import { createOLSLinearSeries } from './utils/OLSLinearSeries.js'
+import { createWLSLinearSeries } from './utils/WLSLinearSeries.js'
import { createStreamFilter } from './utils/StreamFilter.js'
import { createCurveAligner } from './utils/CurveAligner.js'
@@ -29,8 +30,9 @@ export function createRowingStatistics (config) {
let totalNumberOfStrokes = -1
let driveLastStartTime = 0
let strokeCalories = 0
+ let totalCalories = 0
let strokeWork = 0
- const calories = createOLSLinearSeries()
+ const calories = createWLSLinearSeries()
const driveDuration = createStreamFilter(halfNumOfDataPointsForAveraging, undefined)
const driveLength = createStreamFilter(halfNumOfDataPointsForAveraging, undefined)
const driveDistance = createStreamFilter(halfNumOfDataPointsForAveraging, undefined)
@@ -180,6 +182,7 @@ export function createRowingStatistics (config) {
totalMovingTime = rower.totalMovingTimeSinceStart()
totalLinearDistance = rower.totalLinearDistanceSinceStart()
instantPower = rower.instantHandlePower()
+ totalCalories = ((4 * rower.totalFlywheelWorkSinceStart()) + (350 * rower.totalMovingTimeSinceStart())) / 4200
}
function updateCycleMetrics () {
@@ -204,6 +207,12 @@ export function createRowingStatistics (config) {
driveHandleForceCurve.push(rower.driveHandleForceCurve())
driveHandleVelocityCurve.push(rower.driveHandleVelocityCurve())
driveHandlePowerCurve.push(rower.driveHandlePowerCurve())
+ // based on: http://eodg.atm.ox.ac.uk/user/dudhia/rowing/physics/ergometer.html#section11
+ strokeWork = rower.driveFlywheelWork()
+ strokeCalories = ((4 * rower.driveFlywheelWork()) + (350 * cycleDuration.clean())) / 4200
+ if (cyclePower.reliable() && cycleDuration.reliable()) {
+ calories.push(totalMovingTime, totalCalories, 1)
+ }
}
}
@@ -221,12 +230,7 @@ export function createRowingStatistics (config) {
}
if (cyclePower.reliable() && cycleDuration.reliable()) {
- // ToDo: see if this can be made part of the continuousmatrcs as Garmin and Concept2 also have a 'calories' type of training
- // based on: http://eodg.atm.ox.ac.uk/user/dudhia/rowing/physics/ergometer.html#section11
- strokeCalories = (4 * cyclePower.clean() + 350) * (cycleDuration.clean()) / 4200
- strokeWork = cyclePower.clean() * cycleDuration.clean()
- const totalCalories = calories.Y.atSeriesEnd() + strokeCalories
- calories.push(totalMovingTime, totalCalories)
+ calories.push(totalMovingTime, totalCalories, 1)
}
}
@@ -241,7 +245,7 @@ export function createRowingStatistics (config) {
totalLinearDistance: totalLinearDistance > 0 ? totalLinearDistance : 0, // meters
strokeCalories: strokeCalories > 0 ? strokeCalories : 0, // kCal
strokeWork: strokeWork > 0 ? strokeWork : 0, // Joules
- totalCalories: calories.Y.atSeriesEnd() > 0 ? calories.Y.atSeriesEnd() : 0, // kcal
+ totalCalories: totalCalories > 0 ? totalCalories : 0, // kcal
totalCaloriesPerMinute: totalMovingTime > 60 ? caloriesPerPeriod(totalMovingTime - 60, totalMovingTime) : caloriesPerPeriod(0, 60),
totalCaloriesPerHour: totalMovingTime > 3600 ? caloriesPerPeriod(totalMovingTime - 3600, totalMovingTime) : caloriesPerPeriod(0, 3600),
cycleDuration: cycleDuration.reliable() && cycleDuration.clean() > minimumStrokeTime && cycleDuration.clean() < maximumStrokeTime && cycleLinearVelocity.raw() > 0 && totalNumberOfStrokes > 0 && metricsContext.isMoving === true ? cycleDuration.clean() : undefined, // seconds
diff --git a/app/engine/RowingStatistics.test.js b/app/engine/RowingStatistics.test.js
index 7a39617b4c..2a45e65ebc 100644
--- a/app/engine/RowingStatistics.test.js
+++ b/app/engine/RowingStatistics.test.js
@@ -1,14 +1,10 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
/**
- * This test is a test of the Rower object, that tests wether this object fills all fields correctly, given one validated rower, (the
- * Concept2 RowErg) using a validated cycle of strokes. This thoroughly tests the raw physics of the translation of Angular physics
- * to Linear physics. The combination with all possible known rowers is tested when testing the above function RowingStatistics, as
- * these statistics are dependent on these settings as well.
-*/
-// ToDo: test the effects of smoothing parameters
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This test is a test of the RowingStatistics object, that tests whether this object fills all fields correctly, given one validated rower, (the
+ * Concept2 RowErg) using a validated cycle of strokes. The combination with all possible known rowers is tested.
+ */
import { test } from 'uvu'
import * as assert from 'uvu/assert'
import rowerProfiles from '../../config/rowerProfiles.js'
@@ -34,7 +30,8 @@ const baseConfig = {
minimumTimeBetweenImpulses: 0.005,
maximumTimeBetweenImpulses: 0.017,
flankLength: 12,
- smoothing: 1,
+ systematicErrorAgressiveness: 0,
+ systematicErrorMaximumChange: 1,
minimumStrokeQuality: 0.36,
minimumForceBeforeStroke: 20, // Modification to standard settings to shorten test cases
minimumRecoverySlope: 0.00070,
@@ -47,7 +44,9 @@ const baseConfig = {
}
}
-// Test behaviour for no datapoints
+/**
+ * @description Test behaviour for no datapoints
+ */
test('Correct rower behaviour at initialisation', () => {
const rowingStatistics = createRowingStatistics(baseConfig)
testStrokeState(rowingStatistics, 'WaitingForDrive')
@@ -68,9 +67,13 @@ test('Correct rower behaviour at initialisation', () => {
testInstantHandlePower(rowingStatistics, undefined)
})
-// Test behaviour for one datapoint
+/**
+ * @todo Test behaviour for one datapoint
+ */
-// Test behaviour for three perfect identical strokes, including settingling behaviour of metrics
+/**
+ * @description Test behaviour for three perfect identical strokes, including settling behaviour of metrics
+ */
test('Test behaviour for three perfect identical strokes, including settingling behaviour of metrics', () => {
const rowingStatistics = createRowingStatistics(baseConfig)
testStrokeState(rowingStatistics, 'WaitingForDrive')
@@ -111,7 +114,7 @@ test('Test behaviour for three perfect identical strokes, including settingling
rowingStatistics.handleRotationImpulse(0.010386684)
testStrokeState(rowingStatistics, 'Drive')
testTotalMovingTime(rowingStatistics, 0.077918634)
- testTotalLinearDistance(rowingStatistics, 0.2491943602992768)
+ testTotalLinearDistance(rowingStatistics, 0.2135951659708087)
testTotalNumberOfStrokes(rowingStatistics, 0)
testCycleDuration(rowingStatistics, undefined) // still default value
testCycleDistance(rowingStatistics, undefined)
@@ -143,17 +146,17 @@ test('Test behaviour for three perfect identical strokes, including settingling
rowingStatistics.handleRotationImpulse(0.011209919)
testStrokeState(rowingStatistics, 'Recovery')
testTotalMovingTime(rowingStatistics, 0.23894732900000007)
- testTotalLinearDistance(rowingStatistics, 0.7831822752262985)
+ testTotalLinearDistance(rowingStatistics, 0.7475830808978304)
testTotalNumberOfStrokes(rowingStatistics, 0)
testCycleDuration(rowingStatistics, undefined)
testCycleDistance(rowingStatistics, undefined)
testCycleLinearVelocity(rowingStatistics, undefined)
testCyclePower(rowingStatistics, undefined)
testDriveDuration(rowingStatistics, undefined)
- testDriveDistance(rowingStatistics, 0.46278952627008546)
- testDriveLength(rowingStatistics, 0.19058995431778075)
- testDriveAverageHandleForce(rowingStatistics, 276.20193475035796)
- testDrivePeakHandleForce(rowingStatistics, 325.1619554833936)
+ testDriveDistance(rowingStatistics, 0.4271903319416174)
+ testDriveLength(rowingStatistics, 0.1759291886010284)
+ testDriveAverageHandleForce(rowingStatistics, 276.6342676838739)
+ testDrivePeakHandleForce(rowingStatistics, 332.99182222129025)
testRecoveryDuration(rowingStatistics, undefined)
testDragFactor(rowingStatistics, undefined)
testInstantHandlePower(rowingStatistics, undefined)
@@ -179,19 +182,19 @@ test('Test behaviour for three perfect identical strokes, including settingling
rowingStatistics.handleRotationImpulse(0.010386684)
testStrokeState(rowingStatistics, 'Drive')
testTotalMovingTime(rowingStatistics, 0.44915539800000004)
- testTotalLinearDistance(rowingStatistics, 1.828822466846578)
+ testTotalLinearDistance(rowingStatistics, 1.790756456114694)
testTotalNumberOfStrokes(rowingStatistics, 1)
testCycleDuration(rowingStatistics, undefined)
testCycleDistance(rowingStatistics, undefined)
testCycleLinearVelocity(rowingStatistics, undefined)
testCyclePower(rowingStatistics, undefined)
testDriveDuration(rowingStatistics, 0.143485717)
- testDriveDistance(rowingStatistics, 0.46278952627008546)
- testDriveLength(rowingStatistics, 0.19058995431778075)
- testDriveAverageHandleForce(rowingStatistics, 276.20193475035796)
- testDrivePeakHandleForce(rowingStatistics, 325.1619554833936)
- testRecoveryDuration(rowingStatistics, 0.20540926600000003)
- testDragFactor(rowingStatistics, 283.12720365097886)
+ testDriveDistance(rowingStatistics, 0.4271903319416174)
+ testDriveLength(rowingStatistics, 0.1759291886010284)
+ testDriveAverageHandleForce(rowingStatistics, 276.6342676838739)
+ testDrivePeakHandleForce(rowingStatistics, 332.99182222129025)
+ testRecoveryDuration(rowingStatistics, 0.21654112800000003)
+ testDragFactor(rowingStatistics, 281.5961372923874)
testInstantHandlePower(rowingStatistics, undefined)
// Recovery second stroke starts here
rowingStatistics.handleRotationImpulse(0.010769)
@@ -211,19 +214,19 @@ test('Test behaviour for three perfect identical strokes, including settingling
rowingStatistics.handleRotationImpulse(0.011209919)
testStrokeState(rowingStatistics, 'Recovery')
testTotalMovingTime(rowingStatistics, 0.6101840930000001)
- testTotalLinearDistance(rowingStatistics, 2.5606258278697)
+ testTotalLinearDistance(rowingStatistics, 2.521238308350271)
testTotalNumberOfStrokes(rowingStatistics, 1)
testCycleDuration(rowingStatistics, undefined)
testCycleDistance(rowingStatistics, undefined)
testCycleLinearVelocity(rowingStatistics, undefined)
testCyclePower(rowingStatistics, undefined)
- testDriveDuration(rowingStatistics, 0.23985939100000003)
- testDriveDistance(rowingStatistics, 1.0733115961672441)
- testDriveLength(rowingStatistics, 0.322536845768552)
- testDriveAverageHandleForce(rowingStatistics, 285.0923064376231)
- testDrivePeakHandleForce(rowingStatistics, 439.7407274840117)
- testRecoveryDuration(rowingStatistics, 0.20540926600000003)
- testDragFactor(rowingStatistics, 283.12720365097886) // As we decelerate the flywheel quite fast, this is expected
+ testDriveDuration(rowingStatistics, 0.22872752900000004)
+ testDriveDistance(rowingStatistics, 1.0226745931298076)
+ testDriveLength(rowingStatistics, 0.3078760800517996)
+ testDriveAverageHandleForce(rowingStatistics, 288.45140756259053)
+ testDrivePeakHandleForce(rowingStatistics, 447.108514349131)
+ testRecoveryDuration(rowingStatistics, 0.21654112800000003)
+ testDragFactor(rowingStatistics, 281.5961372923874)
testInstantHandlePower(rowingStatistics, undefined)
// Drive third stroke starts here
rowingStatistics.handleRotationImpulse(0.011221636)
@@ -247,19 +250,19 @@ test('Test behaviour for three perfect identical strokes, including settingling
rowingStatistics.handleRotationImpulse(0.010386684)
testStrokeState(rowingStatistics, 'Drive')
testTotalMovingTime(rowingStatistics, 0.8203921620000004)
- testTotalLinearDistance(rowingStatistics, 3.4875767518323193)
+ testTotalLinearDistance(rowingStatistics, 3.4465153211820017)
testTotalNumberOfStrokes(rowingStatistics, 2)
testCycleDuration(rowingStatistics, undefined)
testCycleDistance(rowingStatistics, undefined)
testCycleLinearVelocity(rowingStatistics, undefined)
testCyclePower(rowingStatistics, undefined)
- testDriveDuration(rowingStatistics, 0.23985939100000003)
- testDriveDistance(rowingStatistics, 1.0733115961672441)
- testDriveLength(rowingStatistics, 0.322536845768552)
- testDriveAverageHandleForce(rowingStatistics, 285.0923064376231)
- testDrivePeakHandleForce(rowingStatistics, 439.7407274840117)
+ testDriveDuration(rowingStatistics, 0.22872752900000004)
+ testDriveDistance(rowingStatistics, 1.0226745931298076)
+ testDriveLength(rowingStatistics, 0.3078760800517996)
+ testDriveAverageHandleForce(rowingStatistics, 288.45140756259053)
+ testDrivePeakHandleForce(rowingStatistics, 447.108514349131)
testRecoveryDuration(rowingStatistics, 0.09812447700000015)
- testDragFactor(rowingStatistics, 283.12720365097886)
+ testDragFactor(rowingStatistics, 281.5961372923874)
testInstantHandlePower(rowingStatistics, undefined)
// Recovery third stroke starts here
rowingStatistics.handleRotationImpulse(0.010769)
@@ -279,19 +282,19 @@ test('Test behaviour for three perfect identical strokes, including settingling
rowingStatistics.handleRotationImpulse(0.011209919)
testStrokeState(rowingStatistics, 'Recovery')
testTotalMovingTime(rowingStatistics, 0.9814208570000005)
- testTotalLinearDistance(rowingStatistics, 4.219380112855441)
+ testTotalLinearDistance(rowingStatistics, 4.176997173417578)
testTotalNumberOfStrokes(rowingStatistics, 2)
testCycleDuration(rowingStatistics, undefined)
testCycleDistance(rowingStatistics, undefined)
testCycleLinearVelocity(rowingStatistics, undefined)
testCyclePower(rowingStatistics, undefined)
testDriveDuration(rowingStatistics, 0.27311228700000023)
- testDriveDistance(rowingStatistics, 1.2196722683718688)
- testDriveLength(rowingStatistics, 0.3665191429188092)
- testDriveAverageHandleForce(rowingStatistics, 254.91449219500532)
- testDrivePeakHandleForce(rowingStatistics, 439.74072748282515)
+ testDriveDistance(rowingStatistics, 1.2174697537259611)
+ testDriveLength(rowingStatistics, 0.36651914291880905)
+ testDriveAverageHandleForce(rowingStatistics, 256.5447026931294)
+ testDrivePeakHandleForce(rowingStatistics, 447.1085143512751)
testRecoveryDuration(rowingStatistics, 0.09812447700000015)
- testDragFactor(rowingStatistics, 283.12720365097886)
+ testDragFactor(rowingStatistics, 281.5961372923874)
testInstantHandlePower(rowingStatistics, undefined)
// Dwelling state starts here
rowingStatistics.handleRotationImpulse(0.020769)
@@ -312,7 +315,7 @@ test('Test behaviour for three perfect identical strokes, including settingling
testStrokeState(rowingStatistics, 'WaitingForDrive')
testTotalMovingTime(rowingStatistics, 1.1137102920000004)
testTotalNumberOfStrokes(rowingStatistics, 2)
- testTotalLinearDistance(rowingStatistics, 4.804822801673938)
+ testTotalLinearDistance(rowingStatistics, 4.76138265520604)
testCycleDuration(rowingStatistics, undefined)
testCycleDistance(rowingStatistics, undefined)
testCycleLinearVelocity(rowingStatistics, undefined)
@@ -323,30 +326,25 @@ test('Test behaviour for three perfect identical strokes, including settingling
testDriveAverageHandleForce(rowingStatistics, undefined)
testDrivePeakHandleForce(rowingStatistics, undefined)
testRecoveryDuration(rowingStatistics, undefined)
- testDragFactor(rowingStatistics, 283.12720365097886)
+ testDragFactor(rowingStatistics, 281.5961372923874)
testInstantHandlePower(rowingStatistics, undefined)
})
-// Test behaviour for noisy upgoing flank
-
-// Test behaviour for noisy downgoing flank
-
-// Test behaviour for noisy stroke
-
-// Test behaviour after reset
-
-// Test behaviour for one datapoint
-
-// Test behaviour for noisy stroke
-
-// Test drag factor calculation
-
-// Test Dynamic stroke detection
+/**
+ * @todo Test the effects of smoothing parameters
+ */
-// Test behaviour after reset
+/**
+ * @todo Test force curve behaviour
+ */
-// Test behaviour with real-life data
+/**
+ * @todo Test behaviour after reset
+ */
+/**
+ * @description Test behaviour for the Sportstech WRX700
+ */
test('sample data for Sportstech WRX700 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -366,12 +364,15 @@ test('sample data for Sportstech WRX700 should produce plausible results', async
await replayRowingSession(rowingStatistics.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets.csv', realtime: false, loop: false })
testTotalMovingTime(rowingStatistics, 46.302522627)
- testTotalLinearDistance(rowingStatistics, 166.29596716416734)
+ testTotalLinearDistance(rowingStatistics, 165.58832475070278)
testTotalNumberOfStrokes(rowingStatistics, 15)
// As dragFactor is static, it should remain in place
testDragFactor(rowingStatistics, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the DKN R-320
+ */
test('sample data for DKN R-320 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.DKN_R320)
const testConfig = {
@@ -391,12 +392,15 @@ test('sample data for DKN R-320 should produce plausible results', async () => {
await replayRowingSession(rowingStatistics.handleRotationImpulse, { filename: 'recordings/DKNR320.csv', realtime: false, loop: false })
testTotalMovingTime(rowingStatistics, 21.701535821)
- testTotalLinearDistance(rowingStatistics, 70.11298001986664)
+ testTotalLinearDistance(rowingStatistics, 69.20242183779045)
testTotalNumberOfStrokes(rowingStatistics, 9)
// As dragFactor is static, it should remain in place
testDragFactor(rowingStatistics, rowerProfiles.DKN_R320.dragFactor)
})
+/**
+ * @description Test behaviour for the NordicTrack RX800
+ */
test('sample data for NordicTrack RX800 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.NordicTrack_RX800)
const testConfig = {
@@ -415,13 +419,16 @@ test('sample data for NordicTrack RX800 should produce plausible results', async
await replayRowingSession(rowingStatistics.handleRotationImpulse, { filename: 'recordings/RX800.csv', realtime: false, loop: false })
- testTotalMovingTime(rowingStatistics, 22.259092749999997)
- testTotalLinearDistance(rowingStatistics, 80.49260485116434)
+ testTotalMovingTime(rowingStatistics, 22.368358745999995)
+ testTotalLinearDistance(rowingStatistics, 80.8365747440095)
testTotalNumberOfStrokes(rowingStatistics, 9)
// As dragFactor is dynamic, it should have changed
- testDragFactor(rowingStatistics, 491.1395313462149)
+ testDragFactor(rowingStatistics, 493.8082148322739)
})
+/**
+ * @description Test behaviour for the SportsTech WRX700 in a full session
+ */
test('A full session for SportsTech WRX700 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -441,12 +448,15 @@ test('A full session for SportsTech WRX700 should produce plausible results', as
await replayRowingSession(rowingStatistics.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets_session.csv', realtime: false, loop: false })
testTotalMovingTime(rowingStatistics, 2340.0100514160117)
- testTotalLinearDistance(rowingStatistics, 8406.791871958883)
+ testTotalLinearDistance(rowingStatistics, 8406.084229545408)
testTotalNumberOfStrokes(rowingStatistics, 845)
// As dragFactor is static, it should remain in place
testDragFactor(rowingStatistics, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the C2 Model C
+ */
test('A full session for a Concept2 Model C should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_Model_C)
const testConfig = {
@@ -466,12 +476,15 @@ test('A full session for a Concept2 Model C should produce plausible results', a
await replayRowingSession(rowingStatistics.handleRotationImpulse, { filename: 'recordings/Concept2_Model_C.csv', realtime: false, loop: false })
testTotalMovingTime(rowingStatistics, 181.47141999999985)
- testTotalLinearDistance(rowingStatistics, 552.0863658667265)
- testTotalNumberOfStrokes(rowingStatistics, 83)
+ testTotalLinearDistance(rowingStatistics, 552.2056895088467)
+ testTotalNumberOfStrokes(rowingStatistics, 82)
// As dragFactor isn't static, it should have changed
- testDragFactor(rowingStatistics, 123.82587294279575)
+ testDragFactor(rowingStatistics, 123.64632740545646)
})
+/**
+ * @description Test behaviour for the C2 RowErg
+ */
test('A full session for a Concept2 RowErg should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_RowErg)
const testConfig = {
@@ -490,11 +503,11 @@ test('A full session for a Concept2 RowErg should produce plausible results', as
await replayRowingSession(rowingStatistics.handleRotationImpulse, { filename: 'recordings/Concept2_RowErg_Session_2000meters.csv', realtime: false, loop: false })
- testTotalMovingTime(rowingStatistics, 590.111937)
- testTotalLinearDistance(rowingStatistics, 2027.493082238415)
+ testTotalMovingTime(rowingStatistics, 590.0231672202852)
+ testTotalLinearDistance(rowingStatistics, 2027.8388877679706)
testTotalNumberOfStrokes(rowingStatistics, 205)
// As dragFactor isn't static, it should have changed
- testDragFactor(rowingStatistics, 80.60573080009686)
+ testDragFactor(rowingStatistics, 80.70871681344696)
})
function testStrokeState (rowingStatistics, expectedValue) {
diff --git a/app/engine/SessionManager.test.js b/app/engine/SessionManager.test.js
index 8f2e4c3ea1..a50f4142e4 100644
--- a/app/engine/SessionManager.test.js
+++ b/app/engine/SessionManager.test.js
@@ -1,12 +1,10 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
/**
- * This test is a test of the SessionManager, that tests wether this object fills all fields correctly,
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This test is a test of the SessionManager, that tests whether this object fills all fields correctly,
* and cuts off a session, interval and split decently
*/
-// ToDo: test the effects of smoothing parameters
import { test } from 'uvu'
import * as assert from 'uvu/assert'
import rowerProfiles from '../../config/rowerProfiles.js'
@@ -15,6 +13,21 @@ import { deepMerge } from '../tools/Helper.js'
import { createSessionManager } from './SessionManager.js'
+/**
+ * @todo Add inspections to all tests to inspect whether the 'workout' object contains all correct values as well
+ */
+
+/**
+ * @todo Add inspections to all tests to inspect whether the 'interval' object contains all correct values
+ */
+
+/**
+ * @todo Add splits and tests to inspect whether the 'split' object contains all correct values as well
+ */
+
+/**
+ * @description Test behaviour for the Sportstech WRX700 in a 'Just Row' session
+ */
test('sample data for Sportstech WRX700 should produce plausible results for an unlimited run', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -23,23 +36,31 @@ test('sample data for Sportstech WRX700 should produce plausible results for an
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 46.302522627)
- testTotalLinearDistance(sessionManager, 166.29596716416734)
+ testTotalLinearDistance(sessionManager, 165.58832475070278)
+ testTotalCalories(sessionManager, 13.142874997261865)
testTotalNumberOfStrokes(sessionManager, 15)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the Sportstech WRX700 in a single interval session with a Distance target
+ */
test('sample data for Sportstech WRX700 should produce plausible results for a 150 meter session', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -48,6 +69,9 @@ test('sample data for Sportstech WRX700 should produce plausible results for a 1
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -62,18 +86,23 @@ test('sample data for Sportstech WRX700 should produce plausible results for a 1
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets.csv', realtime: false, loop: false })
- testTotalMovingTime(sessionManager, 41.734896595)
+ testTotalMovingTime(sessionManager, 41.876875768000005)
testTotalLinearDistance(sessionManager, 150.02019165448286)
+ testTotalCalories(sessionManager, 12.047320967455441)
testTotalNumberOfStrokes(sessionManager, 14)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the Sportstech WRX700 in a single interval session with a Time target
+ */
test('sample data for Sportstech WRX700 should produce plausible results for a 45 seconds session', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -82,6 +111,9 @@ test('sample data for Sportstech WRX700 should produce plausible results for a 4
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -96,18 +128,64 @@ test('sample data for Sportstech WRX700 should produce plausible results for a 4
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 45.077573161000004)
- testTotalLinearDistance(sessionManager, 163.46539751030917)
+ testTotalLinearDistance(sessionManager, 162.75775509684462)
+ testTotalCalories(sessionManager, 13.040795875095199)
+ testTotalNumberOfStrokes(sessionManager, 15)
+ // As dragFactor is static, it should remain in place
+ testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
+})
+
+/**
+ * @description Test behaviour for the SportsTech WRX700 in a single interval session with a Calorie target
+ */
+test('sample data for Sportstech WRX700 should produce plausible results for a 13 calories session', async () => {
+ const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
+ const testConfig = {
+ loglevel: {
+ default: 'silent',
+ RowingEngine: 'silent'
+ },
+ numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
+ rowerSettings: rowerProfile
+ }
+ const sessionManager = createSessionManager(testConfig)
+
+ const intervalSettings = []
+ intervalSettings[0] = {
+ type: 'calories',
+ targetCalories: 13
+ }
+ sessionManager.handleCommand('updateIntervalSettings', intervalSettings)
+
+ testTotalMovingTime(sessionManager, 0)
+ testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
+ testTotalNumberOfStrokes(sessionManager, 0)
+ testDragFactor(sessionManager, undefined)
+
+ await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets.csv', realtime: false, loop: false })
+
+ testTotalMovingTime(sessionManager, 44.674583250000005)
+ testTotalLinearDistance(sessionManager, 161.3424702699155)
+ testTotalCalories(sessionManager, 13.007213382511864)
testTotalNumberOfStrokes(sessionManager, 15)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the DKN R-320 in a 'Just Row' session
+ */
test('sample data for DKN R-320 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.DKN_R320)
const testConfig = {
@@ -116,23 +194,31 @@ test('sample data for DKN R-320 should produce plausible results', async () => {
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/DKNR320.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 21.701535821)
- testTotalLinearDistance(sessionManager, 70.11298001986664)
+ testTotalLinearDistance(sessionManager, 69.20242183779045)
+ testTotalCalories(sessionManager, 6.7615440068583315)
testTotalNumberOfStrokes(sessionManager, 9)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.DKN_R320.dragFactor)
})
+/**
+ * @description Test behaviour for the NordicTrack RX800 in a 'Just Row' session
+ */
test('sample data for NordicTrack RX800 should produce plausible results without intervalsettings', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.NordicTrack_RX800)
const testConfig = {
@@ -141,23 +227,31 @@ test('sample data for NordicTrack RX800 should produce plausible results without
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/RX800.csv', realtime: false, loop: false })
- testTotalMovingTime(sessionManager, 22.259092749999997)
- testTotalLinearDistance(sessionManager, 80.49260485116434)
+ testTotalMovingTime(sessionManager, 22.368358745999995)
+ testTotalLinearDistance(sessionManager, 80.8365747440095)
+ testTotalCalories(sessionManager, 4.8487817727235765)
testTotalNumberOfStrokes(sessionManager, 9)
// As dragFactor is dynamic, it should have changed
- testDragFactor(sessionManager, 491.1395313462149)
+ testDragFactor(sessionManager, 493.8082148322739)
})
+/**
+ * @description Test behaviour for the NordicTrack RX800 in a single interval session with a Time target
+ */
test('sample data for NordicTrack RX800 should produce plausible results for a 20 seconds session', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.NordicTrack_RX800)
const testConfig = {
@@ -166,6 +260,9 @@ test('sample data for NordicTrack RX800 should produce plausible results for a 2
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -180,18 +277,64 @@ test('sample data for NordicTrack RX800 should produce plausible results for a 2
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/RX800.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 20.02496380499998)
- testTotalLinearDistance(sessionManager, 72.3905525302199)
+ testTotalLinearDistance(sessionManager, 72.36563503912126)
+ testTotalCalories(sessionManager, 4.369289275497461)
testTotalNumberOfStrokes(sessionManager, 8)
// As dragFactor is dynamic, it should have changed
- testDragFactor(sessionManager, 487.65077394777813)
+ testDragFactor(sessionManager, 489.6362497474688)
})
+/**
+ * @description Test behaviour for the NordicTrack RX800 in a single interval session with a Calorie target
+ */
+test('sample data for NordicTrack RX800 should produce plausible results for a 20 calories session', async () => {
+ const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.NordicTrack_RX800)
+ const testConfig = {
+ loglevel: {
+ default: 'silent',
+ RowingEngine: 'silent'
+ },
+ numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
+ rowerSettings: rowerProfile
+ }
+ const sessionManager = createSessionManager(testConfig)
+
+ const intervalSettings = []
+ intervalSettings[0] = {
+ type: 'calories',
+ targetCalories: 20
+ }
+ sessionManager.handleCommand('updateIntervalSettings', intervalSettings)
+
+ testTotalMovingTime(sessionManager, 0)
+ testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
+ testTotalNumberOfStrokes(sessionManager, 0)
+ testDragFactor(sessionManager, undefined)
+
+ await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/RX800.csv', realtime: false, loop: false })
+
+ testTotalMovingTime(sessionManager, 22.368358745999995)
+ testTotalLinearDistance(sessionManager, 80.8365747440095)
+ testTotalCalories(sessionManager, 4.8487817727235765)
+ testTotalNumberOfStrokes(sessionManager, 9)
+ // As dragFactor is dynamic, it should have changed
+ testDragFactor(sessionManager, 493.8082148322739)
+})
+
+/**
+ * @description Test behaviour for the NordicTrack RX800 in a single interval session with a Distance target
+ */
test('sample data for NordicTrack RX800 should produce plausible results for a 75 meter session', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.NordicTrack_RX800)
const testConfig = {
@@ -200,6 +343,9 @@ test('sample data for NordicTrack RX800 should produce plausible results for a 7
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -214,18 +360,23 @@ test('sample data for NordicTrack RX800 should produce plausible results for a 7
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/RX800.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 20.78640177499998)
- testTotalLinearDistance(sessionManager, 75.04096463553918)
+ testTotalLinearDistance(sessionManager, 75.02272363260582)
+ testTotalCalories(sessionManager, 4.701450875048449)
testTotalNumberOfStrokes(sessionManager, 9)
// As dragFactor is dynamic, it should have changed
- testDragFactor(sessionManager, 491.1395313462149)
+ testDragFactor(sessionManager, 493.8082148322739)
})
+/**
+ * @description Test behaviour for the SportsTech WRX700 in a 'Just Row' session
+ */
test('A full unlimited session for SportsTech WRX700 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -234,23 +385,31 @@ test('A full unlimited session for SportsTech WRX700 should produce plausible re
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets_session.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 2340.0100514160117)
- testTotalLinearDistance(sessionManager, 8406.791871958883)
+ testTotalLinearDistance(sessionManager, 8406.084229545408)
+ testTotalCalories(sessionManager, 659.4761649276804)
testTotalNumberOfStrokes(sessionManager, 845)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the SportsTech WRX700 in a single interval session with a Distance target
+ */
test('A 8000 meter session for SportsTech WRX700 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -259,6 +418,9 @@ test('A 8000 meter session for SportsTech WRX700 should produce plausible result
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -273,18 +435,23 @@ test('A 8000 meter session for SportsTech WRX700 should produce plausible result
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets_session.csv', realtime: false, loop: false })
- testTotalMovingTime(sessionManager, 2236.509317727007)
- testTotalLinearDistance(sessionManager, 8000.605126630236)
+ testTotalMovingTime(sessionManager, 2236.631120457007)
+ testTotalLinearDistance(sessionManager, 8000.605126630226)
+ testTotalCalories(sessionManager, 625.5636651176962)
testTotalNumberOfStrokes(sessionManager, 804)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the SportsTech WRX700 in a single interval session with a Time target
+ */
test('A 2300 sec session for SportsTech WRX700 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
@@ -293,6 +460,9 @@ test('A 2300 sec session for SportsTech WRX700 should produce plausible results'
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -307,19 +477,24 @@ test('A 2300 sec session for SportsTech WRX700 should produce plausible results'
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets_session.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 2300.00695516701)
- testTotalLinearDistance(sessionManager, 8252.525825823619)
+ testTotalLinearDistance(sessionManager, 8251.818183410143)
+ testTotalCalories(sessionManager, 646.8205257461132)
testTotalNumberOfStrokes(sessionManager, 830)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
})
-test('A 2400 sec session for SportsTech WRX700 should produce plausible results', async () => {
+/**
+ * @description Test behaviour for the SportsTech WRX700 in a single interval session with a Time target, which will not be reached (test of stopping behaviour)
+ */
+test('A 2400 sec session with premature stop for SportsTech WRX700 should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Sportstech_WRX700)
const testConfig = {
loglevel: {
@@ -327,6 +502,9 @@ test('A 2400 sec session for SportsTech WRX700 should produce plausible results'
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -341,18 +519,23 @@ test('A 2400 sec session for SportsTech WRX700 should produce plausible results'
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/WRX700_2magnets_session.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 2340.0100514160117)
- testTotalLinearDistance(sessionManager, 8406.791871958883)
+ testTotalLinearDistance(sessionManager, 8406.084229545408)
+ testTotalCalories(sessionManager, 659.4761649276804)
testTotalNumberOfStrokes(sessionManager, 845)
// As dragFactor is static, it should remain in place
testDragFactor(sessionManager, rowerProfiles.Sportstech_WRX700.dragFactor)
})
+/**
+ * @description Test behaviour for the C2 Model C in a 'Just Row' session
+ */
test('A full session for a Concept2 Model C should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_Model_C)
const testConfig = {
@@ -361,23 +544,31 @@ test('A full session for a Concept2 Model C should produce plausible results', a
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_Model_C.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 181.47141999999985)
- testTotalLinearDistance(sessionManager, 552.0863658667265)
- testTotalNumberOfStrokes(sessionManager, 83)
+ testTotalLinearDistance(sessionManager, 552.2056895088467)
+ testTotalCalories(sessionManager, 33.96141888570208)
+ testTotalNumberOfStrokes(sessionManager, 82)
// As dragFactor isn't static, it should have changed
- testDragFactor(sessionManager, 123.82587294279575)
+ testDragFactor(sessionManager, 123.64632740545646)
})
+/**
+ * @description Test behaviour for the C2 Model C in a single interval session with a Distance target
+ */
test('A 500 meter session for a Concept2 Model C should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_Model_C)
const testConfig = {
@@ -386,6 +577,9 @@ test('A 500 meter session for a Concept2 Model C should produce plausible result
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -400,18 +594,23 @@ test('A 500 meter session for a Concept2 Model C should produce plausible result
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_Model_C.csv', realtime: false, loop: false })
- testTotalMovingTime(sessionManager, 156.87138200000004)
- testTotalLinearDistance(sessionManager, 500.03019828253076)
+ testTotalMovingTime(sessionManager, 156.83075199999985)
+ testTotalLinearDistance(sessionManager, 500.0178754492436)
+ testTotalCalories(sessionManager, 30.87012556034265)
testTotalNumberOfStrokes(sessionManager, 73)
// As dragFactor isn't static, it should have changed
- testDragFactor(sessionManager, 123.69864738410088)
+ testDragFactor(sessionManager, 123.18123281481081)
})
+/**
+ * @description Test behaviour for the C2 Model C in a single interval session with a Time target
+ */
test('A 3 minute session for a Concept2 Model C should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_Model_C)
const testConfig = {
@@ -420,6 +619,9 @@ test('A 3 minute session for a Concept2 Model C should produce plausible results
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -434,18 +636,64 @@ test('A 3 minute session for a Concept2 Model C should produce plausible results
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_Model_C.csv', realtime: false, loop: false })
testTotalMovingTime(sessionManager, 180.96533299999987)
- testTotalLinearDistance(sessionManager, 551.8641725505744)
- testTotalNumberOfStrokes(sessionManager, 83)
+ testTotalLinearDistance(sessionManager, 551.9836036368948)
+ testTotalCalories(sessionManager, 33.91002253445811)
+ testTotalNumberOfStrokes(sessionManager, 82)
+ // As dragFactor isn't static, it should have changed
+ testDragFactor(sessionManager, 123.64632740545646)
+})
+
+/**
+ * @description Test behaviour for the C2 Model C in a single interval session with a Calorie target
+ */
+test('A 30 calorie session for a Concept2 Model C should produce plausible results', async () => {
+ const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_Model_C)
+ const testConfig = {
+ loglevel: {
+ default: 'silent',
+ RowingEngine: 'silent'
+ },
+ numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
+ rowerSettings: rowerProfile
+ }
+ const sessionManager = createSessionManager(testConfig)
+
+ const intervalSettings = []
+ intervalSettings[0] = {
+ type: 'calories',
+ targetCalories: 30
+ }
+ sessionManager.handleCommand('updateIntervalSettings', intervalSettings)
+
+ testTotalMovingTime(sessionManager, 0)
+ testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
+ testTotalNumberOfStrokes(sessionManager, 0)
+ testDragFactor(sessionManager, undefined)
+
+ await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_Model_C.csv', realtime: false, loop: false })
+
+ testTotalMovingTime(sessionManager, 153.93554999999992)
+ testTotalLinearDistance(sessionManager, 490.5541073829962)
+ testTotalCalories(sessionManager, 30.018254924945477)
+ testTotalNumberOfStrokes(sessionManager, 72)
// As dragFactor isn't static, it should have changed
- testDragFactor(sessionManager, 123.82587294279575)
+ testDragFactor(sessionManager, 123.18123281481081)
})
+/**
+ * @description Test behaviour for the C2 RowErg in a 'Just Row' session
+ */
test('A full session for a Concept2 RowErg should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_RowErg)
const testConfig = {
@@ -454,23 +702,31 @@ test('A full session for a Concept2 RowErg should produce plausible results', as
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_RowErg_Session_2000meters.csv', realtime: false, loop: false })
- testTotalMovingTime(sessionManager, 590.111937)
- testTotalLinearDistance(sessionManager, 2027.493082238415)
+ testTotalMovingTime(sessionManager, 590.0231672202852)
+ testTotalLinearDistance(sessionManager, 2027.8388877679706)
+ testTotalCalories(sessionManager, 113.71189726505551)
testTotalNumberOfStrokes(sessionManager, 205)
// As dragFactor isn't static, it should have changed
- testDragFactor(sessionManager, 80.60573080009686)
+ testDragFactor(sessionManager, 80.70871681344696)
})
+/**
+ * @description Test behaviour for the C2 RowErg in a single interval session with a Distance target
+ */
test('A 2000 meter session for a Concept2 RowErg should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_RowErg)
const testConfig = {
@@ -479,6 +735,9 @@ test('A 2000 meter session for a Concept2 RowErg should produce plausible result
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -493,18 +752,23 @@ test('A 2000 meter session for a Concept2 RowErg should produce plausible result
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_RowErg_Session_2000meters.csv', realtime: false, loop: false })
- testTotalMovingTime(sessionManager, 582.1907659999988)
- testTotalLinearDistance(sessionManager, 2000.0158938948496)
+ testTotalMovingTime(sessionManager, 582.0171075172801)
+ testTotalLinearDistance(sessionManager, 2000.029064226818)
+ testTotalCalories(sessionManager, 112.32788945271977)
testTotalNumberOfStrokes(sessionManager, 203)
// As dragFactor isn't static, it should have changed
- testDragFactor(sessionManager, 80.55270240035931)
+ testDragFactor(sessionManager, 80.67710663511312)
})
+/**
+ * @description Test behaviour for the C2 RowErg in a single interval session with a Time target
+ */
test('A 580 seconds session for a Concept2 RowErg should produce plausible results', async () => {
const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_RowErg)
const testConfig = {
@@ -513,6 +777,9 @@ test('A 580 seconds session for a Concept2 RowErg should produce plausible resul
RowingEngine: 'silent'
},
numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
rowerSettings: rowerProfile
}
const sessionManager = createSessionManager(testConfig)
@@ -527,18 +794,69 @@ test('A 580 seconds session for a Concept2 RowErg should produce plausible resul
testTotalMovingTime(sessionManager, 0)
testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
testTotalNumberOfStrokes(sessionManager, 0)
testDragFactor(sessionManager, undefined)
await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_RowErg_Session_2000meters.csv', realtime: false, loop: false })
- testTotalMovingTime(sessionManager, 580.0033639999992)
- testTotalLinearDistance(sessionManager, 1992.6040191024413)
+ testTotalMovingTime(sessionManager, 580.0043837232224)
+ testTotalLinearDistance(sessionManager, 1993.2553343495642)
+ testTotalCalories(sessionManager, 111.91692589466263)
testTotalNumberOfStrokes(sessionManager, 202)
// As dragFactor isn't static, it should have changed
- testDragFactor(sessionManager, 80.5946092810885)
+ testDragFactor(sessionManager, 80.69990852674552)
+})
+
+/**
+ * @description Test behaviour for the C2 RowErg in a single interval session with a Calorie target
+ */
+test('A 100 calories session for a Concept2 RowErg should produce plausible results', async () => {
+ const rowerProfile = deepMerge(rowerProfiles.DEFAULT, rowerProfiles.Concept2_RowErg)
+ const testConfig = {
+ loglevel: {
+ default: 'silent',
+ RowingEngine: 'silent'
+ },
+ numOfPhasesForAveragingScreenData: 2,
+ userSettings: {
+ sex: 'male'
+ },
+ rowerSettings: rowerProfile
+ }
+ const sessionManager = createSessionManager(testConfig)
+
+ const intervalSettings = []
+ intervalSettings[0] = {
+ type: 'calories',
+ targetCalories: 100
+ }
+ sessionManager.handleCommand('updateIntervalSettings', intervalSettings)
+
+ testTotalMovingTime(sessionManager, 0)
+ testTotalLinearDistance(sessionManager, 0)
+ testTotalCalories(sessionManager, 0)
+ testTotalNumberOfStrokes(sessionManager, 0)
+ testDragFactor(sessionManager, undefined)
+
+ await replayRowingSession(sessionManager.handleRotationImpulse, { filename: 'recordings/Concept2_RowErg_Session_2000meters.csv', realtime: false, loop: false })
+
+ testTotalMovingTime(sessionManager, 518.7774144765336)
+ testTotalLinearDistance(sessionManager, 1780.5168110240045)
+ testTotalCalories(sessionManager, 100.00038138743304)
+ testTotalNumberOfStrokes(sessionManager, 181)
+ // As dragFactor isn't static, it should have changed
+ testDragFactor(sessionManager, 80.66540957118806)
})
+/**
+ * @todo Add tests for multiple planned intervals of the same type
+ */
+
+/**
+ * @todo Add tests for multiple planned intervals of a different type, including pauses
+ */
+
function testTotalMovingTime (sessionManager, expectedValue) {
assert.ok(sessionManager.getMetrics().totalMovingTime === expectedValue, `totalMovingTime should be ${expectedValue} sec at ${sessionManager.getMetrics().totalMovingTime} sec, is ${sessionManager.getMetrics().totalMovingTime}`)
}
@@ -552,6 +870,10 @@ function testTotalLinearDistance (sessionManager, expectedValue) {
assert.ok(sessionManager.getMetrics().totalLinearDistance === expectedValue, `totalLinearDistance should be ${expectedValue} meters at ${sessionManager.getMetrics().totalMovingTime} sec, is ${sessionManager.getMetrics().totalLinearDistance}`)
}
+function testTotalCalories (sessionManager, expectedValue) {
+ assert.ok(sessionManager.getMetrics().totalCalories === expectedValue, `totalCalories should be ${expectedValue} kCal at ${sessionManager.getMetrics().totalMovingTime} sec, is ${sessionManager.getMetrics().totalCalories}`)
+}
+
function testDragFactor (sessionManager, expectedValue) {
assert.ok(sessionManager.getMetrics().dragFactor === expectedValue, `dragFactor should be ${expectedValue} N*m*s^2 at ${sessionManager.getMetrics().totalMovingTime} sec, is ${sessionManager.getMetrics().dragFactor}`)
}
diff --git a/app/engine/utils/BinarySearchTree.js b/app/engine/utils/BinarySearchTree.js
index 2fb7c1bc8a..c5a0148475 100644
--- a/app/engine/utils/BinarySearchTree.js
+++ b/app/engine/utils/BinarySearchTree.js
@@ -1,53 +1,72 @@
'use strict'
/*
Open Rowing Monitor, https://github.com/jaapvanekris/openrowingmonitor
-
- This creates an ordered series with labels
- It allows for efficient determining the Median, Number of Above and Below
*/
+/**
+ * This creates an ordered series with labels and optional weights
+ * It allows efficiently determining the weighted median and the number of values above and below a given value
+ */
export function createLabelledBinarySearchTree () {
let tree = null
- function push (label, value) {
+ /**
+ * @param {float} label used to identify this value so it can be removed later
+ * @param {float} value to store
+ * @param {float} weight attributed to the value (default = 1)
+ */
+ function push (label, value, weight = 1) {
if (value === undefined || isNaN(value)) { return }
if (tree === null) {
- tree = newNode(label, value)
+ tree = newNode(label, value, weight)
} else {
- tree = pushInTree(tree, label, value)
+ tree = pushInTree(tree, label, value, weight)
}
}
- function pushInTree (currentTree, label, value) {
+ /**
+ * Helper function to actually push value in the current tree
+ * @param {object} currentTree the (sub)tree to push the value into
+ * @param {float} label used to identify this value so it can be removed later
+ * @param {float} value to store
+ * @param {float} weight attributed to the value
+ */
+ function pushInTree (currentTree, label, value, weight) {
if (value <= currentTree.value) {
// The value should be on the left side of currentTree
if (currentTree.leftNode === null) {
- currentTree.leftNode = newNode(label, value)
+ currentTree.leftNode = newNode(label, value, weight)
} else {
- currentTree.leftNode = pushInTree(currentTree.leftNode, label, value)
+ currentTree.leftNode = pushInTree(currentTree.leftNode, label, value, weight)
}
} else {
// The value should be on the right side of currentTree
if (currentTree.rightNode === null) {
- currentTree.rightNode = newNode(label, value)
+ currentTree.rightNode = newNode(label, value, weight)
} else {
- currentTree.rightNode = pushInTree(currentTree.rightNode, label, value)
+ currentTree.rightNode = pushInTree(currentTree.rightNode, label, value, weight)
}
}
currentTree.numberOfLeafsAndNodes = currentTree.numberOfLeafsAndNodes + 1
+ currentTree.totalWeight = currentTree.totalWeight + weight
return currentTree
}
- function newNode (label, value) {
+ function newNode (label, value, weight) {
return {
label,
value,
+ weight,
leftNode: null,
rightNode: null,
- numberOfLeafsAndNodes: 1
+ numberOfLeafsAndNodes: 1,
+ totalWeight: weight
}
}
+ /**
+ * @returns {integer} the number of values stored in the tree
+ */
function size () {
if (tree !== null) {
return tree.numberOfLeafsAndNodes
@@ -56,6 +75,17 @@ export function createLabelledBinarySearchTree () {
}
}
+ /**
+ * @returns {float} the total weight stored in the tree
+ */
+ function totalWeight () {
+ if (tree !== null) {
+ return tree.totalWeight
+ } else {
+ return 0
+ }
+ }
+
function minimum () {
return minimumValueInTree(tree)
}
@@ -136,10 +166,12 @@ export function createLabelledBinarySearchTree () {
// Next, handle the situation when we need to remove the node itself
if (currentTree.label === label) {
// First we need to remove the current node, then we need to investigate the underlying sub-trees to determine how it is resolved
- // First, release the memory of the current node before we start to rearrange the tree, as this might cause a memory leak
+ // We start by releasing the memory of the current node before rearranging the tree, as rearranging first might otherwise cause a memory leak
currentTree.label = null
currentTree.value = null
+ currentTree.weight = null
currentTree.numberOfLeafsAndNodes = null
+ currentTree.totalWeight = null
switch (true) {
case (currentTree.leftNode === null && currentTree.rightNode === null):
// As the underlying sub-trees are empty as well, we return an empty tree
@@ -158,21 +190,25 @@ export function createLabelledBinarySearchTree () {
// as there are two potential nodes to use, we try to balance the tree a bit more as this increases performance
if (currentTree.leftNode.numberOfLeafsAndNodes > currentTree.rightNode.numberOfLeafsAndNodes) {
// The left sub-tree is bigger then the right one, lets use the closest predecessor to restore some balance
- currentTree.value = clostestPredecessor(currentTree.leftNode).value
- currentTree.label = clostestPredecessor(currentTree.leftNode).label
- currentTree.leftNode = destroyClostestPredecessor(currentTree.leftNode)
+ const _closestPredecessor = closestPredecessor(currentTree.leftNode)
+ currentTree.value = _closestPredecessor.value
+ currentTree.label = _closestPredecessor.label
+ currentTree.weight = _closestPredecessor.weight
+ currentTree.leftNode = destroyclosestPredecessor(currentTree.leftNode)
} else {
// The right sub-tree is smaller then the right one, lets use the closest successor to restore some balance
- currentTree.value = clostestSuccesor(currentTree.rightNode).value
- currentTree.label = clostestSuccesor(currentTree.rightNode).label
- currentTree.rightNode = destroyClostestSuccessor(currentTree.rightNode)
+ const _closestSuccesor = closestSuccesor(currentTree.rightNode)
+ currentTree.value = _closestSuccesor.value
+ currentTree.label = _closestSuccesor.label
+ currentTree.weight = _closestSuccesor.weight
+ currentTree.rightNode = destroyclosestSuccessor(currentTree.rightNode)
}
break
// no default
}
}
- // Recalculate the tree size
+ // Recalculate the tree size and total weight
switch (true) {
case (currentTree === null):
// We are now an empty leaf, nothing to do here
@@ -180,87 +216,109 @@ export function createLabelledBinarySearchTree () {
case (currentTree.leftNode === null && currentTree.rightNode === null):
// This is a filled leaf
currentTree.numberOfLeafsAndNodes = 1
+ currentTree.totalWeight = currentTree.weight
break
case (currentTree.leftNode !== null && currentTree.rightNode === null):
currentTree.numberOfLeafsAndNodes = currentTree.leftNode.numberOfLeafsAndNodes + 1
+ currentTree.totalWeight = currentTree.leftNode.totalWeight + currentTree.weight
break
case (currentTree.leftNode === null && currentTree.rightNode !== null):
currentTree.numberOfLeafsAndNodes = currentTree.rightNode.numberOfLeafsAndNodes + 1
+ currentTree.totalWeight = currentTree.rightNode.totalWeight + currentTree.weight
break
case (currentTree.leftNode !== null && currentTree.rightNode !== null):
currentTree.numberOfLeafsAndNodes = currentTree.leftNode.numberOfLeafsAndNodes + currentTree.rightNode.numberOfLeafsAndNodes + 1
+ currentTree.totalWeight = currentTree.leftNode.totalWeight + currentTree.rightNode.totalWeight + currentTree.weight
break
// no default
}
return currentTree
}
- function clostestPredecessor (currentTree) {
+ function closestPredecessor (currentTree) {
// This function finds the maximum value in a tree
if (currentTree.rightNode !== null) {
// We haven't reached the end of the tree yet
- return clostestPredecessor(currentTree.rightNode)
+ return closestPredecessor(currentTree.rightNode)
} else {
// We reached the largest value in the tree
return {
label: currentTree.label,
- value: currentTree.value
+ value: currentTree.value,
+ weight: currentTree.weight
}
}
}
- function destroyClostestPredecessor (currentTree) {
+ function destroyclosestPredecessor (currentTree) {
// This function finds the maximum value in a tree
if (currentTree.rightNode !== null) {
// We haven't reached the end of the tree yet
- currentTree.rightNode = destroyClostestPredecessor(currentTree.rightNode)
+ currentTree.rightNode = destroyclosestPredecessor(currentTree.rightNode)
currentTree.numberOfLeafsAndNodes = currentTree.numberOfLeafsAndNodes - 1
+ let totalWeight = currentTree.weight
+ if (currentTree.rightNode !== null && currentTree.rightNode.totalWeight !== undefined) { totalWeight += currentTree.rightNode.totalWeight }
+ if (currentTree.leftNode !== null && currentTree.leftNode.totalWeight !== undefined) { totalWeight += currentTree.leftNode.totalWeight }
+ currentTree.totalWeight = totalWeight
return currentTree
} else {
// We reached the largest value in the tree
// First, release the memory of the current node before we start to rearrange the tree, as this might cause a memory leak
currentTree.label = null
currentTree.value = null
+ currentTree.weight = null
currentTree.numberOfLeafsAndNodes = null
+ currentTree.totalWeight = null
return currentTree.leftNode
}
}
- function clostestSuccesor (currentTree) {
+ function closestSuccesor (currentTree) {
// This function finds the maximum value in a tree
if (currentTree.leftNode !== null) {
// We haven't reached the end of the tree yet
- return clostestSuccesor(currentTree.leftNode)
+ return closestSuccesor(currentTree.leftNode)
} else {
// We reached the smallest value in the tree
return {
label: currentTree.label,
- value: currentTree.value
+ value: currentTree.value,
+ weight: currentTree.weight
}
}
}
- function destroyClostestSuccessor (currentTree) {
+ function destroyclosestSuccessor (currentTree) {
// This function finds the maximum value in a tree
if (currentTree.leftNode !== null) {
// We haven't reached the end of the tree yet
- currentTree.leftNode = destroyClostestSuccessor(currentTree.leftNode)
+ currentTree.leftNode = destroyclosestSuccessor(currentTree.leftNode)
currentTree.numberOfLeafsAndNodes = currentTree.numberOfLeafsAndNodes - 1
+ let totalWeight = currentTree.weight
+ if (currentTree.rightNode !== null && currentTree.rightNode.totalWeight !== undefined) { totalWeight += currentTree.rightNode.totalWeight }
+ if (currentTree.leftNode !== null && currentTree.leftNode.totalWeight !== undefined) { totalWeight += currentTree.leftNode.totalWeight }
+ currentTree.totalWeight = totalWeight
return currentTree
} else {
// We reached the smallest value in the tree
// First, release the memory of the current node before we start to rearrange the tree, as this might cause a memory leak
currentTree.label = null
currentTree.value = null
+ currentTree.weight = null
currentTree.numberOfLeafsAndNodes = null
+ currentTree.totalWeight = null
return currentTree.rightNode
}
}
+ /**
+ * BE AWARE, UNLIKE WITH ARRAYS, THE COUNTING OF THE ELEMENTS STARTS WITH 1 !!!
+ * THIS LOGIC THUS WORKS DIFFERENTLY THAN MOST ARRAYS FOUND IN ORM !!!
+ * @returns {float} the median of the tree
+ */
function median () {
if (tree !== null && tree.numberOfLeafsAndNodes > 0) {
- // BE AWARE, UNLIKE WITH ARRAYS, THE COUNTING OF THE ELEMENTS STARTS WITH 1 !!!!!!!
- // THIS LOGIC THUS WORKS DIFFERENT THAN MOST ARRAYS FOUND IN ORM!!!!!!!
+ // Standard median calculation (weight = 1 for all nodes)
const mid = Math.floor(tree.numberOfLeafsAndNodes / 2)
return tree.numberOfLeafsAndNodes % 2 !== 0 ? valueAtInorderPosition(tree, mid + 1) : (valueAtInorderPosition(tree, mid) + valueAtInorderPosition(tree, mid + 1)) / 2
} else {
@@ -269,7 +327,79 @@ export function createLabelledBinarySearchTree () {
}
/**
- * @remark: // BE AWARE TESTING PURPOSSES ONLY
+ * @returns {float} the weighted median of the entire tree, with linear interpolation between datapoints if needed
+ */
+ function weightedMedian () {
+ if (!tree || tree.totalWeight === 0) { return undefined }
+
+ const half = tree.totalWeight / 2
+ const underNode = findUndershootingNode(tree, half, 0)
+ const overNode = findOvershootingNode(tree, half, 0)
+
+ switch (true) {
+ case (!underNode && !overNode):
+ return undefined
+ case (!underNode):
+ return overNode.value
+ case (!overNode):
+ return underNode.value
+ case (underNode.cumulativeWeight === overNode.cumulativeWeight || (half === underNode.cumulativeWeight && underNode.value !== overNode.value)):
+ // If at exact boundary or weights are equal, return average
+ return (underNode.value + overNode.value) / 2
+ default:
+ // Interpolate based on where target falls in the weight range
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const interpolationFactor = (half - underNode.cumulativeWeight) / (overNode.cumulativeWeight - underNode.cumulativeWeight)
+ return underNode.value + (overNode.value - underNode.value) * interpolationFactor
+ }
+ }
+
+ /**
+ * This helper function identifies the node that is closest below the set weight
+ */
+ function findUndershootingNode (node, targetWeight, accWeight = 0) {
+ if (!node) { return null }
+
+ const leftWeight = node.leftNode ? node.leftNode.totalWeight : 0
+ const weightBeforeNode = accWeight + leftWeight
+ const weightUpToNode = weightBeforeNode + node.weight
+
+ switch (true) {
+ case (targetWeight <= weightBeforeNode):
+ return findUndershootingNode(node.leftNode, targetWeight, accWeight)
+ case (targetWeight > weightUpToNode):
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const rightResult = findUndershootingNode(node.rightNode, targetWeight, weightUpToNode)
+ return rightResult || { value: node.value, cumulativeWeight: weightUpToNode }
+ default:
+ return { value: node.value, cumulativeWeight: weightUpToNode }
+ }
+ }
+
+ /**
+ * This helper function identifies the node that is closest above the set weight
+ */
+ function findOvershootingNode (node, targetWeight, accWeight = 0) {
+ if (!node) { return null }
+
+ const leftWeight = node.leftNode ? node.leftNode.totalWeight : 0
+ const weightBeforeNode = accWeight + leftWeight
+ const weightUpToNode = weightBeforeNode + node.weight
+
+ switch (true) {
+ case (targetWeight < weightBeforeNode):
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const leftResult = findOvershootingNode(node.leftNode, targetWeight, accWeight)
+ return leftResult || { value: node.value, cumulativeWeight: weightBeforeNode }
+ case (targetWeight >= weightUpToNode):
+ return findOvershootingNode(node.rightNode, targetWeight, weightUpToNode)
+ default:
+ return { value: node.value, cumulativeWeight: weightUpToNode }
+ }
+ }
+
+ /**
+ * @remark: BE AWARE TESTING PURPOSES ONLY
*/
function valueAtInorderPos (position) {
if (tree !== null && position >= 1) {
@@ -333,6 +463,7 @@ export function createLabelledBinarySearchTree () {
if (currentTree !== null) {
currentTree.label = null
currentTree.value = null
+ currentTree.weight = null
if (currentTree.leftNode !== null) {
resetTree(currentTree.leftNode)
currentTree.leftNode = null
@@ -342,6 +473,7 @@ export function createLabelledBinarySearchTree () {
currentTree.rightNode = null
}
currentTree.numberOfLeafsAndNodes = null
+ currentTree.totalWeight = null
}
}
@@ -349,11 +481,13 @@ export function createLabelledBinarySearchTree () {
push,
remove,
size,
+ totalWeight,
numberOfValuesAbove,
numberOfValuesEqualOrBelow,
minimum,
maximum,
median,
+ weightedMedian,
valueAtInorderPos,
orderedSeries,
reset
diff --git a/app/engine/utils/BinarySearchTree.test.js b/app/engine/utils/BinarySearchTree.test.js
index e7b8e541ae..2e6c1151ff 100644
--- a/app/engine/utils/BinarySearchTree.test.js
+++ b/app/engine/utils/BinarySearchTree.test.js
@@ -1,9 +1,10 @@
'use strict'
/*
Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-
- As this object is fundamental for most other utility objects, we must test its behaviour quite thoroughly
*/
+/**
+ * As this object is fundamental for most other utility objects, we must test its behaviour quite thoroughly
+ */
import { test } from 'uvu'
import * as assert from 'uvu/assert'
@@ -12,32 +13,37 @@ import { createLabelledBinarySearchTree } from './BinarySearchTree.js'
test('Series behaviour with an empty tree', () => {
const dataTree = createLabelledBinarySearchTree()
testSize(dataTree, 0)
+ testTotalWeight(dataTree, 0)
testNumberOfValuesAbove(dataTree, 0, 0)
testNumberOfValuesEqualOrBelow(dataTree, 0, 0)
testNumberOfValuesAbove(dataTree, 10, 0)
testNumberOfValuesEqualOrBelow(dataTree, 10, 0)
testMedian(dataTree, 0)
+ testWeightedMedian(dataTree, undefined)
})
test('Tree behaviour with a single pushed value. Tree = [9]', () => {
const dataTree = createLabelledBinarySearchTree()
- dataTree.push(1, 9)
+ dataTree.push(1, 9, 1)
testOrderedSeries(dataTree, [9])
testSize(dataTree, 1)
+ testTotalWeight(dataTree, 1)
testValueAtInorderPos(dataTree, 1, 9)
testNumberOfValuesAbove(dataTree, 0, 1)
testNumberOfValuesEqualOrBelow(dataTree, 0, 0)
testNumberOfValuesAbove(dataTree, 10, 0)
testNumberOfValuesEqualOrBelow(dataTree, 10, 1)
testMedian(dataTree, 9)
+ testWeightedMedian(dataTree, 9)
})
test('Tree behaviour with a second pushed value. Tree = [9, 3]', () => {
const dataTree = createLabelledBinarySearchTree()
- dataTree.push(1, 9)
- dataTree.push(2, 3)
+ dataTree.push(1, 9, 1)
+ dataTree.push(2, 3, 1)
testOrderedSeries(dataTree, [3, 9])
testSize(dataTree, 2)
+ testTotalWeight(dataTree, 2)
testValueAtInorderPos(dataTree, 1, 3)
testValueAtInorderPos(dataTree, 2, 9)
testNumberOfValuesAbove(dataTree, 0, 2)
@@ -45,15 +51,17 @@ test('Tree behaviour with a second pushed value. Tree = [9, 3]', () => {
testNumberOfValuesAbove(dataTree, 10, 0)
testNumberOfValuesEqualOrBelow(dataTree, 10, 2)
testMedian(dataTree, 6)
+ testWeightedMedian(dataTree, 6)
})
test('Tree behaviour with a third pushed value. Tree = [9, 3, 6]', () => {
const dataTree = createLabelledBinarySearchTree()
- dataTree.push(1, 9)
- dataTree.push(2, 3)
- dataTree.push(3, 6)
+ dataTree.push(1, 9, 1)
+ dataTree.push(2, 3, 1)
+ dataTree.push(3, 6, 1)
testOrderedSeries(dataTree, [3, 6, 9])
testSize(dataTree, 3)
+ testTotalWeight(dataTree, 3)
testValueAtInorderPos(dataTree, 1, 3)
testValueAtInorderPos(dataTree, 2, 6)
testValueAtInorderPos(dataTree, 3, 9)
@@ -62,17 +70,23 @@ test('Tree behaviour with a third pushed value. Tree = [9, 3, 6]', () => {
testNumberOfValuesAbove(dataTree, 10, 0)
testNumberOfValuesEqualOrBelow(dataTree, 10, 3)
testMedian(dataTree, 6)
+ testWeightedMedian(dataTree, 6)
})
test('Tree behaviour with a fourth pushed value. Tree = [3, 6, 12]', () => {
const dataTree = createLabelledBinarySearchTree()
- dataTree.push(1, 9)
- dataTree.push(2, 3)
- dataTree.push(3, 6)
+ dataTree.push(1, 9, 0.5)
+ dataTree.push(2, 3, 0)
+ dataTree.push(3, 6, 1)
+ testSize(dataTree, 3)
+ testTotalWeight(dataTree, 1.5)
dataTree.remove(1)
- dataTree.push(4, 12)
+ testSize(dataTree, 2)
+ testTotalWeight(dataTree, 1)
+ dataTree.push(4, 12, 1)
testOrderedSeries(dataTree, [3, 6, 12])
testSize(dataTree, 3)
+ testTotalWeight(dataTree, 2)
testValueAtInorderPos(dataTree, 1, 3)
testValueAtInorderPos(dataTree, 2, 6)
testValueAtInorderPos(dataTree, 3, 12)
@@ -81,19 +95,21 @@ test('Tree behaviour with a fourth pushed value. Tree = [3, 6, 12]', () => {
testNumberOfValuesAbove(dataTree, 10, 1)
testNumberOfValuesEqualOrBelow(dataTree, 10, 2)
testMedian(dataTree, 6)
+ testWeightedMedian(dataTree, 9)
})
test('Tree behaviour with a fifth pushed value. Series = [6, 12, -3]', () => {
const dataTree = createLabelledBinarySearchTree()
- dataTree.push(1, 9)
- dataTree.push(2, 3)
- dataTree.push(3, 6)
+ dataTree.push(1, 9, 0)
+ dataTree.push(2, 3, 0)
+ dataTree.push(3, 6, 0)
dataTree.remove(1)
- dataTree.push(4, 12)
+ dataTree.push(4, 12, 1)
dataTree.remove(2)
- dataTree.push(5, -3)
+ dataTree.push(5, -3, 0)
testOrderedSeries(dataTree, [-3, 6, 12])
testSize(dataTree, 3)
+ testTotalWeight(dataTree, 1)
testValueAtInorderPos(dataTree, 1, -3)
testValueAtInorderPos(dataTree, 2, 6)
testValueAtInorderPos(dataTree, 3, 12)
@@ -102,66 +118,79 @@ test('Tree behaviour with a fifth pushed value. Series = [6, 12, -3]', () => {
testNumberOfValuesAbove(dataTree, 10, 1)
testNumberOfValuesEqualOrBelow(dataTree, 10, 2)
testMedian(dataTree, 6)
+ testWeightedMedian(dataTree, 12)
})
test('Tree behaviour with complex removals. Series = [9, 6, 5, 8, 7, 9, 12, 10, 11]', () => {
const dataTree = createLabelledBinarySearchTree()
- dataTree.push(1, 9)
- dataTree.push(2, 6)
- dataTree.push(3, 5)
- dataTree.push(4, 8)
- dataTree.push(5, 7)
- dataTree.push(6, 9)
- dataTree.push(7, 12)
- dataTree.push(8, 10)
- dataTree.push(9, 11)
+ dataTree.push(1, 9, 0.5)
+ dataTree.push(2, 6, 0.5)
+ dataTree.push(3, 5, 1)
+ dataTree.push(4, 8, 0.5)
+ dataTree.push(5, 7, 1)
+ dataTree.push(6, 9, 1)
+ dataTree.push(7, 12, 1)
+ dataTree.push(8, 10, 1)
+ dataTree.push(9, 11, 1)
testOrderedSeries(dataTree, [5, 6, 7, 8, 9, 9, 10, 11, 12])
testSize(dataTree, 9)
+ testTotalWeight(dataTree, 7.5)
testValueAtInorderPos(dataTree, 5, 9)
testMedian(dataTree, 9)
+ testMedian(dataTree, 9)
dataTree.remove(1)
testOrderedSeries(dataTree, [5, 6, 7, 8, 9, 10, 11, 12])
testSize(dataTree, 8)
+ testTotalWeight(dataTree, 7)
testValueAtInorderPos(dataTree, 4, 8)
testValueAtInorderPos(dataTree, 5, 9)
testMedian(dataTree, 8.5)
+ testWeightedMedian(dataTree, 9)
dataTree.remove(3)
testOrderedSeries(dataTree, [6, 7, 8, 9, 10, 11, 12])
testSize(dataTree, 7)
+ testTotalWeight(dataTree, 6)
testValueAtInorderPos(dataTree, 4, 9)
testMedian(dataTree, 9)
+ testWeightedMedian(dataTree, 9.5)
})
// Test based on https://levelup.gitconnected.com/deletion-in-binary-search-tree-with-javascript-fded82e1791c
test('Tree behaviour with complex removals. Series = [50, 30, 70, 20, 40, 60, 80]', () => {
const dataTree = createLabelledBinarySearchTree()
- dataTree.push(1, 50)
- dataTree.push(2, 30)
- dataTree.push(3, 70)
- dataTree.push(4, 20)
- dataTree.push(5, 40)
- dataTree.push(6, 60)
- dataTree.push(7, 80)
+ dataTree.push(1, 50, 1)
+ dataTree.push(2, 30, 1)
+ dataTree.push(3, 70, 0.5)
+ dataTree.push(4, 20, 1)
+ dataTree.push(5, 40, 1)
+ dataTree.push(6, 60, 1)
+ dataTree.push(7, 80, 0.5)
testOrderedSeries(dataTree, [20, 30, 40, 50, 60, 70, 80])
testSize(dataTree, 7)
+ testTotalWeight(dataTree, 6)
testValueAtInorderPos(dataTree, 4, 50)
dataTree.remove(4)
testOrderedSeries(dataTree, [30, 40, 50, 60, 70, 80])
testSize(dataTree, 6)
+ testTotalWeight(dataTree, 5)
testValueAtInorderPos(dataTree, 3, 50)
testValueAtInorderPos(dataTree, 4, 60)
testMedian(dataTree, 55)
+ testWeightedMedian(dataTree, 50)
dataTree.remove(2)
testOrderedSeries(dataTree, [40, 50, 60, 70, 80])
testSize(dataTree, 5)
testValueAtInorderPos(dataTree, 3, 60)
testMedian(dataTree, 60)
+ testWeightedMedian(dataTree, 55)
dataTree.remove(1)
testOrderedSeries(dataTree, [40, 60, 70, 80])
testSize(dataTree, 4)
+ testTotalWeight(dataTree, 3)
testValueAtInorderPos(dataTree, 2, 60)
testValueAtInorderPos(dataTree, 3, 70)
testMedian(dataTree, 65)
+ testWeightedMedian(dataTree, 60)
})
test('Tree behaviour with a five pushed values followed by a reset, Tree = []', () => {
@@ -184,6 +213,10 @@ function testSize (tree, expectedValue) {
assert.ok(tree.size() === expectedValue, `Expected size should be ${expectedValue}, encountered ${tree.size()}`)
}
+function testTotalWeight (tree, expectedValue) {
+ assert.ok(tree.totalWeight() === expectedValue, `Expected weight should be ${expectedValue}, encountered ${tree.totalWeight()}`)
+}
+
function testNumberOfValuesAbove (tree, cutoff, expectedValue) {
assert.ok(tree.numberOfValuesAbove(cutoff) === expectedValue, `Expected numberOfValuesAbove(${cutoff}) to be ${expectedValue}, encountered ${tree.numberOfValuesAbove(cutoff)}`)
}
@@ -204,4 +237,8 @@ function testMedian (tree, expectedValue) {
assert.ok(tree.median() === expectedValue, `Expected median to be ${expectedValue}, encountered ${tree.median()}`)
}
+function testWeightedMedian (tree, expectedValue) {
+ assert.ok(tree.weightedMedian() === expectedValue, `Expected weighted median to be ${expectedValue}, encountered ${tree.weightedMedian()}`)
+}
+
test.run()
diff --git a/app/engine/utils/CyclicErrorFilter.js b/app/engine/utils/CyclicErrorFilter.js
new file mode 100644
index 0000000000..7c8532bd31
--- /dev/null
+++ b/app/engine/utils/CyclicErrorFilter.js
@@ -0,0 +1,264 @@
+'use strict'
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This implements a cyclic error filter. This is used to create a profile
+ * The filterArray does the calculation, the slope and intercept arrays contain the results for easy retrieval
+ * the slopeCorrection and interceptCorrection ensure preventing time dilation due to excessive corrections
+ * @see {@link https://github.com/JaapvanEkris/openrowingmonitor/blob/main/docs/Mathematical_Foundations.md|for the underlying math description}
+ */
+import loglevel from 'loglevel'
+import { createSeries } from './Series.js'
+import { createWLSLinearSeries } from './WLSLinearSeries.js'
+
+const log = loglevel.getLogger('RowingEngine')
+
+/**
+ * @param {object} rowerSettings - The rower settings configuration object
+ * @param {integer} rowerSettings.numOfImpulsesPerRevolution - Number of impulses per flywheel revolution
+ * @param {integer} rowerSettings.flankLength - Length of the flank used
+ * @param {boolean} rowerSettings.autoAdjustDragFactor - Indicates if the Flywheel.js is allowed to automatically adjust dragfactor (false turns the filter off)
+ * @param {float} rowerSettings.systematicErrorAgressiveness - Aggressiveness of the systematic error correction algorithm (0 turns the filter off)
+ * @param {integer} rowerSettings.systematicErrorNumberOfDatapoints - size of the systematic error correction algorithm filter
+ * @param {float} rowerSettings.minimumTimeBetweenImpulses - minimum expected time between impulses (in seconds)
+ * @param {float} rowerSettings.maximumTimeBetweenImpulses - maximum expected time between impulses (in seconds)
+ * @param {function} deltaTime - injection of the linear regression function used for the drag calculation
+ */
+export function createCyclicErrorFilter (rowerSettings, deltaTime) {
+ const _numberOfMagnets = rowerSettings.numOfImpulsesPerRevolution
+ const _flankLength = rowerSettings.flankLength
+ const _agressiveness = Math.min(Math.max(rowerSettings.systematicErrorAgressiveness, 0), 1)
+ const _invAgressiveness = Math.min(Math.max(1 - _agressiveness, 0), 1)
+ const _numberOfFilterSamples = Math.max(Math.round((rowerSettings.systematicErrorNumberOfDatapoints / _numberOfMagnets)), 5)
+ const _minimumTimeBetweenImpulses = rowerSettings.minimumTimeBetweenImpulses
+ const _maximumTimeBetweenImpulses = rowerSettings.maximumTimeBetweenImpulses
+ const raw = createSeries(_flankLength)
+ const clean = createSeries(_flankLength)
+ const goodnessOfFit = createSeries(_flankLength)
+ const linearRegressor = deltaTime
+ const domainBorder = (_minimumTimeBetweenImpulses > 0 ? _minimumTimeBetweenImpulses : 0.0001)
+ let recordedRelativePosition = []
+ let recordedAbsolutePosition = []
+ let recordedRawValue = []
+ let filterArray = []
+ let slope = []
+ let intercept = []
+ let startPosition
+ let lowerCursor
+ let upperCursor
+ let slopeSum = _numberOfMagnets
+ let interceptSum = 0
+ let slopeCorrection = 1
+ let interceptCorrection = 0
+ resetFilterConfiguration()
+
+ /**
+ * @param {float} the raw recorded value to be cleaned up
+ * @param {integer} the position of the flywheel
+ * @returns {object} result
+ * @returns {float} result.clean - the resulting clean value
+ * @returns {float} result.goodnessOfFit - The goodness of fit indication for the specific datapoint
+ * @description Applies the filter on the raw value for the given position (i.e. magnet). Please note: this function is NOT stateless, it also fills a historic buffer of raw and clean values
+ */
+ function applyFilter (rawValue, position) {
+ if (startPosition === undefined) { startPosition = position + _flankLength }
+ const magnet = position % _numberOfMagnets
+ raw.push(rawValue)
+
+ if (rowerSettings.autoAdjustDragFactor && _agressiveness > 0) {
+ const cleanValue = projectX(magnet, rawValue)
+ clean.push(cleanValue)
+ goodnessOfFit.push(filterArray[magnet].goodnessOfFit() * domainFit(rawValue) * domainFit(cleanValue))
+ } else {
+ // In essence, the filter is turned off
+ clean.push(rawValue)
+ goodnessOfFit.push(1)
+ }
+
+ return {
+ clean: clean.atSeriesEnd(),
+ goodnessOfFit: goodnessOfFit.atSeriesEnd()
+ }
+ }
+
+ /**
+ * @param {integer} magnet - the magnet number
+ * @param {float} rawValue - the raw value to be projected by the function for that magnet
+ * @returns {float} projected result
+ */
+ function projectX (magnet, rawValue) {
+ return (rawValue * slope[magnet] * slopeCorrection) + (intercept[magnet] - interceptCorrection)
+ }
+
+ /**
+ * @param {float} rawValue - the raw value to be mapped onto the domain
+ * @returns {float} an indication of the fit with the domain
+ * @description a very crude approach to downgrade the weight that outliers bring to the Weighted TS algorithms. Extremely crude, but it works.
+ */
+ function domainFit (value) {
+ switch (true) {
+ case (value < _minimumTimeBetweenImpulses):
+ // We are below the intended range
+ return Math.min(Math.max(1 - ((_minimumTimeBetweenImpulses - value) / domainBorder), 0.001), 1)
+ case (value > _maximumTimeBetweenImpulses):
+ // We are above the intended range
+ return Math.min(Math.max(1 - ((value - _maximumTimeBetweenImpulses) / domainBorder), 0.001), 1)
+ default:
+ // We are inside the intended range
+ return 1
+ }
+ }
+
+ /**
+ * @returns {object} result - provides the (oldest) object at the head of the FIFO buffer, as once returned as a response to the 'applyFilter()' function
+ * @returns {float} result.clean - the resulting clean value as once returned
+ * @returns {float} result.raw - the initial (raw) datapoint before applying the filter
+ * @returns {float} result.goodnessOfFit - The goodness of fit indication for the specific datapoint
+ */
+ function atSeriesBegin () {
+ if (clean.length() >= _flankLength) {
+ return {
+ clean: clean.atSeriesBegin(),
+ raw: raw.atSeriesBegin(),
+ goodnessOfFit: goodnessOfFit.atSeriesBegin()
+ }
+ } else {
+ return {
+ clean: undefined,
+ raw: undefined,
+ goodnessOfFit: 0
+ }
+ }
+ }
+
+ /**
+ * @param {integer} relativePosition - the position of the recorded datapoint (i.e the sequence number of the datapoint)
+ * @param {float} absolutePosition - the total spinning time of the flywheel
+ * @param {float} rawValue - the raw value
+ */
+ function recordRawDatapoint (relativePosition, absolutePosition, rawValue) {
+ if (rowerSettings.autoAdjustDragFactor && _agressiveness > 0 && rawValue >= _minimumTimeBetweenImpulses && _maximumTimeBetweenImpulses >= rawValue) {
+ recordedRelativePosition.push(relativePosition)
+ recordedAbsolutePosition.push(absolutePosition)
+ recordedRawValue.push(rawValue)
+ }
+ }
+
+ /**
+ * @description This processes a next two datapoints from the queue
+ */
+ function processNextRawDatapoint () {
+ let perfectCurrentDt
+ let weightCorrectedCorrectedDatapoint
+ let GoF
+ if (lowerCursor === undefined || upperCursor === undefined) {
+ lowerCursor = Math.ceil(recordedRelativePosition.length * 0.1)
+ upperCursor = Math.floor(recordedRelativePosition.length * 0.9)
+ }
+
+ if (lowerCursor < upperCursor && recordedRelativePosition[lowerCursor] > startPosition) {
+ perfectCurrentDt = linearRegressor.projectX(recordedAbsolutePosition[lowerCursor])
+ weightCorrectedCorrectedDatapoint = (_invAgressiveness * recordedRawValue[lowerCursor]) + (_agressiveness * perfectCurrentDt)
+ GoF = linearRegressor.goodnessOfFit() * linearRegressor.localGoodnessOfFit(lowerCursor)
+ updateFilter(recordedRelativePosition[lowerCursor] % _numberOfMagnets, recordedRawValue[lowerCursor], weightCorrectedCorrectedDatapoint, GoF)
+ }
+ lowerCursor++
+
+ if (lowerCursor < upperCursor && recordedRelativePosition[upperCursor] > startPosition) {
+ perfectCurrentDt = linearRegressor.projectX(recordedAbsolutePosition[upperCursor])
+ weightCorrectedCorrectedDatapoint = (_invAgressiveness * recordedRawValue[upperCursor]) + (_agressiveness * perfectCurrentDt)
+ GoF = linearRegressor.goodnessOfFit() * linearRegressor.localGoodnessOfFit(upperCursor)
+ updateFilter(recordedRelativePosition[upperCursor] % _numberOfMagnets, recordedRawValue[upperCursor], weightCorrectedCorrectedDatapoint, GoF)
+ }
+ upperCursor--
+ }
+
+ /**
+ * @description Helper function to actually update the filter and calculate all dependent parameters
+ */
+ function updateFilter (magnet, rawDatapoint, correctedDatapoint, goodnessOfFit) {
+ slopeSum -= slope[magnet]
+ interceptSum -= intercept[magnet]
+ filterArray[magnet].push(rawDatapoint, correctedDatapoint, goodnessOfFit)
+ slope[magnet] = filterArray[magnet].slope()
+ slopeSum += slope[magnet]
+ if (slopeSum !== 0) { slopeCorrection = _numberOfMagnets / slopeSum }
+ intercept[magnet] = filterArray[magnet].intercept()
+ interceptSum += intercept[magnet]
+ interceptCorrection = interceptSum / _numberOfMagnets
+ }
+
+ /**
+ * @description This function is used for clearing the buffers in order to prepare to record for a new set of datapoints, or clear it when the buffer is filled with a recovery with too weak GoF
+ */
+ function clearDatapointBuffer () {
+ if (isNaN(lowerCursor)) { log.trace('*** Cyclic error filter: cleared datapoint buffer before processing its datapoints has started') }
+ recordedRelativePosition = []
+ recordedAbsolutePosition = []
+ recordedRawValue = []
+ lowerCursor = undefined
+ upperCursor = undefined
+ }
+
+ /**
+ * @description This function is used for clearing the predictive buffers as the flywheel seems to have stopped
+ */
+ function resetFilterConfiguration () {
+ if (slopeSum !== _numberOfMagnets || interceptSum !== 0) { log.debug('*** WARNING: cyclic error filter has configuration forcefully been reset') }
+ const noIncrements = _numberOfFilterSamples
+ const increment = (_maximumTimeBetweenImpulses - _minimumTimeBetweenImpulses) / noIncrements
+
+ lowerCursor = undefined
+ clearDatapointBuffer()
+
+ let i = 0
+ let j = 0
+ let datapoint = 0
+ while (i < _numberOfMagnets) {
+ if (i < filterArray.length) {
+ filterArray[i]?.reset()
+ } else {
+ filterArray[i] = createWLSLinearSeries(_numberOfFilterSamples)
+ }
+ j = 0
+ while (j <= noIncrements) {
+ // This initializes this filter with an identity function (the clean value will be identical to the raw value), to allow a controlled startup of the filter
+ datapoint = _maximumTimeBetweenImpulses - (j * increment)
+ filterArray[i].push(datapoint, datapoint, 0.5)
+ j++
+ }
+ slope[i] = 1
+ intercept[i] = 0
+ i++
+ }
+ slopeSum = _numberOfMagnets
+ interceptSum = 0
+ slopeCorrection = 1
+ interceptCorrection = 0
+ startPosition = undefined
+ }
+
+ /**
+ * @description This function is used for clearing all buffers (i.e. the currentDt's maintained in the flank and the predictive buffers) when the flywheel is completely reset
+ */
+ function reset () {
+ log.debug('*** WARNING: cyclic error filter is reset')
+ slopeSum = _numberOfMagnets
+ interceptSum = 0
+ resetFilterConfiguration()
+ raw.reset()
+ clean.reset()
+ goodnessOfFit.reset()
+ }
+
+ return {
+ applyFilter,
+ recordRawDatapoint,
+ processNextRawDatapoint,
+ updateFilter,
+ atSeriesBegin,
+ clearDatapointBuffer,
+ resetFilterConfiguration,
+ reset
+ }
+}
diff --git a/app/engine/utils/CyclicErrorFilter.test.js b/app/engine/utils/CyclicErrorFilter.test.js
new file mode 100644
index 0000000000..c476a7ed16
--- /dev/null
+++ b/app/engine/utils/CyclicErrorFilter.test.js
@@ -0,0 +1,192 @@
+'use strict'
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This tests all functions of the CyclicErrorCorrection filter
+ */
+import { test } from 'uvu'
+import * as assert from 'uvu/assert'
+
+import { createCyclicErrorFilter } from './CyclicErrorFilter.js'
+
+const baseRowerConfig = {
+ numOfImpulsesPerRevolution: 2,
+ flankLength: 4,
+ autoAdjustDragFactor: true,
+ systematicErrorAgressiveness: 1.0,
+ systematicErrorNumberOfDatapoints: 20,
+ minimumTimeBetweenImpulses: 0.5,
+ maximumTimeBetweenImpulses: 1
+}
+
+function baseRegressionFunction () {
+ /**
+ * @todo Implement a basic regression function to measure the noise residue
+ */
+}
+
+/**
+ * @description This test verifies the filter's initial behaviour.
+ * As no changes have been made to the filter profiles, one would expect that clean = raw values, and a Goodness of Fit of 1 inside the domain
+ */
+test('Correct behaviour of the filter directly after initialisation, without filter updates, including domain filter behaviour and sync with flank', () => {
+ let cleanCurrentDt
+ let currentDtAtSeriesBegin
+ const CECFilter = createCyclicErrorFilter(baseRowerConfig, baseRegressionFunction)
+ cleanCurrentDt = CECFilter.applyFilter(1.6, 5)
+ testCleanValueEquals(cleanCurrentDt, 1.6)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.000001)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, undefined)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, undefined)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0)
+ cleanCurrentDt = CECFilter.applyFilter(1.5, 6)
+ testCleanValueEquals(cleanCurrentDt, 1.5)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.000001)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, undefined)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, undefined)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0)
+ cleanCurrentDt = CECFilter.applyFilter(1.4, 7)
+ testCleanValueEquals(cleanCurrentDt, 1.4)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.04000000000000007)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, undefined)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, undefined)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0)
+ cleanCurrentDt = CECFilter.applyFilter(1.3, 8)
+ testCleanValueEquals(cleanCurrentDt, 1.3)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.15999999999999992)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 1.6)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 1.6)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.000001)
+ cleanCurrentDt = CECFilter.applyFilter(1.2, 9)
+ testCleanValueEquals(cleanCurrentDt, 1.2)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.3600000000000001)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 1.5)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 1.5)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.000001)
+ cleanCurrentDt = CECFilter.applyFilter(1.1, 10)
+ testCleanValueEquals(cleanCurrentDt, 1.1)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.6399999999999997)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 1.4)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 1.4)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.04000000000000007)
+ cleanCurrentDt = CECFilter.applyFilter(1.0, 11)
+ testCleanValueEquals(cleanCurrentDt, 1.0)
+ testGoodnessOfFitEquals(cleanCurrentDt, 1.0)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 1.3)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 1.3)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.15999999999999992)
+ cleanCurrentDt = CECFilter.applyFilter(0.9, 12)
+ testCleanValueEquals(cleanCurrentDt, 0.9)
+ testGoodnessOfFitEquals(cleanCurrentDt, 1.0)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 1.2)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 1.2)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.3600000000000001)
+ cleanCurrentDt = CECFilter.applyFilter(0.8, 13)
+ testCleanValueEquals(cleanCurrentDt, 0.8)
+ testGoodnessOfFitEquals(cleanCurrentDt, 1.0)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 1.1)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 1.1)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.6399999999999997)
+ cleanCurrentDt = CECFilter.applyFilter(0.7, 14)
+ testCleanValueEquals(cleanCurrentDt, 0.7)
+ testGoodnessOfFitEquals(cleanCurrentDt, 1.0)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ cleanCurrentDt = CECFilter.applyFilter(0.6, 15)
+ testCleanValueEquals(cleanCurrentDt, 0.6)
+ testGoodnessOfFitEquals(cleanCurrentDt, 1.0)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.9)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.9)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ cleanCurrentDt = CECFilter.applyFilter(0.5, 16)
+ testCleanValueEquals(cleanCurrentDt, 0.5)
+ testGoodnessOfFitEquals(cleanCurrentDt, 1.0)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.8)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.8)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ cleanCurrentDt = CECFilter.applyFilter(0.4, 17)
+ testCleanValueEquals(cleanCurrentDt, 0.4)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.6400000000000001)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.7)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.7)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ cleanCurrentDt = CECFilter.applyFilter(0.3, 18)
+ testCleanValueEquals(cleanCurrentDt, 0.3)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.36)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.6)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.6)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ cleanCurrentDt = CECFilter.applyFilter(0.2, 19)
+ testCleanValueEquals(cleanCurrentDt, 0.2)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.16000000000000003)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.5)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.5)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 1.0)
+ cleanCurrentDt = CECFilter.applyFilter(0.1, 20)
+ testCleanValueEquals(cleanCurrentDt, 0.1)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.03999999999999998)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.4)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.4)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.6400000000000001)
+ cleanCurrentDt = CECFilter.applyFilter(0.0, 21)
+ testCleanValueEquals(cleanCurrentDt, 0.0)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.000001)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.3)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.3)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.36)
+ cleanCurrentDt = CECFilter.applyFilter(-0.1, 22)
+ testCleanValueEquals(cleanCurrentDt, -0.1)
+ testGoodnessOfFitEquals(cleanCurrentDt, 0.000001)
+ currentDtAtSeriesBegin = CECFilter.atSeriesBegin()
+ testRawValueAtBeginEquals(currentDtAtSeriesBegin, 0.2)
+ testCleanValueAtBeginEquals(currentDtAtSeriesBegin, 0.2)
+ testGoodnessOfFitAtBeginEquals(currentDtAtSeriesBegin, 0.16000000000000003)
+})
+
+/**
+ * @todo Add test to verify correct updates of the filters due to known noise
+ */
+
+/**
+ * @todo Add test to verify good application of non-identity filters
+ */
+
+function testCleanValueEquals (object, expectedValue) {
+ assert.ok(object.clean === expectedValue, `Expected cleaned currentDt value to be ${expectedValue}, encountered ${object.clean}`)
+}
+
+function testGoodnessOfFitEquals (object, expectedValue) {
+ assert.ok(object.goodnessOfFit === expectedValue, `Expected goodnessOfFit to be ${expectedValue}, encountered ${object.goodnessOfFit}`)
+}
+
+function testRawValueAtBeginEquals (object, expectedValue) {
+ assert.ok(object.raw === expectedValue, `Expected raw value atSeriesBegin() to be ${expectedValue}, encountered ${object.raw}`)
+}
+
+function testCleanValueAtBeginEquals (object, expectedValue) {
+ assert.ok(object.clean === expectedValue, `Expected clean value atSeriesBegin() to be ${expectedValue}, encountered ${object.clean}`)
+}
+
+function testGoodnessOfFitAtBeginEquals (object, expectedValue) {
+ assert.ok(object.goodnessOfFit === expectedValue, `Expected goodnessOfFit atSeriesBegin() to be ${expectedValue}, encountered ${object.goodnessOfFit}`)
+}
+
+test.run()
diff --git a/app/engine/utils/FullTSLinearSeries.js b/app/engine/utils/FullTSLinearSeries.js
deleted file mode 100644
index 3cef184821..0000000000
--- a/app/engine/utils/FullTSLinearSeries.js
+++ /dev/null
@@ -1,217 +0,0 @@
-'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-
- The TSLinearSeries is a datatype that represents a Linear Series. It allows
- values to be retrieved (like a FiFo buffer, or Queue) but it also includes
- a Theil-Sen estimator Linear Regressor to determine the slope of this timeseries.
-
- At creation its length is determined. After it is filled, the oldest will be pushed
- out of the queue) automatically. This is a property of the Series object
-
- A key constraint is to prevent heavy calculations at the end (due to large
- array based curve fitting), which might happen on a Pi zero
-
- In order to prevent unneccessary calculations, this implementation uses lazy evaluation,
- so it will calculate the intercept and goodnessOfFit only when needed, as many uses only
- (first) need the slope.
-
- This implementation uses concepts that are described here:
- https://en.wikipedia.org/wiki/Theil%E2%80%93Sen_estimator
-
- The array is ordered such that x[0] is the oldest, and x[x.length-1] is the youngest
-*/
-
-import { createSeries } from './Series.js'
-import { createLabelledBinarySearchTree } from './BinarySearchTree.js'
-
-import loglevel from 'loglevel'
-const log = loglevel.getLogger('RowingEngine')
-
-export function createTSLinearSeries (maxSeriesLength = 0) {
- const X = createSeries(maxSeriesLength)
- const Y = createSeries(maxSeriesLength)
- const A = createLabelledBinarySearchTree()
-
- let _A = 0
- let _B = 0
- let _goodnessOfFit = 0
-
- function push (x, y) {
- // Invariant: A contains all a's (as in the general formula y = a * x + b)
- // Where the a's are labeled in the Binary Search Tree with their xi when they BEGIN in the point (xi, yi)
- if (x === undefined || isNaN(x) || y === undefined || isNaN(y)) { return }
-
- if (maxSeriesLength > 0 && X.length() >= maxSeriesLength) {
- // The maximum of the array has been reached, so when pushing the x,y the array gets shifted,
- // thus we have to remove the a's belonging to the current position X0 as well before this value is trashed
- A.remove(X.get(0))
- }
-
- X.push(x)
- Y.push(y)
-
- // Calculate all the slopes of the newly added point
- if (X.length() > 1) {
- // There are at least two points in the X and Y arrays, so let's add the new datapoint
- let i = 0
- while (i < X.length() - 1) {
- // Calculate the slope with all preceeding datapoints and X.length() - 1'th datapoint (as the array starts at zero)
- A.push(X.get(i), calculateSlope(i, X.length() - 1))
- i++
- }
- }
-
- // Calculate the median of the slopes
- if (X.length() > 1) {
- _A = A.median()
- } else {
- _A = 0
- }
-
- // Invalidate the previously calculated intercept and goodnessOfFit. We'll only calculate them if we need them
- _B = null
- _goodnessOfFit = null
- }
-
- function slope () {
- return _A
- }
-
- function intercept () {
- calculateIntercept()
- return _B
- }
-
- function coefficientA () {
- // For testing purposses only!
- return _A
- }
-
- function coefficientB () {
- // For testing purposses only!
- calculateIntercept()
- return _B
- }
-
- function length () {
- return X.length()
- }
-
- function goodnessOfFit () {
- // This function returns the R^2 as a goodness of fit indicator
- // It will automatically recalculate the _goodnessOfFit when it isn't defined
- // This lazy approach is intended to prevent unneccesary calculations
- let i = 0
- let sse = 0
- let sst = 0
- if (_goodnessOfFit === null) {
- if (X.length() >= 2) {
- while (i < X.length()) {
- sse += Math.pow((Y.get(i) - projectX(X.get(i))), 2)
- sst += Math.pow((Y.get(i) - Y.average()), 2)
- i++
- }
- switch (true) {
- case (sse === 0):
- _goodnessOfFit = 1
- break
- case (sse > sst):
- // This is a pretty bad fit as the error is bigger than just using the line for the average y as intercept
- _goodnessOfFit = 0
- break
- case (sst !== 0):
- _goodnessOfFit = 1 - (sse / sst)
- break
- default:
- // When SST = 0, R2 isn't defined
- _goodnessOfFit = 0
- }
- } else {
- _goodnessOfFit = 0
- }
- }
- return _goodnessOfFit
- }
-
- function projectX (x) {
- if (X.length() >= 2) {
- calculateIntercept()
- return (_A * x) + _B
- } else {
- return 0
- }
- }
-
- function projectY (y) {
- if (X.length() >= 2 && _A !== 0) {
- calculateIntercept()
- return ((y - _B) / _A)
- } else {
- log.error('TS Linear Regressor, attempted a Y-projection while slope was zero!')
- return 0
- }
- }
-
- function calculateSlope (pointOne, pointTwo) {
- if (pointOne !== pointTwo && X.get(pointOne) !== X.get(pointTwo)) {
- return ((Y.get(pointTwo) - Y.get(pointOne)) / (X.get(pointTwo) - X.get(pointOne)))
- } else {
- log.error('TS Linear Regressor, Division by zero prevented!')
- return 0
- }
- }
-
- function calculateIntercept () {
- // Calculate all the intercepts for the newly added point and the newly calculated A, when needed
- // This function is only called when an intercept is really needed, as this saves a lot of CPU cycles when only a slope suffices
- const B = createLabelledBinarySearchTree()
- if (_B === null) {
- if (X.length() > 1) {
- // There are at least two points in the X and Y arrays, so let's calculate the intercept
- let i = 0
- while (i < X.length()) {
- // Please note , as we need to recreate the B-tree for each newly added datapoint anyway, the label i isn't relevant
- B.push(i, (Y.get(i) - (_A * X.get(i))))
- i++
- }
- _B = B.median()
- } else {
- _B = 0
- }
- }
- B.reset()
- }
-
- function reliable () {
- return (X.length() >= 2)
- }
-
- function reset () {
- if (X.length() > 0) {
- // There is something to reset
- X.reset()
- Y.reset()
- A.reset()
- _A = 0
- _B = 0
- _goodnessOfFit = 0
- }
- }
-
- return {
- push,
- X,
- Y,
- slope,
- intercept,
- coefficientA,
- coefficientB,
- length,
- goodnessOfFit,
- projectX,
- projectY,
- reliable,
- reset
- }
-}
diff --git a/app/engine/utils/FullTSQuadraticSeries.js b/app/engine/utils/FullTSQuadraticSeries.js
deleted file mode 100644
index 138a899715..0000000000
--- a/app/engine/utils/FullTSQuadraticSeries.js
+++ /dev/null
@@ -1,272 +0,0 @@
-'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-
- The FullTSQuadraticSeries is a datatype that represents a Quadratic Series. It allows
- values to be retrieved (like a FiFo buffer, or Queue) but it also includes
- a Theil-Sen Quadratic Regressor to determine the coefficients of this dataseries.
-
- At creation its length is determined. After it is filled, the oldest will be pushed
- out of the queue) automatically.
-
- A key constraint is to prevent heavy calculations at the end of a stroke (due to large
- array based curve fitting), which might be performed on a Pi zero or Zero 2W
-
- In order to prevent unneccessary calculations, this implementation uses lazy evaluation,
- so it will calculate the B, C and goodnessOfFit only when needed, as many uses only
- (first) need the first and second direvative.
-
- The Theil-Senn implementation uses concepts that are described here:
- https://stats.stackexchange.com/questions/317777/theil-sen-estimator-for-polynomial,
-
- The determination of the coefficients is based on the Lagrange interpolation, which is descirbed here:
- https://www.quora.com/How-do-I-find-a-quadratic-equation-from-points/answer/Robert-Paxson,
- https://www.physicsforums.com/threads/quadratic-equation-from-3-points.404174/
-*/
-
-import { createSeries } from './Series.js'
-import { createTSLinearSeries } from './FullTSLinearSeries.js'
-import { createLabelledBinarySearchTree } from './BinarySearchTree.js'
-
-import loglevel from 'loglevel'
-const log = loglevel.getLogger('RowingEngine')
-
-export function createTSQuadraticSeries (maxSeriesLength = 0) {
- const X = createSeries(maxSeriesLength)
- const Y = createSeries(maxSeriesLength)
- const A = createLabelledBinarySearchTree()
- const linearResidu = createTSLinearSeries(maxSeriesLength)
- let _A = 0
- let _B = 0
- let _C = 0
- let _goodnessOfFit = 0
-
- function push (x, y) {
- // Invariant: A contains all a's (as in the general formula y = a * x^2 + b * x + c)
- // Where the a's are labeled in the Binary Search Tree with their Xi when they BEGIN in the point (Xi, Yi)
- if (x === undefined || isNaN(x) || y === undefined || isNaN(y)) { return }
-
- if (maxSeriesLength > 0 && X.length() >= maxSeriesLength) {
- // The maximum of the array has been reached, so when pushing the new datapoint (x,y), the array will get shifted,
- // thus we have to remove all the A's that start with the old position X0 BEFORE this value gets thrown away
- A.remove(X.get(0))
- }
-
- X.push(x)
- Y.push(y)
-
- // Calculate the coefficient a for the new interval by adding the newly added datapoint
- let i = 0
- let j = 0
-
- switch (true) {
- case (X.length() >= 3):
- // There are now at least three datapoints in the X and Y arrays, so let's calculate the A portion belonging for the new datapoint via Quadratic Theil-Sen regression
- // First we calculate the A for the formula
- while (i < X.length() - 2) {
- j = i + 1
- while (j < X.length() - 1) {
- A.push(X.get(i), calculateA(i, j, X.length() - 1))
- j++
- }
- i++
- }
- _A = A.median()
-
- // We invalidate the linearResidu, B, C, and goodnessOfFit, as this will trigger a recalculate when they are needed
- linearResidu.reset()
- _B = null
- _C = null
- _goodnessOfFit = null
- break
- default:
- _A = 0
- _B = 0
- _C = 0
- _goodnessOfFit = 0
- }
- }
-
- function firstDerivativeAtPosition (position) {
- if (X.length() >= 3 && position < X.length()) {
- calculateB()
- return ((_A * 2 * X.get(position)) + _B)
- } else {
- return 0
- }
- }
-
- function secondDerivativeAtPosition (position) {
- if (X.length() >= 3 && position < X.length()) {
- return (_A * 2)
- } else {
- return 0
- }
- }
-
- function slope (x) {
- if (X.length() >= 3) {
- calculateB()
- return ((_A * 2 * x) + _B)
- } else {
- return 0
- }
- }
-
- function coefficientA () {
- // For testing purposses only!
- return _A
- }
-
- function coefficientB () {
- // For testing purposses only!
- calculateB()
- return _B
- }
-
- function coefficientC () {
- // For testing purposses only!
- calculateB()
- calculateC()
- return _C
- }
-
- function intercept () {
- calculateB()
- calculateC()
- return _C
- }
-
- function length () {
- return X.length()
- }
-
- function goodnessOfFit () {
- // This function returns the R^2 as a goodness of fit indicator
- let i = 0
- let sse = 0
- let sst = 0
- if (_goodnessOfFit === null) {
- if (X.length() >= 3) {
- while (i < X.length()) {
- sse += Math.pow((Y.get(i) - projectX(X.get(i))), 2)
- sst += Math.pow((Y.get(i) - Y.average()), 2)
- i++
- }
- switch (true) {
- case (sse === 0):
- _goodnessOfFit = 1
- break
- case (sse > sst):
- // This is a pretty bad fit as the error is bigger than just using the line for the average y as intercept
- _goodnessOfFit = 0
- break
- case (sst !== 0):
- _goodnessOfFit = 1 - (sse / sst)
- break
- default:
- // When SST = 0, R2 isn't defined
- _goodnessOfFit = 0
- }
- } else {
- _goodnessOfFit = 0
- }
- }
- return _goodnessOfFit
- }
-
- function projectX (x) {
- if (X.length() >= 3) {
- calculateB()
- calculateC()
- return ((_A * x * x) + (_B * x) + _C)
- } else {
- return 0
- }
- }
-
- function calculateA (pointOne, pointTwo, pointThree) {
- let result = 0
- if (X.get(pointOne) !== X.get(pointTwo) && X.get(pointOne) !== X.get(pointThree) && X.get(pointTwo) !== X.get(pointThree)) {
- // For the underlying math, see https://www.quora.com/How-do-I-find-a-quadratic-equation-from-points/answer/Robert-Paxson
- result = (X.get(pointOne) * (Y.get(pointThree) - Y.get(pointTwo)) + Y.get(pointOne) * (X.get(pointTwo) - X.get(pointThree)) + (X.get(pointThree) * Y.get(pointTwo) - X.get(pointTwo) * Y.get(pointThree))) / ((X.get(pointOne) - X.get(pointTwo)) * (X.get(pointOne) - X.get(pointThree)) * (X.get(pointTwo) - X.get(pointThree)))
- return result
- } else {
- log.error('TS Quadratic Regressor, Division by zero prevented in CalculateA!')
- return 0
- }
- }
-
- function calculateB () {
- // Calculate all the linear slope for the newly added point and the newly calculated A
- // This function is only called when a linear slope is really needed, as this saves a lot of CPU cycles when only a slope suffices
- if (_B === null) {
- if (X.length() >= 3) {
- fillLinearResidu()
- _B = linearResidu.slope()
- } else {
- _B = 0
- }
- }
- }
-
- function calculateC () {
- // Calculate all the intercept for the newly added point and the newly calculated A
- // This function is only called when a linear intercept is really needed, as this saves a lot of CPU cycles when only a slope suffices
- if (_C === null) {
- if (X.length() >= 3) {
- fillLinearResidu()
- _C = linearResidu.intercept()
- } else {
- _C = 0
- }
- }
- }
-
- function fillLinearResidu () {
- // To calculate the B and C via Linear regression over the residu, we need to fill it if empty
- if (linearResidu.length() === 0) {
- let i = 0
- while (i < X.length()) {
- linearResidu.push(X.get(i), Y.get(i) - (_A * Math.pow(X.get(i), 2)))
- i++
- }
- }
- }
-
- function reliable () {
- return (X.length() >= 3)
- }
-
- function reset () {
- if (X.length() > 0) {
- // There is something to reset
- X.reset()
- Y.reset()
- A.reset()
- linearResidu.reset()
- _A = 0
- _B = 0
- _C = 0
- _goodnessOfFit = 0
- }
- }
-
- return {
- push,
- X,
- Y,
- firstDerivativeAtPosition,
- secondDerivativeAtPosition,
- slope,
- coefficientA,
- coefficientB,
- coefficientC,
- intercept,
- length,
- goodnessOfFit,
- projectX,
- reliable,
- reset
- }
-}
diff --git a/app/engine/utils/FullTSQuadraticSeries.test.js b/app/engine/utils/FullTSQuadraticSeries.test.js
deleted file mode 100644
index 211dc1450c..0000000000
--- a/app/engine/utils/FullTSQuadraticSeries.test.js
+++ /dev/null
@@ -1,626 +0,0 @@
-'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
-/**
- * This tests the Quadratic Theil-Senn Regression algorithm. As regression is an estimation and methods have biasses,
- * we need to accept some slack with respect to real-life examples
- */
-import { test } from 'uvu'
-import * as assert from 'uvu/assert'
-
-import { createTSQuadraticSeries } from './FullTSQuadraticSeries.js'
-
-test('Quadratic Approximation startup behaviour', () => {
- const dataSeries = createTSQuadraticSeries(10)
- testCoefficientA(dataSeries, 0)
- testCoefficientB(dataSeries, 0)
- testCoefficientC(dataSeries, 0)
- dataSeries.push(-1, 2)
- testCoefficientA(dataSeries, 0)
- testCoefficientB(dataSeries, 0)
- testCoefficientC(dataSeries, 0)
- dataSeries.push(0, 2)
- testCoefficientA(dataSeries, 0)
- testCoefficientB(dataSeries, 0)
- testCoefficientC(dataSeries, 0)
- dataSeries.push(1, 6)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
-})
-
-test('Quadratic Approximation on a perfect noisefree function y = 2 * Math.pow(x, 2) + 2 * x + 2, 21 datapoints', () => {
- // Data based on 2 x^2 + 2 x + 2
- const dataSeries = createTSQuadraticSeries(21)
- dataSeries.push(-10, 182)
- dataSeries.push(-9, 146)
- dataSeries.push(-8, 114)
- dataSeries.push(-7, 86)
- dataSeries.push(-6, 62)
- dataSeries.push(-5, 42)
- dataSeries.push(-4, 26)
- dataSeries.push(-3, 14) // Pi ;)
- dataSeries.push(-2, 6)
- dataSeries.push(-1, 2)
- dataSeries.push(0, 2)
- dataSeries.push(1, 6)
- dataSeries.push(2, 14)
- dataSeries.push(3, 26)
- dataSeries.push(4, 42)
- dataSeries.push(5, 62)
- dataSeries.push(6, 86)
- dataSeries.push(7, 114)
- dataSeries.push(8, 146)
- dataSeries.push(9, 182)
- dataSeries.push(10, 222)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
- testGoodnessOfFitEquals(dataSeries, 1)
-})
-
-test('Quadratic Approximation on a perfect noisefree function y = 2 * Math.pow(x, 2) + 2 * x + 2, with 10 datapoints and some shifting in the series', () => {
- // Data based on 2 x^2 + 2 x + 2, split the dataset in two to see its behaviour when it is around the Vertex
- const dataSeries = createTSQuadraticSeries(10)
- dataSeries.push(-10, 182)
- dataSeries.push(-9, 146)
- dataSeries.push(-8, 114)
- dataSeries.push(-7, 86)
- dataSeries.push(-6, 62)
- dataSeries.push(-5, 42)
- dataSeries.push(-4, 26)
- dataSeries.push(-3, 14) // Pi ;)
- dataSeries.push(-2, 6)
- dataSeries.push(-1, 2)
- dataSeries.push(0, 2)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(1, 6)
- dataSeries.push(2, 14)
- dataSeries.push(3, 26)
- dataSeries.push(4, 42)
- dataSeries.push(5, 62)
- dataSeries.push(6, 86)
- dataSeries.push(7, 114)
- dataSeries.push(8, 146)
- dataSeries.push(9, 182)
- dataSeries.push(10, 222)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
- testGoodnessOfFitEquals(dataSeries, 1)
-})
-
-test('Quadratic Approximation on function y = 4 * Math.pow(x, 2) + 4 * x + 4, noisefree', () => {
- // Data based on 4 x^2 + 4 x + 4
- const dataSeries = createTSQuadraticSeries(11)
- dataSeries.push(-11, 444)
- dataSeries.push(-10, 364)
- dataSeries.push(-9, 292)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-8, 228)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-7, 172)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-6, 124)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-5, 84)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-4, 52)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-3, 28)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-2, 12)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-1, 4)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(0, 4)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(1, 12)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(2, 28)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(3, 52)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(4, 84)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(5, 124)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(6, 172)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(7, 228)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(8, 292)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(9, 364)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(10, 444)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 1)
-})
-
-test('Quadratic Approximation on function y = 4 * Math.pow(x, 2) + 4 * x + 4, with some noise (+/- 1)', () => {
- // Data based on 4 x^2 + 4 x + 4
- const dataSeries = createTSQuadraticSeries(11)
- dataSeries.push(-11, 443)
- dataSeries.push(-10, 365)
- dataSeries.push(-9, 291)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, -36)
- testCoefficientC(dataSeries, -195)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-8, 229)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4.333333333333334)
- testCoefficientC(dataSeries, 7.166666666666671)
- testGoodnessOfFitEquals(dataSeries, 0.9998746217034155)
- dataSeries.push(-7, 171)
- testCoefficientA(dataSeries, 3.3333333333333335)
- testCoefficientB(dataSeries, -7.999999999999991)
- testCoefficientC(dataSeries, -48.33333333333328)
- testGoodnessOfFitEquals(dataSeries, 0.9998468647471163)
- dataSeries.push(-6, 125)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 0.9999165499911914)
- dataSeries.push(-5, 83)
- testCoefficientA(dataSeries, 3.8666666666666667)
- testCoefficientB(dataSeries, 1.8666666666666671)
- testCoefficientC(dataSeries, -4.333333333333336) // This is quite acceptable as ORM ignores the C
- testGoodnessOfFitEquals(dataSeries, 0.9999366117119067)
- dataSeries.push(-4, 53)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 0.9999402806808002)
- dataSeries.push(-3, 27)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9999042318865254)
- dataSeries.push(-2, 13)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 0.9999495097395712)
- dataSeries.push(-1, 3)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9999117149452151)
- dataSeries.push(0, 5)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9998721709098177)
- dataSeries.push(1, 11)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9997996371611135)
- dataSeries.push(2, 29)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9996545703483187)
- dataSeries.push(3, 51)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9993201651380683)
- dataSeries.push(4, 85)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9987227718173796)
- dataSeries.push(5, 123)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9986961263098004)
- dataSeries.push(6, 173)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9993274803746546)
- dataSeries.push(7, 227)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9996526505917571)
- dataSeries.push(8, 293)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9998002774328024)
- dataSeries.push(9, 363)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3) // We get a 3 instead of 4, which is quite acceptable (especially since ORM ignores the C)
- testGoodnessOfFitEquals(dataSeries, 0.9998719089295779)
- dataSeries.push(10, 444)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 0.9999558104799866)
-})
-
-test('Quadratic Approximation on function y = 4 * Math.pow(x, 2) + 4 * x + 4, with some noise (+/- 1) and spikes (+/- 9)', () => {
- // Data based on 4 x^2 + 4 x + 4
- const dataSeries = createTSQuadraticSeries(11)
- dataSeries.push(-11, 443)
- dataSeries.push(-10, 365)
- dataSeries.push(-9, 291)
- dataSeries.push(-8, 229)
- dataSeries.push(-7, 171)
- dataSeries.push(-6, 125)
- dataSeries.push(-5, 83)
- dataSeries.push(-4, 53)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 0.9999402806808002)
- dataSeries.push(-3, 37) // FIRST SPIKE +9
- testCoefficientA(dataSeries, 4.215277777777778)
- testCoefficientB(dataSeries, 7.694940476190471)
- testCoefficientC(dataSeries, 18.816964285714235)
- testGoodnessOfFitEquals(dataSeries, 0.9997971509015441)
- dataSeries.push(-2, 3) // SECOND SPIKE -9
- testCoefficientA(dataSeries, 3.9714285714285715)
- testCoefficientB(dataSeries, 3.6000000000000036) // Coefficient B seems to take a hit anyway
- testCoefficientC(dataSeries, 2.842857142857163) // We get a 2.8 instead of 4, which is quite acceptable (especially since ORM ignores the C)
- testGoodnessOfFitEquals(dataSeries, 0.9991656951087963)
- dataSeries.push(-1, 3)
- testCoefficientA(dataSeries, 3.9555555555555557)
- testCoefficientB(dataSeries, 3.37777777777778)
- testCoefficientC(dataSeries, 2.4222222222222243)
- testGoodnessOfFitEquals(dataSeries, 0.9992769580376006)
- dataSeries.push(0, 5)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9988530568930122)
- dataSeries.push(1, 11)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9982053643291688)
- dataSeries.push(2, 29)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9969166946967148)
- dataSeries.push(3, 51)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9939797134586851)
- dataSeries.push(4, 85)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 5)
- testGoodnessOfFitEquals(dataSeries, 0.9888468297958631)
- dataSeries.push(5, 123)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9886212128178015)
- dataSeries.push(6, 173)
- testCoefficientA(dataSeries, 4.044444444444444)
- testCoefficientB(dataSeries, 3.822222222222223)
- testCoefficientC(dataSeries, 3.577777777777783)
- testGoodnessOfFitEquals(dataSeries, 0.9945681627011398)
- dataSeries.push(7, 227)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9968997006175546)
- dataSeries.push(8, 293)
- testCoefficientA(dataSeries, 3.9047619047619047)
- testCoefficientB(dataSeries, 4.888888888888889)
- testCoefficientC(dataSeries, 2.9682539682539684) // This is quite acceptable as ORM ignores the C
- testGoodnessOfFitEquals(dataSeries, 0.9995034675221599)
- dataSeries.push(9, 363)
- testCoefficientA(dataSeries, 4) // These results match up 100% with the previous test, showing that a spike has no carry over effects
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 3)
- testGoodnessOfFitEquals(dataSeries, 0.9998719089295779)
- dataSeries.push(10, 444)
- testCoefficientA(dataSeries, 4)
- testCoefficientB(dataSeries, 4)
- testCoefficientC(dataSeries, 4)
- testGoodnessOfFitEquals(dataSeries, 0.9999558104799866)
-})
-
-test('Quadratic TS Estimation should be decent for standard real-life example from MathBits with some noise', () => {
- // Data based on https://mathbits.com/MathBits/TISection/Statistics2/quadratic.html
- const dataSeries = createTSQuadraticSeries(13)
- dataSeries.push(10, 115.6)
- dataSeries.push(15, 157.2)
- dataSeries.push(20, 189.2)
- dataSeries.push(24, 220.8)
- dataSeries.push(30, 253.8)
- dataSeries.push(34, 269.2)
- dataSeries.push(40, 284.8)
- dataSeries.push(45, 285.0)
- dataSeries.push(48, 277.4)
- dataSeries.push(50, 269.2)
- dataSeries.push(58, 244.2)
- dataSeries.push(60, 231.4)
- dataSeries.push(64, 180.4)
- testCoefficientA(dataSeries, -0.17702838827838824) // In the example, the TI084 results in -0.1737141137, which we consider acceptably close
- testCoefficientB(dataSeries, 14.929144536019532) // In the example, the TI084 results in 14.52117133, which we consider acceptably close
- testCoefficientC(dataSeries, -31.325531135531037) // In the example, the TI084 results in -21.89774466, which we consider acceptably close
- testGoodnessOfFitEquals(dataSeries, 0.9781087883163964)
-})
-
-test('Quadratic TS Estimation should be decent for standard real-life example from VarsityTutors with some noise', () => {
- // Test based on https://www.varsitytutors.com/hotmath/hotmath_help/topics/quadratic-regression
- const dataSeries = createTSQuadraticSeries(7)
- dataSeries.push(-3, 7.5)
- dataSeries.push(-2, 3)
- dataSeries.push(-1, 0.5)
- dataSeries.push(0, 1)
- dataSeries.push(1, 3)
- dataSeries.push(2, 6)
- dataSeries.push(3, 14)
- testCoefficientA(dataSeries, 1.0833333333333333) // The example results in 1.1071 for OLS, which we consider acceptably close
- testCoefficientB(dataSeries, 1.0833333333333333) // The example results in 1 for OLS, which we consider acceptably close
- testCoefficientC(dataSeries, 0.8333333333333335) // The example results in 0.5714 for OLS, which we consider acceptably close
- testGoodnessOfFitEquals(dataSeries, 0.9851153039832286)
-})
-
-test('Quadratic TS Estimation should be decent for standard example from VTUPulse with some noise, without the vertex being part of the dataset', () => {
- // Test based on https://www.vtupulse.com/machine-learning/quadratic-polynomial-regression-model-solved-example/
- const dataSeries = createTSQuadraticSeries(5)
- dataSeries.push(3, 2.5)
- dataSeries.push(4, 3.3)
- dataSeries.push(5, 3.8)
- dataSeries.push(6, 6.5)
- dataSeries.push(7, 11.5)
- testCoefficientA(dataSeries, 0.8583333333333334) // The example results in 0.7642857 for OLS, which we consider acceptably close given the small sample size
- testCoefficientB(dataSeries, -6.420833333333334) // The example results in -5.5128571 for OLS, which we consider acceptably close given the small sample size
- testCoefficientC(dataSeries, 14.387500000000003) // The example results in 12.4285714 for OLS, which we consider acceptably close given the small sample size
- testGoodnessOfFitEquals(dataSeries, 0.9825283785404673)
-})
-
-test('Quadratic TS Estimation should be decent for standard real-life example from Uni Berlin with some noise without the vertex being part of the dataset', () => {
- // Test based on https://www.geo.fu-berlin.de/en/v/soga/Basics-of-statistics/Linear-Regression/Polynomial-Regression/Polynomial-Regression---An-example/index.html
- const dataSeries = createTSQuadraticSeries(25)
- dataSeries.push(0.001399613, -0.23436656)
- dataSeries.push(0.971629779, 0.64689524)
- dataSeries.push(0.579119475, -0.92635765)
- dataSeries.push(0.335693937, 0.13000706)
- dataSeries.push(0.736736086, -0.89294863)
- dataSeries.push(0.492572335, 0.33854780)
- dataSeries.push(0.737133774, -1.24171910)
- dataSeries.push(0.563693769, -0.22523318)
- dataSeries.push(0.877603280, -0.12962722)
- dataSeries.push(0.141426545, 0.37632006)
- dataSeries.push(0.307203910, 0.30299077)
- dataSeries.push(0.024509308, -0.21162739)
- dataSeries.push(0.843665029, -0.76468719)
- dataSeries.push(0.771206067, -0.90455412)
- dataSeries.push(0.149670258, 0.77097952)
- dataSeries.push(0.359605608, 0.56466366)
- dataSeries.push(0.049612895, 0.18897607)
- dataSeries.push(0.409898906, 0.32531750)
- dataSeries.push(0.935457898, -0.78703491)
- dataSeries.push(0.149476207, 0.80585375)
- dataSeries.push(0.234315216, 0.62944986)
- dataSeries.push(0.455297119, 0.02353327)
- dataSeries.push(0.102696671, 0.27621694)
- dataSeries.push(0.715372314, -1.20379729)
- dataSeries.push(0.681745393, -0.83059624)
- testCoefficientA(dataSeries, -2.030477132951317)
- testCoefficientB(dataSeries, 0.5976858995201227)
- testCoefficientC(dataSeries, 0.17630021024409503)
- testGoodnessOfFitEquals(dataSeries, 0.23921110548689295)
-})
-
-test('Quadratic TS Estimation should be decent for standard real-life example from Statology.org with some noise and chaotic X values', () => {
- // Test based on https://www.statology.org/quadratic-regression-r/
- const dataSeries = createTSQuadraticSeries(11)
- dataSeries.push(6, 14)
- dataSeries.push(9, 28)
- dataSeries.push(12, 50)
- dataSeries.push(14, 70)
- dataSeries.push(30, 89)
- dataSeries.push(35, 94)
- dataSeries.push(40, 90)
- dataSeries.push(47, 75)
- dataSeries.push(51, 59)
- dataSeries.push(55, 44)
- dataSeries.push(60, 27)
- testCoefficientA(dataSeries, -0.10119047619047619) // The example results in -0.1012 for R after two rounds, which we consider acceptably close
- testCoefficientB(dataSeries, 6.801190476190477) // The example results in 6.7444 for R after two rounds, which we consider acceptably close
- testCoefficientC(dataSeries, -21.126190476190516) // The example results in 18.2536 for R after two rounds, but for ORM, this factor is irrelevant
- testGoodnessOfFitEquals(dataSeries, 0.9571127392718894)
-})
-
-test('Quadratic TS Estimation should be decent for standard real-life example from StatsDirect.com with some noise and chaotic X values', () => {
- // Test based on https://www.statsdirect.com/help/regression_and_correlation/polynomial.htm
- const dataSeries = createTSQuadraticSeries(10)
- dataSeries.push(1290, 1182)
- dataSeries.push(1350, 1172)
- dataSeries.push(1470, 1264)
- dataSeries.push(1600, 1493)
- dataSeries.push(1710, 1571)
- dataSeries.push(1840, 1711)
- dataSeries.push(1980, 1804)
- dataSeries.push(2230, 1840)
- dataSeries.push(2400, 1956)
- dataSeries.push(2930, 1954)
- testCoefficientA(dataSeries, -0.00046251263566907585) // The example results in -0.00045 through QR decomposition by Givens rotations, which we consider acceptably close
- testCoefficientB(dataSeries, 2.441798780934297) // The example results in 2.39893 for QR decomposition by Givens rotations, which we consider acceptably close
- testCoefficientC(dataSeries, -1235.044997485239) // The example results in -1216.143887 for QR decomposition by Givens rotations, but for ORM, this factor is irrelevant
- testGoodnessOfFitEquals(dataSeries, 0.9790379024208455)
-})
-
-test('Quadratic Approximation with a clean function and a reset', () => {
- // Data based on 2 x^2 + 2 x + 2
- const dataSeries = createTSQuadraticSeries(10)
- dataSeries.push(-10, 182)
- dataSeries.push(-9, 146)
- dataSeries.push(-8, 114)
- dataSeries.push(-7, 86)
- dataSeries.push(-6, 62)
- dataSeries.push(-5, 42)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(-4, 26)
- dataSeries.push(-3, 14) // Pi ;)
- dataSeries.push(-2, 6)
- dataSeries.push(-1, 2)
- dataSeries.push(0, 2)
- dataSeries.push(1, 6)
- dataSeries.push(2, 14)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.push(3, 26)
- dataSeries.push(4, 42)
- dataSeries.push(5, 62)
- dataSeries.push(6, 86)
- dataSeries.push(7, 114)
- dataSeries.push(8, 146)
- dataSeries.push(9, 182)
- dataSeries.push(10, 222)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
- testGoodnessOfFitEquals(dataSeries, 1)
- dataSeries.reset()
- testCoefficientA(dataSeries, 0)
- testCoefficientB(dataSeries, 0)
- testCoefficientC(dataSeries, 0)
- testGoodnessOfFitEquals(dataSeries, 0)
- dataSeries.push(-1, 2)
- testCoefficientA(dataSeries, 0)
- testCoefficientB(dataSeries, 0)
- testCoefficientC(dataSeries, 0)
- testGoodnessOfFitEquals(dataSeries, 0)
- dataSeries.push(0, 2)
- testCoefficientA(dataSeries, 0)
- testCoefficientB(dataSeries, 0)
- testCoefficientC(dataSeries, 0)
- testGoodnessOfFitEquals(dataSeries, 0)
- dataSeries.push(1, 6)
- testCoefficientA(dataSeries, 2)
- testCoefficientB(dataSeries, 2)
- testCoefficientC(dataSeries, 2)
- testGoodnessOfFitEquals(dataSeries, 1)
-})
-
-test('Quadratic TS Estimation should result in a straight line for function y = x', () => {
- // As ORM will encounter straight lines (when forces are balanced on the flywheel, there is no acceleration/deceleration), so we need to test this as well
- const dataSeries = createTSQuadraticSeries(7)
- dataSeries.push(0, 0)
- dataSeries.push(1, 1)
- dataSeries.push(2, 2)
- dataSeries.push(3, 3)
- dataSeries.push(4, 4)
- dataSeries.push(5, 5)
- dataSeries.push(6, 6)
- testCoefficientA(dataSeries, 0)
- testCoefficientB(dataSeries, 1)
- testCoefficientC(dataSeries, 0)
- testGoodnessOfFitEquals(dataSeries, 1)
-})
-
-function testCoefficientA (series, expectedValue) {
- assert.ok(series.coefficientA() === expectedValue, `Expected value for coefficientA at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.coefficientA()}`)
-}
-
-function testCoefficientB (series, expectedValue) {
- assert.ok(series.coefficientB() === expectedValue, `Expected value for coefficientB at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.coefficientB()}`)
-}
-
-function testCoefficientC (series, expectedValue) {
- assert.ok(series.coefficientC() === expectedValue, `Expected value for coefficientC at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.coefficientC()}`)
-}
-
-function testGoodnessOfFitEquals (series, expectedValue) {
- assert.ok(series.goodnessOfFit() === expectedValue, `Expected goodnessOfFit at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered ${series.goodnessOfFit()}`)
-}
-
-function testGoodnessOfFitBetween (series, expectedValueAbove, expectedValueBelow) { // eslint-disable-line no-unused-vars
- assert.ok(series.goodnessOfFit() > expectedValueAbove, `Expected goodnessOfFit at X-position ${series.X.atSeriesEnd()} above ${expectedValueAbove}, encountered ${series.goodnessOfFit()}`)
- assert.ok(series.goodnessOfFit() < expectedValueBelow, `Expected goodnessOfFit at X-position ${series.X.atSeriesEnd()} below ${expectedValueBelow}, encountered ${series.goodnessOfFit()}`)
-}
-
-function testSlope (series, position, expectedValue) { // eslint-disable-line no-unused-vars
- assert.ok(series.slope(position) === expectedValue, `Expected value for Slope-${position} at X-position ${series.X.atSeriesEnd()} (slope at X-position ${series.X.atPosition(position)}) is ${expectedValue}, encountered a ${series.slope(position)}`)
-}
-
-function reportAll (series) { // eslint-disable-line no-unused-vars
- assert.ok(series.coefficientA() === 99, `time: ${series.X.atSeriesEnd()}, coefficientA: ${series.coefficientA()}, coefficientB: ${series.coefficientB()}, coefficientC: ${series.coefficientC()}, Slope-10: ${series.slope(10)}, Slope-9: ${series.slope(9)}, Slope-8: ${series.slope(8)}, Slope-7: ${series.slope(7)}, Slope-6: ${series.slope(6)}, Slope-5: ${series.slope(5)}, Slope-4: ${series.slope(4)}, Slope-3: ${series.slope(3)}, Slope-2: ${series.slope(2)}, Slope-1: ${series.slope(1)}, Slope-0: ${series.slope(0)}`)
-}
-
-test.run()
diff --git a/app/engine/utils/Gaussian.js b/app/engine/utils/Gaussian.js
new file mode 100644
index 0000000000..fbd767f14e
--- /dev/null
+++ b/app/engine/utils/Gaussian.js
@@ -0,0 +1,41 @@
+'use strict'
+/*
+ Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
+*/
+/**
+ * This implements a Gaussian weight function, which is used in the moving regression filter
+ * @see {@link https://en.wikipedia.org/wiki/Kernel_(statistics)#Kernel_functions_in_common_use|the description of the various kernels}
+ * Please realize the constant factor 1 / Math.sqrt(2 * Math.PI) is omitted as it cancels out in the subsequent weight averaging filtering
+ */
+let begin
+let end
+let halfLength
+let middle
+
+export function createGaussianWeightFunction () {
+ begin = 0
+ end = 0
+ halfLength = 0
+ middle = 0
+
+ function setWindowWidth (beginpos, endpos) {
+ begin = beginpos
+ end = endpos
+ halfLength = (end - begin) / 2
+ middle = halfLength + begin
+ }
+
+ function weight (position) {
+ if (position >= begin && end >= position) {
+ const normalizedDistance = Math.abs((middle - position) / halfLength)
+ return Math.exp(-0.5 * Math.pow(normalizedDistance, 2))
+ } else {
+ return 0
+ }
+ }
+
+ return {
+ setWindowWidth,
+ weight
+ }
+}
diff --git a/app/engine/utils/Gaussian.test.js b/app/engine/utils/Gaussian.test.js
new file mode 100644
index 0000000000..139d1592c1
--- /dev/null
+++ b/app/engine/utils/Gaussian.test.js
@@ -0,0 +1,97 @@
+'use strict'
+/*
+ Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
+*/
+/**
+ * This tests the Gaussian Weight Kernel
+ */
+import { test } from 'uvu'
+import * as assert from 'uvu/assert'
+import { createGaussianWeightFunction } from './Gaussian.js'
+
+/**
+ * Test of the weight function in the basic -1 to +1 domain
+ */
+test('Test of weight function for basic -1 to +1 domain', () => {
+ const gaussianWeight = createGaussianWeightFunction()
+ gaussianWeight.setWindowWidth(-1, 1)
+ testWeight(gaussianWeight, -1.125, 0)
+ testWeight(gaussianWeight, -1, 0.6065306597126334)
+ testWeight(gaussianWeight, -0.875, 0.6819407511903481)
+ testWeight(gaussianWeight, -0.75, 0.7548396019890073)
+ testWeight(gaussianWeight, -0.625, 0.8225775623986646)
+ testWeight(gaussianWeight, -0.5, 0.8824969025845955)
+ testWeight(gaussianWeight, -0.375, 0.9321024923595276)
+ testWeight(gaussianWeight, -0.25, 0.9692332344763441)
+ testWeight(gaussianWeight, -0.125, 0.9922179382602435)
+ testWeight(gaussianWeight, 0, 1)
+ testWeight(gaussianWeight, 0.125, 0.9922179382602435)
+ testWeight(gaussianWeight, 0.25, 0.9692332344763441)
+ testWeight(gaussianWeight, 0.375, 0.9321024923595276)
+ testWeight(gaussianWeight, 0.5, 0.8824969025845955)
+ testWeight(gaussianWeight, 0.625, 0.8225775623986646)
+ testWeight(gaussianWeight, 0.75, 0.7548396019890073)
+ testWeight(gaussianWeight, 0.875, 0.6819407511903481)
+ testWeight(gaussianWeight, 1, 0.6065306597126334)
+ testWeight(gaussianWeight, 1.125, 0)
+})
+
+/**
+ * Test of the weight function in the -10 to +10 domain
+ */
+test('Test of weight function for basic -10 to +10 domain', () => {
+ const gaussianWeight = createGaussianWeightFunction()
+ gaussianWeight.setWindowWidth(-10, 10)
+ testWeight(gaussianWeight, -11.25, 0)
+ testWeight(gaussianWeight, -10, 0.6065306597126334)
+ testWeight(gaussianWeight, -8.75, 0.6819407511903481)
+ testWeight(gaussianWeight, -7.5, 0.7548396019890073)
+ testWeight(gaussianWeight, -6.25, 0.8225775623986646)
+ testWeight(gaussianWeight, -5, 0.8824969025845955)
+ testWeight(gaussianWeight, -3.75, 0.9321024923595276)
+ testWeight(gaussianWeight, -2.5, 0.9692332344763441)
+ testWeight(gaussianWeight, -1.25, 0.9922179382602435)
+ testWeight(gaussianWeight, 0, 1)
+ testWeight(gaussianWeight, 1.25, 0.9922179382602435)
+ testWeight(gaussianWeight, 2.5, 0.9692332344763441)
+ testWeight(gaussianWeight, 3.75, 0.9321024923595276)
+ testWeight(gaussianWeight, 5, 0.8824969025845955)
+ testWeight(gaussianWeight, 6.25, 0.8225775623986646)
+ testWeight(gaussianWeight, 7.5, 0.7548396019890073)
+ testWeight(gaussianWeight, 8.75, 0.6819407511903481)
+ testWeight(gaussianWeight, 10, 0.6065306597126334)
+ testWeight(gaussianWeight, 11.25, 0)
+})
+
+/**
+ * Test of the weight function in the 100 to 120 domain
+ */
+test('Test of weight function for basic 100 to 120 domain', () => {
+ const gaussianWeight = createGaussianWeightFunction()
+ gaussianWeight.setWindowWidth(100, 120)
+ testWeight(gaussianWeight, 98.75, 0)
+ testWeight(gaussianWeight, 100, 0.6065306597126334)
+ testWeight(gaussianWeight, 101.25, 0.6819407511903481)
+ testWeight(gaussianWeight, 102.5, 0.7548396019890073)
+ testWeight(gaussianWeight, 103.75, 0.8225775623986646)
+ testWeight(gaussianWeight, 105, 0.8824969025845955)
+ testWeight(gaussianWeight, 106.25, 0.9321024923595276)
+ testWeight(gaussianWeight, 107.5, 0.9692332344763441)
+ testWeight(gaussianWeight, 108.75, 0.9922179382602435)
+ testWeight(gaussianWeight, 110, 1)
+ testWeight(gaussianWeight, 111.25, 0.9922179382602435)
+ testWeight(gaussianWeight, 112.5, 0.9692332344763441)
+ testWeight(gaussianWeight, 113.75, 0.9321024923595276)
+ testWeight(gaussianWeight, 115, 0.8824969025845955)
+ testWeight(gaussianWeight, 116.25, 0.8225775623986646)
+ testWeight(gaussianWeight, 117.5, 0.7548396019890073)
+ testWeight(gaussianWeight, 118.75, 0.6819407511903481)
+ testWeight(gaussianWeight, 120, 0.6065306597126334)
+ testWeight(gaussianWeight, 121.25, 0)
+})
+
+function testWeight (weightFunction, xValue, expectedValue) {
+ assert.ok(weightFunction.weight(xValue) === expectedValue, `Weight should be ${expectedValue} at x = ${xValue}, is ${weightFunction.weight(xValue)}`)
+}
+
+test.run()
diff --git a/app/engine/utils/MovingWindowRegressor.js b/app/engine/utils/MovingWindowRegressor.js
new file mode 100644
index 0000000000..2f5f325088
--- /dev/null
+++ b/app/engine/utils/MovingWindowRegressor.js
@@ -0,0 +1,244 @@
+'use strict'
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This implements a Moving Regression Algorithm to obtain the coefficients, first (angular velocity) and
+ * second derivative (angular acceleration) at the front of the flank
+ */
+import { createTSQuadraticSeries } from './TSQuadraticSeries.js'
+import { createWeighedSeries } from './WeighedSeries.js'
+import { createGaussianWeightFunction } from './Gaussian.js'
+
+export function createMovingRegressor (bandwith) {
+ const flankLength = bandwith
+ const quadraticTheilSenRegressor = createTSQuadraticSeries(flankLength)
+ const gaussianWeight = createGaussianWeightFunction()
+ let aMatrix = []
+ let bMatrix = []
+ let cMatrix = []
+
+ /**
+ * @param {float} the x value of the datapoint
+ * @param {float} the y value of the datapoint
+ */
+ function push (x, y, w = 1) {
+ quadraticTheilSenRegressor.push(x, y, w)
+
+ // Let's shift the matrix to make room for a new datapoint
+ if (aMatrix.length >= flankLength) {
+ // The angularVelocityMatrix has reached its maximum length, we need to remove the first element
+ aMatrix[0].reset()
+ aMatrix[0] = null
+ aMatrix.shift()
+ bMatrix[0].reset()
+ bMatrix[0] = null
+ bMatrix.shift()
+ cMatrix[0].reset()
+ cMatrix[0] = null
+ cMatrix.shift()
+ }
+
+ // Let's make room for a new set of values for first and second derivatives
+ // Please note: a weighed median would work here, but results in much less fluid force curves
+ aMatrix[aMatrix.length] = createWeighedSeries(flankLength, 0)
+ bMatrix[bMatrix.length] = createWeighedSeries(flankLength, 0)
+ cMatrix[cMatrix.length] = createWeighedSeries(flankLength, 0)
+
+ let i = 0
+ let weight = 0
+ gaussianWeight.setWindowWidth(quadraticTheilSenRegressor.X.atSeriesBegin(), quadraticTheilSenRegressor.X.atSeriesEnd())
+
+ // Let's calculate the first and second derivatives for each datapoint and store them in their matrices
+ while (i < aMatrix.length && quadraticTheilSenRegressor.reliable()) {
+ weight = quadraticTheilSenRegressor.goodnessOfFit() * quadraticTheilSenRegressor.localGoodnessOfFit(i) * gaussianWeight.weight(quadraticTheilSenRegressor.X.get(i))
+ aMatrix[i].push(quadraticTheilSenRegressor.coefficientA(), weight)
+ bMatrix[i].push(quadraticTheilSenRegressor.coefficientB(), weight)
+ cMatrix[i].push(quadraticTheilSenRegressor.coefficientC(), weight)
+ i++
+ }
+ }
+
+ /**
+ * @param {integer} the position in the flank of the requested value (default = 0)
+ * @returns {float} the coefficient a of the quadratic function y = a x^2 + b x + c
+ */
+ function coefficientA (position = 0) {
+ if (aMatrix.length === flankLength && position < aMatrix.length) {
+ return aMatrix[position].weighedAverage()
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @param {integer} the position in the flank of the requested value (default = 0)
+ * @returns {float} the coefficient b of the quadratic function y = a x^2 + b x + c
+ */
+ function coefficientB (position = 0) {
+ if (bMatrix.length === flankLength && position < aMatrix.length) {
+ return bMatrix[position].weighedAverage()
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @param {integer} the position in the flank of the requested value (default = 0)
+ * @returns {float} the coefficient c of the quadratic function y = a x^2 + b x + c
+ */
+ function coefficientC (position = 0) {
+ if (cMatrix.length === flankLength && position < aMatrix.length) {
+ return cMatrix[position].weighedAverage()
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @param {integer} the position in the flank of the requested value (default = 0)
+ * @returns {float} the first derivative of the quadratic function y = a x^2 + b x + c
+ */
+ function firstDerivative (position = 0) {
+ if (aMatrix.length === flankLength && position < aMatrix.length) {
+ return ((aMatrix[position].weighedAverage() * 2 * quadraticTheilSenRegressor.X.get(position)) + bMatrix[position].weighedAverage())
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @param {integer} the position in the flank of the requested value (default = 0)
+ * @returns {float} the second derivative of the quadratic function y = a x^2 + b x + c
+ */
+ function secondDerivative (position = 0) {
+ if (aMatrix.length === flankLength && position < aMatrix.length) {
+ return (aMatrix[position].weighedAverage() * 2)
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @param {integer} the position in the flank of the requested value (default = 0)
+ * @param {float} the x to project onto the function
+ * @returns {float} the resulting y from the projection
+ */
+ function projectX (position, x) {
+ if (aMatrix[position].length() >= 3) {
+ return ((aMatrix[position].weighedAverage() * Math.pow(x, 2)) + (bMatrix[position].weighedAverage() * x) + cMatrix[position].weighedAverage())
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {integer} the position in the flank of the requested value (default = 0)
+ * @param {float} the y to project onto the function
+ * @returns {array} the resulting x's from the projection
+ */
+ function projectY (position, y) {
+ // Calculate the discriminant
+ const discriminant = Math.pow(bMatrix[position].weighedAverage(), 2) - (4 * aMatrix[position].weighedAverage() * (cMatrix[position].weighedAverage() - y))
+
+ switch (true) {
+ case (aMatrix[position].weighedAverage() === 0 && bMatrix[position].weighedAverage() === 0):
+ // The function is a horizontal flat line, let's return the original observation
+ return [quadraticTheilSenRegressor.X.get(position)]
+ case (aMatrix[position].weighedAverage() === 0):
+ // The function is a tilted line, we need to handle this to prevent a division by zero
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const projection = (y - cMatrix[position].weighedAverage()) / bMatrix[position].weighedAverage()
+ return [projection]
+ case (discriminant > 0):
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const root1 = (-bMatrix[position].weighedAverage() + Math.sqrt(discriminant)) / (2 * aMatrix[position].weighedAverage())
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const root2 = (-bMatrix[position].weighedAverage() - Math.sqrt(discriminant)) / (2 * aMatrix[position].weighedAverage())
+ return [root1, root2]
+ case (discriminant === 0):
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const root = -bMatrix[position].weighedAverage() / (2 * aMatrix[position].weighedAverage())
+ return [root]
+ default:
+ return []
+ }
+ }
+
+ /**
+ * Resets the series to its initial state
+ */
+ function reset () {
+ quadraticTheilSenRegressor.reset()
+ let i = aMatrix.length
+ while (i > 0) {
+ aMatrix[0].reset()
+ aMatrix[0] = null
+ aMatrix.shift()
+ i--
+ }
+ aMatrix = null
+ aMatrix = []
+
+ let j = bMatrix.length
+ while (j > 0) {
+ bMatrix[0].reset()
+ bMatrix[0] = null
+ bMatrix.shift()
+ j--
+ }
+ bMatrix = null
+ bMatrix = []
+
+ let k = cMatrix.length
+ while (k > 0) {
+ cMatrix[0].reset()
+ cMatrix[0] = null
+ cMatrix.shift()
+ k--
+ }
+ cMatrix = null
+ cMatrix = []
+ }
+
+ /**
+ * @param {integer} position - position to be retrieved, starting at 0
+ * @returns {float} X value at that specific position in the series
+ */
+ function Xget (position = 0) {
+ if (position < quadraticTheilSenRegressor.length()) {
+ return quadraticTheilSenRegressor.X.get(position)
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @param {integer} position - position to be retrieved, starting at 0
+ * @returns {float} Y value at that specific position in the series
+ */
+ function Yget (position = 0) {
+ if (position < quadraticTheilSenRegressor.length()) {
+ return quadraticTheilSenRegressor.Y.get(position)
+ } else {
+ return undefined
+ }
+ }
+
+ return {
+ push,
+ X: {
+ get: Xget
+ },
+ Y: {
+ get: Yget
+ },
+ coefficientA,
+ coefficientB,
+ coefficientC,
+ firstDerivative,
+ secondDerivative,
+ projectX,
+ projectY,
+ reset
+ }
+}
diff --git a/app/engine/utils/MovingWindowRegressor.test.js b/app/engine/utils/MovingWindowRegressor.test.js
new file mode 100644
index 0000000000..985c7656d4
--- /dev/null
+++ b/app/engine/utils/MovingWindowRegressor.test.js
@@ -0,0 +1,1154 @@
+'use strict'
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file Tests of the movingRegressor object
+ */
+import { test } from 'uvu'
+import * as assert from 'uvu/assert'
+import { createMovingRegressor } from './MovingWindowRegressor.js'
+
+function flywheelPosition (position) {
+ return ((position * Math.PI) / 3)
+}
+
+/**
+ * @description Test behaviour for no datapoints
+ */
+test('Correct movingRegressor behaviour at initialisation', () => {
+ const flankLength = 12
+ const movingRegressor = createMovingRegressor(flankLength)
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+})
+
+/**
+ * @todo Test behaviour for one datapoint
+ */
+
+/**
+ * @todo Test behaviour for perfect upgoing flank
+ */
+
+/**
+ * @todo Test behaviour for perfect downgoing flank
+ */
+
+/**
+ * @description Test of the integration of the underlying FullTSQuadraticEstimator object
+ * This uses the same data as the function y = 2 x^2 + 4 * x
+ */
+test('Test of correct algorithmic integration of FullTSQuadraticEstimator and movingRegressor object for quadratic function f(x) = 2 * x^2 + 4 * x', () => {
+ const flankLength = 12
+ const movingRegressor = createMovingRegressor(flankLength)
+
+ movingRegressor.push(0, flywheelPosition(0)) // Datapoint 0
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.234341433963188, flywheelPosition(1)) // Datapoint 1
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.430803114057485, flywheelPosition(2)) // Datapoint 2
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.603370302455080, flywheelPosition(3)) // Datapoint 3
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.759089282098323, flywheelPosition(4)) // Datapoint 4
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.902102488824273, flywheelPosition(5)) // Datapoint 5
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.035090330572530, flywheelPosition(6)) // Datapoint 6
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.159905421352540, flywheelPosition(7)) // Datapoint 7
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.27789161392424, flywheelPosition(8)) // Datapoint 8
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.39006045538281, flywheelPosition(9)) // Datapoint 9
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.4971959786895, flywheelPosition(10)) // Datapoint 10
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.59992048562668, flywheelPosition(11)) // Datapoint 11
+  testFirstDerivative(movingRegressor, 4.0000000000000115) // Values from Datapoint 0 are now passing through
+ testSecondDerivative(movingRegressor, 3.9999999999999933)
+ movingRegressor.push(1.69873772478535, flywheelPosition(12)) // Datapoint 12
+  testFirstDerivative(movingRegressor, 4.937365735852762) // Values from Datapoint 1 are now passing through
+ testSecondDerivative(movingRegressor, 3.9999999999999925)
+ movingRegressor.push(1.79406229042552, flywheelPosition(13)) // Datapoint 13
+  testFirstDerivative(movingRegressor, 5.723212456229947) // Values from Datapoint 2 are now passing through
+ testSecondDerivative(movingRegressor, 3.999999999999984)
+ movingRegressor.push(1.88624026345282, flywheelPosition(14)) // Datapoint 14
+  testFirstDerivative(movingRegressor, 6.413481209820322) // Values from Datapoint 3 are now passing through
+ testSecondDerivative(movingRegressor, 3.9999999999999742)
+ movingRegressor.push(1.97556408668583, flywheelPosition(15)) // Datapoint 15
+  testFirstDerivative(movingRegressor, 7.036357128393286) // Values from Datapoint 4 are now passing through
+ testSecondDerivative(movingRegressor, 3.9999999999999747)
+ movingRegressor.push(2.06228352860619, flywheelPosition(16)) // Datapoint 16
+  testFirstDerivative(movingRegressor, 7.608409955297076) // Values from Datapoint 5 are now passing through
+ testSecondDerivative(movingRegressor, 3.999999999999983)
+ movingRegressor.push(2.14661392375536, flywheelPosition(17)) // Datapoint 17
+  testFirstDerivative(movingRegressor, 8.140361322290104) // Values from Datapoint 6 are now passing through
+ testSecondDerivative(movingRegressor, 3.9999999999999916)
+ movingRegressor.push(2.22874247359082, flywheelPosition(18)) // Datapoint 18
+  testFirstDerivative(movingRegressor, 8.639621685410132) // Values from Datapoint 7 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000013)
+ movingRegressor.push(2.30883313818749, flywheelPosition(19)) // Datapoint 19
+  testFirstDerivative(movingRegressor, 9.111566455696927) // Values from Datapoint 8 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000042)
+ movingRegressor.push(2.38703048583357, flywheelPosition(20)) // Datapoint 20
+  testFirstDerivative(movingRegressor, 9.560241821531205) // Values from Datapoint 9 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000061)
+ movingRegressor.push(2.46346275966182, flywheelPosition(21)) // Datapoint 21
+  testFirstDerivative(movingRegressor, 9.988783914757967) // Values from Datapoint 10 are now passing through
+ testSecondDerivative(movingRegressor, 4.00000000000007)
+ movingRegressor.push(2.53824434757728, flywheelPosition(22)) // Datapoint 22
+  testFirstDerivative(movingRegressor, 10.399681942506692) // Values from Datapoint 11 are now passing through
+ testSecondDerivative(movingRegressor, 4.0000000000000835)
+ movingRegressor.push(2.61147779153643, flywheelPosition(23)) // Datapoint 23
+  testFirstDerivative(movingRegressor, 10.794950899141389) // Values from Datapoint 12 are now passing through
+ testSecondDerivative(movingRegressor, 4.0000000000000915)
+ movingRegressor.push(2.68325543702296, flywheelPosition(24)) // Datapoint 24
+  testFirstDerivative(movingRegressor, 11.176249161702088) // Values from Datapoint 13 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000075)
+ movingRegressor.push(2.75366079846827, flywheelPosition(25)) // Datapoint 25
+  testFirstDerivative(movingRegressor, 11.544961053811306) // Values from Datapoint 14 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000067)
+ movingRegressor.push(2.82276969821042, flywheelPosition(26)) // Datapoint 26
+  testFirstDerivative(movingRegressor, 11.902256346743357) // Values from Datapoint 15 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000058)
+ movingRegressor.push(2.89065122327279, flywheelPosition(27)) // Datapoint 27
+  testFirstDerivative(movingRegressor, 12.249134114424805) // Values from Datapoint 16 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000049)
+ movingRegressor.push(2.95736853436123, flywheelPosition(28)) // Datapoint 28
+  testFirstDerivative(movingRegressor, 12.586455695021487) // Values from Datapoint 17 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000033)
+ movingRegressor.push(3.02297955405576, flywheelPosition(29)) // Datapoint 29
+  testFirstDerivative(movingRegressor, 12.91496989436332) // Values from Datapoint 18 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000027)
+ movingRegressor.push(3.08753755553988, flywheelPosition(30)) // Datapoint 30
+  testFirstDerivative(movingRegressor, 13.23533255274999) // Values from Datapoint 19 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000016)
+ movingRegressor.push(3.15109166889232, flywheelPosition(31)) // Datapoint 31
+  testFirstDerivative(movingRegressor, 13.548121943334301) // Values from Datapoint 20 are now passing through
+ testSecondDerivative(movingRegressor, 4.000000000000006)
+})
+
+/**
+ * @description Test of the integration of the underlying FullTSQuadraticEstimator object
+ * The data follows the function y = X^3 + 2 * x^2 + 4 * x
+ * To test if multiple quadratic regressions can decently approximate a cubic function
+ */
+test('Test of correct algorithmic integration of FullTSQuadraticEstimator and movingRegressor object for cubic function f(x) = X^3 + 2 * x^2 + 4 * x', () => {
+ const flankLength = 12
+ const movingRegressor = createMovingRegressor(flankLength)
+
+ movingRegressor.push(0, flywheelPosition(0)) // Datapoint 0
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.231815755285445, flywheelPosition(1)) // Datapoint 1
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.41798587349477, flywheelPosition(2)) // Datapoint 2
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.573659684819169, flywheelPosition(3)) // Datapoint 3
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.707924094678216, flywheelPosition(4)) // Datapoint 4
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.826414402971124, flywheelPosition(5)) // Datapoint 5
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.932810595231392, flywheelPosition(6)) // Datapoint 6
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.02963328885463, flywheelPosition(7)) // Datapoint 7
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.11868033498976, flywheelPosition(8)) // Datapoint 8
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.20127811057228, flywheelPosition(9)) // Datapoint 9
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.27843316652448, flywheelPosition(10)) // Datapoint 10
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.35092771853781, flywheelPosition(11)) // Datapoint 11
+ testFirstDerivative(movingRegressor, 3.1619218560690983) // Datapoint 0, Theoretical value: 4
+ testSecondDerivative(movingRegressor, 7.251023549310305) // Datapoint 0, Theoretical value: 4
+ movingRegressor.push(1.41938205529707, flywheelPosition(12)) // Datapoint 12
+ testFirstDerivative(movingRegressor, 4.795017407170356) // Datapoint 1, Theoretical value: 5.088478654, error: -6,64%
+ testSecondDerivative(movingRegressor, 7.324931550092253) // Datapoint 1, Theoretical value: 5.390894532, error: 38,46%
+ movingRegressor.push(1.48429666701973, flywheelPosition(13)) // Datapoint 13
+ testFirstDerivative(movingRegressor, 6.098616558470358) // Datapoint 2, Theoretical value: 6.196080065, error: -2,14%
+ testSecondDerivative(movingRegressor, 7.656104494382835) // Datapoint 2, Theoretical value: 6.507915241, error: 18,21%
+ movingRegressor.push(1.54608149753959, flywheelPosition(14)) // Datapoint 14
+ testFirstDerivative(movingRegressor, 7.261045146876182) // Datapoint 3, Theoretical value: 7.281895041, error: -0,79%
+ testSecondDerivative(movingRegressor, 8.125127482274081) // Datapoint 3, Theoretical value: 7.441958109, error: 9,49%
+ movingRegressor.push(1.60507676311623, flywheelPosition(15)) // Datapoint 15
+ testFirstDerivative(movingRegressor, 8.3354523167128) // Datapoint 4, Theoretical value: 8.33516595, error: -0,42%
+ testSecondDerivative(movingRegressor, 8.591085532405375) // Datapoint 4, Theoretical value: 8.247544568, error: 4,32%
+ movingRegressor.push(1.66156809465495, flywheelPosition(16)) // Datapoint 16
+ testFirstDerivative(movingRegressor, 9.346198019520195) // Datapoint 5, Theoretical value: 9.354539908, error: -0,44%
+ testSecondDerivative(movingRegressor, 9.05816287785614) // Datapoint 5, Theoretical value: 8.958486418, error: 1,06%
+ movingRegressor.push(1.71579776502858, flywheelPosition(17)) // Datapoint 17
+ testFirstDerivative(movingRegressor, 10.314972131734764) // Datapoint 6, Theoretical value: 10.3416492, error: -0,56%
+ testSecondDerivative(movingRegressor, 9.531782371110388) // Datapoint 6, Theoretical value: 9.596863571, error: -0,95%
+ movingRegressor.push(1.76797315746226, flywheelPosition(18)) // Datapoint 18
+ testFirstDerivative(movingRegressor, 11.253026452431916) // Datapoint 7, Theoretical value: 11.29896728, error: -0,68%
+ testSecondDerivative(movingRegressor, 10.006689891934682) // Datapoint 7, Theoretical value: 10.17779973, error: -2,15%
+ movingRegressor.push(1.81827325164023, flywheelPosition(19)) // Datapoint 19
+ testFirstDerivative(movingRegressor, 12.167114512288997) // Datapoint 8, Theoretical value: 12.22905842, error: -0,76%
+ testSecondDerivative(movingRegressor, 10.479926499860278) // Datapoint 8, Theoretical value: 10.71208201, error: -2,78%
+ movingRegressor.push(1.86685366056842, flywheelPosition(20)) // Datapoint 20
+ testFirstDerivative(movingRegressor, 13.06228935387478) // Datapoint 9, Theoretical value: 13.13431974, error: -0,79%
+ testSecondDerivative(movingRegressor, 10.94574190420843) // Datapoint 9, Theoretical value: 11.20766866, error: -3,03%
+ movingRegressor.push(1.91385059111525, flywheelPosition(21)) // Datapoint 21
+ testFirstDerivative(movingRegressor, 13.940750925066355) // Datapoint 10, Theoretical value: 14.01690675, error: -0,78%
+ testSecondDerivative(movingRegressor, 11.403650671998214) // Datapoint 10, Theoretical value: 11.670599, error: -2,98%
+ movingRegressor.push(1.95938399371638, flywheelPosition(22)) // Datapoint 22
+ testFirstDerivative(movingRegressor, 14.80669498176647) // Datapoint 11, Theoretical value: 14.87872798, error: -0,69%
+ testSecondDerivative(movingRegressor, 11.856689681955745) // Datapoint 11, Theoretical value: 12.10556631, error: -2,69%
+ movingRegressor.push(2.00356009326199, flywheelPosition(23)) // Datapoint 23
+ testFirstDerivative(movingRegressor, 15.659331443649235) // Datapoint 12, Theoretical value: 15.72146448, error: -0,57%
+ testSecondDerivative(movingRegressor, 12.3033090600004) // Datapoint 12, Theoretical value: 12.51629233, error: -2,22%
+ movingRegressor.push(2.04647344207189, flywheelPosition(24)) // Datapoint 24
+ testFirstDerivative(movingRegressor, 16.492736768968758) // Datapoint 13, Theoretical value: 16.54659646, error: -0,47%
+ testSecondDerivative(movingRegressor, 12.721354618620541) // Datapoint 13, Theoretical value: 12.90578, error: -1,86%
+ movingRegressor.push(2.08820859973702, flywheelPosition(25)) // Datapoint 25
+ testFirstDerivative(movingRegressor, 17.307691210719657) // Datapoint 14, Theoretical value: 17.35542998, error: -0,40%
+ testSecondDerivative(movingRegressor, 13.11397255097589) // Datapoint 14, Theoretical value: 13.27648899, error: -1,59%
+ movingRegressor.push(2.12884151869732, flywheelPosition(26)) // Datapoint 26
+ testFirstDerivative(movingRegressor, 18.106493986724217) // Datapoint 15, Theoretical value: 18.1491213, error: -0,34%
+ testSecondDerivative(movingRegressor, 13.486098587072668) // Datapoint 15, Theoretical value: 13.63046058, error: -1,38%
+ movingRegressor.push(2.1684406955958, flywheelPosition(27)) // Datapoint 27
+ testFirstDerivative(movingRegressor, 18.890426542395396) // Datapoint 16, Theoretical value: 18.92869798, error: -0,29%
+ testSecondDerivative(movingRegressor, 13.840428977173227) // Datapoint 16, Theoretical value: 13.96940857, error: -1,20%
+ movingRegressor.push(2.20706813459232, flywheelPosition(28)) // Datapoint 28
+ testFirstDerivative(movingRegressor, 19.660398675998493) // Datapoint 17, Theoretical value: 19.69507697, error: -0,26%
+ testSecondDerivative(movingRegressor, 14.178743620220295) // Datapoint 17, Theoretical value: 14.29478659, error: -1,06%
+ movingRegressor.push(2.24478015850658, flywheelPosition(29)) // Datapoint 29
+ testFirstDerivative(movingRegressor, 20.41744737019293) // Datapoint 18, Theoretical value: 20.44907989, error: -0,23%
+ testSecondDerivative(movingRegressor, 14.502790132819) // Datapoint 18, Theoretical value: 14.60783894, error: -0,94%
+ movingRegressor.push(2.28162809590139, flywheelPosition(30)) // Datapoint 30
+ testFirstDerivative(movingRegressor, 21.1623762673629) // Datapoint 19, Theoretical value: 21.19144586, error: -0,20%
+ testSecondDerivative(movingRegressor, 14.813903373334561) // Datapoint 19, Theoretical value: 14.90963951, error: -0,83%
+ movingRegressor.push(2.31765886632097, flywheelPosition(31)) // Datapoint 31
+  testFirstDerivative(movingRegressor, 21.89597076848041) // Datapoint 20, Theoretical value: 21.92284301, error: -0.12%
+  testSecondDerivative(movingRegressor, 15.113402988997308) // Datapoint 20, Theoretical value: 15.20112196, error: -0.58%
+})
+
+/**
+ * @description Test of the integration of the underlying FullTSQuadraticEstimator object
+ * The data follows the function y = X^3 + 2 * x^2 + 4 * x with a +/-0.0001 sec injected noise in currentDt
+ * To test if multiple quadratic regressions can decently approximate a cubic function with noise (NOTE(review): the pushed timestamps below appear identical to the noise-free test above — confirm the noise was actually injected)
+ * Please note: theoretical values are based on the perfect function (i.e. without noise)
+ */
+test('Test of correct algorithmic integration of FullTSQuadraticEstimator and movingRegressor object for cubic function f(x) = X^3 + 2 * x^2 + 4 * x with +/- 0.0001 error', () => {
+ const flankLength = 12
+ const movingRegressor = createMovingRegressor(flankLength)
+
+ movingRegressor.push(0, flywheelPosition(0)) // Datapoint 0
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.231815755285445, flywheelPosition(1)) // Datapoint 1
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.41798587349477, flywheelPosition(2)) // Datapoint 2
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.573659684819169, flywheelPosition(3)) // Datapoint 3
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.707924094678216, flywheelPosition(4)) // Datapoint 4
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.826414402971124, flywheelPosition(5)) // Datapoint 5
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(0.932810595231392, flywheelPosition(6)) // Datapoint 6
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.02963328885463, flywheelPosition(7)) // Datapoint 7
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.11868033498976, flywheelPosition(8)) // Datapoint 8
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.20127811057228, flywheelPosition(9)) // Datapoint 9
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.27843316652448, flywheelPosition(10)) // Datapoint 10
+ testFirstDerivative(movingRegressor, undefined)
+ testSecondDerivative(movingRegressor, undefined)
+ movingRegressor.push(1.35092771853781, flywheelPosition(11)) // Datapoint 11
+ testFirstDerivative(movingRegressor, 3.1619218560690983) // Datapoint 0, Theoretical value: 4
+ testSecondDerivative(movingRegressor, 7.251023549310305) // Datapoint 0, Theoretical value: 4
+ movingRegressor.push(1.41938205529707, flywheelPosition(12)) // Datapoint 12
+ testFirstDerivative(movingRegressor, 4.795017407170356) // Datapoint 1, Theoretical value: 5.088478654, error: -6.58%
+ testSecondDerivative(movingRegressor, 7.324931550092253) // Datapoint 1, Theoretical value: 5.390894532, error: 38.38%
+ movingRegressor.push(1.48429666701973, flywheelPosition(13)) // Datapoint 13
+ testFirstDerivative(movingRegressor, 6.098616558470358) // Datapoint 2, Theoretical value: 6.196080065, error: -2.11%
+ testSecondDerivative(movingRegressor, 7.656104494382835) // Datapoint 2, Theoretical value: 6.507915241, error: 18.14%
+ movingRegressor.push(1.54608149753959, flywheelPosition(14)) // Datapoint 14
+ testFirstDerivative(movingRegressor, 7.261045146876182) // Datapoint 3, Theoretical value: 7.281895041, error: -0.77%
+ testSecondDerivative(movingRegressor, 8.125127482274081) // Datapoint 3, Theoretical value: 7.441958109, error: 9.40%
+ movingRegressor.push(1.60507676311623, flywheelPosition(15)) // Datapoint 15
+ testFirstDerivative(movingRegressor, 8.3354523167128) // Datapoint 4, Theoretical value: 8.33516595, error: -0.42%
+ testSecondDerivative(movingRegressor, 8.591085532405375) // Datapoint 4, Theoretical value: 8.247544568, error: 4.24%
+ movingRegressor.push(1.66156809465495, flywheelPosition(16)) // Datapoint 16
+ testFirstDerivative(movingRegressor, 9.346198019520195) // Datapoint 5, Theoretical value: 9.354539908, error: -0.44%
+ testSecondDerivative(movingRegressor, 9.05816287785614) // Datapoint 5, Theoretical value: 8.958486418, error: 1.00%
+ movingRegressor.push(1.71579776502858, flywheelPosition(17)) // Datapoint 17
+ testFirstDerivative(movingRegressor, 10.314972131734764) // Datapoint 6, Theoretical value: 10.3416492, error: -0.56%
+ testSecondDerivative(movingRegressor, 9.531782371110388) // Datapoint 6, Theoretical value: 9.596863571, error: -1.00%
+ movingRegressor.push(1.76797315746226, flywheelPosition(18)) // Datapoint 18
+ testFirstDerivative(movingRegressor, 11.253026452431916) // Datapoint 7, Theoretical value: 11.29896728, error: -0.67%
+ testSecondDerivative(movingRegressor, 10.006689891934682) // Datapoint 7, Theoretical value: 10.17779973, error: -2.21%
+ movingRegressor.push(1.81827325164023, flywheelPosition(19)) // Datapoint 19
+ testFirstDerivative(movingRegressor, 12.167114512288997) // Datapoint 8, Theoretical value: 12.22905842, error: -0.76%
+ testSecondDerivative(movingRegressor, 10.479926499860278) // Datapoint 8, Theoretical value: 10.71208201, error: -2.84%
+ movingRegressor.push(1.86685366056842, flywheelPosition(20)) // Datapoint 20
+ testFirstDerivative(movingRegressor, 13.06228935387478) // Datapoint 9, Theoretical value: 13.13431974, error: -0.79%
+ testSecondDerivative(movingRegressor, 10.94574190420843) // Datapoint 9, Theoretical value: 11.20766866, error: -3.08%
+ movingRegressor.push(1.91385059111525, flywheelPosition(21)) // Datapoint 21
+ testFirstDerivative(movingRegressor, 13.940750925066355) // Datapoint 10, Theoretical value: 14.01690675, error: -0.78%
+ testSecondDerivative(movingRegressor, 11.403650671998214) // Datapoint 10, Theoretical value: 11.670599, error: -3.04%
+ movingRegressor.push(1.95938399371638, flywheelPosition(22)) // Datapoint 22
+ testFirstDerivative(movingRegressor, 14.80669498176647) // Datapoint 11, Theoretical value: 14.87872798, error: -0.68%
+ testSecondDerivative(movingRegressor, 11.856689681955745) // Datapoint 11, Theoretical value: 12.10556631, error: -2.76%
+ movingRegressor.push(2.00356009326199, flywheelPosition(23)) // Datapoint 23
+ testFirstDerivative(movingRegressor, 15.659331443649235) // Datapoint 12, Theoretical value: 15.72146448, error: -0.57%
+ testSecondDerivative(movingRegressor, 12.3033090600004) // Datapoint 12, Theoretical value: 12.51629233, error: -2.30%
+ movingRegressor.push(2.04647344207189, flywheelPosition(24)) // Datapoint 24
+ testFirstDerivative(movingRegressor, 16.492736768968758) // Datapoint 13, Theoretical value: 16.54659646, error: -0.46%
+ testSecondDerivative(movingRegressor, 12.721354618620541) // Datapoint 13, Theoretical value: 12.90578, error: -1.95%
+ movingRegressor.push(2.08820859973702, flywheelPosition(25)) // Datapoint 25
+ testFirstDerivative(movingRegressor, 17.307691210719657) // Datapoint 14, Theoretical value: 17.35542998, error: -0.39%
+ testSecondDerivative(movingRegressor, 13.11397255097589) // Datapoint 14, Theoretical value: 13.27648899, error: -1.70%
+ movingRegressor.push(2.12884151869732, flywheelPosition(26)) // Datapoint 26
+ testFirstDerivative(movingRegressor, 18.106493986724217) // Datapoint 15, Theoretical value: 18.1491213, error: -0.32%
+ testSecondDerivative(movingRegressor, 13.486098587072668) // Datapoint 15, Theoretical value: 13.63046058, error: -1.51%
+ movingRegressor.push(2.1684406955958, flywheelPosition(27)) // Datapoint 27
+ testFirstDerivative(movingRegressor, 18.890426542395396) // Datapoint 16, Theoretical value: 18.92869798, error: -0.28%
+ testSecondDerivative(movingRegressor, 13.840428977173227) // Datapoint 16, Theoretical value: 13.96940857, error: -1.35%
+ movingRegressor.push(2.20706813459232, flywheelPosition(28)) // Datapoint 28
+ testFirstDerivative(movingRegressor, 19.660398675998493) // Datapoint 17, Theoretical value: 19.69507697, error: -0.24%
+ testSecondDerivative(movingRegressor, 14.178743620220295) // Datapoint 17, Theoretical value: 14.29478659, error: -1.23%
+ movingRegressor.push(2.24478015850658, flywheelPosition(29)) // Datapoint 29
+ testFirstDerivative(movingRegressor, 20.41744737019293) // Datapoint 18, Theoretical value: 20.44907989, error: -0.21%
+ testSecondDerivative(movingRegressor, 14.502790132819) // Datapoint 18, Theoretical value: 14.60783894, error: -1.13%
+ movingRegressor.push(2.28162809590139, flywheelPosition(30)) // Datapoint 30
+ testFirstDerivative(movingRegressor, 21.1623762673629) // Datapoint 19, Theoretical value: 21.19144586, error: -0.18%
+ testSecondDerivative(movingRegressor, 14.813903373334561) // Datapoint 19, Theoretical value: 14.90963951, error: -1.05%
+ movingRegressor.push(2.31765886632097, flywheelPosition(31)) // Datapoint 31
+  testFirstDerivative(movingRegressor, 21.89597076848041) // Datapoint 20, Theoretical value: 21.92284301, error: -0.12%
+  testSecondDerivative(movingRegressor, 15.113402988997308) // Datapoint 20, Theoretical value: 15.20112196, error: -0.58%
+})
+
+/**
+ * @description Test of the integration of the underlying FullTSQuadraticEstimator object
+ * The data follows the function y = (x - 3.22398390803294)^3 + 33.5103216382911
+ * To test if multiple quadratic regressions can decently approximate a cubic function
+ */
+test('Test of correct algorithmic behaviourof FullTSQuadraticEstimator in movingRegressor object for function f(x) = (x + 3,22398390803294)^3 + 33,5103216382911', () => {
+ const flankLength = 11
+ const movingRegressor = createMovingRegressor(flankLength)
+
+ movingRegressor.push(0, flywheelPosition(0)) // Datapoint 0
+ movingRegressor.push(0.0339391931958861, flywheelPosition(1)) // Datapoint 1
+ movingRegressor.push(0.0686163387311174, flywheelPosition(2)) // Datapoint 2
+ movingRegressor.push(0.104072908191785, flywheelPosition(3)) // Datapoint 3
+ movingRegressor.push(0.140354232816639, flywheelPosition(4)) // Datapoint 4
+ movingRegressor.push(0.177510015343162, flywheelPosition(5)) // Datapoint 5
+ movingRegressor.push(0.215594931499885, flywheelPosition(6)) // Datapoint 6
+ movingRegressor.push(0.254669340957169, flywheelPosition(7)) // Datapoint 7
+ movingRegressor.push(0.294800132909893, flywheelPosition(8)) // Datapoint 8
+ movingRegressor.push(0.336061738566166, flywheelPosition(9)) // Datapoint 9
+ movingRegressor.push(0.378537352322414, flywheelPosition(10)) // Datapoint 10
+ testFirstDerivative(movingRegressor, 31.15175433824249) // Datapoint: 0, Theoretical value: 31.182216717766, Error: -0.0977%
+ testSecondDerivative(movingRegressor, -18.70425512401731) // Datapoint: 0, Theoretical value: -19.3439034481976, Error: -3.3067%
+ movingRegressor.push(0.422320416281029, flywheelPosition(11)) // Datapoint 11
+ testFirstDerivative(movingRegressor, 30.513098889123356) // Datapoint: 1, Theoretical value: 30.5291558479794, Error: -0.0526%
+ testSecondDerivative(movingRegressor, -18.681649183708288) // Datapoint: 1, Theoretical value: -19.1402682890223, Error: -2.3961%
+ movingRegressor.push(0.467516440428408, flywheelPosition(12)) // Datapoint 12
+ testFirstDerivative(movingRegressor, 29.85986155955904) // Datapoint: 2, Theoretical value: 29.8690334922051, Error: -0.0307%
+ testSecondDerivative(movingRegressor, -18.589545542159925) // Datapoint: 2, Theoretical value: -18.9322054158109, Error: -1.8099%
+ movingRegressor.push(0.514245255352352, flywheelPosition(13)) // Datapoint 13
+ testFirstDerivative(movingRegressor, 29.195872708833107) // Datapoint: 3, Theoretical value: 29.2015339407895, Error: -0.0194%
+ testSecondDerivative(movingRegressor, -18.4460337009964) // Datapoint: 3, Theoretical value: -18.7194659990469, Error: -1.4607%
+ movingRegressor.push(0.562643829050002, flywheelPosition(14)) // Datapoint 14
+ testFirstDerivative(movingRegressor, 28.52198052325988) // Datapoint: 4, Theoretical value: 28.5263159216238, Error: -0.0152%
+ testSecondDerivative(movingRegressor, -18.289734522926175) // Datapoint: 4, Theoretical value: -18.5017780512978, Error: -1.1461%
+ movingRegressor.push(0.612869829134886, flywheelPosition(15)) // Datapoint 15
+ testFirstDerivative(movingRegressor, 27.838071498993234) // Datapoint: 5, Theoretical value: 27.8430095365212, Error: -0.0177%
+ testSecondDerivative(movingRegressor, -18.118042271601063) // Datapoint: 5, Theoretical value: -18.2788433561387, Error: -0.8797%
+ movingRegressor.push(0.665106184462922, flywheelPosition(16)) // Datapoint 16
+ testFirstDerivative(movingRegressor, 27.144243702454105) // Datapoint: 6, Theoretical value: 27.1512127023768, Error: -0.0257%
+ testSecondDerivative(movingRegressor, -17.929615535082775) // Datapoint: 6, Theoretical value: -18.0503338591983, Error: -0.6688%
+ movingRegressor.push(0.719567008604913, flywheelPosition(17)) // Datapoint 17
+ testFirstDerivative(movingRegressor, 26.44064030984685) // Datapoint: 7, Theoretical value: 26.4504869947451, Error: -0.0372%
+ testSecondDerivative(movingRegressor, -17.724298552595286) // Datapoint: 7, Theoretical value: -17.8158874024546, Error: -0.5141%
+ movingRegressor.push(0.776505412873583, flywheelPosition(18)) // Datapoint 18
+ testFirstDerivative(movingRegressor, 25.727388210906753) // Datapoint: 8, Theoretical value: 25.7403527653323, Error: -0.0504%
+ testSecondDerivative(movingRegressor, -17.502608958090796) // Datapoint: 8, Theoretical value: -17.5751026507383, Error: -0.4125%
+ movingRegressor.push(0.836223994993886, flywheelPosition(19)) // Datapoint 19
+ testFirstDerivative(movingRegressor, 25.004447900335343) // Datapoint: 9, Theoretical value: 25.020283370693, Error: -0.0633%
+ testSecondDerivative(movingRegressor, -17.265411481197397) // Datapoint: 9, Theoretical value: -17.3275330168006, Error: -0.3585%
+ movingRegressor.push(0.899089205013686, flywheelPosition(20)) // Datapoint 20
+ testFirstDerivative(movingRegressor, 24.271609293004744) // Datapoint: 10, Theoretical value: 24.2896983042147, Error: -0.0745%
+ testSecondDerivative(movingRegressor, -17.01327023725214) // Datapoint: 10, Theoretical value: -17.0726793342632, Error: -0.348%
+ movingRegressor.push(0.96555148010585, flywheelPosition(21)) // Datapoint 21
+ testFirstDerivative(movingRegressor, 23.528633993819774) // Datapoint: 11, Theoretical value: 23.5479549630465, Error: -0.082%
+ testSecondDerivative(movingRegressor, -16.74548308493857) // Datapoint: 11, Theoretical value: -16.8099809505115, Error: -0.3837%
+ movingRegressor.push(1.03617422913716, flywheelPosition(22)) // Datapoint 22
+ testFirstDerivative(movingRegressor, 22.773623149365505) // Datapoint: 12, Theoretical value: 22.7943386998864, Error: -0.0909%
+ testSecondDerivative(movingRegressor, -16.468465526497997) // Datapoint: 12, Theoretical value: -16.5388048056272, Error: -0.4253%
+ movingRegressor.push(1.11167688752162, flywheelPosition(23)) // Datapoint 23
+ testFirstDerivative(movingRegressor, 22.005733141869783) // Datapoint: 13, Theoretical value: 22.0280506974936, Error: -0.1013%
+ testSecondDerivative(movingRegressor, -16.18133180794129) // Datapoint: 13, Theoretical value: -16.2584319160835, Error: -0.4742%
+ movingRegressor.push(1.19300131290642, flywheelPosition(24)) // Datapoint 24
+ testFirstDerivative(movingRegressor, 21.22397651669105) // Datapoint: 14, Theoretical value: 21.2481930480027, Error: -0.114%
+ testSecondDerivative(movingRegressor, -15.883041508980552) // Datapoint: 14, Theoretical value: -15.9680404738976, Error: -0.5323%
+ movingRegressor.push(1.28141893909019, flywheelPosition(25)) // Datapoint 25
+ testFirstDerivative(movingRegressor, 20.427255162207537) // Datapoint: 15, Theoretical value: 20.4537501990589, Error: -0.1295%
+ testSecondDerivative(movingRegressor, -15.572356144457256) // Datapoint: 15, Theoretical value: -15.6666844733883, Error: -0.6021%
+ movingRegressor.push(1.37871375938891, flywheelPosition(26)) // Datapoint 26
+ testFirstDerivative(movingRegressor, 19.614341957383857) // Datapoint: 16, Theoretical value: 19.6435656125486, Error: -0.1488%
+ testSecondDerivative(movingRegressor, -15.247777333845995) // Datapoint: 16, Theoretical value: -15.3532663414201, Error: -0.6871%
+ movingRegressor.push(1.48751821514026, flywheelPosition(27)) // Datapoint 27
+ testFirstDerivative(movingRegressor, 18.783685853718048) // Datapoint: 17, Theoretical value: 18.8163120184221, Error: -0.1734%
+ testSecondDerivative(movingRegressor, -14.907455157933619) // Datapoint: 17, Theoretical value: -15.0265013965682, Error: -0.7922%
+ movingRegressor.push(1.61199195401647, flywheelPosition(28)) // Datapoint 28
+ testFirstDerivative(movingRegressor, 17.933422588953594) // Datapoint: 18, Theoretical value: 17.9704529528025, Error: -0.2061%
+ testSecondDerivative(movingRegressor, -14.54879094209201) // Datapoint: 18, Theoretical value: -14.6848709709561, Error: -0.9267%
+ movingRegressor.push(1.75939202047142, flywheelPosition(29)) // Datapoint 29
+ testFirstDerivative(movingRegressor, 17.061479737474052) // Datapoint: 19, Theoretical value: 17.1041922069488, Error: -0.2497%
+ testSecondDerivative(movingRegressor, -14.168437758362021) // Datapoint: 19, Theoretical value: -14.3265594782343, Error: -1.1037%
+ movingRegressor.push(1.94454504624793, flywheelPosition(30)) // Datapoint 30
+ testFirstDerivative(movingRegressor, 16.16525797986565) // Datapoint: 20, Theoretical value: 16.2154061403809, Error: -0.3093%
+ testSecondDerivative(movingRegressor, -13.763237669973963) // Datapoint: 20, Theoretical value: -13.9493682181155, Error: -1.3343%
+ movingRegressor.push(2.20849261046968, flywheelPosition(31)) // Datapoint 31
+ testFirstDerivative(movingRegressor, 15.240931059835175) // Datapoint: 21, Theoretical value: 15.3015510945379, Error: -0.3962%
+ testSecondDerivative(movingRegressor, -13.326034947566537) // Datapoint: 21, Theoretical value: -13.5505945675625, Error: -1.6572%
+ movingRegressor.push(3.22398390803294, flywheelPosition(32)) // Datapoint 32
+ testFirstDerivative(movingRegressor, 14.287862296451902) // Datapoint: 22, Theoretical value: 14.3595335732101, Error: -0.4991%
+ testSecondDerivative(movingRegressor, -12.86939888687925) // Datapoint: 22, Theoretical value: -13.1268580733747, Error: -1.9613%
+ movingRegressor.push(4.2394752055962, flywheelPosition(33)) // Datapoint 33
+ testFirstDerivative(movingRegressor, 13.343446992890124) // Datapoint: 23, Theoretical value: 13.3855228467043, Error: -0.3097%
+ testSecondDerivative(movingRegressor, -12.593658621633756) // Datapoint: 23, Theoretical value: -12.6738421230679, Error: -0.6068%
+ movingRegressor.push(4.50342276981795, flywheelPosition(34)) // Datapoint 34
+ testFirstDerivative(movingRegressor, 12.272424425339942) // Datapoint: 24, Theoretical value: 12.3746709051205, Error: -0.8234%
+ testSecondDerivative(movingRegressor, -11.943049564910012) // Datapoint: 24, Theoretical value: -12.1858955707591, Error: -1.9677%
+ movingRegressor.push(4.68857579559446, flywheelPosition(35)) // Datapoint 35
+ testFirstDerivative(movingRegressor, 11.111275577176187) // Datapoint: 25, Theoretical value: 11.3206759756907, Error: -1.8498%
+ testSecondDerivative(movingRegressor, -11.10165547134061) // Datapoint: 25, Theoretical value: -11.6553898136565, Error: -4.7286%
+ movingRegressor.push(4.83597586204941, flywheelPosition(36)) // Datapoint 36
+ testFirstDerivative(movingRegressor, 9.85670546231101) // Datapoint: 26, Theoretical value: 10.2150657644303, Error: -3.5125%
+ testSecondDerivative(movingRegressor, -10.131638260894363) // Datapoint: 26, Theoretical value: -11.0716208918642, Error: -8.4727%
+ movingRegressor.push(4.96044960092562, flywheelPosition(37)) // Datapoint 37
+ testFirstDerivative(movingRegressor, 8.33150256290571) // Datapoint: 27, Theoretical value: 9.04593930777978, Error: -7.9107%
+ testSecondDerivative(movingRegressor, -8.90260010296994) // Datapoint: 27, Theoretical value: -10.4187941573561, Error: -14.5446%
+ movingRegressor.push(5.06925405667697, flywheelPosition(38)) // Datapoint 38
+ testFirstDerivative(movingRegressor, 6.616103614905059) // Datapoint: 28, Theoretical value: 7.79555417944151, Error: -15.1565%
+ testSecondDerivative(movingRegressor, -7.485916659905859) // Datapoint: 28, Theoretical value: -9.67195172409882, Error: -22.6092%
+ movingRegressor.push(5.16654887697569, flywheelPosition(39)) // Datapoint 39
+ testFirstDerivative(movingRegressor, 4.888940778715439) // Datapoint: 29, Theoretical value: 6.43508819133308, Error: -24.0732%
+ testSecondDerivative(movingRegressor, -5.956802316997369) // Datapoint: 29, Theoretical value: -8.78755132536914, Error: -32.2443%
+ movingRegressor.push(5.25496650315946, flywheelPosition(40)) // Datapoint 40
+ testFirstDerivative(movingRegressor, 3.6316979581923494) // Datapoint: 30, Theoretical value: 4.91089140313715, Error: -26.1114%
+ testSecondDerivative(movingRegressor, -4.771588801465927) // Datapoint: 30, Theoretical value: -7.67663317071005, Error: -37.9068%
+ movingRegressor.push(5.33629092854426, flywheelPosition(41)) // Datapoint 41
+ testFirstDerivative(movingRegressor, 2.3077423939611448) // Datapoint: 31, Theoretical value: 3.09366772628014, Error: -25.4724%
+ testSecondDerivative(movingRegressor, -3.5593152612469012) // Datapoint: 31, Theoretical value: -6.09294778537956, Error: -41.7127%
+ movingRegressor.push(5.41179358692871, flywheelPosition(42)) // Datapoint 42
+ testFirstDerivative(movingRegressor, 1.5335044322403928) // Datapoint: 32, Theoretical value: 0
+ testSecondDerivative(movingRegressor, 3.8379764035844055e-14) // Datapoint: 32, Theoretical value: 0
+ movingRegressor.push(5.48241633596003, flywheelPosition(43)) // Datapoint 43
+ testFirstDerivative(movingRegressor, 2.3077423939611457) // Datapoint: 33, Theoretical value: 3.09366772628014, Error: -25.4724%
+ testSecondDerivative(movingRegressor, 3.5593152612468977) // Datapoint: 33, Theoretical value: 6.09294778537956, Error: -41.7127%
+ movingRegressor.push(5.54887861105219, flywheelPosition(44)) // Datapoint 44
+ testFirstDerivative(movingRegressor, 3.6316979581922624) // Datapoint: 34, Theoretical value: 4.91089140313715, Error: -26.1114%
+ testSecondDerivative(movingRegressor, 4.771588801466153) // Datapoint: 34, Theoretical value: 7.67663317071005, Error: -37.9068%
+ movingRegressor.push(5.61174382107199, flywheelPosition(45)) // Datapoint 45
+ testFirstDerivative(movingRegressor, 4.888940778715508) // Datapoint: 35, Theoretical value: 6.43508819133308, Error: -24.0732%
+ testSecondDerivative(movingRegressor, 5.956802316997485) // Datapoint: 35, Theoretical value: 8.78755132536914, Error: -32.2443%
+ movingRegressor.push(5.6714624031923, flywheelPosition(46)) // Datapoint 46
+ testFirstDerivative(movingRegressor, 6.616103614905288) // Datapoint: 36, Theoretical value: 7.79555417944151, Error: -15.1565%
+ testSecondDerivative(movingRegressor, 7.485916659905754) // Datapoint: 36, Theoretical value: 9.67195172409882, Error: -22.6092%
+ movingRegressor.push(5.72840080746097, flywheelPosition(47)) // Datapoint 47
+ testFirstDerivative(movingRegressor, 8.331502562905932) // Datapoint: 37, Theoretical value: 9.04593930777979, Error: -7.9107%
+ testSecondDerivative(movingRegressor, 8.902600102969847) // Datapoint: 37, Theoretical value: 10.4187941573561, Error: -14.5446%
+ movingRegressor.push(5.78286163160296, flywheelPosition(48)) // Datapoint 48
+ testFirstDerivative(movingRegressor, 9.856705462311233) // Datapoint: 38, Theoretical value: 10.2150657644303, Error: -3.5125%
+ testSecondDerivative(movingRegressor, 10.131638260894228) // Datapoint: 38, Theoretical value: 11.0716208918642, Error: -8.4727%
+ movingRegressor.push(5.83509798693099, flywheelPosition(49)) // Datapoint 49
+ testFirstDerivative(movingRegressor, 11.111275577176677) // Datapoint: 39, Theoretical value: 11.3206759756907, Error: -1.8498%
+ testSecondDerivative(movingRegressor, 11.101655471339662) // Datapoint: 39, Theoretical value: 11.6553898136565, Error: -4.7286%
+ movingRegressor.push(5.88532398701588, flywheelPosition(50)) // Datapoint 50
+ testFirstDerivative(movingRegressor, 12.272424425340205) // Datapoint: 40, Theoretical value: 12.3746709051205, Error: -0.8234%
+ testSecondDerivative(movingRegressor, 11.943049564909279) // Datapoint: 40, Theoretical value: 12.1858955707591, Error: -1.9677%
+ movingRegressor.push(5.93372256071353, flywheelPosition(51)) // Datapoint 51
+ testFirstDerivative(movingRegressor, 13.343446992889987) // Datapoint: 41, Theoretical value: 13.3855228467043, Error: -0.3097%
+ testSecondDerivative(movingRegressor, 12.593658621633706) // Datapoint: 41, Theoretical value: 12.6738421230679, Error: -0.6068%
+ movingRegressor.push(5.98045137563747, flywheelPosition(52)) // Datapoint 52
+ testFirstDerivative(movingRegressor, 14.287862296451628) // Datapoint: 42, Theoretical value: 14.3595335732101, Error: -0.4991%
+ testSecondDerivative(movingRegressor, 12.869398886878852) // Datapoint: 42, Theoretical value: 13.1268580733747, Error: -1.9613%
+ movingRegressor.push(6.02564739978485, flywheelPosition(53)) // Datapoint 53
+ testFirstDerivative(movingRegressor, 15.240931059835063) // Datapoint: 43, Theoretical value: 15.3015510945379, Error: -0.3962%
+ testSecondDerivative(movingRegressor, 13.326034947565466) // Datapoint: 43, Theoretical value: 13.5505945675625, Error: -1.6572%
+ movingRegressor.push(6.06943046374346, flywheelPosition(54)) // Datapoint 54
+ testFirstDerivative(movingRegressor, 16.165257979865466) // Datapoint: 44, Theoretical value: 16.2154061403809, Error: -0.3093%
+ testSecondDerivative(movingRegressor, 13.763237669972586) // Datapoint: 44, Theoretical value: 13.9493682181155, Error: -1.3343%
+ movingRegressor.push(6.11190607749971, flywheelPosition(55)) // Datapoint 55
+ testFirstDerivative(movingRegressor, 17.061479737473583) // Datapoint: 45, Theoretical value: 17.1041922069488, Error: -0.2497%
+ testSecondDerivative(movingRegressor, 14.16843775836098) // Datapoint: 45, Theoretical value: 14.3265594782343, Error: -1.1037%
+ movingRegressor.push(6.15316768315598, flywheelPosition(56)) // Datapoint 56
+ testFirstDerivative(movingRegressor, 17.933422588953036) // Datapoint: 46, Theoretical value: 17.9704529528025, Error: -0.2061%
+ testSecondDerivative(movingRegressor, 14.548790942092099) // Datapoint: 46, Theoretical value: 14.6848709709561, Error: -0.9267%
+ movingRegressor.push(6.19329847510871, flywheelPosition(57)) // Datapoint 57
+ testFirstDerivative(movingRegressor, 18.783685853717557) // Datapoint: 47, Theoretical value: 18.8163120184221, Error: -0.1734%
+ testSecondDerivative(movingRegressor, 14.907455157934207) // Datapoint: 47, Theoretical value: 15.0265013965682, Error: -0.7922%
+ movingRegressor.push(6.23237288456599, flywheelPosition(58)) // Datapoint 58
+ testFirstDerivative(movingRegressor, 19.614341957383544) // Datapoint: 48, Theoretical value: 19.6435656125486, Error: -0.1488%
+ testSecondDerivative(movingRegressor, 15.247777333846214) // Datapoint: 48, Theoretical value: 15.3532663414201, Error: -0.6871%
+ movingRegressor.push(6.27045780072272, flywheelPosition(59)) // Datapoint 59
+ testFirstDerivative(movingRegressor, 20.427255162207402) // Datapoint: 49, Theoretical value: 20.4537501990589, Error: -0.1295%
+ testSecondDerivative(movingRegressor, 15.572356144456794) // Datapoint: 49, Theoretical value: 15.6666844733883, Error: -0.6021%
+ movingRegressor.push(6.30761358324924, flywheelPosition(60)) // Datapoint 60
+ testFirstDerivative(movingRegressor, 21.223976516691252) // Datapoint: 50, Theoretical value: 21.2481930480027, Error: -0.114%
+ testSecondDerivative(movingRegressor, 15.883041508981133) // Datapoint: 50, Theoretical value: 15.9680404738976, Error: -0.5323%
+ movingRegressor.push(6.34389490787409, flywheelPosition(61)) // Datapoint 61
+ testFirstDerivative(movingRegressor, 22.005733141870095) // Datapoint: 51, Theoretical value: 22.0280506974936, Error: -0.1013%
+ testSecondDerivative(movingRegressor, 16.18133180794123) // Datapoint: 51, Theoretical value: 16.2584319160835, Error: -0.4742%
+ movingRegressor.push(6.37935147733476, flywheelPosition(62)) // Datapoint 62
+ testFirstDerivative(movingRegressor, 22.77362314936576) // Datapoint: 52, Theoretical value: 22.7943386998864, Error: -2.3204%
+ testSecondDerivative(movingRegressor, 16.46846552649644) // Datapoint: 52, Theoretical value: 16.5388048056272, Error: -0.4253%
+ movingRegressor.push(6.41402862286999, flywheelPosition(63)) // Datapoint 63
+ testFirstDerivative(movingRegressor, 23.528633993819923) // Datapoint: 53, Theoretical value: 23.5479549630465, Error: 0.2272%
+ testSecondDerivative(movingRegressor, 16.745483084939668) // Datapoint: 53, Theoretical value: 16.8099809505115, Error: -0.3837%
+ movingRegressor.push(6.41402862286999, flywheelPosition(64)) // Datapoint 64
+ testFirstDerivative(movingRegressor, 24.271654626903214) // Datapoint: 54, Theoretical value: 24.2896983042147, Error: 1.9476%
+ testSecondDerivative(movingRegressor, 17.012520012731436) // Datapoint: 54, Theoretical value: 17.0726793342632, Error: -0.3524%
+ movingRegressor.push(6.44796781606588, flywheelPosition(65)) // Datapoint 65
+ testFirstDerivative(movingRegressor, 25.003717545806197) // Datapoint: 55, Theoretical value: 25.020283370693, Error: 2.9728%
+ testSecondDerivative(movingRegressor, 17.27251336444372) // Datapoint: 55, Theoretical value: 17.3275330168006, Error: -0.3175%
+ movingRegressor.push(6.48264496160111, flywheelPosition(66)) // Datapoint 66
+ testFirstDerivative(movingRegressor, 25.72670308190149) // Datapoint: 56, Theoretical value: 25.7403527653323, Error: 3.4113%
+ testSecondDerivative(movingRegressor, 17.524882364022666) // Datapoint: 56, Theoretical value: 17.5751026507383, Error: -0.2857%
+ movingRegressor.push(6.51810153106178, flywheelPosition(67)) // Datapoint 67
+ testFirstDerivative(movingRegressor, 26.624349154334766) // Datapoint: 57, Theoretical value: 26.4504869947451, Error: 3.3525%
+ testSecondDerivative(movingRegressor, 17.75730877420827) // Datapoint: 57, Theoretical value: 17.8158874024546, Error: -7.2011%
+ movingRegressor.push(6.55438285568663, flywheelPosition(68)) // Datapoint 68
+ testFirstDerivative(movingRegressor, 27.579555943496985) // Datapoint: 58, Theoretical value: 27.1512127023768, Error: 2.871%
+ testSecondDerivative(movingRegressor, 17.955629118247334) // Datapoint: 58, Theoretical value: 18.0503338591983, Error: -23.3038%
+ movingRegressor.push(6.59153863821315, flywheelPosition(69)) // Datapoint 69
+ testFirstDerivative(movingRegressor, 28.89052571737747) // Datapoint: 59, Theoretical value: 27.8430095365212, Error: 2.0292%
+ testSecondDerivative(movingRegressor, 15.665483074845106) // Datapoint: 59, Theoretical value: 18.2788433561387, Error: -38.625%
+ movingRegressor.push(6.62962355436988, flywheelPosition(70)) // Datapoint 70
+ testFirstDerivative(movingRegressor, 29.951538371892738) // Datapoint: 60, Theoretical value: 28.5263159216238, Error: 0.8795%
+ testSecondDerivative(movingRegressor, 12.901462381724917) // Datapoint: 60, Theoretical value: 18.5017780512978, Error: -53.23%
+ movingRegressor.push(6.66869796382716, flywheelPosition(71)) // Datapoint 71
+ testFirstDerivative(movingRegressor, 30.806657246584255) // Datapoint: 61, Theoretical value: 29.2015339407895, Error: -0.5338%
+ testSecondDerivative(movingRegressor, 9.67140666836943) // Datapoint: 61, Theoretical value: 18.7194659990469, Error: -67.1765%
+ movingRegressor.push(6.70882875577989, flywheelPosition(72)) // Datapoint 72
+ testFirstDerivative(movingRegressor, 31.405396142822518) // Datapoint: 62, Theoretical value: 29.8690334922051, Error: -2.173%
+ testSecondDerivative(movingRegressor, 6.000343686171773) // Datapoint: 62, Theoretical value: 18.9322054158109, Error: -80.516%
+ movingRegressor.push(6.75009036143616, flywheelPosition(73)) // Datapoint 73
+ testFirstDerivative(movingRegressor, 31.70459727805595) // Datapoint: 63, Theoretical value: 30.5291558479794, Error: -4.0058%
+ testSecondDerivative(movingRegressor, 1.9206519801849915) // Datapoint: 63, Theoretical value: 19.1402682890223, Error: -93.2941%
+ movingRegressor.push(6.79256597519241, flywheelPosition(74)) // Datapoint 74
+ testFirstDerivative(movingRegressor, 31.765903811444034) // Datapoint: 64, Theoretical value: 31.182216717766, Error: -6.0163%
+ testSecondDerivative(movingRegressor, -1.9932629732984233) // Datapoint: 64, Theoretical value: -19.3439034481976, Error: -106.6353%. This large error is expected, as this is a welding point between two graphs
+ movingRegressor.push(6.83634903915102, flywheelPosition(75)) // Datapoint 75
+ testFirstDerivative(movingRegressor, 31.56636248448924) // Datapoint: 65, Theoretical value: 30.5291558479794, Error: -3.9942%
+ testSecondDerivative(movingRegressor, -6.041241293208837) // Datapoint: 65, Theoretical value: -19.1402682890223, Error: -94.3891%
+ movingRegressor.push(6.8815450632984, flywheelPosition(76)) // Datapoint 76
+ testFirstDerivative(movingRegressor, 31.068488732359405) // Datapoint: 66, Theoretical value: 29.8690334922051, Error: -2.1371%
+ testSecondDerivative(movingRegressor, -9.708056537795159) // Datapoint: 66, Theoretical value: -18.9322054158109, Error: -81.5859%
+ movingRegressor.push(6.92827387822234, flywheelPosition(77)) // Datapoint 77
+ testFirstDerivative(movingRegressor, 30.315160610570246) // Datapoint: 67, Theoretical value: 29.2015339407895, Error: -0.4733%
+ testSecondDerivative(movingRegressor, -12.951751177840897) // Datapoint: 67, Theoretical value: -18.7194659990469, Error: -68.1811%
+ movingRegressor.push(6.97667245191999, flywheelPosition(78)) // Datapoint 78
+ testFirstDerivative(movingRegressor, 29.199797883477018) // Datapoint: 68, Theoretical value: 28.5263159216238, Error: 0.964%
+ testSecondDerivative(movingRegressor, -15.873257856339913) // Datapoint: 68, Theoretical value: -18.5017780512978, Error: -54.1246%
+ movingRegressor.push(7.02689845200488, flywheelPosition(79)) // Datapoint 79
+ testFirstDerivative(movingRegressor, 27.971567095364264) // Datapoint: 69, Theoretical value: 27.8430095365212, Error: 2.1359%
+ testSecondDerivative(movingRegressor, -18.17668918703874) // Datapoint: 69, Theoretical value: -18.2788433561387, Error: -39.3603%
+ movingRegressor.push(7.07913480733291, flywheelPosition(80)) // Datapoint 80
+ testFirstDerivative(movingRegressor, 27.14327299326837) // Datapoint: 70, Theoretical value: 27.1512127023768, Error: 2.9966%
+ testSecondDerivative(movingRegressor, -17.984173147864997) // Datapoint: 70, Theoretical value: -18.0503338591983, Error: -23.8249%
+ movingRegressor.push(7.1335956314749, flywheelPosition(81)) // Datapoint 81
+ testFirstDerivative(movingRegressor, 26.437672893260455) // Datapoint: 71, Theoretical value: 26.4504869947451, Error: 3.4917%
+ testSecondDerivative(movingRegressor, -17.759972771852997) // Datapoint: 71, Theoretical value: -17.8158874024546, Error: -7.4468%
+ movingRegressor.push(7.19053403574357, flywheelPosition(82)) // Datapoint 82
+ testFirstDerivative(movingRegressor, 25.72556390928743) // Datapoint: 72, Theoretical value: 25.7403527653323, Error: 3.5565%
+ testSecondDerivative(movingRegressor, -17.51562527634176) // Datapoint: 72, Theoretical value: -17.5751026507383, Error: -0.3384%
+ movingRegressor.push(7.25025261786388, flywheelPosition(83)) // Datapoint 83
+ testFirstDerivative(movingRegressor, 25.00442855876365) // Datapoint: 73, Theoretical value: 25.020283370693, Error: 3.1135%
+ testSecondDerivative(movingRegressor, -17.264560797969754) // Datapoint: 73, Theoretical value: -17.3275330168006, Error: -0.3634%
+ movingRegressor.push(7.31311782788368, flywheelPosition(84)) // Datapoint 84
+ testFirstDerivative(movingRegressor, 24.27277785941115) // Datapoint: 74, Theoretical value: 24.2896983042147, Error: 2.0695%
+ testSecondDerivative(movingRegressor, -17.007291771200208) // Datapoint: 74, Theoretical value: -17.0726793342632, Error: -0.383%
+ movingRegressor.push(7.37958010297584, flywheelPosition(85)) // Datapoint 85
+ testFirstDerivative(movingRegressor, 23.52863399382072) // Datapoint: 75, Theoretical value: 23.5479549630465, Error: 0.3114%
+ testSecondDerivative(movingRegressor, -16.745483084942506) // Datapoint: 75, Theoretical value: -16.8099809505115, Error: -0.3837%
+ movingRegressor.push(7.45020285200716, flywheelPosition(86)) // Datapoint 86
+ testFirstDerivative(movingRegressor, 22.7736231493666) // Datapoint: 76, Theoretical value: 22.7943386998864, Error: -2.2989%
+ testSecondDerivative(movingRegressor, -16.468465526502353) // Datapoint: 76, Theoretical value: -16.5388048056272, Error: -0.4253%
+ movingRegressor.push(7.52570551039161, flywheelPosition(87)) // Datapoint 87
+ testFirstDerivative(movingRegressor, 22.00573314187072) // Datapoint: 77, Theoretical value: 22.0280506974936, Error: -0.1013%
+ testSecondDerivative(movingRegressor, -16.18133180794765) // Datapoint: 77, Theoretical value: -16.2584319160835, Error: -0.4742%
+ movingRegressor.push(7.60702993577641, flywheelPosition(88)) // Datapoint 88
+ testFirstDerivative(movingRegressor, 21.22397651669148) // Datapoint: 78, Theoretical value: 21.2481930480027, Error: -0.114%
+ testSecondDerivative(movingRegressor, -15.883041508986818) // Datapoint: 78, Theoretical value: -15.9680404738976, Error: -0.5323%
+ movingRegressor.push(7.69544756196018, flywheelPosition(89)) // Datapoint 89
+ testFirstDerivative(movingRegressor, 20.427255162207473) // Datapoint: 79, Theoretical value: 20.4537501990589, Error: -0.1295%
+ testSecondDerivative(movingRegressor, -15.572356144461647) // Datapoint: 79, Theoretical value: -15.6666844733883, Error: -0.6021%
+ movingRegressor.push(7.7927423822589, flywheelPosition(90)) // Datapoint 90
+ testFirstDerivative(movingRegressor, 19.614341957383232) // Datapoint: 80, Theoretical value: 19.6435656125486, Error: -0.1488%
+ testSecondDerivative(movingRegressor, -15.247777333850475) // Datapoint: 80, Theoretical value: -15.3532663414201, Error: -0.6871%
+ movingRegressor.push(7.90154683801025, flywheelPosition(91)) // Datapoint 91
+ testFirstDerivative(movingRegressor, 18.78368585371763) // Datapoint: 81, Theoretical value: 18.8163120184221, Error: -0.1734%
+ testSecondDerivative(movingRegressor, -14.907455157935825) // Datapoint: 81, Theoretical value: -15.0265013965682, Error: -0.7922%
+ movingRegressor.push(8.02602057688646, flywheelPosition(92)) // Datapoint 92
+ testFirstDerivative(movingRegressor, 17.933422588953093) // Datapoint: 82, Theoretical value: 17.9704529528025, Error: -0.2061%
+ testSecondDerivative(movingRegressor, -14.548790942093078) // Datapoint: 82, Theoretical value: -14.6848709709561, Error: -0.9267%
+ movingRegressor.push(8.17342064334141, flywheelPosition(93)) // Datapoint 93
+ testFirstDerivative(movingRegressor, 17.061479737473306) // Datapoint: 83, Theoretical value: 17.1041922069488, Error: -0.2497%
+ testSecondDerivative(movingRegressor, -14.168437758363037) // Datapoint: 83, Theoretical value: -14.3265594782343, Error: -1.1037%
+ movingRegressor.push(8.35857366911792, flywheelPosition(94)) // Datapoint 94
+ testFirstDerivative(movingRegressor, 16.165257979865217) // Datapoint: 84, Theoretical value: 16.2154061403809, Error: -0.3093%
+ testSecondDerivative(movingRegressor, -13.76323766997378) // Datapoint: 84, Theoretical value: -13.9493682181155, Error: -1.3343%
+ movingRegressor.push(8.62252123333967, flywheelPosition(95)) // Datapoint 95
+ testFirstDerivative(movingRegressor, 15.24093105983495) // Datapoint: 85, Theoretical value: 15.3015510945379, Error: -0.3962%
+ testSecondDerivative(movingRegressor, -13.326034947565844) // Datapoint: 85, Theoretical value: -13.5505945675625, Error: -1.6572%
+ movingRegressor.push(9.63801253090293, flywheelPosition(96)) // Datapoint 96
+ testFirstDerivative(movingRegressor, 14.287862296451593) // Datapoint: 86, Theoretical value: 14.3595335732101, Error: -0.4991%
+ testSecondDerivative(movingRegressor, -12.86939888687885) // Datapoint: 86, Theoretical value: -13.1268580733747, Error: -1.9613%
+ movingRegressor.push(10.6535038284662, flywheelPosition(97)) // Datapoint 97
+ testFirstDerivative(movingRegressor, 13.343446992890279) // Datapoint: 87, Theoretical value: 13.3855228467043, Error: -0.3097%
+ testSecondDerivative(movingRegressor, -12.5936586216325) // Datapoint: 87, Theoretical value: -12.6738421230679, Error: -0.6068%
+ movingRegressor.push(10.9174513926879, flywheelPosition(98)) // Datapoint 98
+ testFirstDerivative(movingRegressor, 12.272424425340773) // Datapoint: 88, Theoretical value: 12.3746709051205, Error: -0.8234%
+ testSecondDerivative(movingRegressor, -11.943049564907804) // Datapoint: 88, Theoretical value: -12.1858955707591, Error: -1.9677%
+ movingRegressor.push(11.1026044184645, flywheelPosition(99)) // Datapoint 99
+ testFirstDerivative(movingRegressor, 11.111275577176826) // Datapoint: 89, Theoretical value: 11.3206759756907, Error: -1.8498%
+ testSecondDerivative(movingRegressor, -11.10165547133927) // Datapoint: 89, Theoretical value: -11.6553898136565, Error: -4.7286%
+ movingRegressor.push(11.2500044849194, flywheelPosition(100)) // Datapoint 100
+ testFirstDerivative(movingRegressor, 9.856705462311382) // Datapoint: 90, Theoretical value: 10.2150657644303, Error: -3.5125%
+ testSecondDerivative(movingRegressor, -10.131638260893967) // Datapoint: 90, Theoretical value: -11.0716208918642, Error: -8.4727%
+ movingRegressor.push(11.3744782237956, flywheelPosition(101)) // Datapoint 101
+ testFirstDerivative(movingRegressor, 8.331502562905783) // Datapoint: 91, Theoretical value: 9.04593930777978, Error: -7.9107%
+ testSecondDerivative(movingRegressor, -8.902600102970158) // Datapoint: 91, Theoretical value: -10.4187941573561, Error: -14.5446%
+ movingRegressor.push(11.483282679547, flywheelPosition(102)) // Datapoint 102
+ testFirstDerivative(movingRegressor, 6.616103614905029) // Datapoint: 92, Theoretical value: 7.79555417944151, Error: -15.1565%
+ testSecondDerivative(movingRegressor, -7.485916659905551) // Datapoint: 92, Theoretical value: -9.67195172409882, Error: -22.6092%
+ movingRegressor.push(11.5805774998457, flywheelPosition(103)) // Datapoint 103
+ testFirstDerivative(movingRegressor, 4.888940778715536) // Datapoint: 93, Theoretical value: 6.43508819133308, Error: -24.0732%
+ testSecondDerivative(movingRegressor, -5.956802316997673) // Datapoint: 93, Theoretical value: -8.78755132536914, Error: -32.2443%
+ movingRegressor.push(11.6689951260294, flywheelPosition(104)) // Datapoint 104
+ testFirstDerivative(movingRegressor, 3.6316979581926674) // Datapoint: 94, Theoretical value: 4.91089140313715, Error: -26.1114%
+ testSecondDerivative(movingRegressor, -4.771588801466752) // Datapoint: 94, Theoretical value: -7.67663317071005, Error: -37.9068%
+ movingRegressor.push(11.7503195514143, flywheelPosition(105)) // Datapoint 105
+ testFirstDerivative(movingRegressor, 2.3077423939613055) // Datapoint: 95, Theoretical value: 3.09366772628014, Error: -25.4724%
+ testSecondDerivative(movingRegressor, -3.559315261247479) // Datapoint: 95, Theoretical value: -6.09294778537956, Error: -41.7127%
+ movingRegressor.push(11.8258222097987, flywheelPosition(106)) // Datapoint 106
+ testFirstDerivative(movingRegressor, 1.5335044322408324) // Datapoint: 96, Theoretical value: 0
+ testSecondDerivative(movingRegressor, -2.0596270771553654e-12) // Datapoint: 96, Theoretical value: 0
+ movingRegressor.push(11.89644495883, flywheelPosition(107)) // Datapoint 107
+ testFirstDerivative(movingRegressor, 2.307742393960204) // Datapoint: 97, Theoretical value: 3.09366772628014, Error: -25.4724%
+ testSecondDerivative(movingRegressor, 3.559315261247989) // Datapoint: 97, Theoretical value: 6.09294778537956, Error: -41.7127%
+ movingRegressor.push(11.9629072339222, flywheelPosition(108)) // Datapoint 108
+ testFirstDerivative(movingRegressor, 3.6316979581925963) // Datapoint: 98, Theoretical value: 4.91089140313715, Error: -26.1114%
+ testSecondDerivative(movingRegressor, 4.771588801465188) // Datapoint: 98, Theoretical value: 7.67663317071005, Error: -37.9068%
+ movingRegressor.push(12.025772443942, flywheelPosition(109)) // Datapoint 109
+ testFirstDerivative(movingRegressor, 4.888940778716552) // Datapoint: 99, Theoretical value: 6.43508819133308, Error: -24.0732%
+ testSecondDerivative(movingRegressor, 5.956802316995809) // Datapoint: 99, Theoretical value: 8.78755132536914, Error: -32.2443%
+ movingRegressor.push(12.0854910260623, flywheelPosition(110)) // Datapoint 110
+ testFirstDerivative(movingRegressor, 6.616103614905953) // Datapoint: 100, Theoretical value: 7.79555417944151, Error: -15.1565%
+ testSecondDerivative(movingRegressor, 7.485916659903175) // Datapoint: 100, Theoretical value: 9.67195172409882, Error: -22.6092%
+ movingRegressor.push(12.142429430331, flywheelPosition(111)) // Datapoint 111
+ testFirstDerivative(movingRegressor, 8.331502562907502) // Datapoint: 101, Theoretical value: 9.04593930777979, Error: -7.9107%
+ testSecondDerivative(movingRegressor, 8.902600102964639) // Datapoint: 101, Theoretical value: 10.4187941573561, Error: -14.5446%
+ movingRegressor.push(12.1968902544729, flywheelPosition(112)) // Datapoint 112
+ testFirstDerivative(movingRegressor, 9.856705462312902) // Datapoint: 102, Theoretical value: 10.2150657644303, Error: -3.5125%
+ testSecondDerivative(movingRegressor, 10.131638260888828) // Datapoint: 102, Theoretical value: 11.0716208918642, Error: -8.4727%
+ movingRegressor.push(12.249126609801, flywheelPosition(113)) // Datapoint 113
+ testFirstDerivative(movingRegressor, 11.111275577177864) // Datapoint: 103, Theoretical value: 11.3206759756907, Error: -1.8498%
+ testSecondDerivative(movingRegressor, 11.101655471333814) // Datapoint: 103, Theoretical value: 11.6553898136565, Error: -4.7286%
+ movingRegressor.push(12.2993526098859, flywheelPosition(114)) // Datapoint 114
+ testFirstDerivative(movingRegressor, 12.272424425338542) // Datapoint: 104, Theoretical value: 12.3746709051205, Error: -0.8234%
+ testSecondDerivative(movingRegressor, 11.943049564906497) // Datapoint: 104, Theoretical value: 12.1858955707591, Error: -1.9677%
+ movingRegressor.push(12.3477511835835, flywheelPosition(115)) // Datapoint 115
+ testFirstDerivative(movingRegressor, 13.343446992888772) // Datapoint: 105, Theoretical value: 13.3855228467043, Error: -0.3097%
+ testSecondDerivative(movingRegressor, 12.59365862163293) // Datapoint: 105, Theoretical value: 12.6738421230679, Error: -0.6068%
+ movingRegressor.push(12.3944799985075, flywheelPosition(116)) // Datapoint 116
+ testFirstDerivative(movingRegressor, 14.28786229645084) // Datapoint: 106, Theoretical value: 14.3595335732101, Error: -0.4991%
+ testSecondDerivative(movingRegressor, 12.86939888687176) // Datapoint: 106, Theoretical value: 13.1268580733747, Error: -1.9613%
+ movingRegressor.push(12.4396760226548, flywheelPosition(117)) // Datapoint 117
+ testFirstDerivative(movingRegressor, 15.240931059833144) // Datapoint: 107, Theoretical value: 15.3015510945379, Error: -0.3962%
+ testSecondDerivative(movingRegressor, 13.32603494755819) // Datapoint: 107, Theoretical value: 13.5505945675625, Error: -1.6572%
+ movingRegressor.push(12.4834590866135, flywheelPosition(118)) // Datapoint 118
+ testFirstDerivative(movingRegressor, 16.165257979863895) // Datapoint: 108, Theoretical value: 16.2154061403809, Error: -0.3093%
+ testSecondDerivative(movingRegressor, 13.763237669965005) // Datapoint: 108, Theoretical value: 13.9493682181155, Error: -1.3343%
+ movingRegressor.push(12.5259347003697, flywheelPosition(119)) // Datapoint 119
+ testFirstDerivative(movingRegressor, 17.061479737472382) // Datapoint: 109, Theoretical value: 17.1041922069488, Error: -0.2497%
+ testSecondDerivative(movingRegressor, 14.16843775835476) // Datapoint: 109, Theoretical value: 14.3265594782343, Error: -1.1037%
+ movingRegressor.push(12.567196306026, flywheelPosition(120)) // Datapoint 120
+ testFirstDerivative(movingRegressor, 17.933422588952254) // Datapoint: 110, Theoretical value: 17.9704529528025, Error: -0.2061%
+ testSecondDerivative(movingRegressor, 14.548790942084981) // Datapoint: 110, Theoretical value: 14.6848709709561, Error: -0.9267%
+ movingRegressor.push(12.6073270979787, flywheelPosition(121)) // Datapoint 121
+ testFirstDerivative(movingRegressor, 18.783685853716804) // Datapoint: 111, Theoretical value: 18.8163120184221, Error: -0.1734%
+ testSecondDerivative(movingRegressor, 14.907455157924247) // Datapoint: 111, Theoretical value: 15.0265013965682, Error: -0.7922%
+ movingRegressor.push(12.646401507436, flywheelPosition(122)) // Datapoint 122
+ testFirstDerivative(movingRegressor, 19.61434195738164) // Datapoint: 112, Theoretical value: 19.6435656125486, Error: -0.1488%
+ testSecondDerivative(movingRegressor, 15.24777733383992) // Datapoint: 112, Theoretical value: 15.3532663414201, Error: -0.6871%
+ movingRegressor.push(12.6844864235927, flywheelPosition(123)) // Datapoint 123
+ testFirstDerivative(movingRegressor, 20.427255162205995) // Datapoint: 113, Theoretical value: 20.4537501990589, Error: -0.1295%
+ testSecondDerivative(movingRegressor, 15.572356144449973) // Datapoint: 113, Theoretical value: 15.6666844733883, Error: -0.6021%
+ movingRegressor.push(12.7216422061192, flywheelPosition(124)) // Datapoint 124
+ testFirstDerivative(movingRegressor, 21.223976516688566) // Datapoint: 114, Theoretical value: 21.2481930480027, Error: -0.114%
+ testSecondDerivative(movingRegressor, 15.883041508982846) // Datapoint: 114, Theoretical value: 15.9680404738976, Error: -0.5323%
+ movingRegressor.push(12.7579235307441, flywheelPosition(125)) // Datapoint 125
+ testFirstDerivative(movingRegressor, 22.005733141866813) // Datapoint: 115, Theoretical value: 22.0280506974936, Error: -0.1013%
+ testSecondDerivative(movingRegressor, 16.181331807938957) // Datapoint: 115, Theoretical value: 16.2584319160835, Error: -0.4742%
+ movingRegressor.push(12.7933801002048, flywheelPosition(126)) // Datapoint 126
+ testFirstDerivative(movingRegressor, 22.77362314936127) // Datapoint: 116, Theoretical value: 22.7943386998864, Error: -0.0909%
+ testSecondDerivative(movingRegressor, 16.468465526509394) // Datapoint: 116, Theoretical value: 16.5388048056272, Error: -0.4253%
+ movingRegressor.push(12.82805724574, flywheelPosition(127)) // Datapoint 127
+ testFirstDerivative(movingRegressor, 23.528633993818517) // Datapoint: 117, Theoretical value: 23.5479549630465, Error: -0.082%
+ testSecondDerivative(movingRegressor, 16.745483084958252) // Datapoint: 117, Theoretical value: 16.8099809505115, Error: -0.3837%
+ movingRegressor.push(12.8619964389359, flywheelPosition(128)) // Datapoint 128
+ testFirstDerivative(movingRegressor, 24.271609293007685) // Datapoint: 118, Theoretical value: 24.2896983042147, Error: -0.0745%
+ testSecondDerivative(movingRegressor, 17.01327023725146) // Datapoint: 118, Theoretical value: 17.0726793342632, Error: -0.348%
+ movingRegressor.push(12.8952357296491, flywheelPosition(129)) // Datapoint 129
+ testFirstDerivative(movingRegressor, 25.00329039949736) // Datapoint: 119, Theoretical value: 25.020283370693, Error: -0.0679%
+ testSecondDerivative(movingRegressor, 17.2725894495434) // Datapoint: 119, Theoretical value: 17.3275330168006, Error: -0.3171%
+ movingRegressor.push(12.9278101225835, flywheelPosition(130)) // Datapoint 130
+ testFirstDerivative(movingRegressor, 25.724341026843803) // Datapoint: 120, Theoretical value: 25.7403527653323, Error: -0.0622%
+ testSecondDerivative(movingRegressor, 17.524103281563516) // Datapoint: 120, Theoretical value: 17.5751026507383, Error: -0.2902%
+ movingRegressor.push(12.9597519026518, flywheelPosition(131)) // Datapoint 131
+ testFirstDerivative(movingRegressor, 26.43535860745402) // Datapoint: 121, Theoretical value: 26.4504869947451, Error: -0.0572%
+ testSecondDerivative(movingRegressor, 17.76839220151102) // Datapoint: 121, Theoretical value: 17.8158874024546, Error: -0.2666%
+ movingRegressor.push(12.9910909173424, flywheelPosition(132)) // Datapoint 132
+ testFirstDerivative(movingRegressor, 27.13688352453582) // Datapoint: 122, Theoretical value: 27.1512127023768, Error: -0.0528%
+ testSecondDerivative(movingRegressor, 18.005968411087238) // Datapoint: 122, Theoretical value: 18.0503338591983, Error: -0.2458%
+ movingRegressor.push(13.0218548227995, flywheelPosition(133)) // Datapoint 133
+ testFirstDerivative(movingRegressor, 27.829406702835342) // Datapoint: 123, Theoretical value: 27.8430095365212, Error: -0.0489%
+ testSecondDerivative(movingRegressor, 18.237286747595334) // Datapoint: 123, Theoretical value: 18.2788433561387, Error: -0.2273%
+ movingRegressor.push(13.052069299129, flywheelPosition(134)) // Datapoint 134
+ testFirstDerivative(movingRegressor, 28.51337591257476) // Datapoint: 124, Theoretical value: 28.5263159216238, Error: -0.0454%
+ testSecondDerivative(movingRegressor, 18.46275340124088) // Datapoint: 124, Theoretical value: 18.5017780512978, Error: -0.2109%
+})
+
+ // Test behaviour for a fifth-degree polynomial, f(x) = (x + 2.01853237434599)^5 + 33.5103216382911
+test('Test of correct algorithmic behaviourof FullTSQuadraticEstimator in movingRegressor object for function f(x) = (x + 2,01853237434599)^5 + 33,5103216382911', () => {
+ const flankLength = 11
+ const movingRegressor = createMovingRegressor(flankLength)
+
+ movingRegressor.push(0, flywheelPosition(0)) // Datapoint 0
+ movingRegressor.push(0.0127765482722895, flywheelPosition(1)) // Datapoint 1
+ movingRegressor.push(0.0258871873643309, flywheelPosition(2)) // Datapoint 2
+ movingRegressor.push(0.0393522399171293, flywheelPosition(3)) // Datapoint 3
+ movingRegressor.push(0.0531940190835751, flywheelPosition(4)) // Datapoint 4
+ movingRegressor.push(0.067437102746416, flywheelPosition(5)) // Datapoint 5
+ movingRegressor.push(0.0821086572565166, flywheelPosition(6)) // Datapoint 6
+ movingRegressor.push(0.0972388219213607, flywheelPosition(7)) // Datapoint 7
+ movingRegressor.push(0.112861168581494, flywheelPosition(8)) // Datapoint 8
+ movingRegressor.push(0.129013254748914, flywheelPosition(9)) // Datapoint 9
+ movingRegressor.push(0.14573729434739, flywheelPosition(10)) // Datapoint 10
+ testFirstDerivative(movingRegressor, 82.83414815073293) // Datapoint: 0, Theoretical value: 83.0066489499545, Error: -0.2078%
+ testSecondDerivative(movingRegressor, -154.84617912466788) // Datapoint: 0, Theoretical value: -164.489111009377, Error: -5.8624%
+ movingRegressor.push(0.163080977673881, flywheelPosition(11)) // Datapoint 11
+ testFirstDerivative(movingRegressor, 80.83430671887758) // Datapoint: 1, Theoretical value: 80.924915348848, Error: -0.112%
+ testSecondDerivative(movingRegressor, -154.5212766669226) // Datapoint: 1, Theoretical value: -161.385377615499, Error: -4.2532%
+ movingRegressor.push(0.181098482654215, flywheelPosition(12)) // Datapoint 12
+ testFirstDerivative(movingRegressor, 78.77829854479528) // Datapoint: 2, Theoretical value: 78.8297052108031, Error: -0.0652%
+ testSecondDerivative(movingRegressor, -153.1647345450032) // Datapoint: 2, Theoretical value: -158.241328111624, Error: -3.2081%
+ movingRegressor.push(0.199851734088548, flywheelPosition(13)) // Datapoint 13
+ testFirstDerivative(movingRegressor, 76.68917854978797) // Datapoint: 3, Theoretical value: 76.7204774755507, Error: -0.0408%
+ testSecondDerivative(movingRegressor, -151.05377106974638) // Datapoint: 3, Theoretical value: -155.055067784803, Error: -2.5806%
+ movingRegressor.push(0.219411988349432, flywheelPosition(14)) // Datapoint 14
+ testFirstDerivative(movingRegressor, 74.5731479614624) // Datapoint: 4, Theoretical value: 74.5966498720006, Error: -0.0315%
+ testSecondDerivative(movingRegressor, -148.77399328985268) // Datapoint: 4, Theoretical value: -151.824543946359, Error: -2.0093%
+ movingRegressor.push(0.239861850988137, flywheelPosition(15)) // Datapoint 15
+ testFirstDerivative(movingRegressor, 72.43104307362053) // Datapoint: 5, Theoretical value: 72.4575941879248, Error: -0.0366%
+ testSecondDerivative(movingRegressor, -146.291108515136) // Datapoint: 5, Theoretical value: -148.547526597246, Error: -1.519%
+ movingRegressor.push(0.261297878827625, flywheelPosition(16)) // Datapoint 16
+ testFirstDerivative(movingRegressor, 70.26499879303667) // Datapoint: 6, Theoretical value: 70.3026308003372, Error: -0.0535%
+ testSecondDerivative(movingRegressor, -143.58937061930774) // Datapoint: 6, Theoretical value: -145.221585916134, Error: -1.1239%
+ movingRegressor.push(0.283833984369796, flywheelPosition(17)) // Datapoint 17
+ testFirstDerivative(movingRegressor, 68.07758327384184) // Datapoint: 7, Theoretical value: 68.1310223179495, Error: -0.0784%
+ testSecondDerivative(movingRegressor, -140.67035054010483) // Datapoint: 7, Theoretical value: -141.84406590439, Error: -0.8275%
+ movingRegressor.push(0.307605962104817, flywheelPosition(18)) // Datapoint 18
+ testFirstDerivative(movingRegressor, 65.87145764731441) // Datapoint: 8, Theoretical value: 65.9419661500209, Error: -0.1069%
+ testSecondDerivative(movingRegressor, -137.54570192933917) // Datapoint: 8, Theoretical value: -138.412053350131, Error: -0.6259%
+ movingRegressor.push(0.332777616653774, flywheelPosition(19)) // Datapoint 19
+ testFirstDerivative(movingRegressor, 63.648554184124606) // Datapoint: 9, Theoretical value: 63.7345857676683, Error: -0.135%
+ testSecondDerivative(movingRegressor, -134.23327757538945) // Datapoint: 9, Theoretical value: -134.922341047831, Error: -0.5107%
+ movingRegressor.push(0.359549232710504, flywheelPosition(20)) // Datapoint 20
+ testFirstDerivative(movingRegressor, 61.41000388054417) // Datapoint: 10, Theoretical value: 61.5079203602521, Error: -0.1592%
+ testSecondDerivative(movingRegressor, -130.7477684604862) // Datapoint: 10, Theoretical value: -131.371383910936, Error: -0.4747%
+ movingRegressor.push(0.388169562514999, flywheelPosition(21)) // Datapoint 21
+ testFirstDerivative(movingRegressor, 59.1570731910292) // Datapoint: 11, Theoretical value: 59.2609125050954, Error: -0.1752%
+ testSecondDerivative(movingRegressor, -127.08642259785582) // Datapoint: 11, Theoretical value: -127.75524621423, Error: -0.5235%
+ movingRegressor.push(0.418953264914326, flywheelPosition(22)) // Datapoint 22
+ testFirstDerivative(movingRegressor, 56.88180606989519) // Datapoint: 12, Theoretical value: 56.9923933553014, Error: -0.194%
+ testSecondDerivative(movingRegressor, -123.34941561724611) // Datapoint: 12, Theoretical value: -124.069537659016, Error: -0.5804%
+ movingRegressor.push(0.452307108879146, flywheelPosition(23)) // Datapoint 23
+ testFirstDerivative(movingRegressor, 54.5826015331356) // Datapoint: 13, Theoretical value: 54.7010646957755, Error: -0.2166%
+ testSecondDerivative(movingRegressor, -119.53055207765563) // Datapoint: 13, Theoretical value: -120.309335206939, Error: -0.6473%
+ movingRegressor.push(0.488770894429097, flywheelPosition(24)) // Datapoint 24
+ testFirstDerivative(movingRegressor, 52.25778033833686) // Datapoint: 14, Theoretical value: 52.3854770038019, Error: -0.2438%
+ testSecondDerivative(movingRegressor, -115.6226233758796) // Datapoint: 14, Theoretical value: -116.469086585965, Error: -0.7268%
+ movingRegressor.push(0.52908442241122, flywheelPosition(25)) // Datapoint 25
+ testFirstDerivative(movingRegressor, 49.905473188025454) // Datapoint: 15, Theoretical value: 50.0440023505143, Error: -0.2768%
+ testSecondDerivative(movingRegressor, -111.61712925853197) // Datapoint: 15, Theoretical value: -112.542489895293, Error: -0.8222%
+ movingRegressor.push(0.574303665896444, flywheelPosition(26)) // Datapoint 26
+ testFirstDerivative(movingRegressor, 47.52311022187186) // Datapoint: 16, Theoretical value: 47.6748005513145, Error: -0.3182%
+ testSecondDerivative(movingRegressor, -107.50388680814983) // Datapoint: 16, Theoretical value: -108.522341606437, Error: -0.9385%
+ movingRegressor.push(0.626017879593542, flywheelPosition(27)) // Datapoint 27
+ testFirstDerivative(movingRegressor, 45.107793331419145) // Datapoint: 17, Theoretical value: 45.2757763502753, Error: -0.371%
+ testSecondDerivative(movingRegressor, -103.27042724443308) // Datapoint: 17, Theoretical value: -104.400342127248, Error: -1.0823%
+ movingRegressor.push(0.686797656295626, flywheelPosition(28)) // Datapoint 28
+ testFirstDerivative(movingRegressor, 42.656217000836094) // Datapoint: 18, Theoretical value: 42.8445244981284, Error: -0.4395%
+ testSecondDerivative(movingRegressor, -98.8976072195803) // Datapoint: 18, Theoretical value: -100.166843393353, Error: -1.2671%
+ movingRegressor.push(0.761258258676804, flywheelPosition(29)) // Datapoint 29
+ testFirstDerivative(movingRegressor, 40.16444538212078) // Datapoint: 19, Theoretical value: 40.3782581761556, Error: -0.5295%
+ testSecondDerivative(movingRegressor, -94.36804851908964) // Datapoint: 19, Theoretical value: -95.8105157156623, Error: -1.5055%
+ movingRegressor.push(0.85918996538624, flywheelPosition(30)) // Datapoint 30
+ testFirstDerivative(movingRegressor, 37.625913993605074) // Datapoint: 20, Theoretical value: 37.8737140208996, Error: -0.6543%
+ testSecondDerivative(movingRegressor, -89.65680699527465) // Datapoint: 20, Theoretical value: -91.3178996709088, Error: -1.819%
+ movingRegressor.push(1.00926618717299, flywheelPosition(31)) // Datapoint 31
+ testFirstDerivative(movingRegressor, 35.03463828636044) // Datapoint: 21, Theoretical value: 35.3270234685551, Error: -0.8277%
+ testSecondDerivative(movingRegressor, -84.72131405455713) // Datapoint: 21, Theoretical value: -86.6727901598317, Error: -2.2515%
+ movingRegressor.push(2.01853237434599, flywheelPosition(32)) // Datapoint 32
+ testFirstDerivative(movingRegressor, 32.554704278009275) // Datapoint: 22, Theoretical value: 32.7335342472893, Error: -0.5404%
+ testSecondDerivative(movingRegressor, -81.7211071442608) // Datapoint: 22, Theoretical value: -81.8553682135038, Error: -0.1341%
+ movingRegressor.push(3.02779856151898, flywheelPosition(33)) // Datapoint 33
+ testFirstDerivative(movingRegressor, 30.000816353086208) // Datapoint: 23, Theoretical value: 30.0875556300011, Error: -0.2733%
+ testSecondDerivative(movingRegressor, -78.74803078952486) // Datapoint: 23, Theoretical value: -76.8409405553369, Error: 2.563%
+ movingRegressor.push(3.17787478330574, flywheelPosition(34)) // Datapoint 34
+ testFirstDerivative(movingRegressor, 27.35085176667772) // Datapoint: 24, Theoretical value: 27.3819824840534, Error: -0.0837%
+ testSecondDerivative(movingRegressor, -75.79177233150754) // Datapoint: 24, Theoretical value: -71.5980441226457, Error: 6.0231%
+ movingRegressor.push(3.27580649001517, flywheelPosition(35)) // Datapoint 35
+ testFirstDerivative(movingRegressor, 24.26071071895941) // Datapoint: 25, Theoretical value: 24.6077174058151, Error: -1.395%
+ testSecondDerivative(movingRegressor, -70.11339732080359) // Datapoint: 25, Theoretical value: -66.0854711273398, Error: 6.2702%
+ movingRegressor.push(3.35026709239635, flywheelPosition(36)) // Datapoint 36
+ testFirstDerivative(movingRegressor, 20.837518238274917) // Datapoint: 26, Theoretical value: 21.7527364967177, Error: -4.2193%
+ testSecondDerivative(movingRegressor, -63.09857048895648) // Datapoint: 26, Theoretical value: -60.2473455054648, Error: 4.9079%
+ movingRegressor.push(3.41104686909844, flywheelPosition(37)) // Datapoint 37
+ testFirstDerivative(movingRegressor, 16.553286547446575) // Datapoint: 27, Theoretical value: 18.8004784715502, Error: -12.0216%
+ testSecondDerivative(movingRegressor, -53.82083440126937) // Datapoint: 27, Theoretical value: -54.0044029484732, Error: -0.1884%
+ movingRegressor.push(3.46276108279553, flywheelPosition(38)) // Datapoint 38
+ testFirstDerivative(movingRegressor, 12.04945082759771) // Datapoint: 28, Theoretical value: 15.7268191179949, Error: -23.5492%
+ testSecondDerivative(movingRegressor, -44.46299143724117) // Datapoint: 28, Theoretical value: -47.2370928078489, Error: -5.7614%
+ movingRegressor.push(3.50798032628076, flywheelPosition(39)) // Datapoint 39
+ testFirstDerivative(movingRegressor, 7.995979006137617) // Datapoint: 29, Theoretical value: 12.4936663152318, Error: -36.3269%
+ testSecondDerivative(movingRegressor, -35.67688197212385) // Datapoint: 29, Theoretical value: -39.7484244987643, Error: -10.203%
+ movingRegressor.push(3.54829385426288, flywheelPosition(40)) // Datapoint 40
+ testFirstDerivative(movingRegressor, 4.545499777144109) // Datapoint: 30, Theoretical value: 9.03268562508831, Error: -50.2852%
+ testSecondDerivative(movingRegressor, -26.644166155256052) // Datapoint: 30, Theoretical value: -31.164858820935, Error: -14.6531%
+ movingRegressor.push(3.58475763981283, flywheelPosition(41)) // Datapoint 41
+ testFirstDerivative(movingRegressor, 3.8462783014954773) // Datapoint: 31, Theoretical value: 5.18791555937216, Error: -26.4463%
+ testSecondDerivative(movingRegressor, -19.207586578866348) // Datapoint: 31, Theoretical value: -20.5611388761721, Error: -7.4435%
+ movingRegressor.push(3.61811148377765, flywheelPosition(42)) // Datapoint 42
+ testFirstDerivative(movingRegressor, 3.1383576841321967) // Datapoint: 32, Theoretical value: 0
+ testSecondDerivative(movingRegressor, 4.409564582673597e-15) // Datapoint: 32, Theoretical value: 0
+ movingRegressor.push(3.64889518617698, flywheelPosition(43)) // Datapoint 43
+ testFirstDerivative(movingRegressor, 3.8462783014949977) // Datapoint: 33, Theoretical value: 5.18791555937215, Error: -26.4463%
+ testSecondDerivative(movingRegressor, 19.2075865788684) // Datapoint: 33, Theoretical value: 20.5611388761721, Error: -7.4435%
+ movingRegressor.push(3.67751551598147, flywheelPosition(44)) // Datapoint 44
+ testFirstDerivative(movingRegressor, 4.545499777143718) // Datapoint: 34, Theoretical value: 9.03268562508831, Error: -50.2852%
+ testSecondDerivative(movingRegressor, 26.64416615525877) // Datapoint: 34, Theoretical value: 31.164858820935, Error: -14.6531%
+ movingRegressor.push(3.7042871320382, flywheelPosition(45)) // Datapoint 45
+ testFirstDerivative(movingRegressor, 7.995979006135855) // Datapoint: 35, Theoretical value: 12.4936663152318, Error: -36.3269%
+ testSecondDerivative(movingRegressor, 35.67688197213815) // Datapoint: 35, Theoretical value: 39.7484244987643, Error: -10.203%
+ movingRegressor.push(3.72945878658716, flywheelPosition(46)) // Datapoint 46
+ testFirstDerivative(movingRegressor, 12.049450827592068) // Datapoint: 36, Theoretical value: 15.7268191179949, Error: -23.5492%
+ testSecondDerivative(movingRegressor, 44.46299143727433) // Datapoint: 36, Theoretical value: 47.2370928078489, Error: -5.7614%
+ movingRegressor.push(3.75323076432218, flywheelPosition(47)) // Datapoint 47
+ testFirstDerivative(movingRegressor, 16.55328654744136) // Datapoint: 37, Theoretical value: 18.8004784715502, Error: -12.0216%
+ testSecondDerivative(movingRegressor, 53.820834401312524) // Datapoint: 37, Theoretical value: 54.0044029484732, Error: -0.1884%
+ movingRegressor.push(3.77576686986435, flywheelPosition(48)) // Datapoint 48
+ testFirstDerivative(movingRegressor, 20.837518238271798) // Datapoint: 38, Theoretical value: 21.7527364967177, Error: -4.2193%
+ testSecondDerivative(movingRegressor, 63.0985704889981) // Datapoint: 38, Theoretical value: 60.2473455054648, Error: 4.9079%
+ movingRegressor.push(3.79720289770384, flywheelPosition(49)) // Datapoint 49
+ testFirstDerivative(movingRegressor, 24.260710718959217) // Datapoint: 39, Theoretical value: 24.6077174058151, Error: -1.395%
+ testSecondDerivative(movingRegressor, 70.11339732083867) // Datapoint: 39, Theoretical value: 66.0854711273397, Error: 6.2702%
+ movingRegressor.push(3.81765276034255, flywheelPosition(50)) // Datapoint 50
+ testFirstDerivative(movingRegressor, 27.35085176667826) // Datapoint: 40, Theoretical value: 27.3819824840534, Error: -0.0837%
+ testSecondDerivative(movingRegressor, 75.79177233154196) // Datapoint: 40, Theoretical value: 71.5980441226458, Error: 6.0231%
+ movingRegressor.push(3.83721301460343, flywheelPosition(51)) // Datapoint 51
+ testFirstDerivative(movingRegressor, 30.000816353087885) // Datapoint: 41, Theoretical value: 30.0875556300011, Error: -0.2733%
+ testSecondDerivative(movingRegressor, 78.74803078955205) // Datapoint: 41, Theoretical value: 76.8409405553369, Error: 2.563%
+ movingRegressor.push(3.85596626603776, flywheelPosition(52)) // Datapoint 52
+ testFirstDerivative(movingRegressor, 32.5547042780135) // Datapoint: 42, Theoretical value: 32.7335342472893, Error: -0.5404%
+ testSecondDerivative(movingRegressor, 81.72110714427114) // Datapoint: 42, Theoretical value: 81.8553682135038, Error: -0.1341%
+ movingRegressor.push(3.8739837710181, flywheelPosition(53)) // Datapoint 53
+ testFirstDerivative(movingRegressor, 35.03463828636535) // Datapoint: 43, Theoretical value: 35.3270234685551, Error: -0.8277%
+ testSecondDerivative(movingRegressor, 84.72131405455926) // Datapoint: 43, Theoretical value: 86.6727901598316, Error: -2.2515%
+ movingRegressor.push(3.89132745434459, flywheelPosition(54)) // Datapoint 54
+ testFirstDerivative(movingRegressor, 37.625913993608265) // Datapoint: 44, Theoretical value: 37.8737140208995, Error: -0.6543%
+ testSecondDerivative(movingRegressor, 89.65680699528338) // Datapoint: 44, Theoretical value: 91.3178996709088, Error: -1.819%
+ movingRegressor.push(3.90805149394306, flywheelPosition(55)) // Datapoint 55
+ testFirstDerivative(movingRegressor, 40.16444538212363) // Datapoint: 45, Theoretical value: 40.3782581761556, Error: -0.5295%
+ testSecondDerivative(movingRegressor, 94.36804851909604) // Datapoint: 45, Theoretical value: 95.8105157156622, Error: -1.5055%
+ movingRegressor.push(3.92420358011048, flywheelPosition(56)) // Datapoint 56
+ testFirstDerivative(movingRegressor, 42.656217000838126) // Datapoint: 46, Theoretical value: 42.8445244981284, Error: -0.4395%
+ testSecondDerivative(movingRegressor, 98.89760721956821) // Datapoint: 46, Theoretical value: 100.166843393353, Error: -1.2671%
+ movingRegressor.push(3.93982592677062, flywheelPosition(57)) // Datapoint 57
+ testFirstDerivative(movingRegressor, 45.10779333141892) // Datapoint: 47, Theoretical value: 45.2757763502753, Error: -0.371%
+ testSecondDerivative(movingRegressor, 103.27042724442408) // Datapoint: 47, Theoretical value: 104.400342127248, Error: -1.0823%
+ movingRegressor.push(3.95495609143546, flywheelPosition(58)) // Datapoint 58
+ testFirstDerivative(movingRegressor, 47.5231102218691) // Datapoint: 48, Theoretical value: 47.6748005513145, Error: -0.3182%
+ testSecondDerivative(movingRegressor, 107.50388680816235) // Datapoint: 48, Theoretical value: 108.522341606437, Error: -0.9385%
+ movingRegressor.push(3.96962764594556, flywheelPosition(59)) // Datapoint 59
+ testFirstDerivative(movingRegressor, 49.905473188024416) // Datapoint: 49, Theoretical value: 50.0440023505143, Error: -0.2768%
+ testSecondDerivative(movingRegressor, 111.6171292585368) // Datapoint: 49, Theoretical value: 112.542489895293, Error: -0.8222%
+ movingRegressor.push(3.9838707296084, flywheelPosition(60)) // Datapoint 60
+ testFirstDerivative(movingRegressor, 52.25778033833785) // Datapoint: 50, Theoretical value: 52.3854770038019, Error: -0.2438%
+ testSecondDerivative(movingRegressor, 115.62262337587573) // Datapoint: 50, Theoretical value: 116.469086585965, Error: -0.7268%
+ movingRegressor.push(3.99771250877485, flywheelPosition(61)) // Datapoint 61
+ testFirstDerivative(movingRegressor, 54.582601533135005) // Datapoint: 51, Theoretical value: 54.7010646957755, Error: -0.2166%
+ testSecondDerivative(movingRegressor, 119.53055207766285) // Datapoint: 51, Theoretical value: 120.309335206939, Error: -0.6473%
+ movingRegressor.push(4.01117756132765, flywheelPosition(62)) // Datapoint 62
+ testFirstDerivative(movingRegressor, 56.881806069895106) // Datapoint: 52, Theoretical value: 56.9923933553014, Error: -0.194%
+ testSecondDerivative(movingRegressor, 123.34941561724908) // Datapoint: 52, Theoretical value: 124.069537659016, Error: -0.5804%
+ movingRegressor.push(4.02428820041969, flywheelPosition(63)) // Datapoint 63
+ testFirstDerivative(movingRegressor, 59.15707319103194) // Datapoint: 53, Theoretical value: 59.2609125050953, Error: -0.1752%
+ testSecondDerivative(movingRegressor, 127.08642259784997) // Datapoint: 53, Theoretical value: 127.75524621423, Error: -0.5235%
+ movingRegressor.push(4.02428820041969, flywheelPosition(64)) // Datapoint 64
+ testFirstDerivative(movingRegressor, 61.41026002082708) // Datapoint: 54, Theoretical value: 61.5079203602521, Error: -0.1588%
+ testSecondDerivative(movingRegressor, 130.7372321804723) // Datapoint: 54, Theoretical value: 131.371383910936, Error: -0.4827%
+ movingRegressor.push(4.03706474869198, flywheelPosition(65)) // Datapoint 65
+ testFirstDerivative(movingRegressor, 63.644342254392484) // Datapoint: 55, Theoretical value: 63.7345857676683, Error: -0.1416%
+ testSecondDerivative(movingRegressor, 134.33869512072937) // Datapoint: 55, Theoretical value: 134.922341047831, Error: -0.4326%
+ movingRegressor.push(4.05017538778402, flywheelPosition(66)) // Datapoint 66
+ testFirstDerivative(movingRegressor, 65.86741453755053) // Datapoint: 56, Theoretical value: 65.9419661500209, Error: -0.1131%
+ testSecondDerivative(movingRegressor, 137.8780141797187) // Datapoint: 56, Theoretical value: 138.412053350131, Error: -0.3858%
+ movingRegressor.push(4.06364044033682, flywheelPosition(67)) // Datapoint 67
+ testFirstDerivative(movingRegressor, 68.54444889883757) // Datapoint: 57, Theoretical value: 68.1310223179495, Error: 0.6068%
+ testSecondDerivative(movingRegressor, 141.16457001710992) // Datapoint: 57, Theoretical value: 141.84406590439, Error: -0.479%
+ movingRegressor.push(4.07748221950326, flywheelPosition(68)) // Datapoint 68
+ testFirstDerivative(movingRegressor, 71.3654378949409) // Datapoint: 58, Theoretical value: 70.3026308003372, Error: 1.5118%
+ testSecondDerivative(movingRegressor, 143.98144239891764) // Datapoint: 58, Theoretical value: 145.221585916134, Error: -0.854%
+ movingRegressor.push(4.0917253031661, flywheelPosition(69)) // Datapoint 69
+ testFirstDerivative(movingRegressor, 75.30164018219989) // Datapoint: 59, Theoretical value: 72.4575941879248, Error: 3.9251%
+ testSecondDerivative(movingRegressor, 126.07905869782206) // Datapoint: 59, Theoretical value: 148.547526597246, Error: -15.1254%
+ movingRegressor.push(4.1063968576762, flywheelPosition(70)) // Datapoint 70
+ testFirstDerivative(movingRegressor, 78.57819540283134) // Datapoint: 60, Theoretical value: 74.5966498720006, Error: 5.3374%
+ testSecondDerivative(movingRegressor, 103.79601069533149) // Datapoint: 60, Theoretical value: 151.824543946359, Error: -31.6342%
+ movingRegressor.push(4.12152702234105, flywheelPosition(71)) // Datapoint 71
+ testFirstDerivative(movingRegressor, 81.20994360799062) // Datapoint: 61, Theoretical value: 76.7204774755507, Error: 5.8517%
+ testSecondDerivative(movingRegressor, 77.65096540872354) // Datapoint: 61, Theoretical value: 155.055067784803, Error: -49.9204%
+ movingRegressor.push(4.13714936900118, flywheelPosition(72)) // Datapoint 72
+ testFirstDerivative(movingRegressor, 83.04710562163946) // Datapoint: 62, Theoretical value: 78.829705210803, Error: 5.35%
+ testSecondDerivative(movingRegressor, 47.96831078789868) // Datapoint: 62, Theoretical value: 158.241328111624, Error: -69.6866%
+ movingRegressor.push(4.1533014551686, flywheelPosition(73)) // Datapoint 73
+ testFirstDerivative(movingRegressor, 83.97100662917377) // Datapoint: 63, Theoretical value: 80.924915348848, Error: 3.7641%
+ testSecondDerivative(movingRegressor, 14.954334770920239) // Datapoint: 63, Theoretical value: 161.385377615499, Error: -90.7338%
+ movingRegressor.push(4.17002549476708, flywheelPosition(74)) // Datapoint 74
+ testFirstDerivative(movingRegressor, 84.16754477550508) // Datapoint: 64, Theoretical value: 83.0066489499545, Error: 1.3986%
+ testSecondDerivative(movingRegressor, -17.092112574980536) // Datapoint: 64, Theoretical value: -164.489111009377, Error: -89.609%
+ movingRegressor.push(4.18736917809357, flywheelPosition(75)) // Datapoint 75
+ testFirstDerivative(movingRegressor, 83.5716461621889) // Datapoint: 65, Theoretical value: 80.924915348848, Error: 3.2706%
+ testSecondDerivative(movingRegressor, -49.900050401294735) // Datapoint: 65, Theoretical value: -161.385377615499, Error: -69.0802%
+ movingRegressor.push(4.2053866830739, flywheelPosition(76)) // Datapoint 76
+ testFirstDerivative(movingRegressor, 82.06385360415482) // Datapoint: 66, Theoretical value: 78.8297052108031, Error: 4.1027%
+ testSecondDerivative(movingRegressor, -79.55362430561435) // Datapoint: 66, Theoretical value: -158.241328111624, Error: -49.7264%
+ movingRegressor.push(4.22413993450824, flywheelPosition(77)) // Datapoint 77
+ testFirstDerivative(movingRegressor, 79.76241462500502) // Datapoint: 67, Theoretical value: 76.7204774755507, Error: 3.965%
+ testSecondDerivative(movingRegressor, -105.7692377776201) // Datapoint: 67, Theoretical value: -155.055067784803, Error: -31.786%
+ movingRegressor.push(4.24370018876912, flywheelPosition(78)) // Datapoint 78
+ testFirstDerivative(movingRegressor, 76.41914684790277) // Datapoint: 68, Theoretical value: 74.5966498720006, Error: 2.4431%
+ testSecondDerivative(movingRegressor, -129.14752813375634) // Datapoint: 68, Theoretical value: -151.824543946359, Error: -14.9363%
+ movingRegressor.push(4.26415005140783, flywheelPosition(79)) // Datapoint 79
+ testFirstDerivative(movingRegressor, 72.72961089976661) // Datapoint: 69, Theoretical value: 72.4575941879248, Error: 0.3754%
+ testSecondDerivative(movingRegressor, -147.17724519180067) // Datapoint: 69, Theoretical value: -148.547526597246, Error: -0.9225%
+ movingRegressor.push(4.28558607924731, flywheelPosition(80)) // Datapoint 80
+ testFirstDerivative(movingRegressor, 70.25958290426024) // Datapoint: 70, Theoretical value: 70.3026308003372, Error: -0.0612%
+ testSecondDerivative(movingRegressor, -144.4135816996841) // Datapoint: 70, Theoretical value: -145.221585916134, Error: -0.5564%
+ movingRegressor.push(4.30812218478948, flywheelPosition(81)) // Datapoint 81
+ testFirstDerivative(movingRegressor, 68.06067353487629) // Datapoint: 71, Theoretical value: 68.1310223179495, Error: -0.1033%
+ testSecondDerivative(movingRegressor, -141.20668760476255) // Datapoint: 71, Theoretical value: -141.84406590439, Error: -0.4494%
+ movingRegressor.push(4.33189416252451, flywheelPosition(82)) // Datapoint 82
+ testFirstDerivative(movingRegressor, 65.86108760287186) // Datapoint: 72, Theoretical value: 65.9419661500209, Error: -0.1227%
+ testSecondDerivative(movingRegressor, -137.74011344154923) // Datapoint: 72, Theoretical value: -138.412053350131, Error: -0.4855%
+ movingRegressor.push(4.35706581707346, flywheelPosition(83)) // Datapoint 83
+ testFirstDerivative(movingRegressor, 63.648449832698134) // Datapoint: 73, Theoretical value: 63.7345857676683, Error: -0.1351%
+ testSecondDerivative(movingRegressor, -134.22069550066365) // Datapoint: 73, Theoretical value: -134.922341047831, Error: -0.52%
+ movingRegressor.push(4.38383743313019, flywheelPosition(84)) // Datapoint 84
+ testFirstDerivative(movingRegressor, 61.41665794554547) // Datapoint: 74, Theoretical value: 61.5079203602521, Error: -0.1484%
+ testSecondDerivative(movingRegressor, -130.65983476435815) // Datapoint: 74, Theoretical value: -131.371383910936, Error: -0.5416%
+ movingRegressor.push(4.41245776293469, flywheelPosition(85)) // Datapoint 85
+ testFirstDerivative(movingRegressor, 59.15707319102739) // Datapoint: 75, Theoretical value: 59.2609125050954, Error: -0.1752%
+ testSecondDerivative(movingRegressor, -127.08642259788174) // Datapoint: 75, Theoretical value: -127.75524621423, Error: -0.5235%
+ movingRegressor.push(4.44324146533401, flywheelPosition(86)) // Datapoint 86
+ testFirstDerivative(movingRegressor, 56.881806069896356) // Datapoint: 76, Theoretical value: 56.9923933553014, Error: -0.194%
+ testSecondDerivative(movingRegressor, -123.34941561724712) // Datapoint: 76, Theoretical value: -124.069537659016, Error: -0.5804%
+ movingRegressor.push(4.47659530929884, flywheelPosition(87)) // Datapoint 87
+ testFirstDerivative(movingRegressor, 54.58260153313722) // Datapoint: 77, Theoretical value: 54.7010646957755, Error: -0.2166%
+ testSecondDerivative(movingRegressor, -119.53055207765404) // Datapoint: 77, Theoretical value: -120.309335206939, Error: -0.6473%
+ movingRegressor.push(4.51305909484878, flywheelPosition(88)) // Datapoint 88
+ testFirstDerivative(movingRegressor, 52.257780338337625) // Datapoint: 78, Theoretical value: 52.3854770038019, Error: -0.2438%
+ testSecondDerivative(movingRegressor, -115.62262337589141) // Datapoint: 78, Theoretical value: -116.469086585965, Error: -0.7268%
+ movingRegressor.push(4.55337262283091, flywheelPosition(89)) // Datapoint 89
+ testFirstDerivative(movingRegressor, 49.905473188025326) // Datapoint: 79, Theoretical value: 50.0440023505143, Error: -0.2768%
+ testSecondDerivative(movingRegressor, -111.61712925855326) // Datapoint: 79, Theoretical value: -112.542489895293, Error: -0.8222%
+ movingRegressor.push(4.59859186631613, flywheelPosition(90)) // Datapoint 90
+ testFirstDerivative(movingRegressor, 47.52311022187348) // Datapoint: 80, Theoretical value: 47.6748005513145, Error: -0.3182%
+ testSecondDerivative(movingRegressor, -107.50388680816754) // Datapoint: 80, Theoretical value: -108.522341606437, Error: -0.9385%
+ movingRegressor.push(4.65030608001323, flywheelPosition(91)) // Datapoint 91
+ testFirstDerivative(movingRegressor, 45.10779333141943) // Datapoint: 81, Theoretical value: 45.2757763502753, Error: -0.371%
+ testSecondDerivative(movingRegressor, -103.27042724445896) // Datapoint: 81, Theoretical value: -104.400342127248, Error: -1.0823%
+ movingRegressor.push(4.71108585671531, flywheelPosition(92)) // Datapoint 92
+ testFirstDerivative(movingRegressor, 42.656217000835056) // Datapoint: 82, Theoretical value: 42.8445244981284, Error: -0.4395%
+ testSecondDerivative(movingRegressor, -98.89760721960394) // Datapoint: 82, Theoretical value: -100.166843393353, Error: -1.2671%
+ movingRegressor.push(4.78554645909649, flywheelPosition(93)) // Datapoint 93
+ testFirstDerivative(movingRegressor, 40.164445382119595) // Datapoint: 83, Theoretical value: 40.3782581761556, Error: -0.5295%
+ testSecondDerivative(movingRegressor, -94.36804851911774) // Datapoint: 83, Theoretical value: -95.8105157156623, Error: -1.5055%
+ movingRegressor.push(4.88347816580593, flywheelPosition(94)) // Datapoint 94
+ testFirstDerivative(movingRegressor, 37.62591399360383) // Datapoint: 84, Theoretical value: 37.8737140208996, Error: -0.6543%
+ testSecondDerivative(movingRegressor, -89.65680699529587) // Datapoint: 84, Theoretical value: -91.3178996709088, Error: -1.819%
+ movingRegressor.push(5.03355438759268, flywheelPosition(95)) // Datapoint 95
+ testFirstDerivative(movingRegressor, 35.034638286358415) // Datapoint: 85, Theoretical value: 35.3270234685551, Error: -0.8277%
+ testSecondDerivative(movingRegressor, -84.7213140545765) // Datapoint: 85, Theoretical value: -86.6727901598317, Error: -2.2515%
+ movingRegressor.push(6.04282057476568, flywheelPosition(96)) // Datapoint 96
+ testFirstDerivative(movingRegressor, 32.55470427800782) // Datapoint: 86, Theoretical value: 32.7335342472893, Error: -0.5404%
+ testSecondDerivative(movingRegressor, -81.72110714427964) // Datapoint: 86, Theoretical value: -81.8553682135038, Error: -0.1341%
+ movingRegressor.push(7.05208676193867, flywheelPosition(97)) // Datapoint 97
+ testFirstDerivative(movingRegressor, 30.0008163530847) // Datapoint: 87, Theoretical value: 30.0875556300011, Error: -0.2733%
+ testSecondDerivative(movingRegressor, -78.74803078953659) // Datapoint: 87, Theoretical value: -76.8409405553369, Error: 2.563%
+ movingRegressor.push(7.20216298372543, flywheelPosition(98)) // Datapoint 98
+ testFirstDerivative(movingRegressor, 27.350851766678318) // Datapoint: 88, Theoretical value: 27.3819824840534, Error: -0.0837%
+ testSecondDerivative(movingRegressor, -75.7917723315129) // Datapoint: 88, Theoretical value: -71.5980441226457, Error: 6.0231%
+ movingRegressor.push(7.30009469043486, flywheelPosition(99)) // Datapoint 99
+ testFirstDerivative(movingRegressor, 24.260710718958364) // Datapoint: 89, Theoretical value: 24.6077174058151, Error: -1.395%
+ testSecondDerivative(movingRegressor, -70.11339732081393) // Datapoint: 89, Theoretical value: -66.0854711273398, Error: 6.2702%
+ movingRegressor.push(7.37455529281604, flywheelPosition(100)) // Datapoint 100
+ testFirstDerivative(movingRegressor, 20.837518238273844) // Datapoint: 90, Theoretical value: 21.7527364967177, Error: -4.2193%
+ testSecondDerivative(movingRegressor, -63.09857048896637) // Datapoint: 90, Theoretical value: -60.2473455054648, Error: 4.9079%
+ movingRegressor.push(7.43533506951812, flywheelPosition(101)) // Datapoint 101
+ testFirstDerivative(movingRegressor, 16.55328654744659) // Datapoint: 91, Theoretical value: 18.8004784715502, Error: -12.0216%
+ testSecondDerivative(movingRegressor, -53.82083440127297) // Datapoint: 91, Theoretical value: -54.0044029484732, Error: -0.1884%
+ movingRegressor.push(7.48704928321522, flywheelPosition(102)) // Datapoint 102
+ testFirstDerivative(movingRegressor, 12.04945082759599) // Datapoint: 92, Theoretical value: 15.7268191179949, Error: -23.5492%
+ testSecondDerivative(movingRegressor, -44.46299143725113) // Datapoint: 92, Theoretical value: -47.2370928078489, Error: -5.7614%
+ movingRegressor.push(7.53226852670045, flywheelPosition(103)) // Datapoint 103
+ testFirstDerivative(movingRegressor, 7.995979006137162) // Datapoint: 93, Theoretical value: 12.4936663152318, Error: -36.3269%
+ testSecondDerivative(movingRegressor, -35.676881972128726) // Datapoint: 93, Theoretical value: -39.7484244987643, Error: -10.203%
+ movingRegressor.push(7.57258205468257, flywheelPosition(104)) // Datapoint 104
+ testFirstDerivative(movingRegressor, 4.5454997771441015) // Datapoint: 94, Theoretical value: 9.03268562508831, Error: -50.2852%
+ testSecondDerivative(movingRegressor, -26.64416615525746) // Datapoint: 94, Theoretical value: -31.164858820935, Error: -14.6531%
+ movingRegressor.push(7.60904584023252, flywheelPosition(105)) // Datapoint 105
+ testFirstDerivative(movingRegressor, 3.846278301494735) // Datapoint: 95, Theoretical value: 5.18791555937216, Error: -26.4463%
+ testSecondDerivative(movingRegressor, -19.20758657886873) // Datapoint: 95, Theoretical value: -20.5611388761721, Error: -7.4435%
+ movingRegressor.push(7.64239968419734, flywheelPosition(106)) // Datapoint 106
+ testFirstDerivative(movingRegressor, 3.1383576841322074) // Datapoint: 96, Theoretical value: 0
+ testSecondDerivative(movingRegressor, 1.3705117896314261e-14) // Datapoint: 96, Theoretical value: 0
+ movingRegressor.push(7.67318338659667, flywheelPosition(107)) // Datapoint 107
+ testFirstDerivative(movingRegressor, 3.846278301494692) // Datapoint: 97, Theoretical value: 5.18791555937215, Error: -26.4463%
+ testSecondDerivative(movingRegressor, 19.20758657887071) // Datapoint: 97, Theoretical value: 20.5611388761721, Error: -7.4435%
+ movingRegressor.push(7.70180371640116, flywheelPosition(108)) // Datapoint 108
+ testFirstDerivative(movingRegressor, 4.545499777143846) // Datapoint: 98, Theoretical value: 9.03268562508831, Error: -50.2852%
+ testSecondDerivative(movingRegressor, 26.644166155259857) // Datapoint: 98, Theoretical value: 31.164858820935, Error: -14.6531%
+ movingRegressor.push(7.72857533245789, flywheelPosition(109)) // Datapoint 109
+ testFirstDerivative(movingRegressor, 7.995979006133808) // Datapoint: 99, Theoretical value: 12.4936663152318, Error: -36.3269%
+ testSecondDerivative(movingRegressor, 35.67688197214775) // Datapoint: 99, Theoretical value: 39.7484244987643, Error: -10.203%
+ movingRegressor.push(7.75374698700685, flywheelPosition(110)) // Datapoint 110
+ testFirstDerivative(movingRegressor, 12.049450827602357) // Datapoint: 100, Theoretical value: 15.7268191179949, Error: -23.5492%
+ testSecondDerivative(movingRegressor, 44.46299143724677) // Datapoint: 100, Theoretical value: 47.2370928078489, Error: -5.7614%
+ movingRegressor.push(7.77751896474187, flywheelPosition(111)) // Datapoint 111
+ testFirstDerivative(movingRegressor, 16.55328654745199) // Datapoint: 101, Theoretical value: 18.8004784715502, Error: -12.0216%
+ testSecondDerivative(movingRegressor, 53.82083440127187) // Datapoint: 101, Theoretical value: 54.0044029484732, Error: -0.1884%
+ movingRegressor.push(7.80005507028404, flywheelPosition(112)) // Datapoint 112
+ testFirstDerivative(movingRegressor, 20.837518238281234) // Datapoint: 102, Theoretical value: 21.7527364967177, Error: -4.2193%
+ testSecondDerivative(movingRegressor, 63.09857048895572) // Datapoint: 102, Theoretical value: 60.2473455054648, Error: 4.9079%
+ movingRegressor.push(7.82149109812353, flywheelPosition(113)) // Datapoint 113
+ testFirstDerivative(movingRegressor, 24.260710718966607) // Datapoint: 103, Theoretical value: 24.6077174058151, Error: -1.395%
+ testSecondDerivative(movingRegressor, 70.11339732079524) // Datapoint: 103, Theoretical value: 66.0854711273397, Error: 6.2702%
+ movingRegressor.push(7.84194096076223, flywheelPosition(114)) // Datapoint 114
+ testFirstDerivative(movingRegressor, 27.350851766683718) // Datapoint: 104, Theoretical value: 27.3819824840534, Error: -0.0837%
+ testSecondDerivative(movingRegressor, 75.79177233149834) // Datapoint: 104, Theoretical value: 71.5980441226458, Error: 6.0231%
+ movingRegressor.push(7.86150121502312, flywheelPosition(115)) // Datapoint 115
+ testFirstDerivative(movingRegressor, 30.000816353090613) // Datapoint: 105, Theoretical value: 30.0875556300011, Error: -0.2733%
+ testSecondDerivative(movingRegressor, 78.7480307895183) // Datapoint: 105, Theoretical value: 76.8409405553369, Error: 2.563%
+ movingRegressor.push(7.88025446645745, flywheelPosition(116)) // Datapoint 116
+ testFirstDerivative(movingRegressor, 32.55470427801254) // Datapoint: 106, Theoretical value: 32.7335342472893, Error: -0.5404%
+ testSecondDerivative(movingRegressor, 81.72110714425453) // Datapoint: 106, Theoretical value: 81.8553682135038, Error: -0.1341%
+ movingRegressor.push(7.89827197143778, flywheelPosition(117)) // Datapoint 117
+ testFirstDerivative(movingRegressor, 35.034638286363815) // Datapoint: 107, Theoretical value: 35.3270234685551, Error: -0.8277%
+ testSecondDerivative(movingRegressor, 84.72131405454387) // Datapoint: 107, Theoretical value: 86.6727901598316, Error: -2.2515%
+ movingRegressor.push(7.91561565476428, flywheelPosition(118)) // Datapoint 118
+ testFirstDerivative(movingRegressor, 37.62591399360758) // Datapoint: 108, Theoretical value: 37.8737140208995, Error: -0.6543%
+ testSecondDerivative(movingRegressor, 89.65680699524638) // Datapoint: 108, Theoretical value: 91.3178996709088, Error: -1.819%
+ movingRegressor.push(7.93233969436275, flywheelPosition(119)) // Datapoint 119
+ testFirstDerivative(movingRegressor, 40.16444538212181) // Datapoint: 109, Theoretical value: 40.3782581761556, Error: -0.5295%
+ testSecondDerivative(movingRegressor, 94.36804851906145) // Datapoint: 109, Theoretical value: 95.8105157156622, Error: -1.5055%
+ movingRegressor.push(7.94849178053017, flywheelPosition(120)) // Datapoint 120
+ testFirstDerivative(movingRegressor, 42.656217000836136) // Datapoint: 110, Theoretical value: 42.8445244981284, Error: -0.4395%
+ testSecondDerivative(movingRegressor, 98.89760721951238) // Datapoint: 110, Theoretical value: 100.166843393353, Error: -1.2671%
+ movingRegressor.push(7.9641141271903, flywheelPosition(121)) // Datapoint 121
+ testFirstDerivative(movingRegressor, 45.10779333141909) // Datapoint: 111, Theoretical value: 45.2757763502753, Error: -0.371%
+ testSecondDerivative(movingRegressor, 103.27042724435985) // Datapoint: 111, Theoretical value: 104.400342127248, Error: -1.0823%
+ movingRegressor.push(7.97924429185515, flywheelPosition(122)) // Datapoint 122
+ testFirstDerivative(movingRegressor, 47.52311022187121) // Datapoint: 112, Theoretical value: 47.6748005513145, Error: -0.3182%
+ testSecondDerivative(movingRegressor, 107.50388680806326) // Datapoint: 112, Theoretical value: 108.522341606437, Error: -0.9385%
+ movingRegressor.push(7.99391584636525, flywheelPosition(123)) // Datapoint 123
+ testFirstDerivative(movingRegressor, 49.90547318802419) // Datapoint: 113, Theoretical value: 50.0440023505143, Error: -0.2768%
+ testSecondDerivative(movingRegressor, 111.61712925841942) // Datapoint: 113, Theoretical value: 112.542489895293, Error: -0.8222%
+ movingRegressor.push(8.00815893002809, flywheelPosition(124)) // Datapoint 124
+ testFirstDerivative(movingRegressor, 52.25778033833399) // Datapoint: 114, Theoretical value: 52.3854770038019, Error: -0.2438%
+ testSecondDerivative(movingRegressor, 115.62262337575498) // Datapoint: 114, Theoretical value: 116.469086585965, Error: -0.7268%
+ movingRegressor.push(8.02200070919454, flywheelPosition(125)) // Datapoint 125
+ testFirstDerivative(movingRegressor, 54.582601533131424) // Datapoint: 115, Theoretical value: 54.7010646957755, Error: -0.2166%
+ testSecondDerivative(movingRegressor, 119.530552077508) // Datapoint: 115, Theoretical value: 120.309335206939, Error: -0.6473%
+ movingRegressor.push(8.03546576174734, flywheelPosition(126)) // Datapoint 126
+ testFirstDerivative(movingRegressor, 56.88180606988715) // Datapoint: 116, Theoretical value: 56.9923933553014, Error: -0.194%
+ testSecondDerivative(movingRegressor, 123.34941561712556) // Datapoint: 116, Theoretical value: 124.069537659016, Error: -0.5804%
+ movingRegressor.push(8.04857640083938, flywheelPosition(127)) // Datapoint 127
+ testFirstDerivative(movingRegressor, 59.15707319101773) // Datapoint: 117, Theoretical value: 59.2609125050953, Error: -0.1752%
+ testSecondDerivative(movingRegressor, 127.08642259777825) // Datapoint: 117, Theoretical value: 127.75524621423, Error: -0.5235%
+ movingRegressor.push(8.06135294911167, flywheelPosition(128)) // Datapoint 128
+ testFirstDerivative(movingRegressor, 61.41000388053635) // Datapoint: 118, Theoretical value: 61.5079203602521, Error: -0.1592%
+ testSecondDerivative(movingRegressor, 130.74776846040675) // Datapoint: 118, Theoretical value: 131.371383910936, Error: -0.4747%
+ movingRegressor.push(8.07381397226209, flywheelPosition(129)) // Datapoint 129
+ testFirstDerivative(movingRegressor, 63.64197850554342) // Datapoint: 119, Theoretical value: 63.7345857676683, Error: -0.1453%
+ testSecondDerivative(movingRegressor, 134.3388391346595) // Datapoint: 119, Theoretical value: 134.922341047831, Error: -0.4325%
+ movingRegressor.push(8.08597647902856, flywheelPosition(130)) // Datapoint 130
+ testFirstDerivative(movingRegressor, 65.85414584244677) // Datapoint: 120, Theoretical value: 65.9419661500209, Error: -0.1332%
+ testSecondDerivative(movingRegressor, 137.86436162982568) // Datapoint: 120, Theoretical value: 138.412053350131, Error: -0.3957%
+ movingRegressor.push(8.09785609325274, flywheelPosition(131)) // Datapoint 131
+ testFirstDerivative(movingRegressor, 68.0475404855556) // Datapoint: 121, Theoretical value: 68.1310223179495, Error: -0.1225%
+ testSecondDerivative(movingRegressor, 141.32851756189933) // Datapoint: 121, Theoretical value: 141.84406590439, Error: -0.3635%
+ movingRegressor.push(8.10946720260155, flywheelPosition(132)) // Datapoint 132
+ testFirstDerivative(movingRegressor, 70.22309927953825) // Datapoint: 122, Theoretical value: 70.3026308003372, Error: -0.1131%
+ testSecondDerivative(movingRegressor, 144.73503205195712) // Datapoint: 122, Theoretical value: 145.221585916134, Error: -0.335%
+ movingRegressor.push(8.1208230876678, flywheelPosition(133)) // Datapoint 133
+ testFirstDerivative(movingRegressor, 72.38167464942285) // Datapoint: 123, Theoretical value: 72.4575941879248, Error: -0.1048%
+ testSecondDerivative(movingRegressor, 148.0872440238788) // Datapoint: 123, Theoretical value: 148.547526597246, Error: -0.3099%
+ movingRegressor.push(8.13193603449435, flywheelPosition(134)) // Datapoint 134
+ testFirstDerivative(movingRegressor, 74.52404555335283) // Datapoint: 124, Theoretical value: 74.5966498720006, Error: -0.0973%
+ testSecondDerivative(movingRegressor, 151.38816260883686) // Datapoint: 124, Theoretical value: 151.824543946359, Error: -0.2874%
+})
+
+function testFirstDerivative (regressor, expectedValue) {
+ assert.ok(regressor.firstDerivative(0) === expectedValue, `First derivative should be ${expectedValue} Radians/sec at ${regressor.X.get(0)} sec, is ${regressor.firstDerivative(0)}`)
+}
+
+function testSecondDerivative (regressor, expectedValue) {
+ assert.ok(regressor.secondDerivative(0) === expectedValue, `Second derivative should be ${expectedValue} Radians/sec^2 at ${regressor.X.get(0)} sec, is ${regressor.secondDerivative(0)}`)
+}
+
+test.run()
diff --git a/app/engine/utils/OLSLinearSeries.js b/app/engine/utils/OLSLinearSeries.js
deleted file mode 100644
index 6d0c26541e..0000000000
--- a/app/engine/utils/OLSLinearSeries.js
+++ /dev/null
@@ -1,129 +0,0 @@
-'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-
- The LinearSeries is a datatype that represents a Linear Series. It allows
- values to be retrieved (like a FiFo buffer, or Queue) but it also includes
- a Linear Regressor to determine the slope, intercept and R^2 of this timeseries
- of x any y coordinates through Simple Linear Regression.
-
- At creation it can be determined that the Time Series is limited (i.e. after it
- is filled, the oldest will be pushed out of the queue) or that the the time series
- is unlimited (will only expand). The latter is activated by calling the creation with
- an empty argument.
-
- please note that for unlimited series it is up to the calling function to handle resetting
- the Linear Series when needed through the reset() call.
-
- A key constraint is to prevent heavy calculations at the end (due to large
- array based curve fitting) as this function is also used to calculate
- drag at the end of the recovery phase, which might happen on a Pi zero
-
- This implementation uses concepts that are described here:
- https://www.colorado.edu/amath/sites/default/files/attached-files/ch12_0.pdf
-*/
-
-import { createSeries } from './Series.js'
-
-import loglevel from 'loglevel'
-const log = loglevel.getLogger('RowingEngine')
-
-export function createOLSLinearSeries (maxSeriesLength = 0) {
- const X = createSeries(maxSeriesLength)
- const XX = createSeries(maxSeriesLength)
- const Y = createSeries(maxSeriesLength)
- const YY = createSeries(maxSeriesLength)
- const XY = createSeries(maxSeriesLength)
- let _slope = 0
- let _intercept = 0
- let _goodnessOfFit = 0
-
- function push (x, y) {
- if (x === undefined || isNaN(x) || y === undefined || isNaN(y)) { return }
- X.push(x)
- XX.push(x * x)
- Y.push(y)
- YY.push(y * y)
- XY.push(x * y)
-
- // Let's approximate the line through OLS
- if (X.length() >= 2 && X.sum() > 0) {
- _slope = (X.length() * XY.sum() - X.sum() * Y.sum()) / (X.length() * XX.sum() - X.sum() * X.sum())
- _intercept = (Y.sum() - (_slope * X.sum())) / X.length()
- const sse = YY.sum() - (_intercept * Y.sum()) - (_slope * XY.sum())
- const sst = YY.sum() - (Math.pow(Y.sum(), 2) / X.length())
- _goodnessOfFit = 1 - (sse / sst)
- } else {
- _slope = 0
- _intercept = 0
- _goodnessOfFit = 0
- }
- }
-
- function slope () {
- return _slope
- }
-
- function intercept () {
- return _intercept
- }
-
- function length () {
- return X.length()
- }
-
- function goodnessOfFit () {
- // This function returns the R^2 as a goodness of fit indicator
- if (X.length() >= 2) {
- return _goodnessOfFit
- } else {
- return 0
- }
- }
-
- function projectX (x) {
- if (X.length() >= 2) {
- return (_slope * x) + _intercept
- } else {
- return 0
- }
- }
-
- function projectY (y) {
- if (X.length() >= 2 && _slope !== 0) {
- return ((y - _intercept) / _slope)
- } else {
- log.error('OLS Regressor, attempted a Y-projection while slope was zero!')
- return 0
- }
- }
-
- function reliable () {
- return (X.length() >= 2 && _slope !== 0)
- }
-
- function reset () {
- X.reset()
- XX.reset()
- Y.reset()
- YY.reset()
- XY.reset()
- _slope = 0
- _intercept = 0
- _goodnessOfFit = 0
- }
-
- return {
- push,
- X,
- Y,
- slope,
- intercept,
- length,
- goodnessOfFit,
- projectX,
- projectY,
- reliable,
- reset
- }
-}
diff --git a/app/engine/utils/Series.js b/app/engine/utils/Series.js
index 15a67fd9d0..f0bd0057d1 100644
--- a/app/engine/utils/Series.js
+++ b/app/engine/utils/Series.js
@@ -1,26 +1,27 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
/**
- * This creates a series with a maximum number of values. It allows for determining the Average, Median, Number of Positive, number of Negative
- * @remark BE AWARE: The median function is extremely CPU intensive for larger series. Use the BinarySearchTree for that situation instead!
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
*
- * @param {number} [maxSeriesLength] The maximum length of the series (0 for unlimited)
+ * @file This creates a series with a maximum number of values. It allows for determining the Average, Median, Number of Positive, number of Negative
+ * BE AWARE: The median function is extremely CPU intensive for larger series. Use the BinarySearchTree for that situation instead!
+ * BE AWARE: Accumulators (seriesSum especially) are vulnerable to floating-point rounding errors causing drift. Special tests are present in the unit-tests, which should be run manually when this module is changed
+ */
+/**
+ * @param {number} maxSeriesLength - The maximum length of the series (0 for unlimited)
*/
export function createSeries (maxSeriesLength = 0) {
/**
* @type {Array}
*/
let seriesArray = []
- let seriesSum = 0
let numPos = 0
let numNeg = 0
let min = undefined
let max = undefined
+ let seriesSum = null
/**
- * @param {float} value to be added to the series
+ * @param {float} value - value to be added to the series
*/
function push (value) {
if (value === undefined || isNaN(value)) { return }
@@ -29,9 +30,7 @@ export function createSeries (maxSeriesLength = 0) {
if (max !== undefined) { max = Math.max(max, value) }
if (maxSeriesLength > 0 && seriesArray.length >= maxSeriesLength) {
- // The maximum of the array has been reached, we have to create room by removing the first
- // value from the array
- seriesSum -= seriesArray[0]
+ // The maximum of the array has been reached, we have to create room by removing the first value from the array
if (seriesArray[0] > 0) {
numPos--
} else {
@@ -46,7 +45,8 @@ export function createSeries (maxSeriesLength = 0) {
seriesArray.shift()
}
seriesArray.push(value)
- seriesSum += value
+ seriesSum = null
+
if (value > 0) {
numPos++
} else {
@@ -55,14 +55,14 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @output {number} length of the series
+ * @returns {number} length of the series
*/
function length () {
return seriesArray.length
}
/**
- * @output {float} value at the head of the series (i.e. the one first added)
+ * @returns {float} the oldest value of the series (i.e. the one first added)
*/
function atSeriesBegin () {
if (seriesArray.length > 0) {
@@ -73,7 +73,7 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @output {float} value at the tail of the series (i.e. the one last added)
+ * @returns {float} the youngest value of the series (i.e. the one last added)
*/
function atSeriesEnd () {
if (seriesArray.length > 0) {
@@ -84,8 +84,8 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @param {number} position
- * @output {float} value at a specific postion, starting at 0
+ * @param {integer} position - position to be retrieved, starting at 0
+ * @returns {float} value at that specific position in the series
*/
function get (position) {
if (position >= 0 && position < seriesArray.length) {
@@ -96,8 +96,8 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @param {number} testedValue
- * @output {number} number of values in the series above the tested value
+ * @param {float} testedValue - tested value
+ * @returns {integer} count of values in the series above the tested value
*/
function numberOfValuesAbove (testedValue) {
if (testedValue === 0) {
@@ -116,8 +116,8 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @param {number} testedValue
- * @output {number} number of values in the series below or equal to the tested value
+ * @param {float} testedValue - tested value
+ * @returns {integer} number of values in the series below or equal to the tested value
*/
function numberOfValuesEqualOrBelow (testedValue) {
if (testedValue === 0) {
@@ -136,25 +136,29 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @output {float} sum of the entire series
+ * @returns {float} sum of the entire series
+ * @description This determines the total sum of the series. As a running sum becomes numerically unstable over long series, we need to summarise this via a reduce
*/
function sum () {
+ if (seriesSum === null) {
+ seriesSum = (seriesArray.length > 0 ? seriesArray.reduce((total, item) => total + item) : 0)
+ }
return seriesSum
}
/**
- * @output {float} average of the entire series
+ * @returns {float} average of the entire series
*/
function average () {
if (seriesArray.length > 0) {
- return seriesSum / seriesArray.length
+ return sum() / seriesArray.length
} else {
return 0
}
}
/**
- * @output {float} smallest element in the series
+ * @returns {float} smallest element in the series
*/
function minimum () {
if (seriesArray.length > 0) {
@@ -166,7 +170,7 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @output {float} largest value in the series
+ * @returns {float} largest value in the series
*/
function maximum () {
if (seriesArray.length > 0) {
@@ -178,7 +182,8 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @output {float} median of the series (DO NOT USE FOR LARGE SERIES!)
+ * @returns {float} median of the series
+ * @description returns the median of the series. As this is a CPU-intensive approach, DO NOT USE FOR LARGE SERIES! For larger series, use the BinarySearchTree.js instead
*/
function median () {
if (seriesArray.length > 0) {
@@ -191,7 +196,7 @@ export function createSeries (maxSeriesLength = 0) {
}
/**
- * @output {array} returns the entire series
+ * @returns {array} returns the entire series
*/
function series () {
if (seriesArray.length > 0) {
@@ -207,7 +212,6 @@ export function createSeries (maxSeriesLength = 0) {
function reset () {
seriesArray = /** @type {Array} */(/** @type {unknown} */(null))
seriesArray = []
- seriesSum = 0
numPos = 0
numNeg = 0
min = undefined
diff --git a/app/engine/utils/Series.test.js b/app/engine/utils/Series.test.js
index 1d9962c3ec..94d63d45fb 100644
--- a/app/engine/utils/Series.test.js
+++ b/app/engine/utils/Series.test.js
@@ -1,15 +1,19 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
/**
- * As this object is fundamental for most other utility objects, we must test its behaviour quite thoroughly
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file Tests of the Series object. As this object is fundamental for most other utility objects, we must test its behaviour quite thoroughly
+ * Please note: this file contains commented out stress tests of the length(), sum(), average() functions, to detect any issues with numerical stability
+ * As these tests tend to run in the dozens of minutes, we do not run them systematically, but they should be run when the series object is changed.
*/
import { test } from 'uvu'
import * as assert from 'uvu/assert'
import { createSeries } from './Series.js'
+/**
+ * @description Test behaviour for no datapoints
+ */
test('Series behaviour with an empty series', () => {
const dataSeries = createSeries(3)
testLength(dataSeries, 0)
@@ -26,6 +30,9 @@ test('Series behaviour with an empty series', () => {
testMaximum(dataSeries, 0)
})
+/**
+ * @description Test behaviour for a single datapoint
+ */
test('Series behaviour with a single pushed value. Series = [9]', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -43,6 +50,9 @@ test('Series behaviour with a single pushed value. Series = [9]', () => {
testMaximum(dataSeries, 9)
})
+/**
+ * @description Test behaviour for two datapoints
+ */
test('Series behaviour with a second pushed value. Series = [9, 3]', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -61,6 +71,9 @@ test('Series behaviour with a second pushed value. Series = [9, 3]', () => {
testMaximum(dataSeries, 9)
})
+/**
+ * @description Test behaviour for three datapoints
+ */
test('Series behaviour with a third pushed value. Series = [9, 3, 6]', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -80,6 +93,9 @@ test('Series behaviour with a third pushed value. Series = [9, 3, 6]', () => {
testMaximum(dataSeries, 9)
})
+/**
+ * @description Test behaviour for four datapoints
+ */
test('Series behaviour with a fourth pushed value. Series = [3, 6, 12]', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -100,6 +116,9 @@ test('Series behaviour with a fourth pushed value. Series = [3, 6, 12]', () => {
testMaximum(dataSeries, 12)
})
+/**
+ * @description Test behaviour for five datapoints
+ */
test('Series behaviour with a fifth pushed value. Series = [6, 12, -3]', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -121,6 +140,9 @@ test('Series behaviour with a fifth pushed value. Series = [6, 12, -3]', () => {
testMaximum(dataSeries, 12)
})
+/**
+ * @description Test behaviour for recalculations of the min/max values
+ */
test('Series behaviour pushing out the min and max value and forcing a recalculate of min/max via the array.', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -136,6 +158,9 @@ test('Series behaviour pushing out the min and max value and forcing a recalcula
testMaximum(dataSeries, 6)
})
+/**
+ * @description Test behaviour for recalculations of the min/max values
+ */
test('Series behaviour pushing out the min and max value, replacing them just in time.', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -151,6 +176,9 @@ test('Series behaviour pushing out the min and max value, replacing them just in
testMaximum(dataSeries, 12)
})
+/**
+ * @description Test behaviour after a reset()
+ */
test('Series behaviour with a five pushed values followed by a reset, Series = []', () => {
const dataSeries = createSeries(3)
dataSeries.push(9)
@@ -171,6 +199,43 @@ test('Series behaviour with a five pushed values followed by a reset, Series = [
testMedian(dataSeries, 0)
})
+/* These stress tests test the reliability of the sum(), average() and length() function after a huge number of updates
+// This specific test takes a long time (over 10 minutes), so only run them manually when changing the series module
+// Javascript maximum array length is 4294967295, as heap memory is limited, we stay with 2^25 datapoints
+test('Stress test of the series object, 33.554.432 (2^25) datapoints', () => {
+ const dataSeries = createSeries()
+ let j = 0
+ let randomvalue
+ while (j < 16777216) {
+ randomvalue = Math.random()
+ dataSeries.push(randomvalue)
+ dataSeries.push(1 - randomvalue)
+ j++
+ }
+ testLength(dataSeries, 33554432)
+ testSum(dataSeries, 16777216)
+ testAverage(dataSeries, 0.5)
+ testMedian(dataSeries, 0.5)
+})
+
+// Javascript maximum array length is 4294967295, as heap memory is limited, we stay with 2^25 datapoints
+// This test takes several hours (!) due to the many large array shifts, so only run them manually when changing the series module
+test('Stress test of the series object, 67.108.864 datapoints, with a maxLength of 33.554.432 (2^25)', () => {
+ const dataSeries = createSeries(33554432)
+ let j = 0
+ let randomvalue
+ while (j < 33554432) {
+ randomvalue = Math.random()
+ dataSeries.push(randomvalue)
+ dataSeries.push(1 - randomvalue)
+ j++
+ }
+ testLength(dataSeries, 33554432)
+ testSum(dataSeries, 16777216)
+ testAverage(dataSeries, 0.5)
+ testMedian(dataSeries, 0.5)
+}) */
+
function testLength (series, expectedValue) {
assert.ok(series.length() === expectedValue, `Expected length should be ${expectedValue}, encountered ${series.length()}`)
}
diff --git a/app/engine/utils/StreamFilter.js b/app/engine/utils/StreamFilter.js
index 6f77c68664..9be00afa09 100644
--- a/app/engine/utils/StreamFilter.js
+++ b/app/engine/utils/StreamFilter.js
@@ -4,7 +4,7 @@
*/
/**
* This keeps a series of specified length, which we can ask for an moving median
- *
+ * This is used by RowingStatistics.js to aggregate over multiple stroke phases
*/
import { createLabelledBinarySearchTree } from './BinarySearchTree.js'
@@ -20,9 +20,9 @@ export function createStreamFilter (maxLength, defaultValue) {
if (maxLength > 0) {
position = (position + 1) % maxLength
bst.remove(position)
- bst.push(position, dataPoint)
+ bst.push(position, dataPoint, 1)
} else {
- bst.push(position, dataPoint)
+ bst.push(position, dataPoint, 1)
}
cleanDatapoint = bst.median()
}
diff --git a/app/engine/utils/TSLinearSeries.js b/app/engine/utils/TSLinearSeries.js
new file mode 100644
index 0000000000..17b8f8099e
--- /dev/null
+++ b/app/engine/utils/TSLinearSeries.js
@@ -0,0 +1,316 @@
+'use strict'
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file The TSLinearSeries is a datatype that represents a Weighted Linear Series. It allows
+ * values to be retrieved (like a FiFo buffer, or Queue) but it also includes
+ * a Weighted Theil-Sen estimator Linear Regressor to determine the slope of this timeseries.
+ *
+ * At creation its length is determined. After it is filled, the oldest will be pushed
+ * out of the queue automatically. This is a property of the Series object
+ *
+ * A key constraint is to prevent heavy calculations at the end (due to large
+ * array based curve fitting), which might happen on a Pi zero
+ *
+ * In order to prevent unnecessary calculations, this implementation uses lazy evaluation,
+ * so it will calculate the intercept and goodnessOfFit only when needed, as many use cases
+ * only need the slope (at first).
+ *
+ * This implementation uses concepts that are described here:
+ * https://en.wikipedia.org/wiki/Theil%E2%80%93Sen_estimator
+ *
+ * The array is ordered such that x[0] is the oldest, and x[x.length-1] is the youngest
+ */
+
+import { createSeries } from './Series.js'
+import { createLabelledBinarySearchTree } from './BinarySearchTree.js'
+
+import loglevel from 'loglevel'
+const log = loglevel.getLogger('RowingEngine')
+
+/**
+ * @param {integer} maxSeriesLength - the maximum length of the linear series, default = 0 for unlimited
+ */
+export function createTSLinearSeries (maxSeriesLength = 0) {
+ const X = createSeries(maxSeriesLength)
+ const Y = createSeries(maxSeriesLength)
+ const weight = createSeries(maxSeriesLength)
+ const WY = createSeries(maxSeriesLength)
+ const A = createLabelledBinarySearchTree()
+
+ let _A = 0
+ let _B = 0
+ let _sst = 0
+ let _goodnessOfFit = 0
+
+ /**
+ * @param {float} x - the x value of the datapoint
+ * @param {float} y - the y value of the datapoint
+ * @param {float} w - the weight of the datapoint (optional, defaults to 1 for unweighted regression)
+ * Invariant: BinarySearchTree A contains all calculated a's (as in the general formula y = a * x + b),
+ * where the a's are labeled in the BinarySearchTree with their Xi when they BEGIN in the point (Xi, Yi)
+ */
+ function push (x, y, w = 1) {
+ if (x === undefined || isNaN(x) || y === undefined || isNaN(y)) { return }
+
+ if (maxSeriesLength > 0 && X.length() >= maxSeriesLength) {
+ // The maximum of the array has been reached, so when pushing the x,y the array gets shifted,
+ // thus we have to remove the a's belonging to the current position X[0] as well before this value is trashed
+ A.remove(X.get(0))
+ }
+
+ X.push(x)
+ Y.push(y)
+ weight.push(w)
+ WY.push(w * y)
+
+ // Calculate all the slopes of the newly added point
+ if (X.length() > 1) {
+ // There are at least two points in the X and Y arrays, so let's add the new datapoint
+ let i = 0
+ let slope
+ let combinedweight
+ while (i < X.length() - 1) {
+ // Calculate the slope with all preceding datapoints and X.length() - 1'th datapoint (as the array starts at zero)
+ slope = calculateSlope(i, X.length() - 1)
+ combinedweight = weight.get(i) * w
+ A.push(X.get(i), slope, combinedweight)
+ i++
+ }
+ }
+
+ // Calculate the median of the slopes
+ if (X.length() > 1) {
+ _A = A.weightedMedian()
+ } else {
+ _A = 0
+ }
+
+ // Invalidate the previously calculated intercept and goodnessOfFit. We'll only calculate them if we need them
+ _B = null
+ _sst = null
+ _goodnessOfFit = null
+ }
+
+ /**
+ * @returns {float} the slope of the linear function
+ */
+ function slope () {
+ return _A
+ }
+
+ /**
+ * @returns {float} the intercept of the linear function
+ */
+ function intercept () {
+ calculateIntercept()
+ return _B
+ }
+
+ /**
+ * @returns {float} the coefficient a of the linear function y = a * x + b
+ */
+ function coefficientA () {
+ return _A
+ }
+
+ /**
+ * @returns {float} the coefficient b of the linear function y = a * x + b
+ */
+ function coefficientB () {
+ calculateIntercept()
+ return _B
+ }
+
+ /**
+ * @returns {integer} the length of the stored series
+ */
+ function length () {
+ return X.length()
+ }
+
+ /**
+ * @returns {float} the R^2 as a global goodness of fit indicator
+ * It will automatically recalculate the _goodnessOfFit when it isn't defined
+ * This lazy approach is intended to prevent unnecessary calculations, especially when there is a batch of datapoints
+ * pushed from the TSQuadratic regressor processing its linear residual
+ * @see [Goodness-of-Fit Statistics] {@link https://web.maths.unsw.edu.au/~adelle/Garvan/Assays/GoodnessOfFit.html}
+ */
+ function goodnessOfFit () {
+ let i = 0
+ let sse = 0
+ calculateIntercept()
+ if (_goodnessOfFit === null) {
+ if (X.length() >= 2) {
+ _sst = 0
+
+ // Calculate weighted R^2
+ const weightedAverageY = WY.sum() / weight.sum()
+
+ while (i < X.length()) {
+ sse += weight.get(i) * Math.pow(Y.get(i) - projectX(X.get(i)), 2)
+ _sst += weight.get(i) * Math.pow(Y.get(i) - weightedAverageY, 2)
+ i++
+ }
+
+ switch (true) {
+ case (sse === 0):
+ _goodnessOfFit = 1
+ break
+ case (sse > _sst):
+ // This is a pretty bad fit as the error is bigger than just using the line for the average y as intercept
+ _goodnessOfFit = 0.01
+ break
+ case (_sst !== 0):
+ _goodnessOfFit = 1 - (sse / _sst)
+ break
+ default:
+ // When SST = 0, R2 isn't defined
+ _goodnessOfFit = 0.01
+ }
+ } else {
+ _goodnessOfFit = 0
+ }
+ }
+ return _goodnessOfFit
+ }
+
+ /**
+ * @param {integer} position - The position in the series for which the Local Goodness Of Fit has to be calculated
+ * @returns {float} the local R^2 as a local goodness of fit indicator
+ */
+ function localGoodnessOfFit (position) {
+ if (_sst === null) {
+ // Force the recalculation of the _sst
+ goodnessOfFit()
+ }
+ if (X.length() >= 2 && position < X.length()) {
+ const weightedSquaredError = weight.get(position) * Math.pow((Y.get(position) - projectX(X.get(position))), 2)
+ /* eslint-disable no-unreachable -- rather be systematic and add a break in all case statements */
+ switch (true) {
+ case (weightedSquaredError === 0):
+ return 1
+ break
+ case (weightedSquaredError > _sst):
+ // This is a pretty bad fit as the error is bigger than just using the line for the average y as intercept
+ return 0.01
+ break
+ case (_sst !== 0):
+ return Math.min(Math.max(1 - ((weightedSquaredError * X.length()) / _sst), 0), 1)
+ break
+ default:
+ // When _SST = 0, localGoodnessOfFit isn't defined
+ return 0.01
+ }
+ /* eslint-enable no-unreachable */
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {float} x - the x value to be projected
+ * @returns {float} the resulting y value when projected via the linear function
+ */
+ function projectX (x) {
+ if (X.length() >= 2) {
+ calculateIntercept()
+ return (_A * x) + _B
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {float} y - the y value to be solved
+ * @returns {float} the resulting x value when solved via the linear function
+ */
+ function projectY (y) {
+ if (X.length() >= 2 && _A !== 0) {
+ calculateIntercept()
+ return ((y - _B) / _A)
+ } else {
+ log.error('TS Linear Regressor, attempted a Y-projection while slope was zero!')
+ return 0
+ }
+ }
+
+ /**
+ * @param {integer} pointOne - The position in the series of the first datapoint used for the slope calculation
+ * @param {integer} pointTwo - The position in the series of the second datapoint used for the slope calculation
+ * @returns {float} the slope of the linear function
+ */
+ function calculateSlope (pointOne, pointTwo) {
+ if (pointOne !== pointTwo && X.get(pointOne) !== X.get(pointTwo)) {
+ return ((Y.get(pointTwo) - Y.get(pointOne)) / (X.get(pointTwo) - X.get(pointOne)))
+ } else {
+ log.error('TS Linear Regressor, Division by zero prevented!')
+ return 0
+ }
+ }
+
+ /**
+ * @description This helper function calculates the intercept and stores it in _B
+ */
+ function calculateIntercept () {
+ // Calculate all the intercepts for the newly added point and the newly calculated A, when needed
+ // This function is only called when an intercept is really needed, as this saves a lot of CPU cycles when only a slope suffices
+ const B = createLabelledBinarySearchTree()
+ if (_B === null) {
+ if (X.length() > 1) {
+ // There are at least two points in the X and Y arrays, so let's calculate the intercept
+ let i = 0
+ while (i < X.length()) {
+ // Please note, we recreate the B-tree for each newly added datapoint anyway, so the label i isn't relevant
+ B.push(i, (Y.get(i) - (_A * X.get(i))), weight.get(i))
+ i++
+ }
+ _B = B.weightedMedian()
+ } else {
+ _B = 0
+ }
+ }
+ B.reset()
+ }
+
+ /**
+ * @returns {boolean} whether the linear regression should be considered reliable to produce results
+ */
+ function reliable () {
+ return (X.length() >= 2)
+ }
+
+ /**
+ * @description This function is used for clearing data and state, bringing it back to its original state
+ */
+ function reset () {
+ if (X.length() > 0) {
+ // There is something to reset
+ X.reset()
+ Y.reset()
+ weight.reset()
+ WY.reset()
+ A.reset()
+ _A = 0
+ _B = 0
+ _goodnessOfFit = 0
+ }
+ }
+
+ return {
+ push,
+ X,
+ Y,
+ slope,
+ intercept,
+ coefficientA,
+ coefficientB,
+ length,
+ goodnessOfFit,
+ localGoodnessOfFit,
+ projectX,
+ projectY,
+ reliable,
+ reset
+ }
+}
diff --git a/app/engine/utils/FullTSLinearSeries.test.js b/app/engine/utils/TSLinearSeries.test.js
similarity index 66%
rename from app/engine/utils/FullTSLinearSeries.test.js
rename to app/engine/utils/TSLinearSeries.test.js
index b0c29955c2..e6aec4f722 100644
--- a/app/engine/utils/FullTSLinearSeries.test.js
+++ b/app/engine/utils/TSLinearSeries.test.js
@@ -5,7 +5,7 @@
import { test } from 'uvu'
import * as assert from 'uvu/assert'
-import { createTSLinearSeries } from './FullTSLinearSeries.js'
+import { createTSLinearSeries } from './TSLinearSeries.js'
test('Correct behaviour of a series after initialisation', () => {
const dataSeries = createTSLinearSeries(3)
@@ -27,9 +27,10 @@ test('Correct behaviour of a series after initialisation', () => {
testSlopeEquals(dataSeries, 0)
testInterceptEquals(dataSeries, 0)
testGoodnessOfFitEquals(dataSeries, 0)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 0)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 1 datapoint', () => {
+test('Correct behaviour of a series after several puhed values, function y = 3x - 6, noisefree, 1 datapoint', () => {
const dataSeries = createTSLinearSeries(3)
testLength(dataSeries, 0)
dataSeries.push(5, 9)
@@ -51,9 +52,10 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testSlopeEquals(dataSeries, 0)
testInterceptEquals(dataSeries, 0)
testGoodnessOfFitEquals(dataSeries, 0)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 0)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 2 datapoints', () => {
+test('Correct behaviour of a series after several puhed values, function y = 3x - 6, noisefree, 2 datapoints', () => {
const dataSeries = createTSLinearSeries(3)
dataSeries.push(5, 9)
dataSeries.push(3, 3)
@@ -75,9 +77,11 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testSlopeEquals(dataSeries, 3)
testInterceptEquals(dataSeries, -6)
testGoodnessOfFitEquals(dataSeries, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 1, 1)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 3 datapoints', () => {
+test('Correct behaviour of a series after several puhed values, function y = 3x - 6, noisefree, 3 datapoints', () => {
const dataSeries = createTSLinearSeries(3)
dataSeries.push(5, 9)
dataSeries.push(3, 3)
@@ -100,9 +104,41 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testSlopeEquals(dataSeries, 3)
testInterceptEquals(dataSeries, -6)
testGoodnessOfFitEquals(dataSeries, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 1, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 2, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 3, 0) // Overshooting the length of the series
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 4 datapoints', () => {
+test('Correct behaviour of a series after several puhed values, function y = 3x - 6, noisefree, 3 datapoints, uniform (halved) weights', () => {
+ const dataSeries = createTSLinearSeries(3)
+ dataSeries.push(5, 9, 0.5)
+ dataSeries.push(3, 3, 0.5)
+ dataSeries.push(4, 6, 0.5)
+ testLength(dataSeries, 3)
+ testXAtSeriesBegin(dataSeries, 5)
+ testYAtSeriesBegin(dataSeries, 9)
+ testXAtSeriesEnd(dataSeries, 4)
+ testYAtSeriesEnd(dataSeries, 6)
+ testNumberOfXValuesAbove(dataSeries, 0, 3)
+ testNumberOfYValuesAbove(dataSeries, 0, 3)
+ testNumberOfXValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfYValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfXValuesAbove(dataSeries, 10, 0)
+ testNumberOfYValuesAbove(dataSeries, 10, 0)
+ testNumberOfXValuesEqualOrBelow(dataSeries, 10, 3)
+ testNumberOfYValuesEqualOrBelow(dataSeries, 10, 3)
+ testXSum(dataSeries, 12)
+ testYSum(dataSeries, 18)
+ testSlopeEquals(dataSeries, 3)
+ testInterceptEquals(dataSeries, -6)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 1, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 2, 1)
+})
+
+test('Correct behaviour of a series after several puhed values, function y = 3x - 6, noisefree, 4 datapoints', () => {
const dataSeries = createTSLinearSeries(3)
dataSeries.push(5, 9)
dataSeries.push(3, 3)
@@ -126,9 +162,12 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testSlopeEquals(dataSeries, 3)
testInterceptEquals(dataSeries, -6)
testGoodnessOfFitEquals(dataSeries, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 1, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 2, 1)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 5 datapoints', () => {
+test('Correct behaviour of a series after several puhed values, function y = 3x - 6, noisefree, 5 datapoints', () => {
const dataSeries = createTSLinearSeries(3)
dataSeries.push(5, 9)
dataSeries.push(3, 3)
@@ -153,9 +192,56 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testSlopeEquals(dataSeries, 3)
testInterceptEquals(dataSeries, -6)
testGoodnessOfFitEquals(dataSeries, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 1, 1)
+ testLocalGoodnessOfFitEquals(dataSeries, 2, 1)
+})
+
+test('Series with 5 elements, with 2 noisy datapoints, ideal function y = 3x - 6, uniform weights', () => {
+ const dataSeries = createTSLinearSeries(5)
+ dataSeries.push(5, 9)
+ dataSeries.push(3, 2)
+ dataSeries.push(4, 7)
+ dataSeries.push(6, 12)
+ dataSeries.push(1, -3)
+ testSlopeEquals(dataSeries, 3) // Theoretical noisefree value 3
+ testInterceptEquals(dataSeries, -6) // Theoretical noisefree value -6
+ testGoodnessOfFitEquals(dataSeries, 0.9858356940509915) // Ideal value 1
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 1)
+ testXProjectionEquals(dataSeries, 1, -3) // Theoretical noisefree value -3
+ testLocalGoodnessOfFitEquals(dataSeries, 1, 0.9645892351274787)
+ testXProjectionEquals(dataSeries, 3, 3) // Theoretical noisefree value 3
+ testLocalGoodnessOfFitEquals(dataSeries, 2, 0.9645892351274787)
+ testXProjectionEquals(dataSeries, 4, 6) // Theoretical noisefree value 6
+ testLocalGoodnessOfFitEquals(dataSeries, 3, 1)
+ testXProjectionEquals(dataSeries, 5, 9) // Theoretical noisefree value 9
+ testLocalGoodnessOfFitEquals(dataSeries, 4, 1)
+ testXProjectionEquals(dataSeries, 6, 12) // Theoretical noisefree value 12
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 4 datapoints and a reset', () => {
+test('Series with 5 elements, with 2 noisy datapoints, ideal function y = 3x - 6, non-uniform weights', () => {
+ const dataSeries = createTSLinearSeries(5)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 2, 0.5)
+ dataSeries.push(4, 7, 0.5)
+ dataSeries.push(6, 12, 1)
+ dataSeries.push(1, -3, 1)
+ testSlopeEquals(dataSeries, 3) // Theoretical noisefree value 3
+ testInterceptEquals(dataSeries, -6) // Theoretical noisefree value -6
+ testGoodnessOfFitEquals(dataSeries, 0.9925338310779281) // Ideal value 1
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 1)
+ testXProjectionEquals(dataSeries, 1, -3) // Theoretical noisefree value -3
+ testLocalGoodnessOfFitEquals(dataSeries, 1, 0.9813345776948204)
+ testXProjectionEquals(dataSeries, 3, 3) // Theoretical noisefree value 3
+ testLocalGoodnessOfFitEquals(dataSeries, 2, 0.9813345776948204)
+ testXProjectionEquals(dataSeries, 4, 6) // Theoretical noisefree value 6
+ testLocalGoodnessOfFitEquals(dataSeries, 3, 1)
+ testXProjectionEquals(dataSeries, 5, 9) // Theoretical noisefree value 9
+ testLocalGoodnessOfFitEquals(dataSeries, 4, 1)
+ testXProjectionEquals(dataSeries, 6, 12) // Theoretical noisefree value 12
+})
+
+test('Correct behaviour of a series after several puhed values, function y = 3x - 6, noisefree, 4 datapoints and a reset', () => {
const dataSeries = createTSLinearSeries(3)
dataSeries.push(5, 9)
dataSeries.push(3, 3)
@@ -180,18 +266,7 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testSlopeEquals(dataSeries, 0)
testInterceptEquals(dataSeries, 0)
testGoodnessOfFitEquals(dataSeries, 0)
-})
-
-test('Series with 5 elements, with 2 noisy datapoints', () => {
- const dataSeries = createTSLinearSeries(5)
- dataSeries.push(5, 9)
- dataSeries.push(3, 2)
- dataSeries.push(4, 7)
- dataSeries.push(6, 12)
- dataSeries.push(1, -3)
- testSlopeBetween(dataSeries, 2.9, 3.1)
- testInterceptBetween(dataSeries, -6.3, -5.8)
- testGoodnessOfFitBetween(dataSeries, 0.9, 1.0)
+ testLocalGoodnessOfFitEquals(dataSeries, 0, 0)
})
function testLength (series, expectedValue) {
@@ -242,27 +317,20 @@ function testSlopeEquals (series, expectedValue) {
assert.ok(series.slope() === expectedValue, `Expected slope to be ${expectedValue}, encountered a ${series.slope()}`)
}
-function testSlopeBetween (series, expectedValueAbove, expectedValueBelow) {
- assert.ok(series.slope() > expectedValueAbove, `Expected slope to be above ${expectedValueAbove}, encountered a ${series.slope()}`)
- assert.ok(series.slope() < expectedValueBelow, `Expected slope to be below ${expectedValueBelow}, encountered a ${series.slope()}`)
-}
-
function testInterceptEquals (series, expectedValue) {
assert.ok(series.intercept() === expectedValue, `Expected intercept to be ${expectedValue}, encountered ${series.intercept()}`)
}
-function testInterceptBetween (series, expectedValueAbove, expectedValueBelow) {
- assert.ok(series.intercept() > expectedValueAbove, `Expected intercept to be above ${expectedValueAbove}, encountered ${series.intercept()}`)
- assert.ok(series.intercept() < expectedValueBelow, `Expected intercept to be below ${expectedValueBelow}, encountered ${series.intercept()}`)
-}
-
function testGoodnessOfFitEquals (series, expectedValue) {
assert.ok(series.goodnessOfFit() === expectedValue, `Expected goodnessOfFit to be ${expectedValue}, encountered ${series.goodnessOfFit()}`)
}
-function testGoodnessOfFitBetween (series, expectedValueAbove, expectedValueBelow) {
- assert.ok(series.goodnessOfFit() > expectedValueAbove, `Expected goodnessOfFit to be above ${expectedValueAbove}, encountered ${series.goodnessOfFit()}`)
- assert.ok(series.goodnessOfFit() < expectedValueBelow, `Expected goodnessOfFit to be below ${expectedValueBelow}, encountered ${series.goodnessOfFit()}`)
+function testLocalGoodnessOfFitEquals (series, position, expectedValue) {
+ assert.ok(series.localGoodnessOfFit(position) === expectedValue, `Expected localGoodnessOfFit at position ${position} to be ${expectedValue}, encountered ${series.localGoodnessOfFit(position)}`)
+}
+
+function testXProjectionEquals (series, value, expectedValue) {
+ assert.ok(series.projectX(value) === expectedValue, `Expected projectX at value ${value} to be ${expectedValue}, encountered ${series.projectX(value)}`)
}
test.run()
diff --git a/app/engine/utils/TSQuadraticSeries.js b/app/engine/utils/TSQuadraticSeries.js
new file mode 100644
index 0000000000..c7d3ca07e0
--- /dev/null
+++ b/app/engine/utils/TSQuadraticSeries.js
@@ -0,0 +1,380 @@
+'use strict'
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file The TSQuadraticSeries is a datatype that represents a Quadratic Series. It allows
+ * values to be retrieved (like a FiFo buffer, or Queue) but it also includes
+ * a Theil-Sen Quadratic Regressor to determine the coefficients of this dataseries.
+ *
+ * At creation its maximum length is set. After the buffer is filled, the oldest will be pushed
+ * out of the buffer automatically.
+ *
+ * A key constraint is to prevent heavy calculations at the end of a stroke (due to large
+ * array based curve fitting), which might be performed on a Pi zero or Zero 2W
+ *
+ * In order to prevent unnecessary calculations, this implementation uses lazy evaluation,
+ * so it will calculate the B, C and goodnessOfFit only when needed, as many use cases
+ * only need the first and second derivative.
+ *
+ * The Theil-Sen implementation uses concepts that are described here:
+ * https://stats.stackexchange.com/questions/317777/theil-sen-estimator-for-polynomial,
+ *
+ * The determination of the coefficients is based on the Lagrange interpolation, which is described here:
+ * https://www.quora.com/How-do-I-find-a-quadratic-equation-from-points/answer/Robert-Paxson,
+ * https://www.physicsforums.com/threads/quadratic-equation-from-3-points.404174/
+ */
+
+import { createSeries } from './Series.js'
+import { createTSLinearSeries } from './TSLinearSeries.js'
+import { createLabelledBinarySearchTree } from './BinarySearchTree.js'
+
+import loglevel from 'loglevel'
+const log = loglevel.getLogger('RowingEngine')
+
+/**
+ * @param {integer} maxSeriesLength - the maximum length of the quadratic series, 0 for unlimited
+ */
+export function createTSQuadraticSeries (maxSeriesLength = 0) {
+ const X = createSeries(maxSeriesLength)
+ const Y = createSeries(maxSeriesLength)
+ const weight = createSeries(maxSeriesLength)
+ const WY = createSeries(maxSeriesLength)
+ const A = createLabelledBinarySearchTree()
+ const linearResidu = createTSLinearSeries(maxSeriesLength)
+ let _A = 0
+ let _B = 0
+ let _C = 0
+ let _sst = 0
+ let _goodnessOfFit = 0
+
+ /**
+ * @param {float} x - the x value of the datapoint
+ * @param {float} y - the y value of the datapoint
+ * @param {float} w - the weight of the datapoint (defaults to 1)
+ * Invariant: BinarySearchTree A contains all calculated a's (as in the general formula y = a * x^2 + b * x + c),
+ * where the a's are labeled in the BinarySearchTree with their Xi when they BEGIN in the point (Xi, Yi)
+ */
+ /* eslint-disable max-statements -- A lot of variables have to be set */
+ function push (x, y, w = 1) {
+ if (x === undefined || isNaN(x) || y === undefined || isNaN(y)) { return }
+
+ if (maxSeriesLength > 0 && X.length() >= maxSeriesLength) {
+ // The maximum of the array has been reached, so when pushing the new datapoint (x,y), the array will get shifted,
+ // thus we have to remove all the A's that start with the old position X0 BEFORE this value gets thrown away
+ A.remove(X.get(0))
+ }
+
+ X.push(x)
+ Y.push(y)
+ weight.push(w)
+ WY.push(w * y)
+ _A = 0
+ _B = 0
+ _C = 0
+ _sst = 0
+ _goodnessOfFit = 0
+
+ if (X.length() >= 3) {
+ // There are now at least three datapoints in the X and Y arrays, so let's calculate the A portion belonging for the new datapoint via Quadratic Theil-Sen regression
+ let i = 0
+ let j = 0
+
+ // First we calculate the A for the formula
+ let combinedweight = 0
+ let coeffA = 1
+ while (i < X.length() - 2) {
+ j = i + 1
+ while (j < X.length() - 1) {
+ combinedweight = weight.get(i) * weight.get(j) * w
+ coeffA = calculateA(i, j, X.length() - 1)
+ A.push(X.get(i), coeffA, combinedweight)
+ j++
+ }
+ i++
+ }
+ _A = A.weightedMedian()
+
+ // We invalidate the linearResidu, B, C, and goodnessOfFit, as this will trigger a recalculate when they are needed
+ linearResidu.reset()
+ _B = null
+ _C = null
+ _sst = null
+ _goodnessOfFit = null
+ }
+ }
+ /* eslint-enable max-statements */
+
+ /**
+ * @param {integer} position - the position in the flank of the requested value (default = 0)
+ * @returns {float} the first derivative of the quadratic function y = a x^2 + b x + c
+ */
+ function firstDerivativeAtPosition (position = 0) {
+ if (X.length() >= 3 && position < X.length()) {
+ calculateB()
+ return ((_A * 2 * X.get(position)) + _B)
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {integer} position - the position in the flank of the requested value (default = 0)
+ * @returns {float} the second derivative of the quadratic function y = a x^2 + b x + c
+ */
+ function secondDerivativeAtPosition (position = 0) {
+ if (X.length() >= 3 && position < X.length()) {
+ return (_A * 2)
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {float} x - the x value of the requested value
+ * @returns {float} the slope of the linear function
+ */
+ function slope (x) {
+ if (X.length() >= 3) {
+ calculateB()
+ return ((_A * 2 * x) + _B)
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @returns {float} the (quadratic) coefficient a of the quadratic function y = a x^2 + b x + c
+ */
+ function coefficientA () {
+ return _A
+ }
+
+ /**
+ * @returns {float} the (linear) coefficient b of the quadratic function y = a x^2 + b x + c
+ */
+ function coefficientB () {
+ calculateB()
+ return _B
+ }
+
+ /**
+ * @returns {float} the (intercept) coefficient c of the quadratic function y = a x^2 + b x + c
+ */
+ function coefficientC () {
+ calculateB()
+ calculateC()
+ return _C
+ }
+
+ /**
+ * @returns {float} the intercept of the quadratic function
+ */
+ function intercept () {
+ calculateB()
+ calculateC()
+ return _C
+ }
+
+ /**
+ * @returns {integer} the length of the stored series
+ */
+ function length () {
+ return X.length()
+ }
+
+ /**
+ * @returns {float} the R^2 as a global goodness of fit indicator
+ */
+ function goodnessOfFit () {
+ let i = 0
+ let sse = 0
+ if (_goodnessOfFit === null) {
+ calculateB()
+ calculateC()
+ if (X.length() >= 3) {
+ _sst = 0
+ const weightedAverageY = WY.sum() / weight.sum()
+
+ while (i < X.length()) {
+ sse += weight.get(i) * Math.pow(Y.get(i) - projectX(X.get(i)), 2)
+ _sst += weight.get(i) * Math.pow(Y.get(i) - weightedAverageY, 2)
+ i++
+ }
+
+ switch (true) {
+ case (sse === 0):
+ _goodnessOfFit = 1
+ break
+ case (sse > _sst):
+ // This is a pretty bad fit as the error is bigger than just using the line for the average y as intercept
+ _goodnessOfFit = 0.01
+ break
+ case (_sst !== 0):
+ _goodnessOfFit = 1 - (sse / _sst)
+ break
+ default:
+ // When _SST = 0, R2 isn't defined
+ _goodnessOfFit = 0.01
+ }
+ } else {
+ _goodnessOfFit = 0
+ }
+ }
+ return _goodnessOfFit
+ }
+
+ /**
+ * @returns {float} the local R^2 as a local goodness of fit indicator
+ */
+ function localGoodnessOfFit (position) {
+ if (_sst === null) {
+ // Force the recalculation of the _sst
+ goodnessOfFit()
+ }
+ if (X.length() >= 3 && position < X.length()) {
+ const squaredError = Math.pow((Y.get(position) - projectX(X.get(position))), 2)
+ /* eslint-disable no-unreachable -- rather be systematic and add a break in all case statements */
+ switch (true) {
+ case (squaredError === 0):
+ return 1
+ break
+ case (squaredError > _sst):
+ // This is a pretty bad fit as the error is bigger than just using the line for the average y as intercept
+ return 0.01
+ break
+ case (_sst !== 0):
+ return Math.min(Math.max(1 - ((squaredError * X.length()) / _sst), 0), 1)
+ break
+ default:
+ // When _SST = 0, localGoodnessOfFit isn't defined
+ return 0.01
+ }
+ /* eslint-enable no-unreachable */
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {float} x - the x value to be projected
+ * @returns {float} the resulting y value when projected via the linear function
+ */
+ function projectX (x) {
+ if (X.length() >= 3) {
+ calculateB()
+ calculateC()
+ return ((_A * x * x) + (_B * x) + _C)
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {integer} pointOne - The position in the series of the first datapoint used for the quadratic coefficient calculation
+ * @param {integer} pointTwo - The position in the series of the second datapoint used for the quadratic coefficient calculation
+ * @param {integer} pointThree - The position in the series of the third datapoint used for the quadratic coefficient calculation
+ * @returns {float} the coefficient A of the linear function
+ */
+ function calculateA (pointOne, pointTwo, pointThree) {
+ let result = 0
+ if (X.get(pointOne) !== X.get(pointTwo) && X.get(pointOne) !== X.get(pointThree) && X.get(pointTwo) !== X.get(pointThree)) {
+ // For the underlying math, see https://www.quora.com/How-do-I-find-a-quadratic-equation-from-points/answer/Robert-Paxson
+ result = (X.get(pointOne) * (Y.get(pointThree) - Y.get(pointTwo)) + Y.get(pointOne) * (X.get(pointTwo) - X.get(pointThree)) + (X.get(pointThree) * Y.get(pointTwo) - X.get(pointTwo) * Y.get(pointThree))) / ((X.get(pointOne) - X.get(pointTwo)) * (X.get(pointOne) - X.get(pointThree)) * (X.get(pointTwo) - X.get(pointThree)))
+ return result
+ } else {
+ log.error('TS Quadratic Regressor, Division by zero prevented in CalculateA!')
+ return 0
+ }
+ }
+
+ /**
+ * @description This helper function calculates the slope of the linear residu and stores it in _B
+ */
+ function calculateB () {
+ // Calculate all the linear slope for the newly added point and the newly calculated A
+ // This function is only called when a linear slope is really needed, as this saves a lot of CPU cycles when only a slope suffices
+ if (_B === null) {
+ if (X.length() >= 3) {
+ fillLinearResidu()
+ _B = linearResidu.slope()
+ } else {
+ _B = 0
+ }
+ }
+ }
+
+ /**
+ * @description This helper function calculates the intercept of the linear residu and stores it in _C
+ */
+ function calculateC () {
+ // Calculate all the intercept for the newly added point and the newly calculated A
+ // This function is only called when a linear intercept is really needed, as this saves a lot of CPU cycles when only a slope suffices
+ if (_C === null) {
+ if (X.length() >= 3) {
+ fillLinearResidu()
+ _C = linearResidu.intercept()
+ } else {
+ _C = 0
+ }
+ }
+ }
+
+ /**
+ * @description This helper function fills the linear residu
+ */
+ function fillLinearResidu () {
+ // To calculate the B and C via Linear regression over the residu, we need to fill it if empty
+ if (linearResidu.length() === 0) {
+ let i = 0
+ while (i < X.length()) {
+ linearResidu.push(X.get(i), Y.get(i) - (_A * Math.pow(X.get(i), 2)), weight.get(i))
+ i++
+ }
+ }
+ }
+
+ /**
+ * @returns {boolean} whether the quadratic regression should be considered reliable to produce results
+ */
+ function reliable () {
+ return (X.length() >= 3)
+ }
+
+ /**
+ * @description This function is used for clearing data and state
+ */
+ function reset () {
+ if (X.length() > 0) {
+ // There is something to reset
+ X.reset()
+ Y.reset()
+ weight.reset()
+ WY.reset()
+ A.reset()
+ linearResidu.reset()
+ _A = 0
+ _B = 0
+ _C = 0
+ _goodnessOfFit = 0
+ }
+ }
+
+ return {
+ push,
+ X,
+ Y,
+ firstDerivativeAtPosition,
+ secondDerivativeAtPosition,
+ slope,
+ coefficientA,
+ coefficientB,
+ coefficientC,
+ intercept,
+ length,
+ goodnessOfFit,
+ localGoodnessOfFit,
+ projectX,
+ reliable,
+ reset
+ }
+}
diff --git a/app/engine/utils/TSQuadraticSeries.test.js b/app/engine/utils/TSQuadraticSeries.test.js
new file mode 100644
index 0000000000..5ab31e5724
--- /dev/null
+++ b/app/engine/utils/TSQuadraticSeries.test.js
@@ -0,0 +1,1831 @@
+'use strict'
+/*
+ Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
+*/
+/**
+ * This tests the Quadratic Theil-Senn Regression algorithm. As regression is an estimation and methods have biasses,
+ * we need to accept some slack with respect to real-life examples
+ */
+import { test } from 'uvu'
+import * as assert from 'uvu/assert'
+
+import { createTSQuadraticSeries } from './TSQuadraticSeries.js'
+
+/**
+ * This series of tests focusses on testing the reliability of the quadratic estimator algorithm
+ */
+test('Quadratic Approximation startup behaviour', () => {
+ const dataSeries = createTSQuadraticSeries(10)
+ testCoefficientA(dataSeries, 0)
+ testCoefficientB(dataSeries, 0)
+ testCoefficientC(dataSeries, 0)
+ dataSeries.push(-1, 2)
+ testCoefficientA(dataSeries, 0)
+ testCoefficientB(dataSeries, 0)
+ testCoefficientC(dataSeries, 0)
+ dataSeries.push(0, 2)
+ testCoefficientA(dataSeries, 0)
+ testCoefficientB(dataSeries, 0)
+ testCoefficientC(dataSeries, 0)
+ dataSeries.push(1, 6)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+})
+
+test('Quadratic Approximation on a perfect noisefree function y = 2 * Math.pow(x, 2) + 2 * x + 2, 21 datapoints', () => {
+ // Data based on 2 x^2 + 2 x + 2
+ const dataSeries = createTSQuadraticSeries(21)
+ dataSeries.push(-10, 182)
+ dataSeries.push(-9, 146)
+ dataSeries.push(-8, 114)
+ dataSeries.push(-7, 86)
+ dataSeries.push(-6, 62)
+ dataSeries.push(-5, 42)
+ dataSeries.push(-4, 26)
+ dataSeries.push(-3, 14) // Pi ;)
+ dataSeries.push(-2, 6)
+ dataSeries.push(-1, 2)
+ dataSeries.push(0, 2)
+ dataSeries.push(1, 6)
+ dataSeries.push(2, 14)
+ dataSeries.push(3, 26)
+ dataSeries.push(4, 42)
+ dataSeries.push(5, 62)
+ dataSeries.push(6, 86)
+ dataSeries.push(7, 114)
+ dataSeries.push(8, 146)
+ dataSeries.push(9, 182)
+ dataSeries.push(10, 222)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+ testGoodnessOfFitEquals(dataSeries, 1)
+})
+
+test('Quadratic Approximation on a perfect noisefree function y = 2 * Math.pow(x, 2) + 2 * x + 2, with 10 datapoints and some shifting in the series', () => {
+ // Data based on 2 x^2 + 2 x + 2, split the dataset in two to see its behaviour when it is around the Vertex
+ const dataSeries = createTSQuadraticSeries(10)
+ dataSeries.push(-10, 182)
+ dataSeries.push(-9, 146)
+ dataSeries.push(-8, 114)
+ dataSeries.push(-7, 86)
+ dataSeries.push(-6, 62)
+ dataSeries.push(-5, 42)
+ dataSeries.push(-4, 26)
+ dataSeries.push(-3, 14) // Pi ;)
+ dataSeries.push(-2, 6)
+ dataSeries.push(-1, 2)
+ dataSeries.push(0, 2)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(1, 6)
+ dataSeries.push(2, 14)
+ dataSeries.push(3, 26)
+ dataSeries.push(4, 42)
+ dataSeries.push(5, 62)
+ dataSeries.push(6, 86)
+ dataSeries.push(7, 114)
+ dataSeries.push(8, 146)
+ dataSeries.push(9, 182)
+ dataSeries.push(10, 222)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+ testGoodnessOfFitEquals(dataSeries, 1)
+})
+
+test('Quadratic Approximation on function y = 4 * Math.pow(x, 2) + 4 * x + 4, noisefree', () => {
+ // Data based on 4 x^2 + 4 x + 4
+ const dataSeries = createTSQuadraticSeries(11)
+ dataSeries.push(-11, 444)
+ dataSeries.push(-10, 364)
+ dataSeries.push(-9, 292)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-8, 228)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-7, 172)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-6, 124)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-5, 84)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-4, 52)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-3, 28)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-2, 12)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-1, 4)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(0, 4)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(1, 12)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(2, 28)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(3, 52)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(4, 84)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(5, 124)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(6, 172)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(7, 228)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(8, 292)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(9, 364)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(10, 444)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 1)
+})
+
+test('Quadratic Approximation on function y = 4 * Math.pow(x, 2) + 4 * x + 4, with some noise (+/- 1)', () => {
+ // Data based on 4 x^2 + 4 x + 4
+ const dataSeries = createTSQuadraticSeries(11)
+ dataSeries.push(-11, 443)
+ dataSeries.push(-10, 365)
+ dataSeries.push(-9, 291)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, -36)
+ testCoefficientC(dataSeries, -195)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-8, 229)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4.333333333333334)
+ testCoefficientC(dataSeries, 7.166666666666671)
+ testGoodnessOfFitEquals(dataSeries, 0.9998746217034155)
+ dataSeries.push(-7, 171)
+ testCoefficientA(dataSeries, 3.3333333333333335)
+ testCoefficientB(dataSeries, -7.999999999999991)
+ testCoefficientC(dataSeries, -48.33333333333328)
+ testGoodnessOfFitEquals(dataSeries, 0.9998468647471163)
+ dataSeries.push(-6, 125)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 0.9999165499911914)
+ dataSeries.push(-5, 83)
+ testCoefficientA(dataSeries, 3.8666666666666667)
+ testCoefficientB(dataSeries, 1.8666666666666671)
+ testCoefficientC(dataSeries, -4.333333333333336) // This is quite acceptable as ORM ignores the C
+ testGoodnessOfFitEquals(dataSeries, 0.9999366117119067)
+ dataSeries.push(-4, 53)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 0.9999402806808002)
+ dataSeries.push(-3, 27)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9999042318865254)
+ dataSeries.push(-2, 13)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 0.9999495097395712)
+ dataSeries.push(-1, 3)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9999117149452151)
+ dataSeries.push(0, 5)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9998721709098177)
+ dataSeries.push(1, 11)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9997996371611135)
+ dataSeries.push(2, 29)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9996545703483187)
+ dataSeries.push(3, 51)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9993201651380683)
+ dataSeries.push(4, 85)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9987227718173796)
+ dataSeries.push(5, 123)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9986961263098004)
+ dataSeries.push(6, 173)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9993274803746546)
+ dataSeries.push(7, 227)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9996526505917571)
+ dataSeries.push(8, 293)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9998002774328024)
+ dataSeries.push(9, 363)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3) // We get a 3 instead of 4, which is quite acceptable (especially since ORM ignores the C)
+ testGoodnessOfFitEquals(dataSeries, 0.9998719089295779)
+ dataSeries.push(10, 444)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 0.9999558104799866)
+})
+
+test('Quadratic Approximation on function y = 4 * Math.pow(x, 2) + 4 * x + 4, with some noise (+/- 1) and spikes (+/- 9)', () => {
+ // Data based on 4 x^2 + 4 x + 4
+ const dataSeries = createTSQuadraticSeries(11)
+ dataSeries.push(-11, 443)
+ dataSeries.push(-10, 365)
+ dataSeries.push(-9, 291)
+ dataSeries.push(-8, 229)
+ dataSeries.push(-7, 171)
+ dataSeries.push(-6, 125)
+ dataSeries.push(-5, 83)
+ dataSeries.push(-4, 53)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 0.9999402806808002)
+ dataSeries.push(-3, 37) // FIRST SPIKE +9
+ testCoefficientA(dataSeries, 4.215277777777778)
+ testCoefficientB(dataSeries, 7.694940476190471)
+ testCoefficientC(dataSeries, 18.816964285714235)
+ testGoodnessOfFitEquals(dataSeries, 0.9997971509015441)
+ dataSeries.push(-2, 3) // SECOND SPIKE -9
+ testCoefficientA(dataSeries, 3.9714285714285715)
+ testCoefficientB(dataSeries, 3.6000000000000036) // Coefficient B seems to take a hit anyway
+ testCoefficientC(dataSeries, 2.842857142857163) // We get a 2.8 instead of 4, which is quite acceptable (especially since ORM ignores the C)
+ testGoodnessOfFitEquals(dataSeries, 0.9991656951087963)
+ dataSeries.push(-1, 3)
+ testCoefficientA(dataSeries, 3.9555555555555557)
+ testCoefficientB(dataSeries, 3.37777777777778)
+ testCoefficientC(dataSeries, 2.4222222222222243)
+ testGoodnessOfFitEquals(dataSeries, 0.9992769580376006)
+ dataSeries.push(0, 5)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9988530568930122)
+ dataSeries.push(1, 11)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9982053643291688)
+ dataSeries.push(2, 29)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9969166946967148)
+ dataSeries.push(3, 51)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9939797134586851)
+ dataSeries.push(4, 85)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 5)
+ testGoodnessOfFitEquals(dataSeries, 0.9888468297958631)
+ dataSeries.push(5, 123)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9886212128178015)
+ dataSeries.push(6, 173)
+ testCoefficientA(dataSeries, 4.044444444444444)
+ testCoefficientB(dataSeries, 3.822222222222223)
+ testCoefficientC(dataSeries, 3.577777777777783)
+ testGoodnessOfFitEquals(dataSeries, 0.9945681627011398)
+ dataSeries.push(7, 227)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9968997006175546)
+ dataSeries.push(8, 293)
+ testCoefficientA(dataSeries, 3.9047619047619047)
+ testCoefficientB(dataSeries, 4.888888888888889)
+ testCoefficientC(dataSeries, 2.9682539682539684) // This is quite acceptable as ORM ignores the C
+ testGoodnessOfFitEquals(dataSeries, 0.9995034675221599)
+ dataSeries.push(9, 363)
+ testCoefficientA(dataSeries, 4) // These results match up 100% with the previous test, showing that a spike has no carry over effects
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 3)
+ testGoodnessOfFitEquals(dataSeries, 0.9998719089295779)
+ dataSeries.push(10, 444)
+ testCoefficientA(dataSeries, 4)
+ testCoefficientB(dataSeries, 4)
+ testCoefficientC(dataSeries, 4)
+ testGoodnessOfFitEquals(dataSeries, 0.9999558104799866)
+})
+
+test('Quadratic TS Estimation should be decent for standard real-life example from MathBits with some noise', () => {
+ // Data based on https://mathbits.com/MathBits/TISection/Statistics2/quadratic.html
+ const dataSeries = createTSQuadraticSeries(13)
+ dataSeries.push(10, 115.6)
+ dataSeries.push(15, 157.2)
+ dataSeries.push(20, 189.2)
+ dataSeries.push(24, 220.8)
+ dataSeries.push(30, 253.8)
+ dataSeries.push(34, 269.2)
+ dataSeries.push(40, 284.8)
+ dataSeries.push(45, 285.0)
+ dataSeries.push(48, 277.4)
+ dataSeries.push(50, 269.2)
+ dataSeries.push(58, 244.2)
+ dataSeries.push(60, 231.4)
+ dataSeries.push(64, 180.4)
+  testCoefficientA(dataSeries, -0.17702838827838824) // In the example, the TI-84 results in -0.1737141137, which we consider acceptably close
+  testCoefficientB(dataSeries, 14.929144536019532) // In the example, the TI-84 results in 14.52117133, which we consider acceptably close
+  testCoefficientC(dataSeries, -31.325531135531037) // In the example, the TI-84 results in -21.89774466, which we consider acceptably close
+ testGoodnessOfFitEquals(dataSeries, 0.9781087883163964)
+})
+
+test('Quadratic TS Estimation should be decent for standard real-life example from VarsityTutors with some noise', () => {
+ // Test based on https://www.varsitytutors.com/hotmath/hotmath_help/topics/quadratic-regression
+ const dataSeries = createTSQuadraticSeries(7)
+ dataSeries.push(-3, 7.5)
+ dataSeries.push(-2, 3)
+ dataSeries.push(-1, 0.5)
+ dataSeries.push(0, 1)
+ dataSeries.push(1, 3)
+ dataSeries.push(2, 6)
+ dataSeries.push(3, 14)
+ testCoefficientA(dataSeries, 1.0833333333333333) // The example results in 1.1071 for OLS, which we consider acceptably close
+ testCoefficientB(dataSeries, 1.0833333333333333) // The example results in 1 for OLS, which we consider acceptably close
+ testCoefficientC(dataSeries, 0.8333333333333335) // The example results in 0.5714 for OLS, which we consider acceptably close
+ testGoodnessOfFitEquals(dataSeries, 0.9851153039832286)
+})
+
+test('Quadratic TS Estimation should be decent for standard example from VTUPulse with some noise, without the vertex being part of the dataset', () => {
+ // Test based on https://www.vtupulse.com/machine-learning/quadratic-polynomial-regression-model-solved-example/
+ const dataSeries = createTSQuadraticSeries(5)
+ dataSeries.push(3, 2.5)
+ dataSeries.push(4, 3.3)
+ dataSeries.push(5, 3.8)
+ dataSeries.push(6, 6.5)
+ dataSeries.push(7, 11.5)
+ testCoefficientA(dataSeries, 0.8583333333333334) // The example results in 0.7642857 for OLS, which we consider acceptably close given the small sample size
+ testCoefficientB(dataSeries, -6.420833333333334) // The example results in -5.5128571 for OLS, which we consider acceptably close given the small sample size
+ testCoefficientC(dataSeries, 14.387500000000003) // The example results in 12.4285714 for OLS, which we consider acceptably close given the small sample size
+ testGoodnessOfFitEquals(dataSeries, 0.9825283785404673)
+})
+
+test('Quadratic TS Estimation should be decent for standard real-life example from Uni Berlin with some noise without the vertex being part of the dataset', () => {
+ // Test based on https://www.geo.fu-berlin.de/en/v/soga/Basics-of-statistics/Linear-Regression/Polynomial-Regression/Polynomial-Regression---An-example/index.html
+ const dataSeries = createTSQuadraticSeries(25)
+ dataSeries.push(0.001399613, -0.23436656)
+ dataSeries.push(0.971629779, 0.64689524)
+ dataSeries.push(0.579119475, -0.92635765)
+ dataSeries.push(0.335693937, 0.13000706)
+ dataSeries.push(0.736736086, -0.89294863)
+ dataSeries.push(0.492572335, 0.33854780)
+ dataSeries.push(0.737133774, -1.24171910)
+ dataSeries.push(0.563693769, -0.22523318)
+ dataSeries.push(0.877603280, -0.12962722)
+ dataSeries.push(0.141426545, 0.37632006)
+ dataSeries.push(0.307203910, 0.30299077)
+ dataSeries.push(0.024509308, -0.21162739)
+ dataSeries.push(0.843665029, -0.76468719)
+ dataSeries.push(0.771206067, -0.90455412)
+ dataSeries.push(0.149670258, 0.77097952)
+ dataSeries.push(0.359605608, 0.56466366)
+ dataSeries.push(0.049612895, 0.18897607)
+ dataSeries.push(0.409898906, 0.32531750)
+ dataSeries.push(0.935457898, -0.78703491)
+ dataSeries.push(0.149476207, 0.80585375)
+ dataSeries.push(0.234315216, 0.62944986)
+ dataSeries.push(0.455297119, 0.02353327)
+ dataSeries.push(0.102696671, 0.27621694)
+ dataSeries.push(0.715372314, -1.20379729)
+ dataSeries.push(0.681745393, -0.83059624)
+ testCoefficientA(dataSeries, -2.030477132951317)
+ testCoefficientB(dataSeries, 0.5976858995201227)
+ testCoefficientC(dataSeries, 0.17630021024409503)
+ testGoodnessOfFitEquals(dataSeries, 0.23921110548689295)
+})
+
+test('Quadratic TS Estimation should be decent for standard real-life example from Statology.org with some noise and chaotic X values', () => {
+ // Test based on https://www.statology.org/quadratic-regression-r/
+ const dataSeries = createTSQuadraticSeries(11)
+ dataSeries.push(6, 14)
+ dataSeries.push(9, 28)
+ dataSeries.push(12, 50)
+ dataSeries.push(14, 70)
+ dataSeries.push(30, 89)
+ dataSeries.push(35, 94)
+ dataSeries.push(40, 90)
+ dataSeries.push(47, 75)
+ dataSeries.push(51, 59)
+ dataSeries.push(55, 44)
+ dataSeries.push(60, 27)
+ testCoefficientA(dataSeries, -0.10119047619047619) // The example results in -0.1012 for R after two rounds, which we consider acceptably close
+ testCoefficientB(dataSeries, 6.801190476190477) // The example results in 6.7444 for R after two rounds, which we consider acceptably close
+ testCoefficientC(dataSeries, -21.126190476190516) // The example results in 18.2536 for R after two rounds, but for ORM, this factor is irrelevant
+ testGoodnessOfFitEquals(dataSeries, 0.9571127392718894)
+})
+
+test('Quadratic TS Estimation should be decent for standard real-life example from StatsDirect.com with some noise and chaotic X values', () => {
+ // Test based on https://www.statsdirect.com/help/regression_and_correlation/polynomial.htm
+ const dataSeries = createTSQuadraticSeries(10)
+ dataSeries.push(1290, 1182)
+ dataSeries.push(1350, 1172)
+ dataSeries.push(1470, 1264)
+ dataSeries.push(1600, 1493)
+ dataSeries.push(1710, 1571)
+ dataSeries.push(1840, 1711)
+ dataSeries.push(1980, 1804)
+ dataSeries.push(2230, 1840)
+ dataSeries.push(2400, 1956)
+ dataSeries.push(2930, 1954)
+ testCoefficientA(dataSeries, -0.00046251263566907585) // The example results in -0.00045 through QR decomposition by Givens rotations, which we consider acceptably close
+ testCoefficientB(dataSeries, 2.441798780934297) // The example results in 2.39893 for QR decomposition by Givens rotations, which we consider acceptably close
+ testCoefficientC(dataSeries, -1235.044997485239) // The example results in -1216.143887 for QR decomposition by Givens rotations, but for ORM, this factor is irrelevant
+ testGoodnessOfFitEquals(dataSeries, 0.9790379024208455)
+})
+
+test('Quadratic Approximation with a clean function and a reset', () => {
+ // Data based on 2 x^2 + 2 x + 2
+ const dataSeries = createTSQuadraticSeries(10)
+ dataSeries.push(-10, 182)
+ dataSeries.push(-9, 146)
+ dataSeries.push(-8, 114)
+ dataSeries.push(-7, 86)
+ dataSeries.push(-6, 62)
+ dataSeries.push(-5, 42)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(-4, 26)
+ dataSeries.push(-3, 14) // Pi ;)
+ dataSeries.push(-2, 6)
+ dataSeries.push(-1, 2)
+ dataSeries.push(0, 2)
+ dataSeries.push(1, 6)
+ dataSeries.push(2, 14)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.push(3, 26)
+ dataSeries.push(4, 42)
+ dataSeries.push(5, 62)
+ dataSeries.push(6, 86)
+ dataSeries.push(7, 114)
+ dataSeries.push(8, 146)
+ dataSeries.push(9, 182)
+ dataSeries.push(10, 222)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ dataSeries.reset()
+ testCoefficientA(dataSeries, 0)
+ testCoefficientB(dataSeries, 0)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 0)
+ dataSeries.push(-1, 2)
+ testCoefficientA(dataSeries, 0)
+ testCoefficientB(dataSeries, 0)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 0)
+ dataSeries.push(0, 2)
+ testCoefficientA(dataSeries, 0)
+ testCoefficientB(dataSeries, 0)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 0)
+ dataSeries.push(1, 6)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 2)
+ testCoefficientC(dataSeries, 2)
+ testGoodnessOfFitEquals(dataSeries, 1)
+})
+
+test('Quadratic TS Estimation should result in a straight line for function y = x', () => {
+ // As ORM will encounter straight lines (when forces are balanced on the flywheel, there is no acceleration/deceleration), so we need to test this as well
+ const dataSeries = createTSQuadraticSeries(7)
+ dataSeries.push(0, 0)
+ dataSeries.push(1, 1)
+ dataSeries.push(2, 2)
+ dataSeries.push(3, 3)
+ dataSeries.push(4, 4)
+ dataSeries.push(5, 5)
+ dataSeries.push(6, 6)
+ testCoefficientA(dataSeries, 0)
+ testCoefficientB(dataSeries, 1)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 1)
+})
+
+/**
+ * This group tests the results of the functions actually used. Please note: these exact same tests are also used in Flywheel.test.js
+ */
+/**
+ * The data of the underlying test is based on y = pow(x, 2) + 4 x, where we have a spacing in y of 1/3th pi (i.e. a 6 magnet flywheel)
+ * and a flankLength of 12 (2 * magnets), as this is what Flywheel.test.js will use. CurrentDt's are mentioned in the comment behind the line
+ * So the first derivative should follow y' = 4x + 4, and the second derivative y'' = 4,
+ */
+test('Quadratic Approximation on a perfect noisefree function y = 2 * Math.pow(x, 2) + 4 * x + 2, 32 datapoints', () => {
+ const dataSeries = createTSQuadraticSeries(12)
+ testLength(dataSeries, 0)
+ testIsReliable(dataSeries, false)
+ testGoodnessOfFitEquals(dataSeries, 0)
+ testLocalGoodnessOfFit (dataSeries, 0, 0)
+ dataSeries.push(0.000000000000000, 0.000000000000000) // Datapoint 0, no currentDt
+ testLength(dataSeries, 1)
+ testIsReliable(dataSeries, false)
+ testGoodnessOfFitEquals(dataSeries, 0)
+ testLocalGoodnessOfFit (dataSeries, 0, 0)
+ dataSeries.push(0.234341433963188, 1.047197551196600) // Datapoint 1, currentDt = 0,234341433963188
+ testLength(dataSeries, 2)
+ testIsReliable(dataSeries, false)
+ testGoodnessOfFitEquals(dataSeries, 0)
+ testLocalGoodnessOfFit(dataSeries, 0, 0)
+ testLocalGoodnessOfFit(dataSeries, 1, 0)
+ dataSeries.push(0.430803114057485, 2.094395102393200) // Datapoint 2, currentDt = 0,196461680094298
+ testLength(dataSeries, 3)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000049)
+ testCoefficientB(dataSeries, 3.999999999999996)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 3.999999999999996) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000098)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.9373657358527705) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000098)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229978) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000098)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ dataSeries.push(0.603370302455080, 3.141592653589790) // Datapoint 3, currentDt = 0,172567188397595
+ testLength(dataSeries, 4)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999999454)
+ testCoefficientB(dataSeries, 4.000000000000037)
+ testCoefficientC(dataSeries, -1.3322676295501878e-15)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 4.000000000000037) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.9999999999998908)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852763) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.9999999999998908)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.7232124562299305) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.9999999999998908)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820292) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.9999999999998908)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ dataSeries.push(0.759089282098323, 4.188790204786390) // Datapoint 4, currentDt = 0,155718979643243
+ testLength(dataSeries, 5)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999999871)
+ testCoefficientB(dataSeries, 4.0000000000000115)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 4.0000000000000115) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.9999999999999742)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852757) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.9999999999999742)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.72321245622994) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.9999999999999742)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820317) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.9999999999999742)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.036357128393284) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.9999999999999742)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ dataSeries.push(0.902102488824273, 5.235987755982990) // Datapoint 5, currentDt = 0,143013206725950
+ testLength(dataSeries, 6)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2)
+ testCoefficientB(dataSeries, 4.000000000000002)
+ testCoefficientC(dataSeries, 1.0547118733938987e-15)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 4.000000000000002) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 4)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852754) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 4)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229942) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 4)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820322) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 4)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.036357128393294) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 4)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 7.608409955297094) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 5, 4)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ dataSeries.push(1.035090330572530, 6.283185307179590) // Datapoint 6, currentDt = 0,132987841748253
+ testLength(dataSeries, 7)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999999525)
+ testCoefficientB(dataSeries, 4.000000000000037)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 4.000000000000037) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999999905)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852767) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999999905)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229937) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999999905)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820301) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999999905)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.0363571283932576) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999999905)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 7.608409955297043) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999999905)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 8.140361322290058) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999999905)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ dataSeries.push(1.159905421352540, 7.330382858376180) // Datapoint 7, currentDt = 0,124815090780014
+ testLength(dataSeries, 8)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999999734)
+ testCoefficientB(dataSeries, 4.000000000000025)
+ testCoefficientC(dataSeries, -6.661338147750939e-16)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 4.000000000000025) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852764) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229942) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820313) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.036357128393277) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 7.608409955297069) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 8.14036132229009) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 8.639621685410123) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.9999999999999467)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ dataSeries.push(1.27789161392424, 8.37758040957278) // Datapoint 8, currentDt = 0,117986192571703
+ testLength(dataSeries, 9)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000002)
+ testCoefficientB(dataSeries, 3.999999999999999)
+ testCoefficientC(dataSeries, 1.7763568394002505e-15)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 3.999999999999999) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852752) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229941) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820322) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.036357128393295) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 7.608409955297095) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 8.140361322290124) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 8.639621685410164) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 9.111566455696964) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000004)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ dataSeries.push(1.39006045538281, 9.42477796076938) // Datapoint 9, currentDt = 0,112168841458569
+ testLength(dataSeries, 10)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000008)
+ testCoefficientB(dataSeries, 3.999999999999994)
+ testCoefficientC(dataSeries, 2.220446049250313e-15)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 3.999999999999994) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852749) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229941) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820324) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.036357128393298) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 7.6084099552971) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 8.140361322290131) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 8.639621685410173) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 9.111566455696973) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 9.560241821531257) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000016)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ dataSeries.push(1.4971959786895, 10.471975511966) // Datapoint 10, currentDt = 0,107135523306685
+ testLength(dataSeries, 11)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999999885)
+ testCoefficientB(dataSeries, 4.00000000000001)
+ testCoefficientC(dataSeries, 0)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 4.00000000000001) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852756) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229939) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820317) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.036357128393284) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 7.6084099552970805) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 8.140361322290106) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 8.639621685410143) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 9.111566455696941) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 9.560241821531218) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 9.988783914757974) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.999999999999977)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ dataSeries.push(1.59992048562668, 11.5191730631626) // Datapoint 11, currentDt = 0,102724506937187
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000018)
+ testCoefficientB(dataSeries, 3.9999999999999805)
+ testCoefficientC(dataSeries, 5.329070518200751e-15)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 3.9999999999999805) // datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 4.937365735852741) // datapoint 1
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 5.723212456229936) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 6.413481209820322) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.0363571283933) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 7.608409955297105) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 8.140361322290138) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 8.639621685410184) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 9.111566455696988) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 9.560241821531271) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 9.988783914758034) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 10.39968194250676) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000036)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(1.69873772478535, 12.5663706143592) // Datapoint 12, currentDt = 0,098817239158663
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000000124)
+ testCoefficientB(dataSeries, 3.999999999999981)
+ testCoefficientC(dataSeries, 8.215650382226158e-15)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 4.9373657358527385) // datapoint 1, the series had shifted and pushed out datapoint 0
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 5.723212456229931) // datapoint 2
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 6.413481209820317) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 7.036357128393292) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 7.608409955297096) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 8.140361322290127) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 8.63962168541017) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 9.111566455696973) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 9.560241821531255) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 9.988783914758018) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 10.39968194250674) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 10.794950899141423) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000025)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(1.79406229042552, 13.6135681655558) // Datapoint 13, currentDt = 0,095324565640171
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000000204)
+ testCoefficientB(dataSeries, 3.9999999999999565)
+ testCoefficientC(dataSeries, 2.0650148258027912e-14)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 5.7232124562299145) // datapoint 2, as datapoint 1 was pushed out
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 6.413481209820302) // datapoint 3
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 7.03635712839328) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 7.608409955297086) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 8.140361322290119) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 8.639621685410162) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 9.11156645569697) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 9.560241821531253) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 9.988783914758017) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 10.399681942506742) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 10.794950899141426) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 11.17624916170211) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000041)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(1.88624026345282, 14.6607657167524) // Datapoint 14, currentDt = 0,092177973027300
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.999999999999996)
+ testCoefficientB(dataSeries, 4.000000000000007)
+ testCoefficientC(dataSeries, -3.9968028886505635e-15)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 6.413481209820322) // datapoint 3, as datapoint 2 was pushed out
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 7.036357128393293) // datapoint 4
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 7.608409955297092) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 8.140361322290119) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 8.639621685410159) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 9.111566455696956) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 9.560241821531235) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 9.988783914757995) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 10.399681942506714) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 10.794950899141394) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 11.176249161702072) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 11.54496105381127) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.999999999999992)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(1.97556408668583, 15.707963267949) // Datapoint 15, currentDt = 0,089323823233014
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000044)
+ testCoefficientB(dataSeries, 3.999999999999893)
+ testCoefficientC(dataSeries, 5.88418203051333e-14)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 7.036357128393252) // datapoint 4, as datapoint 3 was pushed out
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 7.6084099552970645) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 8.140361322290104) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 8.639621685410155) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 9.111566455696966) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 9.560241821531255) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 9.988783914758024) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 10.399681942506753) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 10.794950899141442) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 11.176249161702131) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 11.544961053811338) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 11.902256346743387) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000088)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.06228352860619, 16.7551608191456) // Datapoint 16, currentDt = 0,086719441920360
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000065)
+ testCoefficientB(dataSeries, 3.9999999999998357)
+ testCoefficientC(dataSeries, 9.769962616701378e-14)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 7.608409955297045) // datapoint 5
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 8.14036132229009) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 8.639621685410145) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 9.111566455696963) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 9.560241821531257) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 9.98878391475803) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 10.399681942506763) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 10.794950899141455) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 11.176249161702149) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 11.54496105381136) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 11.90225634674341) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 12.249134114424862) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.00000000000013)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.14661392375536, 17.8023583703422) // Datapoint 17, currentDt = 0,084330395149166
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.999999999999992)
+ testCoefficientB(dataSeries, 4.00000000000005)
+ testCoefficientC(dataSeries, -6.084022174945858e-14)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 8.140361322290154) // datapoint 6
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 8.63962168541019) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 9.11156645569699) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 9.560241821531267) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 9.988783914758026) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 10.399681942506744) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 10.794950899141423) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 11.176249161702101) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 11.5449610538113) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 11.902256346743338) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 12.249134114424775) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 12.586455695021456) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.999999999999984)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.22874247359082, 18.8495559215388) // Datapoint 18, currentDt = 0,082128549835466
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000001164)
+ testCoefficientB(dataSeries, 3.999999999999644)
+ testCoefficientC(dataSeries, 2.686739719592879e-13)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 8.639621685410074) // datapoint 7
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 9.111566455696902) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 9.560241821531207) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 9.988783914757992) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 10.399681942506735) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 10.794950899141439) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 11.176249161702142) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 11.544961053811363) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 11.902256346743425) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 12.249134114424884) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 12.586455695021584) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 12.914969894363441) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000233)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.30883313818749, 19.8967534727354) // Datapoint 19, currentDt = 0,080090664596669
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000001563)
+ testCoefficientB(dataSeries, 3.999999999999474)
+ testCoefficientC(dataSeries, 4.3787196091216174e-13)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 9.111566455696835) // datapoint 8
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 9.560241821531148) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 9.988783914757942) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 10.399681942506694) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 10.794950899141405) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 11.176249161702115) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 11.544961053811344) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 11.90225634674341) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 12.249134114424878) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 12.586455695021586) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 12.91496989436345) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 13.235332552750155) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000313)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.38703048583357, 20.943951023932) // Datapoint 20, currentDt = 0,078197347646078
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000047)
+ testCoefficientB(dataSeries, 3.9999999999998614)
+ testCoefficientC(dataSeries, 1.0125233984581428e-13)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 9.560241821531232) // datapoint 9
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 9.988783914758002) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 10.399681942506732) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 10.794950899141421) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 11.17624916170211) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 11.544961053811319) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 11.902256346743368) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 12.249134114424814) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 12.586455695021504) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 12.91496989436335) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 13.235332552750037) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 13.548121943334367) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000094)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.46346275966182, 21.9911485751286) // Datapoint 21, currentDt = 0,076432273828253
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.000000000000006)
+ testCoefficientB(dataSeries, 4.000000000000021)
+ testCoefficientC(dataSeries, -4.618527782440651e-14)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 9.98878391475804) // datapoint 10
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 10.39968194250676) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 10.794950899141442) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 11.176249161702124) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 11.544961053811324) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 11.902256346743366) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 12.249134114424805) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 12.586455695021488) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 12.91496989436333) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 13.235332552750009) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 13.548121943334332) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 13.853851038647331) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000012)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.53824434757728, 23.0383461263251) // Datapoint 22, currentDt = 0,074781587915460
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999998974)
+ testCoefficientB(dataSeries, 4.000000000000433)
+ testCoefficientC(dataSeries, -4.1877612488860905e-13)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 10.399681942506824) // datapoint 11
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 10.794950899141485) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 11.176249161702145) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 11.544961053811326) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 11.902256346743348) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 12.24913411442477) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 12.586455695021433) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 12.914969894363256) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 13.235332552749918) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 13.548121943334223) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 13.853851038647207) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 14.152977390309033) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.999999999999795)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.61147779153643, 24.0855436775217) // Datapoint 23, currentDt = 0,073233443959153
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999997056)
+ testCoefficientB(dataSeries, 4.000000000001288)
+ testCoefficientC(dataSeries, -1.3553602684623911e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 10.794950899141687) // datapoint 12
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 11.176249161702312) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 11.544961053811456) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 11.902256346743444) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 12.249134114424832) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 12.586455695021463) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 12.914969894363255) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 13.235332552749888) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 13.548121943334163) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 13.853851038647116) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 14.152977390308914) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 14.44591116614547) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.999999999999411)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.68325543702296, 25.1327412287183) // Datapoint 24, currentDt = 0,071777645486524
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.999999999999476)
+ testCoefficientB(dataSeries, 4.000000000002281)
+ testCoefficientC(dataSeries, -2.4211743721025414e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 11.17624916170248) // datapoint 13
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 11.544961053811583) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 11.902256346743531) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 12.249134114424878) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 12.58645569502147) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 12.914969894363225) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 13.23533255274982) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 13.54812194333406) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 13.85385103864698) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 14.15297739030874) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 14.445911166145263) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 14.733021748091309) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.999999999998952)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.75366079846827, 26.1799387799149) // Datapoint 25, currentDt = 0,070405361445316
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.999999999999619)
+ testCoefficientB(dataSeries, 4.000000000001693)
+ testCoefficientC(dataSeries, -1.822542117224657e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 11.544961053811535) // datapoint 14
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 11.902256346743506) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 12.24913411442488) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 12.586455695021497) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 12.914969894363274) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 13.235332552749894) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 13.548121943334154) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 13.853851038647095) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 14.152977390308878) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 14.445911166145423) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 14.733021748091488) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 15.014643193872676) // datapoint 25
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.999999999999238)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.82276969821042, 27.2271363311115) // Datapoint 26, currentDt = 0,069108899742145
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999996692)
+ testCoefficientB(dataSeries, 4.000000000001454)
+ testCoefficientC(dataSeries, -1.538325022920617e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 11.902256346743467) // datapoint 15
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 12.249134114424848) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 12.586455695021474) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 12.91496989436326) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 13.235332552749885) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 13.548121943334156) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 13.853851038647104) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 14.152977390308894) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 14.445911166145446) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 14.733021748091517) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 15.014643193872711) // datapoint 25
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 15.291078792841265) // datapoint 26
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.9999999999993383)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.89065122327279, 28.2743338823081) // Datapoint 27, currentDt = 0,067881525062373
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 1.9999999999999296)
+ testCoefficientB(dataSeries, 4.0000000000002345)
+ testCoefficientC(dataSeries, -1.341149413747189e-13)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 12.249134114424704) // datapoint 16
+ testSecondDerivativeAtPosition(dataSeries, 0, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 12.586455695021373) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 1, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 12.9149698943632) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 2, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 13.235332552749869) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 3, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 13.548121943334179) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 4, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 13.853851038647168) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 5, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 14.152977390308997) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 6, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 14.445911166145587) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 7, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 14.733021748091696) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 8, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 15.014643193872928) // datapoint 25
+ testSecondDerivativeAtPosition(dataSeries, 9, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 15.291078792841516) // datapoint 26
+ testSecondDerivativeAtPosition(dataSeries, 10, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 15.562604893090988) // datapoint 27
+ testSecondDerivativeAtPosition(dataSeries, 11, 3.9999999999998592)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(2.95736853436123, 29.3215314335047) // Datapoint 28, currentDt = 0,066717311088441
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000001585)
+ testCoefficientB(dataSeries, 3.999999999999079)
+ testCoefficientC(dataSeries, 1.3224976669334865e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 12.5864556950212) // datapoint 17
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 12.914969894363065) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 13.235332552749771) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 13.548121943334117) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 13.85385103864714) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 14.152977390309005) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 14.445911166145628) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 14.733021748091769) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 15.014643193873034) // datapoint 25
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 15.291078792841653) // datapoint 26
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 15.562604893091155) // datapoint 27
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 15.829474137444937) // datapoint 28
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000317)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(3.02297955405576, 30.3687289847013) // Datapoint 29, currentDt = 0,065611019694526
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000002034)
+ testCoefficientB(dataSeries, 3.9999999999988223)
+ testCoefficientC(dataSeries, 1.6857626405908377e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 12.914969894363008) // datapoint 18
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 13.235332552749721) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 13.548121943334074) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 13.853851038647104) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 14.152977390308974) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 14.445911166145605) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 14.733021748091753) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 15.014643193873024) // datapoint 25
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 15.291078792841649) // datapoint 26
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 15.562604893091159) // datapoint 27
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 15.829474137444945) // datapoint 28
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 16.091918216223092) // datapoint 29
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000407)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(3.08753755553988, 31.4159265358979) // Datapoint 30, currentDt = 0,064558001484125
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000004476)
+ testCoefficientB(dataSeries, 3.999999999997489)
+ testCoefficientC(dataSeries, 3.4852121189032914e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 13.235332552749515) // datapoint 19
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 13.548121943333907) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 13.853851038646976) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 14.15297739030888) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 14.445911166145546) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 14.73302174809173) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 15.014643193873034) // datapoint 25
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 15.291078792841695) // datapoint 26
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 15.562604893091237) // datapoint 27
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 15.829474137445057) // datapoint 28
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 16.091918216223235) // datapoint 29
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 16.350150222159773) // datapoint 30
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000000895)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+ dataSeries.push(3.15109166889232, 32.4631240870945) // Datapoint 31, currentDt = 0,063554113352442
+ testLength(dataSeries, 12)
+ testIsReliable(dataSeries, true)
+ testCoefficientA(dataSeries, 2.0000000000006226)
+ testCoefficientB(dataSeries, 3.9999999999965214)
+ testCoefficientC(dataSeries, 4.8263615326504805e-12)
+ testGoodnessOfFitEquals(dataSeries, 1)
+ testFirstDerivativeAtPosition(dataSeries, 0, 13.548121943333774) // datapoint 20
+ testSecondDerivativeAtPosition(dataSeries, 0, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 0, 1)
+ testFirstDerivativeAtPosition(dataSeries, 1, 13.85385103864687) // datapoint 21
+ testSecondDerivativeAtPosition(dataSeries, 1, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 1, 1)
+ testFirstDerivativeAtPosition(dataSeries, 2, 14.152977390308802) // datapoint 22
+ testSecondDerivativeAtPosition(dataSeries, 2, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 2, 1)
+ testFirstDerivativeAtPosition(dataSeries, 3, 14.445911166145494) // datapoint 23
+ testSecondDerivativeAtPosition(dataSeries, 3, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 3, 1)
+ testFirstDerivativeAtPosition(dataSeries, 4, 14.733021748091703) // datapoint 24
+ testSecondDerivativeAtPosition(dataSeries, 4, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 4, 1)
+ testFirstDerivativeAtPosition(dataSeries, 5, 15.014643193873031) // datapoint 25
+ testSecondDerivativeAtPosition(dataSeries, 5, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 5, 1)
+ testFirstDerivativeAtPosition(dataSeries, 6, 15.291078792841716) // datapoint 26
+ testSecondDerivativeAtPosition(dataSeries, 6, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 6, 1)
+ testFirstDerivativeAtPosition(dataSeries, 7, 15.562604893091281) // datapoint 27
+ testSecondDerivativeAtPosition(dataSeries, 7, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 7, 1)
+ testFirstDerivativeAtPosition(dataSeries, 8, 15.829474137445125) // datapoint 28
+ testSecondDerivativeAtPosition(dataSeries, 8, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 8, 1)
+ testFirstDerivativeAtPosition(dataSeries, 9, 16.091918216223323) // datapoint 29
+ testSecondDerivativeAtPosition(dataSeries, 9, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 9, 1)
+ testFirstDerivativeAtPosition(dataSeries, 10, 16.350150222159886) // datapoint 30
+ testSecondDerivativeAtPosition(dataSeries, 10, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 10, 1)
+ testFirstDerivativeAtPosition(dataSeries, 11, 16.604366675569725) // datapoint 31
+ testSecondDerivativeAtPosition(dataSeries, 11, 4.000000000001245)
+ testLocalGoodnessOfFit(dataSeries, 11, 1)
+})
+
+function testLength (series, expectedValue) {
+ assert.ok(series.length() === expectedValue, `Expected value for length at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.length()}`)
+}
+
+function testCoefficientA (series, expectedValue) {
+ assert.ok(series.coefficientA() === expectedValue, `Expected value for coefficientA at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.coefficientA()}`)
+}
+
+function testCoefficientB (series, expectedValue) {
+ assert.ok(series.coefficientB() === expectedValue, `Expected value for coefficientB at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.coefficientB()}`)
+}
+
+function testCoefficientC (series, expectedValue) {
+ assert.ok(series.coefficientC() === expectedValue, `Expected value for coefficientC at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.coefficientC()}`)
+}
+
+function testGoodnessOfFitEquals (series, expectedValue) {
+ assert.ok(series.goodnessOfFit() === expectedValue, `Expected goodnessOfFit at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered ${series.goodnessOfFit()}`)
+}
+
+function testGoodnessOfFitBetween (series, expectedValueAbove, expectedValueBelow) { // eslint-disable-line no-unused-vars
+ assert.ok(series.goodnessOfFit() > expectedValueAbove, `Expected goodnessOfFit at X-position ${series.X.atSeriesEnd()} above ${expectedValueAbove}, encountered ${series.goodnessOfFit()}`)
+ assert.ok(series.goodnessOfFit() < expectedValueBelow, `Expected goodnessOfFit at X-position ${series.X.atSeriesEnd()} below ${expectedValueBelow}, encountered ${series.goodnessOfFit()}`)
+}
+
+function testLocalGoodnessOfFit (series, position, expectedValue) {
+ assert.ok(series.localGoodnessOfFit(position) === expectedValue, `Expected localGoodnessOfFit at X-position ${series.X.atSeriesEnd()} for position ${position} to be ${expectedValue}, encountered ${series.localGoodnessOfFit(position)}`)
+}
+
+function testSlope (series, position, expectedValue) { // eslint-disable-line no-unused-vars
+ assert.ok(series.slope(position) === expectedValue, `Expected value for Slope-${position} at X-position ${series.X.atSeriesEnd()} (slope at X-position ${series.X.atPosition(position)}) is ${expectedValue}, encountered a ${series.slope(position)}`)
+}
+
+function testFirstDerivativeAtPosition (series, position, expectedValue) {
+ assert.ok(series.firstDerivativeAtPosition(position) === expectedValue, `Expected value for first derivative for position ${position} at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.firstDerivativeAtPosition(position)}`)
+}
+
+function testSecondDerivativeAtPosition (series, position, expectedValue) {
+ assert.ok(series.secondDerivativeAtPosition(position) === expectedValue, `Expected value for second derivative for position ${position} at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.secondDerivativeAtPosition(position)}`)
+}
+
+function testIsReliable (series, expectedValue) {
+ assert.ok(series.reliable() === expectedValue, `Expected value for isReliable at X-position ${series.X.atSeriesEnd()} is ${expectedValue}, encountered a ${series.reliable()}`)
+}
+
+function reportAll (series) { // eslint-disable-line no-unused-vars
+ assert.ok(series.coefficientA() === 99, `time: ${series.X.atSeriesEnd()}, coefficientA: ${series.coefficientA()}, coefficientB: ${series.coefficientB()}, coefficientC: ${series.coefficientC()}, Slope-10: ${series.slope(10)}, Slope-9: ${series.slope(9)}, Slope-8: ${series.slope(8)}, Slope-7: ${series.slope(7)}, Slope-6: ${series.slope(6)}, Slope-5: ${series.slope(5)}, Slope-4: ${series.slope(4)}, Slope-3: ${series.slope(3)}, Slope-2: ${series.slope(2)}, Slope-1: ${series.slope(1)}, Slope-0: ${series.slope(0)}`)
+}
+
+test.run()
diff --git a/app/engine/utils/WLSLinearSeries.js b/app/engine/utils/WLSLinearSeries.js
new file mode 100644
index 0000000000..b596616ea3
--- /dev/null
+++ b/app/engine/utils/WLSLinearSeries.js
@@ -0,0 +1,181 @@
+'use strict'
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file The WLSLinearSeries is a datatype that represents a Linear Series. It allows
+ * values to be retrieved (like a FiFo buffer, or Queue) but it also includes
+ * a Weighted Linear Regressor to determine the slope, intercept and R^2 of this series
+ * of x and y coordinates through Weighted Least Squares Regression.
+ *
+ * At creation it can be determined that the Series is limited (i.e. after it
+ * is filled, the oldest will be pushed out of the queue) or that the series
+ * is unlimited (will only expand). The latter is activated by calling the creation with
+ * an empty argument.
+ *
+ * please note that for unlimited series it is up to the calling function to handle resetting
+ * the Linear Series when needed through the reset() call.
+ *
+ * This implementation uses concepts that are described here:
+ * https://www.colorado.edu/amath/sites/default/files/attached-files/ch12_0.pdf
+ *
+ * For weighted least squares:
+ * https://en.wikipedia.org/wiki/Weighted_least_squares
+ */
+import { createSeries } from './Series.js'
+
+import loglevel from 'loglevel'
+const log = loglevel.getLogger('RowingEngine')
+
+/**
+ * @param {integer} maxSeriesLength - the maximum length of the linear series, default = 0 for unlimited
+ */
+export function createWLSLinearSeries (maxSeriesLength = 0) {
+ const X = createSeries(maxSeriesLength)
+ const weight = createSeries(maxSeriesLength)
+ const WX = createSeries(maxSeriesLength)
+ const WY = createSeries(maxSeriesLength)
+ const WXX = createSeries(maxSeriesLength)
+ const WYY = createSeries(maxSeriesLength)
+ const WXY = createSeries(maxSeriesLength)
+ const Y = createSeries(maxSeriesLength)
+ let _slope = 0
+ let _intercept = 0
+ let _goodnessOfFit = 0
+
+ /**
+ * @param {float} x - the x value of the datapoint
+ * @param {float} y - the y value of the datapoint
+ * @param {float} w - the weight of the datapoint, default = 1
+ */
+ function push (x, y, w = 1) {
+ if (x === undefined || isNaN(x) || y === undefined || isNaN(y)) { return }
+
+ // Ensure weight is valid and positive
+ const _weight = (w === undefined || isNaN(w) || w <= 0) ? 1 : w
+
+ X.push(x)
+ Y.push(y)
+ weight.push(_weight)
+ WX.push(_weight * x)
+ WY.push(_weight * y)
+ WXX.push(_weight * x * x)
+ WYY.push(_weight * y * y)
+ WXY.push(_weight * x * y)
+
+ // Calculate regression parameters using Weighted Least Squares
+ const denominator = (weight.sum() * WXX.sum()) - (WX.sum() * WX.sum())
+ if (X.length() >= 2 && denominator !== 0) {
+ _slope = (weight.sum() * WXY.sum() - WX.sum() * WY.sum()) / denominator
+ _intercept = (WY.sum() - _slope * WX.sum()) / weight.sum()
+
+ // Calculate weighted R^2
+ const weighedAverageY = WY.sum() / weight.sum()
+ const sse = WYY.sum() - (2 * _intercept * WY.sum()) - (2 * _slope * WXY.sum()) +
+ (_intercept * _intercept * weight.sum()) + (2 * _slope * _intercept * WX.sum()) +
+ (_slope * _slope * WXX.sum())
+ const sst = WYY.sum() - (weighedAverageY * weighedAverageY * weight.sum())
+
+ _goodnessOfFit = (sst !== 0) ? 1 - (sse / sst) : 0
+ } else {
+ _slope = 0
+ _intercept = 0
+ _goodnessOfFit = 0
+ }
+ }
+
+ /**
+ * @returns {float} the slope of the linear function
+ */
+ function slope () {
+ return _slope
+ }
+
+ /**
+ * @returns {float} the intercept of the linear function
+ */
+ function intercept () {
+ return _intercept
+ }
+
+ /**
+ * @returns {integer} the length of the stored series
+ */
+ function length () {
+ return X.length()
+ }
+
+ /**
+ * @returns {float} the R^2 as a goodness of fit indicator
+ */
+ function goodnessOfFit () {
+ if (X.length() >= 2) {
+ return _goodnessOfFit
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {float} x - the x value to be projected
+ * @returns {float} the resulting y value when projected via the linear function
+ */
+ function projectX (x) {
+ if (X.length() >= 2) {
+ return (_slope * x) + _intercept
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {float} y - the y value to be solved
+ * @returns {float} the resulting x value when solved via the linear function
+ */
+ function projectY (y) {
+ if (X.length() >= 2 && _slope !== 0) {
+ return ((y - _intercept) / _slope)
+ } else {
+ log.error('WLS Regressor, attempted a Y-projection while slope was zero!')
+ return 0
+ }
+ }
+
+ /**
+ * @returns {boolean} whether the linear regression should be considered reliable to produce results
+ */
+ function reliable () {
+ return (X.length() >= 2 && _slope !== 0)
+ }
+
+ /**
+ * @description This function is used for clearing all data, typically when flywheel.js is completely reset
+ */
+ function reset () {
+ X.reset()
+ Y.reset()
+ weight.reset()
+ WX.reset()
+ WY.reset()
+ WXX.reset()
+ WYY.reset()
+ WXY.reset()
+ _slope = 0
+ _intercept = 0
+ _goodnessOfFit = 0
+ }
+
+ return {
+ push,
+ X,
+ Y,
+ weight,
+ slope,
+ intercept,
+ length,
+ goodnessOfFit,
+ projectX,
+ projectY,
+ reliable,
+ reset
+ }
+}
diff --git a/app/engine/utils/OLSLinearSeries.test.js b/app/engine/utils/WLSLinearSeries.test.js
similarity index 57%
rename from app/engine/utils/OLSLinearSeries.test.js
rename to app/engine/utils/WLSLinearSeries.test.js
index 9bf25cc3c0..2d7ee4692b 100644
--- a/app/engine/utils/OLSLinearSeries.test.js
+++ b/app/engine/utils/WLSLinearSeries.test.js
@@ -1,14 +1,16 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This contains all tests for the WLS Linear Series
+ */
import { test } from 'uvu'
import * as assert from 'uvu/assert'
-import { createOLSLinearSeries } from './OLSLinearSeries.js'
+import { createWLSLinearSeries } from './WLSLinearSeries.js'
test('Correct behaviour of a series after initialisation', () => {
- const dataSeries = createOLSLinearSeries(3)
+ const dataSeries = createWLSLinearSeries(3)
testLength(dataSeries, 0)
testXAtSeriesBegin(dataSeries, 0)
testYAtSeriesBegin(dataSeries, 0)
@@ -29,10 +31,10 @@ test('Correct behaviour of a series after initialisation', () => {
testGoodnessOfFitEquals(dataSeries, 0)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 1 datapoint', () => {
- const dataSeries = createOLSLinearSeries(3)
+test('Correct behaviour of a series after several pushed values, function y = 3x - 6, noisefree, 1 datapoint', () => {
+ const dataSeries = createWLSLinearSeries(3)
testLength(dataSeries, 0)
- dataSeries.push(5, 9)
+ dataSeries.push(5, 9, 1)
testLength(dataSeries, 1)
testXAtSeriesBegin(dataSeries, 5)
testYAtSeriesBegin(dataSeries, 9)
@@ -53,10 +55,10 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testGoodnessOfFitEquals(dataSeries, 0)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 2 datapoints', () => {
- const dataSeries = createOLSLinearSeries(3)
- dataSeries.push(5, 9)
- dataSeries.push(3, 3)
+test('Correct behaviour of a series after several pushed values, function y = 3x - 6, noisefree, 2 datapoints', () => {
+ const dataSeries = createWLSLinearSeries(3)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 3, 1)
testLength(dataSeries, 2)
testXAtSeriesBegin(dataSeries, 5)
testYAtSeriesBegin(dataSeries, 9)
@@ -77,11 +79,11 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testGoodnessOfFitEquals(dataSeries, 1)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 3 datapoints', () => {
- const dataSeries = createOLSLinearSeries(3)
- dataSeries.push(5, 9)
- dataSeries.push(3, 3)
- dataSeries.push(4, 6)
+test('Correct behaviour of a series after several pushed values, function y = 3x - 6, noisefree, 3 datapoints', () => {
+ const dataSeries = createWLSLinearSeries(3)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 3, 1)
+ dataSeries.push(4, 6, 1)
testLength(dataSeries, 3)
testXAtSeriesBegin(dataSeries, 5)
testYAtSeriesBegin(dataSeries, 9)
@@ -102,12 +104,12 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testGoodnessOfFitEquals(dataSeries, 1)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 4 datapoints', () => {
- const dataSeries = createOLSLinearSeries(3)
- dataSeries.push(5, 9)
- dataSeries.push(3, 3)
- dataSeries.push(4, 6)
- dataSeries.push(6, 12)
+test('Correct behaviour of a series after several pushed values, function y = 3x - 6, noisefree, 4 datapoints', () => {
+ const dataSeries = createWLSLinearSeries(3)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 3, 1)
+ dataSeries.push(4, 6, 1)
+ dataSeries.push(6, 12, 1)
testLength(dataSeries, 3)
testXAtSeriesBegin(dataSeries, 3)
testYAtSeriesBegin(dataSeries, 3)
@@ -128,13 +130,13 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testGoodnessOfFitEquals(dataSeries, 1)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 5 datapoints', () => {
- const dataSeries = createOLSLinearSeries(3)
- dataSeries.push(5, 9)
- dataSeries.push(3, 3)
- dataSeries.push(4, 6)
- dataSeries.push(6, 12)
- dataSeries.push(1, -3)
+test('Correct behaviour of an unweighted series after several pushed values, function y = 3x - 6, noisefree, 5 datapoints', () => {
+ const dataSeries = createWLSLinearSeries(3)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 3, 1)
+ dataSeries.push(4, 6, 1)
+ dataSeries.push(6, 12, 1)
+ dataSeries.push(1, -3, 1)
testLength(dataSeries, 3)
testXAtSeriesBegin(dataSeries, 4)
testYAtSeriesBegin(dataSeries, 6)
@@ -155,12 +157,105 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testGoodnessOfFitEquals(dataSeries, 1)
})
-test('Correct behaviour of a series after several puhed values, function y = 3x + 6, noisefree, 4 datapoints and a reset', () => {
- const dataSeries = createOLSLinearSeries(3)
- dataSeries.push(5, 9)
- dataSeries.push(3, 3)
- dataSeries.push(4, 6)
- dataSeries.push(6, 12)
+test('Correct behaviour of a uniformly weighted series after several pushed values, function y = 3x - 6, noisefree, 5 datapoints', () => {
+ const dataSeries = createWLSLinearSeries(3)
+ dataSeries.push(5, 9, 0.5)
+ dataSeries.push(3, 3, 0.5)
+ dataSeries.push(4, 6, 0.5)
+ dataSeries.push(6, 12, 0.5)
+ dataSeries.push(1, -3, 0.5)
+ testLength(dataSeries, 3)
+ testXAtSeriesBegin(dataSeries, 4)
+ testYAtSeriesBegin(dataSeries, 6)
+ testXAtSeriesEnd(dataSeries, 1)
+ testYAtSeriesEnd(dataSeries, -3)
+ testNumberOfXValuesAbove(dataSeries, 0, 3)
+ testNumberOfYValuesAbove(dataSeries, 0, 2)
+ testNumberOfXValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfYValuesEqualOrBelow(dataSeries, 0, 1)
+ testNumberOfXValuesAbove(dataSeries, 10, 0)
+ testNumberOfYValuesAbove(dataSeries, 10, 1)
+ testNumberOfXValuesEqualOrBelow(dataSeries, 10, 3)
+ testNumberOfYValuesEqualOrBelow(dataSeries, 10, 2)
+ testXSum(dataSeries, 11)
+ testYSum(dataSeries, 15)
+ testSlopeEquals(dataSeries, 3)
+ testInterceptEquals(dataSeries, -6)
+ testGoodnessOfFitEquals(dataSeries, 1)
+})
+
+test('Series with 5 elements, with 2 noisy datapoints, ideal function y = 3x - 6, uniform weights', () => {
+ const dataSeries = createWLSLinearSeries(5)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 2, 1)
+ dataSeries.push(4, 7, 1)
+ dataSeries.push(6, 12, 1)
+ dataSeries.push(1, -3, 1)
+ testSlopeEquals(dataSeries, 3.0675675675675675) // Theoretical noisefree value 3
+ testInterceptEquals(dataSeries, -6.256756756756756) // Theoretical noisefree value -6
+ testGoodnessOfFitEquals(dataSeries, 0.9863142179006205) // Ideal value 1
+ testXProjectionEquals(dataSeries, 1, -3.1891891891891886) // Theoretical noisefree value -3
+ testXProjectionEquals(dataSeries, 3, 2.9459459459459456) // Theoretical noisefree value 3
+ testXProjectionEquals(dataSeries, 4, 6.013513513513514) // Theoretical noisefree value 6
+ testXProjectionEquals(dataSeries, 5, 9.081081081081082) // Theoretical noisefree value 9
+ testXProjectionEquals(dataSeries, 6, 12.148648648648647) // Theoretical noisefree value 12
+})
+
+test('Series with 5 elements, with 2 noisy datapoints, ideal function y = 3x - 6, non-uniform weights', () => {
+ const dataSeries = createWLSLinearSeries(5)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 2, 0.5)
+ dataSeries.push(4, 7, 0.5)
+ dataSeries.push(6, 12, 1)
+ dataSeries.push(1, -3, 1)
+ testSlopeEquals(dataSeries, 3.034632034632035) // Theoretical noisefree value 3
+ testInterceptEquals(dataSeries, -6.134199134199134) // Theoretical noisefree value -6
+ testGoodnessOfFitEquals(dataSeries, 0.9926631153882663) // Ideal value 1
+ testXProjectionEquals(dataSeries, 1, -3.0995670995670994) // Theoretical noisefree value -3
+ testXProjectionEquals(dataSeries, 3, 2.9696969696969706) // Theoretical noisefree value 3
+ testXProjectionEquals(dataSeries, 4, 6.004329004329005) // Theoretical noisefree value 6
+ testXProjectionEquals(dataSeries, 5, 9.03896103896104) // Theoretical noisefree value 9
+ testXProjectionEquals(dataSeries, 6, 12.073593073593075) // Theoretical noisefree value 12
+})
+
+// Test based on the Galton dataset, using unweighted (=OLS) regression
+// Example found at https://online.stat.psu.edu/stat501/lesson/13/13.1
+test('Unweighted series with 7 elements based on Galton dataset (OLS)', () => {
+ const dataSeries = createWLSLinearSeries(7)
+ dataSeries.push(0.21, 0.1726, 1)
+ dataSeries.push(0.2, 0.1707, 1)
+ dataSeries.push(0.19, 0.1637, 1)
+ dataSeries.push(0.18, 0.164, 1)
+ dataSeries.push(0.17, 0.1613, 1)
+ dataSeries.push(0.16, 0.1617, 1)
+ dataSeries.push(0.15, 0.1598, 1)
+ testSlopeEquals(dataSeries, 0.2100000000000111)
+ testInterceptEquals(dataSeries, 0.12702857142856944)
+ testGoodnessOfFitEquals(dataSeries, 0.8553954556248868)
+})
+
+// Test based on the Galton dataset, using weighted (=WLS) regression
+// Example found at https://online.stat.psu.edu/stat501/lesson/13/13.1
+test('Non-uniformly weighted series with 7 elements based on Galton dataset (WLS)', () => {
+ const dataSeries = createWLSLinearSeries(7)
+ dataSeries.push(0.21, 0.1726, 2530.272176)
+ dataSeries.push(0.2, 0.1707, 2662.5174)
+ dataSeries.push(0.19, 0.1637, 2781.783546)
+ dataSeries.push(0.18, 0.164, 2410.004991)
+ dataSeries.push(0.17, 0.1613, 3655.35019)
+ dataSeries.push(0.16, 0.1617, 3935.712498)
+ dataSeries.push(0.15, 0.1598, 3217.328273)
+ testSlopeEquals(dataSeries, 0.20480116324222641)
+ testInterceptEquals(dataSeries, 0.12796416521509518)
+ testGoodnessOfFitEquals(dataSeries, 0.8521213232768868)
+})
+
+test('Correct reset behaviour. Series with 4 datapoints and a reset', () => {
+ const dataSeries = createWLSLinearSeries(3)
+ dataSeries.push(5, 9, 1)
+ dataSeries.push(3, 3, 1)
+ dataSeries.push(4, 6, 1)
+ dataSeries.push(6, 12, 1)
dataSeries.reset()
testLength(dataSeries, 0)
testXAtSeriesBegin(dataSeries, 0)
@@ -182,18 +277,6 @@ test('Correct behaviour of a series after several puhed values, function y = 3x
testGoodnessOfFitEquals(dataSeries, 0)
})
-test('Series with 5 elements, with 2 noisy datapoints', () => {
- const dataSeries = createOLSLinearSeries(5)
- dataSeries.push(5, 9)
- dataSeries.push(3, 2)
- dataSeries.push(4, 7)
- dataSeries.push(6, 12)
- dataSeries.push(1, -3)
- testSlopeBetween(dataSeries, 2.9, 3.1)
- testInterceptBetween(dataSeries, -6.3, -5.8)
- testGoodnessOfFitBetween(dataSeries, 0.9, 1.0)
-})
-
function testLength (series, expectedValue) {
assert.ok(series.length() === expectedValue, `Expected length should be ${expectedValue}, encountered a ${series.length()}`)
}
@@ -242,27 +325,16 @@ function testSlopeEquals (series, expectedValue) {
assert.ok(series.slope() === expectedValue, `Expected slope to be ${expectedValue}, encountered a ${series.slope()}`)
}
-function testSlopeBetween (series, expectedValueAbove, expectedValueBelow) {
- assert.ok(series.slope() > expectedValueAbove, `Expected slope to be above ${expectedValueAbove}, encountered a ${series.slope()}`)
- assert.ok(series.slope() < expectedValueBelow, `Expected slope to be below ${expectedValueBelow}, encountered a ${series.slope()}`)
-}
-
function testInterceptEquals (series, expectedValue) {
assert.ok(series.intercept() === expectedValue, `Expected intercept to be ${expectedValue}, encountered ${series.intercept()}`)
}
-function testInterceptBetween (series, expectedValueAbove, expectedValueBelow) {
- assert.ok(series.intercept() > expectedValueAbove, `Expected intercept to be above ${expectedValueAbove}, encountered ${series.intercept()}`)
- assert.ok(series.intercept() < expectedValueBelow, `Expected intercept to be below ${expectedValueBelow}, encountered ${series.intercept()}`)
-}
-
function testGoodnessOfFitEquals (series, expectedValue) {
assert.ok(series.goodnessOfFit() === expectedValue, `Expected goodnessOfFit to be ${expectedValue}, encountered ${series.goodnessOfFit()}`)
}
-function testGoodnessOfFitBetween (series, expectedValueAbove, expectedValueBelow) {
- assert.ok(series.goodnessOfFit() > expectedValueAbove, `Expected goodnessOfFit to be above ${expectedValueAbove}, encountered ${series.goodnessOfFit()}`)
- assert.ok(series.goodnessOfFit() < expectedValueBelow, `Expected goodnessOfFit to be below ${expectedValueBelow}, encountered ${series.goodnessOfFit()}`)
+function testXProjectionEquals (series, value, expectedValue) {
+ assert.ok(series.projectX(value) === expectedValue, `Expected projectX at value ${value} to be ${expectedValue}, encountered ${series.projectX(value)}`)
}
test.run()
diff --git a/app/engine/utils/WeighedMedianSeries.js b/app/engine/utils/WeighedMedianSeries.js
new file mode 100644
index 0000000000..4a9ce1d37b
--- /dev/null
+++ b/app/engine/utils/WeighedMedianSeries.js
@@ -0,0 +1,214 @@
+'use strict'
+/*
+ Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
+*/
+/**
+ * This creates a series with a maximum number of values. It allows for determining the Average, Median, Number of Positive, number of Negative
+ * @remark This object uses BinarySearchTrees for determining the Median, Min and Max values, making it useful for larger series without hurting the CPU too much
+ *
+ * @param {number} [maxSeriesLength] The maximum length of the series (0 for unlimited)
+ */
+import { createLabelledBinarySearchTree } from './BinarySearchTree.js'
+
+export function createWeighedMedianSeries (maxSeriesLength = 0) {
+ /**
+ * @type {Array}
+ */
+ const binarySearchTree = createLabelledBinarySearchTree()
+ let positionArray = []
+ let seriesArray = []
+ let seriesSum = 0
+
+ /**
+ * @param {float} unique identifier for destroying the datapoint
+ * @param {float} value to be added to the series
+ * @param {float} weight of value
+ */
+ function push (position, value, weight) {
+ if (value === undefined || isNaN(value)) { return }
+
+ binarySearchTree.push(position, value, weight)
+ // As we manage the size of the series based on weight, a single new value might trigger the removal of two old ones
+ if (maxSeriesLength > 0 && binarySearchTree.totalWeight() >= maxSeriesLength) { removeHead() }
+ if (maxSeriesLength > 0 && binarySearchTree.totalWeight() >= maxSeriesLength) { removeHead() }
+ seriesArray.push(value)
+ positionArray.push(position)
+ seriesSum += value
+ }
+
+ function removeHead () {
+ // The maximum of the array has been reached, we have to create room by removing the first
+ // value from the array
+ seriesSum -= seriesArray[0]
+ binarySearchTree.remove(positionArray[0])
+ positionArray.shift()
+ seriesArray.shift()
+ }
+
+ /**
+ * @output {number} length of the series
+ */
+ function length () {
+ return seriesArray.length
+ }
+
+ /**
+ * @output {float} value at the head of the series (i.e. the one first added)
+ */
+ function atSeriesBegin () {
+ if (seriesArray.length > 0) {
+ return seriesArray[0]
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @output {float} value at the tail of the series (i.e. the one last added)
+ */
+ function atSeriesEnd () {
+ if (seriesArray.length > 0) {
+ return seriesArray[seriesArray.length - 1]
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {number} position
+ * @output {float} value at a specific position, starting at 0
+ */
+ function get (position) {
+ if (position >= 0 && position < seriesArray.length) {
+ return seriesArray[position]
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @param {number} testedValue
+ * @output {number} number of values in the series above the tested value
+ */
+ function numberOfValuesAbove (testedValue) {
+ if (seriesArray.length > 0) {
+ return binarySearchTree.numberOfValuesAbove(testedValue)
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @param {number} testedValue
+ * @output {number} number of values in the series below or equal to the tested value
+ */
+ function numberOfValuesEqualOrBelow (testedValue) {
+ if (seriesArray.length > 0) {
+ return binarySearchTree.numberOfValuesEqualOrBelow(testedValue)
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @output {float} sum of the entire series
+ */
+ function sum () {
+ return seriesSum
+ }
+
+ /**
+ * @output {float} average of the entire series
+ */
+ function average () {
+ if (seriesArray.length > 0) {
+ return seriesSum / seriesArray.length
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @output {float} smallest element in the series
+ */
+ function minimum () {
+ if (seriesArray.length > 0) {
+ return binarySearchTree.minimum()
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @output {float} largest value in the series
+ */
+ function maximum () {
+ if (seriesArray.length > 0) {
+ return binarySearchTree.maximum()
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @output {float} median of the series
+ */
+ function median () {
+ if (seriesArray.length > 0) {
+ return binarySearchTree.median()
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @output {float} weighed median of the series
+ */
+ function weighedMedian () {
+ if (seriesArray.length > 0) {
+ return binarySearchTree.weightedMedian()
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @output {array} returns the entire series
+ */
+ function series () {
+ if (seriesArray.length > 0) {
+ return seriesArray
+ } else {
+ return []
+ }
+ }
+
+ /**
+ * Resets the series to its initial state
+ */
+ function reset () {
+ binarySearchTree.reset()
+ seriesArray = /** @type {Array} */(/** @type {unknown} */(null))
+ positionArray = []
+ seriesArray = []
+ seriesSum = 0
+ }
+
+ return {
+ push,
+ length,
+ atSeriesBegin,
+ atSeriesEnd,
+ get,
+ numberOfValuesAbove,
+ numberOfValuesEqualOrBelow,
+ sum,
+ average,
+ minimum,
+ maximum,
+ median,
+ weighedMedian,
+ series,
+ reset
+ }
+}
diff --git a/app/engine/utils/WeighedSeries.js b/app/engine/utils/WeighedSeries.js
index 8581597f5b..e5568ca1b7 100644
--- a/app/engine/utils/WeighedSeries.js
+++ b/app/engine/utils/WeighedSeries.js
@@ -1,18 +1,24 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-
- This creates a series with a maximum number of values
- It allows for determining the Average, Median, Number of Positive, number of Negative
-*/
-
+/**
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file This creates a weighed series with a maximum number of values. It allows for determining the Average, Weighed Average, Median, Number of Positive, number of Negative. DO NOT USE MEDIAN ON LARGE SERIES!
+ */
import { createSeries } from './Series.js'
-export function createWeighedSeries (maxSeriesLength, defaultValue) {
+/**
+ * @param {integer} the maximum length of the weighed series, 0 for unlimited
+ * @param {float|undefined} the default value to return if a function can't calculate a value
+ */
+export function createWeighedSeries (maxSeriesLength = 0, defaultValue) {
const dataArray = createSeries(maxSeriesLength)
const weightArray = createSeries(maxSeriesLength)
const weightedArray = createSeries(maxSeriesLength)
+ /**
+ * @param {float} the value of the datapoint
+ * @param {float} the weight of the datapoint
+ */
function push (value, weight) {
if (value === undefined || isNaN(value) || weight === undefined || isNaN(weight)) { return }
dataArray.push(value)
@@ -20,34 +26,61 @@ export function createWeighedSeries (maxSeriesLength, defaultValue) {
weightedArray.push(value * weight)
}
+ /**
+ * @returns {integer} the length of the stored series
+ */
function length () {
return dataArray.length()
}
+ /**
+ * @returns {float} the oldest value of the series (i.e. the one first added)
+ */
function atSeriesBegin () {
return dataArray.atSeriesBegin()
}
+ /**
+ * @returns {float} the youngest value of the series (i.e. the one last added)
+ */
function atSeriesEnd () {
return dataArray.atSeriesEnd()
}
+ /**
+ * @param {integer} position to be retrieved, starting at 0
+ * @returns {float} value at that specific position in the series
+ */
function get (position) {
return dataArray.get(position)
}
+ /**
+ * @param {float} tested value
+ * @returns {integer} count of values in the series above the tested value
+ */
function numberOfValuesAbove (testedValue) {
return dataArray.numberOfValuesAbove(testedValue)
}
+ /**
+ * @param {float} tested value
+ * @returns {integer} number of values in the series below or equal to the tested value
+ */
function numberOfValuesEqualOrBelow (testedValue) {
return dataArray.numberOfValuesEqualOrBelow(testedValue)
}
+ /**
+ * @returns {float} sum of the entire series
+ */
function sum () {
return dataArray.sum()
}
+ /**
+ * @returns {float} average of the entire series
+ */
function average () {
if (dataArray.length() > 0) {
// The series contains sufficient values to be valid
@@ -58,6 +91,9 @@ export function createWeighedSeries (maxSeriesLength, defaultValue) {
}
}
+ /**
+ * @returns {float} the weighed average of the series
+ */
function weighedAverage () {
if (dataArray.length() > 0 && weightArray.sum() !== 0) {
return (weightedArray.sum() / weightArray.sum())
@@ -66,26 +102,45 @@ export function createWeighedSeries (maxSeriesLength, defaultValue) {
}
}
+ /**
+ * @returns {float} smallest element in the series
+ */
function minimum () {
return dataArray.minimum()
}
+ /**
+ * @returns {float} largest value in the series
+ */
function maximum () {
return dataArray.maximum()
}
+ /**
+ * @returns {float} median of the series
+ * @description returns the median of the series. As this is a CPU intensive approach, DO NOT USE FOR LARGE SERIES!. For larger series, use the BinarySearchTree.js instead
+ */
function median () {
return dataArray.median()
}
+ /**
+ * @returns {boolean} if the weighed series results are to be considered reliable
+ */
function reliable () {
return dataArray.length() > 0
}
+ /**
+ * @returns {array} returns the entire series of datapoints
+ */
function series () {
return dataArray.series()
}
+ /**
+ * Resets the series to its initial state
+ */
function reset () {
dataArray.reset()
weightArray.reset()
diff --git a/app/engine/utils/WeighedSeries.test.js b/app/engine/utils/WeighedSeries.test.js
new file mode 100644
index 0000000000..93928deb24
--- /dev/null
+++ b/app/engine/utils/WeighedSeries.test.js
@@ -0,0 +1,228 @@
+'use strict'
+/*
+ Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
+*/
+/**
+ * As this object is fundamental for most other utility objects, we must test its behaviour quite thoroughly
+ */
+import { test } from 'uvu'
+import * as assert from 'uvu/assert'
+
+import { createWeighedSeries } from './WeighedSeries.js'
+
+test('Series behaviour with an empty series', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ testLength(dataSeries, 0)
+ testatSeriesBegin(dataSeries, 0)
+ testAtSeriesEnd(dataSeries, 0)
+ testNumberOfValuesAbove(dataSeries, 0, 0)
+ testNumberOfValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfValuesAbove(dataSeries, 10, 0)
+ testNumberOfValuesEqualOrBelow(dataSeries, 10, 0)
+ testSum(dataSeries, 0)
+ testAverage(dataSeries, undefined)
+ testWeighedAverage(dataSeries, undefined)
+ testMedian(dataSeries, 0)
+ testMinimum(dataSeries, 0)
+ testMaximum(dataSeries, 0)
+})
+
+test('Series behaviour with a single pushed value. Series = [9]', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 1)
+ testLength(dataSeries, 1)
+ testatSeriesBegin(dataSeries, 9)
+ testAtSeriesEnd(dataSeries, 9)
+ testNumberOfValuesAbove(dataSeries, 0, 1)
+ testNumberOfValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfValuesAbove(dataSeries, 10, 0)
+ testNumberOfValuesEqualOrBelow(dataSeries, 10, 1)
+ testSum(dataSeries, 9)
+ testAverage(dataSeries, 9)
+ testWeighedAverage(dataSeries, 9)
+ testMedian(dataSeries, 9)
+ testMinimum(dataSeries, 9)
+ testMaximum(dataSeries, 9)
+})
+
+test('Series behaviour with a second pushed value. Series = [9, 3]', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 1)
+ dataSeries.push(3, 0)
+ testLength(dataSeries, 2)
+ testatSeriesBegin(dataSeries, 9)
+ testAtSeriesEnd(dataSeries, 3)
+ testNumberOfValuesAbove(dataSeries, 0, 2)
+ testNumberOfValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfValuesAbove(dataSeries, 10, 0)
+ testNumberOfValuesEqualOrBelow(dataSeries, 10, 2)
+ testSum(dataSeries, 12)
+ testAverage(dataSeries, 6)
+ testWeighedAverage(dataSeries, 9)
+ testMedian(dataSeries, 6)
+ testMinimum(dataSeries, 3)
+ testMaximum(dataSeries, 9)
+})
+
+test('Series behaviour with a third pushed value. Series = [9, 3, 6]', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 1)
+ dataSeries.push(3, 0)
+ dataSeries.push(6, 1)
+ testLength(dataSeries, 3)
+ testatSeriesBegin(dataSeries, 9)
+ testAtSeriesEnd(dataSeries, 6)
+ testNumberOfValuesAbove(dataSeries, 0, 3)
+ testNumberOfValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfValuesAbove(dataSeries, 10, 0)
+ testNumberOfValuesEqualOrBelow(dataSeries, 10, 3)
+ testSum(dataSeries, 18)
+ testAverage(dataSeries, 6)
+ testWeighedAverage(dataSeries, 7.5)
+ testMedian(dataSeries, 6)
+ testMinimum(dataSeries, 3)
+ testMaximum(dataSeries, 9)
+})
+
+test('Series behaviour with a fourth pushed value. Series = [3, 6, 12]', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 0)
+ dataSeries.push(3, 0)
+ dataSeries.push(6, 1)
+ dataSeries.push(12, 1)
+ testLength(dataSeries, 3)
+ testatSeriesBegin(dataSeries, 3)
+ testAtSeriesEnd(dataSeries, 12)
+ testNumberOfValuesAbove(dataSeries, 0, 3)
+ testNumberOfValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfValuesAbove(dataSeries, 10, 1)
+ testNumberOfValuesEqualOrBelow(dataSeries, 10, 2)
+ testSum(dataSeries, 21)
+ testAverage(dataSeries, 7)
+ testWeighedAverage(dataSeries, 9)
+ testMedian(dataSeries, 6)
+ testMinimum(dataSeries, 3)
+ testMaximum(dataSeries, 12)
+})
+
+test('Series behaviour with a fifth pushed value. Series = [6, 12, -3]', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 1)
+ dataSeries.push(3, 1)
+ dataSeries.push(6, 1)
+ dataSeries.push(12, 1)
+ dataSeries.push(-3, 0.5)
+ testLength(dataSeries, 3)
+ testatSeriesBegin(dataSeries, 6)
+ testAtSeriesEnd(dataSeries, -3)
+ testNumberOfValuesAbove(dataSeries, 0, 2)
+ testNumberOfValuesEqualOrBelow(dataSeries, 0, 1)
+ testNumberOfValuesAbove(dataSeries, 10, 1)
+ testNumberOfValuesEqualOrBelow(dataSeries, 10, 2)
+ testSum(dataSeries, 15)
+ testAverage(dataSeries, 5)
+ testWeighedAverage(dataSeries, 6.6)
+ testMedian(dataSeries, 6)
+ testMinimum(dataSeries, -3)
+ testMaximum(dataSeries, 12)
+})
+
+test('Series behaviour pushing out the min and max value and forcing a recalculate of min/max via the array.', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 1)
+ dataSeries.push(3, 1)
+ dataSeries.push(6, 1)
+ testLength(dataSeries, 3)
+ testMinimum(dataSeries, 3)
+ testMaximum(dataSeries, 9)
+ dataSeries.push(6, 1)
+ testLength(dataSeries, 3)
+ testMinimum(dataSeries, 3)
+ testMaximum(dataSeries, 6)
+ dataSeries.push(6, 1)
+ testLength(dataSeries, 3)
+ testMinimum(dataSeries, 6)
+ testMaximum(dataSeries, 6)
+})
+
+test('Series behaviour pushing out the min and max value, replacing them just in time.', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 1)
+ dataSeries.push(3, 1)
+ dataSeries.push(6, 1)
+ testMinimum(dataSeries, 3)
+ testMaximum(dataSeries, 9)
+ dataSeries.push(12, 1)
+ testMinimum(dataSeries, 3)
+ testMaximum(dataSeries, 12)
+ dataSeries.push(1, 1)
+ testMinimum(dataSeries, 1)
+ testMaximum(dataSeries, 12)
+})
+
+test('Series behaviour with a five pushed values followed by a reset, Series = []', () => {
+ const dataSeries = createWeighedSeries(3, undefined)
+ dataSeries.push(9, 1)
+ dataSeries.push(3, 1)
+ dataSeries.push(6, 1)
+ dataSeries.push(12, 1)
+ dataSeries.push(-3, 1)
+ dataSeries.reset()
+ testLength(dataSeries, 0)
+ testatSeriesBegin(dataSeries, 0)
+ testAtSeriesEnd(dataSeries, 0)
+ testNumberOfValuesAbove(dataSeries, 0, 0)
+ testNumberOfValuesEqualOrBelow(dataSeries, 0, 0)
+ testNumberOfValuesAbove(dataSeries, 10, 0)
+ testNumberOfValuesEqualOrBelow(dataSeries, 10, 0)
+ testSum(dataSeries, 0)
+ testAverage(dataSeries, undefined)
+ testWeighedAverage(dataSeries, undefined)
+ testMedian(dataSeries, 0)
+})
+
+function testLength (series, expectedValue) {
+ assert.ok(series.length() === expectedValue, `Expected length should be ${expectedValue}, encountered ${series.length()}`)
+}
+
+function testatSeriesBegin (series, expectedValue) {
+ assert.ok(series.atSeriesBegin() === expectedValue, `Expected atSeriesBegin to be ${expectedValue}, encountered ${series.atSeriesBegin()}`)
+}
+
+function testAtSeriesEnd (series, expectedValue) {
+ assert.ok(series.atSeriesEnd() === expectedValue, `Expected atSeriesEnd to be ${expectedValue}, encountered ${series.atSeriesEnd()}`)
+}
+
+function testNumberOfValuesAbove (series, cutoff, expectedValue) {
+ assert.ok(series.numberOfValuesAbove(cutoff) === expectedValue, `Expected numberOfValuesAbove(${cutoff}) to be ${expectedValue}, encountered ${series.numberOfValuesAbove(cutoff)}`)
+}
+
+function testNumberOfValuesEqualOrBelow (series, cutoff, expectedValue) {
+ assert.ok(series.numberOfValuesEqualOrBelow(cutoff) === expectedValue, `Expected numberOfValuesEqualOrBelow(${cutoff}) to be ${expectedValue}, encountered ${series.numberOfValuesEqualOrBelow(cutoff)}`)
+}
+
+function testSum (series, expectedValue) {
+ assert.ok(series.sum() === expectedValue, `Expected sum to be ${expectedValue}, encountered ${series.sum()}`)
+}
+
+function testAverage (series, expectedValue) {
+ assert.ok(series.average() === expectedValue, `Expected average to be ${expectedValue}, encountered ${series.average()}`)
+}
+
+function testWeighedAverage (series, expectedValue) {
+ assert.ok(series.weighedAverage() === expectedValue, `Expected weighedAverage to be ${expectedValue}, encountered ${series.weighedAverage()}`)
+}
+
+function testMedian (series, expectedValue) {
+ assert.ok(series.median() === expectedValue, `Expected median to be ${expectedValue}, encountered ${series.median()}`)
+}
+
+function testMinimum (series, expectedValue) {
+ assert.ok(series.minimum() === expectedValue, `Expected minimum to be ${expectedValue}, encountered ${series.minimum()}`)
+}
+
+function testMaximum (series, expectedValue) {
+ assert.ok(series.maximum() === expectedValue, `Expected maximum to be ${expectedValue}, encountered ${series.maximum()}`)
+}
+
+test.run()
diff --git a/app/engine/utils/workoutSegment.js b/app/engine/utils/workoutSegment.js
index ce62d82665..cbd9236f07 100644
--- a/app/engine/utils/workoutSegment.js
+++ b/app/engine/utils/workoutSegment.js
@@ -7,14 +7,15 @@
* @see {@link https://github.com/JaapvanEkris/openrowingmonitor/blob/main/docs/Architecture.md#session-interval-and-split-boundaries-in-sessionmanagerjs|the description of the concepts used}
*/
/* eslint-disable max-lines -- This contains a lot of defensive programming, so it is long */
-import { createOLSLinearSeries } from './OLSLinearSeries.js'
+import { createWLSLinearSeries } from './WLSLinearSeries.js'
import { createSeries } from './Series.js'
import loglevel from 'loglevel'
const log = loglevel.getLogger('RowingEngine')
export function createWorkoutSegment (config) {
const numOfDataPointsForAveraging = config.numOfPhasesForAveragingScreenData
- const distanceOverTime = createOLSLinearSeries(Math.min(4, numOfDataPointsForAveraging))
+ const distanceOverTime = createWLSLinearSeries(Math.min(4, numOfDataPointsForAveraging))
+ const caloriesOverTime = createWLSLinearSeries(Math.min(4, numOfDataPointsForAveraging))
const _power = createSeries()
const _linearVelocity = createSeries()
const _strokerate = createSeries()
@@ -29,8 +30,10 @@ export function createWorkoutSegment (config) {
let _startCalories = 0
let _targetTime = 0
let _targetDistance = 0
+ let _targetCalories = 0
let _endMovingTime = 0
let _endLinearDistance = 0
+ let _endCalories = 0
let _totalNumberIntervals = 0
let _split = {
type: 'justrow',
@@ -62,6 +65,7 @@ export function createWorkoutSegment (config) {
let intervalNumber = 0
let totalDistance = 0
let totalTime = 0
+ let totalCalories = 0
let containsJustRow = false
_totalNumberIntervals = Math.max(intervals.length, 1)
switch (true) {
@@ -83,6 +87,9 @@ export function createWorkoutSegment (config) {
case (intervals[intervalNumber].type === 'time' && intervals[intervalNumber].targetTime > 0):
totalTime = totalTime + Number(intervals[intervalNumber].targetTime)
break
+ case (intervals[intervalNumber].type === 'calories' && intervals[intervalNumber].targetCalories > 0):
+ totalCalories = totalCalories + intervals[intervalNumber].targetCalories
+ break
case (intervals[intervalNumber].type === 'justrow'):
containsJustRow = true
break
@@ -95,14 +102,14 @@ export function createWorkoutSegment (config) {
case (containsJustRow):
setEnd({ type: 'justrow' })
break
- case (totalDistance > 0 && totalTime === 0):
+ case (totalDistance > 0 && totalTime === 0 && totalCalories === 0):
setEnd({ type: 'distance', targetDistance: totalDistance })
break
- case (totalTime > 0 && totalDistance === 0):
+ case (totalTime > 0 && totalDistance === 0 && totalCalories === 0):
setEnd({ type: 'time', targetTime: totalTime })
break
- case (totalTime > 0 && totalDistance > 0):
- setEnd({ type: 'justrow' })
+ case (totalCalories > 0 && totalTime === 0 && totalDistance === 0):
+ setEnd({ type: 'calories', targetCalories: totalCalories })
break
default:
setEnd({ type: 'justrow' })
@@ -124,8 +131,10 @@ export function createWorkoutSegment (config) {
_type = 'rest'
_targetTime = Number(intervalSettings.targetTime)
_targetDistance = 0
+ _targetCalories = 0
_endMovingTime = _startMovingTime + Number(intervalSettings.targetTime)
_endLinearDistance = 0
+ _endCalories = 0
log.debug(` Workout parser, recognised ${_type} interval/split, ${_targetTime} seconds`)
break
case (intervalSettings.type === 'rest'):
@@ -133,8 +142,10 @@ export function createWorkoutSegment (config) {
_type = 'rest'
_targetTime = 0
_targetDistance = 0
+ _targetCalories = 0
_endMovingTime = _startMovingTime
_endLinearDistance = 0
+ _endCalories = 0
log.debug(` Workout parser, recognised undetermined ${_type} interval`)
break
case (intervalSettings.type === 'distance' && Number(intervalSettings.targetDistance) > 0):
@@ -142,8 +153,10 @@ export function createWorkoutSegment (config) {
_type = 'distance'
_targetTime = 0
_targetDistance = Number(intervalSettings.targetDistance)
+ _targetCalories = 0
_endMovingTime = 0
_endLinearDistance = _startLinearDistance + Number(intervalSettings.targetDistance)
+ _endCalories = 0
log.debug(` Workout parser, recognised ${_type} interval/split, ${_targetDistance} meters`)
break
case (intervalSettings.type === 'time' && Number(intervalSettings.targetTime) > 0):
@@ -151,16 +164,31 @@ export function createWorkoutSegment (config) {
_type = 'time'
_targetTime = Number(intervalSettings.targetTime)
_targetDistance = 0
+ _targetCalories = 0
_endMovingTime = _startMovingTime + Number(intervalSettings.targetTime)
_endLinearDistance = 0
+ _endCalories = 0
log.debug(` Workout parser, recognised ${_type} interval/split, ${_targetTime} seconds`)
break
+ case (intervalSettings.type === 'calories' && Number(intervalSettings.targetCalories) > 0):
+ // A target number of calories is set
+ _type = 'calories'
+ _targetTime = 0
+ _targetDistance = 0
+ _targetCalories = Number(intervalSettings.targetCalories)
+ _endMovingTime = 0
+ _endLinearDistance = 0
+ _endCalories = _startCalories + Number(intervalSettings.targetCalories)
+ log.debug(` Workout parser, recognised ${_type} interval/split, ${_targetCalories} kCal`)
+ break
case (intervalSettings.type === 'justrow'):
_type = 'justrow'
_targetTime = 0
_targetDistance = 0
+ _targetCalories = 0
_endMovingTime = 0
_endLinearDistance = 0
+ _endCalories = 0
log.debug(` Workout parser, recognised ${_type} interval/split`)
break
default:
@@ -168,8 +196,10 @@ export function createWorkoutSegment (config) {
_type = 'justrow'
_targetTime = 0
_targetDistance = 0
+ _targetCalories = 0
_endMovingTime = 0
_endLinearDistance = 0
+ _endCalories = 0
}
// Set the split parameters
@@ -179,7 +209,8 @@ export function createWorkoutSegment (config) {
_split = {
type: 'rest',
targetDistance: 0,
- targetTime: _targetTime
+ targetTime: _targetTime,
+ targetCalories: 0
}
break
case (!!intervalSettings.split && intervalSettings.split !== undefined && intervalSettings.split.type === 'distance' && Number(intervalSettings.split.targetDistance) > 0):
@@ -187,7 +218,8 @@ export function createWorkoutSegment (config) {
_split = {
type: 'distance',
targetDistance: Number(intervalSettings.split.targetDistance),
- targetTime: 0
+ targetTime: 0,
+ targetCalories: 0
}
break
case (!!intervalSettings.split && intervalSettings.split !== undefined && intervalSettings.split.type === 'time' && Number(intervalSettings.split.targetTime) > 0):
@@ -195,14 +227,25 @@ export function createWorkoutSegment (config) {
_split = {
type: 'time',
targetDistance: 0,
- targetTime: Number(intervalSettings.split.targetTime)
+ targetTime: Number(intervalSettings.split.targetTime),
+ targetCalories: 0
+ }
+ break
+ case (!!intervalSettings.split && intervalSettings.split !== undefined && intervalSettings.split.type === 'calories' && Number(intervalSettings.split.targetCalories) > 0):
+ // A calorie based split target is set
+ _split = {
+ type: 'calories',
+ targetDistance: 0,
+ targetTime: 0,
+ targetCalories: Number(intervalSettings.split.targetCalories)
}
break
case (!!intervalSettings.split && intervalSettings.split !== undefined && intervalSettings.split.type === 'justrow'):
_split = {
type: _type,
targetDistance: _targetDistance,
- targetTime: _targetTime
+ targetTime: _targetTime,
+ targetCalories: _targetCalories
}
break
case (!intervalSettings.split):
@@ -210,7 +253,8 @@ export function createWorkoutSegment (config) {
_split = {
type: _type,
targetDistance: _targetDistance,
- targetTime: _targetTime
+ targetTime: _targetTime,
+ targetCalories: _targetCalories
}
break
default:
@@ -218,174 +262,94 @@ export function createWorkoutSegment (config) {
_split = {
type: _type,
targetDistance: _targetDistance,
- targetTime: _targetTime
+ targetTime: _targetTime,
+ targetCalories: _targetCalories
}
}
}
/**
- * Updates projectiondata and segment metrics
- */
- function push (baseMetrics) {
- distanceOverTime.push(baseMetrics.totalMovingTime, baseMetrics.totalLinearDistance)
- if (!!baseMetrics.cyclePower && !isNaN(baseMetrics.cyclePower) && baseMetrics.cyclePower > 0) { _power.push(baseMetrics.cyclePower) }
- if (!!baseMetrics.cycleLinearVelocity && !isNaN(baseMetrics.cycleLinearVelocity) && baseMetrics.cycleLinearVelocity > 0) { _linearVelocity.push(baseMetrics.cycleLinearVelocity) }
- if (!!baseMetrics.cycleStrokeRate && !isNaN(baseMetrics.cycleStrokeRate) && baseMetrics.cycleStrokeRate > 0) { _strokerate.push(baseMetrics.cycleStrokeRate) }
- if (!!baseMetrics.cycleDistance && !isNaN(baseMetrics.cycleDistance) && baseMetrics.cycleDistance > 0) { _strokedistance.push(baseMetrics.cycleDistance) }
- if (!!baseMetrics.totalCaloriesPerHour && !isNaN(baseMetrics.totalCaloriesPerHour) && baseMetrics.totalCaloriesPerHour > 0) { _caloriesPerHour.push(baseMetrics.totalCaloriesPerHour) }
- if (!!baseMetrics.dragFactor && !isNaN(baseMetrics.dragFactor) && baseMetrics.dragFactor > 0) { _dragFactor.push(baseMetrics.dragFactor) }
- }
-
- /**
- * @returns {float} the distance from te start of the workoutsegment
+ * @returns {string} the type of the workoutSegment
*/
- function distanceFromStart (baseMetrics) {
- if (!isNaN(_startLinearDistance) && _startLinearDistance >= 0 && !isNaN(baseMetrics.totalLinearDistance) && baseMetrics.totalLinearDistance > _startLinearDistance) {
- return baseMetrics.totalLinearDistance - _startLinearDistance
- } else {
- return 0
- }
- }
-
- /**
- * @returns {float} the remaining distance to the end of the workoutsegment
- */
- function distanceToEnd (baseMetrics) {
- if (_type === 'distance' && _endLinearDistance > 0) {
- // We have set a distance boundary
- return _endLinearDistance - baseMetrics.totalLinearDistance
- } else {
- return undefined
- }
+ function type () {
+ return _type
}
- /**
- * @returns {float} the moving time since the start of the workoutsegment
+ /**
+ * This function returns the underlying split of a workoutsegment (typically an interval)
*/
- function timeSinceStart (baseMetrics) {
- if (!isNaN(_startMovingTime) && _startMovingTime >= 0 && !isNaN(baseMetrics.totalMovingTime) && baseMetrics.totalMovingTime > _startMovingTime) {
- return baseMetrics.totalMovingTime - _startMovingTime
- } else {
- return 0
- }
+ function getSplit () {
+ return _split
}
/**
- * @returns {float} the projected time to the end of the workoutsegment
+ * @returns {boolean} If the boundary of the planned segment has been reached
*/
- function projectedEndTime () {
- switch (true) {
- case (_type === 'distance' && _endLinearDistance > 0 && distanceOverTime.reliable()):
- // We are in a distance based interval, so we need to project
- return (distanceOverTime.projectY(_endLinearDistance) - _startMovingTime)
- case (_type === 'time' && _endMovingTime > 0):
- return _targetTime
+ function isEndReached (baseMetrics) {
+ switch (_type) {
+ case 'distance':
+ if (_endLinearDistance > 0 && baseMetrics.totalLinearDistance >= _endLinearDistance) {
+ return true
+ } else {
+ return false
+ }
+ case 'time':
+ if (_endMovingTime > 0 && baseMetrics.totalMovingTime >= _endMovingTime) {
+ return true
+ } else {
+ return false
+ }
+ case 'calories':
+ if (_endCalories > 0 && baseMetrics.totalCalories >= _endCalories) {
+ return true
+ } else {
+ return false
+ }
default:
- return undefined
+ return false
}
}
/**
- * @returns {float} the projected time to the end of the workoutsegment
+ * This function returns the remaining split (used for managing unplanned pauses)
*/
- function projectedEndDistance () {
- switch (true) {
- case (_type === 'distance' && _endLinearDistance > 0):
- return _targetDistance
- case (_type === 'time' && _endMovingTime > 0 && distanceOverTime.reliable()):
- // We are in a time based interval, so we need to project
- return (distanceOverTime.projectX(_endMovingTime) - _startLinearDistance)
+ function remainder (baseMetrics) {
+ switch (_type) {
+ case ('distance'):
+ return {
+ type: _type,
+ targetDistance: distanceToEnd(baseMetrics)
+ }
+ case ('time'):
+ return {
+ type: _type,
+ targetTime: timeToEnd(baseMetrics)
+ }
+ case ('calories'):
+ return {
+ type: _type,
+ targetCalories: caloriesToEnd(baseMetrics)
+ }
default:
- return undefined
- }
- }
-
- /**
- * @returns {float} the remaining time to the end of the workoutsegment
- */
- function timeToEnd (baseMetrics) {
- if ((_type === 'time' || _type === 'rest') && _endMovingTime > 0) {
- // We are in a time based interval
- return _endMovingTime - baseMetrics.totalMovingTime
- } else {
- return undefined
- }
- }
-
- /**
- * @returns {float} the total time since start of the workoutsegment
- */
- function totalTime (baseMetrics) {
- if (!isNaN(_startTimestamp) && _startTimestamp >= 0 && !isNaN(baseMetrics.timestamp) && baseMetrics.timestamp > _startTimestamp) {
- return Math.max((baseMetrics.timestamp.getTime() - _startTimestamp.getTime()) / 1000, (baseMetrics.totalMovingTime - _startMovingTime))
- } else {
- return 0
- }
- }
-
- /**
- * @returns {float} the time spent not moving since start of the workoutsegment
- */
- function restTime (baseMetrics) {
- if (!isNaN(_startMovingTime) && !isNaN(_startTimestamp) && _startTimestamp >= 0 && !isNaN(baseMetrics.totalMovingTime) && !isNaN(baseMetrics.timestamp) && baseMetrics.timestamp > _startTimestamp) {
- return (Math.max(baseMetrics.timestamp.getTime() - _startTimestamp.getTime(), 0) / 1000) - Math.max(baseMetrics.totalMovingTime - _startMovingTime, 0)
- } else {
- return 0
- }
- }
-
- /**
- * @returns {float} the time spent not moving since the start of the workoutsgment
- */
- function averageLinearVelocity (baseMetrics) {
- if (!isNaN(_startMovingTime) && _startMovingTime >= 0 && !isNaN(_startLinearDistance) && _startLinearDistance >= 0 && !isNaN(baseMetrics.totalMovingTime) && baseMetrics.totalMovingTime > _startMovingTime && !isNaN(baseMetrics.totalLinearDistance) && baseMetrics.totalLinearDistance > _startLinearDistance) {
- return (baseMetrics.totalLinearDistance - _startLinearDistance) / (baseMetrics.totalMovingTime - _startMovingTime)
- } else {
- return _linearVelocity.average()
- }
- }
-
- /**
- * @param {float} linear velocity
- * @returns {float} pace per 500 meters
- */
- function linearVelocityToPace (linearVel) {
- if (!isNaN(linearVel) && linearVel > 0) {
- return (500.0 / linearVel)
- } else {
- return Infinity
- }
- }
-
- /**
- * @returns {number} the number of strokes since the start of the segment
- */
- function numberOfStrokes (baseMetrics) {
- if (!isNaN(_startStrokeNumber) && _startStrokeNumber >= 0 && !isNaN(baseMetrics.totalNumberOfStrokes) && baseMetrics.totalNumberOfStrokes > _startStrokeNumber) {
- return baseMetrics.totalNumberOfStrokes - _startStrokeNumber
- } else {
- return 0
- }
- }
-
- function spentCalories (baseMetrics) {
- if (!isNaN(_startCalories) && _startCalories >= 0 && !isNaN(baseMetrics.totalCalories) && baseMetrics.totalCalories > _startCalories) {
- return baseMetrics.totalCalories - _startCalories
- } else {
- return 0
+ return {
+ type: _type,
+ targetTime: 0
+ }
}
}
/**
- * @returns {boolean} If the boundary of the planned segment has been reached
+ * Updates projectiondata and segment metrics
*/
- function isEndReached (baseMetrics) {
- if ((_type === 'distance' && _endLinearDistance > 0 && baseMetrics.totalLinearDistance >= _endLinearDistance) || (_type === 'time' && _endMovingTime > 0 && baseMetrics.totalMovingTime >= _endMovingTime)) {
- // We have exceeded the boundary
- return true
- } else {
- return false
- }
+ function push (baseMetrics) {
+ distanceOverTime.push(baseMetrics.totalMovingTime, baseMetrics.totalLinearDistance, 1)
+ caloriesOverTime.push(baseMetrics.totalMovingTime, baseMetrics.totalCalories, 1)
+ if (!!baseMetrics.cyclePower && !isNaN(baseMetrics.cyclePower) && baseMetrics.cyclePower > 0) { _power.push(baseMetrics.cyclePower) }
+ if (!!baseMetrics.cycleLinearVelocity && !isNaN(baseMetrics.cycleLinearVelocity) && baseMetrics.cycleLinearVelocity > 0) { _linearVelocity.push(baseMetrics.cycleLinearVelocity) }
+ if (!!baseMetrics.cycleStrokeRate && !isNaN(baseMetrics.cycleStrokeRate) && baseMetrics.cycleStrokeRate > 0) { _strokerate.push(baseMetrics.cycleStrokeRate) }
+ if (!!baseMetrics.cycleDistance && !isNaN(baseMetrics.cycleDistance) && baseMetrics.cycleDistance > 0) { _strokedistance.push(baseMetrics.cycleDistance) }
+ if (!!baseMetrics.totalCaloriesPerHour && !isNaN(baseMetrics.totalCaloriesPerHour) && baseMetrics.totalCaloriesPerHour > 0) { _caloriesPerHour.push(baseMetrics.totalCaloriesPerHour) }
+ if (!!baseMetrics.dragFactor && !isNaN(baseMetrics.dragFactor) && baseMetrics.dragFactor > 0) { _dragFactor.push(baseMetrics.dragFactor) }
}
/*
@@ -398,16 +362,22 @@ export function createWorkoutSegment (config) {
case (_type === 'distance' && _endLinearDistance > 0 && currMetrics.totalLinearDistance > _endLinearDistance):
// We are in a distance based interval, and overshot the targetDistance
projectedMetrics.totalMovingTime = interpolatedTime(prevMetrics, currMetrics, _endLinearDistance)
- projectedMetrics.timestamp = new Date(currMetrics.timestamp.getTime() - ((currMetrics.totalMovingTime - projectedMetrics.totalMovingTime) * 1000))
projectedMetrics.totalLinearDistance = _endLinearDistance
- projectedMetrics.timestamp = currMetrics.timestamp - ((currMetrics.totalMovingTime - projectedMetrics.totalMovingTime) * 1000)
+ projectedMetrics.totalCalories = interpolatedCaloriesFromTime(prevMetrics, currMetrics, projectedMetrics.totalMovingTime)
projectedMetrics.modified = true
break
case (_type === 'time' && _endMovingTime > 0 && currMetrics.totalMovingTime > _endMovingTime):
// We are in a time based interval, and overshot the targetTime
projectedMetrics.totalLinearDistance = interpolatedDistance(prevMetrics, currMetrics, _endMovingTime)
projectedMetrics.totalMovingTime = _endMovingTime
- projectedMetrics.timestamp = new Date(_startTimestamp.getTime() + (_targetTime * 1000))
+ projectedMetrics.totalCalories = interpolatedCaloriesFromTime(prevMetrics, currMetrics, _endMovingTime)
+ projectedMetrics.modified = true
+ break
+ case (_type === 'calories' && _endCalories > 0 && currMetrics.totalCalories > _endCalories):
+ // We are in a calorie based interval, and overshot the targetCalories
+ projectedMetrics.totalCalories = _endCalories
+ projectedMetrics.totalMovingTime = interpolatedTimeFromCalories(prevMetrics, currMetrics, _endCalories)
+ projectedMetrics.totalLinearDistance = interpolatedDistance(prevMetrics, currMetrics, projectedMetrics.totalMovingTime)
projectedMetrics.modified = true
break
default:
@@ -426,49 +396,59 @@ export function createWorkoutSegment (config) {
return projectedMetrics
}
+ /**
+ * This function is used to precisely calculate the end time of a workout segment based on a target distance
+ * @see {@link https://en.wikipedia.org/wiki/Linear_interpolation|the math behind interpolation}
+ * @returns {float} the exact time where the distance barrier was crossed
+ */
function interpolatedTime (prevMetrics, currMetrics, targetDistance) {
if (prevMetrics.totalLinearDistance < targetDistance && targetDistance < currMetrics.totalLinearDistance) {
- // See https://en.wikipedia.org/wiki/Linear_interpolation
return (prevMetrics.totalMovingTime + ((currMetrics.totalMovingTime - prevMetrics.totalMovingTime) * ((targetDistance - prevMetrics.totalLinearDistance) / (currMetrics.totalLinearDistance - prevMetrics.totalLinearDistance))))
} else {
return currMetrics.totalMovingTime
}
}
+ /**
+ * This function is used to precisely calculate the end distance of a workout segment based on a target time
+ * @see {@link https://en.wikipedia.org/wiki/Linear_interpolation|the math behind interpolation}
+ * @returns {float} the exact distance where the time barrier was crossed
+ */
function interpolatedDistance (prevMetrics, currMetrics, targetTime) {
if (prevMetrics.totalMovingTime < targetTime && targetTime < currMetrics.totalMovingTime) {
- // See https://en.wikipedia.org/wiki/Linear_interpolation
return (prevMetrics.totalLinearDistance + ((currMetrics.totalLinearDistance - prevMetrics.totalLinearDistance) * ((targetTime - prevMetrics.totalMovingTime) / (currMetrics.totalMovingTime - prevMetrics.totalMovingTime))))
} else {
return currMetrics.totalLinearDistance
}
}
- function getSplit () {
- return _split
- }
-
- function targetDistance () {
- if (_type === 'distance' && _endLinearDistance > 0) {
- return _targetDistance
+ /**
+ * This function is used to precisely calculate the end time of a workout segment based on a target calorie count
+ * @see {@link https://en.wikipedia.org/wiki/Linear_interpolation|the math behind interpolation}
+ * @returns {float} the exact time where the calories barrier was crossed
+ */
+ function interpolatedTimeFromCalories (prevMetrics, currMetrics, targetCalories) {
+ if (prevMetrics.totalCalories < targetCalories && targetCalories < currMetrics.totalCalories) {
+ return (prevMetrics.totalMovingTime + ((currMetrics.totalMovingTime - prevMetrics.totalMovingTime) * ((targetCalories - prevMetrics.totalCalories) / (currMetrics.totalCalories - prevMetrics.totalCalories))))
} else {
- return undefined
+ return currMetrics.totalMovingTime
}
}
- function targetTime () {
- if (_type === 'time' && _endMovingTime > 0) {
- // We have a distance boundary
- return _targetTime
+ /**
+ * This function is used to precisely calculate the end calories of a workout segment based on a target time
+ * @see {@link https://en.wikipedia.org/wiki/Linear_interpolation|the math behind interpolation}
+ * @returns {float} the exact calories where the time barrier was crossed
+ */
+ function interpolatedCaloriesFromTime (prevMetrics, currMetrics, targetTime) {
+ if (prevMetrics.totalMovingTime < targetTime && targetTime < currMetrics.totalMovingTime) {
+ // See https://en.wikipedia.org/wiki/Linear_interpolation
+ return (prevMetrics.totalCalories + ((currMetrics.totalCalories - prevMetrics.totalCalories) * ((targetTime - prevMetrics.totalMovingTime) / (currMetrics.totalMovingTime - prevMetrics.totalMovingTime))))
} else {
- return undefined
+ return currMetrics.totalCalories
}
}
- function type () {
- return _type
- }
-
/**
* This function returns all the workoutSegment metrics for the current workoutSegment
*/
@@ -527,32 +507,219 @@ export function createWorkoutSegment (config) {
maximum: _dragFactor.maximum()
},
calories: {
+ absoluteStart: _startCalories,
+ sinceStart: spentCalories(baseMetrics),
+ target: _targetCalories,
+ toEnd: caloriesToEnd(baseMetrics),
totalSpent: spentCalories(baseMetrics),
averagePerHour: _caloriesPerHour.average()
+ },
+ caloriesSpent: {
+ total: totalCalories(baseMetrics),
+ moving: spentCalories(baseMetrics),
+ rest: restCalories(baseMetrics)
}
}
}
/**
- * This function returns the remaining split (used for managing unplanned pausesremainder (baseMetrics)
+ * @returns {number} the number of strokes since the start of the segment
*/
- function remainder (baseMetrics) {
- switch (_type) {
- case ('distance'):
- return {
- type: _type,
- targetDistance: distanceToEnd(baseMetrics)
- }
- case ('time'):
- return {
- type: _type,
- targetTime: timeToEnd(baseMetrics)
- }
+ function numberOfStrokes (baseMetrics) {
+ if (!isNaN(_startStrokeNumber) && _startStrokeNumber >= 0 && !isNaN(baseMetrics.totalNumberOfStrokes) && baseMetrics.totalNumberOfStrokes > _startStrokeNumber) {
+ return baseMetrics.totalNumberOfStrokes - _startStrokeNumber
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @returns {float} the distance from the start of the workoutsegment
+ */
+ function distanceFromStart (baseMetrics) {
+ if (!isNaN(_startLinearDistance) && _startLinearDistance >= 0 && !isNaN(baseMetrics.totalLinearDistance) && baseMetrics.totalLinearDistance > _startLinearDistance) {
+ return baseMetrics.totalLinearDistance - _startLinearDistance
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @returns {float} the target distance for this workoutsegment from the workout plan (only if type === 'distance')
+ */
+ function targetDistance () {
+ if (_type === 'distance' && _endLinearDistance > 0) {
+ return _targetDistance
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @returns {float} the remaining distance to the end of the workoutsegment
+ */
+ function distanceToEnd (baseMetrics) {
+ if (_type === 'distance' && _endLinearDistance > 0) {
+ // We have set a distance boundary
+ return _endLinearDistance - baseMetrics.totalLinearDistance
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @returns {float} the projected distance at the end of the workoutsegment (the planned target if type === 'distance', otherwise a projection)
+ */
+ function projectedEndDistance () {
+ switch (true) {
+ case (_type === 'distance' && _endLinearDistance > 0):
+ return _targetDistance
+ case (_type === 'time' && _endMovingTime > 0 && distanceOverTime.reliable()):
+ // We are in a time based interval, so we need to project
+ return (distanceOverTime.projectX(_endMovingTime) - _startLinearDistance)
+ case (_type === 'calories' && _endCalories > 0 && distanceOverTime.reliable() && caloriesOverTime.reliable()):
+ return (distanceOverTime.projectX(caloriesOverTime.projectY(_endCalories)) - _startLinearDistance)
default:
- return {
- type: _type,
- targetTime: 0
- }
+ return undefined
+ }
+ }
+
+ /**
+ * @returns {float} the moving time since the start of the workoutsegment
+ */
+ function timeSinceStart (baseMetrics) {
+ if (!isNaN(_startMovingTime) && _startMovingTime >= 0 && !isNaN(baseMetrics.totalMovingTime) && baseMetrics.totalMovingTime > _startMovingTime) {
+ return baseMetrics.totalMovingTime - _startMovingTime
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @returns {float} the target time for this workoutsegment from the workout plan (only if type === 'time')
+ */
+ function targetTime () {
+ if (_type === 'time' && _endMovingTime > 0) {
+ // We have a time boundary
+ return _targetTime
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @returns {float} the remaining time to the end of the workoutsegment
+ */
+ function timeToEnd (baseMetrics) {
+ if ((_type === 'time' || _type === 'rest') && _endMovingTime > 0) {
+ // We are in a time based interval
+ return _endMovingTime - baseMetrics.totalMovingTime
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @returns {float} the projected time to the end of the workoutsegment (the planned target if type === 'time', otherwise a projection)
+ */
+ function projectedEndTime () {
+ switch (true) {
+ case (_type === 'distance' && _endLinearDistance > 0 && distanceOverTime.reliable()):
+ // We are in a distance based interval, so we need to project
+ return (distanceOverTime.projectY(_endLinearDistance) - _startMovingTime)
+ case (_type === 'time' && _endMovingTime > 0):
+ return _targetTime
+ case (_type === 'calories' && _endCalories > 0 && caloriesOverTime.reliable()):
+ return (caloriesOverTime.projectY(_endCalories) - _startMovingTime)
+ default:
+ return undefined
+ }
+ }
+
+ /**
+ * @returns {float} the total time since start of the workoutsegment
+ */
+ function totalTime (baseMetrics) {
+ if (!isNaN(_startTimestamp) && _startTimestamp >= 0 && !isNaN(baseMetrics.timestamp) && baseMetrics.timestamp > _startTimestamp) {
+ return Math.max((baseMetrics.timestamp.getTime() - _startTimestamp.getTime()) / 1000, (baseMetrics.totalMovingTime - _startMovingTime))
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @returns {float} the time spent not moving since start of the workoutsegment
+ */
+ function restTime (baseMetrics) {
+ if (!isNaN(_startMovingTime) && !isNaN(_startTimestamp) && _startTimestamp >= 0 && !isNaN(baseMetrics.totalMovingTime) && !isNaN(baseMetrics.timestamp) && baseMetrics.timestamp > _startTimestamp) {
+ return (Math.max(baseMetrics.timestamp.getTime() - _startTimestamp.getTime(), 0) / 1000) - Math.max(baseMetrics.totalMovingTime - _startMovingTime, 0)
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @returns {float} average linear velocity since the start of the workoutsegment
+ */
+ function averageLinearVelocity (baseMetrics) {
+ if (!isNaN(_startMovingTime) && _startMovingTime >= 0 && !isNaN(_startLinearDistance) && _startLinearDistance >= 0 && !isNaN(baseMetrics.totalMovingTime) && baseMetrics.totalMovingTime > _startMovingTime && !isNaN(baseMetrics.totalLinearDistance) && baseMetrics.totalLinearDistance > _startLinearDistance) {
+ return (baseMetrics.totalLinearDistance - _startLinearDistance) / (baseMetrics.totalMovingTime - _startMovingTime)
+ } else {
+ return _linearVelocity.average()
+ }
+ }
+
+ /**
+ * @param {float} linearVel the linear velocity
+ * @returns {float} pace per 500 meters
+ */
+ function linearVelocityToPace (linearVel) {
+ if (!isNaN(linearVel) && linearVel > 0) {
+ return (500.0 / linearVel)
+ } else {
+ return Infinity
+ }
+ }
+
+ /**
+ * @returns {float} the calories spent moving since the start of the workoutsegment (i.e. active calories)
+ */
+ function spentCalories (baseMetrics) {
+ if (!isNaN(_startCalories) && _startCalories >= 0 && !isNaN(baseMetrics.totalCalories) && baseMetrics.totalCalories > _startCalories) {
+ return baseMetrics.totalCalories - _startCalories
+ } else {
+ return 0
+ }
+ }
+
+ /**
+ * @returns {float} the remaining calories to the end of the workoutsegment
+ */
+ function caloriesToEnd (baseMetrics) {
+ if (_type === 'calories' && _endCalories > 0) {
+ // We are in a calories based interval
+ return _endCalories - baseMetrics.totalCalories
+ } else {
+ return undefined
+ }
+ }
+
+ /**
+ * @returns {float} the total calories since the start (active + rest) of the workoutSegment
+ */
+ function totalCalories (baseMetrics) {
+ return spentCalories(baseMetrics) + restCalories(baseMetrics)
+ }
+
+ /**
+ * @returns {float} the rest calories since the start of the workoutSegment
+ * Based on crude Basal Metabolic Rates, see https://my.clevelandclinic.org/health/body/basal-metabolic-rate-bmr
+ */
+ function restCalories (baseMetrics) {
+ if (config.userSettings.sex === 'male') {
+ return 0.0196296296296296 * restTime(baseMetrics)
+ } else {
+ return 0.0163194444444444 * restTime(baseMetrics)
}
}
@@ -589,6 +756,7 @@ export function createWorkoutSegment (config) {
function reset () {
resetSegmentMetrics()
distanceOverTime.reset()
+ caloriesOverTime.reset()
}
return {
diff --git a/app/engine/utils/workoutSegment.test.js b/app/engine/utils/workoutSegment.test.js
index bdc193f89e..9f935b4487 100644
--- a/app/engine/utils/workoutSegment.test.js
+++ b/app/engine/utils/workoutSegment.test.js
@@ -11,7 +11,10 @@ import * as assert from 'uvu/assert'
import { createWorkoutSegment } from './workoutSegment.js'
const basicConfig = {
- numOfPhasesForAveragingScreenData: 4
+ numOfPhasesForAveragingScreenData: 4,
+ userSettings: {
+ sex: 'male'
+ }
}
test('Test workoutSegment initialisation behaviour without setting an interval', () => {
diff --git a/app/peripherals/ble/common/SensorLocation.js b/app/peripherals/ble/common/SensorLocation.js
index 4a64280875..647811367d 100644
--- a/app/peripherals/ble/common/SensorLocation.js
+++ b/app/peripherals/ble/common/SensorLocation.js
@@ -5,25 +5,25 @@
import { BufferBuilder } from '../BufferBuilder.js'
export const sensorLocations =
-{
- other: 0,
- topOfShoe: 1,
- inShoe: 2,
- hip: 3,
- frontWheel: 4,
- leftCrank: 5,
- rightCrank: 6,
- leftPedal: 7,
- rightPedal: 8,
- frontHub: 9,
- rearDropout: 10,
- chainstay: 11,
- rearWheel: 12,
- rearHub: 13,
- chest: 14,
- spider: 15,
- chainRing: 16
-}
+ {
+ other: 0,
+ topOfShoe: 1,
+ inShoe: 2,
+ hip: 3,
+ frontWheel: 4,
+ leftCrank: 5,
+ rightCrank: 6,
+ leftPedal: 7,
+ rightPedal: 8,
+ frontHub: 9,
+ rearDropout: 10,
+ chainstay: 11,
+ rearWheel: 12,
+ rearHub: 13,
+ chest: 14,
+ spider: 15,
+ chainRing: 16
+ }
export const SensorLocationAsBuffer = () => {
const sensorLocationBuffer = new BufferBuilder()
diff --git a/app/peripherals/ble/cps/CyclingPowerMeterService.js b/app/peripherals/ble/cps/CyclingPowerMeterService.js
index 5b9c2721fb..183a572056 100644
--- a/app/peripherals/ble/cps/CyclingPowerMeterService.js
+++ b/app/peripherals/ble/cps/CyclingPowerMeterService.js
@@ -44,33 +44,33 @@ export class CyclingPowerService extends GattService {
}
export const cpsFeaturesFlags =
-{
- pedalPowerBalanceSupported: (0x01 << 0),
- accumulatedTorqueSupported: (0x01 << 1),
- wheelRevolutionDataSupported: (0x01 << 2),
- crankRevolutionDataSupported: (0x01 << 3),
- extremeMagnitudesSupported: (0x01 << 4),
- extremeAnglesSupported: (0x01 << 5),
- topAndBottomDeadSpotAnglesSupported: (0x01 << 6),
- accumulatedEnergySupported: (0x01 << 7),
- offsetCompensationIndicatorSupported: (0x01 << 8),
- offsetCompensationSupported: (0x01 << 9),
- cyclingPowerMeasurementCharacteristicContentMaskingSupported: (0x01 << 10),
- multipleSensorLocationsSupported: (0x01 << 11),
- crankLengthAdjustmentSupported: (0x01 << 12),
- chainLengthAdjustmentSupported: (0x01 << 13),
- chainWeightAdjustmentSupported: (0x01 << 14),
- spanLengthAdjustmentSupported: (0x01 << 15),
- sensorMeasurementContext: (0x01 << 16),
- sensorMeasurementContextForce: (0x00 << 16),
- sensorMeasurementContextTorque: (0x01 << 16),
- instantaneousMeasurementDirectionSupported: (0x01 << 17),
- factoryCalibrationDateSupported: (0x01 << 18),
- enhancedOffsetCompensationSupported: (0x01 << 19),
- distributeSystemSupportUnspecified: (0x00 << 20),
- distributeSystemSupportNotInDistributed: (0x01 << 20),
- distributeSystemSupportInDistributed: (0x02 << 20),
- distributeSystemSupportRFU: (0x03 << 20)
-}
+ {
+ pedalPowerBalanceSupported: (0x01 << 0),
+ accumulatedTorqueSupported: (0x01 << 1),
+ wheelRevolutionDataSupported: (0x01 << 2),
+ crankRevolutionDataSupported: (0x01 << 3),
+ extremeMagnitudesSupported: (0x01 << 4),
+ extremeAnglesSupported: (0x01 << 5),
+ topAndBottomDeadSpotAnglesSupported: (0x01 << 6),
+ accumulatedEnergySupported: (0x01 << 7),
+ offsetCompensationIndicatorSupported: (0x01 << 8),
+ offsetCompensationSupported: (0x01 << 9),
+ cyclingPowerMeasurementCharacteristicContentMaskingSupported: (0x01 << 10),
+ multipleSensorLocationsSupported: (0x01 << 11),
+ crankLengthAdjustmentSupported: (0x01 << 12),
+ chainLengthAdjustmentSupported: (0x01 << 13),
+ chainWeightAdjustmentSupported: (0x01 << 14),
+ spanLengthAdjustmentSupported: (0x01 << 15),
+ sensorMeasurementContext: (0x01 << 16),
+ sensorMeasurementContextForce: (0x00 << 16),
+ sensorMeasurementContextTorque: (0x01 << 16),
+ instantaneousMeasurementDirectionSupported: (0x01 << 17),
+ factoryCalibrationDateSupported: (0x01 << 18),
+ enhancedOffsetCompensationSupported: (0x01 << 19),
+ distributeSystemSupportUnspecified: (0x00 << 20),
+ distributeSystemSupportNotInDistributed: (0x01 << 20),
+ distributeSystemSupportInDistributed: (0x02 << 20),
+ distributeSystemSupportRFU: (0x03 << 20)
+ }
const featuresFlag = cpsFeaturesFlags.sensorMeasurementContextForce | cpsFeaturesFlags.wheelRevolutionDataSupported | cpsFeaturesFlags.crankRevolutionDataSupported | cpsFeaturesFlags.distributeSystemSupportNotInDistributed
diff --git a/app/peripherals/ble/csc/CscMeasurementCharacteristic.js b/app/peripherals/ble/csc/CscMeasurementCharacteristic.js
index 6d10c34d84..da1253685e 100644
--- a/app/peripherals/ble/csc/CscMeasurementCharacteristic.js
+++ b/app/peripherals/ble/csc/CscMeasurementCharacteristic.js
@@ -47,8 +47,8 @@ export class CyclingSpeedCadenceMeasurementCharacteristic extends GattNotifyChar
}
export const cscFeaturesFlags =
-{
- wheelRevolutionDataSupported: (0x01 << 0),
- crankRevolutionDataSupported: (0x01 << 1),
- multipleSensorLocationSupported: (0x01 << 2)
-}
+ {
+ wheelRevolutionDataSupported: (0x01 << 0),
+ crankRevolutionDataSupported: (0x01 << 1),
+ multipleSensorLocationSupported: (0x01 << 2)
+ }
diff --git a/app/peripherals/ble/pm5/csafe-service/CsafeManagerService.js b/app/peripherals/ble/pm5/csafe-service/CsafeManagerService.js
index 166ecf5378..c2701da1db 100644
--- a/app/peripherals/ble/pm5/csafe-service/CsafeManagerService.js
+++ b/app/peripherals/ble/pm5/csafe-service/CsafeManagerService.js
@@ -125,6 +125,22 @@ export class CsafeManagerService {
}
log.debug(`PM5 WORKOUTTYPE_FIXEDDIST_INTERVAL is mapped to 25 '${this.#workoutplan.forelastInterval().type}' intervals with ${this.#workoutplan.forelastInterval().targetDistance} meters length, followed by a ${this.#workoutplan.lastInterval().targetTime} seconds '${this.#workoutplan.lastInterval().type}' intervals`)
break
+ case (WorkoutTypes.WORKOUTTYPE_FIXEDCALS_INTERVAL):
+ response.addCommand(commands[i].command)
+ i++ // Move to the duration
+ intervalLength = commands[i].data
+ response.addCommand(commands[i].command)
+ i++ // Move to the rest specification
+ pauseLength = commands[i].data
+ response.addCommand(commands[i].command)
+ j = 0
+ while (j < 25) {
+ this.#workoutplan.addInterval('calories', intervalLength)
+ this.#workoutplan.addInterval('rest', pauseLength)
+ j++
+ }
+ log.debug(`PM5 WORKOUTTYPE_FIXEDCALS_INTERVAL is mapped to 25 '${this.#workoutplan.forelastInterval().type}' intervals with ${this.#workoutplan.forelastInterval().targetCalories} calories length, followed by a ${this.#workoutplan.lastInterval().targetTime} seconds '${this.#workoutplan.lastInterval().type}' intervals`)
+ break
default:
response.addCommand(commands[i].command)
}
@@ -137,25 +153,49 @@ export class CsafeManagerService {
response.addCommand(commands[i].command)
break
case (ProprietaryLongSetConfigCommands.CSAFE_PM_SET_WORKOUTDURATION):
- if (commandData[0] === DurationTypes.CSAFE_DISTANCE_DURATION) {
- this.#workoutplan.addInterval('distance', commands[i].data)
- response.addCommand(commands[i].command)
- log.debug(`command ${i + 1}, CSAFE_PM_SET_WORKOUTDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().type}' interval, length ${this.#workoutplan.lastInterval().targetDistance} meters`)
- } else {
- this.#workoutplan.addInterval('time', commands[i].data)
- response.addCommand(commands[i].command)
- log.debug(`command ${i + 1}, CSAFE_PM_SET_WORKOUTDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().type}' interval, duration ${this.#workoutplan.lastInterval().targetTime} seconds`)
+ switch (commandData[0]) {
+ case (DurationTypes.CSAFE_DISTANCE_DURATION):
+ this.#workoutplan.addInterval('distance', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.debug(`command ${i + 1}, CSAFE_PM_SET_WORKOUTDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().type}' interval, length ${this.#workoutplan.lastInterval().targetDistance} meters`)
+ break
+ case (DurationTypes.CSAFE_TIME_DURATION):
+ this.#workoutplan.addInterval('time', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.debug(`command ${i + 1}, CSAFE_PM_SET_WORKOUTDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().type}' interval, duration ${this.#workoutplan.lastInterval().targetTime} seconds`)
+ break
+ case (DurationTypes.CSAFE_CALORIES_DURATION):
+ this.#workoutplan.addInterval('calories', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.debug(`command ${i + 1}, CSAFE_PM_SET_WORKOUTDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().type}' interval, duration ${this.#workoutplan.lastInterval().targetCalories} calories`)
+ break
+ default:
+ this.#workoutplan.addInterval('time', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.error(`UNKNOWN command ${i + 1}, CSAFE_PM_SET_WORKOUTDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().type}' interval, duration ${this.#workoutplan.lastInterval().targetTime} seconds`)
}
break
case (ProprietaryLongSetConfigCommands.CSAFE_PM_SET_SPLITDURATION):
- if (commandData[0] === DurationTypes.CSAFE_DISTANCE_DURATION) {
- this.#workoutplan.addSplit('distance', commands[i].data)
- response.addCommand(commands[i].command)
- log.debug(`command ${i + 1}, CSAFE_PM_SET_SPLITDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().split.type}' split, length ${this.#workoutplan.lastInterval().split.targetDistance} meters`)
- } else {
- this.#workoutplan.addSplit('time', commands[i].data)
- response.addCommand(commands[i].command)
- log.debug(`command ${i + 1}, CSAFE_PM_SET_SPLITDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().split.type}' split, duration ${this.#workoutplan.lastInterval().split.targetTime} seconds`)
+ switch (commandData[0]) {
+ case (DurationTypes.CSAFE_DISTANCE_DURATION):
+ this.#workoutplan.addSplit('distance', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.debug(`command ${i + 1}, CSAFE_PM_SET_SPLITDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().split.type}' split, length ${this.#workoutplan.lastInterval().split.targetDistance} meters`)
+ break
+ case (DurationTypes.CSAFE_TIME_DURATION):
+ this.#workoutplan.addSplit('time', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.debug(`command ${i + 1}, CSAFE_PM_SET_SPLITDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().split.type}' split, duration ${this.#workoutplan.lastInterval().split.targetTime} seconds`)
+ break
+ case (DurationTypes.CSAFE_CALORIES_DURATION):
+ this.#workoutplan.addSplit('calories', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.debug(`command ${i + 1}, CSAFE_PM_SET_SPLITDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().split.type}' split, duration ${this.#workoutplan.lastInterval().split.targetCalories} calories`)
+ break
+ default:
+ this.#workoutplan.addSplit('time', commands[i].data)
+ response.addCommand(commands[i].command)
+ log.error(`UNKNOWN command ${i + 1}, CSAFE_PM_SET_SPLITDURATION, ${swapObjectPropertyValues(DurationTypes)[commandData[0]]}, mapped to '${this.#workoutplan.lastInterval().split.type}' split, duration ${this.#workoutplan.lastInterval().split.targetTime} seconds`)
}
break
case (ProprietaryLongSetConfigCommands.CSAFE_PM_SET_TARGETPACETIME):
@@ -193,7 +233,6 @@ export class CsafeManagerService {
log.debug(`command ${i + 1}, CSAFE_PM_GET_DATETIME`)
break
case (ProprietaryLongSetConfigCommands.CSAFE_PM_SET_SCREENSTATE):
- /* eslint-disable max-depth -- Screenstate is a nasty beast to handle, requiring quite some layers to make sense of it */
if (commandData[0] === ScreenTypes.SCREENTYPE_WORKOUT) {
switch (commandData[1]) {
case ScreenValue.SCREENVALUEWORKOUT_TERMINATEWORKOUT:
@@ -217,7 +256,6 @@ export class CsafeManagerService {
// no default
}
}
- /* eslint-enable max-depth */
response.addCommand(commands[i].command)
log.debug(`command ${i + 1}, CSAFE_PM_SET_SCREENSTATE data: ${swapObjectPropertyValues(ScreenTypes)[commandData[0]]}, ${swapObjectPropertyValues(ScreenValue)[commandData[1]]}`)
break
@@ -229,5 +267,4 @@ export class CsafeManagerService {
}
this.#controlTransmitCharacteristic.notify(response.build())
}
- /* eslint-enable max-statements, max-depth */
}
diff --git a/app/peripherals/ble/pm5/rowing-service/other-characteristics/ForceCurveCharacteristic.js b/app/peripherals/ble/pm5/rowing-service/other-characteristics/ForceCurveCharacteristic.js
index efec5cd5ba..3cd22ba81f 100644
--- a/app/peripherals/ble/pm5/rowing-service/other-characteristics/ForceCurveCharacteristic.js
+++ b/app/peripherals/ble/pm5/rowing-service/other-characteristics/ForceCurveCharacteristic.js
@@ -1,11 +1,10 @@
'use strict'
-/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
/**
- * Implementation of the StrokeData as defined in:
- * https://www.concept2.co.uk/files/pdf/us/monitors/PM5_BluetoothSmartInterfaceDefinition.pdf
- * https://www.concept2.co.uk/files/pdf/us/monitors/PM5_CSAFECommunicationDefinition.pdf
+ * @copyright [OpenRowingMonitor]{@link https://github.com/JaapvanEkris/openrowingmonitor}
+ *
+ * @file Implementation of the Force Curve Data as defined in:
+ * - @see {@link https://www.concept2.co.uk/files/pdf/us/monitors/PM5_BluetoothSmartInterfaceDefinition.pdf|The PM5 Bluetooth Smart Interface Definition}
+ * - @see {@link https://www.concept2.co.uk/files/pdf/us/monitors/PM5_CSAFECommunicationDefinition.pdf|The PM5 CSAFE Communication Definition}
*/
import loglevel from 'loglevel'
@@ -51,7 +50,7 @@ export class ForceCurveCharacteristic extends GattNotifyCharacteristic {
const split = Math.floor(data.driveHandleForceCurve.length / chunkSize + (data.driveHandleForceCurve.length % chunkSize === 0 ? 0 : 1))
let i = 0
- log.debug(`Force curve data count: ${data.driveHandleForceCurve.length} chunk size(number of values): ${chunkSize}, number of chunks: ${split}`)
+ log.trace(`Force curve data count: ${data.driveHandleForceCurve.length} chunk size(number of values): ${chunkSize}, number of chunks: ${split}`)
while (i < split) {
const end = (i + 1) * chunkSize < data.driveHandleForceCurve.length ? chunkSize * (i + 1) : data.driveHandleForceCurve.length
@@ -65,8 +64,8 @@ export class ForceCurveCharacteristic extends GattNotifyCharacteristic {
bufferBuilder.writeUInt8(i)
currentChunkedData.forEach((data) => {
- // Data
- bufferBuilder.writeUInt16LE(Math.round(data * 0.224809))
+ // Data, clipped to a maximum of 255 to prevent an overflow
+ bufferBuilder.writeUInt16LE(Math.min(Math.round(data * 0.224809), 255))
})
i++
diff --git a/app/peripherals/ble/pm5/rowing-service/status-characteristics/GeneralStatusCharacteristic.js b/app/peripherals/ble/pm5/rowing-service/status-characteristics/GeneralStatusCharacteristic.js
index 6aebde681b..4a775c00cd 100644
--- a/app/peripherals/ble/pm5/rowing-service/status-characteristics/GeneralStatusCharacteristic.js
+++ b/app/peripherals/ble/pm5/rowing-service/status-characteristics/GeneralStatusCharacteristic.js
@@ -4,9 +4,9 @@
*/
/**
* Implementation of the GeneralStatus as defined in:
- * - https://www.concept2.co.uk/files/pdf/us/monitors/PM5_BluetoothSmartInterfaceDefinition.pdf
- * - https://www.concept2.co.uk/files/pdf/us/monitors/PM5_CSAFECommunicationDefinition.pdf
- * @see {@link https://github.com/JaapvanEkris/openrowingmonitor/blob/main/docs/PM5_Interface.md#0x0031-general-status|the description of desired behaviour}
+ * - @see {@link https://www.concept2.co.uk/files/pdf/us/monitors/PM5_BluetoothSmartInterfaceDefinition.pdf|the BLE specifications}
+ * - @see {@link https://www.concept2.co.uk/files/pdf/us/monitors/PM5_CSAFECommunicationDefinition.pdf|the CSafe communication specification}
+ * - @see {@link https://github.com/JaapvanEkris/openrowingmonitor/blob/main/docs/PM5_Interface.md#0x0031-general-status|the description of desired behaviour}
*/
import { BufferBuilder } from '../../../BufferBuilder.js'
import { GattNotifyCharacteristic } from '../../../BleManager.js'
@@ -50,10 +50,18 @@ export class GeneralStatusCharacteristic extends GattNotifyCharacteristic {
// totalWorkDistance: UInt24LE in 1 m
bufferBuilder.writeUInt24LE(data.interval.distance.absoluteStart > 0 ? Math.round(data.interval.distance.absoluteStart) : 0)
// workoutDuration: UInt24LE in 0.01 sec (if type TIME)
- if (data.interval.type === 'distance') {
- bufferBuilder.writeUInt24LE(data.interval.distance.target > 0 ? Math.round(data.interval.distance.target) : 0)
- } else {
- bufferBuilder.writeUInt24LE(data.interval.movingTime.target > 0 ? Math.round(data.interval.movingTime.target * 100) : 0)
+ switch (data.interval.type) {
+ case ('distance'):
+ bufferBuilder.writeUInt24LE(data.interval.distance.target > 0 ? Math.round(data.interval.distance.target) : 0)
+ break
+ case ('time'):
+ bufferBuilder.writeUInt24LE(data.interval.movingTime.target > 0 ? Math.round(data.interval.movingTime.target * 100) : 0)
+ break
+ case ('calories'):
+ bufferBuilder.writeUInt24LE(data.interval.calories.target > 0 ? Math.round(data.interval.calories.target) : 0)
+ break
+ default:
+ bufferBuilder.writeUInt24LE(0)
}
// workoutDurationType: UInt8, see DurationTypes enum
bufferBuilder.writeUInt8(toC2DurationType(data))
diff --git a/app/peripherals/ble/pm5/utils/C2toORMMapper.js b/app/peripherals/ble/pm5/utils/C2toORMMapper.js
index 69a69526a8..ed64304023 100644
--- a/app/peripherals/ble/pm5/utils/C2toORMMapper.js
+++ b/app/peripherals/ble/pm5/utils/C2toORMMapper.js
@@ -58,6 +58,18 @@ export function createWorkoutPlan () {
workoutplan[workoutstep].type = 'justrow'
}
break
+ case (type === 'calories' && data.length > 4):
+ workoutplan.push({})
+ workoutstep = workoutplan.length - 1
+ /* eslint-disable-next-line no-case-declarations -- readable code outweighs rules */
+ const targetCalories = readUInt32(data[1], data[2], data[3], data[4])
+ if (targetCalories > 0) {
+ workoutplan[workoutstep].type = 'calories'
+ workoutplan[workoutstep].targetCalories = targetCalories
+ } else {
+ workoutplan[workoutstep].type = 'justrow'
+ }
+ break
default:
workoutplan.push({})
workoutstep = workoutplan.length - 1
@@ -99,16 +111,28 @@ export function createWorkoutPlan () {
const targetTime = readUInt32(data[1], data[2], data[3], data[4]) / 100
if (targetTime > 0) {
workoutplan[workoutstep].split.type = 'time'
- workoutplan[workoutstep].split.targetTime = readUInt32(data[1], data[2], data[3], data[4]) / 100
+ workoutplan[workoutstep].split.targetTime = targetTime
} else {
workoutplan[workoutstep].split.type = workoutplan[workoutstep].type
workoutplan[workoutstep].split.targetTime = workoutplan[workoutstep].targetTime
}
break
+ case (type === 'calories' && data.length > 4):
+ /* eslint-disable-next-line no-case-declarations -- readable code outweighs rules */
+ const targetCalories = readUInt32(data[1], data[2], data[3], data[4])
+ if (targetCalories > 0) {
+ workoutplan[workoutstep].split.type = 'calories'
+ workoutplan[workoutstep].split.targetCalories = targetCalories
+ } else {
+ workoutplan[workoutstep].split.type = workoutplan[workoutstep].type
+ workoutplan[workoutstep].split.targetCalories = workoutplan[workoutstep].targetCalories
+ }
+ break
default:
workoutplan[workoutstep].split.type = workoutplan[workoutstep].type
if (workoutplan[workoutstep].type === 'distance') { workoutplan[workoutstep].split.targetDistance = workoutplan[workoutstep].targetDistance }
if (workoutplan[workoutstep].type === 'time' || workoutplan[workoutstep].type === 'rest') { workoutplan[workoutstep].split.targetTime = workoutplan[workoutstep].targetTime }
+ if (workoutplan[workoutstep].type === 'calories') { workoutplan[workoutstep].split.targetCalories = workoutplan[workoutstep].targetCalories }
}
}
diff --git a/app/peripherals/ble/pm5/utils/ORMtoC2Mapper.js b/app/peripherals/ble/pm5/utils/ORMtoC2Mapper.js
index 0959f2437f..07560c7878 100644
--- a/app/peripherals/ble/pm5/utils/ORMtoC2Mapper.js
+++ b/app/peripherals/ble/pm5/utils/ORMtoC2Mapper.js
@@ -59,6 +59,9 @@ export function toC2WorkoutType (baseMetrics) {
case (baseMetrics.workout.numberOfIntervals > 1 && baseMetrics.workout.type === 'time'):
return WorkoutTypes.WORKOUTTYPE_FIXEDTIME_INTERVAL
break
+ case (baseMetrics.workout.numberOfIntervals > 1 && baseMetrics.workout.type === 'calories'):
+ return WorkoutTypes.WORKOUTTYPE_FIXEDCALS_INTERVAL
+ break
default:
return WorkoutTypes.WORKOUTTYPE_JUSTROW_NOSPLITS
}
diff --git a/app/recorders/fitRecorder.js b/app/recorders/fitRecorder.js
index 8df1e5fab8..cb9aabe8fe 100644
--- a/app/recorders/fitRecorder.js
+++ b/app/recorders/fitRecorder.js
@@ -1,10 +1,16 @@
'use strict'
/*
- Open Rowing Monitor, https://github.com/JaapvanEkris/openrowingmonitor
-*/
+ * OpenRowingMonitor, https://github.com/JaapvanEkris/openrowingmonitor
+ */
/**
- * This Module captures the metrics of a rowing session and persists them into the fit format
+ * @file This Module captures the metrics of a rowing session and persists them into the fit format
* It provides a fit-file content, and some metadata for the filewriter and the file-uploaders
+ *
+ * Be aware: OpenRowingMonitor and Garmin actually use conflicting terminology!
+ * - An OpenRowingMonitor Interval is nearly identical as a Garmin Split (aside the handling of unplanned pauses)
+ * - An OpenRowingMonitor Split is identical to a Garmin lap
+ *
+ * Analysis of Garmin files show that splits, laps and strokes are completely disconnected, so we use that loose structure here as well
*/
/* eslint-disable camelcase -- Imported parameters are not camelCase */
/* eslint-disable max-lines -- The length is governed by the fit-parameterisation, which we can't control */
@@ -18,16 +24,22 @@ export function createFITRecorder (config) {
const type = 'fit'
const postfix = '_rowing'
const presentationName = 'Garmin fit'
- const lapHRMetrics = createSeries()
const sessionHRMetrics = createSeries()
+ const splitActiveHRMetrics = createSeries()
+ const splitRestHRMetrics = createSeries()
+ const splitVelocityMetrics = createSeries()
+ const lapHRMetrics = createSeries()
const VO2max = createVO2max(config)
let heartRate = 0
let sessionData = {}
sessionData.workoutplan = []
sessionData.workoutplan[0] = { type: 'justrow' }
- sessionData.lap = []
+ sessionData.splits = []
+ sessionData.laps = []
+ sessionData.strokes = []
+ sessionData.noActiveSplits = 0
+ sessionData.noRestSplits = 0
sessionData.complete = false
- let lapnumber = 0
let postExerciseHR = []
let lastMetrics = {}
let fitfileContent
@@ -47,10 +59,11 @@ export function createFITRecorder (config) {
break
case ('reset'):
case ('shutdown'):
- if (lastMetrics !== undefined && !!lastMetrics.metricsContext && lastMetrics.metricsContext.isMoving === true && (sessionData.lap[lapnumber].strokes.length > 0) && (lastMetrics.totalMovingTime > sessionData.lap[lapnumber].strokes[sessionData.lap[lapnumber].strokes.length - 1].totalMovingTime)) {
+ if (lastMetrics !== undefined && !!lastMetrics.metricsContext && lastMetrics.metricsContext.isMoving === true && (lastMetrics.totalNumberOfStrokes > 0) && (lastMetrics.totalMovingTime > sessionData.totalMovingTime)) {
// We apperantly get a shutdown/crash during session
addMetricsToStrokesArray(lastMetrics)
calculateLapMetrics(lastMetrics)
+ calculateSplitMetrics(lastMetrics)
calculateSessionMetrics(lastMetrics)
}
break
@@ -68,20 +81,23 @@ export function createFITRecorder (config) {
/**
* This function records the metrics in the structure for he fit-file to be generated
- * * @param {Metrics} metrics to be recorded
+ * @param {Metrics} metrics to be recorded
*/
function recordRowingMetrics (metrics) {
switch (true) {
case (metrics.metricsContext.isSessionStart):
sessionData.startTime = metrics.timestamp
- lapnumber = 0
- startLap(lapnumber, metrics)
+ startSplit(metrics)
+ startLap(metrics)
sessionHRMetrics.reset()
+ splitActiveHRMetrics.reset()
+ splitRestHRMetrics.reset()
addMetricsToStrokesArray(metrics)
break
case (metrics.metricsContext.isSessionStop && lastMetrics.sessionState !== 'Stopped'):
addMetricsToStrokesArray(metrics)
calculateLapMetrics(metrics)
+ calculateSplitMetrics(metrics)
calculateSessionMetrics(metrics)
postExerciseHR = null
postExerciseHR = []
@@ -90,6 +106,7 @@ export function createFITRecorder (config) {
case (metrics.metricsContext.isPauseStart && lastMetrics.sessionState === 'Rowing'):
addMetricsToStrokesArray(metrics)
calculateLapMetrics(metrics)
+ calculateSplitMetrics(metrics)
calculateSessionMetrics(metrics)
resetLapMetrics()
postExerciseHR = null
@@ -97,31 +114,33 @@ export function createFITRecorder (config) {
measureRecoveryHR()
break
case (metrics.metricsContext.isPauseEnd):
- // The session is resumed, so it was a pause instead of a stop
- lapnumber++
- addRestLap(lapnumber, metrics, sessionData.lap[lapnumber - 1].endTime, metrics.interval.workoutStepNumber)
- lapnumber++
- startLap(lapnumber, metrics)
+ // The session is resumed, so it was a pause instead of a stop. First add the rest split and lap
+ // eslint-disable-next-line no-case-declarations -- Code clarity outweighs lint rules
+ const endTime = sessionData.splits[sessionData.splits.length - 1].endTime
+ addRestSplit(metrics, endTime)
+ addRestLap(metrics, endTime, metrics.interval.workoutStepNumber)
+ // Now start a new active split and lap
+ startSplit(metrics)
+ startLap(metrics)
addMetricsToStrokesArray(metrics)
break
case (metrics.metricsContext.isIntervalEnd):
- if (metrics.metricsContext.isDriveStart) { addMetricsToStrokesArray(metrics) }
+ addMetricsToStrokesArray(metrics) // Add a trackpoint to provide the lap and split with an anchor
+ calculateSplitMetrics(metrics)
calculateLapMetrics(metrics)
- calculateSessionMetrics(metrics)
resetLapMetrics()
- lapnumber++
- startLap(lapnumber, metrics)
+ startSplit(metrics)
+ startLap(metrics)
break
case (metrics.metricsContext.isSplitEnd):
- if (metrics.metricsContext.isDriveStart) { addMetricsToStrokesArray(metrics) }
+ addMetricsToStrokesArray(metrics) // Add a trackpoint to provide the lap and split with an anchor
calculateLapMetrics(metrics)
- calculateSessionMetrics(metrics)
resetLapMetrics()
- lapnumber++
- startLap(lapnumber, metrics)
+ startLap(metrics)
break
case (metrics.metricsContext.isDriveStart):
addMetricsToStrokesArray(metrics)
+ splitVelocityMetrics.push(metrics.cycleLinearVelocity)
break
// no default
}
@@ -129,78 +148,147 @@ export function createFITRecorder (config) {
}
function addMetricsToStrokesArray (metrics) {
- sessionData.lap[lapnumber].strokes.push({})
- const strokenumber = sessionData.lap[lapnumber].strokes.length - 1
- sessionData.lap[lapnumber].strokes[strokenumber].timestamp = metrics.timestamp
- sessionData.lap[lapnumber].strokes[strokenumber].totalLinearDistance = metrics.totalLinearDistance
- sessionData.lap[lapnumber].strokes[strokenumber].totalNumberOfStrokes = metrics.totalNumberOfStrokes
- sessionData.lap[lapnumber].strokes[strokenumber].cycleStrokeRate = metrics.cycleStrokeRate
- sessionData.lap[lapnumber].strokes[strokenumber].cyclePower = metrics.cyclePower
- sessionData.lap[lapnumber].strokes[strokenumber].cycleLinearVelocity = metrics.cycleLinearVelocity
- sessionData.lap[lapnumber].strokes[strokenumber].cycleDistance = metrics.cycleDistance
- sessionData.lap[lapnumber].strokes[strokenumber].dragFactor = metrics.dragFactor
- if (!isNaN(heartRate) && heartRate > 0) {
- sessionData.lap[lapnumber].strokes[strokenumber].heartrate = heartRate
- } else {
- sessionData.lap[lapnumber].strokes[strokenumber].heartrate = undefined
- }
- VO2max.push(metrics)
+ sessionData.strokes.push({
+ timestamp: metrics.timestamp,
+ totalNumberOfStrokes: metrics.totalNumberOfStrokes,
+ totalLinearDistance: metrics.totalLinearDistance,
+ cycleStrokeRate: metrics.cycleStrokeRate,
+ cyclePower: metrics.cyclePower,
+ cycleLinearVelocity: metrics.cycleLinearVelocity,
+ cycleDistance: metrics.cycleDistance,
+ dragFactor: metrics.dragFactor,
+ ...(!isNaN(heartRate) && heartRate > 0 ? { heartrate: heartRate } : { heartrate: undefined })
+ })
+ sessionData.totalMovingTime = metrics.workout.timeSpent.moving
+ VO2max.push(metrics, heartRate)
fitfileContentIsCurrent = false
allDataHasBeenWritten = false
}
- function startLap (lapnumber, metrics) {
+ function startSplit (metrics) {
+ sessionData.noActiveSplits++
+ splitVelocityMetrics.reset()
+ splitVelocityMetrics.push(metrics.cycleLinearVelocity)
+ const splitnumber = sessionData.splits.length
+ sessionData.splits.push({
+ startTime: metrics.timestamp,
+ splitNumber: splitnumber,
+ totalMovingTimeAtStart: metrics.totalMovingTime,
+ startDistance: metrics.totalLinearDistance,
+ startCalories: metrics.workout.caloriesSpent.total,
+ intensity: 'active',
+ complete: false
+ })
+ }
+
+ function calculateSplitMetrics (metrics) {
+ const splitnumber = sessionData.splits.length - 1
+ sessionData.splits[splitnumber].totalTime = metrics.totalMovingTime - sessionData.splits[splitnumber].totalMovingTimeAtStart
+ sessionData.splits[splitnumber].totalLinearDistance = metrics.totalLinearDistance - sessionData.splits[splitnumber].startDistance
+ sessionData.splits[splitnumber].calories = metrics.workout.caloriesSpent.total - sessionData.splits[splitnumber].startCalories
+ sessionData.splits[splitnumber].endTime = metrics.timestamp
+ sessionData.splits[splitnumber].maxSpeed = splitVelocityMetrics.maximum()
+ sessionData.splits[splitnumber].complete = true
+ sessionData.totalMovingTime = metrics.workout.timeSpent.moving
+ }
+
+ function addRestSplit (metrics, startTime) {
+ sessionData.noRestSplits++
+ const splitnumber = sessionData.splits.length
+ sessionData.splits.push({
+ startTime: startTime,
+ splitNumber: splitnumber,
+ intensity: 'rest',
+ totalTime: metrics.split.timeSpent.rest,
+ calories: metrics.split.caloriesSpent.rest,
+ endTime: metrics.timestamp,
+ complete: true
+ })
+ }
+
+ function startLap (metrics) {
resetLapMetrics()
- sessionData.lap[lapnumber] = { totalMovingTimeAtStart: metrics.totalMovingTime }
- sessionData.lap[lapnumber].intensity = 'active'
- sessionData.lap[lapnumber].strokes = []
- sessionData.lap[lapnumber].startTime = metrics.timestamp
- sessionData.lap[lapnumber].lapNumber = lapnumber + 1
- sessionData.lap[lapnumber].complete = false
+ const lapnumber = sessionData.laps.length
+ sessionData.laps.push({
+ startTime: metrics.timestamp,
+ lapNumber: lapnumber,
+ totalMovingTimeAtStart: metrics.totalMovingTime,
+ intensity: 'active',
+ complete: false
+ })
}
function calculateLapMetrics (metrics) {
- sessionData.lap[lapnumber].workoutStepNumber = metrics.interval.workoutStepNumber
- sessionData.lap[lapnumber].endTime = metrics.timestamp
+ const lapnumber = sessionData.laps.length - 1
+ sessionData.laps[lapnumber].workoutStepNumber = metrics.interval.workoutStepNumber
+ sessionData.laps[lapnumber].endTime = metrics.timestamp
switch (true) {
- case (metrics.metricsContext.isSessionStop && (metrics.interval.type === 'distance' || metrics.interval.type === 'time')):
+ case (metrics.metricsContext.isSessionStop && (metrics.interval.type === 'distance' || metrics.interval.type === 'time' || metrics.interval.type === 'calories')):
// As the workout closure has its own events, we need to close the workout step here
- sessionData.lap[lapnumber].trigger = metrics.interval.type
- sessionData.lap[lapnumber].event = 'workoutStep'
+ sessionData.laps[lapnumber].trigger = metrics.interval.type
+ sessionData.laps[lapnumber].event = 'workoutStep'
break
case (metrics.metricsContext.isSessionStop):
- sessionData.lap[lapnumber].trigger = 'manual'
- sessionData.lap[lapnumber].event = 'workoutStep'
+ sessionData.laps[lapnumber].trigger = 'manual'
+ sessionData.laps[lapnumber].event = 'workoutStep'
break
case (metrics.metricsContext.isIntervalEnd && (metrics.interval.type === 'distance' || metrics.interval.type === 'time')):
- sessionData.lap[lapnumber].trigger = metrics.interval.type
- sessionData.lap[lapnumber].event = 'workoutStep'
+ sessionData.laps[lapnumber].trigger = metrics.interval.type
+ sessionData.laps[lapnumber].event = 'workoutStep'
+ break
+ case (metrics.metricsContext.isIntervalEnd && metrics.interval.type === 'calories'):
+ sessionData.laps[lapnumber].trigger = 'manual'
+ sessionData.laps[lapnumber].event = 'workoutStep'
break
case (metrics.metricsContext.isIntervalEnd):
- sessionData.lap[lapnumber].trigger = 'manual'
- sessionData.lap[lapnumber].event = 'workoutStep'
+ sessionData.laps[lapnumber].trigger = 'manual'
+ sessionData.laps[lapnumber].event = 'workoutStep'
break
case (metrics.metricsContext.isPauseStart):
// As metrics.metricsContext.isIntervalEnd === false, we know this is a spontanuous pause and not a planned rest interval
- sessionData.lap[lapnumber].trigger = 'manual'
- sessionData.lap[lapnumber].event = 'speedLowAlert'
+ sessionData.laps[lapnumber].trigger = 'manual'
+ sessionData.laps[lapnumber].event = 'speedLowAlert'
break
case (metrics.metricsContext.isSplitEnd && (metrics.split.type === 'distance' || metrics.split.type === 'time')):
- sessionData.lap[lapnumber].trigger = metrics.split.type
- sessionData.lap[lapnumber].event = 'lap'
+ sessionData.laps[lapnumber].trigger = metrics.split.type
+ sessionData.laps[lapnumber].event = 'lap'
+ break
+ case (metrics.metricsContext.isSplitEnd && metrics.split.type === 'calories'):
+ sessionData.laps[lapnumber].trigger = 'manual'
+ sessionData.laps[lapnumber].event = 'lap'
break
case (metrics.metricsContext.isSplitEnd):
- sessionData.lap[lapnumber].trigger = 'manual'
- sessionData.lap[lapnumber].event = 'lap'
+ sessionData.laps[lapnumber].trigger = 'manual'
+ sessionData.laps[lapnumber].event = 'lap'
break
default:
- sessionData.lap[lapnumber].trigger = 'manual'
- sessionData.lap[lapnumber].event = 'lap'
+ sessionData.laps[lapnumber].trigger = 'manual'
+ sessionData.laps[lapnumber].event = 'lap'
}
- sessionData.lap[lapnumber].summary = { ...metrics.split }
- sessionData.lap[lapnumber].averageHeartrate = lapHRMetrics.average()
- sessionData.lap[lapnumber].maximumHeartrate = lapHRMetrics.maximum()
- sessionData.lap[lapnumber].complete = true
+ sessionData.laps[lapnumber].summary = { ...metrics.split }
+ sessionData.laps[lapnumber].averageHeartrate = lapHRMetrics.average()
+ sessionData.laps[lapnumber].maximumHeartrate = lapHRMetrics.maximum()
+ sessionData.laps[lapnumber].complete = true
+ sessionData.totalMovingTime = metrics.workout.timeSpent.moving
+ }
+
+ function addRestLap (metrics, startTime, workoutStepNo) {
+ resetLapMetrics()
+ const lapnumber = sessionData.laps.length
+ sessionData.laps.push({
+ startTime: startTime,
+ lapNumber: lapnumber,
+ intensity: 'rest',
+ workoutStepNumber: workoutStepNo,
+ ...(metrics.metricsContext.isIntervalEnd ? { trigger: 'time' } : { trigger: 'manual' }),
+ ...(metrics.metricsContext.isIntervalEnd ? { event: 'workoutStep' } : { event: 'lap' }),
+ endTime: metrics.timestamp,
+ averageHeartrate: lapHRMetrics.average(),
+ maximumHeartrate: lapHRMetrics.maximum(),
+ summary: { ...metrics.split },
+ complete: true
+ })
+ VO2max.handleRestart(metrics.split.timeSpent.moving)
}
function resetLapMetrics () {
@@ -208,35 +296,13 @@ export function createFITRecorder (config) {
if (!isNaN(heartRate) && heartRate > 0) { lapHRMetrics.push(heartRate) }
}
- function addRestLap (lapnumber, metrics, startTime, workoutStepNo) {
- sessionData.lap[lapnumber] = { startTime }
- sessionData.lap[lapnumber].intensity = 'rest'
- sessionData.lap[lapnumber].workoutStepNumber = workoutStepNo
- switch (true) {
- case (metrics.metricsContext.isIntervalEnd):
- // This occurs when the sessionmanager termnates a planned rest interval
- sessionData.lap[lapnumber].trigger = 'time'
- sessionData.lap[lapnumber].event = 'workoutStep'
- break
- default:
- // It is an unplanned rest lap
- sessionData.lap[lapnumber].trigger = 'manual'
- sessionData.lap[lapnumber].event = 'lap'
- }
- sessionData.lap[lapnumber].lapNumber = lapnumber + 1
- sessionData.lap[lapnumber].endTime = metrics.timestamp
- sessionData.lap[lapnumber].averageHeartrate = lapHRMetrics.average()
- sessionData.lap[lapnumber].maximumHeartrate = lapHRMetrics.maximum()
- sessionData.lap[lapnumber].summary = { ...metrics.split }
- sessionData.lap[lapnumber].complete = true
- VO2max.handleRestart(metrics.split.timeSpent.moving)
- }
-
function calculateSessionMetrics (metrics) {
- sessionData.totalNoLaps = lapnumber + 1
+ sessionData.totalNoLaps = sessionData.laps.length
sessionData.totalTime = metrics.workout.timeSpent.total
sessionData.totalMovingTime = metrics.workout.timeSpent.moving
sessionData.totalRestTime = metrics.workout.timeSpent.rest
+ sessionData.totalMovingCalories = metrics.workout.caloriesSpent.moving
+ sessionData.totalRestCalories = metrics.workout.caloriesSpent.rest
sessionData.totalLinearDistance = metrics.workout.distance.fromStart
sessionData.totalNumberOfStrokes = metrics.workout.numberOfStrokes
sessionData.averageLinearVelocity = metrics.workout.linearVelocity.average
@@ -249,32 +315,38 @@ export function createFITRecorder (config) {
sessionData.minimumHeartrate = sessionHRMetrics.minimum()
sessionData.averageHeartrate = sessionHRMetrics.average()
sessionData.maximumHeartrate = sessionHRMetrics.maximum()
- sessionData.endTime = sessionData.lap[lapnumber].endTime
+ sessionData.endTime = metrics.timestamp
sessionData.complete = true
}
- /*
+ /**
* initiated when a new heart rate value is received from heart rate sensor
*/
async function recordHeartRate (value) {
heartRate = value.heartrate
if (!isNaN(heartRate) && heartRate > 0) {
lapHRMetrics.push(heartRate)
+ if (lastMetrics.sessionState === 'Paused') { splitRestHRMetrics.push(heartRate) }
+ if (lastMetrics.sessionState === 'Rowing') { splitActiveHRMetrics.push(heartRate) }
sessionHRMetrics.push(heartRate)
}
}
- /*
+ /**
* This externally exposed function generates the file contont for the file writer and uploaders
*/
async function fileContent () {
if (Object.keys(lastMetrics).length === 0 || Object.keys(sessionData).length === 0) { return undefined }
- if (sessionData.lap[lapnumber].complete !== true) {
+ if (sessionData.laps[sessionData.laps.length - 1].complete !== true) {
addMetricsToStrokesArray(lastMetrics)
calculateLapMetrics(lastMetrics)
}
+ if (sessionData.splits[sessionData.splits.length - 1].complete !== true) {
+ calculateSplitMetrics(lastMetrics)
+ }
+
if (sessionData.complete !== true) {
calculateSessionMetrics(lastMetrics)
}
@@ -288,13 +360,15 @@ export function createFITRecorder (config) {
}
}
+ /**
+ * @see {@link https://developer.garmin.com/fit/file-types/activity/|the fields and their meaning}. We use 'Smart Recording' per stroke.
+   * @see {@link https://developer.garmin.com/fit/cookbook/encoding-activity-files/|the description of the filestructure and how timestamps should be implemented}
+ * We use 'summary last message sequencing' as the stream makes most sense that way
+ */
async function workoutToFit (workout) {
// The file content is filled and hasn't changed
if (fitfileContentIsCurrent === true && fitfileContent !== undefined) { return fitfileContent }
- // See https://developer.garmin.com/fit/file-types/activity/ for the fields and their meaning. We use 'Smart Recording' per stroke.
- // See also https://developer.garmin.com/fit/cookbook/encoding-activity-files/ for a description of the filestructure and how timestamps should be implemented
- // We use 'summary last message sequencing' as the stream makes most sense that way
const fitWriter = new FitWriter()
const versionNumber = parseInt(process.env.npm_package_version, 10)
@@ -321,6 +395,70 @@ export function createFITRecorder (config) {
true
)
+ // Activity summary
+ fitWriter.writeMessage(
+ 'activity',
+ {
+ timestamp: fitWriter.time(workout.startTime),
+ local_timestamp: fitWriter.time(workout.startTime) - workout.startTime.getTimezoneOffset() * 60,
+ total_timer_time: workout.totalTime,
+ num_sessions: 1,
+ event: 'activity',
+ event_type: 'stop',
+ type: 'manual'
+ },
+ null,
+ true
+ )
+
+ /*
+ * The session summary
+ * @see {@link https://developer.garmin.com/fit/cookbook/durations/|for explanation about times}
+ */
+ fitWriter.writeMessage(
+ 'session',
+ {
+ timestamp: fitWriter.time(workout.startTime),
+ message_index: 0,
+ sport: 'rowing',
+ sub_sport: 'indoorRowing',
+ event: 'session',
+ event_type: 'stop',
+ trigger: 'activityEnd',
+ sport_profile_name: 'Row Indoor',
+ start_time: fitWriter.time(workout.startTime),
+ total_elapsed_time: workout.totalTime,
+ total_timer_time: workout.totalTime,
+ total_moving_time: workout.totalMovingTime,
+ total_distance: workout.totalLinearDistance,
+ total_calories: workout.totalMovingCalories + workout.totalRestCalories,
+ total_cycles: workout.totalNumberOfStrokes,
+ avg_speed: workout.averageLinearVelocity,
+ max_speed: workout.maximumLinearVelocity,
+ avg_power: workout.averagePower,
+ max_power: workout.maximumPower,
+ avg_cadence: workout.averageStrokerate,
+ max_cadence: workout.maximumStrokerate,
+ ...(workout.minimumHeartrate > 0 ? { min_heart_rate: workout.minimumHeartrate } : {}),
+ ...(workout.averageHeartrate > 0 ? { avg_heart_rate: workout.averageHeartrate } : {}),
+ ...(workout.maximumHeartrate > 0 ? { max_heart_rate: workout.maximumHeartrate } : {}),
+ avg_stroke_distance: workout.averageStrokeDistance,
+ first_lap_index: 0,
+ num_laps: sessionData.totalNoLaps
+ },
+ null,
+ true
+ )
+
+ // Write the laps
+ await writeLaps(fitWriter, workout)
+
+ // Write the splits
+ await writeSplits(fitWriter, workout)
+
+ // Write the events
+ await writeEvents(fitWriter, workout)
+
fitWriter.writeMessage(
'device_info',
{
@@ -358,155 +496,52 @@ export function createFITRecorder (config) {
true
)
- // The workout before the start
+ // The workout definition before the start
await createWorkoutSteps(fitWriter, workout)
- // Write the metrics
- await createActivity(fitWriter, workout)
+ await writeRecords(fitWriter, workout)
+
+ await createVO2MaxRecord(fitWriter, workout)
+
+ await addHRR2Event(fitWriter)
fitfileContent = fitWriter.finish()
fitfileContentIsCurrent = true
return fitfileContent
}
- async function createActivity (writer, workout) {
- // Start of the session
- await addEvent(writer, workout.startTime, 'workout', 'start')
- await addEvent(writer, workout.startTime, 'timer', 'start')
-
+ async function writeLaps (writer, workout) {
// Write all laps
let i = 0
- while (i < workout.lap.length) {
- if (workout.lap[i].intensity === 'active') {
+ while (i < workout.laps.length) {
+ if (workout.laps[i].intensity === 'active') {
// eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
- await createActiveLap(writer, workout.lap[i])
+ await createActiveLap(writer, workout.laps[i])
} else {
// This is a rest interval
// eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
- await createRestLap(writer, workout.lap[i])
+ await createRestLap(writer, workout.laps[i])
}
i++
}
-
- // Finish the seesion with a stop event
- await addEvent(writer, workout.endTime, 'timer', 'stopAll')
- await addEvent(writer, workout.endTime, 'workout', 'stop')
-
- // Write the split summary
- // ToDo: Find out how records, splits, laps and sessions can be subdivided
- writer.writeMessage(
- 'split',
- {
- start_time: writer.time(workout.startTime),
- split_type: 'intervalActive',
- total_elapsed_time: workout.totalTime,
- total_timer_time: workout.totalTime,
- total_moving_time: workout.totalMovingTime,
- total_distance: workout.totalLinearDistance,
- avg_speed: workout.averageLinearVelocity,
- max_speed: workout.maximumLinearVelocity,
- end_time: writer.time(workout.endTime)
- },
- null,
- true
- )
-
- await createVO2MaxRecord(writer, workout)
-
- // Conclude with a session summary
- // See https://developer.garmin.com/fit/cookbook/durations/ for explanation about times
- writer.writeMessage(
- 'session',
- {
- timestamp: writer.time(workout.endTime),
- message_index: 0,
- sport: 'rowing',
- sub_sport: 'indoorRowing',
- event: 'session',
- event_type: 'stop',
- trigger: 'activityEnd',
- start_time: writer.time(workout.startTime),
- total_elapsed_time: workout.totalTime,
- total_timer_time: workout.totalTime,
- total_moving_time: workout.totalMovingTime,
- total_distance: workout.totalLinearDistance,
- total_cycles: workout.totalNumberOfStrokes,
- avg_speed: workout.averageLinearVelocity,
- max_speed: workout.maximumLinearVelocity,
- avg_power: workout.averagePower,
- max_power: workout.maximumPower,
- avg_cadence: workout.averageStrokerate,
- max_cadence: workout.maximumStrokerate,
- ...(sessionData.minimumHeartrate > 0 ? { min_heart_rate: sessionData.minimumHeartrate } : {}),
- ...(sessionData.averageHeartrate > 0 ? { avg_heart_rate: sessionData.averageHeartrate } : {}),
- ...(sessionData.maximumHeartrate > 0 ? { max_heart_rate: sessionData.maximumHeartrate } : {}),
- avg_stroke_distance: workout.averageStrokeDistance,
- first_lap_index: 0,
- num_laps: sessionData.totalNoLaps
- },
- null,
- true
- )
-
- // Activity summary
- writer.writeMessage(
- 'activity',
- {
- timestamp: writer.time(workout.endTime),
- local_timestamp: writer.time(workout.startTime) - workout.startTime.getTimezoneOffset() * 60,
- total_timer_time: workout.totalTime,
- num_sessions: 1,
- event: 'activity',
- event_type: 'stop',
- type: 'manual'
- },
- null,
- true
- )
-
- await addHRR2Event(writer)
- }
-
- async function addEvent (writer, time, event, eventType) {
- writer.writeMessage(
- 'event',
- {
- timestamp: writer.time(time),
- event: event,
- event_type: eventType,
- event_group: 0
- },
- null,
- true
- )
}
async function createActiveLap (writer, lapdata) {
// It is an active lap, after we make sure it is a completed lap, we can write all underlying records
- if (!!lapdata.summary.timeSpent.moving && lapdata.summary.timeSpent.moving > 0 && !!lapdata.summary.distance.fromStart && lapdata.summary.distance.fromStart > 0) {
- let i = 0
- while (i < lapdata.strokes.length) {
- // eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
- await createTrackPoint(writer, lapdata.strokes[i])
- i++
- }
-
- await addEvent(writer, lapdata.endTime, lapdata.event, 'stop')
-
- // Conclude the lap with a summary
+ if (!!lapdata.complete && lapdata.complete) {
// See https://developer.garmin.com/fit/cookbook/durations/ for how the different times are defined
writer.writeMessage(
'lap',
{
- timestamp: writer.time(lapdata.endTime),
- message_index: lapdata.lapNumber - 1,
+ timestamp: writer.time(sessionData.startTime),
+ message_index: lapdata.lapNumber,
sport: 'rowing',
sub_sport: 'indoorRowing',
event: lapdata.event,
wkt_step_index: lapdata.workoutStepNumber,
event_type: 'stop',
intensity: lapdata.intensity,
- ...(sessionData.totalNoLaps === lapdata.lapNumber ? { lap_trigger: 'sessionEnd' } : { lap_trigger: lapdata.trigger }),
+ ...(sessionData.totalNoLaps === (lapdata.lapNumber + 1) ? { lap_trigger: 'sessionEnd' } : { lap_trigger: lapdata.trigger }),
start_time: writer.time(lapdata.startTime),
total_elapsed_time: lapdata.summary.timeSpent.total,
total_timer_time: lapdata.summary.timeSpent.total,
@@ -516,7 +551,7 @@ export function createFITRecorder (config) {
avg_cadence: lapdata.summary.strokerate.average,
max_cadence: lapdata.summary.strokerate.maximum,
avg_stroke_distance: lapdata.summary.strokeDistance.average,
- total_calories: lapdata.summary.calories.totalSpent,
+ total_calories: lapdata.summary.caloriesSpent.moving,
avg_speed: lapdata.summary.linearVelocity.average,
max_speed: lapdata.summary.linearVelocity.maximum,
avg_power: lapdata.summary.power.average,
@@ -525,24 +560,19 @@ export function createFITRecorder (config) {
...(lapdata.maximumHeartrate > 0 ? { max_heart_rate: lapdata.maximumHeartrate } : {})
},
null,
- sessionData.totalNoLaps === lapdata.lapNumber
+ sessionData.totalNoLaps === (lapdata.lapNumber + 1)
)
}
}
async function createRestLap (writer, lapdata) {
// First, make sure the rest lap is complete
- if (!!lapdata.endTime && lapdata.endTime > 0 && !!lapdata.startTime && lapdata.startTime > 0) {
- // Pause the session timer with a stop event at the begin of the rest interval
- await addEvent(writer, lapdata.startTime, 'timer', 'stop')
-
- // Add a rest lap summary
- // See https://developer.garmin.com/fit/cookbook/durations/ for how the different times are defined
+ if (!!lapdata.complete && lapdata.complete) {
writer.writeMessage(
'lap',
{
- timestamp: writer.time(lapdata.endTime),
- message_index: lapdata.lapNumber - 1,
+ timestamp: writer.time(sessionData.startTime),
+ message_index: lapdata.lapNumber,
sport: 'rowing',
sub_sport: 'indoorRowing',
event: lapdata.event,
@@ -559,7 +589,7 @@ export function createFITRecorder (config) {
avg_cadence: 0,
max_cadence: 0,
avg_stroke_distance: 0,
- total_calories: 0,
+ total_calories: lapdata.summary.caloriesSpent.rest,
avg_speed: 0,
max_speed: 0,
avg_power: 0,
@@ -568,37 +598,188 @@ export function createFITRecorder (config) {
...(lapdata.maximumHeartrate > 0 ? { max_heart_rate: lapdata.maximumHeartrate } : {})
},
null,
- sessionData.totalNoLaps === lapdata.lapNumber
+ sessionData.totalNoLaps === (lapdata.lapNumber + 1)
)
-
- // Restart of the session
- await addEvent(writer, lapdata.endTime, lapdata.event, 'stop')
- await addEvent(writer, lapdata.endTime, 'timer', 'start')
}
}
- async function createTrackPoint (writer, trackpoint) {
+ async function writeSplits (writer, workout) {
+ // Create the splits
+ let i = 0
+ while (i < workout.splits.length) {
+ if (workout.splits[i].intensity === 'active') {
+ // eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
+ await createActiveSplit(writer, workout.splits[i])
+ } else {
+ // This is a rest interval
+ // eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
+ await createRestSplit(writer, workout.splits[i])
+ }
+ i++
+ }
+
+ // Write the split summary
writer.writeMessage(
- 'record',
+ 'split_summary',
{
- timestamp: writer.time(trackpoint.timestamp),
- distance: trackpoint.totalLinearDistance,
- total_cycles: trackpoint.totalNumberOfStrokes,
- activity_type: 'fitnessEquipment',
- ...(trackpoint.cycleLinearVelocity > 0 || trackpoint.isPauseStart ? { speed: trackpoint.cycleLinearVelocity } : {}),
- ...(trackpoint.cyclePower > 0 || trackpoint.isPauseStart ? { power: trackpoint.cyclePower } : {}),
- ...(trackpoint.cycleStrokeRate > 0 ? { cadence: trackpoint.cycleStrokeRate } : {}),
- ...(trackpoint.cycleDistance > 0 ? { cycle_length16: trackpoint.cycleDistance } : {}),
- ...(trackpoint.dragFactor > 0 || trackpoint.dragFactor < 255 ? { resistance: trackpoint.dragFactor } : {}), // As the data is stored in an int8, we need to guard the maximum
- ...(trackpoint.heartrate !== undefined && trackpoint.heartrate > 0 ? { heart_rate: trackpoint.heartrate } : {})
+ timestamp: writer.time(workout.startTime),
+ message_index: 0,
+ split_type: 'interval_active',
+ num_splits: sessionData.noActiveSplits,
+ total_timer_time: workout.totalMovingTime,
+ total_distance: workout.totalLinearDistance,
+ avg_speed: workout.averageLinearVelocity,
+ max_speed: workout.maximumLinearVelocity,
+ total_calories: sessionData.totalMovingCalories,
+ ...(splitActiveHRMetrics.average() > 0 ? { avg_heart_rate: splitActiveHRMetrics.average() } : {}),
+ ...(splitActiveHRMetrics.maximum() > 0 ? { max_heart_rate: splitActiveHRMetrics.maximum() } : {})
+ },
+ null,
+ sessionData.noRestSplits === 0
+ )
+
+ if (sessionData.noRestSplits > 0) {
+ // There was a pause
+ writer.writeMessage(
+ 'split_summary',
+ {
+ timestamp: writer.time(workout.startTime),
+ message_index: 1,
+ split_type: 'interval_rest',
+ num_splits: sessionData.noRestSplits,
+ total_timer_time: sessionData.totalRestTime,
+ total_distance: 0,
+ avg_speed: 0,
+ max_speed: 0,
+ total_calories: sessionData.totalRestCalories,
+ ...(splitRestHRMetrics.average() > 0 ? { avg_heart_rate: splitRestHRMetrics.average() } : {}),
+ ...(splitRestHRMetrics.maximum() > 0 ? { max_heart_rate: splitRestHRMetrics.maximum() } : {})
+ },
+ null,
+ true
+ )
+ }
+ }
+
+ /**
+ * Creation of the active split
+ * @see {@link https://developer.garmin.com/fit/cookbook/durations/|how the different times are defined}
+ */
+ async function createActiveSplit (writer, splitdata) {
+ if (!!splitdata.complete && splitdata.complete) {
+ // The split is complete
+
+ writer.writeMessage(
+ 'split',
+ {
+ timestamp: writer.time(sessionData.startTime),
+ message_index: splitdata.splitNumber,
+ split_type: 'interval_active',
+ total_elapsed_time: splitdata.totalTime,
+ total_timer_time: splitdata.totalTime,
+ total_moving_time: splitdata.totalTime,
+ total_distance: splitdata.totalLinearDistance,
+ avg_speed: splitdata.totalLinearDistance > 0 ? splitdata.totalLinearDistance / splitdata.totalTime : 0,
+ max_speed: splitdata.maxSpeed,
+ total_calories: splitdata.calories,
+ start_time: writer.time(splitdata.startTime),
+ end_time: writer.time(splitdata.endTime)
+ },
+ null,
+ (splitdata.splitNumber + 1) === (sessionData.noRestSplits + sessionData.noActiveSplits)
+ )
+ }
+ }
+
+ /**
+ * Creation of the rest split
+ * @see {@link https://developer.garmin.com/fit/cookbook/durations/|how the different times are defined}
+ */
+ async function createRestSplit (writer, splitdata) {
+    // First, make sure the rest split is complete
+ if (!!splitdata.complete && splitdata.complete) {
+      // Add a rest split summary
+ writer.writeMessage(
+ 'split',
+ {
+ timestamp: writer.time(sessionData.startTime),
+ message_index: splitdata.splitNumber,
+ split_type: 'interval_rest',
+ total_elapsed_time: splitdata.totalTime,
+ total_timer_time: splitdata.totalTime,
+ total_moving_time: 0,
+ total_distance: 0,
+ avg_speed: 0,
+ max_speed: 0,
+ total_calories: splitdata.calories,
+ start_time: writer.time(splitdata.startTime),
+ end_time: writer.time(splitdata.endTime)
+ },
+ null,
+ (splitdata.splitNumber + 1) === (sessionData.noRestSplits + sessionData.noActiveSplits)
+ )
+ }
+ }
+
+ // Write the events
+ async function writeEvents (writer, workout) {
+ // Start of the session
+ await addEvent(writer, workout.startTime, 'workout', 'start')
+ await addEvent(writer, workout.startTime, 'timer', 'start')
+
+    // Write the timer stop/start events for each rest lap
+ let i = 0
+ while (i < workout.laps.length) {
+ if (workout.laps[i].intensity === 'rest') {
+ // This is a rest interval
+ // eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
+ await addEvent(writer, workout.laps[i].startTime, workout.laps[i].event, 'stop')
+ // eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
+ await addEvent(writer, workout.laps[i].endTime, 'timer', 'start')
}
+ i++
+ }
+
+    // Finish the session with a stop event
+ await addEvent(writer, workout.endTime, 'timer', 'stopAll')
+ await addEvent(writer, workout.endTime, 'workout', 'stop')
+ }
+
+ async function addEvent (writer, time, event, eventType) {
+ writer.writeMessage(
+ 'event',
+ {
+ timestamp: writer.time(time),
+ event: event,
+ event_type: eventType,
+ event_group: 0
+ },
+ null,
+ true
)
}
+ /**
+ * @see {@link https://developer.garmin.com/fit/file-types/workout/|a general description of the workout structure}
+ * @see {@link https://developer.garmin.com/fit/cookbook/encoding-workout-files/|a detailed description of the workout structure}
+ */
async function createWorkoutSteps (writer, workout) {
- // See https://developer.garmin.com/fit/file-types/workout/ for a general description of the workout structure
- // and https://developer.garmin.com/fit/cookbook/encoding-workout-files/ for a detailed description of the workout structure
- const maxWorkoutStepNumber = workout.lap[workout.lap.length - 1].workoutStepNumber
+ // The file header
+ writer.writeMessage(
+ 'training_file',
+ {
+ timestamp: writer.time(workout.startTime),
+ time_created: writer.time(workout.startTime),
+ type: 'workout',
+ manufacturer: 'concept2',
+ product: 0,
+ serial_number: 0
+ },
+ null,
+ true
+ )
+
+ const maxWorkoutStepNumber = workout.laps[workout.laps.length - 1].workoutStepNumber
writer.writeMessage(
'workout',
{
@@ -623,6 +804,10 @@ export function createFITRecorder (config) {
// A target time is set
createWorkoutStep(writer, i, 'time', workout.workoutplan[i].targetTime * 1000, 'active')
break
+ case (workout.workoutplan[i].type === 'calories' && workout.workoutplan[i].targetCalories > 0):
+ // A target calorie count is set
+ createWorkoutStep(writer, i, 'calories', workout.workoutplan[i].targetCalories, 'active')
+ break
case (workout.workoutplan[i].type === 'rest' && workout.workoutplan[i].targetTime > 0):
// A target time is set
createWorkoutStep(writer, i, 'time', workout.workoutplan[i].targetTime * 1000, 'rest')
@@ -651,6 +836,36 @@ export function createFITRecorder (config) {
)
}
+ async function writeRecords (writer, workout) {
+ // It is an active lap, after we make sure it is a completed lap, we can write all underlying records
+ if (!!sessionData.totalMovingTime && sessionData.totalMovingTime > 0 && !!workout.strokes[workout.strokes.length - 1].totalLinearDistance && workout.strokes[workout.strokes.length - 1].totalLinearDistance > 0) {
+ let i = 0
+ while (i < workout.strokes.length) {
+ // eslint-disable-next-line no-await-in-loop -- This is inevitable if you want to have some decent order in the file
+ await createTrackPoint(writer, workout.strokes[i])
+ i++
+ }
+ }
+ }
+
+ async function createTrackPoint (writer, trackpoint) {
+ writer.writeMessage(
+ 'record',
+ {
+ timestamp: writer.time(trackpoint.timestamp),
+ distance: trackpoint.totalLinearDistance,
+ total_cycles: trackpoint.totalNumberOfStrokes,
+ activity_type: 'fitnessEquipment',
+ ...(trackpoint.cycleLinearVelocity > 0 || trackpoint.isPauseStart ? { speed: trackpoint.cycleLinearVelocity } : {}),
+ ...(trackpoint.cyclePower > 0 || trackpoint.isPauseStart ? { power: trackpoint.cyclePower } : {}),
+ ...(trackpoint.cycleStrokeRate > 0 ? { cadence: trackpoint.cycleStrokeRate } : {}),
+ ...(trackpoint.cycleDistance > 0 ? { cycle_length16: trackpoint.cycleDistance } : {}),
+ ...(trackpoint.dragFactor > 0 || trackpoint.dragFactor < 255 ? { resistance: trackpoint.dragFactor } : {}), // As the data is stored in an int8, we need to guard the ma>
+ ...(trackpoint.heartrate !== undefined && trackpoint.heartrate > 0 ? { heart_rate: trackpoint.heartrate } : {})
+ }
+ )
+ }
+
async function createVO2MaxRecord (writer, workout) {
if (!isNaN(VO2max.result()) && VO2max.result() > 10 && VO2max.result() < 60) {
writer.writeMessage(
@@ -684,9 +899,11 @@ export function createFITRecorder (config) {
}
}
+ /**
+ * This function is called when the rowing session is stopped. postExerciseHR[0] is the last measured exercise HR
+ * Thus postExerciseHR[1] is Recovery HR after 1 min, etc..
+ */
function measureRecoveryHR () {
- // This function is called when the rowing session is stopped. postExerciseHR[0] is the last measured excercise HR
- // Thus postExerciseHR[1] is Recovery HR after 1 min, etc..
if (!isNaN(heartRate) && config.userSettings.restingHR <= heartRate && heartRate <= config.userSettings.maxHR) {
log.debug(`*** Fit-recorder HRR-${postExerciseHR.length}: ${heartRate}`)
postExerciseHR.push(heartRate)
@@ -764,14 +981,19 @@ export function createFITRecorder (config) {
function reset () {
heartRate = 0
- lapnumber = 0
lapHRMetrics.reset()
+ splitActiveHRMetrics.reset()
+ splitRestHRMetrics.reset()
+ splitVelocityMetrics.reset()
sessionHRMetrics.reset()
sessionData = null
sessionData = {}
sessionData.workoutplan = []
sessionData.workoutplan[0] = { type: 'justrow' }
sessionData.lap = []
+ sessionData.split = []
+ sessionData.noActiveSplits = 0
+ sessionData.noRestSplits = 0
sessionData.complete = false
postExerciseHR = null
postExerciseHR = []
diff --git a/app/recorders/rowingDataRecorder.js b/app/recorders/rowingDataRecorder.js
index 1da019f764..5dd1c59b57 100644
--- a/app/recorders/rowingDataRecorder.js
+++ b/app/recorders/rowingDataRecorder.js
@@ -117,7 +117,7 @@ export function createRowingDataRecorder (config) {
strokes[strokeNumber].driveHandleForceCurve = metrics.driveHandleForceCurve
strokes[strokeNumber].driveHandleVelocityCurve = metrics.driveHandleVelocityCurve
strokes[strokeNumber].driveHandlePowerCurve = metrics.driveHandlePowerCurve
- VO2max.push(metrics)
+ VO2max.push(metrics, heartRate)
if (!isNaN(metrics.dragFactor) && metrics.dragFactor > 0) { drag.push(metrics.dragFactor) }
allDataHasBeenWritten = false
rowingDataFileContentIsCurrent = false
diff --git a/app/recorders/stravaInterface.js b/app/recorders/stravaInterface.js
index c1cbcebc26..bd404292ab 100644
--- a/app/recorders/stravaInterface.js
+++ b/app/recorders/stravaInterface.js
@@ -4,8 +4,8 @@
*/
/**
* This Module is the interface to the Strava.com webservice
- * See https://developers.strava.com/
- * and https://gist.github.com/michaellihs/bb262e2c6ee93093485361de282c242d
+ * @see {@link https://developers.strava.com/|Official Strava documentation}
+ * @see {@link https://gist.github.com/michaellihs/bb262e2c6ee93093485361de282c242d|this practical application}
*/
/* eslint-disable camelcase -- As Strava's url parameters use underscores, not much we can do about it */
import log from 'loglevel'
@@ -26,6 +26,8 @@ export function createStravaInterface (config) {
if (!recorder.minimumDataAvailable()) {
log.info(`${recorder.presentationName} file has not been uploaded to Strava.com, as there was not enough data recorded`)
return
+ } else {
+ log.info(`Attempting upload of ${recorder.presentationName} file to Strava.com`)
}
// ToDo: check if the uploaded file has changed since last upload based on total recorded movingtime
diff --git a/app/recorders/tcxRecorder.js b/app/recorders/tcxRecorder.js
index 6810657d04..97d2f778fd 100644
--- a/app/recorders/tcxRecorder.js
+++ b/app/recorders/tcxRecorder.js
@@ -111,7 +111,7 @@ export function createTCXRecorder (config) {
} else {
sessionData.lap[lapnumber].strokes[strokenumber].heartrate = undefined
}
- VO2max.push(metrics)
+ VO2max.push(metrics, heartRate)
tcxfileContentIsCurrent = false
allDataHasBeenWritten = false
}
diff --git a/app/recorders/utils/BucketedLinearSeries.js b/app/recorders/utils/BucketedLinearSeries.js
index fd72a62ea4..77939502ee 100644
--- a/app/recorders/utils/BucketedLinearSeries.js
+++ b/app/recorders/utils/BucketedLinearSeries.js
@@ -5,7 +5,7 @@
This Module calculates a bucketed Linear Regression. It assumes a rising line.
*/
-import { createTSLinearSeries } from '../../engine/utils/FullTSLinearSeries.js'
+import { createTSLinearSeries } from '../../engine/utils/TSLinearSeries.js'
/**
* @param {number} xCutOffInterval
diff --git a/app/recorders/utils/VO2max.js b/app/recorders/utils/VO2max.js
index d59834a095..c97d089a1c 100644
--- a/app/recorders/utils/VO2max.js
+++ b/app/recorders/utils/VO2max.js
@@ -27,15 +27,15 @@ export function createVO2max (config) {
/**
* @param {Metrics} metrics
*/
- function push (metrics) {
+ function push (metrics, HRData) {
VO2MaxResultIsCurrent = false
- if (metrics.totalMovingTime > offset && !!metrics.heartrate && !isNaN(metrics.heartrate) && metrics.heartrate >= config.userSettings.restingHR && metrics.heartrate < config.userSettings.maxHR && !isNaN(metrics.cyclePower) && metrics.cyclePower > 0 && metrics.cyclePower <= config.userSettings.maxPower) {
+ if (metrics.totalMovingTime > offset && !!HRData && !isNaN(HRData) && HRData >= config.userSettings.restingHR && HRData < config.userSettings.maxHR && !isNaN(metrics.cyclePower) && metrics.cyclePower > 0 && metrics.cyclePower <= config.userSettings.maxPower) {
// We are outside the startup noise and have numeric fields
metricsArray.push({
totalMovingTime: metrics.totalMovingTime,
totalLinearDistance: metrics.totalLinearDistance,
cyclePower: metrics.cyclePower,
- heartrate: metrics.heartrate
+ heartrate: HRData
})
}
}
diff --git a/config/rowerProfiles.js b/config/rowerProfiles.js
index 121163666b..32b4077d93 100644
--- a/config/rowerProfiles.js
+++ b/config/rowerProfiles.js
@@ -30,8 +30,12 @@ export default {
// NOISE FILTER SETTINGS
// Filter Settings to reduce noise in the measured data
- // Smoothing determines the length of the running average for filtering the currentDt, 1 effectively turns it off
- smoothing: 1,
+ // Systematic error agressiveness determines the strength of the systematic error filter. 0 turns it off (default), 1 turns it to its maximum.
+ // A value of 0.10 is known to bring some benefits, but some machines can handle 0.90 to 1.0. Don't set too high if the machine has a lot of signal bounce.
+ systematicErrorAgressiveness: 0,
+
+ // Size of the total buffer for the systematic error filter. We recommend using at least the length of the longest recovery here (logs can tell this).
+ systematicErrorNumberOfDatapoints: 1,
// Flank length determines the number of measuments that are used for determining the angular velocity and angular acceleration
flankLength: 3,
@@ -150,20 +154,21 @@ export default {
maximumStrokeTimeBeforePause: 6.0,
dragFactor: 68,
autoAdjustDragFactor: true,
- minimumDragQuality: 0.60,
+ minimumDragQuality: 0.85,
dragFactorSmoothing: 3,
minimumTimeBetweenImpulses: 0.005,
- maximumTimeBetweenImpulses: 0.0145,
+ maximumTimeBetweenImpulses: 0.01375,
flankLength: 12,
- smoothing: 1,
- minimumStrokeQuality: 0.34,
- minimumForceBeforeStroke: 11,
+ systematicErrorAgressiveness: 0.95,
+ systematicErrorNumberOfDatapoints: 240,
+ minimumStrokeQuality: 0.26,
+ minimumForceBeforeStroke: 22,
minimumRecoverySlope: 0.00070,
autoAdjustRecoverySlope: true,
- autoAdjustRecoverySlopeMargin: 0.01,
- minimumDriveTime: 0.40,
+ autoAdjustRecoverySlopeMargin: 0.0045,
+ minimumDriveTime: 0.50,
minimumRecoveryTime: 0.90,
- flywheelInertia: 0.101255,
+ flywheelInertia: 0.10145,
magicConstant: 2.8
},
@@ -228,12 +233,12 @@ export default {
dragFactorSmoothing: 3,
flywheelInertia: 0.180,
dragFactor: 225,
- flankLength: 11,
+ flankLength: 7,
minimumStrokeQuality: 0.34,
minimumRecoverySlope: 0,
- autoAdjustRecoverySlope: false,
- autoAdjustRecoverySlopeMargin: 0.1,
- minimumForceBeforeStroke: 80,
+ autoAdjustRecoverySlope: true,
+ autoAdjustRecoverySlopeMargin: 0.14,
+ minimumForceBeforeStroke: 35,
minimumDriveTime: 0.30,
minimumRecoveryTime: 0.60
},
diff --git a/docs/Architecture.md b/docs/Architecture.md
index 0c8e9c1417..c4daf63b41 100644
--- a/docs/Architecture.md
+++ b/docs/Architecture.md
@@ -125,7 +125,7 @@ sequenceDiagram
Both the `webServer.js` and `PeripheralManager.js` can trigger a command. Server.js will communicate this command to all managers, where they will handle this as they see fit. The following commands are defined:
| command | description | Relvant manager behaviour |
-|---|---|---|
+| --- | --- | --- |
| updateIntervalSettings | An update in the interval settings has to be processed. Here the `data` parameter has to be filled with a valid workout object in JSON format | The `SessionManager` will ingest it and use it to structure the workout (see its description). The `fitRecorder` will inject it in the recording |
| start | start of a session initiated by the user. As the true start of a session is actually triggered by the flywheel, which will always be communicated via the metrics, its only purpose is to make sure that the flywheel is allowed to move. This command is routinely sent at the start of a ANT+ FE-C communication. | The `SessionManager` will activate a stopped workout. All other managers will ignore the command, but will obey the `SessionManager`'s response. |
| startOrResume | User forced (re)start of a session. As the true start of a session is actually triggered by the flywheel, its only purpose is to clear the flywheel for further movement. This is not used in normal operation, but can functionally change a 'stopped' session into a 'paused' one. Intended use is to allow a user to continue beyond pre-programmed interval parameters as reaching them results in a session being 'stopped'. | The `SessionManager` will reactivate a stopped workout. All other managers will ignore the command, but will obey the `SessionManager`'s resonse. |
@@ -176,13 +176,13 @@ A key thing to realize is that `SessionManager.js` will process *currentDt* valu
Part of the metrics is the metricsContext object, which provides an insight in the state of both stroke (determined in `RowingStatistics.js`) and session (determined in `SessionManager.js`), allowing the clients to trigger on these flags. The following flags are recognised:
| Flag | Meaning |
-|---|---|
+| --- | --- |
| isMoving | Rower is moving |
| isDriveStart | Current metrics are related to the start of a drive |
| isRecoveryStart | Current metrics are related to the start of a recovery |
| isSessionStart | Current metrics are related to the start of a session |
| isIntervalEnd | Current metrics are related to the end of an session interval. An interval implies that there will be no stop of the rowing session between the current and next interval unless there is an intended (temporary) rest period in the session after the interval. If a rest is specified (the flywheel is intended to stop), a "isPauseStart" is indicated as well. |
-| isSplitEnd | Current metrics are related to the end of a session split. |
+| isSplitEnd | Current metrics are related to the end of a session split. |
| isPauseStart | Current metrics are related to the start of a session pause. This implies that the flywheel is intended to stop after this message (interval with a forced rest period), or actually has stopped (spontanuous pause). |
| isUnplannedPause | Indication by the sessionManager that the metrics are inside a spontanuous pause if set to 'true'. Used to distinguish between a planned and unplanned pause by the PM5 emulator. |
| isPauseEnd | Current metrics are related to the end of a session pause, implying that the flywheel has started to move again. This is **NOT** sent upon completion of a planned rest period, as the pause is only eneded after the flywheel to reaches its minimum speed again. To identify if the SessionManager is still blocking metrics due to the pause still being active, check if the `pauseCountdownTime` is equal to 0. |
@@ -395,8 +395,8 @@ OpenRowingMonitor allows setting the NICE-level of both the `GpioTimerService.js
HOWEVER, when compared to an oracle system (the Concept2 PM5), we see quite a variation in deviation with that result.
-| Distance | Minimal deviation | Average deviation | Maximal deviation | Deviation Spread |
-|---|---|---|---|---|
+| Distance | Minimal deviation | Average deviation | Maximal deviation | Deviation Spread |
+| --- | --- | --- | --- | --- |
| 5000 meters | 0.70 sec | 1.08 sec | 1.40 sec | 0.70 sec |
| 10000 meters | 0.70 sec | 1.05 sec | 1.40 sec | 0.80 sec |
| 21097 meters | 0.70 sec | 1.08 sec | 1.30 sec | 0.60 sec |
@@ -411,7 +411,7 @@ Along with the introduction of Raspberry Pi 5, a new GPIO hardware architecture
* the possibility to measure on the upward or downward moving flank, or both
* the provision of a built-in debounce filter
-An alternative is the `onoff` library, which was used in OpenRowingMonitor up to version 0.8.2, which does work with the new RPi5 architecture. Although the latter benefits could be moved to `GpioTimerService.js`, the two former benefits can't. Therefore, we decided to wait with moving to onoff until a decent alternative for `pigpio` emerges.
+An alternative is the `onoff` library, which was used in OpenRowingMonitor up to version 0.8.2. This does work with the new RPi5 architecture. Although the latter benefit could be moved to `GpioTimerService.js`, the two former benefits can't. Based on test runs with historic data, we see a significant drop in data quality. For example, we see a significant drop in the average Goodness of Fit for drag detection from 0.97-0.99 to 0.80-0.85. We also see a very significant increase in cyclic error correction error messages, forced cyclic error filter resets and stroke detection errors, where in the `pigpio` setup these are very rare. Therefore, we decided to wait with moving to onoff until a decent alternative for `pigpio` emerges.
### Race conditions between commands and metrics
diff --git a/docs/Integrations.md b/docs/Integrations.md
index ebdd91adb3..267fa433f9 100644
--- a/docs/Integrations.md
+++ b/docs/Integrations.md
@@ -10,7 +10,7 @@ For services we distinguish between two types of functionality:
Looking at the individual services, we see the following:
| Service | Download workout plan | Upload results | Remarks |
-|---|---|---|---|
+| --- | --- | --- | --- |
| File system | No | Yes | Integrated service |
| Strava | No | Yes | Integrated service |
| RowsAndAll.com | No | Yes | Integrated service |
@@ -36,7 +36,7 @@ OpenRowingMonitor supports the following exports, which can be obtained via the
The creation of each of these files is independently controlled via their own parameters in the `config.js`. You can turn on each filetype independently without issue, as OpenRowingMonitor will make sure the names will not be identical, even when the file extension is the same. OpenRowingMonitor can create regular files and gzipped files (which are accepted by several websites) and will write them in the directory specified in the `dataDirectory` parameter of `config.js`. In `config.js`, you can set a parameter to create a file and another parameter to gzip it. The following parameters are available:
| File type | parameter to create file | parameter to zip file |
-|---|---|---|
+| --- | --- | --- |
| Garmin FIT files | createFitFiles | gzipFitFiles |
| Garmin TCX files | createTcxFiles | gzipTcxFiles |
| Rowingdata csv | createRowingDataFiles | - |
@@ -173,7 +173,7 @@ The `machineName` is an element that is used to identify your monitor uniquely i
The topic 'OpenRowingMonitor/`machineName`/metrics' will contain your metrics. Each completed stroke results in one message, initiated at the beginning of the drive. At the begin/end of splits, intervals and sessions an additional message will be sent. Flags indicate the rowing machine state and all associated metrics.
| Field | Meaning | Unit |
-|---|---|---|
+| --- | --- | --- |
| timestamp | The timestamp of the creation of the metrics | JSON timestamp |
| intervaltype | The type of the current interval | `justrow`, `distance`, `time` or `rest` |
| sessionStatus | | |
@@ -192,13 +192,13 @@ The topic 'OpenRowingMonitor/`machineName`/metrics' will contain your metrics. E
| splitNumber | | Counter |
| heartrate | | Beats per minute |
| velocity | | m/s |
-| pace | | sec/500m |
+| pace | | sec/500m |
| power | | Watts |
| driveDuration | | milliseconds |
| driveLength | | Meters |
| recoveryDuration | | milliseconds |
| strokeDuration | | milliseconds |
-| strokeRate | | strokes per minute|
+| strokeRate | | strokes per minute |
| distancePerStroke | | Meters |
| peakHandleForce | Maximum encountered force during the drive | Newtons |
| averageHandleForce | Average handle force during the drive | Newtons |
@@ -241,6 +241,7 @@ Valid values for type are:
* `justrow`: an endless session that will not stop unless you stop rowing. If you like an undetermined cooldown after a session, this is recomended as last interval.
* `distance`: creates an interval that will end at a specified distance. This requires the `targetDistance` to be greater than 0 meters.
* `time`: creates an interval that will end at a specified time. This requires the `targetTime` to be greater than 0 seconds.
+* `calories`: creates an interval that will end at a specified calorie count. This requires the `targetCalories` to be greater than 0 calories.
* `rest`: creates an rest interval with a minumum duration of `targetTime` seconds. PLease note, duing a rest interval, no metrics will be recorded.
Splits are optional elements. It will allow a session to be split up into smaller pieces for analysis purposes. In OpenRowingMonitor, intervals and splits do not have to be of the same type. So one can have time based splits in a distance based interval. Please observe that in the transition from one interval to the next, splits are reset.
diff --git a/docs/Mathematical_Foundations.md b/docs/Mathematical_Foundations.md
new file mode 100644
index 0000000000..66364bbb8c
--- /dev/null
+++ b/docs/Mathematical_Foundations.md
@@ -0,0 +1,265 @@
+# The mathematics of OpenRowingMonitor
+
+
+In this document we explain the math behind the OpenRowingMonitor, to allow for independent review and software maintenance. It should be read in conjunction with [our description of OpenRowingMonitor's physics](./physics_openrowingmonitor.md), as these interact. When possible, we link to the source code to allow further investigation and keep the link with the actual implementation.
+
+Please note that this text is used as a rationale for design decisions of the mathematical algorithms used in OpenRowingMonitor. So it is of interest for people maintaining the code (as it explains why we do things the way we do) and for academics to verify or improve our solution. For these academics, we conclude with a section of open design issues as they might provide avenues of future research and improvement. As the algorithms described here are essential to OpenRowingMonitor's performance, we are actively looking for improvements in accuracy and reduction of cpu-load. If you are interested in just using OpenRowingMonitor as-is, this might not be the text you are looking for.
+
+This document consists out of four sections:
+
+* [A description of the leading design principles of the mathematic algorithms](#leading-design-principles-of-the-mathematic-algorithms)
+* [An overview of the places where specific algorithms are used](#overview-of-algorithms-used)
+* [The selection and/or design of the specific mathematical algorithms used](#the-selection-and-design-of-used-mathematical-algorithms)
+* [Open design issues and regrettable design decisions](#open-issues-known-problems-and-regrettable-design-decissions)
+
+## Leading design principles of the mathematic algorithms
+
+In our design of the physics engine, we obey the following principles (see also [the architecture document](Architecture.md)):
+
+* all calculations should be performed in real-time in a stream of datapoints, even on data intensive machines, to allow decent feedback to the user. The load on the CPU thus has to be limited as some rowing machines are data intensive, and a Raspberry Pi Zero 2W is used, and the app's CPU load interferes with the accurate measurement of time between pulses by the responsible kernel functions;
+
+* stay as close to the original data as possible (thus depend on direct measurements as much as possible) instead of heavily depending on derived data. This means that there are two absolute values we try to stay close to as much as possible: the **time between an impulse** and the **Number of Impulses**, where we consider **Number of Impulses** most reliable, and **time between an impulse** reliable but containing noise (the origin and meaning of these metrics, as well as the effects of this approach are explained later);
+
+* use robust calculations wherever possible (i.e. not depend on a single measurement, extrapolations, derivation, etc.) to reduce effects of measurement errors. A typical issue is the role of *CurrentDt* (essentially Δt), which is often used as a divisor with small numbers, increasing the effect of measurement errors in most metrics. When we do need to calculate a derived function, we choose to use a linear regression method to reduce the impact of noise and then use the function to calculate the derived function;
+
+* As a key element of the analysis of algorithm performance, we use two ways of testing performance: a **synthetic approach** which uses a known polynomial (sometimes with injected noise) to see how well a certain algorithm behaves, and an **organic approach**, where previously recorded data is replayed. A key indicator for the organic approach is the Goodness Of Fit of the drag calculation: [as theory prescribes](./physics_openrowingmonitor.md#determining-the-drag-factor-of-the-flywheel), the progression of Δt throughout the recovery time should be a perfect straight line, thus any deviation from that line is most likely due to measurement noise.
+
+## Overview of algorithms used
+
+### Filtering on systematic noise on *CurrentDt*
+
+Several machines, including the Concept2 RowErg, [are known to have small errors in their magnet placement](./rower_settings.md#fixing-magnet-placement-errors). The systematic error filter is designed to reduce the effects of these systematic errors. Although the subsequent calculations are designed to be robust against noise, the repeating nature has a tendency to still disturb measurements, requiring a different approach to noise suppression.
+
+As the synchronisation with the actual flywheel position is essential (as somehow it should be known which specific misplaced magnet produces which error) and quite hard to solve, we've chosen the approach to continuously dynamically calculate the error correction value, rather than provide a static value beforehand that has to be synchronised.
+
+A key assumption is that these structural errors will always be present as part of the random noise, and by comparing multiple observations across time, systematic errors can be identified. In essence, the residual between the raw input and the regression corrected projection is used as a basis for future corrections. These concepts are shared with [Kalman filters](https://en.wikipedia.org/wiki/Kalman_filter). Here, as the error is specific for each magnet, we need to maintain such a filter per magnet.
+
+Here, we use a linear regressor to determine the relation between the raw value and the 'perfect values' (i.e. noise free) per individual magnet. By maintaining a function per magnet, we can calculate the error per magnet as a function of the raw value, allowing for an effective error correction of systematic placement errors in the magnet array.
+
+[`Flywheel.js`](../app/engine/Flywheel.js) both feeds and uses the [`CyclicErrorFilter.js`](../app/engine/utils/CyclicErrorFilter.js). Given its dynamic approach, there is a need for a source of *perfect values*. In [`Flywheel.js`](../app/engine/Flywheel.js) there are two potential sources for estimating the perfect value of a given *CurrentDt*:
+
+* The Quadratic regression used for the estimation of ω and α. This is a continuous stream of data as each *CurrentDt* will be processed via this algorithm, allowing the algorithm to adjust quickly to changes. The key issue is that this quadratic regression is an approximation at best: especially in the drive the true movement behaves like a much higher polynomial, so errors found can also originate from the deviation between the quadratic estimator and the true data. Using this as a basis for noise reduction would be theoretically unsound. An additional issue is that, due to its design, it requires solving the local quadratic equation to find the total time given a specific angular distance θ, which might result in two solutions. A minor issue is that this regression is done on relatively small *flankLength* sized intervals (typically two full rotations), making it a narrow basis for error correction, especially for systematic errors. These issues combined make this the far less desirable choice.
+
+* The linear regression used for the drag calculation. This is a discontinuous stream, as data is only collected during the recovery, and can only be processed after the recovery is complete (as only then the regression completes to deliver a completed estimate of the drag slope). The big benefit is that this regression analysis is theoretically sound: [the physics model prescribes a straight line](./physics_openrowingmonitor.md#determining-the-drag-factor-of-the-flywheel) and linear regression is used to calculate it. It has a much wider base, as the regression is conducted over the entirety of the recovery (often dozens of full rotations), making the regression also less vulnerable to systematic errors. Additional benefit is that the regression already is arranged optimally to calculate *perfect values of currentDt* as the dragslope measures the decay of *currentDt* through time. Disadvantage is that shifts (i.e. misalignment of the physical flywheel and the corrections due to missed datapoints or switch bounces) and changes in magnet error are detected relatively late: it will take at least a recovery and part of the drive to detect and correct these issues.
+
+Practical experiments support the above argument: where feeding the algorithm using the Quadratic regression for systematic error correction results in a Goodness Of Fit for the recovery slope of 0.9990, feeding from the linear regression results in a Goodness Of Fit often exceeding 0.9996.
+
+A key element is the use of a weighted linear regression method, where the weight is based on the Goodness of Fit of the datapoint to prevent badly fitted drag slopes or badly fitting specific datapoints from throwing off the systematic error correction filter too much.
+
+A key issue is preventing time contraction/dilution: maintaining a correction factor per magnet will not guarantee that all corrections over all magnets across a session will not cause a drift in time (as the corrected *currentDt* is also used for calculating the moving time since start [`Flywheel.js`](../app/engine/Flywheel.js)). To keep time drift under control, the [`CyclicErrorFilter.js`](../app/engine/utils/CyclicErrorFilter.js) will guarantee that the average of all slopes will be 1 at all times, and the average of all intercepts will be 0. This is a bit crude, but it is shown to be effective.
+
+### Linear regression algorithm for dragfactor calculation based on *CurrentDt* and time
+
+In [`Flywheel.js`](..//app/engine/Flywheel.js) the recovery slope is determined. Theoretically, this should be a line following:
+
+$$ {k \cdot 2π \over I \cdot \text{Impulses Per Rotation}} = {ΔcurrentDt \over Δt} $$
+
+This is expected to be a straight line, where its slope is essentially the dragfactor multiplied by a constant. For the drag-factor calculation (and the closely related recovery slope detection), we observe four things:
+
+* As this tends to be a serious set of datapoints (over 200 for a Concept2), higher polynomial algorithms are not considered applicable. As the dragfactor is essential for all linear metrics, it is crucial it is robust against noise;
+
+* The number of datapoints in the recovery phase isn't known in advance, and is subject to significant change due to variations in recovery time (i.e. sprints), making the Incomplete Theil–Sen estimator incapable of calculating their slopes in the stream as the efficient implementations require a fixed window. OLS has an O(1) complexity for continuous datastreams, and has proven to be sufficiently robust for most practical use. Using the Linear Theil-Sen estimator results in a near O(N) calculation at the start of the *Drive* phase (where N is the length of the recovery in datapoints). The Quadratic Theil-Sen estimator results in an O(N²) calculation at the start of the *Drive* phase. Given the number of datapoints often encountered (a recovery phase on a Concept 2 contains around 200 datapoints), this is a significant CPU-load that could disrupt the application;
+
+* In non-time critical replays of earlier recorded rowing sessions, the Incomplete Theil–Sen estimator performed worse than OLS: OLS with a high pass filter on r2 resulted in a much more stable dragfactor than the Incomplete Theil–Sen estimator did. The Theil–Sen estimator, in combination with a filter on r2 has shown to be even a bit more robust than OLS. This suggests that the OLS algorithm combined with a requirement for a sufficiently high r2 handles the outliers sufficiently to prevent drag poisoning and thus provide a stable dragfactor for all calculations. The Linear Theil-Sen estimator outperformed OLS by a small margin, but noticeably improved stroke detection where OLS could not regardless of parameterisation.
+
+* Applying Quadratic OLS regression does not improve its results when compared to Linear OLS regression or Linear TS. For the drag (and thus recovery slope) calculation, the Linear Theil-Sen estimator has a slightly better performance than OLS, while keeping CPU-load acceptable for a data-intensive rowing machine (Concept 2, 12 datapoints flank, 200 datapoints in the recovery). A Quadratic Theil-Sen based drag calculation has shown to be too CPU-intensive. For the stroke detection itself, OLS and Linear Theil-Sen deliver the same results, while OLS is less CPU intensive.
+
+* Use of trimming, for example to prevent heads or tails of the drive entering the drag-calculation, has negative effects. Where for a test sample (a Concept2 RowErg on drag 68, which is the most difficult for OpenRowingMonitor) the recovery slope with the current algorithm has an R2 of 0.96, applying trimming reduced this to a R2 of 0.93, suggesting that trimming makes the fit worse instead of better;
+
+Therefore, we choose to apply the Linear Theil-Sen estimator for the calculation of the dragfactor and the closely related recovery slope.
+
+As the cyclic error correction filter can provide an indication (i.e. the general Goodness of Fit of the relation between raw values and perfect values, essentially representing the variance), we also include that as weight. This prevents results from badly fitting error corrections of individual magnets from polluting the drag calculation too much, stabilising its slope calculation. At first glance, this might lead to feedback loops, as the systematic error correction is fed by the drag calculation, and the systematic error correction will influence drag calculation. The feeding algorithm of the systematic error correction indeed uses a goodness of fit that is both global and local, where a specific badly fitted magnet might further lose its influence (especially via the local fit) as its local fit will deviate further and further from the regression line as it loses influence. As the Goodness of Fit reported by the systematic error correction focuses on the variance in the relation between raw and perfect values, an enduring low Goodness of Fit for a specific error correction of a magnet indicates that new updates provide a different relation between raw and perfect values each recovery. As this relation in a large part is determined by the slope of the drag calculation (as that determines the perfect value) we consider this unlikely as other magnets tend to stabilise this effect. In practice, we haven't seen this happen yet.
+
+### Linear regression algorithms applied for recovery detection based on *CurrentDt* and time
+
+We use OLS for the stroke detection.
+
+### Regression algorithm used for Angular velocity ω and Angular Acceleration α based on the relation between θ and time
+
+As *currentDt* only provides us with a position and time to work with, options for determining the values of ω and α are quite limited. The standard numerical approach of ω = ${Δθ \over Δt}$ and the subsequent α = ${Δω \over Δt}$ are too imprecise and vulnerable to noise in *CurrentDt*. Tests show (see the test for the cubic function f(x) = x³ + 2x² + 4x in [`flywheel.test.js`](../app/engine/Flywheel.test.js)) that in an artificial noise free series simulating a continuously accelerating flywheel, the underestimation varies but is significant:
+
+| Test | ω | α |
+| --- | --- | --- |
+| Noise free | -1.8% to -5% | -0.5% to -4.8% |
+| Systematic noise (+/- 0.0001 sec error) | -1.95% to -2.66% | -11.05% to +9.69% |
+
+As this table shows, the traditional numerical approach is too unstable to be useful, especially for determining the angular acceleration α where the deviation from the theoretical value deviates wildly. In the presence of random noise, deviations become bigger and power and force curves contain several spikes. Abandoning the numerical approach for a regression based approach has resulted in a huge improvement in metric robustness, both in theory and practice.
+
+We thus determine the Angular Velocity ω and Angular Acceleration α based on the regression of the function of θ and time, and then use the function's derivatives to determine angular velocity ω and angular acceleration α. The function of θ through time is quite dynamic: when a simple static force would be applied, θ would behave as 1/2 \* α \* t² + ω \* t, a quadratic function with respect to time. As the force on the flywheel is quite dynamic throughout the stroke, this function would probably be a cubic, quartic or even quintic in reality. Also observe that we use both the first derived function (i.e. ω) and the second derived function (i.e. α), requiring at least a quadratic regression algorithm, as a linear regressor would make the second derived function trivial.
+
+Looking at the signals found in practice, we also observe specific issues, which could result in structurally overfitting the dataset if the polynomial were too high, negating its noise reduction effect. As the following sample of three rotations of a Concept2 flywheel shows, due to production tolerances or deliberate design constructs, there are **systematic** errors in the data due to magnet placement or magnet polarity. This results in systematic issues in the datastream:
+
+
+Deviation of the Concept 2 RowErg
+
+Fitting a quadratic curve with at least two full rotations of data (in this case, 12 datapoints) seems to reduce the noise to very acceptable levels, forcing the algorithm to follow the trend, not the individual datapoints. In our view, fitting a third-degree polynomial would result in a better fit with the systematic errors, thus resulting in a much less robust signal. As a cubic regression analysis method will lead to overfitting certain error modes, we are constricted to quadratic regression analysis methods. By using a sliding window algorithm, using multiple quadratic approximations for angular velocity ω and angular acceleration α for the same datapoint, we aim to get close to a cubic regressors behaviour, without the overfitting.
+
+@@@@@
+
+So far, we were able to implement Quadratic Theil-Sen regression and get reliable and robust results. Currently, the use of Quadratic Theil-Sen regression represents a huge improvement over both the traditional numerical approach (as taken by [[1]](#1) and [[4]](#4)) used by earlier approaches of OpenRowingMonitor. In essence, it is a more advanced Moving Least Squares regression approach, where the regression method is Theil-Sen. Practical testing has confirmed that Quadratic Theil-Sen outperformed all Linear Regression methods in terms of robustness and responsiveness. Based on extensive testing with multiple simulated rowing machines, Quadratic Theil-Sen has proven to deliver the best results and thus is selected to determine ω and α.
+
+The (implied) underlying assumption underpinning the use of the Quadratic Theil-Sen regression approach is that the Angular Acceleration α is constant, or near constant by approximation in the flank under measurement. In essence, quadratic Theil-Sen regression would be fitting if the acceleration were a constant, and the relation of θ, α and ω thus would be captured in θ = 1/2 \* α \* t² + ω \* t. We do realize that in rowing the Angular Acceleration α, by nature of the rowing stroke, will vary based on the position in the Drive phase: the ideal force curve is a haystack, thus the force on the flywheel varies in time.
+
+As the number of datapoints in a *Flanklength* in the relation to the total number of datapoints in a stroke is relatively small, we use quadratic Theil-Sen regression as an approximation on a smaller interval. In tests, quadratic regression has proven to outperform (i.e. less susceptible to noise in the signal) both the numerical approach with noise filtering and the linear regression methods. When using the right efficient algorithm, this has the strong benefit of being robust to noise, at the cost of an O(n²) calculation per new datapoint (where n is the flanklength). Looking at the resulting fit of the Quadratic Theil-Sen estimator, we see that it consistently is above 0.98, which is an extremely good fit given the noise in the Concept 2 RowErg data. Therefore, we consider this is a sufficiently decent approximation while maintaining a sufficiently efficient algorithm to be able to process all data in the datastream in time.
+
+Traditionally, the *Flanklength*, or the *bandwidth* in a Moving Least Squares regression, is a balance between robustness against noise and its responsiveness to details (i.e. the bias-variance trade-off). It is important to realize that unlike many other implementations, the *Flanklength* is also highly determined by the presence of systematic repeating errors in the signal: bad placement of specific magnets on the flywheel. The typical advice is to use a *Flanklength* of twice the number of magnets, as that completely suppresses the effect of repeated patterns present in the data, allowing the algorithm to correct it.
+
+Although the determination of angular velocity ω and angular acceleration α based on Quadratic Theil-Sen regression over the time versus angular distance θ works decently, we realize it does not respect the true dynamic nature of angular acceleration α. From a pure mathematical perspective, a higher order polynomial would be more appropriate. A cubic regressor, or even better a fourth order polynomial, has shown to be a better mathematical approximation of the time versus distance function for a Concept2 RowErg. We can imagine there are better suited third polynomial (cubic) approaches available that can robustly calculate α and ω as a function of time, based on the relation between time and θ. However, getting these to work in a datastream with very tight limitations on CPU-time and memory across many configurations is quite challenging.
+
+However, there are some current practical objections against using these more complex methods:
+
+* Higher order polynomials are less stable in nature, and overfitting is a real issue. As the displacement of magnets can present itself as a sinusoid-like curve (as the Concept 2 RowErg shows), 3rd or higher polynomials are inclined to follow that curve. As this might introduce wild shocks in our metrics, this might be a potential issue for application;
+* A key limitation is the available number of datapoints. For the determination of a polynomial of the n-th order, you need at least n+1 datapoints (which in OpenRowingMonitor translates to a `flankLength`). Some rowers, for example the Sportstech WRX700, only deliver 5 to 6 datapoints for the entire drive phase, thus putting explicit limits on the number of datapoints available for such an approximation.
+* Calculating a higher order polynomial in a robust way, for example by Theil-Sen regression, is CPU intensive. A quadratic approach requires an O(n²) calculation when a new datapoint is added to the sliding window (i.e. the flank). Our estimate is that with current known robust polynomial regression methods, a cubic approach requires at least an O(n³) calculation, and a 4th polynomial an O(n⁴) calculation. With smaller flanks (which determines the n) this has proven to be doable, but for machines which produce a lot of datapoints, and thus have more noise and a typically bigger `flankLength` (like the C2 RowErg and Nordictrack RX-800, both with a 12 `flankLength`), this becomes an issue: we consider completing 10³ or even 10⁴ complex calculations within the 5 milliseconds that is available before the next datapoint arrives, impossible.
+
+We also observe that in several areas the theoretical best approach did not deliver the best practical result (i.e. a "better" algorithm delivered a more noisy result for α and ω). Therefore, this avenue isn't investigated yet, but will remain a continuing area of improvement.
+
+This doesn't definitively exclude the use of more complex polynomial regression methods: alternative methods for higher polynomials within a datastream could be as CPU intensive as Theil-Senn Quadratic regression now, and their use could be isolated to specific combination of Raspberry hardware and settings. Thus, this choice for the specific algorithm will remain an active area of investigation for future versions.
+
+## The selection and design of used mathematical algorithms
+
+### Noise filtering algorithm
+
+See [`/app/engine/utils/StreamFilter.js`](../app/engine/utils/StreamFilter.js)
+
+For noise filtering, we use a moving median filter, which has the benefit of removing outliers completely. This is more robust than the moving average, where the effect of outliers is reduced, but not removed.
+
+### Linear regression algorithms
+
+There are several different linear regression methods [[9]](#9). We have several requirements on the algorithm:
+
+* it has to deliver results in near-real-time scenarios in a datastream;
+
+* if possible, it has to be robust to outliers: an outlier shouldn't skew the results too much [[10]](#10).
+
+Ordinary Least Squares is by far the most efficient and can easily be applied to datastreams. However, it isn't robust. From a robustness perspective, most promising methods are [least absolute deviations](https://en.wikipedia.org/wiki/Least_absolute_deviations), the [Theil–Sen estimator](https://en.wikipedia.org/wiki/Theil%E2%80%93Sen_estimator) and the [LASSO technique](https://en.wikipedia.org/wiki/Lasso_(statistics)). Most of these methods, except the Theil–Sen estimator, do not have a near-real-time solution. In the description of the linear regression methods, we describe the most promising ones.
+
+#### Ordinary Least Squares (OLS)
+
+See [`/app/engine/utils/OLSLinearSeries.js`](../app/engine/utils/OLSLinearSeries.js)
+
+Ordinary Least Squares regression (see [[5]](#5)) and [[6]](#6)) produces results that are generally acceptable and the O(N) performance is well-suited for near-real-time calculations. When implemented in a datastream, the addition of a new datapoint is O(1), and the calculation of a slope also is O(1). When using a high-pass filter on the r2 to disregard any unreliably approximated data, it can also be used to produce reliable results. See `engine/utils/OLSLinearSeries.js` for more information about the implementation.
+
+#### Theil–Sen estimator (Linear TS)
+
+See [`/app/engine/utils/FullTSLinearSeries.js`](../app/engine/utils/FullTSLinearSeries.js)
+
+Although the Theil–Sen estimator has an O(N log(N)) solution available, we could not find a readily available implementation. We did manage to develop a solution that has an O(N) impact during the addition of an additional datapoint in a datastream with a fixed length window, and O(log(N)) impact when determining the slope.
+
+### Polynomial regression algorithm
+
+#### Quadratic Theil–Sen estimator (Quadratic TS)
+
+See [`/app/engine/utils/FullTSQuadraticSeries.js`](../app/engine/utils/FullTSQuadraticSeries.js)
+
+The Theil–Sen estimator can be expanded to apply to Quadratic functions, where the implementation is O(N²). Based on a Lagrange interpolation, we can calculate the coefficients of the formula quite effectively, resulting in a robust estimation more fitting the data. See `engine/utils/FullTSQuadraticSeries.js` for more information about the background of the implementation.
+
+Theil-Sen is normally limited to linear regression. By using Lagrange interpolation, and a "median of triplets" approach we devised an algorithm that can be used on quadratics. In essence, it calculates the ideal quadratic for each combination of three datapoints in the dataset, and uses a median to determine the quadratic coefficient. This suggests an O(n³) algorithm, where n is the *flankLength*. However, by use of a sliding window and reuse of previous calculation data, we reduced it to O(n²) per added datapoint, which is sufficient for all known applications. Some relevant elements need to be mentioned:
+
+* Traditionally, all coefficients are determined in a single calculation cycle: the a, b and c are all calculated in the same cycle of Lagrange interpolations. However, we implemented a two-stage approach: after determining the quadratic coefficient a, the linear coefficient b and intercept c are calculated via linear Theil-Sen regression on the residual of y = a·x² + ε. Empirical testing shows that this leads to more consistent and better results (i.e. more accurate estimate of angular velocity ω and higher degrees of fit based on the same data). The computational penalty is relatively low, but the improvements are measurable.
+* Use of Tukey's mean instead of the median also had negative effects. Where for a test sample (a Concept2 RowErg on drag 68, which is the most difficult for OpenRowingMonitor) with the current algorithm has an R2 of 0.96, applying Tukey's mean reduced this to a R2 of 0.91 with a lot of stroke detection errors, even after adjusting key parameters accordingly.
+
+Further improvements to the implementation of the Theil-Sen is an active topic of investigation.
+
+#### Moving Regression analysis
+
+See [`/app/engine/utils/MovingWindowRegressor.js`](../app/engine/utils/MovingWindowRegressor.js)
+
+### Choices for the specific algorithms
+
+#### Implementation choices in the Theil-Sen regression
+
+#### Integration: Use of Quadratic Theil-Sen regression and a weighted average filter for determining ω and α
+
+We implemented this using a buffer with *flanklength* datapoints that acts like a sliding window. On each new datapoint the window slides one datapoint, and thus recalculates the critical parameters: a single α for the entire flank and the individual ω's for each individual datapoint in the buffer. As each datapoint slowly slides out of the window, multiple approximations are made for the same point, resulting in several α's and ω's that are valid approximations for that specific datapoint. Once the datapoint slides out of the sliding window, there are *flankLength* number of approximations for ω and α. A key question is how to combine these multiple approximations α and ω into a single true value for these parameters.
+
+To combine all valid values for α or ω for a specific datapoint to determine the definitive approximation of α and ω for that specific datapoint two main options are available:
+
+* a median of all values. This approach has proven very robust, and can prevent noise from disturbing power curves, but it is very conservative. For example, when compared to Concept 2's results, the force curves roughly have the same shape, but the peak values are considerably lower. It also has the downside of producing "blocky" force curves.
+* Using a weighted averager using Goodness of Fit. The weight is based on the r2: better fitting curves will result in a higher weight in the calculation, thus preferring approximations that are a better general fit with the data. This results in slightly more stable results and smoother force curves. This approach resulted in smoother (less blocky) force curves while retaining the responsiveness of the force curve. Based on testing (see the test for the cubic function f(x) = x³ + 2x² + 4x in [flywheel.test.js](../app/engine/Flywheel.test.js)), we get the following results:
+
+| Test | ω | α |
+| --- | --- | --- |
+| Noise free | -0.20% to -0.48% | -0.83% to -1.86% |
+| Systematic noise (+/- 0.0001 sec error) | -0.18% to -0.46% | -1.05% to -1.95% |
+
+* Using a weighted averager using both a global Goodness of Fit and a local goodness of fit indicator. The global weight is based on the r2: better fitting curves will result in a higher weight in the calculation, thus preferring approximations that are a better general fit with the total data in the buffer. By also adding the local Goodness of Fit indicator pointwise r2 (i.e. the proximity of the point to the curve at that specific point) a good local fit is also weighed in. This results in slightly more stable results and smoother force curves. This approach resulted in smoother (less blocky) force curves while retaining the responsiveness of the force curve. Based on testing (see the test for the cubic function f(x) = x³ + 2x² + 4x in [flywheel.test.js](../app/engine/Flywheel.test.js)), we get the following results:
+
+| Test | ω | α |
+| --- | --- | --- |
+| Noise free | -0.20% to -0.47% | -0.83% to -1.86% |
+| Systematic noise (+/- 0.0001 sec error) | -0.18% to -0.46% | -1.05% to -1.95% |
+
+Comparison across these tables shows that using the Goodness Of Fit is needed to get more reliable results. The effect of using the Local Goodness of Fit is not that convincing based on this data, but a more detailed analysis of the data shows small improvements with respect to the version without the Local Goodness of Fit. So we choose the weighted averager as basis for the combination of the multiple approximations into a single one.
+
+Finding a better approximation algorithm that ignores outlying values while maintaining the true data responsiveness is a subject for further improvement.
+
+## Open Issues, Known problems and Regrettable design decisions
+
+### Using iteration instead of running sums for Theil-Sen Goodness Of Fit
+
+Currently, both the Theil-Sen regressors (i.e. `TSLinearSeries.js` and `TSQuadraticSeries.js`) iterate over the datapoints in the flank, and determine the sse and sst for the Goodness of Fit calculation. There is an alternative approach, using running sums. This approach has the huge benefit that running sums are less CPU intensive to maintain, and don't force a sudden large calculation. Key concern here is the dragfactor calculation at the end of a recovery that iterates over a large collection (often over 200 datapoints for a Concept2 RowErg), resulting in a significant workload at one specific moment. When using running sums that are maintained throughout the recovery, it should maintain a lower profile as much of the work is done in small pieces throughout the recovery phase.
+
+For linear regression, it is defined as:
+
+```math
+\begin{aligned}
+&sse = \left( \sum_{i=1}^n weight_i y_i^2 \right)\\
+& - \left( 2b \sum_{i=1}^n weight_i y_i \right)\\
+& - \left( 2a \sum_{i=1}^n weight_i x_i y_i \right)\\
+& + \left( b^2 \sum_{i=1}^n weight_i \right)\\
+& + \left( 2ab \sum_{i=1}^n weight_i x_i \right)\\
+& + \left( a^2 \sum_{i=1}^n weight_i x_i^2 \right)\\
+
+&sst = \left( \sum_{i=1}^n weight_i y_i^2 \right)\\
+& - \left( 2 * \overline{y} * \sum_{i=1}^n weight_i y_i \right)\\
+& + \left( \overline{y}^2 * \sum_{i=1}^n weight_i \right)\\
+\end{aligned}
+```
+
+Where $(x_i, y_i)$ is the i-th datapoint in the flank, and $weight_i$ its weight. $\overline{y}$ is the weighted average of the entire flank in the y axis. $a$ and $b$ are the coefficients in $y = a x + b$.
+
+For quadratic functions, it is defined as:
+
+```math
+\begin{aligned}
+&sse = \left( \sum_{i=1}^n weight_i y_i^2 \right)\\
+& - \left( 2c \sum_{i=1}^n weight_i y_i \right)\\
+& - \left( 2b \sum_{i=1}^n weight_i x_i y_i \right)\\
+& - \left( 2a \sum_{i=1}^n weight_i x_i^2 y_i \right)\\
+& + \left( 2bc \sum_{i=1}^n weight_i x_i \right)\\
+& + \left( 2ac \sum_{i=1}^n weight_i x_i^2 \right)\\
+& + \left( b^2 \sum_{i=1}^n weight_i x_i^2 \right)\\
+& + \left( 2ab \sum_{i=1}^n weight_i x_i^3 \right)\\
+& + \left( a^2 \sum_{i=1}^n weight_i x_i^4 \right)\\
+& + \left( c^2 \sum_{i=1}^n weight_i \right)\\
+
+&sst = \left( \sum_{i=1}^n weight_i y_i^2 \right)\\
+& - \left( 2 * \overline{y} * \sum_{i=1}^n weight_i y_i \right)\\
+& + \left( \overline{y}^2 * \sum_{i=1}^n weight_i \right)\\
+\end{aligned}
+```
+
+Where $(x_i, y_i)$ is the i-th datapoint in the flank, and $weight_i$ its weight. $\overline{y}$ is the weighted average of the entire flank in the y axis. $a$, $b$ and $c$ are the coefficients in $y = a x^2 + b x + c$.
+
+However, these implementations suffered from numerical instability. This exposed itself at relatively small sessions (a 2500 meter row on a Concept2 RowErg) where Goodness Of Fit started to drift, and the error between the iteration and running sum started to grow from $10^{-15}$ to $10^{-2}$. This latter disturbs the functioning of OpenRowingMonitor. As in the running sum variation a Goodness Of Fit over 1 was frequently encountered, we considered it very likely that it is faulty. Making the underlying `Series.js` object, that is responsible for maintaining these running sums, much more robust by forcing continuous recalculations of these running sums did not resolve this issue.
+
+The current implementation thus relies on the iterative approach, despite the running sum being computationally much more efficient.
+
+@@@@@
+
+## References
+
+[1] Anu Dudhia, "The Physics of ErgoMeters"
+
+[4] Dave Vernooy, ErgWare source code
+
+[5] Wikipedia, "Simple Linear Regression"
+
+[6] University of Colorado, "Simple Linear Regression"
+
+[9] Wikipedia, "Linear regression"
+
+[10] Wikipedia, "Robust regression"
+
+[11] Incomplete Theil-Sen Regression
diff --git a/docs/PM5_Interface.md b/docs/PM5_Interface.md
index f19ba14545..4318ecd72c 100644
--- a/docs/PM5_Interface.md
+++ b/docs/PM5_Interface.md
@@ -1,6 +1,8 @@
# Description of the PM5 interface
-The design goal is to emulate PM5 communication sufficiently for users to connect easily to apps. We aim to have maximum compatibility with all these apps, making these apps to intuitively to use with OpenRowingMonitor. However, it explicitly is **NOT** our goal to completely emulate a full-blown PM5 with racing features and logbook verification. Also features that might lead to cheating or uploading results to the Concept2 logbook are explicitly excluded. Some testing is one on ErgData, as that is the definitive source how Concept2's data is to be interpreted, excluding interpretation errors by independent software developers.
+This document describes the design choices underpinning our PM5 interface. If you are not redesigning or modifying this implementation, this document probably isn't for you.
+
+The design goal is to emulate PM5 communication sufficiently for users to connect easily to apps. We aim to have maximum compatibility with all these apps, making these apps intuitive to use with OpenRowingMonitor. However, it explicitly is **NOT** our goal to completely emulate a full-blown PM5 with racing features and logbook verification. Also features that might lead to cheating or uploading results to the Concept2 logbook are explicitly excluded. Some testing is done on ErgData, as we consider it the definitive source how Concept2's data is to be interpreted, thus excluding interpretation errors by independent software developers based on less than optimal specifications.
This interface emulation is partially based on the description in Concept 2's API documentation ([[1]](#1) and [[2]](#2)). As this documentation is inconclusive about the timing/triggers for messages, as well as the exact definition of the values used, a large part is also based on analysis of the communication via recorded bluetooth traces with current PM5's.
@@ -9,7 +11,7 @@ This interface emulation is partially based on the description in Concept 2's AP
We aim to be interoperable with the following apps:
-| App | Required characteristics | Remarks |
+| App | Required characteristics | Remarks |
| --- | --------- | ------ |
| [ErgArcade cloud simulation](https://ergarcade.github.io/mrdoob-clouds/) | - [0x0031 "General Status"](#0x0031-general-status)
- [0x0032 "Additional Status"](#0x0032-additional-status)
| |
| [ErgArcade fluid simulation](https://ergarcade.github.io/WebGL-Fluid-Simulation/) | - [0x0031 "General Status"](#0x0031-general-status)
| Actually only uses `STROKESTATE_DRIVING` |
@@ -24,36 +26,36 @@ Some apps, like Aviron, Ergatta, Hydrow, iFIT and Peleton claim compatibility wi
## Structural differences between OpenRowingMonitor and a PM5
-As OpenRowingMonitor and PM5 have been independently developed, the design choices that have been made are not consistent. Here we adress these differences, as they are quite essential in the further implementation.
+As OpenRowingMonitor and PM5 have been independently developed, the design choices that have been made are not consistent by default. Here we address these differences, as they are quite essential in the further implementation.
### Workout Hierarchy
-OpenRowingMonitor recognizes three levels in a workout: the Session, the underlying Intervals and the Splits in these Intervals (see [the architecture document](./Architecture.md#session-interval-and-split-boundaries-in-sessionmanagerjs) for a more detailed description). A PM5 recognizes either a workout with one or more Intervals of varying length, or a single workout with several underlying splits with identical length. Some apps (ErgZone) even optimize workouts with multiple identical intervals to a workout with splits.
+OpenRowingMonitor recognizes three levels in a workout: the Session, the underlying Intervals and the Splits in these Intervals (see [the architecture document](./Architecture.md#session-interval-and-split-boundaries-in-sessionmanagerjs) for a more detailed description). A PM5 recognizes either a workout with one or more Intervals of varying length, or a single workout with several underlying splits with identical length. Some apps (ErgZone) even optimize workouts with multiple identical intervals to a workout with splits. This might lead to confusing situations where GUI behaviour changes radically due to trivial changes (like shortening a last interval by 1 second), but we consider that beyond our control to address.
The [CsafeManagerService.js](../app/peripherals/ble/pm5/csafe-service/CsafeManagerService.js) therefore will map:
* a fixed time/distance PM5 workout to a single OpenRowingMonitor Interval, and add the specified splits as OpenRowingMonitor splits if specified.
* A PM5 workout with multiple intervals to multiple OpenRowingMonitor Intervals, without any splits specified (as they can't be specified by the PM5).
-This makes scoping of many variables challenging as it is unclear whether a variable is intended to capture a split or the interval. Concept2's ambiguous description of most variables in [[1]](#1) and [[2]](#2) does not provide any clarification here.
+This makes scoping of many summarising variables challenging as it is unclear whether a variable is intended to capture a split or the interval. Concept2's ambiguous description of most variables in [[1]](#1) and [[2]](#2) does not provide any clarification here.
-[workoutSegment.js](../app/engine/utils/workoutSegment.js)'s default behaviour with missing split information helps here to overcome the structural issues. When split nformation is mising, it 'inherits' the split parameters of the above interval (in essence making the split boundaries identical to the interval). This makes the splits always contain the most granular division of the workout regardless of how the PM5 has communicated the workout. In reporting back to the app, the splits are thus the most likely basis for reporting in the PM5 emulated reporting. However, some variables seem to be scoped to the interval or workout level. A key reason for conducting the traces is to understand the scoping of each variable.
+Our approach here is to make [workoutSegment.js](../app/engine/utils/workoutSegment.js)'s default behaviour with missing split parameters to help overcome these structural issues. When split parameters are missing, it will always 'inherit' the parameters of the above interval (in essence making the split boundaries identical to the interval). This makes the splits always contain the most granular division of the workout regardless of how the PM5 has communicated the workout. In reporting back to the app, the splits are thus the most likely basis for reporting in the PM5 emulated reporting. However, some variables seem to be scoped to the interval or workout level explicitly. A key reason for conducting the traces is to understand the scoping of each variable to prevent confusing these apps.
### Positioning split/interval reporting
-OpenRowingMonitor will always report on the end-of-split boundary, including a summary of the split it just completed. A PM5 will report this **after** the split has concluded (i.e. in tje mew split), reporting about the split it has completed.
+OpenRowingMonitor will always report on the end-of-split boundary, including a summary of the split it just completed. A PM5 will report this **after** the split has concluded (i.e. in the new split), reporting about the split it has completed.
### Positioning planned rest intervals
OpenRowingMonitor treats planned rest intervals similar to normal time based intervals, with the exception that the rowing engine is forced to stop collecting metrics during that interval. A PM5 considers a rest interval a subordinate attribute of a normal interval, and it isn't an independent entity. In [CsafeManagerService.js](../app/peripherals/ble/pm5/csafe-service/CsafeManagerService.js) this is managed by adding a rest interval to OpenRowingMonitor's workout schedule.
-In reporting, we indeed see the PM5 skipping the split/interval reporting when the pause starts, and including the rest data with the split reporting after the pause has ended. This is consistent with the approach that a rest interval only is an extension of an active interval. In OpenRowingMonitor this behaviour is replicated by not reporting the start of a pause as new split, and combining the data from the active split and the rest split. Although the underlying datasets are largely disjunct (as rest intervals have little data associated with them), a key issue is the reporting of the IntervalType, WorkoutState and workoutDurationType in [0x0031 General Status](#0x0031-general-status), and the intervalType [0x0037 "Split Data"](#0x0037-split-data).
+In reporting, we indeed see the PM5 skipping the split/interval reporting when the pause starts, and including the rest data with the split reporting after the pause has ended. This is consistent with Concept2's approach that a rest interval only is an extension of an active interval. In OpenRowingMonitor this behaviour is replicated by not reporting the start of a pause as new split, and combining the data from the active split and the rest split. Although the underlying datasets are largely disjunct (as rest intervals have little data associated with them), a key issue is the reporting of the IntervalType, WorkoutState and workoutDurationType in [0x0031 General Status](#0x0031-general-status), and the intervalType [0x0037 "Split Data"](#0x0037-split-data).
In starting a pause our traces show that message [0x0031 General Status](#0x0031-general-status)'s 'IntervalType' is set from `IntervalTypes.INTERVALTYPE_DIST` to `IntervalTypes.INTERVALTYPE_REST`. [0x0037 "Split Data"](#0x0037-split-data)'s 'IntervalType' reports an `IntervalTypes.INTERVALTYPE_DIST`. For the GeneralStatus message, the workout target clearly contains an element of OpenRowingMonitor's 'sessionState' object (i.e. verify if the sessionState is paused).
### Positioning unplanned rests
-People might deviate from their workout plan and take a break mid-session. In OpenRowingMonitor this is treated as a seperate rest split, clearly separating active and passive metrics. The PM5 essentially ignores the pause, lets time continue and does not change split/interval upon detection.
+People might deviate from their workout plan and take a break mid-session. In OpenRowingMonitor this is treated as a separate rest split, clearly separating active and passive metrics. Especially for downstream reporting, like Strava, this simplifies analysis a lot. The PM5 essentially ignores the pause, lets time continue and does not change split/interval upon detection. This is emulated by ignoring the reporting of unplanned pauses to apps. All summarizing metrics will be aggregated accordingly, also including rest time where relevant.
### Different definition of moving time and rest time
@@ -84,15 +86,15 @@ Each string of commands represents an interval. It is always closed with `CSAFE_
| WORKOUTTYPE_JUSTROW_SPLITS | A simple unlimited session with splits | single interval, type = 'justrow' | Fixed 'time' or 'distance' |
| WORKOUTTYPE_FIXEDDIST_NOSPLITS | A simple distance session | single interval, type = 'distance' | Undefined[^1] |
| WORKOUTTYPE_FIXEDDIST_SPLITS | A simple distance session with splits | single interval, type = 'distance' | Fixed 'distance' |
+| WORKOUTTYPE_FIXEDDIST_INTERVAL | An unlimited repeating distance based interval | repeating intervals, type = 'distance'[^2] | Undefined[^1] |
| WORKOUTTYPE_FIXEDTIME_NOSPLITS | A simple time limited session | single interval, type = 'time' | Undefined[^1] |
| WORKOUTTYPE_FIXEDTIME_SPLITS | A simple time limited session with splits | single interval, type = 'time' | Fixed 'time' |
| WORKOUTTYPE_FIXEDTIME_INTERVAL | An unlimited repeating time based interval | repeating intervals, type = 'time'[^2] | Undefined[^1] |
-| WORKOUTTYPE_FIXEDDIST_INTERVAL | An unlimited repeating distance based interval | repeating intervals, type = 'distance'[^2] | Undefined[^1] |
+| WORKOUTTYPE_FIXEDCALORIE_SPLITS | A simple calories session with splits | single interval, type = 'calories' | Fixed 'calories' |
+| WORKOUTTYPE_FIXEDCALS_INTERVAL | An unlimited repeating calories based interval | repeating intervals, type = 'calories'[^2] | Undefined[^1] |
| WORKOUTTYPE_VARIABLE_INTERVAL | A series of different variable intervals | multiple intervals | Fixed 'time' or 'distance' per interval |
| WORKOUTTYPE_VARIABLE_UNDEFINEDREST_INTERVAL | Not implemented | Not implemented | Not implemented |
-| WORKOUTTYPE_FIXEDCALORIE_SPLITS | Not implemented | Not implemented | Not implemented |
| WORKOUTTYPE_FIXEDWATTMINUTE_SPLITS | Not implemented | Not implemented | Not implemented |
-| WORKOUTTYPE_FIXEDCALS_INTERVAL | Not implemented | Not implemented | Not implemented |
> [!NOTE]
> Please be aware that apps like ErgData and ErgZone actually do 'optimisations' behind the scene. Three intervals of 8 minutes with 2 minute rests are typically sent as a `WORKOUTTYPE_FIXEDTIME_INTERVAL`, despite this resulting in an endless series. If the planned rests are omited, it will result in a `WORKOUTTYPE_FIXEDTIME_SPLITS` with a single time interval with splits of the length of the intervals. If one would add a single second to any of the individual intervals, it becomes a `WORKOUTTYPE_VARIABLE_INTERVAL`, and all intervals are programmed manually. Obviously, from a user perspective the target displayed in the GUI will vary across these options (see [issue 118](https://github.com/JaapvanEkris/openrowingmonitor/issues/118)).
diff --git a/docs/README.md b/docs/README.md
index 8e966a84bd..0bd7a7bae0 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -10,11 +10,11 @@ OpenRowingMonitor is a reliable, free and open source monitor for rowing machine
It runs on cheap (Raspberry Pi) hardware to calculate rowing metrics, such as power, split time, speed, stroke rate, distance and calories. As it is your data, you can share these metrics with games and analysis in the way you like.
-OpenRowingMonitor runs fine on any rowing machine, as long as you can add something to measure the speed of the flywheel, like magnets. It already has been retrofitted to many rowing machines like the [DIY Openergo](https://openergo.webs.com) and many [existing machines that lack a decent monitor](Supported_Rowers.md), and is used by many on a daily basis. If your machine isn't listed, don't worry, adjusting the settings is easy following the [settings adjustment help guide](rower_settings.md) yourself. And in the [GitHub Discussions](https://github.com/JaapvanEkris/openrowingmonitor/discussions) there always are friendly people to help you set up your machine and the settings.
+OpenRowingMonitor runs fine on any rowing machine, as long as you can add something to measure the speed of the flywheel, like magnets. It already has been retrofitted to many rowing machines like the [DIY Openergo](https://openergo.webs.com) and many [existing machines that lack a decent monitor](Supported_Rowers.md), and is used by many on a daily basis. If your machine isn't listed, don't worry, adjusting the settings is easy following the [settings adjustment help guide](rower_settings.md) yourself. And in the [GitHub Discussions](https://github.com/JaapvanEkris/openrowingmonitor/discussions) and specifically the [Rower Profile development section](https://github.com/JaapvanEkris/openrowingmonitor/discussions/categories/rower-profile-development) there always are friendly people to help you set up your machine and the settings.
## Features
-OpenRowingMonitor can provide you with metrics directly, via smartwatches (ANT+), apps and games (bluetooth) and Home Automation (MQTT). It also allows you to export your data to the analysis tool of your choice.
+OpenRowingMonitor provides you with metrics directly, via smartwatches (ANT+), apps and games (bluetooth) and Home Automation (MQTT). You can also export your data to the analysis tool of your choice.

@@ -23,7 +23,7 @@ The following items describe most of the current features in more detail.
### Rowing Metrics
-OpenRowingMonitor calculates the typical metrics of a rowing machine, where the parameters can be tuned to the specifics of a rower machine by changing the configuration file. We maintain [settings for machines alrrady known to us](Supported_Rowers.md). The underlying software is structurally validated against a Concept2 PM5 in over 300 sessions (totalling over 3 million meters), and results deviate less than 0.1% for every individual rowing session.
+OpenRowingMonitor calculates the typical metrics of a rowing machine, where the parameters can be tuned to the specifics of a rower machine by changing the configuration file. We maintain [settings for machines already known to us](Supported_Rowers.md). The underlying software is structurally validated against a Concept2 PM5 in over 300 sessions (totalling over 3 million meters), and results deviate less than 0.1% for every individual rowing session.
OpenRowingMonitor can display the following key metrics on the user interface:
@@ -42,28 +42,26 @@ OpenRowingMonitor can display the following key metrics on the user interface:
* Distance per stroke (meters)
* Force curve with Peak power (Newtons)
-It calculates and can export many other key rowing metrics, including Recovery Heart Rate, Average handle force (Newton), Peak handle force (Newton) and the associated handle force curve, handle velocity curve and handle power curve.
+It also calculates and exports many other key rowing metrics, including Recovery Heart Rate, Average handle force (Newton), Peak handle force (Newton) and the associated handle force curve, handle velocity curve and handle power curve.
### Web Interface
-The web interface visualizes the basic rowing metrics on any device that can run a web browser (i.e. a smartphone that you attach to your rowing machine while training) in realtime. You can set up the user interface as you like, with the metrics you find important:
+The web interface shows the basic rowing metrics on any device that can run a web browser (i.e. a smartphone that you attach to your rowing machine while training) in realtime. You can set up the user interface as you like, with the metrics you find important:

-Via the Action tile, it can also be used to reset the training metrics and to select the type of bluetooth and ANT+ connection.
-
-If you connect a (optional) physical screen directly to the Raspberry Pi, then this interface can also be directly shown on the device. The installation script can set up a web browser in kiosk mode that runs on the Raspberry Pi.
+If you connect a (optional) physical screen directly to the Raspberry Pi, then this interface can also be directly shown on the device. The installation script helps you set this up.
### Device connections via Bluetooth, ANT+ and MQTT
-OpenRowingMonitor can recieve heartrate data via Bluetooth Low Energy (BLE) and ANT+. But you can also share your rowing metrics with different applications and devices. We support most common industry standards to help you connect to your app and game of choice, OpenRowingMonitor currently supports the following protocols:
+OpenRowingMonitor can receive heartrate data via Bluetooth Low Energy (BLE) and ANT+. But you can also share your rowing metrics with different applications and devices. We support most common industry standards to help you connect to your app and game of choice. OpenRowingMonitor currently supports the following protocols:
-* **Concept2 PM**: OpenRowingMonitor can simulate a Concept2 PM5, providing compatibility with most rowing apps. This implements the most common parts of the spec, so it might not work with all applications. It is known to work with [EXR](https://www.exrgame.com) (preferred method), [ErgZone](https://Erg.Zone), [Kinomap](https://www.kinomap.com) and all the samples from [The Erg Arcade](https://ergarcade.com).
+* **Concept2 PM**: OpenRowingMonitor can simulate a Concept2 PM5, which is compatible with most rowing apps. This implements the most common parts of the specification, so it might not work with all applications. It is known to work with [EXR](https://www.exrgame.com) (preferred method), [ErgZone](https://Erg.Zone), [Kinomap](https://www.kinomap.com) and all the samples from [The Erg Arcade](https://ergarcade.com).
-* **FTMS Rower**: This is the FTMS profile for rowing machines and supports all rowing specific metrics (such as stroke rate). We've successfully tested it with [EXR](https://www.exrgame.com), [Peleton](https://www.onepeloton.com/app), [MyHomeFit](https://myhomefit.de) and [Kinomap](https://www.kinomap.com).
+* **FTMS Rower**: This is the FTMS profile for rowing machines and supports all key rowing specific metrics (such as pace and stroke rate). We've successfully tested it with [EXR](https://www.exrgame.com), [Peleton](https://www.onepeloton.com/app), [MyHomeFit](https://myhomefit.de) and [Kinomap](https://www.kinomap.com).
-* **ANT+ FE-C**: OpenRowingMonitor can broadcast rowing metrics via ANT+ FE-C, which can be recieved by several series of Garmin smartwatches like the Epix/Fenix series, which then can calculate metrics like training load etc..
+* **ANT+ FE-C**: OpenRowingMonitor can broadcast rowing metrics via ANT+ FE-C, which can be received by several series of Garmin smartwatches like the Epix/Fenix series, which then can calculate metrics like training load etc..
* **FTMS Indoor Bike**: This FTMS profile is used by Smart Bike Trainers and widely adopted by bike training apps. It does not support rowing specific metrics, but it can present metrics such as power and distance to the biking application and use cadence for stroke rate. So why not use your virtual rowing bike to row up a mountain in [Zwift](https://www.zwift.com), [Bkool](https://www.bkool.com), [The Sufferfest](https://thesufferfest.com) or similar :-)
@@ -78,19 +76,19 @@ OpenRowingMonitor can recieve heartrate data via Bluetooth Low Energy (BLE) and
### Export of Training Sessions
-OpenRowingMonitor is based on the idea your metrics should be easily accessible for further analysis on data platforms. Automatic uploading your sessions to [RowsAndAll](https://rowsandall.com/), [Intervals.icu](https://intervals.icu/) and [Strava](https://www.strava.com) is an integrated feature. For other platforms this is currently a manual step, see [the integration manual](Integrations.md). To allow the data upliad, OpenRowingMonitor can create the following file types:
+OpenRowingMonitor is based on the idea your metrics should be easily accessible for further analysis on data platforms. Automatic uploading your sessions to [RowsAndAll](https://rowsandall.com/), [Intervals.icu](https://intervals.icu/) and [Strava](https://www.strava.com) is an integrated feature. For other platforms this is currently a manual step, see [the integration manual](Integrations.md). To allow the data upload, OpenRowingMonitor can create the following file types:
-* **RowingData** files, which are comma-seperated files with all metrics OpenRowingMonitor can produce. These can be used with [RowingData](https://pypi.org/project/rowingdata/) to display your results locally, or uploaded to [RowsAndAll](https://rowsandall.com/) for a webbased analysis (including dynamic in-stroke metrics). The csv-files can also be processed manually in Excel, allowing your own custom analysis;
+* **RowingData** files, which are comma-separated files with all metrics OpenRowingMonitor can produce. These can be used with [RowingData](https://pypi.org/project/rowingdata/) to display your results locally, or uploaded to [RowsAndAll](https://rowsandall.com/) for a webbased analysis (including dynamic in-stroke metrics). The csv-files can also be processed manually in Excel, allowing your own custom analysis;
-* **Garmin FIT files**: These are binairy files that contain the most interesting metrics of a rowing session. Most modern training analysis tools will accept a FIT-file. You can upload these files to training platforms like [Strava](https://www.strava.com), [Garmin Connect](https://connect.garmin.com), [Intervals.icu](https://intervals.icu/), [RowsAndAll](https://rowsandall.com/) or [Trainingpeaks](https://trainingpeaks.com) to track your training sessions;
+* **Garmin FIT files**: These are binary files that contain the most interesting metrics of a rowing session. Most modern training analysis tools will accept a FIT-file. You can upload these files to training platforms like [Strava](https://www.strava.com), [Garmin Connect](https://connect.garmin.com), [Intervals.icu](https://intervals.icu/), [RowsAndAll](https://rowsandall.com/) or [Trainingpeaks](https://trainingpeaks.com) to track your training sessions;
-* **Training Center XML files (TCX)**: These are legacy XML-files that contain the most essential metrics of a rowing session. Most training analysis tools will still accept a tcx-file (although FIT usually is recomended). You can upload these files to training platforms like [Strava](https://www.strava.com), [Garmin Connect](https://connect.garmin.com), [Intervals.icu](https://intervals.icu/), [RowsAndAll](https://rowsandall.com/) or [Trainingpeaks](https://trainingpeaks.com) to track your training sessions;
+* **Training Center XML files (TCX)**: These are legacy XML files that contain the most essential metrics of a rowing session. Most training analysis tools will still accept a tcx-file (although FIT usually is recommended). You can upload these files to training platforms like [Strava](https://www.strava.com), [Garmin Connect](https://connect.garmin.com), [Intervals.icu](https://intervals.icu/), [RowsAndAll](https://rowsandall.com/) or [Trainingpeaks](https://trainingpeaks.com) to track your training sessions;
The OpenRowingMonitor installer can also set up a network share that contains all training data so it is easy to grab the files from there and manually upload them to the training platform of your choice.
## Installation
-You will need a Raspberry Pi Zero 2 W, Raspberry Pi 3, Raspberry Pi 4 with a fresh installation of Raspberry Pi OS Lite for this (the 64Bit kernel is recomended). Connect to the device with SSH and just follow the [Detailed Installation Instructions](installation.md) and you'll get a working monitor. This guide will help you install the software and explain how to connect the rowing machine. If you can follow the guide, it will work. If you run into issues, you can always [drop a question in the GitHub Discussions](https://github.com/JaapvanEkris/openrowingmonitor/discussions), and there always is someone to help you.
+You will need a Raspberry Pi Zero 2 W, Raspberry Pi 3, Raspberry Pi 4 with a fresh installation of Raspberry Pi OS Lite for this (the 64Bit kernel is recommended). Connect to the device with SSH and just follow the [Detailed Installation Instructions](installation.md) and you'll get a working monitor. This guide will help you install the software and explain how to connect the rowing machine. If you can follow the guide, it will work. If you run into issues, you can always [drop a question in the GitHub Discussions](https://github.com/JaapvanEkris/openrowingmonitor/discussions), and there always is someone to help you.
> [!IMPORTANT]
> Due to architecture differences, both the Raspberry Pi Zero W (see [this discussion for more information](https://github.com/JaapvanEkris/openrowingmonitor/discussions/33)) and Raspberry Pi 5 (see [this discussion for more information](https://github.com/JaapvanEkris/openrowingmonitor/issues/52)) will **not** work.
@@ -100,10 +98,10 @@ You will need a Raspberry Pi Zero 2 W, Raspberry Pi 3, Raspberry Pi 4 with a fre
## Further information
-This project is in a very stable stage, as it is used daily by many rowers, and the engine is structurally validated against the Concept2 PM5. OpenRowingMonitor is tested extensively for weeks before being released to mainstream users. However, it might contain some things that are still a bit rough on the edges.
+This project is in a very stable stage, as it is used daily by many rowers. The engine is structurally validated against the Concept2 PM5. OpenRowingMonitor usually is tested extensively for weeks before being released to mainstream users. However, it might contain some things that are still a bit rough on the edges.
This is a larger team effort and OpenRowingMonitor had much direct and indirect support by many people during the years, see the [Attribution to these people here](attribution.md). You can see its development throughout the years [here in the Release notes](Release_Notes.md). Our work is never done, so more functionality will be added in the future, so check the [Development Roadmap](backlog.md) if you are curious.
-Contributions to improve OpenRowingMonitor further are always welcome! To get an idea how this all works, you can read the [Archtecture description](Architecture.md), the [Physics of OpenRowingMonitor (for advanced readers)](physics_openrowingmonitor.md) and [Contributing Guidelines](CONTRIBUTING.md) how you can help us improve this project.
+Contributions to improve OpenRowingMonitor further are always welcome! To get an idea how this all works, you can read the [Architecture description](Architecture.md), the [Physics of OpenRowingMonitor (for advanced readers)](physics_openrowingmonitor.md) and [Contributing Guidelines](CONTRIBUTING.md) how you can help us improve this project.
Feel free to leave a message in the [GitHub Discussions](https://github.com/JaapvanEkris/openrowingmonitor/discussions) if you have any questions or ideas related to this project.
diff --git a/docs/Release_Notes.md b/docs/Release_Notes.md
index 8057662a44..f89fd91ad7 100644
--- a/docs/Release_Notes.md
+++ b/docs/Release_Notes.md
@@ -1,19 +1,33 @@
# OpenRowingMonitor Release Notes
+## Version 0.9.7 (January 2026)
+
+Main contributors: [Jaap van Ekris](https://github.com/JaapvanEkris), with support of [Abasz](https://github.com/Abasz)
+
+### New functionality in 0.9.7
+
+- **Addition of the 'Calories' workout type**. You can now program Intervals and splits based on calories to be burned
+- **Introduction of splits in the fit-file**. The fit-file now also has splits, which makes the fit-file closer to a native Garmin recording of the same session
+
+### Bugfixes and robustness improvements in 0.9.7
+
+- **Improvement of the Moving Least Squares regressor**:
+ - Code refactoring to isolate this function from `Flywheel.js`, allowing a more thorough testing of this function's behaviour
+ - Introduced the 'Local Goodness of Fit' function to improve the robustness against noise. This reduces the effect of outliers on stroke detection, the Force curve, Power curve and Handle speed curve
+ - Introduction of a 'Gaussian Weight' filter to reduce the effects of flanks on the regression in a specific datapoint
+ - Added documentation about the mathematical foundations of the algorithms used
+- **Upgrade of the flywheel systematic error filter**, which now can handle systematic errors of magnet positioning on the flywheel. This is more effective at reducing structural measurement noise and allows a reduction of the code complexity in `Flywheel.js` as all dependent algorithms can use the same datastream again.
+- **Fixed a bug in the initialisation of the `Flywheel.js`**
+- **Improved logging in the Strava uploader** for better troubleshooting (see [issue 145](https://github.com/JaapvanEkris/openrowingmonitor/issues/145))
+- **Fixed a bug where VO2Max calculation missed heartrate data** (see [this discussion](https://github.com/JaapvanEkris/openrowingmonitor/discussions/156))
+- **Increased the test coverage of key algorithms**
+
## Version 0.9.6 (June 2025)
Main contributors: [Abasz](https://github.com/Abasz) and [Jaap van Ekris](https://github.com/JaapvanEkris)
Beta testers: [fkh-bims](https://github.com/fkh-bims), [jryd2000](https://github.com/jryd2000) and [carlito1979](https://github.com/carlito1979)
-### Upgrade instructions for 0.9.6
-
-> [!IMPORTANT]
-> When upgrading from an existing install, several things have to be done by hand:
->
-> - If you use an attached screen, you need to install firefox by `sudo apt-get install firefox`
-> - If you use the automated Strava upload, you have to configure your Strava setup in `config.js` again. Please look at the [integrations manual](Integrations.md) for how to do this.
-
### New functionality in 0.9.6
- **Major upgrade of our PM5 interface**, bringing it much closer to the official PM5 interface specification: apps like [ErgZone](https://Erg.Zone), [EXR](https://exrgame.com) and many others now work in PM5 mode in most scenarios (there are [some known limitations](#known-issues-in-096)). This allows you to set up a workout in the session manager with ease, have force curves presented and record the data (addresses [this request](https://github.com/JaapvanEkris/openrowingmonitor/discussions/78)).
@@ -59,7 +73,7 @@ Main contributors: [Jaap van Ekris](https://github.com/JaapvanEkris) and [Abasz]
### Known issues in 0.9.5
-- **Bluetooth Heartrate can't be switched dynamically**: due to some underlying OS changes, BLE heartrate monitors can't be activated through the GUI without crashing the BLE metrics broadcast (see [the description of issue 69](https://github.com/JaapvanEkris/openrowingmonitor/issues/69)). As this is an issue in the OS, **all current and previous versions of OpenRowingMonitor are also affected by this issue**. Version 0.9.5 has a workaround implemented: configuring the use of a BLE heartrate monitor in the config file should work. However, dynamic switching via the GUI will crash the BLE connections. This issue is resolved in version 0.9.6.
+- **Bluetooth Heartrate can't be switched dynamically**: due to some underlying OS changes, BLE heartrate monitors can't be activated through the GUI without crashing the BLE metrics broadcast (see [the description of issue 69](https://github.com/JaapvanEkris/openrowingmonitor/issues/69)). As this is an issue in the OS, **all current and previous versions of OpenRowingMonitor are also affected by this issue**. Version 0.9.5 has a workaround implemented: configuring the use of a BLE heartrate monitor in the config file should work. However, dynamic switching via the GUI will crash the BLE connections. This issue has been resolved in version 0.9.6.
## Version 0.9.0 (January 2024)
@@ -80,7 +94,7 @@ Main contributors: [Jaap van Ekris](https://github.com/JaapvanEkris), [Abasz](ht
- **Added a configuration sanity check** which logs obvious errors and (if possible) repairs settings, after several users messed up their config and got completely stuck. This configuration sanity check also provides an automated upgrade path for 0.8.2 (old config) users to 0.9.0 (new config), as all the newly added configuration items between these two versions are automatically detected, logged and repaired.
- **Added restart limits** to prevent infinite boot loops of the app crashing and rebooting when there is a config error
- **Fixed the GPIO tick rollover**, which led to a minor hickup in data in rows over 30 minutes
-- **Made Flywheel.js more robust** against faulty GPIO data
+- **Made `Flywheel.js` more robust** against faulty GPIO data
- **Fixed a lot of small memory leaks** which were due to untidy closure of dynamic data structures. Although this wasn't encountered in regular training sessions, it did show in long simulations (over 10.000K);
- **Fixed an application crash** in the RowingData generation when the target directory doesn't exist yet;
- **Improved the structure of the peripherals** to allow a more robust BLE and ANT use
@@ -101,15 +115,15 @@ Main contributors: [Jaap van Ekris](https://github.com/JaapvanEkris) and [Abasz]
- **Improved metrics through BLE FTMS and BLE C2-PM5**: Based on the new engine, many metrics are added to both FTMS Rower and PM5, making them as complete as they can be. Most metrics also have over a 1000 km of testing with EXR, and both types of interface have been used with EXR intensely.
- **New export format**: There is a RowingData export, which can export all metrics in .csv, which is accepted by both RowingData and RowsAndAll. It is also useable for users to read their data into Excel. This export brings the force curve to users, although it will require a small subscription to see it in RowsAndAll;
- **Simpler set-up**: a better out-of-the-box experience for new users. We trimmed the number of required settings, and for many cases we’ve succeeded: several settings are brought down to their key elements (like a minimal handle force, which can be set more easily for all rowers) or can be told by looking at the logs (like the recovery slope). For several other settings, their need to set them perfectly has been reduced, requiring less tweaking before OpenRowingMonitor starts producing good data. To support this, there also is a new setup document, to help users set up their own rower;
-- **Switch to 64Bit**: OpenRowingMonitor supports the 64 Bit Lite core, which has a PREEEMPT-kernel. The setup-script accepts this as well, as this should be the preferred kernel to use. The PREEMPT-kernel is optimized for low latency measurements, like IoT applications. As PREEMPT kernels can handle a lot higher priority for the GPIO-thread, this setting has been switched from a binary setting to a priority setting.
-- **An initial stub for session mangement**: As a first step towards sessions and splits, a session object in Server.js is added as a placeholder for session targets. If a target is set, it will termintate the session at the exact right time. As is with the PM5, ORM counts down if a target is set. You can't set these targets through the webGUI or through BLE yet. However, it is a first step towards functional completeness as it lays a preliminary foundation for such functionality.
+- **An initial stub for session management**: As a first step towards sessions and splits, a session object in `Server.js` is added as a placeholder for session targets. If a target is set, it will terminate the session at the exact right time. As is with the PM5, ORM counts down if a target is set. You can't set these targets through the webGUI or through BLE yet. However, it is a first step towards functional completeness as it lays a preliminary foundation for such functionality.
### Bugfixes and robustness improvements in 0.8.4
- **Totally redesigned rowing engine**: Linear and Quadratic Regression models are now the core of the rowing engine, leaving the classical numerical approximation model. The new model is much more robust against noise, and completely removes the need for noise filtering from OpenRowingMonitor.
+- **Switch to 64Bit**: OpenRowingMonitor supports the 64 Bit Lite core, which has a PREEMPT-kernel. The setup-script accepts this as well, as this should be the preferred kernel to use. The PREEMPT-kernel is optimized for low latency measurements, like IoT applications. As PREEMPT kernels can handle a lot higher priority for the GPIO-thread, this setting has been switched from a binary setting to a priority setting.
- **Improved logging**: the logging has been more focussed on helping the user fix a bad setting, focussing on the underlying state of the engine and its settings (for example the drive time and drive length). Goal is to have users be able to tune their engine based on the log.
- **Finite State Machine based state management**: OpenRowingEngine will now maintain an explicit state for the rower, and RowingStatistics will maintain an explicit state for the session. Aside from reducing the code complexity significantly, it greatly improved robustness.
-- **Added a new GPIO-library**, making measurement of the flywheel data much more accurate and allowing to "debounce" the measurements, as many sensors have this issue
+- **Added a new GPIO-library**, making measurement of the flywheel data much more accurate and allowing to "debounce" the measurements, as many sensors have this issue (see [issue 85](https://github.com/laberning/openrowingmonitor/issues/85))
## Version 0.8.2 (February 2022)
diff --git a/docs/Supported_Rowers.md b/docs/Supported_Rowers.md
index 1c9953dac2..8435816bd5 100644
--- a/docs/Supported_Rowers.md
+++ b/docs/Supported_Rowers.md
@@ -7,14 +7,14 @@ The following rowers are known to work, or are even actively supported:
| Brand | Type | Rower type | Measurement type | HW Modification needed | Support status | Rower profile | Basic Metrics | Advanced Metrics | Limitations | Remarks |
| ----- | ---- | ---- | ---- | ---- | ---- | ---- | ---- | ---- | ---- | ---------------- |
| Abilica | Winrower 2.0 | Air rower | Handle drive wheel | No | Known to work | - | Yes | No | Static distance | see [this discussion](https://github.com/laberning/openrowingmonitor/discussions/48) |
-| Concept 2 | Model B, C | Air rower | Flywheel | [Modification to electrical signal](https://oshwlab.com/jpbpcb/rower2) | Active support | Concept2_Model_C | Yes | Yes | None | See [this](https://github.com/laberning/openrowingmonitor/issues/77), [this](https://github.com/laberning/openrowingmonitor/discussions/38) and [this](https://github.com/laberning/openrowingmonitor/discussions/151) [this](https://github.com/laberning/openrowingmonitor/discussions/157)discussions|
+| Concept 2 | Model B, C | Air rower | Flywheel | [Modification to electrical signal](https://oshwlab.com/jpbpcb/rower2) | Active support | Concept2_Model_C | Yes | Yes | None | See [this](https://github.com/laberning/openrowingmonitor/issues/77), [this](https://github.com/laberning/openrowingmonitor/discussions/38), [this](https://github.com/laberning/openrowingmonitor/discussions/151) and [this](https://github.com/laberning/openrowingmonitor/discussions/157) discussions |
| | Model D, E | Air rower | Flywheel | [Modification to electrical signal](hardware_setup_Concept2_RowErg.md) | Active support | Concept2_RowErg | Yes | Yes | None | [Concept 2 Model D, Model E and RowErg setup](hardware_setup_Concept2_RowErg.md) |
| | RowErg | Air rower | Flywheel | [Modification to electrical signal](hardware_setup_Concept2_RowErg.md) | Active support | Concept2_RowErg | Yes | Yes | None | [Concept 2 Model D, Model E and RowErg setup](hardware_setup_Concept2_RowErg.md) |
| Decathlon | Rower 120 | Physical friction | Flywheel | Adding sensor and adding magnets to the flywheel | In development | - | - | - | - | see [this discussion](https://github.com/laberning/openrowingmonitor/issues/110) |
| DKN | R-320 | Air Rower | Flywheel | No | Full support | DKN_R320 | Yes | No | Static drag | - |
| Domyos | FR120 | Air Rower | Flywheel | No | Known to work | DKN_R320 | Yes | No | Static drag | see [this discussion](https://github.com/laberning/openrowingmonitor/discussions/154) |
| FDF | FR-E520 | Water rower | Impellor | Sensor replacement | Known to work | - | Yes | - | - | see [this discussion](https://github.com/laberning/openrowingmonitor/discussions/156) |
-| | Neon Pro V | Air rower | Flywheel | Sensor replacement | Known to work | - | Yes | - | - | see [this](https://github.com/laberning/openrowingmonitor/discussions/87) and [this](https://github.com/JaapvanEkris/openrowingmonitor/discussions/11) discussion|
+| | Neon Pro V | Air rower | Flywheel | Sensor replacement | Known to work | - | Yes | - | - | see [this](https://github.com/laberning/openrowingmonitor/discussions/87) and [this](https://github.com/JaapvanEkris/openrowingmonitor/discussions/11) discussion |
| ForceUSA | R3 | Air Rower | Flywheel | No | Supported | ForceUSA_R3 | Yes | Yes | None | - |
| ISE | SY-1750 | Magnetic | Flywheel | Change placement of the reed switches | Known to work | Manual config | Yes | No | Static drag | [see this discussion](https://github.com/laberning/openrowingmonitor/discussions/143) |
| JLL | Ventus 2 | Hybrid Magnetic and Air rower | Flywheel | Unknown | Known to work | Unknown | Yes | Unknown | Unknown | [see this discussion](https://github.com/JaapvanEkris/openrowingmonitor/discussions/42) |
diff --git a/docs/backlog.md b/docs/backlog.md
index 5d3c7e362f..9410b5c9cd 100644
--- a/docs/backlog.md
+++ b/docs/backlog.md
@@ -4,9 +4,19 @@ This is currently is a very minimalistic Backlog for further development of this
If you would like to contribute to this project, you are more than welcome, but please read the [Contributing Guidelines](CONTRIBUTING.md) first to get the most out of your valuable time.
+## 0.9.7 (currently in development)
+
+* Add a lot of testcases to complete unit/integration testing of all functions used. Especially:
+ * Testing core objects, like the Theil-Sen regression analysis functions (not the algorithm), as well as their integrated behaviour
+ * Testing all the `workoutSegment.js` object metrics and cutting capabilities, including Calories
+* Add calories as interval type
+ * Describe the physics
+ * Add weight correction factor (see [C2 formula](https://www.concept2.com/training/calorie-calculator))
+
## Soon
* Improve the user interface (We really need help on this!)
+* Look at an alternative for pigpio library as it doesn't work on a Raspberry Pi 5 (see [issue 52](https://github.com/JaapvanEkris/openrowingmonitor/issues/52))
* Move to the Wayland window manager, to keep in step with Raspberry Pi OS
* Introduce training plans (i.e. a distance/time to row):
* Integrate with rowsandall.com to retrieve training planning
@@ -17,13 +27,6 @@ If you would like to contribute to this project, you are more than welcome, but
* Integrate with intervals.icu to retrieve training targets
* add user friendly possibility for user to define workouts with targets via the GUI
* add user friendly possibility for user to define workouts with targets via the PM5
-* Add calories as interval type
- * Add weight correction factor (see [C2 formula](https://www.concept2.com/training/calorie-calculator))
- * Make Calories a continuous metric (similar to distance) instead of a cycle based one
- * Add it as a stop criterium for the session manager
- * Add it as a workout option to the FIT recorder
- * Modify the PM5 peripheral to broadcast the right data
- * Update the GUI to allow selecting it
## Later
diff --git a/docs/installation.md b/docs/installation.md
index 935a0b4d55..b8597704af 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -33,7 +33,8 @@ The cheapest solution is a headless Raspberry Pi Zero 2W (roughly $15), the most
### Initialization of the Raspberry Pi
-- Install **Raspberry Pi OS Lite** on the SD Card i.e. with the [Raspberry Pi Imager](https://www.raspberrypi.org/software). Here, Raspberry Pi OS Lite 64 Bit is recommended as it is better suited for real-time environments. This can be done by selecting "other" Raspberry Pi OS in the imager and select OS Lite 64 Bit. We typically support the current and previous (Legacy) version of Raspberry Pi OS.
+
+- Install **Raspberry Pi OS Lite (Legacy 64-bit)** on the SD Card i.e. with the [Raspberry Pi Imager](https://www.raspberrypi.org/software). This can be done by selecting "Raspberry Pi OS (other)" in the imager and then selecting "Raspberry Pi OS Lite (Legacy) 64-bit". The Legacy version is based on Debian 12 (Bookworm) and is required for compatibility - the current version of Raspberry Pi OS (based on Debian 13 Trixie) is not yet supported.
- In the Raspberry Pi Imager, configure the network connection and enable SSH. In the Raspberry Pi Imager, you can automatically do this while writing the SD Card, just press `Ctrl-Shift-X` (see [here](https://www.raspberrypi.org/blog/raspberry-pi-imager-update-to-v1-6/) for a description), otherwise follow the instructions below
- Connect the device to your network ([headless](https://www.raspberrypi.org/documentation/configuration/wireless/headless.md) or via [command line](https://www.raspberrypi.org/documentation/configuration/wireless/wireless-cli.md))
- Enable [SSH](https://www.raspberrypi.org/documentation/remote-access/ssh/README.md)
diff --git a/docs/physics_openrowingmonitor.md b/docs/physics_openrowingmonitor.md
index f258d931c8..4409a39e20 100644
--- a/docs/physics_openrowingmonitor.md
+++ b/docs/physics_openrowingmonitor.md
@@ -1,9 +1,9 @@
-# The physics behind Open Rowing Monitor
+# The physics behind OpenRowingMonitor
-In this document we explain the physics behind the Open Rowing Monitor, to allow for independent review and software maintenance. This work wouldn't have been possible without some solid physics, described by some people with real knowledge of the subject matter. Please note that any errors in our implementation probably is on us, not them. When appropriate, we link to these sources. When possible, we also link to the source code to allow further investigation and keep the link with the actual implementation.
+In this document we explain the physics behind the OpenRowingMonitor, to allow for independent review and software maintenance. This document is to be read in conjunction with the [mathematical foundations of OpenRowingMonitor](./Mathematical_Foundations.md), as that describes the implementation choices for the specific (regression) algorithms used for specific functions. This work wouldn't have been possible without some solid physics, described by some people with real knowledge of the subject matter. Please note that any errors in our implementation are probably on us, not them. When appropriate, we link to these sources. When possible, we also link to the source code to allow further investigation and keep the link with the actual implementation.
-Please note that this text is used as a rationale for design decissions of the physics used in Open Rowing Monitor. So it is of interest for people maintaining the code (as it explains why we do things the way we do) and for academics to verify or improve our solution. For these academics, we conclude with a section of open design issues as they might provide avenues of future research. If you are interested in just using Open Rowing Monitor as-is, this might not be the text you are looking for.
+Please note that this text is used as a rationale for design decisions of the physics used in OpenRowingMonitor. So it is of interest for people maintaining the code (as it explains why we do things the way we do) and for academics to verify or improve our solution. For these academics, we conclude with a section of open design issues as they might provide avenues of future research. If you are interested in just using OpenRowingMonitor as-is, this might not be the text you are looking for.
## Basic concepts
@@ -31,7 +31,7 @@ There are several types of rowers:
* **Magnetic resistance**: where the resistance is constant
-There are also hybrid rowers, which combine air resistance and magnetic resistance. The differences in physical behavior can be significant, for example a magnetic rower has a constant resistance while a air rower's resistance is dependent on the flywheel's speed. We suspect that on a water rower behaves slightly different from an air rower, as the rotated water mass changes shape when the rotational velocity changes. Currently for Open Rowing Monitor, we consider that the key principle is similar enough for all these rowers (some mass is made to spin and drag brings its speed down) to treat them all as an air rower as a first approximation. However, we are still investigating how to adapt for these specific machines.
+There are also hybrid rowers, which combine air resistance and magnetic resistance. The differences in physical behavior can be significant, for example a magnetic rower has a constant resistance while an air rower's resistance is dependent on the flywheel's speed. We suspect that a water rower behaves slightly differently from an air rower, as the rotated water mass changes shape when the rotational velocity changes. Currently for OpenRowingMonitor, we consider that the key principle is similar enough for all these rowers (some mass is made to spin and drag brings its speed down) to treat them all as an air rower as a first approximation. However, we are still investigating how to adapt for these specific machines.
### Phases in the rowing stroke
@@ -56,7 +56,7 @@ Combined, we define a *Drive* followed by a *Recovery* a **Stroke**. In the calc
## Leading design principles of the rowing engine
-As described in [the architecture](Architecture.md), the rowing engine is the core of Open Rowing Monitor and consists of three major parts:
+As described in [the architecture](Architecture.md), the rowing engine is the core of OpenRowingMonitor and consists of three major parts:
* `engine/Flywheel.js`, which determines rotational metrics,
@@ -74,13 +74,13 @@ Although the physics is well-understood and even well-described publicly (see [[
## Relevant rotational metrics
-Typically, actual measurements are done in the rotational part of the rower, on the flywheel. We explicitly assume that Open Rowing Monitor measures the flywheel movement (directly or indirectly). Some rowing machines are known to measure the movement of the driving axle and thus the velocity and direction of the handle, and not the driven flywheel. This type of measurement blocks access to the physical behaviour of the flywheel (especially acceleration and coast down behaviour), thus making most of the physics engine irrelevant. Open Rowing Monitor can handle some of these rowing machines by fixing specific parameters, but as this measurement approach excludes any meaningful measurement, we will exclude it in the further description.
+Typically, actual measurements are done in the rotational part of the rower, on the flywheel. We explicitly assume that OpenRowingMonitor measures the flywheel movement (directly or indirectly). Some rowing machines are known to measure the movement of the driving axle and thus the velocity and direction of the handle, and not the driven flywheel. This type of measurement blocks access to the physical behaviour of the flywheel (especially acceleration and coast down behaviour), thus making most of the physics engine irrelevant. OpenRowingMonitor can handle some of these rowing machines by fixing specific parameters, but as this measurement approach excludes any meaningful measurement, we will exclude it in the further description.
In a typical rowing machine, there is a magnetic reed sensor or optical sensor that will measure time between either magnets or reflective stripes on the flywheel or impellor, which gives an **Impulse** each time a magnet or stripe passes. For example, when the flywheel rotates on a NordicTrack RX800, the passing of a magnet on the flywheel triggers a reed-switch, that delivers a pulse to our Raspberry Pi.
-Depending on the **number of impulse providers** (i.e. the number of magnets or stripes), the number of impulses per rotation increases, increasing the resolution of the measurement. As described in [the architecture](Architecture.md), Open Rowing Monitor's `GpioTimerService.js` measures the time between two subsequent impulses and reports as a *currentDt* value. The constant stream of *currentDt* values is the basis for all our angular calculations, which are typically performed in the `pushValue()` function of `engine/Flywheel.js`.
+Depending on the **number of impulse providers** (i.e. the number of magnets or stripes), the number of impulses per rotation increases, increasing the resolution of the measurement. As described in [the architecture](Architecture.md), OpenRowingMonitor's `GpioTimerService.js` measures the time between two subsequent impulses and reports as a *currentDt* value. The constant stream of *currentDt* values is the basis for all our angular calculations, which are typically performed in the `pushValue()` function of `engine/Flywheel.js`.
-Open Rowing Monitor needs to keep track of several metrics about the flywheel and its state, including:
+OpenRowingMonitor needs to keep track of several metrics about the flywheel and its state, including:
* The **Angular Distance** of the flywheel in Radians (denoted with θ): in essence the distance the flywheel has traveled (i.e. the number of Radians the flywheel has rotated) since the start of the session;
@@ -133,7 +133,7 @@ Summarizing, both Angular Velocity ω and Angular Acceleration α are
### Determining the "drag factor" of the flywheel
-In the recovery phase, the only force exerted on the flywheel is the (air-/water-/magnetic-)resistance. Thus we can calculate the *drag factor of the flywheel* based on deceleration through the recovery phase [[1]](#1). This calculation is performed in the `markRecoveryPhaseCompleted()` function of `engine/Flywheel.js`. There are several approaches described in literature [[1]](#1), which Open Rowing Monitor extends to deliver a reliable and practically applicable approach.
+In the recovery phase, the only force exerted on the flywheel is the (air-/water-/magnetic-)resistance. Thus we can calculate the *drag factor of the flywheel* based on deceleration through the recovery phase [[1]](#1). This calculation is performed in the `markRecoveryPhaseCompleted()` function of `engine/Flywheel.js`. There are several approaches described in literature [[1]](#1), which OpenRowingMonitor extends to deliver a reliable and practically applicable approach.
A first numerical approach is presented by through [[1]](#1) in formula 7.2a:
@@ -194,7 +194,7 @@ One of the key elements of rowing is detecting the stroke phases and thus calcul
* The **Recovery Phase**, where the rower returns to his starting position and the flywheel decelerates as the drag on the flywheel is slowing it down;
-As the rowing cycle always follows this fixed schema, Open Rowing Monitor models it as a finite state machine (implemented in `handleRotationImpulse` in `engine/Rower.js`).
+As the rowing cycle always follows this fixed schema, OpenRowingMonitor models it as a finite state machine (implemented in `handleRotationImpulse` in `engine/Rower.js`).
```mermaid
stateDiagram-v2
@@ -207,13 +207,13 @@ stateDiagram-v2
Finite state machine of rowing cycle
-From the perspective of Open Rowing Monitor, there only is a stream of *CurrentDt*'s, which should form the basis of this detection:
+From the perspective of OpenRowingMonitor, there only is a stream of *CurrentDt*'s, which should form the basis of this detection:
The following picture shows the time between impulses through time:
example currentDt Measurements of a flywheel
-Open Rowing Monitor combines two types of force detection, which work independently: *basic force detection* and *advanced stroke detection*. Both can detect a stroke accuratly, and the combination has proven its use.
+OpenRowingMonitor combines two types of force detection, which work independently: *basic force detection* and *advanced stroke detection*. Both can detect a stroke accurately, and the combination has proven its use.
In `engine/Flywheel.js`, two functions provide force detection, which use the following criteria before attempting a stroke phase transition:
@@ -242,7 +242,7 @@ The simple force detection uses this approach by looking at the slope of *curren
A more nuanced, but more vulnerable, approach is to compare the slope of this function with the typical slope encountered during the recovery phase of the stroke (which routinely is determined during the drag calculation). When the flywheel is unpowered, the slope will be close to the recovery slope, and otherwise it is powered. This is a more accurate, but more vulnerable, approach, as small deviations could lead to missed strokes. It is noted that practical testing has shown that this works reliably for many machines.
-In Open Rowing Monitor, the settings allow for using the more robust ascending/descending approach (by setting *minumumRecoverySlope* to 0), for a more accurate approach (by setting *minumumRecoverySlope* to a static value) or even a dynamic approach (by setting *autoAdjustRecoverySlope* to true)
+In OpenRowingMonitor, the settings allow for using the more robust ascending/descending approach (by setting *minumumRecoverySlope* to 0), for a more accurate approach (by setting *minumumRecoverySlope* to a static value) or even a dynamic approach (by setting *autoAdjustRecoverySlope* to true)
### Advanced force detection through torque τ
@@ -259,7 +259,7 @@ We do this by setting a minimum Torque (through setting *minumumForceBeforeStrok
#### A note about detection accuracy
-Open Rowing Monitor only will get impulses at discrete points in time. As Open Rowing Monitor doesn't measure torque on the flywheel directly, it can't determine where the flywheel exactly accelerates/decelerates as there is no continous measurement. Open Rowing Monitor can only detect a change in the times across several impulses, but it can't detect the exact time of torque change. In essence, at best we only can conclude that the torque has changes somewhere near a specific impulse, but we can't be certain about where the acceleration exactly has taken place and we can only estimate how big the force must have been.
+OpenRowingMonitor only will get impulses at discrete points in time. As OpenRowingMonitor doesn't measure torque on the flywheel directly, it can't determine where the flywheel exactly accelerates/decelerates as there is no continuous measurement. OpenRowingMonitor can only detect a change in the times across several impulses, but it can't detect the exact time of torque change. In essence, at best we only can conclude that the torque has changed somewhere near a specific impulse, but we can't be certain about where the acceleration exactly has taken place and we can only estimate how big the force must have been.
## Relevant linear metrics
@@ -322,7 +322,7 @@ Still, we currently choose to use $\overline{P}$ = k \* $\overline{ω}$2 to disregard any unreliably approximated data, it can also be used to produce reliable results. See `engine/utils/OLSLinearSeries.js` for more information about the implementation.
-
-#### Theil–Sen estimator (Linear TS)
-
-Although the Theil–Sen estimator has a O(N log(N)) solution available, however we could not find a readily available solution. We did manage to develop a solution that has a O(N) impact during the addition of an additional datapoint in a datastream with a fixed length window, and O(log(N)) impact when determining the slope.
-
-#### Incomplete Theil–Sen estimator (Inc Linear TS)
-
-There also is an Incomplete Theis-Sen estimator for Linear Regression [[11]](#11), which is O(1) for the addition of new datapoints in a datastream with a fixed length window, and O(log(N)) for the determination of the slope. Our tests on real-life data show that in several cases the Incomplete Theil-Sen delivers more robust results than the full Theil-Sen estimator.
-
-#### Quadratic Theil–Sen estimator (Quadratic TS)
-
-The Theil–Sen estimator can be expanded to apply to Quadratic functions, where the implementation is O(N2). Based on a Lagrange interpolation, we can calculate the coefficients of the formula quite effectively, resulting in a robust estimation more fitting the data. See `engine/utils/FullTSQuadraticSeries.js` for more information about the background of the implementation.
-
-### Choices for the specific algorithms
-
-#### Regression algorithm used for drag calculation
-
-For the drag-factor calculation (and the closely related recovery slope detection), we observe three things:
-
-* The number of datapoints in the recovery phase isn't known in advance, and is subject to significant change due to variations in recovery time (i.e. sprints), making the Incomplete Theil–Sen estimator incapable of calculating their slopes in the stream as the efficient implementations require a fixed window. OLS has a O(1) complexity for continous datastreams, and has proven to be sufficiently robust for most practical use. Using the Linear Theil-sen estimator results in a near O(N) calculation at the start of the *Drive* phase (where N is the length of the recovery in datapoints). The Quadratic Theil-sen estimator results in a O(N2) calculation at the start of the *Drive* phase. Given the number of datapoints often encountered (a recoveryphase on a Concept 2 contains around 200 datapoints), this is a significant CPU-load that could disrupt the application;
-
-* In non-time critical replays of earlier recorded rowing sessions, both the Incomplete Theil–Sen estimator performed worse than OLS: OLS with a high pass filter on r2 resulted in a much more stable dragfactor than the Incomplete Theil–Sen estimator did. The Theil–Sen estimator, in combination with a filter on r2 has shown to be even a bit more robust than OLS. This suggests that the OLS algorithm combined with a requirement for a sufficiently high r2 handles the outliers sufficiently to prevent drag poisoning and thus provide a stable dragfactor for all calculations. The Linear Theil-Sen estimator outperfomed OLS by a small margin, but noticeably improved stroke detection where OLS could not regardless of parameterisation.
-
-* Applying Quadratic OLS regression does not improve its results when compared to Linear OLS regression or Linear TS. For the drag (and thus recovery slope) calculation, the Linear Theil-Sen estimator has a slightly better performance then OLS, while keeping CPU-load acceptable for a data-intensive rowing machine (Concept 2, 12 datapoints flank, 200 datapoints in the recovery). A Quadratic theil-Sen based drag calculation has shown to be too CPU-intensive. For the stroke detection itself, OLS and Linear Theil-Sen deliver the same results, while OLS is less CPU intensive.
-
-Therefore, we choose to apply the Linear Theil-Sen estimator for the calculation of the dragfactor and the related recovery slope detection, and use OLS for the stroke detection.
-
-#### Regression algorithm used for Angular velocity and Angular Acceleration
-
-We determine the Angular Velocity ω and Angular Acceleration α based on the relation between θ and time. First of all, we observe that we use both the first derived function (i.e. ω) and the second derived function (i.e. α), making a quadratic or even a cubic regression algorithm more appropriate, as a liniear regressor would make the second derived function trivial. Practical testing has confirmed that Quadratic Theil-Senn outperformed all Linear Regression methods in terms of robustness and responsiveness. Based on extensive testing with multiple simulated rowing machines, Full Quadratic Theil-Senn has proven to deliver the best results and thus is selected to determine ω and α.
-
## Open Issues, Known problems and Regrettable design decissions
### Use of simplified power calculation
@@ -454,47 +404,10 @@ A simplified formula is provided by [[1]](#1) (formula 9.1), [[2]](#2) and [[3]]
$$ \overline{P} = k \* \overline{\omega}^3 $$
-Open Rowing Monitor uses the latter simplified version. As shown by academic research [[15]](#15), this is sufficiently reliable and accurate providing that that ω doesn't vary much across subsequent strokes. When there is a significant acceleration or decelleration of the flywheel across subsequent strokes (at the start, during acceleration in sprints or due to stroke-by-stroke variation), the reported/calculated power starts to deviate from the externally applied power.
+OpenRowingMonitor uses the latter simplified version. As shown by academic research [[15]](#15), this is sufficiently reliable and accurate providing that ω doesn't vary much across subsequent strokes. When there is a significant acceleration or deceleration of the flywheel across subsequent strokes (at the start, during acceleration in sprints or due to stroke-by-stroke variation), the reported/calculated power starts to deviate from the externally applied power.
Currently, this is an accepted issue, as the simplified formula has the huge benefit of being much more robust against errors in both the *CurrentDt*/ω measurement and the stroke detection algorithm. As Concept 2 seems to have taken shortcut in a thoroughly matured product [[15]](#15), we are not inclined to change this quickly. Especially as the robustness of both the ω calculation and stroke phase detection varies across types of rowing machines, it is an improvement that should be handled with extreme caution.
-### Use of Quadratic Theil-Senn regression for determining α and ω based on time and θ
-
-Abandoning the numerical approach for a regression based approach has resulted with a huge improvement in metric robustness. So far, we were able to implement Quadratic Theil-Senn regression and get reliable and robust results. Currently, the use of Quadratic Theil-Senn regression represents a huge improvement from both the traditional numerical approach (as taken by [[1]](#1) and [[4]](#4)) used by earlier approaches of Open Rowing Monitor.
-
-The (implied) underlying assumption underpinning the use of Quadratic Theil-Senn regression approach is that the Angular Accelration α is constant, or near constant by approximation in the flank under measurment. In essence, quadratic Theil-Senn regression would be fitting if the acceleration would be a constant, and the relation of θ, α and ω thus would be captured in θ = 1/2 \* α \* t2 + ω \* t. We do realize that in rowing the Angular Accelration α, by nature of the rowing stroke, will vary based on the position in the Drive phase: the ideal force curve is a heystack, thus the force on the flywheel varies in time.
-
-As the number of datapoints in a *Flanklength* in the relation to the total number of datapoints in a stroke is relatively small, we use quadratic Theil-Senn regression as an approximation on a smaller interval. In tests, quadratic regression has proven to outperform (i.e. less suspect to noise in the signal) both the numerical approach with noise filtering and the linear regression methods. When using the right efficient algorithm, this has the strong benefit of being robust to noise, at the cost of a O(n2) calculation per new datapoint (where n is the flanklength). Looking at the resulting fit of the Quadratic Theil-Sen estimator, we see that it consistently is above 0.98, which is an extremely good fit given the noise in the Concept 2 RowErg data. Therefore, we consider this is a sufficiently decent approximation while maintaining an sufficiently efficient algorithm to be able to process all data in the datastream in time.
-
-Although the determination of angular velocity ω and angular acceleration α based on Quadratic Theil-Senn regression over the time versus angular distance θ works decently, we realize it does not respect the true dynamic nature of angular acceleration α. From a pure mathematical perspective, a higher order polynomial would be more appropriate. A cubic regressor, or even better a fourth order polynomal have shown to be better mathematical approximation of the time versus distance function for a Concept2 RowErg. We can inmagine there are better suited third polynomal (cubic) approaches available that can robustly calculate α and ω as a function of time, based on the relation between time and θ. However, getting these to work in a datastream with very tight limitations on CPU-time and memory across many configurations is quite challenging.
-
-However, there are some current practical objections against using these more complex methods:
-
-* Higher order polynomials are less stable in nature, and overfitting is a real issue. As the displacement of magets can present itself as a sinoid-like curve (as the Concept 2 RowErg shows), 3rd or higher polynomials are inclined to follow that curve. As this might introduce wild shocks in our metrics, this might be a potential issue for application;
-* A key limitation is the available number of datapoints. For the determination of a polynomial of the n-th order, you need at least n+1 datapoints (which in Open Rowing Monitor translates to a `flankLength`). Some rowers, for example the Sportstech WRX700, only deliver 5 to 6 datapoints for the entire drive phase, thus putting explicit limits on the number of datapoints available for such an approximation.
-* Calculating a higher order polynomial in a robust way, for example by Theil-Senn regression, is CPU intensive. A quadratic approach requires a O(n2) calculation when a new datapoint is added to the sliding window (i.e. the flank). Our estimate is that with current known robust polynomial regression methods, a cubic approach requires at least a O(n3) calculation, and a 4th polynomial a O(n4) calculation. With smaller flanks (which determines the n) this has proven to be doable, but for machines which produce a lot of datapoints, and thus have more noise and a typically bigger `flankLength` (like the C2 RowErg and Nordictrack RX-800, both with a 12 `flankLength`), this becomes an issue: we consider completing 103 or even 104 complex calculations within the 5 miliseconds that is available before the next datapoint arrives, impossible.
-
-We also observe specific practical issues, which could result in structurally overfitting the dataset, nihilating its noise reduction effect. As the following sample of three rotations of a Concept2 flywheel shows, due to production tolerances or deliberate design constructs, there are **systematic** errors in the data due to magnet placement or magnet polarity. This results in systematic issues in the datastream:
-
-
-Deviation of the Concept 2 RowErg
-
-Fitting a quadratic curve with at least two full rotations of data (in this case, 12 datapoints) seems to reduce the noise to very acceptable levels. In our view, fitting a third-degree polynomial would result in a better fit with these systematic errors, but resulting in a much less robust signal.
-
-We also observe that in several areas the theoretical best approach did not deliver the best practical result (i.e. a "better" algorithm delivered a more noisy result for α and ω). Therefore, this avenue isn't investigated yet, but will remain a continuing area of improvement.
-
-This doesn't definitively exclude the use of more complex polynomial regression methods: alternative methods for higher polynomials within a datastream could be as CPU intensive as Theil-Senn Quadratic regression now, and their use could be isolated to specific combination of Raspberry hardware and settings. Thus, this will remain an active area of investigation for future versions.
-
-### Use of Quadratic Theil-Senn regression and a weighed average filter for determining α and ω
-
-For a specific flank, our quadratic regression algorithm calculates a single α for the entire flank and the individual ω's for each point on that flank. The flank acts like a sliding window: on each new datapoint the window slides one datapoint, and thus recalculates the critical parameters. Thus, as a datapoint will be part of several flank calculations, we obtain several α's and ω's that are valid approximations for that specific datapoint. Once the datapoint slides out of the sliding window, there are *flankLength* number of approximations for ω and α. A key question is how to combine these multiple approximations α and ω into a single true value for these parameters.
-
-To obtain the most stable result, a median of all valid values for α and ω can be used to calculate the definitive approximation of α and ω for that specific datapoint. Although this approach has proven very robust, and even necessary to prevent noise from disturbing powercurves, it is very conservative. For example, when compared to Concept 2's results, the powercurves have the same shape, but the peak values are considerable lower. It also has the downside of producing "blocky" force cuves.
-
-Using a weighed averager resulted in slightly more stable results and resulted in smoother force curves. The weight is based on the r2: better fitting curves will result in a heiger weigt in the calculation, thus preferring approximations that are a better fit with the data. This approach resulted in smoother (less blocky) force curves while retaining the responsiveness of the force curve.
-
-Reducing extreme values while maintaining the true data responsiveness is a subject for further improvement.
-
## References
[1] Anu Dudhia, "The Physics of ErgoMeters"
diff --git a/docs/rower_settings.md b/docs/rower_settings.md
index 1ef35e508e..f6457cd4cf 100644
--- a/docs/rower_settings.md
+++ b/docs/rower_settings.md
@@ -349,6 +349,9 @@ After getting the stroke detection right, we now turn to getting the basic linea
This results in a number, which works and can't be compared to anything else on the planet as that drag factor is highly dependent on the physical construction of the flywheel and mechanical properties of the transmission of power to the flywheel. For example, the Drag Factor for a Concept 2 ranges between 69 (Damper setting 1) and 220 (Damper setting 10). The NordicTrack RX-800 ranges from 150 to 450, where the 150 feels much lighter than a 150 on the Concept2. The Sportstech WRX700 water rower has a drag factor of 32000.
+> [!TIP]
+> Please realize that changing the `dragfactor` will affect stroke detection of the first stroke, and all subsequent strokes if `autoAdjustDragFactor` is false, as increasing it makes the reported forces on the flywheel bigger. So the `minimumForceBeforeStroke` might need adjustment too.
+
### Setting the flywheel inertia
**flywheelInertia** is the moment of inertia of the flywheel (in kg\*m2), which in practice influences the dynamically calculated dragfactor (and thus power, distance, speed and pace), but also the calculated force and power on the handle. A formal way to measure it is outlined in [Flywheel moment of inertia](https://dvernooy.github.io/projects/ergware/). However, the most practical way to set it is by rowing and see if the calculated drag factor approximates the previously set dragfactor needed to get a certain pace.
@@ -363,6 +366,9 @@ If your flywheel inertia is set correctly, the calculated drag factor will be ve
Please note that this logmessage will change when autoAdjustDragFactor is set to true, but this content will always be reported in debug mode.
+> [!TIP]
+> Please realize that changing the `flywheelInertia` will affect stroke detection if `autoAdjustDragFactor` is true, as increasing it makes the reported forces on the flywheel bigger. So the `minimumForceBeforeStroke` might need adjustment too.
+
## Settings you COULD change for a new rower
In the previous section, we've guided you to set up a very robust working rower, but it will result in more crude data. To improve the accuracy of many measurements, you could switch to a more accurate and dynamic metric calculation. This does require a more sophisticated rower: you need quite a few data points per stroke, with much accuracy, to get this working reliably. So this setup certainly isn't for every rowing machine out there, although some options might just work. And again a lot of rowing to get these settings right is involved.
diff --git a/package-lock.json b/package-lock.json
index 74a0ebe07a..88599c78ca 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,72 +1,57 @@
{
"name": "OpenRowingMonitor",
- "version": "0.9.6",
+ "version": "0.9.7",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "OpenRowingMonitor",
- "version": "0.9.6",
+ "version": "0.9.7",
"license": "GPL-3.0",
"dependencies": {
- "@markw65/fit-file-writer": "^0.1.6",
+ "@markw65/fit-file-writer": "^0.1.7",
"ble-host": "^1.0.3",
- "chart.js": "^4.5.0",
+ "chart.js": "^4.5.1",
"chartjs-plugin-datalabels": "^2.2.0",
- "finalhandler": "^2.1.0",
- "incyclist-ant-plus": "^0.3.5",
+ "finalhandler": "^2.1.1",
+ "incyclist-ant-plus": "^0.3.6",
"lit": "^2.8.0",
"loglevel": "^1.9.1",
- "mqtt": "^5.13.1",
+ "mqtt": "^5.14.1",
"node-fetch": "^3.3.2",
"nosleep.js": "0.12.0",
"pigpio": "3.3.1",
- "replace-in-file": "^8.3.0",
- "serve-static": "^2.2.0",
- "ws": "^8.18.3"
+ "replace-in-file": "^8.4.0",
+ "serve-static": "^2.2.1",
+ "ws": "^8.19.0"
},
"devDependencies": {
- "@babel/eslint-parser": "^7.27.5",
- "@babel/plugin-proposal-decorators": "^7.23.9",
- "@babel/preset-env": "^7.27.2",
- "@eslint/js": "^9.30.0",
- "@rollup/plugin-babel": "^6.0.4",
- "@rollup/plugin-commonjs": "^28.0.6",
- "@rollup/plugin-node-resolve": "^16.0.0",
+ "@babel/eslint-parser": "^7.28.5",
+ "@babel/plugin-proposal-decorators": "^7.28.0",
+ "@babel/preset-env": "^7.28.5",
+ "@eslint/js": "^9.39.2",
+ "@rollup/plugin-babel": "^6.1.0",
+ "@rollup/plugin-commonjs": "^29.0.0",
+ "@rollup/plugin-node-resolve": "^16.0.3",
"@rollup/plugin-terser": "^0.4.4",
- "@stylistic/eslint-plugin": "^5.1.0",
- "@web/rollup-plugin-html": "^2.1.2",
- "eslint": "^9.30.0",
- "globals": "^16.2.0",
+ "@stylistic/eslint-plugin": "^5.6.1",
+ "@web/rollup-plugin-html": "^3.0.0",
+ "eslint": "^9.39.2",
+ "globals": "^17.0.0",
"http2-proxy": "5.0.53",
- "markdownlint-cli2": "^0.18.1",
- "nodemon": "^3.0.3",
+ "markdownlint-cli2": "^0.20.0",
+ "nodemon": "^3.1.11",
"npm-run-all": "4.1.5",
- "rollup": "^4.44.1",
+ "rollup": "^4.55.1",
"rollup-plugin-summary": "^3.0.0",
- "simple-git-hooks": "^2.9.0",
- "tar": "^7.4.3",
+ "simple-git-hooks": "^2.13.1",
+ "tar": "^7.5.2",
"uvu": "^0.5.6"
},
"engines": {
"node": ">=20"
}
},
- "node_modules/@ampproject/remapping": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
- "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
- "dev": true,
- "license": "Apache-2.0",
- "peer": true,
- "dependencies": {
- "@jridgewell/gen-mapping": "^0.3.5",
- "@jridgewell/trace-mapping": "^0.3.24"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
"node_modules/@babel/code-frame": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
@@ -83,9 +68,9 @@
}
},
"node_modules/@babel/compat-data": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.27.7.tgz",
- "integrity": "sha512-xgu/ySj2mTiUFmdE9yCMfBxLp4DHd5DwmbbD05YAuICfodYT3VvRxbrh81LGQ/8UpSdtMdfKMn3KouYDX59DGQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz",
+ "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -93,23 +78,23 @@
}
},
"node_modules/@babel/core": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.27.7.tgz",
- "integrity": "sha512-BU2f9tlKQ5CAthiMIgpzAh4eDTLWo1mqi9jqE2OxMG0E/OM199VJt2q8BztTxpnSW0i1ymdwLXRJnYzvDM5r2w==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz",
+ "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
- "@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.27.1",
- "@babel/generator": "^7.27.5",
+ "@babel/generator": "^7.28.5",
"@babel/helper-compilation-targets": "^7.27.2",
- "@babel/helper-module-transforms": "^7.27.3",
- "@babel/helpers": "^7.27.6",
- "@babel/parser": "^7.27.7",
+ "@babel/helper-module-transforms": "^7.28.3",
+ "@babel/helpers": "^7.28.4",
+ "@babel/parser": "^7.28.5",
"@babel/template": "^7.27.2",
- "@babel/traverse": "^7.27.7",
- "@babel/types": "^7.27.7",
+ "@babel/traverse": "^7.28.5",
+ "@babel/types": "^7.28.5",
+ "@jridgewell/remapping": "^2.3.5",
"convert-source-map": "^2.0.0",
"debug": "^4.1.0",
"gensync": "^1.0.0-beta.2",
@@ -125,9 +110,9 @@
}
},
"node_modules/@babel/eslint-parser": {
- "version": "7.27.5",
- "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.27.5.tgz",
- "integrity": "sha512-HLkYQfRICudzcOtjGwkPvGc5nF1b4ljLZh1IRDj50lRZ718NAKVgQpIAUX8bfg6u/yuSKY3L7E0YzIV+OxrB8Q==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.28.5.tgz",
+ "integrity": "sha512-fcdRcWahONYo+JRnJg1/AekOacGvKx12Gu0qXJXFi2WBqQA1i7+O5PaxRB7kxE/Op94dExnCiiar6T09pvdHpA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -144,16 +129,16 @@
}
},
"node_modules/@babel/generator": {
- "version": "7.27.5",
- "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.5.tgz",
- "integrity": "sha512-ZGhA37l0e/g2s1Cnzdix0O3aLYm66eF8aufiVteOgnwxgnRP8GoyMj7VWsgWnQbVKXyge7hqrFh2K2TQM6t1Hw==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz",
+ "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/parser": "^7.27.5",
- "@babel/types": "^7.27.3",
- "@jridgewell/gen-mapping": "^0.3.5",
- "@jridgewell/trace-mapping": "^0.3.25",
+ "@babel/parser": "^7.28.5",
+ "@babel/types": "^7.28.5",
+ "@jridgewell/gen-mapping": "^0.3.12",
+ "@jridgewell/trace-mapping": "^0.3.28",
"jsesc": "^3.0.2"
},
"engines": {
@@ -191,18 +176,18 @@
}
},
"node_modules/@babel/helper-create-class-features-plugin": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.27.1.tgz",
- "integrity": "sha512-QwGAmuvM17btKU5VqXfb+Giw4JcN0hjuufz3DYnpeVDvZLAObloM77bhMXiqry3Iio+Ai4phVRDwl6WU10+r5A==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.5.tgz",
+ "integrity": "sha512-q3WC4JfdODypvxArsJQROfupPBq9+lMwjKq7C33GhbFYJsufD0yd/ziwD+hJucLeWsnFPWZjsU2DNFqBPE7jwQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/helper-annotate-as-pure": "^7.27.1",
- "@babel/helper-member-expression-to-functions": "^7.27.1",
+ "@babel/helper-annotate-as-pure": "^7.27.3",
+ "@babel/helper-member-expression-to-functions": "^7.28.5",
"@babel/helper-optimise-call-expression": "^7.27.1",
"@babel/helper-replace-supers": "^7.27.1",
"@babel/helper-skip-transparent-expression-wrappers": "^7.27.1",
- "@babel/traverse": "^7.27.1",
+ "@babel/traverse": "^7.28.5",
"semver": "^6.3.1"
},
"engines": {
@@ -213,14 +198,14 @@
}
},
"node_modules/@babel/helper-create-regexp-features-plugin": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.27.1.tgz",
- "integrity": "sha512-uVDC72XVf8UbrH5qQTc18Agb8emwjTiZrQE11Nv3CuBEZmVvTwwE9CBUEvHku06gQCAyYf8Nv6ja1IN+6LMbxQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.28.5.tgz",
+ "integrity": "sha512-N1EhvLtHzOvj7QQOUCCS3NrPJP8c5W6ZXCHDn7Yialuy1iu4r5EmIYkXlKNqT99Ciw+W0mDqWoR6HWMZlFP3hw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/helper-annotate-as-pure": "^7.27.1",
- "regexpu-core": "^6.2.0",
+ "@babel/helper-annotate-as-pure": "^7.27.3",
+ "regexpu-core": "^6.3.1",
"semver": "^6.3.1"
},
"engines": {
@@ -247,15 +232,25 @@
"@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0"
}
},
+ "node_modules/@babel/helper-globals": {
+ "version": "7.28.0",
+ "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
+ "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
"node_modules/@babel/helper-member-expression-to-functions": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz",
- "integrity": "sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz",
+ "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/traverse": "^7.27.1",
- "@babel/types": "^7.27.1"
+ "@babel/traverse": "^7.28.5",
+ "@babel/types": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -276,15 +271,15 @@
}
},
"node_modules/@babel/helper-module-transforms": {
- "version": "7.27.3",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz",
- "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==",
+ "version": "7.28.3",
+ "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz",
+ "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-module-imports": "^7.27.1",
"@babel/helper-validator-identifier": "^7.27.1",
- "@babel/traverse": "^7.27.3"
+ "@babel/traverse": "^7.28.3"
},
"engines": {
"node": ">=6.9.0"
@@ -377,9 +372,9 @@
}
},
"node_modules/@babel/helper-validator-identifier": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
- "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
+ "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
"dev": true,
"license": "MIT",
"engines": {
@@ -397,43 +392,43 @@
}
},
"node_modules/@babel/helper-wrap-function": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.27.1.tgz",
- "integrity": "sha512-NFJK2sHUvrjo8wAU/nQTWU890/zB2jj0qBcCbZbbf+005cAsv6tMjXz31fBign6M5ov1o0Bllu+9nbqkfsjjJQ==",
+ "version": "7.28.3",
+ "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.28.3.tgz",
+ "integrity": "sha512-zdf983tNfLZFletc0RRXYrHrucBEg95NIFMkn6K9dbeMYnsgHaSBGcQqdsCSStG2PYwRre0Qc2NNSCXbG+xc6g==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/template": "^7.27.1",
- "@babel/traverse": "^7.27.1",
- "@babel/types": "^7.27.1"
+ "@babel/template": "^7.27.2",
+ "@babel/traverse": "^7.28.3",
+ "@babel/types": "^7.28.2"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helpers": {
- "version": "7.27.6",
- "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.6.tgz",
- "integrity": "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==",
+ "version": "7.28.4",
+ "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz",
+ "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/template": "^7.27.2",
- "@babel/types": "^7.27.6"
+ "@babel/types": "^7.28.4"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.7.tgz",
- "integrity": "sha512-qnzXzDXdr/po3bOTbTIQZ7+TxNKxpkN5IifVLXS+r7qwynkZfPyjZfE7hCXbo7IoO9TNcSyibgONsf2HauUd3Q==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
+ "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/types": "^7.27.7"
+ "@babel/types": "^7.28.5"
},
"bin": {
"parser": "bin/babel-parser.js"
@@ -443,14 +438,14 @@
}
},
"node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.27.1.tgz",
- "integrity": "sha512-QPG3C9cCVRQLxAVwmefEmwdTanECuUBMQZ/ym5kiw3XKCGA7qkuQLcjWWHcrD/GKbn/WmJwaezfuuAOcyKlRPA==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.28.5.tgz",
+ "integrity": "sha512-87GDMS3tsmMSi/3bWOte1UblL+YUTFMV8SZPZ2eSEL17s74Cw/l63rR6NmGVKMYW2GYi85nE+/d6Hw5N0bEk2Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
- "@babel/traverse": "^7.27.1"
+ "@babel/traverse": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -510,14 +505,14 @@
}
},
"node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.27.1.tgz",
- "integrity": "sha512-6BpaYGDavZqkI6yT+KSPdpZFfpnd68UKXbcjI9pJ13pvHhPrCKWOOLp+ysvMeA+DxnhuPpgIaRpxRxo5A9t5jw==",
+ "version": "7.28.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.28.3.tgz",
+ "integrity": "sha512-b6YTX108evsvE4YgWyQ921ZAFFQm3Bn+CA3+ZXlNVnPhx+UfsVURoPjfGAPCjBgrqo30yX/C2nZGX96DxvR9Iw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
- "@babel/traverse": "^7.27.1"
+ "@babel/traverse": "^7.28.3"
},
"engines": {
"node": ">=6.9.0"
@@ -527,9 +522,9 @@
}
},
"node_modules/@babel/plugin-proposal-decorators": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.27.1.tgz",
- "integrity": "sha512-DTxe4LBPrtFdsWzgpmbBKevg3e9PBy+dXRt19kSbucbZvL2uqtdqwwpluL1jfxYE0wIDTFp1nTy/q6gNLsxXrg==",
+ "version": "7.28.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.28.0.tgz",
+ "integrity": "sha512-zOiZqvANjWDUaUS9xMxbMcK/Zccztbe/6ikvUXaG9nsPH3w6qh5UaPGAnirI/WhIbZ8m3OHU0ReyPrknG+ZKeg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -639,15 +634,15 @@
}
},
"node_modules/@babel/plugin-transform-async-generator-functions": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.27.1.tgz",
- "integrity": "sha512-eST9RrwlpaoJBDHShc+DS2SG4ATTi2MYNb4OxYkf3n+7eb49LWpnS+HSpVfW4x927qQwgk8A2hGNVaajAEw0EA==",
+ "version": "7.28.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.28.0.tgz",
+ "integrity": "sha512-BEOdvX4+M765icNPZeidyADIvQ1m1gmunXufXxvRESy/jNNyfovIqUyE7MVgGBjWktCoJlzvFA1To2O4ymIO3Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
"@babel/helper-remap-async-to-generator": "^7.27.1",
- "@babel/traverse": "^7.27.1"
+ "@babel/traverse": "^7.28.0"
},
"engines": {
"node": ">=6.9.0"
@@ -691,9 +686,9 @@
}
},
"node_modules/@babel/plugin-transform-block-scoping": {
- "version": "7.27.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.27.5.tgz",
- "integrity": "sha512-JF6uE2s67f0y2RZcm2kpAUEbD50vH62TyWVebxwHAlbSdM49VqPz8t4a1uIjp4NIOIZ4xzLfjY5emt/RCyC7TQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.5.tgz",
+ "integrity": "sha512-45DmULpySVvmq9Pj3X9B+62Xe+DJGov27QravQJU1LLcapR6/10i+gYVAucGGJpHBp5mYxIMK4nDAT/QDLr47g==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -724,13 +719,13 @@
}
},
"node_modules/@babel/plugin-transform-class-static-block": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.27.1.tgz",
- "integrity": "sha512-s734HmYU78MVzZ++joYM+NkJusItbdRcbm+AGRgJCt3iA+yux0QpD9cBVdz3tKyrjVYWRl7j0mHSmv4lhV0aoA==",
+ "version": "7.28.3",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.28.3.tgz",
+ "integrity": "sha512-LtPXlBbRoc4Njl/oh1CeD/3jC+atytbnf/UqLoqTDcEYGUPj022+rvfkbDYieUrSj3CaV4yHDByPE+T2HwfsJg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/helper-create-class-features-plugin": "^7.27.1",
+ "@babel/helper-create-class-features-plugin": "^7.28.3",
"@babel/helper-plugin-utils": "^7.27.1"
},
"engines": {
@@ -741,18 +736,18 @@
}
},
"node_modules/@babel/plugin-transform-classes": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.27.7.tgz",
- "integrity": "sha512-CuLkokN1PEZ0Fsjtq+001aog/C2drDK9nTfK/NRK0n6rBin6cBrvM+zfQjDE+UllhR6/J4a6w8Xq9i4yi3mQrw==",
+ "version": "7.28.4",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.4.tgz",
+ "integrity": "sha512-cFOlhIYPBv/iBoc+KS3M6et2XPtbT2HiCRfBXWtfpc9OAyostldxIf9YAYB6ypURBBbx+Qv6nyrLzASfJe+hBA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-annotate-as-pure": "^7.27.3",
"@babel/helper-compilation-targets": "^7.27.2",
+ "@babel/helper-globals": "^7.28.0",
"@babel/helper-plugin-utils": "^7.27.1",
"@babel/helper-replace-supers": "^7.27.1",
- "@babel/traverse": "^7.27.7",
- "globals": "^11.1.0"
+ "@babel/traverse": "^7.28.4"
},
"engines": {
"node": ">=6.9.0"
@@ -761,16 +756,6 @@
"@babel/core": "^7.0.0-0"
}
},
- "node_modules/@babel/plugin-transform-classes/node_modules/globals": {
- "version": "11.12.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
- "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=4"
- }
- },
"node_modules/@babel/plugin-transform-computed-properties": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.27.1.tgz",
@@ -789,14 +774,14 @@
}
},
"node_modules/@babel/plugin-transform-destructuring": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.27.7.tgz",
- "integrity": "sha512-pg3ZLdIKWCP0CrJm0O4jYjVthyBeioVfvz9nwt6o5paUxsgJ/8GucSMAIaj6M7xA4WY+SrvtGu2LijzkdyecWQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.5.tgz",
+ "integrity": "sha512-Kl9Bc6D0zTUcFUvkNuQh4eGXPKKNDOJQXVyyM4ZAQPMveniJdxi8XMJwLo+xSoW3MIq81bD33lcUe9kZpl0MCw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.27.1",
- "@babel/traverse": "^7.27.7"
+ "@babel/traverse": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -871,10 +856,27 @@
"@babel/core": "^7.0.0-0"
}
},
+ "node_modules/@babel/plugin-transform-explicit-resource-management": {
+ "version": "7.28.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-explicit-resource-management/-/plugin-transform-explicit-resource-management-7.28.0.tgz",
+ "integrity": "sha512-K8nhUcn3f6iB+P3gwCv/no7OdzOZQcKchW6N389V6PD8NUWKZHzndOd9sPDVbMoBsbmjMqlB4L9fm+fEFNVlwQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.27.1",
+ "@babel/plugin-transform-destructuring": "^7.28.0"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
"node_modules/@babel/plugin-transform-exponentiation-operator": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.27.1.tgz",
- "integrity": "sha512-uspvXnhHvGKf2r4VVtBpeFnuDWsJLQ6MF6lGJLC89jBR1uoVeqM416AZtTuhTezOfgHicpJQmoD5YUakO/YmXQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.28.5.tgz",
+ "integrity": "sha512-D4WIMaFtwa2NizOp+dnoFjRez/ClKiC2BqqImwKd1X28nqBtZEyCYJ2ozQrrzlxAFrcrjxo39S6khe9RNDlGzw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -971,9 +973,9 @@
}
},
"node_modules/@babel/plugin-transform-logical-assignment-operators": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.27.1.tgz",
- "integrity": "sha512-SJvDs5dXxiae4FbSL1aBJlG4wvl594N6YEVVn9e3JGulwioy6z3oPjx/sQBO3Y4NwUu5HNix6KJ3wBZoewcdbw==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.28.5.tgz",
+ "integrity": "sha512-axUuqnUTBuXyHGcJEVVh9pORaN6wC5bYfE7FGzPiaWa3syib9m7g+/IT/4VgCOe2Upef43PHzeAvcrVek6QuuA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1037,16 +1039,16 @@
}
},
"node_modules/@babel/plugin-transform-modules-systemjs": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.27.1.tgz",
- "integrity": "sha512-w5N1XzsRbc0PQStASMksmUeqECuzKuTJer7kFagK8AXgpCMkeDMO5S+aaFb7A51ZYDF7XI34qsTX+fkHiIm5yA==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.28.5.tgz",
+ "integrity": "sha512-vn5Jma98LCOeBy/KpeQhXcV2WZgaRUtjwQmjoBuLNlOmkg0fB5pdvYVeWRYI69wWKwK2cD1QbMiUQnoujWvrew==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/helper-module-transforms": "^7.27.1",
+ "@babel/helper-module-transforms": "^7.28.3",
"@babel/helper-plugin-utils": "^7.27.1",
- "@babel/helper-validator-identifier": "^7.27.1",
- "@babel/traverse": "^7.27.1"
+ "@babel/helper-validator-identifier": "^7.28.5",
+ "@babel/traverse": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -1138,17 +1140,17 @@
}
},
"node_modules/@babel/plugin-transform-object-rest-spread": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.27.7.tgz",
- "integrity": "sha512-201B1kFTWhckclcXpWHc8uUpYziDX/Pl4rxl0ZX0DiCZ3jknwfSUALL3QCYeeXXB37yWxJbo+g+Vfq8pAaHi3w==",
+ "version": "7.28.4",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.28.4.tgz",
+ "integrity": "sha512-373KA2HQzKhQCYiRVIRr+3MjpCObqzDlyrM6u4I201wL8Mp2wHf7uB8GhDwis03k2ti8Zr65Zyyqs1xOxUF/Ew==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-compilation-targets": "^7.27.2",
"@babel/helper-plugin-utils": "^7.27.1",
- "@babel/plugin-transform-destructuring": "^7.27.7",
+ "@babel/plugin-transform-destructuring": "^7.28.0",
"@babel/plugin-transform-parameters": "^7.27.7",
- "@babel/traverse": "^7.27.7"
+ "@babel/traverse": "^7.28.4"
},
"engines": {
"node": ">=6.9.0"
@@ -1191,9 +1193,9 @@
}
},
"node_modules/@babel/plugin-transform-optional-chaining": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.27.1.tgz",
- "integrity": "sha512-BQmKPPIuc8EkZgNKsv0X4bPmOoayeu4F1YCwx2/CfmDSXDbp7GnzlUH+/ul5VGfRg1AoFPsrIThlEBj2xb4CAg==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.28.5.tgz",
+ "integrity": "sha512-N6fut9IZlPnjPwgiQkXNhb+cT8wQKFlJNqcZkWlcTqkcqx6/kU4ynGmLFoa4LViBSirn05YAwk+sQBbPfxtYzQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1275,9 +1277,9 @@
}
},
"node_modules/@babel/plugin-transform-regenerator": {
- "version": "7.27.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.27.5.tgz",
- "integrity": "sha512-uhB8yHerfe3MWnuLAhEbeQ4afVoqv8BQsPqrTv7e/jZ9y00kJL6l9a/f4OWaKxotmjzewfEyXE1vgDJenkQ2/Q==",
+ "version": "7.28.4",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.28.4.tgz",
+ "integrity": "sha512-+ZEdQlBoRg9m2NnzvEeLgtvBMO4tkFBw5SQIUgLICgTrumLoU7lr+Oghi6km2PFj+dbUt2u1oby2w3BDO9YQnA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1472,64 +1474,65 @@
}
},
"node_modules/@babel/preset-env": {
- "version": "7.27.2",
- "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.27.2.tgz",
- "integrity": "sha512-Ma4zSuYSlGNRlCLO+EAzLnCmJK2vdstgv+n7aUP+/IKZrOfWHOJVdSJtuub8RzHTj3ahD37k5OKJWvzf16TQyQ==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.28.5.tgz",
+ "integrity": "sha512-S36mOoi1Sb6Fz98fBfE+UZSpYw5mJm0NUHtIKrOuNcqeFauy1J6dIvXm2KRVKobOSaGq4t/hBXdN4HGU3wL9Wg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/compat-data": "^7.27.2",
+ "@babel/compat-data": "^7.28.5",
"@babel/helper-compilation-targets": "^7.27.2",
"@babel/helper-plugin-utils": "^7.27.1",
"@babel/helper-validator-option": "^7.27.1",
- "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.27.1",
+ "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.28.5",
"@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.27.1",
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.27.1",
"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.27.1",
- "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.27.1",
+ "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.28.3",
"@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2",
"@babel/plugin-syntax-import-assertions": "^7.27.1",
"@babel/plugin-syntax-import-attributes": "^7.27.1",
"@babel/plugin-syntax-unicode-sets-regex": "^7.18.6",
"@babel/plugin-transform-arrow-functions": "^7.27.1",
- "@babel/plugin-transform-async-generator-functions": "^7.27.1",
+ "@babel/plugin-transform-async-generator-functions": "^7.28.0",
"@babel/plugin-transform-async-to-generator": "^7.27.1",
"@babel/plugin-transform-block-scoped-functions": "^7.27.1",
- "@babel/plugin-transform-block-scoping": "^7.27.1",
+ "@babel/plugin-transform-block-scoping": "^7.28.5",
"@babel/plugin-transform-class-properties": "^7.27.1",
- "@babel/plugin-transform-class-static-block": "^7.27.1",
- "@babel/plugin-transform-classes": "^7.27.1",
+ "@babel/plugin-transform-class-static-block": "^7.28.3",
+ "@babel/plugin-transform-classes": "^7.28.4",
"@babel/plugin-transform-computed-properties": "^7.27.1",
- "@babel/plugin-transform-destructuring": "^7.27.1",
+ "@babel/plugin-transform-destructuring": "^7.28.5",
"@babel/plugin-transform-dotall-regex": "^7.27.1",
"@babel/plugin-transform-duplicate-keys": "^7.27.1",
"@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.27.1",
"@babel/plugin-transform-dynamic-import": "^7.27.1",
- "@babel/plugin-transform-exponentiation-operator": "^7.27.1",
+ "@babel/plugin-transform-explicit-resource-management": "^7.28.0",
+ "@babel/plugin-transform-exponentiation-operator": "^7.28.5",
"@babel/plugin-transform-export-namespace-from": "^7.27.1",
"@babel/plugin-transform-for-of": "^7.27.1",
"@babel/plugin-transform-function-name": "^7.27.1",
"@babel/plugin-transform-json-strings": "^7.27.1",
"@babel/plugin-transform-literals": "^7.27.1",
- "@babel/plugin-transform-logical-assignment-operators": "^7.27.1",
+ "@babel/plugin-transform-logical-assignment-operators": "^7.28.5",
"@babel/plugin-transform-member-expression-literals": "^7.27.1",
"@babel/plugin-transform-modules-amd": "^7.27.1",
"@babel/plugin-transform-modules-commonjs": "^7.27.1",
- "@babel/plugin-transform-modules-systemjs": "^7.27.1",
+ "@babel/plugin-transform-modules-systemjs": "^7.28.5",
"@babel/plugin-transform-modules-umd": "^7.27.1",
"@babel/plugin-transform-named-capturing-groups-regex": "^7.27.1",
"@babel/plugin-transform-new-target": "^7.27.1",
"@babel/plugin-transform-nullish-coalescing-operator": "^7.27.1",
"@babel/plugin-transform-numeric-separator": "^7.27.1",
- "@babel/plugin-transform-object-rest-spread": "^7.27.2",
+ "@babel/plugin-transform-object-rest-spread": "^7.28.4",
"@babel/plugin-transform-object-super": "^7.27.1",
"@babel/plugin-transform-optional-catch-binding": "^7.27.1",
- "@babel/plugin-transform-optional-chaining": "^7.27.1",
- "@babel/plugin-transform-parameters": "^7.27.1",
+ "@babel/plugin-transform-optional-chaining": "^7.28.5",
+ "@babel/plugin-transform-parameters": "^7.27.7",
"@babel/plugin-transform-private-methods": "^7.27.1",
"@babel/plugin-transform-private-property-in-object": "^7.27.1",
"@babel/plugin-transform-property-literals": "^7.27.1",
- "@babel/plugin-transform-regenerator": "^7.27.1",
+ "@babel/plugin-transform-regenerator": "^7.28.4",
"@babel/plugin-transform-regexp-modifiers": "^7.27.1",
"@babel/plugin-transform-reserved-words": "^7.27.1",
"@babel/plugin-transform-shorthand-properties": "^7.27.1",
@@ -1542,10 +1545,10 @@
"@babel/plugin-transform-unicode-regex": "^7.27.1",
"@babel/plugin-transform-unicode-sets-regex": "^7.27.1",
"@babel/preset-modules": "0.1.6-no-external-plugins",
- "babel-plugin-polyfill-corejs2": "^0.4.10",
- "babel-plugin-polyfill-corejs3": "^0.11.0",
- "babel-plugin-polyfill-regenerator": "^0.6.1",
- "core-js-compat": "^3.40.0",
+ "babel-plugin-polyfill-corejs2": "^0.4.14",
+ "babel-plugin-polyfill-corejs3": "^0.13.0",
+ "babel-plugin-polyfill-regenerator": "^0.6.5",
+ "core-js-compat": "^3.43.0",
"semver": "^6.3.1"
},
"engines": {
@@ -1571,9 +1574,9 @@
}
},
"node_modules/@babel/runtime": {
- "version": "7.27.6",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.6.tgz",
- "integrity": "sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==",
+ "version": "7.28.4",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz",
+ "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
@@ -1595,43 +1598,33 @@
}
},
"node_modules/@babel/traverse": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.7.tgz",
- "integrity": "sha512-X6ZlfR/O/s5EQ/SnUSLzr+6kGnkg8HXGMzpgsMsrJVcfDtH1vIp6ctCN4eZ1LS5c0+te5Cb6Y514fASjMRJ1nw==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz",
+ "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.27.1",
- "@babel/generator": "^7.27.5",
- "@babel/parser": "^7.27.7",
+ "@babel/generator": "^7.28.5",
+ "@babel/helper-globals": "^7.28.0",
+ "@babel/parser": "^7.28.5",
"@babel/template": "^7.27.2",
- "@babel/types": "^7.27.7",
- "debug": "^4.3.1",
- "globals": "^11.1.0"
+ "@babel/types": "^7.28.5",
+ "debug": "^4.3.1"
},
"engines": {
"node": ">=6.9.0"
}
},
- "node_modules/@babel/traverse/node_modules/globals": {
- "version": "11.12.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
- "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=4"
- }
- },
"node_modules/@babel/types": {
- "version": "7.27.7",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.7.tgz",
- "integrity": "sha512-8OLQgDScAOHXnAz2cV+RfzzNMipuLVBz2biuAJFMV9bfkNf393je3VM8CLkjQodW5+iWsSJdSgSWT6rsZoXHPw==",
+ "version": "7.28.5",
+ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
+ "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.27.1",
- "@babel/helper-validator-identifier": "^7.27.1"
+ "@babel/helper-validator-identifier": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
@@ -1649,9 +1642,9 @@
}
},
"node_modules/@eslint-community/eslint-utils": {
- "version": "4.7.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz",
- "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==",
+ "version": "4.9.1",
+ "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz",
+ "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1681,9 +1674,9 @@
}
},
"node_modules/@eslint-community/regexpp": {
- "version": "4.12.1",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
- "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
+ "version": "4.12.2",
+ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
+ "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1691,13 +1684,13 @@
}
},
"node_modules/@eslint/config-array": {
- "version": "0.21.0",
- "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz",
- "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==",
+ "version": "0.21.1",
+ "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz",
+ "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
- "@eslint/object-schema": "^2.1.6",
+ "@eslint/object-schema": "^2.1.7",
"debug": "^4.3.1",
"minimatch": "^3.1.2"
},
@@ -1706,19 +1699,22 @@
}
},
"node_modules/@eslint/config-helpers": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz",
- "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==",
+ "version": "0.4.2",
+ "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz",
+ "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==",
"dev": true,
"license": "Apache-2.0",
+ "dependencies": {
+ "@eslint/core": "^0.17.0"
+ },
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
"node_modules/@eslint/core": {
- "version": "0.14.0",
- "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.14.0.tgz",
- "integrity": "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==",
+ "version": "0.17.0",
+ "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz",
+ "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
@@ -1729,9 +1725,9 @@
}
},
"node_modules/@eslint/eslintrc": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz",
- "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==",
+ "version": "3.3.3",
+ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz",
+ "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1741,7 +1737,7 @@
"globals": "^14.0.0",
"ignore": "^5.2.0",
"import-fresh": "^3.2.1",
- "js-yaml": "^4.1.0",
+ "js-yaml": "^4.1.1",
"minimatch": "^3.1.2",
"strip-json-comments": "^3.1.1"
},
@@ -1766,9 +1762,9 @@
}
},
"node_modules/@eslint/js": {
- "version": "9.30.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.30.0.tgz",
- "integrity": "sha512-Wzw3wQwPvc9sHM+NjakWTcPx11mbZyiYHuwWa/QfZ7cIRX7WK54PSk7bdyXDaoaopUcMatv1zaQvOAAO8hCdww==",
+ "version": "9.39.2",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz",
+ "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1779,9 +1775,9 @@
}
},
"node_modules/@eslint/object-schema": {
- "version": "2.1.6",
- "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz",
- "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==",
+ "version": "2.1.7",
+ "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz",
+ "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==",
"dev": true,
"license": "Apache-2.0",
"engines": {
@@ -1789,32 +1785,19 @@
}
},
"node_modules/@eslint/plugin-kit": {
- "version": "0.3.3",
- "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz",
- "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==",
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz",
+ "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
- "@eslint/core": "^0.15.1",
+ "@eslint/core": "^0.17.0",
"levn": "^0.4.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
- "node_modules/@eslint/plugin-kit/node_modules/@eslint/core": {
- "version": "0.15.1",
- "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz",
- "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==",
- "dev": true,
- "license": "Apache-2.0",
- "dependencies": {
- "@types/json-schema": "^7.0.15"
- },
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- }
- },
"node_modules/@humanfs/core": {
"version": "0.19.1",
"resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
@@ -1826,33 +1809,19 @@
}
},
"node_modules/@humanfs/node": {
- "version": "0.16.6",
- "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz",
- "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==",
+ "version": "0.16.7",
+ "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz",
+ "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@humanfs/core": "^0.19.1",
- "@humanwhocodes/retry": "^0.3.0"
+ "@humanwhocodes/retry": "^0.4.0"
},
"engines": {
"node": ">=18.18.0"
}
},
- "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz",
- "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=18.18"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/nzakas"
- }
- },
"node_modules/@humanwhocodes/module-importer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
@@ -1881,10 +1850,32 @@
"url": "https://github.com/sponsors/nzakas"
}
},
+ "node_modules/@isaacs/balanced-match": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
+ "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
+ "license": "MIT",
+ "engines": {
+ "node": "20 || >=22"
+ }
+ },
+ "node_modules/@isaacs/brace-expansion": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz",
+ "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
+ "license": "MIT",
+ "dependencies": {
+ "@isaacs/balanced-match": "^4.0.1"
+ },
+ "engines": {
+ "node": "20 || >=22"
+ }
+ },
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
+ "dev": true,
"license": "ISC",
"dependencies": {
"string-width": "^5.1.2",
@@ -1912,34 +1903,32 @@
}
},
"node_modules/@jridgewell/gen-mapping": {
- "version": "0.3.8",
- "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz",
- "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==",
+ "version": "0.3.13",
+ "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
+ "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@jridgewell/set-array": "^1.2.1",
- "@jridgewell/sourcemap-codec": "^1.4.10",
+ "@jridgewell/sourcemap-codec": "^1.5.0",
"@jridgewell/trace-mapping": "^0.3.24"
- },
- "engines": {
- "node": ">=6.0.0"
}
},
- "node_modules/@jridgewell/resolve-uri": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
- "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
+ "node_modules/@jridgewell/remapping": {
+ "version": "2.3.5",
+ "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz",
+ "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==",
"dev": true,
"license": "MIT",
- "engines": {
- "node": ">=6.0.0"
+ "peer": true,
+ "dependencies": {
+ "@jridgewell/gen-mapping": "^0.3.5",
+ "@jridgewell/trace-mapping": "^0.3.24"
}
},
- "node_modules/@jridgewell/set-array": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
- "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
+ "node_modules/@jridgewell/resolve-uri": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
+ "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1947,9 +1936,9 @@
}
},
"node_modules/@jridgewell/source-map": {
- "version": "0.3.6",
- "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz",
- "integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==",
+ "version": "0.3.11",
+ "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.11.tgz",
+ "integrity": "sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1958,16 +1947,16 @@
}
},
"node_modules/@jridgewell/sourcemap-codec": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
- "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
+ "version": "1.5.5",
+ "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
+ "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
"dev": true,
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
- "version": "0.3.25",
- "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
- "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
+ "version": "0.3.31",
+ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
+ "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1982,9 +1971,9 @@
"license": "MIT"
},
"node_modules/@lit-labs/ssr-dom-shim": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/@lit-labs/ssr-dom-shim/-/ssr-dom-shim-1.3.0.tgz",
- "integrity": "sha512-nQIWonJ6eFAvUUrSlwyHDm/aE8PBDu5kRpL0vHMg6K8fK3Diq1xdPjTnsJSwxABhaZ+5eBi1btQB5ShUTKo4nQ==",
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/@lit-labs/ssr-dom-shim/-/ssr-dom-shim-1.5.0.tgz",
+ "integrity": "sha512-HLomZXMmrCFHSRKESF5vklAKsDY7/fsT/ZhqCu3V0UoW/Qbv8wxmO4W9bx4KnCCF2Zak4yuk+AGraK/bPmI4kA==",
"license": "BSD-3-Clause"
},
"node_modules/@lit/reactive-element": {
@@ -1997,9 +1986,9 @@
}
},
"node_modules/@markw65/fit-file-writer": {
- "version": "0.1.6",
- "resolved": "https://registry.npmjs.org/@markw65/fit-file-writer/-/fit-file-writer-0.1.6.tgz",
- "integrity": "sha512-EmJhk6mEnSK1Xy0sxyxEUxus9Dzvz58rNOyltnse8k2JUnbL8Uwz1+6E0XYJQ+FQLbrmkY7jluEEaftLb/ceIg==",
+ "version": "0.1.7",
+ "resolved": "https://registry.npmjs.org/@markw65/fit-file-writer/-/fit-file-writer-0.1.7.tgz",
+ "integrity": "sha512-EVs8LZJcN+4VAZ5V7hGdUkhZV3mGhtLvY4XNS7zIybPQxfmrWv0UEzTY8Zry0t20xJtcHFXOgsCIb8p74pzwew==",
"license": "MIT"
},
"node_modules/@nicolo-ribaudo/eslint-scope-5-internals": {
@@ -2054,6 +2043,7 @@
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
+ "dev": true,
"license": "MIT",
"optional": true,
"engines": {
@@ -2061,9 +2051,9 @@
}
},
"node_modules/@rollup/plugin-babel": {
- "version": "6.0.4",
- "resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-6.0.4.tgz",
- "integrity": "sha512-YF7Y52kFdFT/xVSuVdjkV5ZdX/3YtmX0QulG+x0taQOtJdHYzVU61aSSkAgVJ7NOv6qPkIYiJSgSWWN/DM5sGw==",
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-6.1.0.tgz",
+ "integrity": "sha512-dFZNuFD2YRcoomP4oYf+DvQNSUA9ih+A3vUqopQx5EdtPGo3WBnQcI/S8pwpz91UsGfL0HsMSOlaMld8HrbubA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2088,9 +2078,9 @@
}
},
"node_modules/@rollup/plugin-commonjs": {
- "version": "28.0.6",
- "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-28.0.6.tgz",
- "integrity": "sha512-XSQB1K7FUU5QP+3lOQmVCE3I0FcbbNvmNT4VJSj93iUjayaARrTQeoRdiYQoftAJBLrR9t2agwAd3ekaTgHNlw==",
+ "version": "29.0.0",
+ "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-29.0.0.tgz",
+ "integrity": "sha512-U2YHaxR2cU/yAiwKJtJRhnyLk7cifnQw0zUpISsocBDoHDJn+HTV74ABqnwr5bEgWUwFZC9oFL6wLe21lHu5eQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2115,9 +2105,9 @@
}
},
"node_modules/@rollup/plugin-node-resolve": {
- "version": "16.0.1",
- "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.1.tgz",
- "integrity": "sha512-tk5YCxJWIG81umIvNkSod2qK5KyQW19qcBF/B78n1bjtOON6gzKoVeSzAE8yHCZEDmqkHKkxplExA8KzdJLJpA==",
+ "version": "16.0.3",
+ "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.3.tgz",
+ "integrity": "sha512-lUYM3UBGuM93CnMPG1YocWu7X802BrNF3jW2zny5gQyLQgRFJhV1Sq0Zi74+dh/6NBx1DxFC4b4GXg9wUCG5Qg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2163,9 +2153,9 @@
}
},
"node_modules/@rollup/pluginutils": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.2.0.tgz",
- "integrity": "sha512-qWJ2ZTbmumwiLFomfzTyt5Kng4hwPi9rwCYN4SHb6eaRU1KNO4ccxINHr/VhH4GgPlt1XfSTLX2LBTme8ne4Zw==",
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz",
+ "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2186,9 +2176,9 @@
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.44.1.tgz",
- "integrity": "sha512-JAcBr1+fgqx20m7Fwe1DxPUl/hPkee6jA6Pl7n1v2EFiktAHenTaXl5aIFjUIEsfn9w3HE4gK1lEgNGMzBDs1w==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.55.1.tgz",
+ "integrity": "sha512-9R0DM/ykwfGIlNu6+2U09ga0WXeZ9MRC2Ter8jnz8415VbuIykVuc6bhdrbORFZANDmTDvq26mJrEVTl8TdnDg==",
"cpu": [
"arm"
],
@@ -2200,9 +2190,9 @@
]
},
"node_modules/@rollup/rollup-android-arm64": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.44.1.tgz",
- "integrity": "sha512-RurZetXqTu4p+G0ChbnkwBuAtwAbIwJkycw1n6GvlGlBuS4u5qlr5opix8cBAYFJgaY05TWtM+LaoFggUmbZEQ==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.55.1.tgz",
+ "integrity": "sha512-eFZCb1YUqhTysgW3sj/55du5cG57S7UTNtdMjCW7LwVcj3dTTcowCsC8p7uBdzKsZYa8J7IDE8lhMI+HX1vQvg==",
"cpu": [
"arm64"
],
@@ -2214,9 +2204,9 @@
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.44.1.tgz",
- "integrity": "sha512-fM/xPesi7g2M7chk37LOnmnSTHLG/v2ggWqKj3CCA1rMA4mm5KVBT1fNoswbo1JhPuNNZrVwpTvlCVggv8A2zg==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.55.1.tgz",
+ "integrity": "sha512-p3grE2PHcQm2e8PSGZdzIhCKbMCw/xi9XvMPErPhwO17vxtvCN5FEA2mSLgmKlCjHGMQTP6phuQTYWUnKewwGg==",
"cpu": [
"arm64"
],
@@ -2228,9 +2218,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.44.1.tgz",
- "integrity": "sha512-gDnWk57urJrkrHQ2WVx9TSVTH7lSlU7E3AFqiko+bgjlh78aJ88/3nycMax52VIVjIm3ObXnDL2H00e/xzoipw==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.55.1.tgz",
+ "integrity": "sha512-rDUjG25C9qoTm+e02Esi+aqTKSBYwVTaoS1wxcN47/Luqef57Vgp96xNANwt5npq9GDxsH7kXxNkJVEsWEOEaQ==",
"cpu": [
"x64"
],
@@ -2242,9 +2232,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.44.1.tgz",
- "integrity": "sha512-wnFQmJ/zPThM5zEGcnDcCJeYJgtSLjh1d//WuHzhf6zT3Md1BvvhJnWoy+HECKu2bMxaIcfWiu3bJgx6z4g2XA==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.55.1.tgz",
+ "integrity": "sha512-+JiU7Jbp5cdxekIgdte0jfcu5oqw4GCKr6i3PJTlXTCU5H5Fvtkpbs4XJHRmWNXF+hKmn4v7ogI5OQPaupJgOg==",
"cpu": [
"arm64"
],
@@ -2256,9 +2246,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.44.1.tgz",
- "integrity": "sha512-uBmIxoJ4493YATvU2c0upGz87f99e3wop7TJgOA/bXMFd2SvKCI7xkxY/5k50bv7J6dw1SXT4MQBQSLn8Bb/Uw==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.55.1.tgz",
+ "integrity": "sha512-V5xC1tOVWtLLmr3YUk2f6EJK4qksksOYiz/TCsFHu/R+woubcLWdC9nZQmwjOAbmExBIVKsm1/wKmEy4z4u4Bw==",
"cpu": [
"x64"
],
@@ -2270,9 +2260,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.44.1.tgz",
- "integrity": "sha512-n0edDmSHlXFhrlmTK7XBuwKlG5MbS7yleS1cQ9nn4kIeW+dJH+ExqNgQ0RrFRew8Y+0V/x6C5IjsHrJmiHtkxQ==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.55.1.tgz",
+ "integrity": "sha512-Rn3n+FUk2J5VWx+ywrG/HGPTD9jXNbicRtTM11e/uorplArnXZYsVifnPPqNNP5BsO3roI4n8332ukpY/zN7rQ==",
"cpu": [
"arm"
],
@@ -2284,9 +2274,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.44.1.tgz",
- "integrity": "sha512-8WVUPy3FtAsKSpyk21kV52HCxB+me6YkbkFHATzC2Yd3yuqHwy2lbFL4alJOLXKljoRw08Zk8/xEj89cLQ/4Nw==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.55.1.tgz",
+ "integrity": "sha512-grPNWydeKtc1aEdrJDWk4opD7nFtQbMmV7769hiAaYyUKCT1faPRm2av8CX1YJsZ4TLAZcg9gTR1KvEzoLjXkg==",
"cpu": [
"arm"
],
@@ -2298,9 +2288,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.44.1.tgz",
- "integrity": "sha512-yuktAOaeOgorWDeFJggjuCkMGeITfqvPgkIXhDqsfKX8J3jGyxdDZgBV/2kj/2DyPaLiX6bPdjJDTu9RB8lUPQ==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.55.1.tgz",
+ "integrity": "sha512-a59mwd1k6x8tXKcUxSyISiquLwB5pX+fJW9TkWU46lCqD/GRDe9uDN31jrMmVP3feI3mhAdvcCClhV8V5MhJFQ==",
"cpu": [
"arm64"
],
@@ -2312,9 +2302,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.44.1.tgz",
- "integrity": "sha512-W+GBM4ifET1Plw8pdVaecwUgxmiH23CfAUj32u8knq0JPFyK4weRy6H7ooxYFD19YxBulL0Ktsflg5XS7+7u9g==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.55.1.tgz",
+ "integrity": "sha512-puS1MEgWX5GsHSoiAsF0TYrpomdvkaXm0CofIMG5uVkP6IBV+ZO9xhC5YEN49nsgYo1DuuMquF9+7EDBVYu4uA==",
"cpu": [
"arm64"
],
@@ -2325,10 +2315,10 @@
"linux"
]
},
- "node_modules/@rollup/rollup-linux-loongarch64-gnu": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.44.1.tgz",
- "integrity": "sha512-1zqnUEMWp9WrGVuVak6jWTl4fEtrVKfZY7CvcBmUUpxAJ7WcSowPSAWIKa/0o5mBL/Ij50SIf9tuirGx63Ovew==",
+ "node_modules/@rollup/rollup-linux-loong64-gnu": {
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.55.1.tgz",
+ "integrity": "sha512-r3Wv40in+lTsULSb6nnoudVbARdOwb2u5fpeoOAZjFLznp6tDU8kd+GTHmJoqZ9lt6/Sys33KdIHUaQihFcu7g==",
"cpu": [
"loong64"
],
@@ -2339,10 +2329,38 @@
"linux"
]
},
- "node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.44.1.tgz",
- "integrity": "sha512-Rl3JKaRu0LHIx7ExBAAnf0JcOQetQffaw34T8vLlg9b1IhzcBgaIdnvEbbsZq9uZp3uAH+JkHd20Nwn0h9zPjA==",
+ "node_modules/@rollup/rollup-linux-loong64-musl": {
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.55.1.tgz",
+ "integrity": "sha512-MR8c0+UxAlB22Fq4R+aQSPBayvYa3+9DrwG/i1TKQXFYEaoW3B5b/rkSRIypcZDdWjWnpcvxbNaAJDcSbJU3Lw==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-gnu": {
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.55.1.tgz",
+ "integrity": "sha512-3KhoECe1BRlSYpMTeVrD4sh2Pw2xgt4jzNSZIIPLFEsnQn9gAnZagW9+VqDqAHgm1Xc77LzJOo2LdigS5qZ+gw==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-musl": {
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.55.1.tgz",
+ "integrity": "sha512-ziR1OuZx0vdYZZ30vueNZTg73alF59DicYrPViG0NEgDVN8/Jl87zkAPu4u6VjZST2llgEUjaiNl9JM6HH1Vdw==",
"cpu": [
"ppc64"
],
@@ -2354,9 +2372,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.44.1.tgz",
- "integrity": "sha512-j5akelU3snyL6K3N/iX7otLBIl347fGwmd95U5gS/7z6T4ftK288jKq3A5lcFKcx7wwzb5rgNvAg3ZbV4BqUSw==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.55.1.tgz",
+ "integrity": "sha512-uW0Y12ih2XJRERZ4jAfKamTyIHVMPQnTZcQjme2HMVDAHY4amf5u414OqNYC+x+LzRdRcnIG1YodLrrtA8xsxw==",
"cpu": [
"riscv64"
],
@@ -2368,9 +2386,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-musl": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.44.1.tgz",
- "integrity": "sha512-ppn5llVGgrZw7yxbIm8TTvtj1EoPgYUAbfw0uDjIOzzoqlZlZrLJ/KuiE7uf5EpTpCTrNt1EdtzF0naMm0wGYg==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.55.1.tgz",
+ "integrity": "sha512-u9yZ0jUkOED1BFrqu3BwMQoixvGHGZ+JhJNkNKY/hyoEgOwlqKb62qu+7UjbPSHYjiVy8kKJHvXKv5coH4wDeg==",
"cpu": [
"riscv64"
],
@@ -2382,9 +2400,9 @@
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.44.1.tgz",
- "integrity": "sha512-Hu6hEdix0oxtUma99jSP7xbvjkUM/ycke/AQQ4EC5g7jNRLLIwjcNwaUy95ZKBJJwg1ZowsclNnjYqzN4zwkAw==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.55.1.tgz",
+ "integrity": "sha512-/0PenBCmqM4ZUd0190j7J0UsQ/1nsi735iPRakO8iPciE7BQ495Y6msPzaOmvx0/pn+eJVVlZrNrSh4WSYLxNg==",
"cpu": [
"s390x"
],
@@ -2396,9 +2414,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.44.1.tgz",
- "integrity": "sha512-EtnsrmZGomz9WxK1bR5079zee3+7a+AdFlghyd6VbAjgRJDbTANJ9dcPIPAi76uG05micpEL+gPGmAKYTschQw==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.55.1.tgz",
+ "integrity": "sha512-a8G4wiQxQG2BAvo+gU6XrReRRqj+pLS2NGXKm8io19goR+K8lw269eTrPkSdDTALwMmJp4th2Uh0D8J9bEV1vg==",
"cpu": [
"x64"
],
@@ -2410,9 +2428,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.44.1.tgz",
- "integrity": "sha512-iAS4p+J1az6Usn0f8xhgL4PaU878KEtutP4hqw52I4IO6AGoyOkHCxcc4bqufv1tQLdDWFx8lR9YlwxKuv3/3g==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.55.1.tgz",
+ "integrity": "sha512-bD+zjpFrMpP/hqkfEcnjXWHMw5BIghGisOKPj+2NaNDuVT+8Ds4mPf3XcPHuat1tz89WRL+1wbcxKY3WSbiT7w==",
"cpu": [
"x64"
],
@@ -2423,10 +2441,38 @@
"linux"
]
},
+ "node_modules/@rollup/rollup-openbsd-x64": {
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.55.1.tgz",
+ "integrity": "sha512-eLXw0dOiqE4QmvikfQ6yjgkg/xDM+MdU9YJuP4ySTibXU0oAvnEWXt7UDJmD4UkYialMfOGFPJnIHSe/kdzPxg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-openharmony-arm64": {
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.55.1.tgz",
+ "integrity": "sha512-xzm44KgEP11te3S2HCSyYf5zIzWmx3n8HDCc7EE59+lTcswEWNpvMLfd9uJvVX8LCg9QWG67Xt75AuHn4vgsXw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openharmony"
+ ]
+ },
"node_modules/@rollup/rollup-win32-arm64-msvc": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.44.1.tgz",
- "integrity": "sha512-NtSJVKcXwcqozOl+FwI41OH3OApDyLk3kqTJgx8+gp6On9ZEt5mYhIsKNPGuaZr3p9T6NWPKGU/03Vw4CNU9qg==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.55.1.tgz",
+ "integrity": "sha512-yR6Bl3tMC/gBok5cz/Qi0xYnVbIxGx5Fcf/ca0eB6/6JwOY+SRUcJfI0OpeTpPls7f194as62thCt/2BjxYN8g==",
"cpu": [
"arm64"
],
@@ -2438,9 +2484,9 @@
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.44.1.tgz",
- "integrity": "sha512-JYA3qvCOLXSsnTR3oiyGws1Dm0YTuxAAeaYGVlGpUsHqloPcFjPg+X0Fj2qODGLNwQOAcCiQmHub/V007kiH5A==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.55.1.tgz",
+ "integrity": "sha512-3fZBidchE0eY0oFZBnekYCfg+5wAB0mbpCBuofh5mZuzIU/4jIVkbESmd2dOsFNS78b53CYv3OAtwqkZZmU5nA==",
"cpu": [
"ia32"
],
@@ -2451,10 +2497,24 @@
"win32"
]
},
+ "node_modules/@rollup/rollup-win32-x64-gnu": {
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.55.1.tgz",
+ "integrity": "sha512-xGGY5pXj69IxKb4yv/POoocPy/qmEGhimy/FoTpTSVju3FYXUQQMFCaZZXJVidsmGxRioZAwpThl/4zX41gRKg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
"node_modules/@rollup/rollup-win32-x64-msvc": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.44.1.tgz",
- "integrity": "sha512-J8o22LuF0kTe7m+8PvW9wk3/bRq5+mRo5Dqo6+vXb7otCm3TPhYOJqOaQtGU9YMWQSL3krMnoOxMr0+9E6F3Ug==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.55.1.tgz",
+ "integrity": "sha512-SPEpaL6DX4rmcXtnhdrQYgzQ5W2uW3SCJch88lB2zImhJRhIIK44fkUrgIV/Q8yUNfw5oyZ5vkeQsZLhCb06lw==",
"cpu": [
"x64"
],
@@ -2466,9 +2526,9 @@
]
},
"node_modules/@sindresorhus/merge-streams": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz",
- "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==",
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz",
+ "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2479,18 +2539,18 @@
}
},
"node_modules/@stylistic/eslint-plugin": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-5.1.0.tgz",
- "integrity": "sha512-TJRJul4u/lmry5N/kyCU+7RWWOk0wyXN+BncRlDYBqpLFnzXkd7QGVfN7KewarFIXv0IX0jSF/Ksu7aHWEDeuw==",
+ "version": "5.6.1",
+ "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-5.6.1.tgz",
+ "integrity": "sha512-JCs+MqoXfXrRPGbGmho/zGS/jMcn3ieKl/A8YImqib76C8kjgZwq5uUFzc30lJkMvcchuRn6/v8IApLxli3Jyw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@eslint-community/eslint-utils": "^4.7.0",
- "@typescript-eslint/types": "^8.34.1",
+ "@eslint-community/eslint-utils": "^4.9.0",
+ "@typescript-eslint/types": "^8.47.0",
"eslint-visitor-keys": "^4.2.1",
"espree": "^10.4.0",
"estraverse": "^5.3.0",
- "picomatch": "^4.0.2"
+ "picomatch": "^4.0.3"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2551,12 +2611,12 @@
"license": "MIT"
},
"node_modules/@types/node": {
- "version": "24.0.7",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-24.0.7.tgz",
- "integrity": "sha512-YIEUUr4yf8q8oQoXPpSlnvKNVKDQlPMWrmOcgzoduo7kvA2UF0/BwJ/eMKFTiTtkNL17I0M6Xe2tvwFU7be6iw==",
+ "version": "25.0.3",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz",
+ "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
"license": "MIT",
"dependencies": {
- "undici-types": "~7.8.0"
+ "undici-types": "~7.16.0"
}
},
"node_modules/@types/parse5": {
@@ -2567,9 +2627,9 @@
"license": "MIT"
},
"node_modules/@types/readable-stream": {
- "version": "4.0.21",
- "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.21.tgz",
- "integrity": "sha512-19eKVv9tugr03IgfXlA9UVUVRbW6IuqRO5B92Dl4a6pT7K8uaGrNS0GkxiZD0BOk6PLuXl5FhWl//eX/pzYdTQ==",
+ "version": "4.0.23",
+ "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.23.tgz",
+ "integrity": "sha512-wwXrtQvbMHxCbBgjHaMGEmImFTQxxpfMOR/ZoQnXxB1woqkUbdLGFDgauo00Py9IudiaqSeiBiulSV9i6XIPig==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
@@ -2596,16 +2656,25 @@
"license": "MIT"
},
"node_modules/@types/w3c-web-usb": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/@types/w3c-web-usb/-/w3c-web-usb-1.0.10.tgz",
- "integrity": "sha512-CHgUI5kTc/QLMP8hODUHhge0D4vx+9UiAwIGiT0sTy/B2XpdX1U5rJt6JSISgr6ikRT7vxV9EVAFeYZqUnl1gQ==",
+ "version": "1.0.13",
+ "resolved": "https://registry.npmjs.org/@types/w3c-web-usb/-/w3c-web-usb-1.0.13.tgz",
+ "integrity": "sha512-N2nSl3Xsx8mRHZBvMSdNGtzMyeleTvtlEw+ujujgXalPqOjIA6UtrqcB6OzyUjkTbDm3J7P1RNK1lgoO7jxtsw==",
"license": "MIT",
"optional": true
},
+ "node_modules/@types/ws": {
+ "version": "8.18.1",
+ "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
+ "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
"node_modules/@typescript-eslint/types": {
- "version": "8.35.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.35.0.tgz",
- "integrity": "sha512-0mYH3emanku0vHw2aRLNGqe7EXh9WHEhi7kZzscrMDf6IIRUQ5Jk4wp1QrledE/36KtdZrVfKnE32eZCf/vaVQ==",
+ "version": "8.52.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.52.0.tgz",
+ "integrity": "sha512-LWQV1V4q9V4cT4H5JCIx3481iIFxH1UkVk+ZkGGAV1ZGcjGI9IoFOfg3O6ywz8QqCDEp7Inlg6kovMofsNRaGg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2631,9 +2700,9 @@
}
},
"node_modules/@web/rollup-plugin-html": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/@web/rollup-plugin-html/-/rollup-plugin-html-2.3.0.tgz",
- "integrity": "sha512-ap4AisBacK6WwrTnVlPErupxlywWU1ELsjGIMZ4VpofvhbVTBIGErJo5VEj2mSJyEH3I1EbzUcWuhDCePrnWEw==",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/@web/rollup-plugin-html/-/rollup-plugin-html-3.0.0.tgz",
+ "integrity": "sha512-zl0h5UuLLIymDHrNc52SF18ZSA5S2kS6g0SSPNEFeBSnneYuYM4P3hU7cJGFIP6wX+3U9WdEA09/Lq8bMtaaCg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2714,9 +2783,9 @@
}
},
"node_modules/ansi-regex": {
- "version": "6.1.0",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
- "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
+ "version": "6.2.2",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
+ "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"license": "MIT",
"engines": {
"node": ">=12"
@@ -2729,6 +2798,7 @@
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^2.0.1"
@@ -2855,14 +2925,14 @@
}
},
"node_modules/babel-plugin-polyfill-corejs3": {
- "version": "0.11.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.11.1.tgz",
- "integrity": "sha512-yGCqvBT4rwMczo28xkH/noxJ6MZ4nJfkVYdoDaC/utLtWrXxv27HVrzAeSbqR8SxDsp46n0YF47EbHoixy6rXQ==",
+ "version": "0.13.0",
+ "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.13.0.tgz",
+ "integrity": "sha512-U+GNwMdSFgzVmfhNm8GJUX88AadB3uo9KpJqS3FaqNIPKgySuvMb+bHPsOmmuWyIcuqZj/pzt1RUIUZns4y2+A==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@babel/helper-define-polyfill-provider": "^0.6.3",
- "core-js-compat": "^3.40.0"
+ "@babel/helper-define-polyfill-provider": "^0.6.5",
+ "core-js-compat": "^3.43.0"
},
"peerDependencies": {
"@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0"
@@ -2885,6 +2955,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
"license": "MIT"
},
"node_modules/base64-js": {
@@ -2907,6 +2978,16 @@
],
"license": "MIT"
},
+ "node_modules/baseline-browser-mapping": {
+ "version": "2.9.12",
+ "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.12.tgz",
+ "integrity": "sha512-Mij6Lij93pTAIsSYy5cyBQ975Qh9uLEc5rwGTpomiZeXZL9yIS6uORJakb3ScHgfs0serMMfIbXzokPMuEiRyw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "bin": {
+ "baseline-browser-mapping": "dist/cli.js"
+ }
+ },
"node_modules/binary-extensions": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
@@ -2930,9 +3011,9 @@
}
},
"node_modules/bl": {
- "version": "6.1.0",
- "resolved": "https://registry.npmjs.org/bl/-/bl-6.1.0.tgz",
- "integrity": "sha512-ClDyJGQkc8ZtzdAAbAwBmhMSpwN/sC9HA8jxdYm6nVUbCfZbe2mgza4qh7AuEYyEPB/c4Kznf9s66bnsKMQDjw==",
+ "version": "6.1.6",
+ "resolved": "https://registry.npmjs.org/bl/-/bl-6.1.6.tgz",
+ "integrity": "sha512-jLsPgN/YSvPUg9UX0Kd73CXpm2Psg9FxMeCSXnk3WBO3CMT10JMwijubhGfHCnFu6TPn1ei3b975dxv7K2pWVg==",
"license": "MIT",
"dependencies": {
"@types/readable-stream": "^4.0.0",
@@ -2974,6 +3055,18 @@
"node": ">=8"
}
},
+ "node_modules/broker-factory": {
+ "version": "3.1.11",
+ "resolved": "https://registry.npmjs.org/broker-factory/-/broker-factory-3.1.11.tgz",
+ "integrity": "sha512-ex4RuEI0AJOdaIcXe1lu9EqRAVkoYvdcvwLvNcE5UZQzYNqzPY+z0frnlxT4+cUwNVpE//9MwGx4lKiLH+pEcw==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/runtime": "^7.28.4",
+ "fast-unique-numbers": "^9.0.24",
+ "tslib": "^2.8.1",
+ "worker-factory": "^7.0.46"
+ }
+ },
"node_modules/brotli-size": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/brotli-size/-/brotli-size-4.0.0.tgz",
@@ -2988,9 +3081,9 @@
}
},
"node_modules/browserslist": {
- "version": "4.25.1",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz",
- "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==",
+ "version": "4.28.1",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz",
+ "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==",
"dev": true,
"funding": [
{
@@ -3008,10 +3101,11 @@
],
"license": "MIT",
"dependencies": {
- "caniuse-lite": "^1.0.30001726",
- "electron-to-chromium": "^1.5.173",
- "node-releases": "^2.0.19",
- "update-browserslist-db": "^1.1.3"
+ "baseline-browser-mapping": "^2.9.0",
+ "caniuse-lite": "^1.0.30001759",
+ "electron-to-chromium": "^1.5.263",
+ "node-releases": "^2.0.27",
+ "update-browserslist-db": "^1.2.0"
},
"bin": {
"browserslist": "cli.js"
@@ -3122,9 +3216,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001726",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001726.tgz",
- "integrity": "sha512-VQAUIUzBiZ/UnlM28fSp2CRF3ivUn1BWEvxMcVTNwpw91Py1pGbPIyIKtd+tzct9C3ouceCVdGAXxZOpZAsgdw==",
+ "version": "1.0.30001763",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001763.tgz",
+ "integrity": "sha512-mh/dGtq56uN98LlNX9qdbKnzINhX0QzhiWBFEkFfsFO4QyCvL8YegrJAazCwXIeqkIob8BlZPGM3xdnY+sgmvQ==",
"dev": true,
"funding": [
{
@@ -3193,9 +3287,9 @@
}
},
"node_modules/chart.js": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.0.tgz",
- "integrity": "sha512-aYeC/jDgSEx8SHWZvANYMioYMZ2KX02W6f6uVfyteuCGcadDLcYVHdfdygsTQkQ4TKn5lghoojAsPj5pu0SnvQ==",
+ "version": "4.5.1",
+ "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz",
+ "integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==",
"license": "MIT",
"dependencies": {
"@kurkle/color": "^0.3.0"
@@ -3336,72 +3430,66 @@
}
},
"node_modules/cliui": {
- "version": "8.0.1",
- "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
- "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz",
+ "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==",
"license": "ISC",
"dependencies": {
- "string-width": "^4.2.0",
- "strip-ansi": "^6.0.1",
- "wrap-ansi": "^7.0.0"
+ "string-width": "^7.2.0",
+ "strip-ansi": "^7.1.0",
+ "wrap-ansi": "^9.0.0"
},
"engines": {
- "node": ">=12"
+ "node": ">=20"
}
},
- "node_modules/cliui/node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "node_modules/cliui/node_modules/ansi-styles": {
+ "version": "6.2.3",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+ "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"license": "MIT",
"engines": {
- "node": ">=8"
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/cliui/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "version": "10.6.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
+ "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"license": "MIT"
},
"node_modules/cliui/node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
+ "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"license": "MIT",
"dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
+ "emoji-regex": "^10.3.0",
+ "get-east-asian-width": "^1.0.0",
+ "strip-ansi": "^7.1.0"
},
"engines": {
- "node": ">=8"
- }
- },
- "node_modules/cliui/node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
+ "node": ">=18"
},
- "engines": {
- "node": ">=8"
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cliui/node_modules/wrap-ansi": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
- "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^4.0.0",
- "string-width": "^4.1.0",
- "strip-ansi": "^6.0.0"
+ "version": "9.0.2",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
+ "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^6.2.1",
+ "string-width": "^7.0.0",
+ "strip-ansi": "^7.1.0"
},
"engines": {
- "node": ">=10"
+ "node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
@@ -3411,6 +3499,7 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"color-name": "~1.1.4"
@@ -3423,6 +3512,7 @@
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true,
"license": "MIT"
},
"node_modules/commander": {
@@ -3493,13 +3583,13 @@
"peer": true
},
"node_modules/core-js-compat": {
- "version": "3.43.0",
- "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.43.0.tgz",
- "integrity": "sha512-2GML2ZsCc5LR7hZYz4AXmjQw8zuy2T//2QntwdnpuYI7jteT6GVYJL7F6C2C57R7gSYrcqVW3lAALefdbhBLDA==",
+ "version": "3.47.0",
+ "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.47.0.tgz",
+ "integrity": "sha512-IGfuznZ/n7Kp9+nypamBhvwdwLsW6KC8IOaURw2doAK5e98AG3acVLdh0woOnEqCfUtS+Vu882JE4k/DAm3ItQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "browserslist": "^4.25.0"
+ "browserslist": "^4.28.0"
},
"funding": {
"type": "opencollective",
@@ -3510,6 +3600,7 @@
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",
@@ -3593,9 +3684,9 @@
}
},
"node_modules/debug": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
- "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
+ "version": "4.4.3",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
+ "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
@@ -3696,9 +3787,9 @@
}
},
"node_modules/detect-libc": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz",
- "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==",
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
+ "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==",
"dev": true,
"license": "Apache-2.0",
"engines": {
@@ -3765,6 +3856,7 @@
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
+ "dev": true,
"license": "MIT"
},
"node_modules/ee-first": {
@@ -3774,9 +3866,9 @@
"license": "MIT"
},
"node_modules/electron-to-chromium": {
- "version": "1.5.177",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.177.tgz",
- "integrity": "sha512-7EH2G59nLsEMj97fpDuvVcYi6lwTcM1xuWw3PssD8xzboAW7zj7iB3COEEEATUfjLHrs5uKBLQT03V/8URx06g==",
+ "version": "1.5.267",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz",
+ "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==",
"dev": true,
"license": "ISC"
},
@@ -3784,6 +3876,7 @@
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
+ "dev": true,
"license": "MIT"
},
"node_modules/encodeurl": {
@@ -3809,9 +3902,9 @@
}
},
"node_modules/error-ex": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
- "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==",
+ "version": "1.3.4",
+ "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz",
+ "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3819,9 +3912,9 @@
}
},
"node_modules/es-abstract": {
- "version": "1.24.0",
- "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz",
- "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==",
+ "version": "1.24.1",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.1.tgz",
+ "integrity": "sha512-zHXBLhP+QehSSbsS9Pt23Gg964240DPd6QCf8WpkqEXxQ7fhdZzYsocOr5u7apWonsS5EjZDmTF+/slGMyasvw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3983,25 +4076,24 @@
}
},
"node_modules/eslint": {
- "version": "9.30.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.30.0.tgz",
- "integrity": "sha512-iN/SiPxmQu6EVkf+m1qpBxzUhE12YqFLOSySuOyVLJLEF9nzTf+h/1AJYc1JWzCnktggeNrjvQGLngDzXirU6g==",
+ "version": "9.39.2",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz",
+ "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@eslint-community/eslint-utils": "^4.2.0",
+ "@eslint-community/eslint-utils": "^4.8.0",
"@eslint-community/regexpp": "^4.12.1",
- "@eslint/config-array": "^0.21.0",
- "@eslint/config-helpers": "^0.3.0",
- "@eslint/core": "^0.14.0",
+ "@eslint/config-array": "^0.21.1",
+ "@eslint/config-helpers": "^0.4.2",
+ "@eslint/core": "^0.17.0",
"@eslint/eslintrc": "^3.3.1",
- "@eslint/js": "9.30.0",
- "@eslint/plugin-kit": "^0.3.1",
+ "@eslint/js": "9.39.2",
+ "@eslint/plugin-kit": "^0.4.1",
"@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
"@humanwhocodes/retry": "^0.4.2",
"@types/estree": "^1.0.6",
- "@types/json-schema": "^7.0.15",
"ajv": "^6.12.4",
"chalk": "^4.0.0",
"cross-spawn": "^7.0.6",
@@ -4139,9 +4231,9 @@
}
},
"node_modules/esquery": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz",
- "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==",
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz",
+ "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
@@ -4270,22 +4362,22 @@
"license": "MIT"
},
"node_modules/fast-unique-numbers": {
- "version": "8.0.13",
- "resolved": "https://registry.npmjs.org/fast-unique-numbers/-/fast-unique-numbers-8.0.13.tgz",
- "integrity": "sha512-7OnTFAVPefgw2eBJ1xj2PGGR9FwYzSUso9decayHgCDX4sJkHLdcsYTytTg+tYv+wKF3U8gJuSBz2jJpQV4u/g==",
+ "version": "9.0.24",
+ "resolved": "https://registry.npmjs.org/fast-unique-numbers/-/fast-unique-numbers-9.0.24.tgz",
+ "integrity": "sha512-Dv0BYn4waOWse94j16rsZ5w/0zoaCa74O3q6IZjMqaXbtT92Q+Sb6pPk+phGzD8Xh+nueQmSRI3tSCaHKidzKw==",
"license": "MIT",
"dependencies": {
- "@babel/runtime": "^7.23.8",
- "tslib": "^2.6.2"
+ "@babel/runtime": "^7.28.4",
+ "tslib": "^2.8.1"
},
"engines": {
- "node": ">=16.1.0"
+ "node": ">=18.2.0"
}
},
"node_modules/fastq": {
- "version": "1.19.1",
- "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz",
- "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==",
+ "version": "1.20.1",
+ "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz",
+ "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -4293,11 +4385,14 @@
}
},
"node_modules/fdir": {
- "version": "6.4.6",
- "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz",
- "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==",
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
+ "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
"dev": true,
"license": "MIT",
+ "engines": {
+ "node": ">=12.0.0"
+ },
"peerDependencies": {
"picomatch": "^3 || ^4"
},
@@ -4373,9 +4468,9 @@
}
},
"node_modules/finalhandler": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz",
- "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==",
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz",
+ "integrity": "sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==",
"license": "MIT",
"dependencies": {
"debug": "^4.4.0",
@@ -4386,7 +4481,11 @@
"statuses": "^2.0.1"
},
"engines": {
- "node": ">= 0.8"
+ "node": ">= 18.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
}
},
"node_modules/find-up": {
@@ -4447,6 +4546,7 @@
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
"integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
+ "dev": true,
"license": "ISC",
"dependencies": {
"cross-spawn": "^7.0.6",
@@ -4536,6 +4636,16 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/generator-function": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz",
+ "integrity": "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/gensync": {
"version": "1.0.0-beta.2",
"resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
@@ -4556,6 +4666,18 @@
"node": "6.* || 8.* || >= 10.*"
}
},
+ "node_modules/get-east-asian-width": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
+ "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
@@ -4614,9 +4736,10 @@
}
},
"node_modules/glob": {
- "version": "10.4.5",
- "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
- "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
+ "version": "10.5.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
+ "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
+ "dev": true,
"license": "ISC",
"dependencies": {
"foreground-child": "^3.1.0",
@@ -4650,6 +4773,7 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
@@ -4659,6 +4783,7 @@
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+ "dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
@@ -4671,9 +4796,9 @@
}
},
"node_modules/globals": {
- "version": "16.2.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-16.2.0.tgz",
- "integrity": "sha512-O+7l9tPdHCU320IigZZPj5zmRCFG9xHmx9cU8FqU2Rp+JN714seHV+2S9+JslCpY4gJwU2vOGox0wzgae/MCEg==",
+ "version": "17.0.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-17.0.0.tgz",
+ "integrity": "sha512-gv5BeD2EssA793rlFWVPMMCqefTlpusw6/2TbAVMy0FzcG8wKJn4O+NqJ4+XWmmwrayJgw5TzrmWjFgmz1XPqw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -4701,21 +4826,21 @@
}
},
"node_modules/globby": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz",
- "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==",
+ "version": "15.0.0",
+ "resolved": "https://registry.npmjs.org/globby/-/globby-15.0.0.tgz",
+ "integrity": "sha512-oB4vkQGqlMl682wL1IlWd02tXCbquGWM4voPEI85QmNKCaw8zGTm1f1rubFgkg3Eli2PtKlFgrnmUqasbQWlkw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@sindresorhus/merge-streams": "^2.1.0",
+ "@sindresorhus/merge-streams": "^4.0.0",
"fast-glob": "^3.3.3",
- "ignore": "^7.0.3",
+ "ignore": "^7.0.5",
"path-type": "^6.0.0",
"slash": "^5.1.0",
"unicorn-magic": "^0.3.0"
},
"engines": {
- "node": ">=18"
+ "node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
@@ -4919,28 +5044,23 @@
}
},
"node_modules/http-errors": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
- "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
+ "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
"license": "MIT",
"dependencies": {
- "depd": "2.0.0",
- "inherits": "2.0.4",
- "setprototypeof": "1.2.0",
- "statuses": "2.0.1",
- "toidentifier": "1.0.1"
+ "depd": "~2.0.0",
+ "inherits": "~2.0.4",
+ "setprototypeof": "~1.2.0",
+ "statuses": "~2.0.2",
+ "toidentifier": "~1.0.1"
},
"engines": {
"node": ">= 0.8"
- }
- },
- "node_modules/http-errors/node_modules/statuses": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
- "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.8"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
}
},
"node_modules/http2-proxy": {
@@ -5015,9 +5135,9 @@
}
},
"node_modules/incyclist-ant-plus": {
- "version": "0.3.5",
- "resolved": "https://registry.npmjs.org/incyclist-ant-plus/-/incyclist-ant-plus-0.3.5.tgz",
- "integrity": "sha512-My4xhH+Ecms//APTHdTu17Mi1nk2iVPXbYJ3T2FDn2eETbr1DTctZaLLjH4paykAeZxjMboWbQCu13yBlT75ZQ==",
+ "version": "0.3.6",
+ "resolved": "https://registry.npmjs.org/incyclist-ant-plus/-/incyclist-ant-plus-0.3.6.tgz",
+ "integrity": "sha512-8qp2X71OaWWa8RvOIqQRuSJBH2/UPjDn5nW574ZcNsYQEA0S7XXvGBVI5j0IN9Iav7HyEGIn++HrDXgqShy9uQ==",
"license": "MIT",
"dependencies": {
"queue-fifo": "^0.2.6"
@@ -5026,7 +5146,7 @@
"node": ">=16.0.0"
},
"optionalDependencies": {
- "usb": "^2.15.0"
+ "usb": "^2.16.0"
}
},
"node_modules/inherits": {
@@ -5051,14 +5171,10 @@
}
},
"node_modules/ip-address": {
- "version": "9.0.5",
- "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz",
- "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==",
+ "version": "10.1.0",
+ "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz",
+ "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==",
"license": "MIT",
- "dependencies": {
- "jsbn": "1.1.0",
- "sprintf-js": "^1.1.3"
- },
"engines": {
"node": ">= 12"
}
@@ -5285,20 +5401,22 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/is-generator-function": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
- "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==",
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.2.tgz",
+ "integrity": "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bound": "^1.0.3",
- "get-proto": "^1.0.0",
+ "call-bound": "^1.0.4",
+ "generator-function": "^2.0.0",
+ "get-proto": "^1.0.1",
"has-tostringtag": "^1.0.2",
"safe-regex-test": "^1.1.0"
},
@@ -5559,12 +5677,14 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+ "dev": true,
"license": "ISC"
},
"node_modules/jackspeak": {
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
"integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
+ "dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/cliui": "^8.0.2"
@@ -5594,9 +5714,9 @@
"license": "MIT"
},
"node_modules/js-yaml": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
- "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+ "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -5606,12 +5726,6 @@
"js-yaml": "bin/js-yaml.js"
}
},
- "node_modules/jsbn": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz",
- "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==",
- "license": "MIT"
- },
"node_modules/jsesc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
@@ -5675,9 +5789,9 @@
"license": "MIT"
},
"node_modules/katex": {
- "version": "0.16.22",
- "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.22.tgz",
- "integrity": "sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg==",
+ "version": "0.16.27",
+ "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.27.tgz",
+ "integrity": "sha512-aeQoDkuRWSqQN6nSvVCEFvfXdqo1OQiCmmW1kc9xSdjutPv7BGO7pqY9sQRJpMOGrEdfDgF2TfRXe5eUAD2Waw==",
"dev": true,
"funding": [
"https://opencollective.com/katex",
@@ -5736,9 +5850,9 @@
}
},
"node_modules/lightningcss": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz",
- "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz",
+ "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==",
"dev": true,
"license": "MPL-2.0",
"dependencies": {
@@ -5752,22 +5866,44 @@
"url": "https://opencollective.com/parcel"
},
"optionalDependencies": {
- "lightningcss-darwin-arm64": "1.30.1",
- "lightningcss-darwin-x64": "1.30.1",
- "lightningcss-freebsd-x64": "1.30.1",
- "lightningcss-linux-arm-gnueabihf": "1.30.1",
- "lightningcss-linux-arm64-gnu": "1.30.1",
- "lightningcss-linux-arm64-musl": "1.30.1",
- "lightningcss-linux-x64-gnu": "1.30.1",
- "lightningcss-linux-x64-musl": "1.30.1",
- "lightningcss-win32-arm64-msvc": "1.30.1",
- "lightningcss-win32-x64-msvc": "1.30.1"
+ "lightningcss-android-arm64": "1.30.2",
+ "lightningcss-darwin-arm64": "1.30.2",
+ "lightningcss-darwin-x64": "1.30.2",
+ "lightningcss-freebsd-x64": "1.30.2",
+ "lightningcss-linux-arm-gnueabihf": "1.30.2",
+ "lightningcss-linux-arm64-gnu": "1.30.2",
+ "lightningcss-linux-arm64-musl": "1.30.2",
+ "lightningcss-linux-x64-gnu": "1.30.2",
+ "lightningcss-linux-x64-musl": "1.30.2",
+ "lightningcss-win32-arm64-msvc": "1.30.2",
+ "lightningcss-win32-x64-msvc": "1.30.2"
+ }
+ },
+ "node_modules/lightningcss-android-arm64": {
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz",
+ "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
}
},
"node_modules/lightningcss-darwin-arm64": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz",
- "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz",
+ "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==",
"cpu": [
"arm64"
],
@@ -5786,9 +5922,9 @@
}
},
"node_modules/lightningcss-darwin-x64": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz",
- "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz",
+ "integrity": "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==",
"cpu": [
"x64"
],
@@ -5807,9 +5943,9 @@
}
},
"node_modules/lightningcss-freebsd-x64": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz",
- "integrity": "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz",
+ "integrity": "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==",
"cpu": [
"x64"
],
@@ -5828,9 +5964,9 @@
}
},
"node_modules/lightningcss-linux-arm-gnueabihf": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz",
- "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz",
+ "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==",
"cpu": [
"arm"
],
@@ -5849,9 +5985,9 @@
}
},
"node_modules/lightningcss-linux-arm64-gnu": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz",
- "integrity": "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz",
+ "integrity": "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==",
"cpu": [
"arm64"
],
@@ -5870,9 +6006,9 @@
}
},
"node_modules/lightningcss-linux-arm64-musl": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz",
- "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz",
+ "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==",
"cpu": [
"arm64"
],
@@ -5891,9 +6027,9 @@
}
},
"node_modules/lightningcss-linux-x64-gnu": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz",
- "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz",
+ "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==",
"cpu": [
"x64"
],
@@ -5912,9 +6048,9 @@
}
},
"node_modules/lightningcss-linux-x64-musl": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz",
- "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz",
+ "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==",
"cpu": [
"x64"
],
@@ -5933,9 +6069,9 @@
}
},
"node_modules/lightningcss-win32-arm64-msvc": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz",
- "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz",
+ "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==",
"cpu": [
"arm64"
],
@@ -5954,9 +6090,9 @@
}
},
"node_modules/lightningcss-win32-x64-msvc": {
- "version": "1.30.1",
- "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz",
- "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==",
+ "version": "1.30.2",
+ "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz",
+ "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==",
"cpu": [
"x64"
],
@@ -6102,13 +6238,13 @@
}
},
"node_modules/magic-string": {
- "version": "0.30.17",
- "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz",
- "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==",
+ "version": "0.30.21",
+ "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
+ "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@jridgewell/sourcemap-codec": "^1.5.0"
+ "@jridgewell/sourcemap-codec": "^1.5.5"
}
},
"node_modules/markdown-it": {
@@ -6130,9 +6266,9 @@
}
},
"node_modules/markdownlint": {
- "version": "0.38.0",
- "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.38.0.tgz",
- "integrity": "sha512-xaSxkaU7wY/0852zGApM8LdlIfGCW8ETZ0Rr62IQtAnUMlMuifsg09vWJcNYeL4f0anvr8Vo4ZQar8jGpV0btQ==",
+ "version": "0.40.0",
+ "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.40.0.tgz",
+ "integrity": "sha512-UKybllYNheWac61Ia7T6fzuQNDZimFIpCg2w6hHjgV1Qu0w1TV0LlSgryUGzM0bkKQCBhy2FDhEELB73Kb0kAg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6143,7 +6279,8 @@
"micromark-extension-gfm-footnote": "2.1.0",
"micromark-extension-gfm-table": "2.1.1",
"micromark-extension-math": "3.1.0",
- "micromark-util-types": "2.0.2"
+ "micromark-util-types": "2.0.2",
+ "string-width": "8.1.0"
},
"engines": {
"node": ">=20"
@@ -6153,18 +6290,18 @@
}
},
"node_modules/markdownlint-cli2": {
- "version": "0.18.1",
- "resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.18.1.tgz",
- "integrity": "sha512-/4Osri9QFGCZOCTkfA8qJF+XGjKYERSHkXzxSyS1hd3ZERJGjvsUao2h4wdnvpHp6Tu2Jh/bPHM0FE9JJza6ng==",
+ "version": "0.20.0",
+ "resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.20.0.tgz",
+ "integrity": "sha512-esPk+8Qvx/f0bzI7YelUeZp+jCtFOk3KjZ7s9iBQZ6HlymSXoTtWGiIRZP05/9Oy2ehIoIjenVwndxGtxOIJYQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "globby": "14.1.0",
- "js-yaml": "4.1.0",
+ "globby": "15.0.0",
+ "js-yaml": "4.1.1",
"jsonc-parser": "3.3.1",
"markdown-it": "14.1.0",
- "markdownlint": "0.38.0",
- "markdownlint-cli2-formatter-default": "0.0.5",
+ "markdownlint": "0.40.0",
+ "markdownlint-cli2-formatter-default": "0.0.6",
"micromatch": "4.0.8"
},
"bin": {
@@ -6178,9 +6315,9 @@
}
},
"node_modules/markdownlint-cli2-formatter-default": {
- "version": "0.0.5",
- "resolved": "https://registry.npmjs.org/markdownlint-cli2-formatter-default/-/markdownlint-cli2-formatter-default-0.0.5.tgz",
- "integrity": "sha512-4XKTwQ5m1+Txo2kuQ3Jgpo/KmnG+X90dWt4acufg6HVGadTUG5hzHF/wssp9b5MBYOMCnZ9RMPaU//uHsszF8Q==",
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/markdownlint-cli2-formatter-default/-/markdownlint-cli2-formatter-default-0.0.6.tgz",
+ "integrity": "sha512-VVDGKsq9sgzu378swJ0fcHfSicUnMxnL8gnLm/Q4J/xsNJ4e5bA6lvAz7PCzIl0/No0lHyaWdqVD2jotxOSFMQ==",
"dev": true,
"license": "MIT",
"funding": {
@@ -6190,6 +6327,23 @@
"markdownlint-cli2": ">=0.0.4"
}
},
+ "node_modules/markdownlint/node_modules/string-width": {
+ "version": "8.1.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
+ "integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "get-east-asian-width": "^1.3.0",
+ "strip-ansi": "^7.1.0"
+ },
+ "engines": {
+ "node": ">=20"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
@@ -6799,15 +6953,19 @@
}
},
"node_modules/mime-types": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
- "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz",
+ "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==",
"license": "MIT",
"dependencies": {
"mime-db": "^1.54.0"
},
"engines": {
- "node": ">= 0.6"
+ "node": ">=18"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
}
},
"node_modules/minimatch": {
@@ -6842,9 +7000,9 @@
}
},
"node_modules/minizlib": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
- "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz",
+ "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6854,31 +7012,17 @@
"node": ">= 18"
}
},
- "node_modules/mkdirp": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
- "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "mkdirp": "dist/cjs/src/bin.js"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
"node_modules/mqtt": {
- "version": "5.13.1",
- "resolved": "https://registry.npmjs.org/mqtt/-/mqtt-5.13.1.tgz",
- "integrity": "sha512-g+4G+ma0UeL3Pgu1y1si2NHb4VLIEUCtF789WrG99lLG0XZyoab2EJoy58YgGSg/1yFdthDBH0+4llsZZD/vug==",
+ "version": "5.14.1",
+ "resolved": "https://registry.npmjs.org/mqtt/-/mqtt-5.14.1.tgz",
+ "integrity": "sha512-NxkPxE70Uq3Ph7goefQa7ggSsVzHrayCD0OyxlJgITN/EbzlZN+JEPmaAZdxP1LsIT5FamDyILoQTF72W7Nnbw==",
"license": "MIT",
"dependencies": {
+ "@types/readable-stream": "^4.0.21",
+ "@types/ws": "^8.18.1",
"commist": "^3.2.0",
"concat-stream": "^2.0.0",
- "debug": "^4.4.0",
+ "debug": "^4.4.1",
"help-me": "^5.0.0",
"lru-cache": "^10.4.3",
"minimist": "^1.2.8",
@@ -6886,10 +7030,10 @@
"number-allocator": "^1.0.14",
"readable-stream": "^4.7.0",
"rfdc": "^1.4.1",
- "socks": "^2.8.3",
+ "socks": "^2.8.6",
"split2": "^4.2.0",
- "worker-timers": "^7.1.8",
- "ws": "^8.18.0"
+ "worker-timers": "^8.0.23",
+ "ws": "^8.18.3"
},
"bin": {
"mqtt": "build/bin/mqtt.js",
@@ -6934,9 +7078,9 @@
"license": "MIT"
},
"node_modules/nan": {
- "version": "2.22.2",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz",
- "integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==",
+ "version": "2.24.0",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.24.0.tgz",
+ "integrity": "sha512-Vpf9qnVW1RaDkoNKFUvfxqAbtI8ncb8OJlqZ9wwpXzWPEsvsB1nvdUi6oYrHIkQ1Y/tMDnr1h4nczS0VB9Xykg==",
"license": "MIT"
},
"node_modules/natural-compare": {
@@ -7022,16 +7166,16 @@
}
},
"node_modules/node-releases": {
- "version": "2.0.19",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",
- "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==",
+ "version": "2.0.27",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz",
+ "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==",
"dev": true,
"license": "MIT"
},
"node_modules/nodemon": {
- "version": "3.1.10",
- "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.10.tgz",
- "integrity": "sha512-WDjw3pJ0/0jMFmyNDp3gvY2YizjLmmOUQo6DEBY+JgdvW/yQ9mEeSw6H5ythl5Ny2ytb7f9C2nIbjSxMNzbJXw==",
+ "version": "3.1.11",
+ "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.11.tgz",
+ "integrity": "sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -7068,9 +7212,9 @@
}
},
"node_modules/nodemon/node_modules/semver": {
- "version": "7.7.2",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
- "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
+ "version": "7.7.3",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
+ "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
"dev": true,
"license": "ISC",
"bin": {
@@ -7447,6 +7591,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
+ "dev": true,
"license": "BlueOak-1.0.0"
},
"node_modules/param-case": {
@@ -7548,6 +7693,7 @@
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -7564,6 +7710,7 @@
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
"integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
+ "dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"lru-cache": "^10.2.0",
@@ -7580,6 +7727,7 @@
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
+ "dev": true,
"license": "ISC"
},
"node_modules/path-type": {
@@ -7603,9 +7751,9 @@
"license": "ISC"
},
"node_modules/picomatch": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
- "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"engines": {
@@ -7864,9 +8012,9 @@
"license": "MIT"
},
"node_modules/regenerate-unicode-properties": {
- "version": "10.2.0",
- "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz",
- "integrity": "sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA==",
+ "version": "10.2.2",
+ "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.2.tgz",
+ "integrity": "sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -7898,18 +8046,18 @@
}
},
"node_modules/regexpu-core": {
- "version": "6.2.0",
- "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-6.2.0.tgz",
- "integrity": "sha512-H66BPQMrv+V16t8xtmq+UC0CBpiTBA60V8ibS1QVReIp8T1z8hwFxqcGzm9K6lgsN7sB5edVH8a+ze6Fqm4weA==",
+ "version": "6.4.0",
+ "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-6.4.0.tgz",
+ "integrity": "sha512-0ghuzq67LI9bLXpOX/ISfve/Mq33a4aFRzoQYhnnok1JOFpmE/A2TBGkNVenOGEeSBCjIiWcc6MVOG5HEQv0sA==",
"dev": true,
"license": "MIT",
"dependencies": {
"regenerate": "^1.4.2",
- "regenerate-unicode-properties": "^10.2.0",
+ "regenerate-unicode-properties": "^10.2.2",
"regjsgen": "^0.8.0",
- "regjsparser": "^0.12.0",
+ "regjsparser": "^0.13.0",
"unicode-match-property-ecmascript": "^2.0.0",
- "unicode-match-property-value-ecmascript": "^2.1.0"
+ "unicode-match-property-value-ecmascript": "^2.2.1"
},
"engines": {
"node": ">=4"
@@ -7923,31 +8071,18 @@
"license": "MIT"
},
"node_modules/regjsparser": {
- "version": "0.12.0",
- "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.12.0.tgz",
- "integrity": "sha512-cnE+y8bz4NhMjISKbgeVJtqNbtf5QpjZP+Bslo+UqkIt9QPnX9q095eiRRASJG1/tz6dlNr6Z5NsBiWYokp6EQ==",
+ "version": "0.13.0",
+ "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.13.0.tgz",
+ "integrity": "sha512-NZQZdC5wOE/H3UT28fVGL+ikOZcEzfMGk/c3iN9UGxzWHMa1op7274oyiUVrAG4B2EuFhus8SvkaYnhvW92p9Q==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
- "jsesc": "~3.0.2"
+ "jsesc": "~3.1.0"
},
"bin": {
"regjsparser": "bin/parser"
}
},
- "node_modules/regjsparser/node_modules/jsesc": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz",
- "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "jsesc": "bin/jsesc"
- },
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/relateurl": {
"version": "0.2.7",
"resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz",
@@ -7959,14 +8094,14 @@
}
},
"node_modules/replace-in-file": {
- "version": "8.3.0",
- "resolved": "https://registry.npmjs.org/replace-in-file/-/replace-in-file-8.3.0.tgz",
- "integrity": "sha512-4VhddQiMCPIuypiwHDTM+XHjZoVu9h7ngBbSCnwGRcwdHwxltjt/m//Ep3GDwqaOx1fDSrKFQ+n7uo4uVcEz9Q==",
+ "version": "8.4.0",
+ "resolved": "https://registry.npmjs.org/replace-in-file/-/replace-in-file-8.4.0.tgz",
+ "integrity": "sha512-D28k8jy2LtUGbCzCnR3znajaTWIjJ/Uee3UdodzcHRxE7zn6NmYW/dcSqyivnsYU3W+MxdX6SbF28NvJ0GRoLA==",
"license": "MIT",
"dependencies": {
- "chalk": "^5.3.0",
- "glob": "^10.4.2",
- "yargs": "^17.7.2"
+ "chalk": "^5.6.2",
+ "glob": "^13.0.0",
+ "yargs": "^18.0.0"
},
"bin": {
"replace-in-file": "bin/cli.js"
@@ -7976,9 +8111,9 @@
}
},
"node_modules/replace-in-file/node_modules/chalk": {
- "version": "5.4.1",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz",
- "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==",
+ "version": "5.6.2",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz",
+ "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==",
"license": "MIT",
"engines": {
"node": "^12.17.0 || ^14.13 || >=16.0.0"
@@ -7987,23 +8122,71 @@
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
- "node_modules/require-directory": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
- "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
- "license": "MIT",
+ "node_modules/replace-in-file/node_modules/glob": {
+ "version": "13.0.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.0.tgz",
+ "integrity": "sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA==",
+ "license": "BlueOak-1.0.0",
+ "dependencies": {
+ "minimatch": "^10.1.1",
+ "minipass": "^7.1.2",
+ "path-scurry": "^2.0.0"
+ },
"engines": {
- "node": ">=0.10.0"
+ "node": "20 || >=22"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/replace-in-file/node_modules/lru-cache": {
+ "version": "11.2.4",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
+ "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
+ "license": "BlueOak-1.0.0",
+ "engines": {
+ "node": "20 || >=22"
+ }
+ },
+ "node_modules/replace-in-file/node_modules/minimatch": {
+ "version": "10.1.1",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
+ "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
+ "license": "BlueOak-1.0.0",
+ "dependencies": {
+ "@isaacs/brace-expansion": "^5.0.0"
+ },
+ "engines": {
+ "node": "20 || >=22"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/replace-in-file/node_modules/path-scurry": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz",
+ "integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==",
+ "license": "BlueOak-1.0.0",
+ "dependencies": {
+ "lru-cache": "^11.0.0",
+ "minipass": "^7.1.2"
+ },
+ "engines": {
+ "node": "20 || >=22"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/resolve": {
- "version": "1.22.10",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
- "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
+ "version": "1.22.11",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz",
+ "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "is-core-module": "^2.16.0",
+ "is-core-module": "^2.16.1",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
@@ -8045,9 +8228,9 @@
"license": "MIT"
},
"node_modules/rollup": {
- "version": "4.44.1",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.44.1.tgz",
- "integrity": "sha512-x8H8aPvD+xbl0Do8oez5f5o8eMS3trfCghc4HhLAnCkj7Vl0d1JWGs0UF/D886zLW2rOj2QymV/JcSSsw+XDNg==",
+ "version": "4.55.1",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.55.1.tgz",
+ "integrity": "sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -8061,26 +8244,31 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
- "@rollup/rollup-android-arm-eabi": "4.44.1",
- "@rollup/rollup-android-arm64": "4.44.1",
- "@rollup/rollup-darwin-arm64": "4.44.1",
- "@rollup/rollup-darwin-x64": "4.44.1",
- "@rollup/rollup-freebsd-arm64": "4.44.1",
- "@rollup/rollup-freebsd-x64": "4.44.1",
- "@rollup/rollup-linux-arm-gnueabihf": "4.44.1",
- "@rollup/rollup-linux-arm-musleabihf": "4.44.1",
- "@rollup/rollup-linux-arm64-gnu": "4.44.1",
- "@rollup/rollup-linux-arm64-musl": "4.44.1",
- "@rollup/rollup-linux-loongarch64-gnu": "4.44.1",
- "@rollup/rollup-linux-powerpc64le-gnu": "4.44.1",
- "@rollup/rollup-linux-riscv64-gnu": "4.44.1",
- "@rollup/rollup-linux-riscv64-musl": "4.44.1",
- "@rollup/rollup-linux-s390x-gnu": "4.44.1",
- "@rollup/rollup-linux-x64-gnu": "4.44.1",
- "@rollup/rollup-linux-x64-musl": "4.44.1",
- "@rollup/rollup-win32-arm64-msvc": "4.44.1",
- "@rollup/rollup-win32-ia32-msvc": "4.44.1",
- "@rollup/rollup-win32-x64-msvc": "4.44.1",
+ "@rollup/rollup-android-arm-eabi": "4.55.1",
+ "@rollup/rollup-android-arm64": "4.55.1",
+ "@rollup/rollup-darwin-arm64": "4.55.1",
+ "@rollup/rollup-darwin-x64": "4.55.1",
+ "@rollup/rollup-freebsd-arm64": "4.55.1",
+ "@rollup/rollup-freebsd-x64": "4.55.1",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.55.1",
+ "@rollup/rollup-linux-arm-musleabihf": "4.55.1",
+ "@rollup/rollup-linux-arm64-gnu": "4.55.1",
+ "@rollup/rollup-linux-arm64-musl": "4.55.1",
+ "@rollup/rollup-linux-loong64-gnu": "4.55.1",
+ "@rollup/rollup-linux-loong64-musl": "4.55.1",
+ "@rollup/rollup-linux-ppc64-gnu": "4.55.1",
+ "@rollup/rollup-linux-ppc64-musl": "4.55.1",
+ "@rollup/rollup-linux-riscv64-gnu": "4.55.1",
+ "@rollup/rollup-linux-riscv64-musl": "4.55.1",
+ "@rollup/rollup-linux-s390x-gnu": "4.55.1",
+ "@rollup/rollup-linux-x64-gnu": "4.55.1",
+ "@rollup/rollup-linux-x64-musl": "4.55.1",
+ "@rollup/rollup-openbsd-x64": "4.55.1",
+ "@rollup/rollup-openharmony-arm64": "4.55.1",
+ "@rollup/rollup-win32-arm64-msvc": "4.55.1",
+ "@rollup/rollup-win32-ia32-msvc": "4.55.1",
+ "@rollup/rollup-win32-x64-gnu": "4.55.1",
+ "@rollup/rollup-win32-x64-msvc": "4.55.1",
"fsevents": "~2.3.2"
}
},
@@ -8243,25 +8431,29 @@
}
},
"node_modules/send": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz",
- "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==",
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz",
+ "integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==",
"license": "MIT",
"dependencies": {
- "debug": "^4.3.5",
+ "debug": "^4.4.3",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"etag": "^1.8.1",
"fresh": "^2.0.0",
- "http-errors": "^2.0.0",
- "mime-types": "^3.0.1",
+ "http-errors": "^2.0.1",
+ "mime-types": "^3.0.2",
"ms": "^2.1.3",
"on-finished": "^2.4.1",
"range-parser": "^1.2.1",
- "statuses": "^2.0.1"
+ "statuses": "^2.0.2"
},
"engines": {
"node": ">= 18"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
}
},
"node_modules/serialize-javascript": {
@@ -8275,9 +8467,9 @@
}
},
"node_modules/serve-static": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz",
- "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==",
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz",
+ "integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==",
"license": "MIT",
"dependencies": {
"encodeurl": "^2.0.0",
@@ -8287,6 +8479,10 @@
},
"engines": {
"node": ">= 18"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
}
},
"node_modules/set-function-length": {
@@ -8348,6 +8544,7 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"shebang-regex": "^3.0.0"
@@ -8360,6 +8557,7 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -8458,6 +8656,7 @@
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
+ "dev": true,
"license": "ISC",
"engines": {
"node": ">=14"
@@ -8467,9 +8666,9 @@
}
},
"node_modules/simple-git-hooks": {
- "version": "2.13.0",
- "resolved": "https://registry.npmjs.org/simple-git-hooks/-/simple-git-hooks-2.13.0.tgz",
- "integrity": "sha512-N+goiLxlkHJlyaYEglFypzVNMaNplPAk5syu0+OPp/Bk6dwVoXF6FfOw2vO0Dp+JHsBaI+w6cm8TnFl2Hw6tDA==",
+ "version": "2.13.1",
+ "resolved": "https://registry.npmjs.org/simple-git-hooks/-/simple-git-hooks-2.13.1.tgz",
+ "integrity": "sha512-WszCLXwT4h2k1ufIXAgsbiTOazqqevFCIncOuUBZJ91DdvWcC5+OFkluWRQPrcuSYd8fjq+o2y1QfWqYMoAToQ==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
@@ -8491,9 +8690,9 @@
}
},
"node_modules/simple-update-notifier/node_modules/semver": {
- "version": "7.7.2",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
- "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
+ "version": "7.7.3",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
+ "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
"dev": true,
"license": "ISC",
"bin": {
@@ -8534,12 +8733,12 @@
"license": "MIT"
},
"node_modules/socks": {
- "version": "2.8.5",
- "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.5.tgz",
- "integrity": "sha512-iF+tNDQla22geJdTyJB1wM/qrX9DMRwWrciEPwWLPRWAUEM8sQiyxgckLxWT1f7+9VabJS0jTGGr4QgBuvi6Ww==",
+ "version": "2.8.7",
+ "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz",
+ "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==",
"license": "MIT",
"dependencies": {
- "ip-address": "^9.0.5",
+ "ip-address": "^10.0.1",
"smart-buffer": "^4.2.0"
},
"engines": {
@@ -8598,9 +8797,9 @@
}
},
"node_modules/spdx-license-ids": {
- "version": "3.0.21",
- "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz",
- "integrity": "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==",
+ "version": "3.0.22",
+ "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz",
+ "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==",
"dev": true,
"license": "CC0-1.0"
},
@@ -8613,12 +8812,6 @@
"node": ">= 10.x"
}
},
- "node_modules/sprintf-js": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz",
- "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==",
- "license": "BSD-3-Clause"
- },
"node_modules/statuses": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
@@ -8655,6 +8848,7 @@
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"eastasianwidth": "^0.2.0",
@@ -8673,6 +8867,7 @@
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
@@ -8687,6 +8882,7 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -8696,12 +8892,14 @@
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true,
"license": "MIT"
},
"node_modules/string-width-cjs/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
@@ -8789,9 +8987,9 @@
}
},
"node_modules/strip-ansi": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
- "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
+ "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
@@ -8808,6 +9006,7 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
@@ -8820,6 +9019,7 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -8875,17 +9075,16 @@
}
},
"node_modules/tar": {
- "version": "7.4.3",
- "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
- "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
+ "version": "7.5.2",
+ "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz",
+ "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==",
"dev": true,
- "license": "ISC",
+ "license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/fs-minipass": "^4.0.0",
"chownr": "^3.0.0",
"minipass": "^7.1.2",
- "minizlib": "^3.0.1",
- "mkdirp": "^3.0.1",
+ "minizlib": "^3.1.0",
"yallist": "^5.0.0"
},
"engines": {
@@ -8903,14 +9102,14 @@
}
},
"node_modules/terser": {
- "version": "5.43.1",
- "resolved": "https://registry.npmjs.org/terser/-/terser-5.43.1.tgz",
- "integrity": "sha512-+6erLbBm0+LROX2sPXlUYx/ux5PyE9K/a92Wrt6oA+WDAoFTdpHE5tCYCI5PNzq2y8df4rA+QgHLJuR4jNymsg==",
+ "version": "5.44.1",
+ "resolved": "https://registry.npmjs.org/terser/-/terser-5.44.1.tgz",
+ "integrity": "sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"@jridgewell/source-map": "^0.3.3",
- "acorn": "^8.14.0",
+ "acorn": "^8.15.0",
"commander": "^2.20.0",
"source-map-support": "~0.5.20"
},
@@ -9097,9 +9296,9 @@
"license": "MIT"
},
"node_modules/undici-types": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz",
- "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==",
+ "version": "7.16.0",
+ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
+ "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
"license": "MIT"
},
"node_modules/unicode-canonical-property-names-ecmascript": {
@@ -9127,9 +9326,9 @@
}
},
"node_modules/unicode-match-property-value-ecmascript": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.0.tgz",
- "integrity": "sha512-4IehN3V/+kkr5YeSSDDQG8QLqO26XpL2XP3GQtqwlT/QYSECAwFztxVHjlbh0+gjJ3XmNLS0zDsbgs9jWKExLg==",
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.1.tgz",
+ "integrity": "sha512-JQ84qTuMg4nVkx8ga4A16a1epI9H6uTXAknqxkGF/aFfRLw1xC/Bp24HNLaZhHSkWd3+84t8iXnp1J0kYcZHhg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -9137,9 +9336,9 @@
}
},
"node_modules/unicode-property-aliases-ecmascript": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz",
- "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==",
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.2.0.tgz",
+ "integrity": "sha512-hpbDzxUY9BFwX+UeBnxv3Sh1q7HFxj48DTmXchNgRa46lO8uj3/1iEn3MiNUYTg1g9ctIqXCCERn8gYZhHC5lQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -9160,9 +9359,9 @@
}
},
"node_modules/update-browserslist-db": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz",
- "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==",
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz",
+ "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==",
"dev": true,
"funding": [
{
@@ -9201,9 +9400,9 @@
}
},
"node_modules/usb": {
- "version": "2.15.0",
- "resolved": "https://registry.npmjs.org/usb/-/usb-2.15.0.tgz",
- "integrity": "sha512-BA9r7PFxyYp99wps1N70lIqdPb2Utcl2KkWohDtWUmhDBeM5hDH1Zl/L/CZvWxd5W3RUCNm1g+b+DEKZ6cHzqg==",
+ "version": "2.16.0",
+ "resolved": "https://registry.npmjs.org/usb/-/usb-2.16.0.tgz",
+ "integrity": "sha512-jD88fvzDViMDH5KmmNJgzMBDj/95bDTt6+kBNaNxP4G98xUTnDMiLUY2CYmToba6JAFhM9VkcaQuxCNRLGR7zg==",
"hasInstallScript": true,
"license": "MIT",
"optional": true,
@@ -9217,9 +9416,9 @@
}
},
"node_modules/usb/node_modules/node-addon-api": {
- "version": "8.4.0",
- "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-8.4.0.tgz",
- "integrity": "sha512-D9DI/gXHvVmjHS08SVch0Em8G5S1P+QWtU31appcKT/8wFSPRcdHadIFSAntdMMVM5zz+/DL+bL/gz3UDppqtg==",
+ "version": "8.5.0",
+ "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-8.5.0.tgz",
+ "integrity": "sha512-/bRZty2mXUIFY/xU5HLvveNHlswNJej+RnxBjOMkidWfwZzgTbPG1E3K5TOxRLOR+5hX7bSofy8yf1hZevMS8A==",
"license": "MIT",
"optional": true,
"engines": {
@@ -9275,6 +9474,7 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^2.0.0"
@@ -9385,44 +9585,58 @@
"node": ">=0.10.0"
}
},
+ "node_modules/worker-factory": {
+ "version": "7.0.46",
+ "resolved": "https://registry.npmjs.org/worker-factory/-/worker-factory-7.0.46.tgz",
+ "integrity": "sha512-Sr1hq2FMgNa04UVhYQacsw+i58BtMimzDb4+CqYphZ97OfefRpURu0UZ+JxMr/H36VVJBfuVkxTK7MytsanC3w==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/runtime": "^7.28.4",
+ "fast-unique-numbers": "^9.0.24",
+ "tslib": "^2.8.1"
+ }
+ },
"node_modules/worker-timers": {
- "version": "7.1.8",
- "resolved": "https://registry.npmjs.org/worker-timers/-/worker-timers-7.1.8.tgz",
- "integrity": "sha512-R54psRKYVLuzff7c1OTFcq/4Hue5Vlz4bFtNEIarpSiCYhpifHU3aIQI29S84o1j87ePCYqbmEJPqwBTf+3sfw==",
+ "version": "8.0.27",
+ "resolved": "https://registry.npmjs.org/worker-timers/-/worker-timers-8.0.27.tgz",
+ "integrity": "sha512-+7ptDduAWj6Wd09Ga0weRFRx/MUwLhExazn+zu3IrwF0N2U2FPqFRR5W3Qz4scnI3cOILzdIEEytIJ2vbeD9Gw==",
"license": "MIT",
"dependencies": {
- "@babel/runtime": "^7.24.5",
- "tslib": "^2.6.2",
- "worker-timers-broker": "^6.1.8",
- "worker-timers-worker": "^7.0.71"
+ "@babel/runtime": "^7.28.4",
+ "tslib": "^2.8.1",
+ "worker-timers-broker": "^8.0.13",
+ "worker-timers-worker": "^9.0.11"
}
},
"node_modules/worker-timers-broker": {
- "version": "6.1.8",
- "resolved": "https://registry.npmjs.org/worker-timers-broker/-/worker-timers-broker-6.1.8.tgz",
- "integrity": "sha512-FUCJu9jlK3A8WqLTKXM9E6kAmI/dR1vAJ8dHYLMisLNB/n3GuaFIjJ7pn16ZcD1zCOf7P6H62lWIEBi+yz/zQQ==",
+ "version": "8.0.13",
+ "resolved": "https://registry.npmjs.org/worker-timers-broker/-/worker-timers-broker-8.0.13.tgz",
+ "integrity": "sha512-PZnHHmqOY5oMKQPyfJhqPI9cb3QFmwD3lCIc/Zip6sShpfG2rvvCVDl0xeabGIspiEpP5exNNIlTUHjgP5VAcg==",
"license": "MIT",
"dependencies": {
- "@babel/runtime": "^7.24.5",
- "fast-unique-numbers": "^8.0.13",
- "tslib": "^2.6.2",
- "worker-timers-worker": "^7.0.71"
+ "@babel/runtime": "^7.28.4",
+ "broker-factory": "^3.1.11",
+ "fast-unique-numbers": "^9.0.24",
+ "tslib": "^2.8.1",
+ "worker-timers-worker": "^9.0.11"
}
},
"node_modules/worker-timers-worker": {
- "version": "7.0.71",
- "resolved": "https://registry.npmjs.org/worker-timers-worker/-/worker-timers-worker-7.0.71.tgz",
- "integrity": "sha512-ks/5YKwZsto1c2vmljroppOKCivB/ma97g9y77MAAz2TBBjPPgpoOiS1qYQKIgvGTr2QYPT3XhJWIB6Rj2MVPQ==",
+ "version": "9.0.11",
+ "resolved": "https://registry.npmjs.org/worker-timers-worker/-/worker-timers-worker-9.0.11.tgz",
+ "integrity": "sha512-pArb5xtgHWImYpXhjg1OFv7JFG0ubmccb73TFoXHXjG830fFj+16N57q9YeBnZX52dn+itRrMoJZ9HaZBVzDaA==",
"license": "MIT",
"dependencies": {
- "@babel/runtime": "^7.24.5",
- "tslib": "^2.6.2"
+ "@babel/runtime": "^7.28.4",
+ "tslib": "^2.8.1",
+ "worker-factory": "^7.0.46"
}
},
"node_modules/wrap-ansi": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.1.0",
@@ -9441,6 +9655,7 @@
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
@@ -9458,6 +9673,7 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -9467,12 +9683,14 @@
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true,
"license": "MIT"
},
"node_modules/wrap-ansi-cjs/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
@@ -9487,6 +9705,7 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
@@ -9496,9 +9715,10 @@
}
},
"node_modules/wrap-ansi/node_modules/ansi-styles": {
- "version": "6.2.1",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
- "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
+ "version": "6.2.3",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+ "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
+ "dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
@@ -9508,9 +9728,9 @@
}
},
"node_modules/ws": {
- "version": "8.18.3",
- "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
- "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
+ "version": "8.19.0",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
+ "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
@@ -9545,71 +9765,52 @@
"license": "ISC"
},
"node_modules/yargs": {
- "version": "17.7.2",
- "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
- "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
+ "version": "18.0.0",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz",
+ "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==",
"license": "MIT",
"dependencies": {
- "cliui": "^8.0.1",
+ "cliui": "^9.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
- "require-directory": "^2.1.1",
- "string-width": "^4.2.3",
+ "string-width": "^7.2.0",
"y18n": "^5.0.5",
- "yargs-parser": "^21.1.1"
+ "yargs-parser": "^22.0.0"
},
"engines": {
- "node": ">=12"
+ "node": "^20.19.0 || ^22.12.0 || >=23"
}
},
"node_modules/yargs-parser": {
- "version": "21.1.1",
- "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
- "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
+ "version": "22.0.0",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz",
+ "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==",
"license": "ISC",
"engines": {
- "node": ">=12"
- }
- },
- "node_modules/yargs/node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "license": "MIT",
- "engines": {
- "node": ">=8"
+ "node": "^20.19.0 || ^22.12.0 || >=23"
}
},
"node_modules/yargs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "version": "10.6.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
+ "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"license": "MIT"
},
"node_modules/yargs/node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
+ "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"license": "MIT",
"dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
+ "emoji-regex": "^10.3.0",
+ "get-east-asian-width": "^1.0.0",
+ "strip-ansi": "^7.1.0"
},
"engines": {
- "node": ">=8"
- }
- },
- "node_modules/yargs/node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
+ "node": ">=18"
},
- "engines": {
- "node": ">=8"
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/yocto-queue": {
diff --git a/package.json b/package.json
index 6f3c705d84..1c099fd309 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "OpenRowingMonitor",
- "version": "0.9.6",
+ "version": "0.9.7",
"description": "A free and open source performance monitor for rowing machines",
"main": "app/server.js",
"author": "Jaap van Ekris",
@@ -29,43 +29,43 @@
},
"//fix1Comment": "We install lit@2.8.0 as lit@3.0.0 breaks the webpage displaying metrics",
"dependencies": {
- "@markw65/fit-file-writer": "^0.1.6",
+ "@markw65/fit-file-writer": "^0.1.7",
"ble-host": "^1.0.3",
- "chart.js": "^4.5.0",
+ "chart.js": "^4.5.1",
"chartjs-plugin-datalabels": "^2.2.0",
- "finalhandler": "^2.1.0",
- "incyclist-ant-plus": "^0.3.5",
+ "finalhandler": "^2.1.1",
+ "incyclist-ant-plus": "^0.3.6",
"lit": "^2.8.0",
"loglevel": "^1.9.1",
- "mqtt": "^5.13.1",
+ "mqtt": "^5.14.1",
"node-fetch": "^3.3.2",
"nosleep.js": "0.12.0",
"pigpio": "3.3.1",
- "replace-in-file": "^8.3.0",
- "serve-static": "^2.2.0",
- "ws": "^8.18.3"
+ "replace-in-file": "^8.4.0",
+ "serve-static": "^2.2.1",
+ "ws": "^8.19.0"
},
"devDependencies": {
- "@babel/eslint-parser": "^7.27.5",
- "@babel/plugin-proposal-decorators": "^7.23.9",
- "@babel/preset-env": "^7.27.2",
- "@eslint/js": "^9.30.0",
- "@rollup/plugin-babel": "^6.0.4",
- "@rollup/plugin-commonjs": "^28.0.6",
- "@rollup/plugin-node-resolve": "^16.0.0",
+ "@babel/eslint-parser": "^7.28.5",
+ "@babel/plugin-proposal-decorators": "^7.28.0",
+ "@babel/preset-env": "^7.28.5",
+ "@eslint/js": "^9.39.2",
+ "@rollup/plugin-babel": "^6.1.0",
+ "@rollup/plugin-commonjs": "^29.0.0",
+ "@rollup/plugin-node-resolve": "^16.0.3",
"@rollup/plugin-terser": "^0.4.4",
- "@stylistic/eslint-plugin": "^5.1.0",
- "@web/rollup-plugin-html": "^2.1.2",
- "eslint": "^9.30.0",
- "globals": "^16.2.0",
+ "@stylistic/eslint-plugin": "^5.6.1",
+ "@web/rollup-plugin-html": "^3.0.0",
+ "eslint": "^9.39.2",
+ "globals": "^17.0.0",
"http2-proxy": "5.0.53",
- "markdownlint-cli2": "^0.18.1",
- "nodemon": "^3.0.3",
+ "markdownlint-cli2": "^0.20.0",
+ "nodemon": "^3.1.11",
"npm-run-all": "4.1.5",
- "rollup": "^4.44.1",
+ "rollup": "^4.55.1",
"rollup-plugin-summary": "^3.0.0",
- "simple-git-hooks": "^2.9.0",
- "tar": "^7.4.3",
+ "simple-git-hooks": "^2.13.1",
+ "tar": "^7.5.2",
"uvu": "^0.5.6"
}
}