Skip to content

Commit 83fa931

Browse files
authored
Fix bug in reporting latency (#489)
1 parent 220c8b6 commit 83fa931

File tree

2 files changed

+20
-4
lines changed

2 files changed

+20
-4
lines changed

NeuralAmpModeler/NeuralAmpModeler.cpp

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -366,6 +366,7 @@ void NeuralAmpModeler::OnReset()
366366
// If there is a model or IR loaded, they need to be checked for resampling.
367367
_ResetModelAndIR(sampleRate, GetBlockSize());
368368
mToneStack->Reset(sampleRate, maxBlockSize);
369+
_UpdateLatency();
369370
}
370371

371372
void NeuralAmpModeler::OnIdle()
@@ -533,7 +534,7 @@ void NeuralAmpModeler::_ApplyDSPStaging()
533534
mModel = nullptr;
534535
mNAMPath.Set("");
535536
mShouldRemoveModel = false;
536-
SetLatency(0);
537+
_UpdateLatency();
537538
}
538539
if (mShouldRemoveIR)
539540
{
@@ -548,7 +549,7 @@ void NeuralAmpModeler::_ApplyDSPStaging()
548549
mModel = std::move(mStagedModel);
549550
mStagedModel = nullptr;
550551
mNewModelLoadedInDSP = true;
551-
SetLatency(mModel->GetLatency());
552+
_UpdateLatency();
552553
}
553554
if (mStagedIR != nullptr)
554555
{
@@ -881,6 +882,17 @@ int NeuralAmpModeler::_UnserializeStateLegacy_0_7_9(const IByteChunk& chunk, int
881882
return pos;
882883
}
883884

885+
void NeuralAmpModeler::_UpdateLatency()
886+
{
887+
int latency = 0;
888+
if (mModel)
889+
{
890+
latency += mModel->GetLatency();
891+
}
892+
// Other things that add latency here...
893+
SetLatency(latency);
894+
}
895+
884896
void NeuralAmpModeler::_UpdateMeters(sample** inputPointer, sample** outputPointer, const size_t nFrames,
885897
const size_t nChansIn, const size_t nChansOut)
886898
{

NeuralAmpModeler/NeuralAmpModeler.h

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ class ResamplingNAM : public nam::DSP
126126
// We can afford to be careful
127127
throw std::runtime_error("More frames were provided than the max expected!");
128128

129-
if (GetExpectedSampleRate() == GetEncapsulatedSampleRate())
129+
if (!NeedToResample())
130130
{
131131
mEncapsulated->process(input, output, num_frames);
132132
mEncapsulated->finalize_(num_frames);
@@ -156,7 +156,7 @@ class ResamplingNAM : public nam::DSP
156156
mFinalized = true;
157157
};
158158

159-
int GetLatency() const { return mResampler.GetLatency(); };
159+
int GetLatency() const { return NeedToResample() ? mResampler.GetLatency() : 0; };
160160

161161
void Reset(const double sampleRate, const int maxBlockSize)
162162
{
@@ -182,6 +182,7 @@ class ResamplingNAM : public nam::DSP
182182
double GetEncapsulatedSampleRate() const { return GetNAMSampleRate(mEncapsulated); };
183183

184184
private:
185+
bool NeedToResample() const { return GetExpectedSampleRate() != GetEncapsulatedSampleRate(); };
185186
// The encapsulated NAM
186187
std::unique_ptr<nam::DSP> mEncapsulated;
187188
// The processing for NAM is a little weird--there's a call to .finalize_() that's expected.
@@ -271,6 +272,9 @@ class NeuralAmpModeler final : public iplug::Plugin
271272
int _UnserializeStateLegacy_0_7_9(const iplug::IByteChunk& chunk, int startPos);
272273
// And other legacy unserializations if/as needed...
273274

275+
// Make sure that the latency is reported correctly.
276+
void _UpdateLatency();
277+
274278
// Update level meters
275279
// Called within ProcessBlock().
276280
// Assume _ProcessInput() and _ProcessOutput() were run immediately before.

0 commit comments

Comments
 (0)