Fix bug reporting latency (#489)
sdatkinson authored Jul 29, 2024
1 parent 220c8b6 commit 83fa931
Showing 2 changed files with 20 additions and 4 deletions.
16 changes: 14 additions & 2 deletions NeuralAmpModeler/NeuralAmpModeler.cpp
@@ -366,6 +366,7 @@ void NeuralAmpModeler::OnReset()
   // If there is a model or IR loaded, they need to be checked for resampling.
   _ResetModelAndIR(sampleRate, GetBlockSize());
   mToneStack->Reset(sampleRate, maxBlockSize);
+  _UpdateLatency();
 }

 void NeuralAmpModeler::OnIdle()
@@ -533,7 +534,7 @@ void NeuralAmpModeler::_ApplyDSPStaging()
     mModel = nullptr;
     mNAMPath.Set("");
     mShouldRemoveModel = false;
-    SetLatency(0);
+    _UpdateLatency();
   }
   if (mShouldRemoveIR)
   {
@@ -548,7 +549,7 @@ void NeuralAmpModeler::_ApplyDSPStaging()
     mModel = std::move(mStagedModel);
     mStagedModel = nullptr;
     mNewModelLoadedInDSP = true;
-    SetLatency(mModel->GetLatency());
+    _UpdateLatency();
   }
   if (mStagedIR != nullptr)
   {
@@ -881,6 +882,17 @@ int NeuralAmpModeler::_UnserializeStateLegacy_0_7_9(const IByteChunk& chunk, int
   return pos;
 }

+void NeuralAmpModeler::_UpdateLatency()
+{
+  int latency = 0;
+  if (mModel)
+  {
+    latency += mModel->GetLatency();
+  }
+  // Other things that add latency here...
+  SetLatency(latency);
+}
+
 void NeuralAmpModeler::_UpdateMeters(sample** inputPointer, sample** outputPointer, const size_t nFrames,
                                      const size_t nChansIn, const size_t nChansOut)
 {
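The new private helper gives the plugin a single place where host-reported latency is computed, instead of scattering SetLatency() calls across the load/remove/reset paths. A minimal, self-contained sketch of that pattern follows; the stubbed host and model types are illustrative, and only the accumulate-then-report shape comes from the diff:

    #include <iostream>
    #include <memory>

    // Stand-in for the encapsulated model; its GetLatency() plays the role
    // of ResamplingNAM::GetLatency() in the real plugin.
    struct ModelStub
    {
      int GetLatency() const { return 64; } // hypothetical resampler latency, in samples
    };

    struct PluginSketch
    {
      std::unique_ptr<ModelStub> mModel;

      // Stand-in for the host-facing call (iplug::Plugin::SetLatency in the plugin).
      void SetLatency(int samples) { std::cout << "host latency: " << samples << "\n"; }

      // Mirrors the new helper: sum every latency source, then report once.
      void UpdateLatency()
      {
        int latency = 0;
        if (mModel)
          latency += mModel->GetLatency();
        // Other latency sources would be summed here...
        SetLatency(latency);
      }
    };

    int main()
    {
      PluginSketch p;
      p.UpdateLatency();                        // no model loaded -> host latency: 0
      p.mModel = std::make_unique<ModelStub>();
      p.UpdateLatency();                        // model loaded    -> host latency: 64
    }

With every source funneled through one function, adding a future contributor to the total (the "other things" the comment anticipates) only means extending the sum, not hunting down each SetLatency() call site.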
8 changes: 6 additions & 2 deletions NeuralAmpModeler/NeuralAmpModeler.h
@@ -126,7 +126,7 @@ class ResamplingNAM : public nam::DSP
       // We can afford to be careful
      throw std::runtime_error("More frames were provided than the max expected!");

-    if (GetExpectedSampleRate() == GetEncapsulatedSampleRate())
+    if (!NeedToResample())
     {
       mEncapsulated->process(input, output, num_frames);
       mEncapsulated->finalize_(num_frames);
@@ -156,7 +156,7 @@
     mFinalized = true;
   };

-  int GetLatency() const { return mResampler.GetLatency(); };
+  int GetLatency() const { return NeedToResample() ? mResampler.GetLatency() : 0; };

   void Reset(const double sampleRate, const int maxBlockSize)
   {
@@ -182,6 +182,7 @@
   double GetEncapsulatedSampleRate() const { return GetNAMSampleRate(mEncapsulated); };

 private:
+  bool NeedToResample() const { return GetExpectedSampleRate() != GetEncapsulatedSampleRate(); };
   // The encapsulated NAM
   std::unique_ptr<nam::DSP> mEncapsulated;
   // The processing for NAM is a little weird--there's a call to .finalize_() that's expected.
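Together with the GetLatency() change above, the new NeedToResample() predicate gates reported latency on whether resampling is actually engaged: previously the wrapper reported mResampler.GetLatency() even when the host and model rates matched and the resampler was bypassed in process(). A compilable sketch of the gating, with placeholder rates and a hypothetical latency value standing in for the real getters:

    #include <iostream>

    struct ResamplingSketch
    {
      double mExpectedRate = 48000.0;     // rate the host runs at
      double mEncapsulatedRate = 48000.0; // rate the encapsulated model expects
      int mResamplerLatency = 128;        // hypothetical resampling-filter latency, in samples

      bool NeedToResample() const { return mExpectedRate != mEncapsulatedRate; }
      // Latency is only incurred when the resampling path is active.
      int GetLatency() const { return NeedToResample() ? mResamplerLatency : 0; }
    };

    int main()
    {
      ResamplingSketch r;
      std::cout << r.GetLatency() << "\n"; // 0: rates match, resampler bypassed
      r.mEncapsulatedRate = 44100.0;
      std::cout << r.GetLatency() << "\n"; // 128: resampling engaged, latency reported
    }

The predicate also documents intent at the process() call site: `if (!NeedToResample())` reads more directly than comparing the two sample-rate getters inline.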
@@ -271,6 +272,9 @@ class NeuralAmpModeler final : public iplug::Plugin
   int _UnserializeStateLegacy_0_7_9(const iplug::IByteChunk& chunk, int startPos);
   // And other legacy unserializations if/as needed...

+  // Make sure that the latency is reported correctly.
+  void _UpdateLatency();
+
   // Update level meters
   // Called within ProcessBlock().
   // Assume _ProcessInput() and _ProcessOutput() were run immediately before.
