#include "PluginProcessor.h"
#include "PluginEditor.h"

// ============================================================
// Voice infrastructure

namespace
{
    constexpr float kMorphMin = 0.02f;          // morph position is clamped just inside its endpoints
    constexpr float kMorphMax = 0.98f;
    constexpr float kMorphSmoothCoeff = 0.18f;  // per-sample one-pole smoothing of the morph control
}

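// VoiceParams is a plain per-block snapshot of everything a voice needs from the
// parameter tree. processBlock() fills one instance from the APVTS and pushes it to
// every voice, so the voices never read the APVTS directly on the audio thread.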
struct VoiceParams
{
    juce::ADSR::Parameters ampParams;
    juce::ADSR::Parameters filterParams;
    float cutoffBase { 8000.0f };
    float filterEnvAmount { 0.0f };
    std::array<int, 3> slotIndices { { 0, 1, 2 } };
    float staticMorph { 0.0f };
    float perVoiceGain { 0.5f };
    bool osc2Active { true };
    float osc2Detune { 1.003f };
};

class WavetableSound : public juce::SynthesiserSound
{
public:
    bool appliesToNote (int) override { return true; }
    bool appliesToChannel (int) override { return true; }
};

class WavetableVoice : public juce::SynthesiserVoice
{
public:
    explicit WavetableVoice (WavetableSynthAudioProcessor& proc) : processor (proc)
    {
        voiceFilter.setType (juce::dsp::StateVariableTPTFilterType::lowpass);
    }

    bool canPlaySound (juce::SynthesiserSound* s) override
    {
        return dynamic_cast<WavetableSound*> (s) != nullptr;
    }

    void setParams (const VoiceParams& vp)
    {
        params = vp;
        ampEnv.setParameters (params.ampParams);
        filterEnv.setParameters (params.filterParams);
        pendingSlotUpdate = true;
        secondaryFrequency = currentFrequency * params.osc2Detune;
        updatePhaseIncrement();
        updateMipLevel();
    }

    void setMorphBuffer (const float* ptr) { morphBuffer = ptr; }

    void startNote (int midiNoteNumber, float velocity, juce::SynthesiserSound*, int) override
    {
        juce::ignoreUnused (velocity);
        const float freq = (float) juce::MidiMessage::getMidiNoteInHertz (midiNoteNumber);
        currentFrequency = freq;
        secondaryFrequency = freq * params.osc2Detune;
        updatePhaseIncrement();
        updateMipLevel();
        updateSlotWaves();

        primaryPhase = 0.0f;
        secondaryPhase = 0.0f;
        ampEnv.noteOn();
        filterEnv.noteOn();
        active = true;
        voiceFilter.reset();
    }

    void stopNote (float velocity, bool allowTailOff) override
    {
        juce::ignoreUnused (velocity);

        if (allowTailOff)
        {
            ampEnv.noteOff();
            filterEnv.noteOff();
        }
        else
        {
            ampEnv.reset();
            filterEnv.reset();
            clearCurrentNote();
            active = false;
            currentFrequency = 0.0f;
            secondaryFrequency = 0.0f;
            primaryIncrement = 0.0f;
            secondaryIncrement = 0.0f;
        }
    }

    void pitchWheelMoved (int) override {}
    void controllerMoved (int, int) override {}

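    // Per-sample flow inside renderNextBlock():
    //   morph value -> wavetable lookup for osc 1 (plus the detuned osc 2, averaged in)
    //   -> filter envelope modulates the SVF cutoff -> amp envelope scales the result
    //   -> the output is accumulated into both channels of the shared buffer.
    // The morph range is split in half: 0.0..0.5 crossfades slot A into slot B,
    // 0.5..1.0 crossfades slot B into slot C.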
    void renderNextBlock (juce::AudioBuffer<float>& buffer, int startSample, int numSamples) override
    {
        if (! active || ! isVoiceActive()) return;
        if (pendingSlotUpdate) updateSlotWaves();

        const int channels = buffer.getNumChannels();
        float* left = buffer.getWritePointer (0, startSample);
        float* right = channels > 1 ? buffer.getWritePointer (1, startSample) : nullptr;
        const float* morph = morphBuffer != nullptr ? morphBuffer + startSample : nullptr;

        for (int i = 0; i < numSamples; ++i)
        {
            const float morphValue = juce::jlimit (kMorphMin, kMorphMax,
                                                   morph != nullptr ? morph[i] : params.staticMorph);
            const float framePos = morphValue * (float) (WavetableSynthAudioProcessor::kMorphFrames - 1);

            // Lower half of the morph range crossfades slot A -> B, upper half B -> C.
            const int segment = morphValue < 0.5f ? 0 : 1;
            const float segAlpha = segment == 0
                                       ? juce::jlimit (0.0f, 1.0f, morphValue * 2.0f)
                                       : juce::jlimit (0.0f, 1.0f, (morphValue - 0.5f) * 2.0f);

            const float primaryMain = sampleWave (slotWaves[segment], framePos, primaryPhase);
            const float primaryNext = sampleWave (slotWaves[segment + 1], framePos, primaryPhase);
            float waveSample = primaryMain + segAlpha * (primaryNext - primaryMain);

            if (params.osc2Active)
            {
                const float secondaryMain = sampleWave (slotWaves[segment], framePos, secondaryPhase);
                const float secondaryNext = sampleWave (slotWaves[segment + 1], framePos, secondaryPhase);
                const float osc2Sample = secondaryMain + segAlpha * (secondaryNext - secondaryMain);
                waveSample = 0.5f * (waveSample + osc2Sample);
            }

            const float envValue = ampEnv.getNextSample();
            const float modValue = filterEnv.getNextSample();
            const float cutoff = juce::jlimit (20.0f, 20000.0f,
                                               params.cutoffBase + params.filterEnvAmount
                                                                     * modValue * (20000.0f - params.cutoffBase));
            voiceFilter.setCutoffFrequency (cutoff);

            const float filtered = voiceFilter.processSample (0, waveSample);
            const float output = params.perVoiceGain * envValue * filtered;

            left[i] += output;
            if (right != nullptr) right[i] += output;

            advancePhase (primaryPhase, primaryIncrement);
            if (params.osc2Active)
                advancePhase (secondaryPhase, secondaryIncrement);
        }

        if (! ampEnv.isActive())
        {
            clearCurrentNote();
            active = false;
            currentFrequency = 0.0f;
            secondaryFrequency = 0.0f;
            primaryIncrement = 0.0f;
            secondaryIncrement = 0.0f;
        }
    }

    void setCurrentPlaybackSampleRate (double newRate) override
    {
        juce::SynthesiserVoice::setCurrentPlaybackSampleRate (newRate);

        // The block size here is nominal: the filter is driven one sample at a time via processSample().
        const juce::dsp::ProcessSpec spec { newRate, 32u, 1u };
        voiceFilter.reset();
        voiceFilter.prepare (spec);
        voiceFilter.setType (juce::dsp::StateVariableTPTFilterType::lowpass);
        ampEnv.setSampleRate (newRate);
        filterEnv.setSampleRate (newRate);
        updatePhaseIncrement();
    }

private:
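    // Wavetable lookup is two nested linear interpolations: across adjacent morph
    // frames (so morph sweeps stay continuous) and across adjacent table samples
    // (for the fractional phase), always reading the voice's current mip level.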
    float sampleWave (const WaveMorph* set, float framePos, float phaseValue) const
    {
        if (set == nullptr) return 0.0f;

        const float clamped = juce::jlimit (0.0f,
                                            (float) (WavetableSynthAudioProcessor::kMorphFrames - 1),
                                            framePos);
        const int frameIdx0 = (int) clamped;
        const int frameIdx1 = juce::jmin (frameIdx0 + 1, WavetableSynthAudioProcessor::kMorphFrames - 1);
        const float frameFrac = clamped - (float) frameIdx0;

        const auto& table0 = set->frames[(size_t) frameIdx0].mip[(size_t) currentMip];
        const auto& table1 = set->frames[(size_t) frameIdx1].mip[(size_t) currentMip];

        // Interpolate adjacent frames so morph sweeps remain continuous.
        const float s0 = sampleTable (table0, phaseValue);
        const float s1 = sampleTable (table1, phaseValue);
        return s0 + frameFrac * (s1 - s0);
    }

    float sampleTable (const std::vector<float>& table, float phaseValue) const
    {
        if (table.empty()) return 0.0f;

        // The masked wrap of the integer index assumes kTableSize is a power of two.
        const int i0 = (int) phaseValue & (WavetableSynthAudioProcessor::kTableSize - 1);
        const int i1 = (i0 + 1) & (WavetableSynthAudioProcessor::kTableSize - 1);
        const float frac = phaseValue - (float) i0;
        const float s0 = table[(size_t) i0];
        const float s1 = table[(size_t) i1];
        return s0 + frac * (s1 - s0);
    }

    void advancePhase (float& phaseValue, float increment)
    {
        phaseValue += increment;
        if (phaseValue >= (float) WavetableSynthAudioProcessor::kTableSize)
            phaseValue -= (float) WavetableSynthAudioProcessor::kTableSize;
    }

    void updatePhaseIncrement()
    {
        const double sr = getSampleRate();
        if (sr <= 0.0) return;

        // Phase advances in table samples per audio sample: tableSize * f0 / sampleRate.
        primaryIncrement = (float) ((double) WavetableSynthAudioProcessor::kTableSize * (double) currentFrequency / sr);

        if (params.osc2Active)
            secondaryIncrement = (float) ((double) WavetableSynthAudioProcessor::kTableSize * (double) secondaryFrequency / sr);
        else
            secondaryIncrement = 0.0f;
    }

    void updateMipLevel()
    {
        // Band-limit against the higher of the two oscillator pitches when osc 2 is active.
        const float freqForMip = params.osc2Active
                                     ? juce::jmax (currentFrequency, secondaryFrequency)
                                     : currentFrequency;
        currentMip = juce::jlimit (0, WavetableSynthAudioProcessor::kMipLevels - 1,
                                   processor.chooseMipLevel (freqForMip));
    }

    void updateSlotWaves()
    {
        for (int i = 0; i < 3; ++i)
            slotWaves[i] = processor.getWavePtr (params.slotIndices[(size_t) i]);

        pendingSlotUpdate = false;
    }

    WavetableSynthAudioProcessor& processor;
    VoiceParams params;
    const WaveMorph* slotWaves[3] { nullptr, nullptr, nullptr };

    juce::ADSR ampEnv;
    juce::ADSR filterEnv;
    juce::dsp::StateVariableTPTFilter<float> voiceFilter;

    const float* morphBuffer { nullptr };

    float primaryPhase { 0.0f };
    float secondaryPhase { 0.0f };
    float primaryIncrement { 0.0f };
    float secondaryIncrement { 0.0f };
    float currentFrequency { 0.0f };
    float secondaryFrequency { 0.0f };
    int currentMip { 0 };
    bool active { false };
    bool pendingSlotUpdate { false };
};

// ============================================================
// Utilities

void WavetableSynthAudioProcessor::normalize (std::vector<float>& t)
{
    float mx = 0.0f;
    for (auto v : t) mx = juce::jmax (mx, std::abs (v));
    if (mx > 0.0f)
        for (auto& v : t)
            v /= mx;
}

void WavetableSynthAudioProcessor::addSine (std::vector<float>& t, int harmonic, float amp)
{
    const float k = (float) harmonic;
    const int N = (int) t.size();
    for (int n = 0; n < N; ++n)
        t[(size_t) n] += amp * std::sin (juce::MathConstants<float>::twoPi * k * (float) n / (float) N);
}

void WavetableSynthAudioProcessor::removeDC (std::vector<float>& t)
{
    if (t.empty()) return;
    double sum = 0.0;
    for (auto v : t) sum += (double) v;
    const float mean = (float) (sum / (double) t.size());
    for (auto& v : t) v -= mean;
}

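// startNote() resets both oscillator phases to 0, so rotating each table to begin at
// a zero crossing keeps the first sample of a freshly triggered note at (or near)
// zero and avoids an onset click.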
void WavetableSynthAudioProcessor::enforceZeroStart (std::vector<float>& t)
{
    if (t.empty()) return;

    // Find the first zero crossing and rotate the table so it starts there;
    // if no crossing is found the table is left unrotated.
    int zeroIndex = 0;
    for (int i = 1; i < (int) t.size(); ++i)
    {
        const float a = t[(size_t) (i - 1)];
        const float b = t[(size_t) i];
        if ((a <= 0.0f && b >= 0.0f) || (a >= 0.0f && b <= 0.0f))
        {
            zeroIndex = i;
            break;
        }
    }

    if (zeroIndex > 0 && zeroIndex < (int) t.size())
        std::rotate (t.begin(), t.begin() + zeroIndex, t.end());

    // Force the first sample to exactly zero either way.
    t[0] = 0.0f;
}

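// Each WaveMorph holds kMorphFrames frames, and each frame holds kMipLevels
// progressively band-limited copies of the same kTableSize-sample cycle.
// For frame alpha (0..1) and mip level L the highest harmonic kept is
//     floor ((kTableSize / 2) * 0.5^L * clamp (alpha + 0.05, 0.1, 1.0))
// (never fewer than one harmonic), so later frames are brighter and higher mip
// levels are duller, which is what chooseMipLevel() relies on to avoid aliasing.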
WaveMorph WavetableSynthAudioProcessor::buildAdditiveMorph (std::function<float(int)> ampFn,
                                                            bool oddOnly, float altPhase)
{
    WaveMorph morph {};
    const int N = kTableSize;
    const int NyquistHarmonic = N / 2;

    for (int frame = 0; frame < kMorphFrames; ++frame)
    {
        const float frameAlpha = (float) frame / (float) juce::jmax (1, kMorphFrames - 1);

        for (int level = 0; level < kMipLevels; ++level)
        {
            auto& table = morph.frames[(size_t) frame].mip[(size_t) level];
            table.assign ((size_t) N, 0.0f);

            // Each mip level halves the harmonic budget; each frame gets progressively brighter.
            const float levelAttenuation = std::pow (0.5f, (float) level);
            const int harmonicLimit = juce::jmax (1, (int) std::floor ((float) NyquistHarmonic
                                                                       * levelAttenuation
                                                                       * juce::jlimit (0.1f, 1.0f, frameAlpha + 0.05f)));

            for (int h = 1; h <= harmonicLimit; ++h)
            {
                if (oddOnly && (h % 2 == 0)) continue;

                float a = ampFn (h);
                if (a == 0.0f) continue;

                // altPhase <= 0 alternates the sign of successive odd harmonics
                // (the triangle preset relies on this).
                if (altPhase <= 0.0f && ((h / 2) % 2 != 0))
                    a = -a;

                addSine (table, h, a);
            }

            removeDC (table);
            enforceZeroStart (table);
            normalize (table);
        }
    }

    return morph;
}

// ---- preset wave builders ----

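// Each builder below only supplies the harmonic-amplitude function; per-frame
// brightness and per-mip band-limiting are applied uniformly by buildAdditiveMorph().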
WaveMorph WavetableSynthAudioProcessor::makeSine()
{
    return buildAdditiveMorph ([](int h) { return (h == 1) ? 1.0f : 0.0f; });
}

WaveMorph WavetableSynthAudioProcessor::makeSaw()
{
    // Full 1/h series; buildAdditiveMorph's per-frame harmonic limit keeps even the brightest frame band-limited.
    return buildAdditiveMorph ([](int h) { return 1.0f / (float) h; }, false, +1.0f);
}

WaveMorph WavetableSynthAudioProcessor::makeSquare()
{
    // odd harmonics, 1/h
    return buildAdditiveMorph ([](int h) { return 1.0f / (float) h; }, true, +1.0f);
}

WaveMorph WavetableSynthAudioProcessor::makeTriangle()
{
    // odd harmonics, 1/h^2, with alternating signs
    return buildAdditiveMorph ([](int h) { return 1.0f / ((float) h * (float) h); }, true, -1.0f);
}

WaveMorph WavetableSynthAudioProcessor::makePulse (float duty)
{
    duty = juce::jlimit (0.01f, 0.99f, duty);

    // Fourier series of a pulse wave: amp_k = (2 / (k * pi)) * sin (k * pi * duty)
    return buildAdditiveMorph ([=](int k)
    {
        return (2.0f / (juce::MathConstants<float>::pi * (float) k))
                 * std::sin (juce::MathConstants<float>::pi * (float) k * duty);
    }, false, +1.0f);
}

WaveMorph WavetableSynthAudioProcessor::makeEven()
{
    // even harmonics only, 1/h
    return buildAdditiveMorph ([](int h) { return (h % 2 == 0) ? 1.0f / (float) h : 0.0f; }, false, +1.0f);
}

WaveMorph WavetableSynthAudioProcessor::makeOdd()
{
    return makeSquare();
}

WaveMorph WavetableSynthAudioProcessor::makeHalfSineRect()
{
    // Approximation of a half-rectified sine (rich but smooth): even harmonics only,
    // with a ~1/k^2 rolloff; the exact series' fundamental and DC terms are dropped.
    return buildAdditiveMorph ([](int h)
    {
        if (h % 2 == 1) return 0.0f;
        const float k = (float) h;
        return 1.0f / (k * k * 0.25f);
    }, false, +1.0f);
}

WaveMorph WavetableSynthAudioProcessor::makeBell()
{
    // exponential decay across harmonics
    return buildAdditiveMorph ([](int h) { return std::exp (-0.25f * (float) (h - 1)); }, false, +1.0f);
}

WaveMorph WavetableSynthAudioProcessor::makeOrgan()
{
    // rough drawbar-style balance over the first five harmonics
    return buildAdditiveMorph ([](int h)
    {
        switch (h)
        {
            case 1: return 1.0f;
            case 2: return 0.5f;
            case 3: return 0.35f;
            case 4: return 0.28f;
            case 5: return 0.22f;
            default: return 0.0f;
        }
    }, false, +1.0f);
}

// ============================================================
// Construction

WavetableSynthAudioProcessor::WavetableSynthAudioProcessor()
    : apvts (*this, nullptr, "PARAMS", createParameterLayout())
{
    buildFactoryWaves();

    synth.clearVoices();
    for (int i = 0; i < 16; ++i)
        synth.addVoice (new WavetableVoice (*this));

    synth.clearSounds();
    synth.addSound (new WavetableSound());
    synth.setNoteStealingEnabled (true);

    presetFade.setCurrentAndTargetValue (1.0f);
}

void WavetableSynthAudioProcessor::buildFactoryWaves()
{
    waves.clear();
    waves.reserve (kBrowserCapacity);

    // 20 factory slots
    waves.push_back (makeSine());            // 0
    waves.push_back (makeSaw());             // 1
    waves.push_back (makeSquare());          // 2
    waves.push_back (makeTriangle());        // 3
    waves.push_back (makePulse (0.25f));     // 4
    waves.push_back (makePulse (0.10f));     // 5
    waves.push_back (makePulse (0.60f));     // 6
    waves.push_back (makeEven());            // 7
    waves.push_back (makeOdd());             // 8
    waves.push_back (makeHalfSineRect());    // 9
    waves.push_back (makeOrgan());           // 10
    waves.push_back (makeBell());            // 11

    // fill the remaining factory slots with variations
    waves.push_back (makePulse (0.33f));     // 12
    waves.push_back (makePulse (0.75f));     // 13
    waves.push_back (makePulse (0.90f));     // 14
    waves.push_back (makeSaw());             // 15
    waves.push_back (makeSquare());          // 16
    waves.push_back (makeTriangle());        // 17
    waves.push_back (makeEven());            // 18
    waves.push_back (makeBell());            // 19

    defaultTableCount = kFactorySlots;
    nextUserInsert = 0;
}

const std::vector<float>* WavetableSynthAudioProcessor::getPreviewTablePtr (int index) const
{
    if (index < 0 || index >= (int) waves.size()) return nullptr;
    return &waves[(size_t) index].frames[0].mip[0]; // widest-band level, used for thumbnails
}

// ============================================================
// APVTS layout

juce::AudioProcessorValueTreeState::ParameterLayout
WavetableSynthAudioProcessor::createParameterLayout()
{
    std::vector<std::unique_ptr<juce::RangedAudioParameter>> p;

    // Master first so the editor can attach even if the rest of the layout changes
    p.push_back (std::make_unique<juce::AudioParameterFloat>(
        "MASTER", "Master", juce::NormalisableRange<float> (0.0f, 1.5f, 0.0f, 0.5f), 0.75f));

    // Morph + LFO
    p.push_back (std::make_unique<juce::AudioParameterFloat>("MORPH", "Morph",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.0f));
    p.push_back (std::make_unique<juce::AudioParameterBool>("MORPH_LOOP_ON", "Morph Loop", false));
    p.push_back (std::make_unique<juce::AudioParameterChoice>("MORPH_LOOP_MODE", "Morph Loop Mode",
        juce::StringArray { "Forward", "Ping-Pong", "Half Trip" }, 0));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("LFO_RATE", "LFO Rate",
        juce::NormalisableRange<float> (0.01f, 10.0f, 0.0f, 0.4f), 0.2f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("LFO_DEPTH", "LFO Depth",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.0f));

    // Amp ADSR
    p.push_back (std::make_unique<juce::AudioParameterFloat>("ATTACK", "Attack",
        juce::NormalisableRange<float> (0.001f, 5.0f, 0.0f, 0.5f), 0.01f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("DECAY", "Decay",
        juce::NormalisableRange<float> (0.001f, 5.0f, 0.0f, 0.5f), 0.2f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("SUSTAIN", "Sustain",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.8f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("RELEASE", "Release",
        juce::NormalisableRange<float> (0.001f, 5.0f, 0.0f, 0.5f), 0.3f));

    // Filter + filter envelope
    p.push_back (std::make_unique<juce::AudioParameterFloat>("CUTOFF", "Cutoff",
        juce::NormalisableRange<float> (20.0f, 20000.0f, 0.0f, 0.5f), 8000.0f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("FENV_A", "FEnv A",
        juce::NormalisableRange<float> (0.001f, 5.0f, 0.0f, 0.5f), 0.01f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("FENV_D", "FEnv D",
        juce::NormalisableRange<float> (0.001f, 5.0f, 0.0f, 0.5f), 0.2f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("FENV_S", "FEnv S",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.0f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("FENV_R", "FEnv R",
        juce::NormalisableRange<float> (0.001f, 5.0f, 0.0f, 0.5f), 0.3f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("FENV_AMT", "FEnv Amt",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.5f));

    // Browser slot indices
    p.push_back (std::make_unique<juce::AudioParameterInt>("SLOT_A", "Slot A", 0, kBrowserCapacity - 1, 0));
    p.push_back (std::make_unique<juce::AudioParameterInt>("SLOT_B", "Slot B", 0, kBrowserCapacity - 1, 1));
    p.push_back (std::make_unique<juce::AudioParameterInt>("SLOT_C", "Slot C", 0, kBrowserCapacity - 1, 2));

    // Osc2 mute toggle
    p.push_back (std::make_unique<juce::AudioParameterBool>("OSC2_MUTE", "Deactivate Osc2", false));

    // Chorus / reverb (kept for the GUI; safe defaults)
    p.push_back (std::make_unique<juce::AudioParameterBool>("CHORUS_ON", "Chorus On", false));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("CH_RATE", "Ch Rate",
        juce::NormalisableRange<float> (0.05f, 5.0f, 0.0f, 0.5f), 1.2f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("CH_DEPTH", "Ch Depth",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.3f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("CH_DELAY", "Ch Delay",
        juce::NormalisableRange<float> (1.0f, 30.0f), 8.0f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("CH_FB", "Ch Fb",
        juce::NormalisableRange<float> (-0.95f, 0.95f), 0.0f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("CH_MIX", "Ch Mix",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.25f));

    p.push_back (std::make_unique<juce::AudioParameterBool>("REVERB_ON", "Reverb On", true));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("RV_ROOM", "Rv Room",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.4f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("RV_DAMP", "Rv Damp",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.3f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("RV_WIDTH", "Rv Width",
        juce::NormalisableRange<float> (0.0f, 1.0f), 1.0f));
    p.push_back (std::make_unique<juce::AudioParameterFloat>("RV_WET", "Rv Wet",
        juce::NormalisableRange<float> (0.0f, 1.0f), 0.12f));

    return { p.begin(), p.end() };
}

// ============================================================
// Prepare / process

void WavetableSynthAudioProcessor::prepareToPlay (double sampleRate, int samplesPerBlock)
{
    synth.setCurrentPlaybackSampleRate (sampleRate);

    juce::dsp::ProcessSpec spec;
    spec.sampleRate = sampleRate;
    spec.maximumBlockSize = (juce::uint32) samplesPerBlock;
    spec.numChannels = (juce::uint32) getTotalNumOutputChannels();

    chorus.reset();
    chorus.prepare (spec);

    reverbParams = {};
    reverb.prepare (spec);   // pass the current sample rate to the reverb as well
    reverb.setParameters (reverbParams);
    reverb.reset();

    morphBuffer.clear();
    morphBuffer.resize ((size_t) juce::jmax (1, samplesPerBlock));

    morphState = juce::jlimit (kMorphMin, kMorphMax,
                               apvts.getRawParameterValue ("MORPH")->load());
    morphLoopPhase = 0.0f;
    morphLoopDirection = 1;
    morphLoopStage = 0;
    morphLoopStagePhase = 0.0f;
    morphDisplay.store (morphState, std::memory_order_relaxed);
}

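// With the 55 Hz reference below, level = floor (log2 (f0 / 55)) gives:
//   <= 55 Hz -> 0, 110 Hz -> 1, 220 Hz -> 2, 440 Hz -> 3, 880 Hz -> 4, ...
// clamped to the available kMipLevels.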
int WavetableSynthAudioProcessor::chooseMipLevel (float fundamentalHz) const
{
    // Rough mapping: the level increases as the note gets higher,
    // from 0 for the lowest notes up to kMipLevels - 1 for the highest.
    const float ref = 55.0f; // A1
    const float ratio = fundamentalHz / ref;
    const int level = (int) std::floor (std::log2 (juce::jmax (1.0f, ratio)));
    return juce::jlimit (0, kMipLevels - 1, level);
}

const WaveMorph* WavetableSynthAudioProcessor::getWavePtr (int index) const
{
    if (waves.empty()) return nullptr;

    const int idx = juce::jlimit (0, (int) waves.size() - 1, index);
    return &waves[(size_t) idx];
}

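// processBlock order: clear the buffer, snapshot parameters into VoiceParams for all
// voices, fill the per-sample morph buffer (base value, optional loop/LFO, one-pole
// smoothing), render the synth, apply the preset fade-in and a fixed mix headroom,
// then chorus, reverb and master gain.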
void WavetableSynthAudioProcessor::processBlock (juce::AudioBuffer<float>& buffer,
                                                 juce::MidiBuffer& midi)
{
    juce::ScopedNoDenormals nd;
    buffer.clear();

    const int numSamples = buffer.getNumSamples();
    const double sr = getSampleRate() > 0.0 ? getSampleRate() : 44100.0;

    if ((int) morphBuffer.size() < numSamples)
        morphBuffer.resize ((size_t) numSamples);

    const float baseMorph  = apvts.getRawParameterValue ("MORPH")->load();
    const float lfoRate    = apvts.getRawParameterValue ("LFO_RATE")->load();
    const float lfoDepth   = apvts.getRawParameterValue ("LFO_DEPTH")->load();
    const float cutoffBase = apvts.getRawParameterValue ("CUTOFF")->load();
    const float filterAmt  = apvts.getRawParameterValue ("FENV_AMT")->load();
    const bool chorusOn    = apvts.getRawParameterValue ("CHORUS_ON")->load() > 0.5f;
    const bool reverbOn    = apvts.getRawParameterValue ("REVERB_ON")->load() > 0.5f;

    auto clampSlot = [this](int idx)
    {
        return juce::jlimit (0, juce::jmax (0, (int) waves.size() - 1), idx);
    };

    VoiceParams params;
    params.ampParams.attack  = apvts.getRawParameterValue ("ATTACK")->load();
    params.ampParams.decay   = apvts.getRawParameterValue ("DECAY")->load();
    params.ampParams.sustain = apvts.getRawParameterValue ("SUSTAIN")->load();
    params.ampParams.release = apvts.getRawParameterValue ("RELEASE")->load();

    params.filterParams.attack  = apvts.getRawParameterValue ("FENV_A")->load();
    params.filterParams.decay   = apvts.getRawParameterValue ("FENV_D")->load();
    params.filterParams.sustain = apvts.getRawParameterValue ("FENV_S")->load();
    params.filterParams.release = apvts.getRawParameterValue ("FENV_R")->load();

    params.cutoffBase = cutoffBase;
    params.filterEnvAmount = filterAmt;
    params.slotIndices = { clampSlot ((int) apvts.getRawParameterValue ("SLOT_A")->load()),
                           clampSlot ((int) apvts.getRawParameterValue ("SLOT_B")->load()),
                           clampSlot ((int) apvts.getRawParameterValue ("SLOT_C")->load()) };
    params.staticMorph = juce::jlimit (kMorphMin, kMorphMax, baseMorph);
    params.perVoiceGain = 0.5f;
    params.osc2Active = apvts.getRawParameterValue ("OSC2_MUTE")->load() < 0.5f;
    params.osc2Detune = 1.003f;

    for (int i = 0; i < synth.getNumVoices(); ++i)
        if (auto* v = dynamic_cast<WavetableVoice*> (synth.getVoice (i)))
        {
            v->setParams (params);
            v->setMorphBuffer (morphBuffer.data());
        }

    chorus.setRate (apvts.getRawParameterValue ("CH_RATE")->load());
    chorus.setDepth (apvts.getRawParameterValue ("CH_DEPTH")->load());
    chorus.setCentreDelay (apvts.getRawParameterValue ("CH_DELAY")->load());
    chorus.setFeedback (apvts.getRawParameterValue ("CH_FB")->load());
    chorus.setMix (apvts.getRawParameterValue ("CH_MIX")->load());

    reverbParams.roomSize = apvts.getRawParameterValue ("RV_ROOM")->load();
    reverbParams.damping  = apvts.getRawParameterValue ("RV_DAMP")->load();
    reverbParams.width    = apvts.getRawParameterValue ("RV_WIDTH")->load();
    reverbParams.wetLevel = apvts.getRawParameterValue ("RV_WET")->load();
    reverbParams.dryLevel = 1.0f - reverbParams.wetLevel;
    reverb.setParameters (reverbParams);

    const bool loopEnabled = apvts.getRawParameterValue ("MORPH_LOOP_ON")->load() > 0.5f;
    const int loopMode = juce::jlimit (0, 2, (int) apvts.getRawParameterValue ("MORPH_LOOP_MODE")->load());
    const float depth = juce::jlimit (0.0f, 1.0f, lfoDepth);
    const float phaseIncrement = juce::jlimit (0.0001f, 20.0f, lfoRate) / (float) sr;

    float loopPhase = morphLoopPhase;
    int loopDirection = morphLoopDirection;
    int loopStage = morphLoopStage % 4;
    float loopStagePhase = morphLoopStagePhase;
    float smoothed = morphState;

    // "Half Trip" cycles through four linear stages: 0 -> 0.5, 0.5 -> 0, 0 -> 1, 1 -> 0.
    static constexpr std::array<float, 4> stageStart { 0.0f, 0.5f, 0.0f, 1.0f };
    static constexpr std::array<float, 4> stageEnd   { 0.5f, 0.0f, 1.0f, 0.0f };

    for (int i = 0; i < numSamples; ++i)
    {
        float modValue = baseMorph;

        if (loopEnabled && depth > 0.0f)
        {
            switch (loopMode)
            {
                case 0: // forward
                {
                    loopPhase += phaseIncrement;
                    if (loopPhase >= 1.0f)
                        loopPhase -= std::floor (loopPhase);
                    modValue = loopPhase;
                    break;
                }
                case 1: // ping-pong
                {
                    loopPhase += phaseIncrement * (float) loopDirection;
                    if (loopPhase >= 1.0f)
                    {
                        loopPhase = 1.0f;
                        loopDirection = -1;
                    }
                    else if (loopPhase <= 0.0f)
                    {
                        loopPhase = 0.0f;
                        loopDirection = 1;
                    }
                    modValue = loopPhase;
                    break;
                }
                case 2: // half trip
                default:
                {
                    loopStagePhase += phaseIncrement;
                    if (loopStagePhase >= 1.0f)
                    {
                        loopStagePhase -= 1.0f;
                        loopStage = (loopStage + 1) % 4;
                    }
                    const float start = stageStart[(size_t) loopStage];
                    const float end = stageEnd[(size_t) loopStage];
                    modValue = start + loopStagePhase * (end - start);
                    break;
                }
            }

            modValue = juce::jlimit (kMorphMin, kMorphMax, modValue);
        }

        // Blend the modulated value with the knob position by LFO depth, then smooth.
        const float target = (loopEnabled && depth > 0.0f)
                                 ? juce::jlimit (kMorphMin, kMorphMax,
                                                 (1.0f - depth) * baseMorph + depth * modValue)
                                 : juce::jlimit (kMorphMin, kMorphMax, baseMorph);

        smoothed += kMorphSmoothCoeff * (target - smoothed);
        morphBuffer[(size_t) i] = smoothed;
    }

    morphState = smoothed;
    morphLoopPhase = loopPhase;
    morphLoopDirection = loopDirection;
    morphLoopStage = loopStage;
    morphLoopStagePhase = loopStagePhase;
    morphDisplay.store (smoothed, std::memory_order_relaxed);

    synth.renderNextBlock (buffer, midi, 0, numSamples);
    midi.clear();

    const int channels = buffer.getNumChannels();
    if (presetFade.isSmoothing() || presetFade.getCurrentValue() < 0.999f)
    {
        auto* channelData = buffer.getArrayOfWritePointers();
        for (int i = 0; i < numSamples; ++i)
        {
            const float g = presetFade.getNextValue();
            for (int ch = 0; ch < channels; ++ch)
                channelData[ch][i] *= g;
        }
    }

    constexpr float mixHeadroom = 0.75f;
    buffer.applyGain (mixHeadroom);

    juce::dsp::AudioBlock<float> blk (buffer);
    if (chorusOn) chorus.process (juce::dsp::ProcessContextReplacing<float> (blk));
    if (reverbOn) reverb.process (juce::dsp::ProcessContextReplacing<float> (blk));

    const float master = apvts.getRawParameterValue ("MASTER")->load();
    buffer.applyGain (master);
}

// ============================================================
// State

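// Only the APVTS tree is serialized here; as written, imported user wavetables are
// not part of this state blob (only the factory set is rebuilt in the constructor).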
void WavetableSynthAudioProcessor::getStateInformation (juce::MemoryBlock& destData)
{
    auto state = apvts.copyState();
    if (auto xml = state.createXml()) copyXmlToBinary (*xml, destData);
}

void WavetableSynthAudioProcessor::setStateInformation (const void* data, int sizeInBytes)
{
    if (auto xml = getXmlFromBinary (data, sizeInBytes))
        if (xml->hasTagName (apvts.state.getType()))
        {
            apvts.replaceState (juce::ValueTree::fromXml (*xml));
            notifyPresetLoaded();
        }
}

// ============================================================
// User waves

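// Importing a user wavetable: the incoming single cycle is resampled to kTableSize,
// cleaned up (DC removal, zero-start rotation, normalisation), projected onto a sine
// basis with amp_h = (2 / N) * sum_n x[n] * sin (2*pi*h*n / N), and then rebuilt into
// morph frames and mip levels exactly like the factory waves. Cosine components are
// discarded, so the rebuilt wave keeps only the sine-phase content of the original cycle.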
int WavetableSynthAudioProcessor::addOrReplaceUserWavetable (const std::vector<float>& singleCycle)
{
    if (singleCycle.empty()) return -1;

    const int N = kTableSize;
    std::vector<float> resampled (N);

    // resample the incoming single cycle to our table size
    for (int i = 0; i < N; ++i)
    {
        const float p = (float) i / (float) N;
        const float idx = p * (float) singleCycle.size();
        const int i0 = (int) idx;
        const int i1 = juce::jmin ((int) singleCycle.size() - 1, i0 + 1);
        const float frac = idx - (float) i0;
        resampled[(size_t) i] = singleCycle[(size_t) i0]
                                  + frac * (singleCycle[(size_t) i1] - singleCycle[(size_t) i0]);
    }

    removeDC (resampled);
    enforceZeroStart (resampled);
    normalize (resampled);

    // estimate sine-series amplitudes for the harmonics
    const int Hmax = N / 2;
    std::vector<float> amps ((size_t) Hmax + 1, 0.0f);
    for (int h = 1; h <= Hmax; ++h)
    {
        double acc = 0.0;
        for (int n = 0; n < N; ++n)
            acc += (double) resampled[(size_t) n]
                     * std::sin (juce::MathConstants<double>::twoPi * (double) h * (double) n / (double) N);
        amps[(size_t) h] = (float) (2.0 * acc / (double) N);
    }

    WaveMorph morph {};
    for (int frame = 0; frame < kMorphFrames; ++frame)
    {
        const float frameAlpha = (float) frame / (float) juce::jmax (1, kMorphFrames - 1);

        for (int level = 0; level < kMipLevels; ++level)
        {
            auto& table = morph.frames[(size_t) frame].mip[(size_t) level];
            table.assign ((size_t) N, 0.0f);

            const float levelAttenuation = std::pow (0.5f, (float) level);
            const float limitF = (float) Hmax * levelAttenuation * juce::jlimit (0.1f, 1.0f, frameAlpha + 0.05f);
            const int harmonicLimit = juce::jlimit (1, Hmax, (int) std::floor (limitF));

            for (int h = 1; h <= harmonicLimit; ++h)
                addSine (table, h, amps[(size_t) h]);

            removeDC (table);
            enforceZeroStart (table);
            normalize (table);
        }
    }

    // store into the browser grid: append while there is room, then replace
    // round-robin within the user region
    if ((int) waves.size() < kBrowserCapacity)
    {
        waves.push_back (std::move (morph));
        return (int) waves.size() - 1;
    }

    const int userCap = kBrowserCapacity - defaultTableCount;
    if (userCap <= 0) return -1;

    const int slot = defaultTableCount + (nextUserInsert % userCap);
    nextUserInsert++;
    waves[(size_t) slot] = std::move (morph);
    return slot;
}

void WavetableSynthAudioProcessor::notifyPresetLoaded()
{
    // Pull the master level down to a safe ceiling if the loaded preset is hotter than that.
    constexpr float safeMaster = 0.85f;
    if (auto* masterParam = apvts.getParameter ("MASTER"))
    {
        const float current = masterParam->convertFrom0to1 (masterParam->getValue());
        if (current > safeMaster)
            masterParam->setValueNotifyingHost (masterParam->convertTo0to1 (safeMaster));
    }

    double sr = getSampleRate();
    if (sr <= 0.0)
        sr = 44100.0;

    // Trigger a short fade so freshly loaded presets come in under control.
    presetFade.reset (sr, 0.02); // gentle 20 ms fade
    presetFade.setCurrentAndTargetValue (0.0f);
    presetFade.setTargetValue (1.0f);
}

bool WavetableSynthAudioProcessor::isMorphLoopActive() const noexcept
{
    const bool enabled = apvts.getRawParameterValue ("MORPH_LOOP_ON")->load() > 0.5f;
    if (! enabled)
        return false;

    return apvts.getRawParameterValue ("LFO_DEPTH")->load() > 0.0f;
}

// ============================================================

juce::AudioProcessorEditor* WavetableSynthAudioProcessor::createEditor()
{
    return new WavetableSynthAudioProcessorEditor (*this);
}

juce::AudioProcessor* JUCE_CALLTYPE createPluginFilter()
{
    return new WavetableSynthAudioProcessor();
}