#ifndef MAINCOMPONENT_H_INCLUDED
#define MAINCOMPONENT_H_INCLUDED

#include "../JuceLibraryCode/JuceHeader.h"
#include <limits>

//==============================================================================
/*
    MainWindow > MainComponent
*/
class SmoothedValue
{
public:
    SmoothedValue (float v, int steps = 64)
        : _target (v), _delta (0), _v (v), _nsteps (0), _ksteps (steps)
    {
    }

    ~SmoothedValue()
    {
    }

    void setValue (float v)
    {
        _nsteps = 0;
        _target = v;
        _v = v;
        _delta = 0;
    }

    void setTargetValue (float target)
    {
        _nsteps = _ksteps;
        _target = target;
        _delta = (_target - _v) / (float) _nsteps;
    }

    float getValue() const       { return _v; }
    float getTargetValue() const { return _target; }

    void tick()
    {
        if (_nsteps)
        {
            _v = _target - _delta * (float) _nsteps;
            _nsteps--;
        }
    }

    bool isStillSmoothing() const { return (_nsteps != 0); }

private:
    float _target;
    float _delta;
    float _v;
    int _nsteps;
    const int _ksteps;
};

class MainContentComponent : public AudioAppComponent,
                             public Slider::Listener,
                             public TextButton::Listener
{
public:
    //==============================================================================
    MainContentComponent()
    {
        setSize (600, 200);

        // Two output channels
        setAudioChannels (0, 2);

        // Volume slider
        addAndMakeVisible (volumeSlider);
        volumeSlider.setRange (-96, 6);
        volumeSlider.setTextValueSuffix (" dB");
        volumeSlider.setValue (-6);
        volumeSlider.addListener (this);
        volumeSlider.setSkewFactorFromMidPoint (0.5);
        volumeLabel.setText ("Volume", dontSendNotification);
        volumeLabel.attachToComponent (&volumeSlider, true);

        // Frequency slider
        addAndMakeVisible (freqSlider);
        freqSlider.setRange (10, 20000);
        freqSlider.setTextValueSuffix (" Hz");
        freqSlider.setValue (500.0);
        freqSlider.addListener (this);
        freqSlider.setSkewFactorFromMidPoint (500);
        freqLabel.setText ("Freq", dontSendNotification);
        freqLabel.attachToComponent (&freqSlider, true);

        // Mute button
        addAndMakeVisible (muteButton);
        muteButton.setButtonText ("Mute");
        muteButton.addListener (this);
        muteButton.setEnabled (true);

        // Interpolate button
        addAndMakeVisible (interpolateButton);
        interpolateButton.setButtonText ("Interpolate");
        interpolateButton.addListener (this);
        interpolateButton.setEnabled (true);

        // Give the toggle flags a defined starting state (they are read on the audio thread).
        mute = false;
        interpolating = true;
    }

    ~MainContentComponent()
    {
        shutdownAudio();
    }

    void buttonClicked (Button* button) override
    {
        // Only two buttons here: mute and interpolate, each toggling its flag.
        if (button == &muteButton)
        {
            mute = ! mute;
            //std::cout << "Mute: " << mute << std::endl;
        }

        if (button == &interpolateButton)
        {
            interpolating = ! interpolating;   // starts as true
            //std::cout << "Interpolating: " << interpolating << std::endl;
        }
    }

    void sliderValueChanged (Slider* slider) override
    {
        if (slider == &volumeSlider)
        {
            // The slider value is in dB, so convert it to a linear amplitude.
            targetAmplitude = pow (10, ((float) volumeSlider.getValue() / 20.0));
        }

        if (slider == &freqSlider)
        {
            targetFrequency = (float) freqSlider.getValue();
        }
    }

    //==============================================================================
    void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override
    {
        // This function will be called when the audio device is started, or when
        // its settings (i.e. sample rate, block size, etc) are changed.
        // You can use this function to initialise any resources you might need,
        // but be careful - it will be called on the audio thread, not the GUI thread.
        // For more details, see the help for AudioProcessor::prepareToPlay()
        amplitude = 0.5;
        targetAmplitude = amplitude;
        frequency = 500.0;
        targetFrequency = frequency;
        time = 0.0;
        deltaTime = 1 / sampleRate;
        currentSampleRate = sampleRate;
    }

    void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override
    {
        // Wrap time back to zero before it reaches the float limit.
        if (time >= std::numeric_limits<float>::max())
        {
            time = 0.0;
        }

        if (mute)
            return bufferToFill.clearActiveBufferRegion();

        // The samples are first rendered into a mono buffer; a second loop below then
        // copies each monoBuffer[sample] into every output channel's buffer[sample].
        float* monoBuffer = new float[bufferToFill.numSamples];

        if (! interpolating)
        {
            for (int sample = 0; sample < bufferToFill.numSamples; ++sample)
            {
                float value = targetAmplitude * sin (2 * double_Pi * targetFrequency * time);
                monoBuffer[sample] = value;
                time += deltaTime;
            }
        }
        else
        {
            // If the value picked by the slider equals the current frequency, no smoothing is needed.
            if (targetFrequency == frequency)
            {
                for (int sample = 0; sample < bufferToFill.numSamples; ++sample)
                {
                    float value = amplitude * sin (2 * double_Pi * frequency * time);
                    monoBuffer[sample] = value;
                    time += deltaTime;
                    //std::cout << "buffer[sample]: " << monoBuffer[sample] << " sample: " << sample << " frequency: " << frequency << std::endl;
                }
            }
            else // interpolation goes here :(
            {
                for (int sample = 0; sample < bufferToFill.numSamples; ++sample)
                {
                    /*
                        Now here are the main problems. I have two attempts at reducing the
                        artifacting: one is my interpolate() function, the other is the
                        SmoothedValue class lifted from KVRaudio.

                        (1) How would I go about writing my own linear interpolation function?
                            I can only use linear or spline interpolation, since I have to use
                            a method taught in my class.

                        (2) SmoothedValue is supposedly linear interpolation, but it looks
                            really strange to me and I don't understand exactly why it counts
                            as linear interpolation.

                        (3) SmoothedValue isn't working at all the way I'm using it now. I've
                            tried dozens of approaches and it still doesn't do anything.
                    */

                    // First approach (ideal for my goals):
                    frequency += interpolate (sample, bufferToFill.numSamples, frequency, targetFrequency, deltaTime);

                    // Second approach (using the SmoothedValue class):
                    /*
                    SmoothedValue freqSmooth (frequency, (int) currentSampleRate);
                    freqSmooth.setTargetValue (targetFrequency);

                    int i = 0;
                    while (freqSmooth.isStillSmoothing())
                    {
                        freqSmooth.tick();
                        printf ("smoothing i = %d\n", i++);
                    }
                    frequency = freqSmooth.getValue();
                    // std::cout << "buffer[sample]: " << monoBuffer[sample] << " sample: " << sample << " frequency: " << frequency << std::endl;
                    */

                    // Whichever approach is used, the block still has to be filled.
                    float value = amplitude * sin (2 * double_Pi * frequency * time);
                    monoBuffer[sample] = value;
                    time += deltaTime;
                }
            }
        }

        // Iterate over all available output channels.
        for (int channel = 0; channel < bufferToFill.buffer->getNumChannels(); ++channel)
        {
            // Get a pointer to the start sample in the buffer for this audio output channel.
            float* const buffer = bufferToFill.buffer->getWritePointer (channel, bufferToFill.startSample);

            for (int sample = 0; sample < bufferToFill.numSamples; ++sample)
            {
                buffer[sample] = monoBuffer[sample];
                //std::cout << "buffer[sample]: " << buffer[sample] << " sample: " << sample << std::endl;
            }
        }

        delete[] monoBuffer;
    }

    void releaseResources() override
    {
    }

    float interpolate (float sample, float numSamples, float curVal, float targetVal, float deltaTime)
    {
        /*
            First approach (ideal for my goals): have one interpolation function gradually
            smooth the value transition, returning the increment for each sample. I can't
            quite wrap my head around *what* I am interpolating, though. Pretty much every
            implementation I've seen involves a set of at least two coordinates
            (x0, y0), (x1, y1). If I want a smooth ramp between frequency and
            targetFrequency, how do I set those up?
        */
        //float increment = (targetVal - curVal) / (float) numSamples;
        //float deltaTime = 1 / numSamples;

        float x0 = deltaTime * sample;
        float x1 = deltaTime * (sample + 1);
        float y0 = sample * curVal;
        float y1 = sample * targetVal;
        float y;                        // result
        float avg = (x0 + x1) / 2;      // a value between x0 and x1

        if (y0 == y1)
        {
            return y0;
        }
        else
        {
            // Ideally, y here would be a "smoothed", gradual increase in the frequency.
            y = y0 + (avg - x0) * ((y1 - y0) / (x1 - x0));
            return y;
        }
    }

    //==============================================================================
    void paint (Graphics& g) override
    {
        // (Our component is opaque, so we must completely fill the background with a solid colour.)
        g.fillAll (Colours::darkgrey);
    }

    void resized() override
    {
        // This is called when the MainContentComponent is resized.
        // If you add any child components, this is where you should
        // update their positions.
        const int sliderLeft = 50;
        volumeSlider.setBounds (sliderLeft, 20, getWidth() - sliderLeft - 10, 20);
        freqSlider.setBounds (sliderLeft, 50, getWidth() - sliderLeft - 10, 20);
        muteButton.setBounds (10, 110, getWidth() - 20, 20);
        interpolateButton.setBounds (10, 150, getWidth() - 20, 20);
    }

private:
    //==============================================================================
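    // Sketch, not part of the original file: one possible answer to question (1)
    // in the comment inside getNextAudioBlock(). "Linear interpolation" here just
    // means ramping the frequency in equal per-sample steps from its value at the
    // start of the block to the slider's target, f(n) = f0 + (f1 - f0) * n / N,
    // while the phase is accumulated sample by sample so the sine stays continuous
    // even though the frequency is changing. SmoothedValue does essentially the
    // same thing, one equal step per tick(), which is why it is described as linear.
    // This helper is illustrative only and is never called by the code above; it
    // reuses the members declared below (amplitude, frequency, targetFrequency,
    // time, deltaTime), and fillWithRampedSine is a made-up name, not a JUCE one.
    void fillWithRampedSine (float* dest, int numSamples)
    {
        const float startFrequency = frequency;

        // Seed the phase from the existing time counter so the sketch is self-contained.
        double phase = 2.0 * double_Pi * startFrequency * time;

        for (int sample = 0; sample < numSamples; ++sample)
        {
            // Fraction of the way through this block, 0..1.
            const float t = (float) sample / (float) numSamples;

            // Linearly interpolated frequency for this sample.
            const float f = startFrequency + (targetFrequency - startFrequency) * t;

            dest[sample] = amplitude * (float) sin (phase);

            // Advance the phase by this sample's (interpolated) frequency.
            phase += 2.0 * double_Pi * f * deltaTime;
            time  += deltaTime;
        }

        // The ramp has reached the slider value by the end of the block.
        frequency = targetFrequency;
    }

    //==============================================================================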
    // Your private member variables go here...
    float currentSampleRate;
    float amplitude, targetAmplitude;
    float frequency, targetFrequency;
    float time;
    float deltaTime;

    // GUI
    Slider volumeSlider;
    Slider freqSlider;
    Label volumeLabel;
    Label freqLabel;
    TextButton muteButton;
    TextButton interpolateButton;

    bool interpolating;
    bool mute;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MainContentComponent)
};

// (This function is called by the app startup code to create our main component.)
Component* createMainContentComponent() { return new MainContentComponent(); }

#endif  // MAINCOMPONENT_H_INCLUDED