// paulxstretch/deps/juce/examples/Audio/AudioSynthesiserDemo.h

/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
 name:             AudioSynthesiserDemo
 version:          1.0.0
 vendor:           JUCE
 website:          http://juce.com
 description:      Simple synthesiser application.

 dependencies:     juce_audio_basics, juce_audio_devices, juce_audio_formats,
                   juce_audio_processors, juce_audio_utils, juce_core,
                   juce_data_structures, juce_events, juce_graphics,
                   juce_gui_basics, juce_gui_extra
 exporters:        xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone

 moduleFlags:      JUCE_STRICT_REFCOUNTEDPOINTER=1

 type:             Component
 mainClass:        AudioSynthesiserDemo

 useLocalCopy:     1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
#include "../Assets/DemoUtilities.h"
#include "../Assets/AudioLiveScrollingDisplay.h"
//==============================================================================
/** Our demo synth sound is just a basic sine wave.. */
struct SineWaveSound : public SynthesiserSound
{
    SineWaveSound() {}

    bool appliesToNote    (int /*midiNoteNumber*/) override { return true; }
    bool appliesToChannel (int /*midiChannel*/) override    { return true; }
};
//==============================================================================
/** Our demo synth voice just plays a sine wave.. */
struct SineWaveVoice : public SynthesiserVoice
{
    SineWaveVoice() {}

    bool canPlaySound (SynthesiserSound* sound) override
    {
        return dynamic_cast<SineWaveSound*> (sound) != nullptr;
    }

    void startNote (int midiNoteNumber, float velocity,
                    SynthesiserSound*, int /*currentPitchWheelPosition*/) override
    {
        currentAngle = 0.0;
        level = velocity * 0.15;
        tailOff = 0.0;
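
        // work out how far the sine-wave phase should advance per output sample:
        // angleDelta = 2 * pi * (note frequency / sample rate)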
        auto cyclesPerSecond = MidiMessage::getMidiNoteInHertz (midiNoteNumber);
        auto cyclesPerSample = cyclesPerSecond / getSampleRate();

        angleDelta = cyclesPerSample * MathConstants<double>::twoPi;
    }
    void stopNote (float /*velocity*/, bool allowTailOff) override
    {
        if (allowTailOff)
        {
            // start a tail-off by setting this flag. The render callback will pick up on
            // this and do a fade out, calling clearCurrentNote() when it's finished.
            if (tailOff == 0.0) // we only need to begin a tail-off if it's not already doing so - the
                tailOff = 1.0;  // stopNote method could be called more than once.
        }
        else
        {
            // we're being told to stop playing immediately, so reset everything..
            clearCurrentNote();
            angleDelta = 0.0;
        }
    }

    void pitchWheelMoved (int /*newValue*/) override {}
    void controllerMoved (int /*controllerNumber*/, int /*newValue*/) override {}
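
    // Called by the Synthesiser on the audio thread: renders this voice's output and
    // mixes it into outputBuffer, starting at startSample, for numSamples samples.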
    void renderNextBlock (AudioBuffer<float>& outputBuffer, int startSample, int numSamples) override
    {
        if (angleDelta != 0.0)
        {
            if (tailOff > 0.0)
            {
                while (--numSamples >= 0)
                {
                    auto currentSample = (float) (std::sin (currentAngle) * level * tailOff);

                    for (auto i = outputBuffer.getNumChannels(); --i >= 0;)
                        outputBuffer.addSample (i, startSample, currentSample);

                    currentAngle += angleDelta;
                    ++startSample;
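
                    // apply a simple exponential release: shrink the tail-off gain slightly on
                    // every sample, and silence the voice once it has faded below an audible level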
                    tailOff *= 0.99;

                    if (tailOff <= 0.005)
                    {
                        clearCurrentNote();
                        angleDelta = 0.0;
                        break;
                    }
                }
            }
            else
            {
                while (--numSamples >= 0)
                {
                    auto currentSample = (float) (std::sin (currentAngle) * level);

                    for (auto i = outputBuffer.getNumChannels(); --i >= 0;)
                        outputBuffer.addSample (i, startSample, currentSample);

                    currentAngle += angleDelta;
                    ++startSample;
                }
            }
        }
    }
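
    // Bring the base class's other renderNextBlock overloads (including the
    // double-precision version) back into scope; overriding only the float
    // version above would otherwise hide them.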
    using SynthesiserVoice::renderNextBlock;

private:
    double currentAngle = 0.0, angleDelta = 0.0, level = 0.0, tailOff = 0.0;
};
//==============================================================================
// This is an audio source that streams the output of our demo synth.
struct SynthAudioSource : public AudioSource
{
    SynthAudioSource (MidiKeyboardState& keyState)  : keyboardState (keyState)
    {
        // Add some voices to our synth, to play the sounds..
        for (auto i = 0; i < 4; ++i)
        {
            synth.addVoice (new SineWaveVoice());   // These voices will play our custom sine-wave sounds..
            synth.addVoice (new SamplerVoice());    // and these ones play the sampled sounds
        }

        // ..and add a sound for them to play...
        setUsingSineWaveSound();
    }

    void setUsingSineWaveSound()
    {
        synth.clearSounds();
        synth.addSound (new SineWaveSound());
    }

    void setUsingSampledSound()
    {
        WavAudioFormat wavFormat;

        std::unique_ptr<AudioFormatReader> audioReader (wavFormat.createReaderFor (createAssetInputStream ("cello.wav").release(), true));

        BigInteger allNotes;
        allNotes.setRange (0, 128, true);

        synth.clearSounds();
        synth.addSound (new SamplerSound ("demo sound",
                                          *audioReader,
                                          allNotes,
                                          74,   // root midi note
                                          0.1,  // attack time
                                          0.1,  // release time
                                          10.0  // maximum sample length
                                          ));
    }
    void prepareToPlay (int /*samplesPerBlockExpected*/, double sampleRate) override
    {
        midiCollector.reset (sampleRate);
        synth.setCurrentPlaybackSampleRate (sampleRate);
    }

    void releaseResources() override {}

    void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override
    {
        // the synth always adds its output to the audio buffer, so we have to clear it
        // first..
        bufferToFill.clearActiveBufferRegion();

        // fill a midi buffer with incoming messages from the midi input.
        MidiBuffer incomingMidi;
        midiCollector.removeNextBlockOfMessages (incomingMidi, bufferToFill.numSamples);

        // pass these messages to the keyboard state so that it can update the component
        // to show on-screen which keys are being pressed on the physical midi keyboard.
        // This call will also add midi messages to the buffer which were generated by
        // the mouse-clicking on the on-screen keyboard.
        keyboardState.processNextMidiBuffer (incomingMidi, 0, bufferToFill.numSamples, true);

        // and now get the synth to process the midi events and generate its output.
        synth.renderNextBlock (*bufferToFill.buffer, incomingMidi, 0, bufferToFill.numSamples);
    }

    //==============================================================================
    // this collects real-time midi messages from the midi input device, and
    // turns them into blocks that we can process in our audio callback
    MidiMessageCollector midiCollector;

    // this represents the state of which keys on our on-screen keyboard are held
    // down. When the mouse is clicked on the keyboard component, this object also
    // generates midi messages for this, which we can pass on to our synth.
    MidiKeyboardState& keyboardState;

    // the synth itself!
    Synthesiser synth;
};
//==============================================================================
class AudioSynthesiserDemo : public Component
{
public:
    AudioSynthesiserDemo()
    {
        addAndMakeVisible (keyboardComponent);

        addAndMakeVisible (sineButton);
        sineButton.setRadioGroupId (321);
        sineButton.setToggleState (true, dontSendNotification);
        sineButton.onClick = [this] { synthAudioSource.setUsingSineWaveSound(); };

        addAndMakeVisible (sampledButton);
        sampledButton.setRadioGroupId (321);
        sampledButton.onClick = [this] { synthAudioSource.setUsingSampledSound(); };

        addAndMakeVisible (liveAudioDisplayComp);
        audioDeviceManager.addAudioCallback (&liveAudioDisplayComp);
        audioSourcePlayer.setSource (&synthAudioSource);
#ifndef JUCE_DEMO_RUNNER
        RuntimePermissions::request (RuntimePermissions::recordAudio,
                                     [this] (bool granted)
                                     {
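                                         // if permission to record audio was refused, open the device with no input channels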
                                         int numInputChannels = granted ? 2 : 0;
                                         audioDeviceManager.initialise (numInputChannels, 2, nullptr, true, {}, nullptr);
                                     });
#endif
        audioDeviceManager.addAudioCallback (&audioSourcePlayer);
        audioDeviceManager.addMidiInputDeviceCallback ({}, &(synthAudioSource.midiCollector));

        setOpaque (true);
        setSize (640, 480);
    }
    ~AudioSynthesiserDemo() override
    {
        audioSourcePlayer.setSource (nullptr);

        audioDeviceManager.removeMidiInputDeviceCallback ({}, &(synthAudioSource.midiCollector));
        audioDeviceManager.removeAudioCallback (&audioSourcePlayer);
        audioDeviceManager.removeAudioCallback (&liveAudioDisplayComp);
    }
    //==============================================================================
    void paint (Graphics& g) override
    {
        g.fillAll (getUIColourIfAvailable (LookAndFeel_V4::ColourScheme::UIColour::windowBackground));
    }

    void resized() override
    {
        keyboardComponent   .setBounds (8, 96, getWidth() - 16, 64);
        sineButton          .setBounds (16, 176, 150, 24);
        sampledButton       .setBounds (16, 200, 150, 24);
        liveAudioDisplayComp.setBounds (8, 8, getWidth() - 16, 64);
    }
private:
    // if this PIP is running inside the demo runner, we'll use the shared device manager instead
#ifndef JUCE_DEMO_RUNNER
    AudioDeviceManager audioDeviceManager;
#else
    AudioDeviceManager& audioDeviceManager { getSharedAudioDeviceManager (0, 2) };
#endif

    MidiKeyboardState keyboardState;
    AudioSourcePlayer audioSourcePlayer;
    SynthAudioSource synthAudioSource       { keyboardState };

    MidiKeyboardComponent keyboardComponent { keyboardState, MidiKeyboardComponent::horizontalKeyboard };

    ToggleButton sineButton                 { "Use sine wave" };
    ToggleButton sampledButton              { "Use sampled sound" };

    LiveScrollingAudioDisplay liveAudioDisplayComp;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioSynthesiserDemo)
};