git subrepo clone --branch=sono6good https://github.com/essej/JUCE.git deps/juce

subrepo:
  subdir:   "deps/juce"
  merged:   "b13f9084e"
upstream:
  origin:   "https://github.com/essej/JUCE.git"
  branch:   "sono6good"
  commit:   "b13f9084e"
git-subrepo:
  version:  "0.4.3"
  origin:   "https://github.com/ingydotnet/git-subrepo.git"
  commit:   "2f68596"
This commit is contained in:
essej
2022-04-18 17:51:22 -04:00
parent 63e175fee6
commit 25bd5d8adb
3210 changed files with 1045392 additions and 0 deletions

186
deps/juce/examples/Audio/AudioAppDemo.h vendored Normal file
View File

@ -0,0 +1,186 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: AudioAppDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Simple audio application.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: AudioAppDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
//==============================================================================
class AudioAppDemo : public AudioAppComponent
{
public:
//==============================================================================
AudioAppDemo()
#ifdef JUCE_DEMO_RUNNER
: AudioAppComponent (getSharedAudioDeviceManager (0, 2))
#endif
{
setAudioChannels (0, 2);
setSize (800, 600);
}
~AudioAppDemo() override
{
shutdownAudio();
}
//==============================================================================
void prepareToPlay (int samplesPerBlockExpected, double newSampleRate) override
{
sampleRate = newSampleRate;
expectedSamplesPerBlock = samplesPerBlockExpected;
}
/* This method generates the actual audio samples.
In this example the buffer is filled with a sine wave whose frequency and
amplitude are controlled by the mouse position.
*/
void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override
{
bufferToFill.clearActiveBufferRegion();
auto originalPhase = phase;
for (auto chan = 0; chan < bufferToFill.buffer->getNumChannels(); ++chan)
{
phase = originalPhase;
auto* channelData = bufferToFill.buffer->getWritePointer (chan, bufferToFill.startSample);
for (auto i = 0; i < bufferToFill.numSamples ; ++i)
{
channelData[i] = amplitude * std::sin (phase);
// increment the phase step for the next sample
phase = std::fmod (phase + phaseDelta, MathConstants<float>::twoPi);
}
}
}
void releaseResources() override
{
// This gets automatically called when audio device parameters change
// or device is restarted.
}
//==============================================================================
void paint (Graphics& g) override
{
// (Our component is opaque, so we must completely fill the background with a solid colour)
g.fillAll (getLookAndFeel().findColour (ResizableWindow::backgroundColourId));
auto centreY = (float) getHeight() / 2.0f;
auto radius = amplitude * 200.0f;
if (radius >= 0.0f)
{
// Draw an ellipse based on the mouse position and audio volume
g.setColour (Colours::lightgreen);
g.fillEllipse (jmax (0.0f, lastMousePosition.x) - radius / 2.0f,
jmax (0.0f, lastMousePosition.y) - radius / 2.0f,
radius, radius);
}
// Draw a representative sine wave.
Path wavePath;
wavePath.startNewSubPath (0, centreY);
for (auto x = 1.0f; x < (float) getWidth(); ++x)
wavePath.lineTo (x, centreY + amplitude * (float) getHeight() * 2.0f
* std::sin (x * frequency * 0.0001f));
g.setColour (getLookAndFeel().findColour (Slider::thumbColourId));
g.strokePath (wavePath, PathStrokeType (2.0f));
}
// Mouse handling..
void mouseDown (const MouseEvent& e) override
{
mouseDrag (e);
}
void mouseDrag (const MouseEvent& e) override
{
lastMousePosition = e.position;
frequency = (float) (getHeight() - e.y) * 10.0f;
amplitude = jmin (0.9f, 0.2f * e.position.x / (float) getWidth());
phaseDelta = (float) (MathConstants<double>::twoPi * frequency / sampleRate);
repaint();
}
void mouseUp (const MouseEvent&) override
{
amplitude = 0.0f;
repaint();
}
void resized() override
{
// This is called when the component is resized.
// If you add any child components, this is where you should
// update their positions.
}
private:
//==============================================================================
float phase = 0.0f;
float phaseDelta = 0.0f;
float frequency = 5000.0f;
float amplitude = 0.2f;
double sampleRate = 0.0;
int expectedSamplesPerBlock = 0;
Point<float> lastMousePosition;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioAppDemo)
};

View File

@ -0,0 +1,403 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: AudioLatencyDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Tests the audio latency of a device.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: AudioLatencyDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
#include "../Assets/DemoUtilities.h"
#include "../Assets/AudioLiveScrollingDisplay.h"
//==============================================================================
/** Measures the round-trip latency of an audio device.

    It plays a sound containing a series of randomly-spaced spikes while
    simultaneously recording the input, then cross-correlates the spike
    positions to find the delay. Progress and results are appended to the
    TextEditor supplied at construction.
*/
class LatencyTester : public AudioIODeviceCallback,
                      private Timer
{
public:
    /** @param editorBox  the text box that progress/results are written to;
                          it must outlive this object. */
    LatencyTester (TextEditor& editorBox)
        : resultsBox (editorBox)
    {}

    //==============================================================================
    /** Starts a new measurement: rebuilds the test sound, clears the previous
        recording, and lets the audio callback begin playing/recording.
    */
    void beginTest()
    {
        resultsBox.moveCaretToEnd();
        resultsBox.insertTextAtCaret (newLine + newLine + "Starting test..." + newLine);
        resultsBox.moveCaretToEnd();

        startTimer (50);

        // Lock out the audio callback while the shared state is reset.
        const ScopedLock sl (lock);

        createTestSound();
        recordedSound.clear();
        playingSampleNum = recordedSampleNum = 0;
        testIsRunning = true;
    }

    /** Polls for completion: once the recording window has elapsed, analyses
        the captured audio and prints the result.
    */
    void timerCallback() override
    {
        if (testIsRunning && recordedSampleNum >= recordedSound.getNumSamples())
        {
            testIsRunning = false;
            stopTimer();

            // Test has finished, so calculate the result..
            auto latencySamples = calculateLatencySamples();

            resultsBox.moveCaretToEnd();
            resultsBox.insertTextAtCaret (getMessageDescribingResult (latencySamples));
            resultsBox.moveCaretToEnd();
        }
    }

    /** Formats a human-readable report.
        @param latencySamples  the measured round-trip latency, or a negative
                               value if the test signal couldn't be detected.
    */
    String getMessageDescribingResult (int latencySamples)
    {
        String message;

        if (latencySamples >= 0)
        {
            message << newLine
                    << "Results:" << newLine
                    << latencySamples << " samples (" << String (latencySamples * 1000.0 / sampleRate, 1)
                    << " milliseconds)" << newLine
                    << "The audio device reports an input latency of "
                    << deviceInputLatency << " samples, output latency of "
                    << deviceOutputLatency << " samples." << newLine
                    << "So the corrected latency = "
                    << (latencySamples - deviceInputLatency - deviceOutputLatency)
                    << " samples (" << String ((latencySamples - deviceInputLatency - deviceOutputLatency) * 1000.0 / sampleRate, 2)
                    << " milliseconds)";
        }
        else
        {
            message << newLine
                    << "Couldn't detect the test signal!!" << newLine
                    << "Make sure there's no background noise that might be confusing it..";
        }

        return message;
    }

    //==============================================================================
    /** Caches the device's sample rate and reported latencies, and sizes the
        recording buffer (0.9s of mono audio).
    */
    void audioDeviceAboutToStart (AudioIODevice* device) override
    {
        testIsRunning = false;
        playingSampleNum = recordedSampleNum = 0;

        sampleRate          = device->getCurrentSampleRate();
        deviceInputLatency  = device->getInputLatencyInSamples();
        deviceOutputLatency = device->getOutputLatencyInSamples();

        recordedSound.setSize (1, (int) (0.9 * sampleRate));
        recordedSound.clear();
    }

    void audioDeviceStopped() override {}

    /** Audio callback: while a test runs, sums all inputs into the mono
        recording buffer and plays the test sound; otherwise outputs silence.
    */
    void audioDeviceIOCallback (const float** inputChannelData, int numInputChannels,
                                float** outputChannelData, int numOutputChannels, int numSamples) override
    {
        const ScopedLock sl (lock);

        if (testIsRunning)
        {
            auto* recordingBuffer = recordedSound.getWritePointer (0);
            auto* playBuffer = testSound.getReadPointer (0);

            for (int i = 0; i < numSamples; ++i)
            {
                if (recordedSampleNum < recordedSound.getNumSamples())
                {
                    // Mix every available input channel down to mono.
                    auto inputSamp = 0.0f;

                    for (auto j = numInputChannels; --j >= 0;)
                        if (inputChannelData[j] != nullptr)
                            inputSamp += inputChannelData[j][i];

                    recordingBuffer[recordedSampleNum] = inputSamp;
                }

                // Keep counting past the end of the buffer so timerCallback()
                // can tell when the recording window has fully elapsed.
                ++recordedSampleNum;

                auto outputSamp = (playingSampleNum < testSound.getNumSamples()) ? playBuffer[playingSampleNum] : 0.0f;

                for (auto j = numOutputChannels; --j >= 0;)
                    if (outputChannelData[j] != nullptr)
                        outputChannelData[j][i] = outputSamp;

                ++playingSampleNum;
            }
        }
        else
        {
            // We need to clear the output buffers, in case they're full of junk..
            for (int i = 0; i < numOutputChannels; ++i)
                if (outputChannelData[i] != nullptr)
                    zeromem (outputChannelData[i], (size_t) numSamples * sizeof (float));
        }
    }

private:
    TextEditor& resultsBox;
    AudioBuffer<float> testSound, recordedSound;
    Array<int> spikePositions;      // sample positions of the spikes within testSound
    CriticalSection lock;           // guards state shared with the audio callback

    int playingSampleNum  = 0;
    int recordedSampleNum = -1;
    double sampleRate     = 0.0;
    bool testIsRunning    = false;

    // Fix: these were previously declared without initialisers, so
    // getMessageDescribingResult() could read indeterminate values if it was
    // ever called before audioDeviceAboutToStart() had run.
    int deviceInputLatency  = 0;
    int deviceOutputLatency = 0;

    //==============================================================================
    // create a test sound which consists of a series of randomly-spaced audio spikes..
    void createTestSound()
    {
        auto length = ((int) sampleRate) / 4;   // ~0.25 seconds
        testSound.setSize (1, length);
        testSound.clear();

        Random rand;

        // Fill with quiet background noise first.
        for (int i = 0; i < length; ++i)
            testSound.setSample (0, i, (rand.nextFloat() - rand.nextFloat() + rand.nextFloat() - rand.nextFloat()) * 0.06f);

        spikePositions.clear();

        int spikePos = 0;
        int spikeDelta = 50;

        while (spikePos < length - 1)
        {
            spikePositions.add (spikePos);

            // Each spike is a +/- full-scale pair of adjacent samples.
            testSound.setSample (0, spikePos, 0.99f);
            testSound.setSample (0, spikePos + 1, -0.99f);

            // Gaps grow irregularly so the pattern doesn't repeat.
            spikePos += spikeDelta;
            spikeDelta += spikeDelta / 6 + rand.nextInt (5);
        }
    }

    // Searches a buffer for a set of spikes that matches those in the test sound
    int findOffsetOfSpikes (const AudioBuffer<float>& buffer) const
    {
        auto minSpikeLevel = 5.0f;      // a spike must exceed 5x the running average level
        auto smooth = 0.975;            // smoothing coefficient for the running average
        auto* s = buffer.getReadPointer (0);
        int spikeDriftAllowed = 5;      // tolerance (samples) when matching positions

        Array<int> spikesFound;
        spikesFound.ensureStorageAllocated (100);
        auto runningAverage = 0.0;
        int lastSpike = 0;

        // Pass 1: collect candidate spikes — transients well above the local
        // average level, at least 20 samples apart.
        for (int i = 0; i < buffer.getNumSamples() - 10; ++i)
        {
            auto samp = std::abs (s[i]);

            if (samp > runningAverage * minSpikeLevel && i > lastSpike + 20)
            {
                lastSpike = i;
                spikesFound.add (i);
            }

            runningAverage = runningAverage * smooth + (1.0 - smooth) * samp;
        }

        int bestMatch = -1;
        auto bestNumMatches = spikePositions.size() / 3; // the minimum number of matches required

        if (spikesFound.size() < bestNumMatches)
            return -1;

        // Pass 2: slide the reference pattern across the candidates and keep
        // the offset with the most coincident spikes.
        for (int offsetToTest = 0; offsetToTest < buffer.getNumSamples() - 2048; ++offsetToTest)
        {
            int numMatchesHere = 0;
            int foundIndex = 0;

            for (int refIndex = 0; refIndex < spikePositions.size(); ++refIndex)
            {
                auto referenceSpike = spikePositions.getUnchecked (refIndex) + offsetToTest;
                int spike = 0;

                while ((spike = spikesFound.getUnchecked (foundIndex)) < referenceSpike - spikeDriftAllowed
                         && foundIndex < spikesFound.size() - 1)
                    ++foundIndex;

                if (spike >= referenceSpike - spikeDriftAllowed && spike <= referenceSpike + spikeDriftAllowed)
                    ++numMatchesHere;
            }

            if (numMatchesHere > bestNumMatches)
            {
                bestNumMatches = numMatchesHere;
                bestMatch = offsetToTest;

                // Perfect match — no better offset is possible.
                if (numMatchesHere == spikePositions.size())
                    break;
            }
        }

        return bestMatch;
    }

    /** @returns the latency in samples, or -1 if the recorded signal couldn't
                 be matched against the test sound. */
    int calculateLatencySamples() const
    {
        // Detect the sound in both our test sound and the recording of it, and measure the difference
        // in their start times..
        auto referenceStart = findOffsetOfSpikes (testSound);
        jassert (referenceStart >= 0);

        auto recordedStart = findOffsetOfSpikes (recordedSound);

        return (recordedStart < 0) ? -1
                                   : (recordedStart - referenceStart);
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (LatencyTester)
};
//==============================================================================
// Demo component that measures the round-trip latency between the selected
// audio output and input devices: a live input display on top, a results box,
// and a button that runs a LatencyTester.
class AudioLatencyDemo : public Component
{
public:
AudioLatencyDemo()
{
setOpaque (true);
// Scrolling waveform view of the incoming audio, shown at the top.
liveAudioScroller.reset (new LiveScrollingAudioDisplay());
addAndMakeVisible (liveAudioScroller.get());
// Read-only multi-line box that the LatencyTester appends results to.
addAndMakeVisible (resultsBox);
resultsBox.setMultiLine (true);
resultsBox.setReturnKeyStartsNewLine (true);
resultsBox.setReadOnly (true);
resultsBox.setScrollbarsShown (true);
resultsBox.setCaretVisible (false);
resultsBox.setPopupMenuEnabled (true);
resultsBox.setColour (TextEditor::outlineColourId, Colour (0x1c000000));
resultsBox.setColour (TextEditor::shadowColourId, Colour (0x16000000));
resultsBox.setText ("Running this test measures the round-trip latency between the audio output and input "
"devices you\'ve got selected.\n\n"
"It\'ll play a sound, then try to measure the time at which the sound arrives "
"back at the audio input. Obviously for this to work you need to have your "
"microphone somewhere near your speakers...");
addAndMakeVisible (startTestButton);
startTestButton.onClick = [this] { startTest(); };
// As a standalone PIP we own the device manager, so request record
// permission and open inputs (if granted) plus stereo output here.
#ifndef JUCE_DEMO_RUNNER
RuntimePermissions::request (RuntimePermissions::recordAudio,
[this] (bool granted)
{
int numInputChannels = granted ? 2 : 0;
audioDeviceManager.initialise (numInputChannels, 2, nullptr, true, {}, nullptr);
});
#endif
// Feed incoming audio into the scrolling display.
audioDeviceManager.addAudioCallback (liveAudioScroller.get());
setSize (500, 500);
}
~AudioLatencyDemo() override
{
// Unregister the callbacks before the members they point at are destroyed.
audioDeviceManager.removeAudioCallback (liveAudioScroller.get());
audioDeviceManager.removeAudioCallback (latencyTester .get());
latencyTester .reset();
liveAudioScroller.reset();
}
// Lazily creates the LatencyTester on first use, then starts a measurement.
void startTest()
{
if (latencyTester.get() == nullptr)
{
latencyTester.reset (new LatencyTester (resultsBox));
audioDeviceManager.addAudioCallback (latencyTester.get());
}
latencyTester->beginTest();
}
void paint (Graphics& g) override
{
g.fillAll (findColour (ResizableWindow::backgroundColourId));
}
void resized() override
{
auto b = getLocalBounds().reduced (5);
if (liveAudioScroller.get() != nullptr)
{
// Top fifth: live input display.
liveAudioScroller->setBounds (b.removeFromTop (b.getHeight() / 5));
b.removeFromTop (10);
}
// Bottom strip: test button; everything remaining is the results box.
startTestButton.setBounds (b.removeFromBottom (b.getHeight() / 10));
b.removeFromBottom (10);
resultsBox.setBounds (b);
}
private:
// if this PIP is running inside the demo runner, we'll use the shared device manager instead
#ifndef JUCE_DEMO_RUNNER
AudioDeviceManager audioDeviceManager;
#else
AudioDeviceManager& audioDeviceManager { getSharedAudioDeviceManager (1, 2) };
#endif
std::unique_ptr<LatencyTester> latencyTester;
std::unique_ptr<LiveScrollingAudioDisplay> liveAudioScroller;
TextButton startTestButton { "Test Latency" };
TextEditor resultsBox;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioLatencyDemo)
};

View File

@ -0,0 +1,560 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: AudioPlaybackDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Plays an audio file.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
type: Component
mainClass: AudioPlaybackDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
#include "../Assets/DemoUtilities.h"
//==============================================================================
// Waveform thumbnail of the currently-loaded audio file, with a scrollbar,
// mouse-wheel zoom/scroll, drag-and-drop file loading, and a white marker
// that tracks the transport's playback position.
class DemoThumbnailComp : public Component,
public ChangeListener,
public FileDragAndDropTarget,
public ChangeBroadcaster,
private ScrollBar::Listener,
private Timer
{
public:
DemoThumbnailComp (AudioFormatManager& formatManager,
AudioTransportSource& source,
Slider& slider)
: transportSource (source),
zoomSlider (slider),
thumbnail (512, formatManager, thumbnailCache)
{
// Repaint whenever the thumbnail has loaded more of the file.
thumbnail.addChangeListener (this);
addAndMakeVisible (scrollbar);
scrollbar.setRangeLimits (visibleRange);
scrollbar.setAutoHide (false);
scrollbar.addListener (this);
currentPositionMarker.setFill (Colours::white.withAlpha (0.85f));
addAndMakeVisible (currentPositionMarker);
}
~DemoThumbnailComp() override
{
scrollbar.removeListener (this);
thumbnail.removeChangeListener (this);
}
// Points the thumbnail at a new audio source and resets the visible range to
// the whole file. On iOS even local files go through URLInputSource.
void setURL (const URL& url)
{
InputSource* inputSource = nullptr;
#if ! JUCE_IOS
if (url.isLocalFile())
{
inputSource = new FileInputSource (url.getLocalFile());
}
else
#endif
{
if (inputSource == nullptr)
inputSource = new URLInputSource (url);
}
// NOTE(review): presumably thumbnail.setSource() takes ownership of the raw
// pointer (no delete appears here) — confirm against the AudioThumbnail docs.
if (inputSource != nullptr)
{
thumbnail.setSource (inputSource);
Range<double> newRange (0.0, thumbnail.getTotalLength());
scrollbar.setRangeLimits (newRange);
setRange (newRange);
// Poll at 40Hz to keep the position marker moving during playback.
startTimerHz (40);
}
}
URL getLastDroppedFile() const noexcept { return lastFileDropped; }
// amount is expected in 0..1; larger values zoom in around the view centre.
void setZoomFactor (double amount)
{
if (thumbnail.getTotalLength() > 0)
{
auto newScale = jmax (0.001, thumbnail.getTotalLength() * (1.0 - jlimit (0.0, 0.99, amount)));
auto timeAtCentre = xToTime ((float) getWidth() / 2.0f);
setRange ({ timeAtCentre - newScale * 0.5, timeAtCentre + newScale * 0.5 });
}
}
// Sets the visible time range and syncs the scrollbar and cursor to it.
void setRange (Range<double> newRange)
{
visibleRange = newRange;
scrollbar.setCurrentRange (visibleRange);
updateCursorPosition();
repaint();
}
// When following, the view scrolls to keep the playback position centred.
void setFollowsTransport (bool shouldFollow)
{
isFollowingTransport = shouldFollow;
}
void paint (Graphics& g) override
{
g.fillAll (Colours::darkgrey);
g.setColour (Colours::lightblue);
if (thumbnail.getTotalLength() > 0.0)
{
// Draw the waveform in the area above the scrollbar strip.
auto thumbArea = getLocalBounds();
thumbArea.removeFromBottom (scrollbar.getHeight() + 4);
thumbnail.drawChannels (g, thumbArea.reduced (2),
visibleRange.getStart(), visibleRange.getEnd(), 1.0f);
}
else
{
g.setFont (14.0f);
g.drawFittedText ("(No audio file selected)", getLocalBounds(), Justification::centred, 2);
}
}
void resized() override
{
scrollbar.setBounds (getLocalBounds().removeFromBottom (14).reduced (2));
}
void changeListenerCallback (ChangeBroadcaster*) override
{
// this method is called by the thumbnail when it has changed, so we should repaint it..
repaint();
}
bool isInterestedInFileDrag (const StringArray& /*files*/) override
{
return true;
}
// Remembers the first dropped file and notifies listeners (the owning demo
// responds by loading that file).
void filesDropped (const StringArray& files, int /*x*/, int /*y*/) override
{
lastFileDropped = URL (File (files[0]));
sendChangeMessage();
}
// Clicking/dragging scrubs the transport position (unless following playback).
void mouseDown (const MouseEvent& e) override
{
mouseDrag (e);
}
void mouseDrag (const MouseEvent& e) override
{
if (canMoveTransport())
transportSource.setPosition (jmax (0.0, xToTime ((float) e.x)));
}
void mouseUp (const MouseEvent&) override
{
// Releasing the mouse starts playback from the scrubbed position.
transportSource.start();
}
// Horizontal wheel scrolls the view; vertical wheel drives the zoom slider.
void mouseWheelMove (const MouseEvent&, const MouseWheelDetails& wheel) override
{
if (thumbnail.getTotalLength() > 0.0)
{
auto newStart = visibleRange.getStart() - wheel.deltaX * (visibleRange.getLength()) / 10.0;
newStart = jlimit (0.0, jmax (0.0, thumbnail.getTotalLength() - (visibleRange.getLength())), newStart);
if (canMoveTransport())
setRange ({ newStart, newStart + visibleRange.getLength() });
if (wheel.deltaY != 0.0f)
zoomSlider.setValue (zoomSlider.getValue() - wheel.deltaY);
repaint();
}
}
private:
AudioTransportSource& transportSource;
Slider& zoomSlider;
// NOTE(review): the ScrollBar ctor arg looks like isVertical, so false gives
// a horizontal bar — confirm against the ScrollBar docs.
ScrollBar scrollbar { false };
AudioThumbnailCache thumbnailCache { 5 };
AudioThumbnail thumbnail;
// Currently-visible time range of the waveform, in seconds.
Range<double> visibleRange;
bool isFollowingTransport = false;
URL lastFileDropped;
// The white playback-position cursor.
DrawableRectangle currentPositionMarker;
// Maps a time (seconds) to an x coordinate within this component.
float timeToX (const double time) const
{
if (visibleRange.getLength() <= 0)
return 0;
return (float) getWidth() * (float) ((time - visibleRange.getStart()) / visibleRange.getLength());
}
// Inverse of timeToX: maps an x coordinate back to a time in seconds.
double xToTime (const float x) const
{
return (x / (float) getWidth()) * (visibleRange.getLength()) + visibleRange.getStart();
}
// Scrubbing/scrolling is disabled while follow-transport playback is active.
bool canMoveTransport() const noexcept
{
return ! (isFollowingTransport && transportSource.isPlaying());
}
void scrollBarMoved (ScrollBar* scrollBarThatHasMoved, double newRangeStart) override
{
if (scrollBarThatHasMoved == &scrollbar)
if (! (isFollowingTransport && transportSource.isPlaying()))
setRange (visibleRange.movedToStartAt (newRangeStart));
}
// Timer tick: either refresh the cursor, or (when following) scroll the view
// so the playback position stays centred.
void timerCallback() override
{
if (canMoveTransport())
updateCursorPosition();
else
setRange (visibleRange.movedToStartAt (transportSource.getCurrentPosition() - (visibleRange.getLength() / 2.0)));
}
// Positions the 1.5px-wide marker at the current playback time; it is only
// visible while playing or while the mouse button is held down.
void updateCursorPosition()
{
currentPositionMarker.setVisible (transportSource.isPlaying() || isMouseButtonDown());
currentPositionMarker.setRectangle (Rectangle<float> (timeToX (transportSource.getCurrentPosition()) - 0.75f, 0,
1.5f, (float) (getHeight() - scrollbar.getHeight())));
}
};
//==============================================================================
// Audio-file player demo: a file browser (desktop) or native file chooser
// (Android/iOS), a DemoThumbnailComp waveform view, a zoom slider, and
// play/stop + follow-transport controls wired to an AudioTransportSource.
class AudioPlaybackDemo : public Component,
#if (JUCE_ANDROID || JUCE_IOS)
private Button::Listener,
#else
private FileBrowserListener,
#endif
private ChangeListener
{
public:
AudioPlaybackDemo()
{
addAndMakeVisible (zoomLabel);
zoomLabel.setFont (Font (15.00f, Font::plain));
zoomLabel.setJustificationType (Justification::centredRight);
zoomLabel.setEditable (false, false, false);
zoomLabel.setColour (TextEditor::textColourId, Colours::black);
zoomLabel.setColour (TextEditor::backgroundColourId, Colour (0x00000000));
addAndMakeVisible (followTransportButton);
followTransportButton.onClick = [this] { updateFollowTransportState(); };
// Mobile platforms use a native chooser button; desktop shows a file tree
// rooted at the user's home directory, plus an explanation label.
#if (JUCE_ANDROID || JUCE_IOS)
addAndMakeVisible (chooseFileButton);
chooseFileButton.addListener (this);
#else
addAndMakeVisible (fileTreeComp);
directoryList.setDirectory (File::getSpecialLocation (File::userHomeDirectory), true, true);
fileTreeComp.setTitle ("Files");
fileTreeComp.setColour (FileTreeComponent::backgroundColourId, Colours::lightgrey.withAlpha (0.6f));
fileTreeComp.addListener (this);
addAndMakeVisible (explanation);
explanation.setFont (Font (14.00f, Font::plain));
explanation.setJustificationType (Justification::bottomRight);
explanation.setEditable (false, false, false);
explanation.setColour (TextEditor::textColourId, Colours::black);
explanation.setColour (TextEditor::backgroundColourId, Colour (0x00000000));
#endif
addAndMakeVisible (zoomSlider);
zoomSlider.setRange (0, 1, 0);
zoomSlider.onValueChange = [this] { thumbnail->setZoomFactor (zoomSlider.getValue()); };
zoomSlider.setSkewFactor (2);
thumbnail.reset (new DemoThumbnailComp (formatManager, transportSource, zoomSlider));
addAndMakeVisible (thumbnail.get());
// Listen for files dropped onto the thumbnail.
thumbnail->addChangeListener (this);
addAndMakeVisible (startStopButton);
startStopButton.setColour (TextButton::buttonColourId, Colour (0xff79ed7f));
startStopButton.setColour (TextButton::textColourOffId, Colours::black);
startStopButton.onClick = [this] { startOrStop(); };
// audio setup
formatManager.registerBasicFormats();
// Background thread the transport uses for read-ahead buffering.
thread.startThread (3);
// As a standalone PIP we own the device manager, so request record
// permission and initialise it here.
#ifndef JUCE_DEMO_RUNNER
RuntimePermissions::request (RuntimePermissions::recordAudio,
[this] (bool granted)
{
int numInputChannels = granted ? 2 : 0;
audioDeviceManager.initialise (numInputChannels, 2, nullptr, true, {}, nullptr);
});
#endif
// Audio chain: device -> audioSourcePlayer -> transportSource -> file reader.
audioDeviceManager.addAudioCallback (&audioSourcePlayer);
audioSourcePlayer.setSource (&transportSource);
setOpaque (true);
setSize (500, 500);
}
~AudioPlaybackDemo() override
{
// Tear down the audio chain before the members it references are destroyed.
transportSource .setSource (nullptr);
audioSourcePlayer.setSource (nullptr);
audioDeviceManager.removeAudioCallback (&audioSourcePlayer);
#if (JUCE_ANDROID || JUCE_IOS)
chooseFileButton.removeListener (this);
#else
fileTreeComp.removeListener (this);
#endif
thumbnail->removeChangeListener (this);
}
void paint (Graphics& g) override
{
g.fillAll (getUIColourIfAvailable (LookAndFeel_V4::ColourScheme::UIColour::windowBackground));
}
void resized() override
{
auto r = getLocalBounds().reduced (4);
// Bottom 90px strip: zoom row, follow-transport toggle and play/stop
// button, with the right third given to the explanation/choose button.
auto controls = r.removeFromBottom (90);
auto controlRightBounds = controls.removeFromRight (controls.getWidth() / 3);
#if (JUCE_ANDROID || JUCE_IOS)
chooseFileButton.setBounds (controlRightBounds.reduced (10));
#else
explanation.setBounds (controlRightBounds);
#endif
auto zoom = controls.removeFromTop (25);
zoomLabel .setBounds (zoom.removeFromLeft (50));
zoomSlider.setBounds (zoom);
followTransportButton.setBounds (controls.removeFromTop (25));
startStopButton .setBounds (controls);
r.removeFromBottom (6);
// Desktop also shows the file tree above the 140px-tall thumbnail.
#if JUCE_ANDROID || JUCE_IOS
thumbnail->setBounds (r);
#else
thumbnail->setBounds (r.removeFromBottom (140));
r.removeFromBottom (6);
fileTreeComp.setBounds (r);
#endif
}
private:
// if this PIP is running inside the demo runner, we'll use the shared device manager instead
#ifndef JUCE_DEMO_RUNNER
AudioDeviceManager audioDeviceManager;
#else
AudioDeviceManager& audioDeviceManager { getSharedAudioDeviceManager (0, 2) };
#endif
AudioFormatManager formatManager;
TimeSliceThread thread { "audio file preview" };
#if (JUCE_ANDROID || JUCE_IOS)
std::unique_ptr<FileChooser> fileChooser;
TextButton chooseFileButton {"Choose Audio File...", "Choose an audio file for playback"};
#else
DirectoryContentsList directoryList {nullptr, thread};
FileTreeComponent fileTreeComp {directoryList};
Label explanation { {}, "Select an audio file in the treeview above, and this page will display its waveform, and let you play it.." };
#endif
URL currentAudioFile;
AudioSourcePlayer audioSourcePlayer;
AudioTransportSource transportSource;
std::unique_ptr<AudioFormatReaderSource> currentAudioFileSource;
std::unique_ptr<DemoThumbnailComp> thumbnail;
Label zoomLabel { {}, "zoom:" };
Slider zoomSlider { Slider::LinearHorizontal, Slider::NoTextBox };
ToggleButton followTransportButton { "Follow Transport" };
TextButton startStopButton { "Play/Stop" };
//==============================================================================
// Loads the resource into the transport (keeping the previous file on
// failure), resets the zoom and points the thumbnail at the current file.
void showAudioResource (URL resource)
{
if (loadURLIntoTransport (resource))
currentAudioFile = std::move (resource);
zoomSlider.setValue (0, dontSendNotification);
thumbnail->setURL (currentAudioFile);
}
// Returns true if a reader could be created for the URL and plugged into the
// transport source; false leaves the transport empty.
bool loadURLIntoTransport (const URL& audioURL)
{
// unload the previous file source and delete it..
transportSource.stop();
transportSource.setSource (nullptr);
currentAudioFileSource.reset();
AudioFormatReader* reader = nullptr;
// On iOS even local files are opened via an input stream.
#if ! JUCE_IOS
if (audioURL.isLocalFile())
{
reader = formatManager.createReaderFor (audioURL.getLocalFile());
}
else
#endif
{
if (reader == nullptr)
reader = formatManager.createReaderFor (audioURL.createInputStream (URL::InputStreamOptions (URL::ParameterHandling::inAddress)));
}
if (reader != nullptr)
{
// NOTE(review): the 'true' flag presumably makes AudioFormatReaderSource
// take ownership of the reader — confirm against the JUCE API docs.
currentAudioFileSource.reset (new AudioFormatReaderSource (reader, true));
// ..and plug it into our transport source
transportSource.setSource (currentAudioFileSource.get(),
32768, // tells it to buffer this many samples ahead
&thread, // this is the background thread to use for reading-ahead
reader->sampleRate); // allows for sample rate correction
return true;
}
return false;
}
void startOrStop()
{
if (transportSource.isPlaying())
{
transportSource.stop();
}
else
{
// Always restart playback from the beginning of the file.
transportSource.setPosition (0);
transportSource.start();
}
}
void updateFollowTransportState()
{
thumbnail->setFollowsTransport (followTransportButton.getToggleState());
}
#if (JUCE_ANDROID || JUCE_IOS)
// Mobile: request storage permission if necessary, then launch the native
// file chooser asynchronously.
void buttonClicked (Button* btn) override
{
if (btn == &chooseFileButton && fileChooser.get() == nullptr)
{
if (! RuntimePermissions::isGranted (RuntimePermissions::readExternalStorage))
{
// Re-invoke this handler once permission has been granted; the
// SafePointer guards against this component being deleted meanwhile.
SafePointer<AudioPlaybackDemo> safeThis (this);
RuntimePermissions::request (RuntimePermissions::readExternalStorage,
[safeThis] (bool granted) mutable
{
if (safeThis != nullptr && granted)
safeThis->buttonClicked (&safeThis->chooseFileButton);
});
return;
}
if (FileChooser::isPlatformDialogAvailable())
{
fileChooser.reset (new FileChooser ("Select an audio file...", File(), "*.wav;*.mp3;*.aif"));
fileChooser->launchAsync (FileBrowserComponent::openMode | FileBrowserComponent::canSelectFiles,
[this] (const FileChooser& fc) mutable
{
if (fc.getURLResults().size() > 0)
{
auto u = fc.getURLResult();
showAudioResource (std::move (u));
}
fileChooser = nullptr;
}, nullptr);
}
else
{
NativeMessageBox::showAsync (MessageBoxOptions()
.withIconType (MessageBoxIconType::WarningIcon)
.withTitle ("Enable Code Signing")
.withMessage ("You need to enable code-signing for your iOS project and enable \"iCloud Documents\" "
"permissions to be able to open audio files on your iDevice. See: "
"https://forum.juce.com/t/native-ios-android-file-choosers"),
nullptr);
}
}
}
#else
// Desktop file-tree callbacks: only selection changes are acted on.
void selectionChanged() override
{
showAudioResource (URL (fileTreeComp.getSelectedFile()));
}
void fileClicked (const File&, const MouseEvent&) override {}
void fileDoubleClicked (const File&) override {}
void browserRootChanged (const File&) override {}
#endif
// Fired by the thumbnail when a file has been dropped onto it.
void changeListenerCallback (ChangeBroadcaster* source) override
{
if (source == thumbnail.get())
showAudioResource (URL (thumbnail->getLastDroppedFile()));
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioPlaybackDemo)
};

View File

@ -0,0 +1,376 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
to use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: AudioRecordingDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Records audio to a file.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: AudioRecordingDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
#include "../Assets/DemoUtilities.h"
#include "../Assets/AudioLiveScrollingDisplay.h"
//==============================================================================
/** A simple class that acts as an AudioIODeviceCallback and writes the
incoming audio data to a WAV file.
*/
class AudioRecorder : public AudioIODeviceCallback
{
public:
    AudioRecorder (AudioThumbnail& thumbnailToUpdate)
        : thumbnail (thumbnailToUpdate)
    {
        // The ThreadedWriter created in startRecording() uses this thread
        // to flush audio data to disk in the background.
        backgroundThread.startThread();
    }

    ~AudioRecorder() override
    {
        stop();
    }

    //==============================================================================
    /** Starts writing incoming audio to the given file (replacing any existing
        file). Only effective once the audio device has started, since a valid
        sample rate is needed; writes a mono, 16-bit WAV.
    */
    void startRecording (const File& file)
    {
        stop();

        if (sampleRate > 0)
        {
            // Create an OutputStream to write to our destination file...
            file.deleteFile();

            if (auto fileStream = std::unique_ptr<FileOutputStream> (file.createOutputStream()))
            {
                // Now create a WAV writer object that writes to our output stream...
                WavAudioFormat wavFormat;

                if (auto writer = wavFormat.createWriterFor (fileStream.get(), sampleRate, 1, 16, {}, 0))
                {
                    fileStream.release(); // (passes responsibility for deleting the stream to the writer object that is now using it)

                    // Now we'll create one of these helper objects which will act as a FIFO buffer, and will
                    // write the data to disk on our background thread.
                    threadedWriter.reset (new AudioFormatWriter::ThreadedWriter (writer, backgroundThread, 32768));

                    // Reset our recording thumbnail
                    thumbnail.reset (writer->getNumChannels(), writer->getSampleRate());
                    nextSampleNum = 0;

                    // And now, swap over our active writer pointer so that the audio callback will start using it..
                    const ScopedLock sl (writerLock);
                    activeWriter = threadedWriter.get();
                }
            }
        }
    }

    /** Stops recording and flushes any buffered data to disk. Safe to call
        when not recording.
    */
    void stop()
    {
        // First, clear this pointer to stop the audio callback from using our writer object..
        {
            const ScopedLock sl (writerLock);
            activeWriter = nullptr;
        }

        // Now we can delete the writer object. It's done in this order because the deletion could
        // take a little time while remaining data gets flushed to disk, so it's best to avoid blocking
        // the audio callback while this happens.
        threadedWriter.reset();
    }

    /** True between startRecording() and stop(), i.e. while a writer is installed. */
    bool isRecording() const
    {
        return activeWriter.load() != nullptr;
    }

    //==============================================================================
    void audioDeviceAboutToStart (AudioIODevice* device) override
    {
        sampleRate = device->getCurrentSampleRate();
    }

    void audioDeviceStopped() override
    {
        // A zero sample rate makes startRecording() a no-op until the device restarts.
        sampleRate = 0;
    }

    // Realtime audio callback: forwards input to the threaded writer (if
    // recording), feeds the thumbnail, and silences the output buffers.
    void audioDeviceIOCallback (const float** inputChannelData, int numInputChannels,
                                float** outputChannelData, int numOutputChannels,
                                int numSamples) override
    {
        const ScopedLock sl (writerLock);

        if (activeWriter.load() != nullptr && numInputChannels >= thumbnail.getNumChannels())
        {
            activeWriter.load()->write (inputChannelData, numSamples);

            // Create an AudioBuffer to wrap our incoming data, note that this does no allocations or copies, it simply references our input data
            AudioBuffer<float> buffer (const_cast<float**> (inputChannelData), thumbnail.getNumChannels(), numSamples);
            thumbnail.addBlock (nextSampleNum, buffer, 0, numSamples);
            nextSampleNum += numSamples;
        }

        // We need to clear the output buffers, in case they're full of junk..
        for (int i = 0; i < numOutputChannels; ++i)
            if (outputChannelData[i] != nullptr)
                FloatVectorOperations::clear (outputChannelData[i], numSamples);
    }

private:
    AudioThumbnail& thumbnail;                                          // updated live so the UI can draw the waveform while recording
    TimeSliceThread backgroundThread { "Audio Recorder Thread" };       // the thread that will write our audio data to disk
    std::unique_ptr<AudioFormatWriter::ThreadedWriter> threadedWriter;  // the FIFO used to buffer the incoming data
    double sampleRate = 0.0;
    int64 nextSampleNum = 0;

    CriticalSection writerLock;                                         // guards swaps of activeWriter against the audio callback
    std::atomic<AudioFormatWriter::ThreadedWriter*> activeWriter { nullptr };
};
//==============================================================================
/** Displays a waveform thumbnail of the audio being recorded, repainting
    itself whenever new data arrives.
*/
class RecordingThumbnail : public Component,
                           private ChangeListener
{
public:
    RecordingThumbnail()
    {
        formatManager.registerBasicFormats();
        thumbnail.addChangeListener (this);
    }

    ~RecordingThumbnail() override
    {
        thumbnail.removeChangeListener (this);
    }

    /** Gives the recorder access to the thumbnail so it can push audio data into it. */
    AudioThumbnail& getAudioThumbnail()     { return thumbnail; }

    /** When true, paint() scales to show the entire recording; when false,
        at least 30 seconds of timeline are always shown.
    */
    void setDisplayFullThumbnail (bool displayFull)
    {
        showFullLength = displayFull;
        repaint();
    }

    void paint (Graphics& g) override
    {
        g.fillAll (Colours::darkgrey);
        g.setColour (Colours::lightgrey);

        if (thumbnail.getTotalLength() <= 0.0)
        {
            // Nothing recorded yet - show a placeholder message instead.
            g.setFont (14.0f);
            g.drawFittedText ("(No file recorded)", getLocalBounds(), Justification::centred, 2);
            return;
        }

        auto visibleEnd = showFullLength ? thumbnail.getTotalLength()
                                         : jmax (30.0, thumbnail.getTotalLength());

        thumbnail.drawChannels (g, getLocalBounds().reduced (2), 0.0, visibleEnd, 1.0f);
    }

private:
    AudioFormatManager formatManager;
    AudioThumbnailCache thumbnailCache { 10 };
    AudioThumbnail thumbnail { 512, formatManager, thumbnailCache };

    bool showFullLength = false;

    void changeListenerCallback (ChangeBroadcaster* source) override
    {
        // Repaint whenever the thumbnail gains new audio data.
        if (source == &thumbnail)
            repaint();
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (RecordingThumbnail)
};
//==============================================================================
/** Demonstrates recording the live audio input to a WAV file, with a live
    scrolling input display, a thumbnail of the recording so far, and a
    record/stop toggle button.
*/
class AudioRecordingDemo : public Component
{
public:
    AudioRecordingDemo()
    {
        setOpaque (true);
        addAndMakeVisible (liveAudioScroller);

        addAndMakeVisible (explanationLabel);
        explanationLabel.setFont (Font (15.0f, Font::plain));
        explanationLabel.setJustificationType (Justification::topLeft);
        explanationLabel.setEditable (false, false, false);
        explanationLabel.setColour (TextEditor::textColourId, Colours::black);
        explanationLabel.setColour (TextEditor::backgroundColourId, Colour (0x00000000));

        addAndMakeVisible (recordButton);
        recordButton.setColour (TextButton::buttonColourId, Colour (0xffff5c5c));
        recordButton.setColour (TextButton::textColourOnId, Colours::black);

        // The same button toggles between starting and stopping a recording.
        recordButton.onClick = [this]
        {
            if (recorder.isRecording())
                stopRecording();
            else
                startRecording();
        };

        addAndMakeVisible (recordingThumbnail);

       #ifndef JUCE_DEMO_RUNNER
        // Standalone build: we own the device manager, so ask for the record
        // permission and open the audio device ourselves.
        RuntimePermissions::request (RuntimePermissions::recordAudio,
                                     [this] (bool granted)
                                     {
                                         int numInputChannels = granted ? 2 : 0;
                                         audioDeviceManager.initialise (numInputChannels, 2, nullptr, true, {}, nullptr);
                                     });
       #endif

        audioDeviceManager.addAudioCallback (&liveAudioScroller);
        audioDeviceManager.addAudioCallback (&recorder);

        setSize (500, 500);
    }

    ~AudioRecordingDemo() override
    {
        audioDeviceManager.removeAudioCallback (&recorder);
        audioDeviceManager.removeAudioCallback (&liveAudioScroller);
    }

    void paint (Graphics& g) override
    {
        g.fillAll (getUIColourIfAvailable (LookAndFeel_V4::ColourScheme::UIColour::windowBackground));
    }

    void resized() override
    {
        auto area = getLocalBounds();

        liveAudioScroller .setBounds (area.removeFromTop (80).reduced (8));
        recordingThumbnail.setBounds (area.removeFromTop (80).reduced (8));
        recordButton      .setBounds (area.removeFromTop (36).removeFromLeft (140).reduced (8));
        explanationLabel  .setBounds (area.reduced (8));
    }

private:
    // if this PIP is running inside the demo runner, we'll use the shared device manager instead
   #ifndef JUCE_DEMO_RUNNER
    AudioDeviceManager audioDeviceManager;
   #else
    AudioDeviceManager& audioDeviceManager { getSharedAudioDeviceManager (1, 0) };
   #endif

    LiveScrollingAudioDisplay liveAudioScroller;
    RecordingThumbnail recordingThumbnail;
    AudioRecorder recorder { recordingThumbnail.getAudioThumbnail() };

    Label explanationLabel { {}, "This page demonstrates how to record a wave file from the live audio input..\n\n"
                               #if (JUCE_ANDROID || JUCE_IOS)
                                "After you are done with your recording you can share with other apps."
                               #else
                                "Pressing record will start recording a file in your \"Documents\" folder."
                               #endif
                               };
    TextButton recordButton { "Record" };
    File lastRecording;

    /** Starts a new recording, first making sure we're allowed to write to
        external storage (the permission callback re-enters this method).
    */
    void startRecording()
    {
        if (! RuntimePermissions::isGranted (RuntimePermissions::writeExternalStorage))
        {
            SafePointer<AudioRecordingDemo> safeThis (this);

            RuntimePermissions::request (RuntimePermissions::writeExternalStorage,
                                         [safeThis] (bool granted) mutable
                                         {
                                             // Bug fix: the component may have been deleted while the
                                             // permission dialog was showing, so check the SafePointer
                                             // before dereferencing it (matching AudioPlaybackDemo).
                                             if (safeThis != nullptr && granted)
                                                 safeThis->startRecording();
                                         });
            return;
        }

       #if (JUCE_ANDROID || JUCE_IOS)
        auto parentDir = File::getSpecialLocation (File::tempDirectory);
       #else
        auto parentDir = File::getSpecialLocation (File::userDocumentsDirectory);
       #endif

        // Pick a filename that doesn't clash with any previous recording.
        lastRecording = parentDir.getNonexistentChildFile ("JUCE Demo Audio Recording", ".wav");

        recorder.startRecording (lastRecording);

        recordButton.setButtonText ("Stop");
        recordingThumbnail.setDisplayFullThumbnail (false);
    }

    /** Stops the recorder and, on platforms with content sharing, offers the
        recorded file to other apps before deleting the temporary copy.
    */
    void stopRecording()
    {
        recorder.stop();

       #if JUCE_CONTENT_SHARING
        SafePointer<AudioRecordingDemo> safeThis (this);
        File fileToShare = lastRecording;

        ContentSharer::getInstance()->shareFiles (Array<URL> ({URL (fileToShare)}),
                                                  [safeThis, fileToShare] (bool success, const String& error)
                                                  {
                                                      if (fileToShare.existsAsFile())
                                                          fileToShare.deleteFile();

                                                      if (! success && error.isNotEmpty())
                                                          NativeMessageBox::showAsync (MessageBoxOptions()
                                                                                         .withIconType (MessageBoxIconType::WarningIcon)
                                                                                         .withTitle ("Sharing Error")
                                                                                         .withMessage (error),
                                                                                       nullptr);
                                                  });
       #endif

        lastRecording = File();
        recordButton.setButtonText ("Record");
        recordingThumbnail.setDisplayFullThumbnail (true);
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioRecordingDemo)
};

View File

@ -0,0 +1,173 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
to use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: AudioSettingsDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Displays information about audio devices.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: AudioSettingsDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
#include "../Assets/DemoUtilities.h"
//==============================================================================
class AudioSettingsDemo : public Component,
                          public ChangeListener
{
public:
    AudioSettingsDemo()
    {
        setOpaque (true);

       #ifndef JUCE_DEMO_RUNNER
        // Standalone build: request the record permission and open the device ourselves.
        RuntimePermissions::request (RuntimePermissions::recordAudio,
                                     [this] (bool granted)
                                     {
                                         int numInputChannels = granted ? 2 : 0;
                                         audioDeviceManager.initialise (numInputChannels, 2, nullptr, true, {}, nullptr);
                                     });
       #endif

        // Standard JUCE device-selector UI, allowing 0-256 input/output channels.
        audioSetupComp.reset (new AudioDeviceSelectorComponent (audioDeviceManager,
                                                                0, 256, 0, 256, true, true, true, false));
        addAndMakeVisible (audioSetupComp.get());

        // A read-only multi-line text box used as a scrolling diagnostics log.
        addAndMakeVisible (diagnosticsBox);

        diagnosticsBox.setMultiLine (true);
        diagnosticsBox.setReturnKeyStartsNewLine (true);
        diagnosticsBox.setReadOnly (true);
        diagnosticsBox.setScrollbarsShown (true);
        diagnosticsBox.setCaretVisible (false);
        diagnosticsBox.setPopupMenuEnabled (true);

        // Re-dump the device info whenever the device setup changes.
        audioDeviceManager.addChangeListener (this);

        logMessage ("Audio device diagnostics:\n");
        dumpDeviceInfo();

        setSize (500, 600);
    }

    ~AudioSettingsDemo() override
    {
        audioDeviceManager.removeChangeListener (this);
    }

    void paint (Graphics& g) override
    {
        g.fillAll (getUIColourIfAvailable (LookAndFeel_V4::ColourScheme::UIColour::windowBackground));
    }

    void resized() override
    {
        // The selector takes the top 65% of the window; the log box gets the rest.
        auto r = getLocalBounds().reduced (4);
        audioSetupComp->setBounds (r.removeFromTop (proportionOfHeight (0.65f)));
        diagnosticsBox.setBounds (r);
    }

    /** Appends a description of the current device type and open device
        (sample rate, buffer size, latency, channels) to the log.
    */
    void dumpDeviceInfo()
    {
        logMessage ("--------------------------------------");
        logMessage ("Current audio device type: " + (audioDeviceManager.getCurrentDeviceTypeObject() != nullptr
                                                        ? audioDeviceManager.getCurrentDeviceTypeObject()->getTypeName()
                                                        : "<none>"));

        if (AudioIODevice* device = audioDeviceManager.getCurrentAudioDevice())
        {
            logMessage ("Current audio device: "   + device->getName().quoted());
            logMessage ("Sample rate: "    + String (device->getCurrentSampleRate()) + " Hz");
            logMessage ("Block size: "     + String (device->getCurrentBufferSizeSamples()) + " samples");
            logMessage ("Output Latency: " + String (device->getOutputLatencyInSamples())   + " samples");
            logMessage ("Input Latency: "  + String (device->getInputLatencyInSamples())    + " samples");
            logMessage ("Bit depth: "      + String (device->getCurrentBitDepth()));
            logMessage ("Input channel names: "    + device->getInputChannelNames().joinIntoString (", "));
            logMessage ("Active input channels: "  + getListOfActiveBits (device->getActiveInputChannels()));
            logMessage ("Output channel names: "   + device->getOutputChannelNames().joinIntoString (", "));
            logMessage ("Active output channels: " + getListOfActiveBits (device->getActiveOutputChannels()));
        }
        else
        {
            logMessage ("No audio device open");
        }
    }

    /** Appends one line of text to the diagnostics box. */
    void logMessage (const String& m)
    {
        diagnosticsBox.moveCaretToEnd();
        diagnosticsBox.insertTextAtCaret (m + newLine);
    }

private:
    // if this PIP is running inside the demo runner, we'll use the shared device manager instead
   #ifndef JUCE_DEMO_RUNNER
    AudioDeviceManager audioDeviceManager;
   #else
    AudioDeviceManager& audioDeviceManager { getSharedAudioDeviceManager() };
   #endif

    std::unique_ptr<AudioDeviceSelectorComponent> audioSetupComp;
    TextEditor diagnosticsBox;

    // Called when the device manager's setup changes (we registered in the ctor).
    void changeListenerCallback (ChangeBroadcaster*) override
    {
        dumpDeviceInfo();
    }

    void lookAndFeelChanged() override
    {
        diagnosticsBox.applyFontToAllText (diagnosticsBox.getFont());
    }

    /** Returns a comma-separated list of the set bit indices, e.g. "0, 1"
        for a channel mask with channels 0 and 1 active.
    */
    static String getListOfActiveBits (const BigInteger& b)
    {
        StringArray bits;

        for (int i = 0; i <= b.getHighestBit(); ++i)
            if (b[i])
                bits.add (String (i));

        return bits.joinIntoString (", ");
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioSettingsDemo)
};

View File

@ -0,0 +1,322 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
to use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: AudioSynthesiserDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Simple synthesiser application.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: AudioSynthesiserDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
#include "../Assets/DemoUtilities.h"
#include "../Assets/AudioLiveScrollingDisplay.h"
//==============================================================================
/** Our demo synth sound is just a basic sine wave.. */
struct SineWaveSound : public SynthesiserSound
{
    SineWaveSound() = default;

    // This sound is playable on every note and every MIDI channel.
    bool appliesToNote (int /*midiNoteNumber*/) override      { return true; }
    bool appliesToChannel (int /*midiChannel*/) override      { return true; }
};
//==============================================================================
/** Our demo synth voice just plays a sine wave.. */
struct SineWaveVoice : public SynthesiserVoice
{
    SineWaveVoice() {}

    // This voice only renders SineWaveSound instances.
    bool canPlaySound (SynthesiserSound* sound) override
    {
        return dynamic_cast<SineWaveSound*> (sound) != nullptr;
    }

    void startNote (int midiNoteNumber, float velocity,
                    SynthesiserSound*, int /*currentPitchWheelPosition*/) override
    {
        currentAngle = 0.0;
        level = velocity * 0.15;   // scale the 0..1 velocity down to a modest output level
        tailOff = 0.0;

        // Convert the MIDI note to a per-sample phase increment.
        auto cyclesPerSecond = MidiMessage::getMidiNoteInHertz (midiNoteNumber);
        auto cyclesPerSample = cyclesPerSecond / getSampleRate();

        angleDelta = cyclesPerSample * MathConstants<double>::twoPi;
    }

    void stopNote (float /*velocity*/, bool allowTailOff) override
    {
        if (allowTailOff)
        {
            // start a tail-off by setting this flag. The render callback will pick up on
            // this and do a fade out, calling clearCurrentNote() when it's finished.

            if (tailOff == 0.0) // we only need to begin a tail-off if it's not already doing so - the
                tailOff = 1.0;  // stopNote method could be called more than once.
        }
        else
        {
            // we're being told to stop playing immediately, so reset everything..

            clearCurrentNote();
            angleDelta = 0.0;
        }
    }

    void pitchWheelMoved (int /*newValue*/) override      {}
    void controllerMoved (int /*controllerNumber*/, int /*newValue*/) override      {}

    // Adds this voice's sine output into the buffer. angleDelta == 0.0 means
    // the voice is silent; tailOff > 0.0 means we're fading out after stopNote().
    void renderNextBlock (AudioBuffer<float>& outputBuffer, int startSample, int numSamples) override
    {
        if (angleDelta != 0.0)
        {
            if (tailOff > 0.0)
            {
                while (--numSamples >= 0)
                {
                    auto currentSample = (float) (std::sin (currentAngle) * level * tailOff);

                    for (auto i = outputBuffer.getNumChannels(); --i >= 0;)
                        outputBuffer.addSample (i, startSample, currentSample);

                    currentAngle += angleDelta;
                    ++startSample;

                    tailOff *= 0.99;   // exponential fade-out, 1% per sample

                    if (tailOff <= 0.005)
                    {
                        // The tail has decayed to (near) silence - free the voice.
                        clearCurrentNote();

                        angleDelta = 0.0;
                        break;
                    }
                }
            }
            else
            {
                while (--numSamples >= 0)
                {
                    auto currentSample = (float) (std::sin (currentAngle) * level);

                    for (auto i = outputBuffer.getNumChannels(); --i >= 0;)
                        outputBuffer.addSample (i, startSample, currentSample);

                    currentAngle += angleDelta;
                    ++startSample;
                }
            }
        }
    }

    // Un-hides the other base-class renderNextBlock overloads that the
    // override above would otherwise shadow.
    using SynthesiserVoice::renderNextBlock;

private:
    double currentAngle = 0.0, angleDelta = 0.0, level = 0.0, tailOff = 0.0;
};
//==============================================================================
// This is an audio source that streams the output of our demo synth.
struct SynthAudioSource : public AudioSource
{
    // NOTE: keyState must outlive this object - we hold a reference to it.
    SynthAudioSource (MidiKeyboardState& keyState) : keyboardState (keyState)
    {
        // Add some voices to our synth, to play the sounds..
        for (auto i = 0; i < 4; ++i)
        {
            synth.addVoice (new SineWaveVoice());   // These voices will play our custom sine-wave sounds..
            synth.addVoice (new SamplerVoice());    // and these ones play the sampled sounds
        }

        // ..and add a sound for them to play...
        setUsingSineWaveSound();
    }

    /** Switches the synth to the built-in sine-wave sound. */
    void setUsingSineWaveSound()
    {
        synth.clearSounds();
        synth.addSound (new SineWaveSound());
    }

    /** Switches the synth to a SamplerSound loaded from the bundled cello.wav,
        mapped across all 128 MIDI notes.
    */
    void setUsingSampledSound()
    {
        WavAudioFormat wavFormat;

        std::unique_ptr<AudioFormatReader> audioReader (wavFormat.createReaderFor (createAssetInputStream ("cello.wav").release(), true));

        BigInteger allNotes;
        allNotes.setRange (0, 128, true);

        synth.clearSounds();
        synth.addSound (new SamplerSound ("demo sound",
                                          *audioReader,
                                          allNotes,
                                          74,   // root midi note
                                          0.1,  // attack time
                                          0.1,  // release time
                                          10.0  // maximum sample length
                                          ));
    }

    void prepareToPlay (int /*samplesPerBlockExpected*/, double sampleRate) override
    {
        midiCollector.reset (sampleRate);

        synth.setCurrentPlaybackSampleRate (sampleRate);
    }

    void releaseResources() override {}

    void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override
    {
        // the synth always adds its output to the audio buffer, so we have to clear it
        // first..
        bufferToFill.clearActiveBufferRegion();

        // fill a midi buffer with incoming messages from the midi input.
        MidiBuffer incomingMidi;
        midiCollector.removeNextBlockOfMessages (incomingMidi, bufferToFill.numSamples);

        // pass these messages to the keyboard state so that it can update the component
        // to show on-screen which keys are being pressed on the physical midi keyboard.
        // This call will also add midi messages to the buffer which were generated by
        // the mouse-clicking on the on-screen keyboard.
        keyboardState.processNextMidiBuffer (incomingMidi, 0, bufferToFill.numSamples, true);

        // and now get the synth to process the midi events and generate its output.
        synth.renderNextBlock (*bufferToFill.buffer, incomingMidi, 0, bufferToFill.numSamples);
    }

    //==============================================================================
    // this collects real-time midi messages from the midi input device, and
    // turns them into blocks that we can process in our audio callback
    MidiMessageCollector midiCollector;

    // this represents the state of which keys on our on-screen keyboard are held
    // down. When the mouse is clicked on the keyboard component, this object also
    // generates midi messages for this, which we can pass on to our synth.
    MidiKeyboardState& keyboardState;

    // the synth itself!
    Synthesiser synth;
};
//==============================================================================
class AudioSynthesiserDemo : public Component
{
public:
    AudioSynthesiserDemo()
    {
        addAndMakeVisible (keyboardComponent);

        // Radio-button pair that switches the synth between sine and sampled sounds.
        addAndMakeVisible (sineButton);
        sineButton.setRadioGroupId (321);
        sineButton.setToggleState (true, dontSendNotification);
        sineButton.onClick = [this] { synthAudioSource.setUsingSineWaveSound(); };

        addAndMakeVisible (sampledButton);
        sampledButton.setRadioGroupId (321);
        sampledButton.onClick = [this] { synthAudioSource.setUsingSampledSound(); };

        addAndMakeVisible (liveAudioDisplayComp);
        audioDeviceManager.addAudioCallback (&liveAudioDisplayComp);

        // The player streams the synth's AudioSource output to the device.
        audioSourcePlayer.setSource (&synthAudioSource);

       #ifndef JUCE_DEMO_RUNNER
        // Standalone build: request the record permission and open the device ourselves.
        RuntimePermissions::request (RuntimePermissions::recordAudio,
                                     [this] (bool granted)
                                     {
                                         int numInputChannels = granted ? 2 : 0;
                                         audioDeviceManager.initialise (numInputChannels, 2, nullptr, true, {}, nullptr);
                                     });
       #endif

        audioDeviceManager.addAudioCallback (&audioSourcePlayer);

        // Feed hardware MIDI input straight into the synth's message collector.
        audioDeviceManager.addMidiInputDeviceCallback ({}, &(synthAudioSource.midiCollector));

        setOpaque (true);
        setSize (640, 480);
    }

    ~AudioSynthesiserDemo() override
    {
        // Unregister everything in reverse order of registration.
        audioSourcePlayer.setSource (nullptr);
        audioDeviceManager.removeMidiInputDeviceCallback ({}, &(synthAudioSource.midiCollector));
        audioDeviceManager.removeAudioCallback (&audioSourcePlayer);
        audioDeviceManager.removeAudioCallback (&liveAudioDisplayComp);
    }

    //==============================================================================
    void paint (Graphics& g) override
    {
        g.fillAll (getUIColourIfAvailable (LookAndFeel_V4::ColourScheme::UIColour::windowBackground));
    }

    void resized() override
    {
        keyboardComponent   .setBounds (8, 96, getWidth() - 16, 64);
        sineButton          .setBounds (16, 176, 150, 24);
        sampledButton       .setBounds (16, 200, 150, 24);
        liveAudioDisplayComp.setBounds (8, 8, getWidth() - 16, 64);
    }

private:
    // if this PIP is running inside the demo runner, we'll use the shared device manager instead
   #ifndef JUCE_DEMO_RUNNER
    AudioDeviceManager audioDeviceManager;
   #else
    AudioDeviceManager& audioDeviceManager { getSharedAudioDeviceManager (0, 2) };
   #endif

    MidiKeyboardState keyboardState;
    AudioSourcePlayer audioSourcePlayer;
    SynthAudioSource synthAudioSource    { keyboardState };

    MidiKeyboardComponent keyboardComponent { keyboardState, MidiKeyboardComponent::horizontalKeyboard};

    ToggleButton sineButton     { "Use sine wave" };
    ToggleButton sampledButton  { "Use sampled sound" };

    LiveScrollingAudioDisplay liveAudioDisplayComp;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioSynthesiserDemo)
};

24
deps/juce/examples/Audio/CMakeLists.txt vendored Normal file
View File

@ -0,0 +1,24 @@
# ==============================================================================
#
# This file is part of the JUCE library.
# Copyright (c) 2020 - Raw Material Software Limited
#
# JUCE is an open source library subject to commercial or open-source
# licensing.
#
# By using JUCE, you agree to the terms of both the JUCE 6 End-User License
# Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
#
# End User License Agreement: www.juce.com/juce-6-licence
# Privacy Policy: www.juce.com/juce-privacy-policy
#
# Or: You may also use this code under the terms of the GPL v3 (see
# www.gnu.org/licenses).
#
# JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
# EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
# DISCLAIMED.
#
# ==============================================================================
_juce_add_pips()

1050
deps/juce/examples/Audio/MPEDemo.h vendored Normal file

File diff suppressed because it is too large Load Diff

483
deps/juce/examples/Audio/MidiDemo.h vendored Normal file
View File

@ -0,0 +1,483 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
to use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: MidiDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Handles incoming and outcoming midi messages.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: MidiDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once

#include <utility>
//==============================================================================
/** A reference-counted record tying a MidiDeviceInfo to the (optionally)
    opened input/output device objects for it.
*/
struct MidiDeviceListEntry : ReferenceCountedObject
{
    // Sink parameter: taken by value and moved into place, avoiding an
    // extra copy of the device-info strings.
    MidiDeviceListEntry (MidiDeviceInfo info) : deviceInfo (std::move (info)) {}

    MidiDeviceInfo deviceInfo;
    std::unique_ptr<MidiInput> inDevice;     // non-null only while the input is open
    std::unique_ptr<MidiOutput> outDevice;   // non-null only while the output is open

    using Ptr = ReferenceCountedObjectPtr<MidiDeviceListEntry>;
};
//==============================================================================
class MidiDemo : public Component,
private Timer,
private MidiKeyboardState::Listener,
private MidiInputCallback,
private AsyncUpdater
{
public:
//==============================================================================
MidiDemo()
    : midiKeyboard       (keyboardState, MidiKeyboardComponent::horizontalKeyboard),
      midiInputSelector  (new MidiDeviceListBox ("Midi Input Selector",  *this, true)),
      midiOutputSelector (new MidiDeviceListBox ("Midi Output Selector", *this, false))
{
    addLabelAndSetStyle (midiInputLabel);
    addLabelAndSetStyle (midiOutputLabel);
    addLabelAndSetStyle (incomingMidiLabel);
    addLabelAndSetStyle (outgoingMidiLabel);

    midiKeyboard.setName ("MIDI Keyboard");
    addAndMakeVisible (midiKeyboard);

    // Read-only multi-line box used to display incoming MIDI messages.
    midiMonitor.setMultiLine (true);
    midiMonitor.setReturnKeyStartsNewLine (false);
    midiMonitor.setReadOnly (true);
    midiMonitor.setScrollbarsShown (true);
    midiMonitor.setCaretVisible (false);
    midiMonitor.setPopupMenuEnabled (false);
    midiMonitor.setText ({});
    addAndMakeVisible (midiMonitor);

    // The Bluetooth pairing dialogue is only available on some platforms.
    if (! BluetoothMidiDevicePairingDialogue::isAvailable())
        pairButton.setEnabled (false);

    addAndMakeVisible (pairButton);
    pairButton.onClick = []
    {
        RuntimePermissions::request (RuntimePermissions::bluetoothMidi,
                                     [] (bool wasGranted)
                                     {
                                         if (wasGranted)
                                             BluetoothMidiDevicePairingDialogue::open();
                                     });
    };
    keyboardState.addListener (this);

    addAndMakeVisible (midiInputSelector .get());
    addAndMakeVisible (midiOutputSelector.get());

    setSize (732, 520);

    // Poll for device hot-plugging twice a second (see timerCallback).
    startTimer (500);
}
~MidiDemo() override
{
    // Stop the device-rescan timer before tearing anything else down.
    stopTimer();
    midiInputs .clear();
    midiOutputs.clear();
    keyboardState.removeListener (this);

    midiInputSelector .reset();
    midiOutputSelector.reset();
}
//==============================================================================
void timerCallback() override
{
updateDeviceList (true);
updateDeviceList (false);
}
void handleNoteOn (MidiKeyboardState*, int midiChannel, int midiNoteNumber, float velocity) override
{
    // Forward on-screen keyboard presses to every selected MIDI output.
    auto message = MidiMessage::noteOn (midiChannel, midiNoteNumber, velocity);
    message.setTimeStamp (Time::getMillisecondCounterHiRes() * 0.001);
    sendToOutputs (message);
}
void handleNoteOff (MidiKeyboardState*, int midiChannel, int midiNoteNumber, float velocity) override
{
    // Forward on-screen keyboard releases to every selected MIDI output.
    auto message = MidiMessage::noteOff (midiChannel, midiNoteNumber, velocity);
    message.setTimeStamp (Time::getMillisecondCounterHiRes() * 0.001);
    sendToOutputs (message);
}
// No custom painting needed - child components fill the window.
void paint (Graphics&) override {}

// Lays out the window: input/output labels and selectors side by side on
// top, pair button, then the keyboard and the MIDI monitor below.
void resized() override
{
    auto margin = 10;

    midiInputLabel.setBounds (margin, margin,
                              (getWidth() / 2) - (2 * margin), 24);

    midiOutputLabel.setBounds ((getWidth() / 2) + margin, margin,
                               (getWidth() / 2) - (2 * margin), 24);

    midiInputSelector->setBounds (margin, (2 * margin) + 24,
                                  (getWidth() / 2) - (2 * margin),
                                  (getHeight() / 2) - ((4 * margin) + 24 + 24));

    midiOutputSelector->setBounds ((getWidth() / 2) + margin, (2 * margin) + 24,
                                   (getWidth() / 2) - (2 * margin),
                                   (getHeight() / 2) - ((4 * margin) + 24 + 24));

    pairButton.setBounds (margin, (getHeight() / 2) - (margin + 24),
                          getWidth() - (2 * margin), 24);

    outgoingMidiLabel.setBounds (margin, getHeight() / 2, getWidth() - (2 * margin), 24);
    midiKeyboard.setBounds (margin, (getHeight() / 2) + (24 + margin), getWidth() - (2 * margin), 64);

    incomingMidiLabel.setBounds (margin, (getHeight() / 2) + (24 + (2 * margin) + 64),
                                 getWidth() - (2 * margin), 24);

    auto y = (getHeight() / 2) + ((2 * 24) + (3 * margin) + 64);
    midiMonitor.setBounds (margin, y,
                           getWidth() - (2 * margin), getHeight() - y - margin);
}
// Opens the device at the given index of the corresponding cached list and
// stores the handle in that list's entry. For inputs, this object becomes the
// MidiInputCallback and the device is started so messages begin to arrive.
void openDevice (bool isInput, int index)
{
if (isInput)
{
// The entry must not already hold an open device.
jassert (midiInputs[index]->inDevice.get() == nullptr);
midiInputs[index]->inDevice = MidiInput::openDevice (midiInputs[index]->deviceInfo.identifier, this);
if (midiInputs[index]->inDevice.get() == nullptr)
{
DBG ("MidiDemo::openDevice: open input device for index = " << index << " failed!");
return;
}
// Inputs deliver no callbacks until explicitly started.
midiInputs[index]->inDevice->start();
}
else
{
jassert (midiOutputs[index]->outDevice.get() == nullptr);
midiOutputs[index]->outDevice = MidiOutput::openDevice (midiOutputs[index]->deviceInfo.identifier);
if (midiOutputs[index]->outDevice.get() == nullptr)
{
// Failure leaves the entry's outDevice null; sendToOutputs() skips it.
DBG ("MidiDemo::openDevice: open output device for index = " << index << " failed!");
}
}
}
// Closes the open device held by the entry at the given index; inputs are
// stopped first so no callback fires during destruction.
void closeDevice (bool isInput, int index)
{
if (isInput)
{
jassert (midiInputs[index]->inDevice.get() != nullptr);
midiInputs[index]->inDevice->stop();
midiInputs[index]->inDevice.reset();
}
else
{
jassert (midiOutputs[index]->outDevice.get() != nullptr);
midiOutputs[index]->outDevice.reset();
}
}
int getNumMidiInputs() const noexcept
{
    // Number of MIDI input devices currently cached by the demo.
    const auto inputCount = midiInputs.size();
    return inputCount;
}
int getNumMidiOutputs() const noexcept
{
    // Number of MIDI output devices currently cached by the demo.
    const auto outputCount = midiOutputs.size();
    return outputCount;
}
ReferenceCountedObjectPtr<MidiDeviceListEntry> getMidiDevice (int index, bool isInput) const noexcept
{
    // Fetch a device entry from either the input or the output list.
    if (isInput)
        return midiInputs[index];

    return midiOutputs[index];
}
private:
//==============================================================================
struct MidiDeviceListBox : public ListBox,
private ListBoxModel
{
MidiDeviceListBox (const String& name,
MidiDemo& contentComponent,
bool isInputDeviceList)
: ListBox (name, this),
parent (contentComponent),
isInput (isInputDeviceList)
{
setOutlineThickness (1);
setMultipleSelectionEnabled (true);
setClickingTogglesRowSelection (true);
}
//==============================================================================
int getNumRows() override
{
return isInput ? parent.getNumMidiInputs()
: parent.getNumMidiOutputs();
}
void paintListBoxItem (int rowNumber, Graphics& g,
int width, int height, bool rowIsSelected) override
{
auto textColour = getLookAndFeel().findColour (ListBox::textColourId);
if (rowIsSelected)
g.fillAll (textColour.interpolatedWith (getLookAndFeel().findColour (ListBox::backgroundColourId), 0.5));
g.setColour (textColour);
g.setFont ((float) height * 0.7f);
if (isInput)
{
if (rowNumber < parent.getNumMidiInputs())
g.drawText (parent.getMidiDevice (rowNumber, true)->deviceInfo.name,
5, 0, width, height,
Justification::centredLeft, true);
}
else
{
if (rowNumber < parent.getNumMidiOutputs())
g.drawText (parent.getMidiDevice (rowNumber, false)->deviceInfo.name,
5, 0, width, height,
Justification::centredLeft, true);
}
}
//==============================================================================
void selectedRowsChanged (int) override
{
auto newSelectedItems = getSelectedRows();
if (newSelectedItems != lastSelectedItems)
{
for (auto i = 0; i < lastSelectedItems.size(); ++i)
{
if (! newSelectedItems.contains (lastSelectedItems[i]))
parent.closeDevice (isInput, lastSelectedItems[i]);
}
for (auto i = 0; i < newSelectedItems.size(); ++i)
{
if (! lastSelectedItems.contains (newSelectedItems[i]))
parent.openDevice (isInput, newSelectedItems[i]);
}
lastSelectedItems = newSelectedItems;
}
}
//==============================================================================
void syncSelectedItemsWithDeviceList (const ReferenceCountedArray<MidiDeviceListEntry>& midiDevices)
{
SparseSet<int> selectedRows;
for (auto i = 0; i < midiDevices.size(); ++i)
if (midiDevices[i]->inDevice.get() != nullptr || midiDevices[i]->outDevice.get() != nullptr)
selectedRows.addRange (Range<int> (i, i + 1));
lastSelectedItems = selectedRows;
updateContent();
setSelectedRows (selectedRows, dontSendNotification);
}
private:
//==============================================================================
MidiDemo& parent;
bool isInput;
SparseSet<int> lastSelectedItems;
};
//==============================================================================
// Buffers an incoming message under the lock and defers the (non-realtime-safe)
// UI update to the message thread via AsyncUpdater.
void handleIncomingMidiMessage (MidiInput* /*source*/, const MidiMessage& message) override
{
// This is called on the MIDI thread
const ScopedLock sl (midiMonitorLock);
incomingMessages.add (message);
triggerAsyncUpdate();
}
void handleAsyncUpdate() override
{
// This is called on the message loop
Array<MidiMessage> messages;
{
const ScopedLock sl (midiMonitorLock);
messages.swapWith (incomingMessages);
}
String messageText;
for (auto& m : messages)
messageText << m.getDescription() << "\n";
midiMonitor.insertTextAtCaret (messageText);
}
void sendToOutputs (const MidiMessage& msg)
{
for (auto midiOutput : midiOutputs)
if (midiOutput->outDevice.get() != nullptr)
midiOutput->outDevice->sendMessageNow (msg);
}
//==============================================================================
// Returns true if the freshly-enumerated devices differ from our cached list.
bool hasDeviceListChanged (const Array<MidiDeviceInfo>& availableDevices, bool isInputDevice)
{
    auto& cachedDevices = isInputDevice ? midiInputs : midiOutputs;

    if (cachedDevices.size() != availableDevices.size())
        return true;

    for (auto i = 0; i < cachedDevices.size(); ++i)
        if (cachedDevices[i]->deviceInfo != availableDevices[i])
            return true;

    return false;
}
// Returns the cached entry matching the given device info, or nullptr.
ReferenceCountedObjectPtr<MidiDeviceListEntry> findDevice (MidiDeviceInfo device, bool isInputDevice) const
{
    const auto& listToSearch = isInputDevice ? midiInputs : midiOutputs;

    for (auto i = 0; i < listToSearch.size(); ++i)
        if (listToSearch[i]->deviceInfo == device)
            return listToSearch[i];

    return nullptr;
}
// Removes cached entries whose hardware is no longer plugged in, first closing
// any device that was still open. Iterates backwards because remove() shifts
// the indices of later elements.
void closeUnpluggedDevices (const Array<MidiDeviceInfo>& currentlyPluggedInDevices, bool isInputDevice)
{
ReferenceCountedArray<MidiDeviceListEntry>& midiDevices = isInputDevice ? midiInputs
: midiOutputs;
for (auto i = midiDevices.size(); --i >= 0;)
{
auto& d = *midiDevices[i];
if (! currentlyPluggedInDevices.contains (d.deviceInfo))
{
// Only close entries that actually hold an open device handle.
if (isInputDevice ? d.inDevice .get() != nullptr
: d.outDevice.get() != nullptr)
closeDevice (isInputDevice, i);
midiDevices.remove (i);
}
}
}
// Re-enumerates the available devices and, if anything changed, rebuilds the
// cached list. Entries for devices that are still present are reused so any
// open handles they hold survive the rebuild; the list box selection is then
// re-synced to reflect which entries are open.
void updateDeviceList (bool isInputDeviceList)
{
auto availableDevices = isInputDeviceList ? MidiInput::getAvailableDevices()
: MidiOutput::getAvailableDevices();
if (hasDeviceListChanged (availableDevices, isInputDeviceList))
{
ReferenceCountedArray<MidiDeviceListEntry>& midiDevices
= isInputDeviceList ? midiInputs : midiOutputs;
closeUnpluggedDevices (availableDevices, isInputDeviceList);
ReferenceCountedArray<MidiDeviceListEntry> newDeviceList;
// add all currently plugged-in devices to the device list
for (auto& newDevice : availableDevices)
{
MidiDeviceListEntry::Ptr entry = findDevice (newDevice, isInputDeviceList);
if (entry == nullptr)
entry = new MidiDeviceListEntry (newDevice);
newDeviceList.add (entry);
}
// actually update the device list
midiDevices = newDeviceList;
// update the selection status of the combo-box
if (auto* midiSelector = isInputDeviceList ? midiInputSelector.get() : midiOutputSelector.get())
midiSelector->syncSelectedItemsWithDeviceList (midiDevices);
}
}
//==============================================================================
// Applies the demo's shared label styling and adds the label as a child.
void addLabelAndSetStyle (Label& label)
{
    label.setJustificationType (Justification::centredLeft);
    label.setFont (Font (15.00f, Font::plain));
    label.setEditable (false, false, false);

    label.setColour (TextEditor::textColourId, Colours::black);
    label.setColour (TextEditor::backgroundColourId, Colour (0x00000000));

    addAndMakeVisible (label);
}
//==============================================================================
// Section labels for the device selectors and the monitor areas.
Label midiInputLabel { "Midi Input Label", "MIDI Input:" };
Label midiOutputLabel { "Midi Output Label", "MIDI Output:" };
Label incomingMidiLabel { "Incoming Midi Label", "Received MIDI messages:" };
Label outgoingMidiLabel { "Outgoing Midi Label", "Play the keyboard to send MIDI messages..." };
// On-screen keyboard whose note on/off events are forwarded to the open outputs.
MidiKeyboardState keyboardState;
MidiKeyboardComponent midiKeyboard;
// Text box showing descriptions of the received MIDI messages.
TextEditor midiMonitor { "MIDI Monitor" };
TextButton pairButton { "MIDI Bluetooth devices..." };
// List boxes used to choose which devices are open.
std::unique_ptr<MidiDeviceListBox> midiInputSelector, midiOutputSelector;
// Cached device entries; each may hold an open MidiInput/MidiOutput handle.
ReferenceCountedArray<MidiDeviceListEntry> midiInputs, midiOutputs;
// Guards incomingMessages: filled on the MIDI thread, drained on the message
// thread (see handleIncomingMidiMessage / handleAsyncUpdate).
CriticalSection midiMonitorLock;
Array<MidiMessage> incomingMessages;
//==============================================================================
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiDemo)
};

View File

@ -0,0 +1,389 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: PluckedStringsDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Simulation of a plucked string sound.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: PluckedStringsDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
//==============================================================================
/**
A very basic generator of a simulated plucked string sound, implementing
the Karplus-Strong algorithm.
Not performance-optimised!
*/
class StringSynthesiser
{
public:
    //==============================================================================
    /** Constructor.

        @param sampleRate      The audio sample rate to use.
        @param frequencyInHz   The fundamental frequency of the simulated string
                               in Hertz.
    */
    StringSynthesiser (double sampleRate, double frequencyInHz)
    {
        doPluckForNextBuffer.set (false);
        prepareSynthesiserState (sampleRate, frequencyInHz);
    }

    //==============================================================================
    /** Excite the simulated string by plucking it at a given position.

        @param pluckPosition The position of the plucking, relative to the length
                             of the string. Must be between 0 and 1.
    */
    void stringPlucked (float pluckPosition)
    {
        jassert (pluckPosition >= 0.0 && pluckPosition <= 1.0);

        // we choose a very simple approach to communicate with the audio thread:
        // simply tell the synth to perform the plucking excitation at the beginning
        // of the next buffer (= when generateAndAddData is called the next time).
        if (doPluckForNextBuffer.compareAndSetBool (1, 0))
        {
            // plucking in the middle gives the largest amplitude;
            // plucking at the very ends will do nothing.
            amplitude = std::sin (MathConstants<float>::pi * pluckPosition);
        }
    }

    //==============================================================================
    /** Generate next chunk of mono audio output and add it into a buffer.

        @param outBuffer  Buffer to fill (one channel only). New sound will be
                          added to existing content of the buffer (instead of
                          replacing it).
        @param numSamples Number of samples to generate (make sure that outBuffer
                          has enough space).
    */
    void generateAndAddData (float* outBuffer, int numSamples)
    {
        if (doPluckForNextBuffer.compareAndSetBool (0, 1))
            exciteInternalBuffer();

        // cycle through the delay line and apply a simple averaging filter.
        // (the delay line length is loop-invariant, so hoist it out of the loop)
        const auto delayLineLength = delayLine.size();

        for (auto i = 0; i < numSamples; ++i)
        {
            auto nextPos = (pos + 1) % delayLineLength;

            delayLine[nextPos] = (float) (decay * 0.5 * (delayLine[nextPos] + delayLine[pos]));
            outBuffer[i] += delayLine[pos];

            pos = nextPos;
        }
    }

private:
    //==============================================================================
    // Sizes the delay line from the sample rate / fundamental frequency and
    // pre-computes the random-noise excitation sample.
    void prepareSynthesiserState (double sampleRate, double frequencyInHz)
    {
        auto delayLineLength = (size_t) roundToInt (sampleRate / frequencyInHz);

        // we need a minimum delay line length to get a reasonable synthesis.
        // if you hit this assert, increase sample rate or decrease frequency!
        jassert (delayLineLength > 50);

        delayLine.resize (delayLineLength);
        std::fill (delayLine.begin(), delayLine.end(), 0.0f);

        excitationSample.resize (delayLineLength);

        // as the excitation sample we use random noise between -1 and 1
        // (as a simple approximation to a plucking excitation)
        std::generate (excitationSample.begin(),
                       excitationSample.end(),
                       [] { return (Random::getSystemRandom().nextFloat() * 2.0f) - 1.0f; } );
    }

    void exciteInternalBuffer()
    {
        // fill the buffer with the precomputed excitation sound (scaled with amplitude).
        jassert (delayLine.size() >= excitationSample.size());

        // The samples are floats, so take the lambda argument as float: the old
        // double parameter forced a needless float -> double conversion per element.
        std::transform (excitationSample.begin(),
                        excitationSample.end(),
                        delayLine.begin(),
                        [this] (float sample) { return static_cast<float> (amplitude * sample); } );
    }

    //==============================================================================
    const double decay = 0.998;   // feedback gain of the averaging filter (< 1 so the note dies away)
    double amplitude = 0.0;       // scaling applied to the next excitation; written by stringPlucked()

    Atomic<int> doPluckForNextBuffer;   // UI -> audio thread flag requesting a pluck

    std::vector<float> excitationSample, delayLine;
    size_t pos = 0;               // current position in the circular delay line

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (StringSynthesiser)
};
//==============================================================================
/*
This component represents a horizontal vibrating musical string of fixed height
and variable length. The string can be excited by calling stringPlucked().
*/
class StringComponent : public Component,
                        private Timer
{
public:
    StringComponent (int lengthInPixels, Colour stringColour)
        : length (lengthInPixels), colour (stringColour)
    {
        // Pass mouse events through to the parent, which decides when a pluck happens.
        setInterceptsMouseClicks (false, false);
        setSize (length, height);
        startTimerHz (60);
    }

    //==============================================================================
    /** Starts the vibration animation; the pluck position is relative (0..1). */
    void stringPlucked (float pluckPositionRelative)
    {
        amplitude = maxAmplitude * std::sin (pluckPositionRelative * MathConstants<float>::pi);
        phase = MathConstants<float>::pi;
    }

    //==============================================================================
    void paint (Graphics& g) override
    {
        g.setColour (colour);
        g.strokePath (generateStringPath(), PathStrokeType (2.0f));
    }

    Path generateStringPath() const
    {
        // The string is a quadratic curve whose middle control point
        // oscillates around the component's vertical centre.
        auto midlineY = (float) height / 2.0f;
        auto controlPointY = midlineY + (std::sin (phase) * amplitude);

        Path path;
        path.startNewSubPath (0, midlineY);
        path.quadraticTo ((float) length / 2.0f, controlPointY, (float) length, midlineY);
        return path;
    }

    //==============================================================================
    void timerCallback() override
    {
        updateAmplitude();
        updatePhase();
        repaint();
    }

    void updateAmplitude()
    {
        // this determines the decay of the visible string vibration.
        amplitude *= 0.99f;
    }

    void updatePhase()
    {
        // this determines the visible vibration frequency.
        // just an arbitrary number chosen to look OK:
        auto phaseStep = 400.0f / (float) length;

        phase += phaseStep;

        if (phase >= MathConstants<float>::twoPi)
            phase -= MathConstants<float>::twoPi;
    }

private:
    //==============================================================================
    int length;                          // string length in pixels
    Colour colour;
    int height = 20;                     // fixed component height
    float amplitude = 0.0f;              // current visible vibration amplitude
    const float maxAmplitude = 12.0f;    // amplitude of a pluck dead-centre
    float phase = 0.0f;                  // animation phase in radians

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (StringComponent)
};
//==============================================================================
/** Main demo component: a set of clickable on-screen strings, each driving a
    Karplus-Strong StringSynthesiser that renders into the audio output.
*/
class PluckedStringsDemo : public AudioAppComponent
{
public:
    PluckedStringsDemo()
       #ifdef JUCE_DEMO_RUNNER
        : AudioAppComponent (getSharedAudioDeviceManager (0, 2))
       #endif
    {
        createStringComponents();
        setSize (800, 560);

        // specify the number of input and output channels that we want to open
        auto audioDevice = deviceManager.getCurrentAudioDevice();
        auto numInputChannels  = (audioDevice != nullptr ? audioDevice->getActiveInputChannels() .countNumberOfSetBits() : 0);
        auto numOutputChannels = jmax (audioDevice != nullptr ? audioDevice->getActiveOutputChannels().countNumberOfSetBits() : 2, 2);

        setAudioChannels (numInputChannels, numOutputChannels);
    }

    ~PluckedStringsDemo() override
    {
        shutdownAudio();
    }

    //==============================================================================
    void prepareToPlay (int /*samplesPerBlockExpected*/, double sampleRate) override
    {
        // (Re)build the synths so their delay lines match the new sample rate.
        generateStringSynths (sampleRate);
    }

    void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override
    {
        bufferToFill.clearActiveBufferRegion();

        for (auto channel = 0; channel < bufferToFill.buffer->getNumChannels(); ++channel)
        {
            auto* channelData = bufferToFill.buffer->getWritePointer (channel, bufferToFill.startSample);

            if (channel == 0)
            {
                // Sum all strings into the first channel.
                for (auto synth : stringSynths)
                    synth->generateAndAddData (channelData, bufferToFill.numSamples);
            }
            else
            {
                // Duplicate channel 0 into the remaining channels.
                // Fix: the read pointer must be offset by startSample to match
                // the region written above - reading from sample 0 copied the
                // wrong part of the buffer whenever startSample != 0.
                memcpy (channelData,
                        bufferToFill.buffer->getReadPointer (0, bufferToFill.startSample),
                        ((size_t) bufferToFill.numSamples) * sizeof (float));
            }
        }
    }

    void releaseResources() override
    {
        stringSynths.clear();
    }

    //==============================================================================
    void paint (Graphics&) override {}  // background is empty; the strings paint themselves

    void resized() override
    {
        // Stack the strings vertically down the left-hand side.
        auto xPos = 20;
        auto yPos = 20;
        auto yDistance = 50;

        for (auto stringLine : stringLines)
        {
            stringLine->setTopLeftPosition (xPos, yPos);
            yPos += yDistance;
            addAndMakeVisible (stringLine);
        }
    }

private:
    void mouseDown (const MouseEvent& e) override
    {
        mouseDrag (e);
    }

    void mouseDrag (const MouseEvent& e) override
    {
        // Pluck every string (visual + audible) currently under the mouse.
        for (auto i = 0; i < stringLines.size(); ++i)
        {
            auto* stringLine = stringLines.getUnchecked (i);

            if (stringLine->getBounds().contains (e.getPosition()))
            {
                auto position = (e.position.x - (float) stringLine->getX()) / (float) stringLine->getWidth();

                stringLine->stringPlucked (position);
                stringSynths.getUnchecked (i)->stringPlucked (position);
            }
        }
    }

    //==============================================================================
    // Frequency and on-screen length for one string, derived from a MIDI note.
    struct StringParameters
    {
        StringParameters (int midiNote)
            : frequencyInHz (MidiMessage::getMidiNoteInHertz (midiNote)),
              lengthInPixels ((int) (760 / (frequencyInHz / MidiMessage::getMidiNoteInHertz (42))))
        {}

        double frequencyInHz;
        int lengthInPixels;
    };

    static Array<StringParameters> getDefaultStringParameters()
    {
        // MIDI note numbers of the strings shown on screen.
        return Array<StringParameters> (42, 44, 46, 49, 51, 54, 56, 58, 61, 63, 66, 68, 70);
    }

    void createStringComponents()
    {
        // One visual string per note, each with a random hue.
        for (auto stringParams : getDefaultStringParameters())
        {
            stringLines.add (new StringComponent (stringParams.lengthInPixels,
                                                  Colour::fromHSV (Random().nextFloat(), 0.6f, 0.9f, 1.0f)));
        }
    }

    void generateStringSynths (double sampleRate)
    {
        stringSynths.clear();

        for (auto stringParams : getDefaultStringParameters())
        {
            stringSynths.add (new StringSynthesiser (sampleRate, stringParams.frequencyInHz));
        }
    }

    //==============================================================================
    OwnedArray<StringComponent> stringLines;      // visual strings (parallel to stringSynths)
    OwnedArray<StringSynthesiser> stringSynths;   // audio engines (parallel to stringLines)

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (PluckedStringsDemo)
};

189
deps/juce/examples/Audio/SimpleFFTDemo.h vendored Normal file
View File

@ -0,0 +1,189 @@
/*
==============================================================================
This file is part of the JUCE examples.
Copyright (c) 2020 - Raw Material Software Limited
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES,
WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR
PURPOSE, ARE DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this PIP. A PIP is a short snippet
of code that can be read by the Projucer and used to generate a JUCE project.
BEGIN_JUCE_PIP_METADATA
name: SimpleFFTDemo
version: 1.0.0
vendor: JUCE
website: http://juce.com
description: Simple FFT application.
dependencies: juce_audio_basics, juce_audio_devices, juce_audio_formats,
juce_audio_processors, juce_audio_utils, juce_core,
juce_data_structures, juce_dsp, juce_events, juce_graphics,
juce_gui_basics, juce_gui_extra
exporters: xcode_mac, vs2019, linux_make, androidstudio, xcode_iphone
moduleFlags: JUCE_STRICT_REFCOUNTEDPOINTER=1
type: Component
mainClass: SimpleFFTDemo
useLocalCopy: 1
END_JUCE_PIP_METADATA
*******************************************************************************/
#pragma once
//==============================================================================
// Renders a scrolling spectrogram of the audio input: the audio callback fills
// a fifo; once full it is copied to fftData, and a GUI timer performs the FFT
// and draws one new column per completed block.
class SimpleFFTDemo : public AudioAppComponent,
private Timer
{
public:
SimpleFFTDemo() :
#ifdef JUCE_DEMO_RUNNER
AudioAppComponent (getSharedAudioDeviceManager (1, 0)),
#endif
forwardFFT (fftOrder),
spectrogramImage (Image::RGB, 512, 512, true)
{
setOpaque (true);
#ifndef JUCE_DEMO_RUNNER
// Standalone builds must ask for the microphone permission before opening inputs.
RuntimePermissions::request (RuntimePermissions::recordAudio,
[this] (bool granted)
{
int numInputChannels = granted ? 2 : 0;
setAudioChannels (numInputChannels, 2);
});
#else
setAudioChannels (2, 2);
#endif
startTimerHz (60);
setSize (700, 500);
}
~SimpleFFTDemo() override
{
shutdownAudio();
}
//==============================================================================
void prepareToPlay (int /*samplesPerBlockExpected*/, double /*newSampleRate*/) override
{
// (nothing to do here)
}
void releaseResources() override
{
// (nothing to do here)
}
// Audio thread: feed input samples into the fifo and silence the output.
void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override
{
if (bufferToFill.buffer->getNumChannels() > 0)
{
const auto* channelData = bufferToFill.buffer->getReadPointer (0, bufferToFill.startSample);
for (auto i = 0; i < bufferToFill.numSamples; ++i)
pushNextSampleIntoFifo (channelData[i]);
bufferToFill.clearActiveBufferRegion();
}
}
//==============================================================================
void paint (Graphics& g) override
{
g.fillAll (Colours::black);
g.setOpacity (1.0f);
g.drawImage (spectrogramImage, getLocalBounds().toFloat());
}
// Message thread: when a full FFT block is ready, render it and repaint.
void timerCallback() override
{
if (nextFFTBlockReady)
{
drawNextLineOfSpectrogram();
nextFFTBlockReady = false;
repaint();
}
}
// Called per-sample from the audio thread. When the fifo fills, its contents
// are copied to fftData (zero-padded to 2*fftSize) for the GUI to process.
// NOTE(review): nextFFTBlockReady is a plain bool shared between the audio
// and message threads without formal synchronization - works in practice for
// this demo, but confirm against an atomic if adapting this code.
void pushNextSampleIntoFifo (float sample) noexcept
{
// if the fifo contains enough data, set a flag to say
// that the next line should now be rendered..
if (fifoIndex == fftSize)
{
if (! nextFFTBlockReady)
{
zeromem (fftData, sizeof (fftData));
memcpy (fftData, fifo, sizeof (fifo));
nextFFTBlockReady = true;
}
// If the GUI hasn't consumed the previous block yet, this block is dropped.
fifoIndex = 0;
}
fifo[fifoIndex++] = sample;
}
void drawNextLineOfSpectrogram()
{
auto rightHandEdge = spectrogramImage.getWidth() - 1;
auto imageHeight = spectrogramImage.getHeight();
// first, shuffle our image leftwards by 1 pixel..
spectrogramImage.moveImageSection (0, 0, 1, 0, rightHandEdge, imageHeight);
// then render our FFT data..
forwardFFT.performFrequencyOnlyForwardTransform (fftData);
// find the range of values produced, so we can scale our rendering to
// show up the detail clearly
auto maxLevel = FloatVectorOperations::findMinAndMax (fftData, fftSize / 2);
for (auto y = 1; y < imageHeight; ++y)
{
// Log-like skew so lower frequencies get more vertical space.
auto skewedProportionY = 1.0f - std::exp (std::log ((float) y / (float) imageHeight) * 0.2f);
auto fftDataIndex = jlimit (0, fftSize / 2, (int) (skewedProportionY * (int) fftSize / 2));
auto level = jmap (fftData[fftDataIndex], 0.0f, jmax (maxLevel.getEnd(), 1e-5f), 0.0f, 1.0f);
spectrogramImage.setPixelAt (rightHandEdge, y, Colour::fromHSV (level, 1.0f, level, 1.0f));
}
}
enum
{
fftOrder = 10, // FFT of size 2^10 = 1024 points
fftSize = 1 << fftOrder
};
private:
dsp::FFT forwardFFT;
Image spectrogramImage;
// fifo collects incoming samples; fftData is twice fftSize as required by
// performFrequencyOnlyForwardTransform (real input zero-padded).
float fifo [fftSize];
float fftData [2 * fftSize];
int fifoIndex = 0;
bool nextFFTBlockReady = false;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (SimpleFFTDemo)
};