migrating to the latest JUCE version
@@ -2,7 +2,7 @@
   ==============================================================================
 
    This file is part of the JUCE library.
-   Copyright (c) 2020 - Raw Material Software Limited
+   Copyright (c) 2022 - Raw Material Software Limited
 
    JUCE is an open source library subject to commercial or open-source
    licensing.
@@ -1,481 +1,470 @@
|
||||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE library.
|
||||
Copyright (c) 2020 - Raw Material Software Limited
|
||||
|
||||
JUCE is an open source library subject to commercial or open-source
|
||||
licensing.
|
||||
|
||||
The code included in this file is provided under the terms of the ISC license
|
||||
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
|
||||
To use, copy, modify, and/or distribute this software for any purpose with or
|
||||
without fee is hereby granted provided that the above copyright notice and
|
||||
this permission notice appear in all copies.
|
||||
|
||||
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
|
||||
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
|
||||
DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
namespace juce
|
||||
{
|
||||
|
||||
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK) \
|
||||
STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
|
||||
STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
|
||||
METHOD (constructor, "<init>", "(IIIIII)V") \
|
||||
METHOD (getState, "getState", "()I") \
|
||||
METHOD (play, "play", "()V") \
|
||||
METHOD (stop, "stop", "()V") \
|
||||
METHOD (release, "release", "()V") \
|
||||
METHOD (flush, "flush", "()V") \
|
||||
METHOD (write, "write", "([SII)I") \
|
||||
|
||||
DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack")
|
||||
#undef JNI_CLASS_MEMBERS
|
||||
|
||||
//==============================================================================
|
||||
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK) \
|
||||
STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
|
||||
METHOD (constructor, "<init>", "(IIIII)V") \
|
||||
METHOD (getState, "getState", "()I") \
|
||||
METHOD (startRecording, "startRecording", "()V") \
|
||||
METHOD (stop, "stop", "()V") \
|
||||
METHOD (read, "read", "([SII)I") \
|
||||
METHOD (release, "release", "()V") \
|
||||
|
||||
DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord")
|
||||
#undef JNI_CLASS_MEMBERS
|
||||
|
||||
//==============================================================================
|
||||
enum
|
||||
{
|
||||
CHANNEL_OUT_STEREO = 12,
|
||||
CHANNEL_IN_STEREO = 12,
|
||||
CHANNEL_IN_MONO = 16,
|
||||
ENCODING_PCM_16BIT = 2,
|
||||
STREAM_MUSIC = 3,
|
||||
MODE_STREAM = 1,
|
||||
STATE_UNINITIALIZED = 0
|
||||
};
|
||||
|
||||
const char* const javaAudioTypeName = "Android Audio";
|
||||
|
||||
//==============================================================================
|
||||
class AndroidAudioIODevice : public AudioIODevice,
|
||||
public Thread
|
||||
{
|
||||
public:
|
||||
//==============================================================================
|
||||
AndroidAudioIODevice (const String& deviceName)
|
||||
: AudioIODevice (deviceName, javaAudioTypeName),
|
||||
Thread ("audio"),
|
||||
minBufferSizeOut (0), minBufferSizeIn (0), callback (nullptr), sampleRate (0),
|
||||
numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
|
||||
numClientOutputChannels (0), numDeviceOutputChannels (0),
|
||||
actualBufferSize (0), isRunning (false),
|
||||
inputChannelBuffer (1, 1),
|
||||
outputChannelBuffer (1, 1)
|
||||
{
|
||||
JNIEnv* env = getEnv();
|
||||
sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);
|
||||
|
||||
minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack, AudioTrack.getMinBufferSize, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
|
||||
minBufferSizeIn = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO, ENCODING_PCM_16BIT);
|
||||
|
||||
if (minBufferSizeIn <= 0)
|
||||
{
|
||||
minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);
|
||||
|
||||
if (minBufferSizeIn > 0)
|
||||
numDeviceInputChannelsAvailable = 1;
|
||||
else
|
||||
numDeviceInputChannelsAvailable = 0;
|
||||
}
|
||||
|
||||
DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
|
||||
<< sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
|
||||
}
|
||||
|
||||
~AndroidAudioIODevice() override
|
||||
{
|
||||
close();
|
||||
}
|
||||
|
||||
StringArray getOutputChannelNames() override
|
||||
{
|
||||
StringArray s;
|
||||
s.add ("Left");
|
||||
s.add ("Right");
|
||||
return s;
|
||||
}
|
||||
|
||||
StringArray getInputChannelNames() override
|
||||
{
|
||||
StringArray s;
|
||||
|
||||
if (numDeviceInputChannelsAvailable == 2)
|
||||
{
|
||||
s.add ("Left");
|
||||
s.add ("Right");
|
||||
}
|
||||
else if (numDeviceInputChannelsAvailable == 1)
|
||||
{
|
||||
s.add ("Audio Input");
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
Array<double> getAvailableSampleRates() override
|
||||
{
|
||||
Array<double> r;
|
||||
r.add ((double) sampleRate);
|
||||
return r;
|
||||
}
|
||||
|
||||
Array<int> getAvailableBufferSizes() override
|
||||
{
|
||||
Array<int> b;
|
||||
int n = 16;
|
||||
|
||||
for (int i = 0; i < 50; ++i)
|
||||
{
|
||||
b.add (n);
|
||||
n += n < 64 ? 16
|
||||
: (n < 512 ? 32
|
||||
: (n < 1024 ? 64
|
||||
: (n < 2048 ? 128 : 256)));
|
||||
}
|
||||
|
||||
return b;
|
||||
}
|
||||
|
||||
int getDefaultBufferSize() override { return 2048; }
|
||||
|
||||
String open (const BigInteger& inputChannels,
|
||||
const BigInteger& outputChannels,
|
||||
double requestedSampleRate,
|
||||
int bufferSize) override
|
||||
{
|
||||
close();
|
||||
|
||||
if (sampleRate != (int) requestedSampleRate)
|
||||
return "Sample rate not allowed";
|
||||
|
||||
lastError.clear();
|
||||
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
|
||||
|
||||
numDeviceInputChannels = 0;
|
||||
numDeviceOutputChannels = 0;
|
||||
|
||||
activeOutputChans = outputChannels;
|
||||
activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
|
||||
numClientOutputChannels = activeOutputChans.countNumberOfSetBits();
|
||||
|
||||
activeInputChans = inputChannels;
|
||||
activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
|
||||
numClientInputChannels = activeInputChans.countNumberOfSetBits();
|
||||
|
||||
actualBufferSize = preferredBufferSize;
|
||||
inputChannelBuffer.setSize (2, actualBufferSize);
|
||||
inputChannelBuffer.clear();
|
||||
outputChannelBuffer.setSize (2, actualBufferSize);
|
||||
outputChannelBuffer.clear();
|
||||
|
||||
JNIEnv* env = getEnv();
|
||||
|
||||
if (numClientOutputChannels > 0)
|
||||
{
|
||||
numDeviceOutputChannels = 2;
|
||||
outputDevice = GlobalRef (LocalRef<jobject>(env->NewObject (AudioTrack, AudioTrack.constructor,
|
||||
STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
|
||||
(jint) (minBufferSizeOut * numDeviceOutputChannels * static_cast<int> (sizeof (int16))), MODE_STREAM)));
|
||||
|
||||
const bool supportsUnderrunCount = (getAndroidSDKVersion() >= 24);
|
||||
getUnderrunCount = supportsUnderrunCount ? env->GetMethodID (AudioTrack, "getUnderrunCount", "()I") : nullptr;
|
||||
|
||||
int outputDeviceState = env->CallIntMethod (outputDevice, AudioTrack.getState);
|
||||
if (outputDeviceState > 0)
|
||||
{
|
||||
isRunning = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// failed to open the device
|
||||
outputDevice.clear();
|
||||
lastError = "Error opening audio output device: android.media.AudioTrack failed with state = " + String (outputDeviceState);
|
||||
}
|
||||
}
|
||||
|
||||
if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
|
||||
{
|
||||
if (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
|
||||
{
|
||||
// If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
|
||||
// before trying to open an audio input device. This is not going to work!
|
||||
jassertfalse;
|
||||
|
||||
inputDevice.clear();
|
||||
lastError = "Error opening audio input device: the app was not granted android.permission.RECORD_AUDIO";
|
||||
}
|
||||
else
|
||||
{
|
||||
numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
|
||||
inputDevice = GlobalRef (LocalRef<jobject>(env->NewObject (AudioRecord, AudioRecord.constructor,
|
||||
0 /* (default audio source) */, sampleRate,
|
||||
numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
|
||||
ENCODING_PCM_16BIT,
|
||||
(jint) (minBufferSizeIn * numDeviceInputChannels * static_cast<int> (sizeof (int16))))));
|
||||
|
||||
int inputDeviceState = env->CallIntMethod (inputDevice, AudioRecord.getState);
|
||||
if (inputDeviceState > 0)
|
||||
{
|
||||
isRunning = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// failed to open the device
|
||||
inputDevice.clear();
|
||||
lastError = "Error opening audio input device: android.media.AudioRecord failed with state = " + String (inputDeviceState);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (isRunning)
|
||||
{
|
||||
if (outputDevice != nullptr)
|
||||
env->CallVoidMethod (outputDevice, AudioTrack.play);
|
||||
|
||||
if (inputDevice != nullptr)
|
||||
env->CallVoidMethod (inputDevice, AudioRecord.startRecording);
|
||||
|
||||
startThread (8);
|
||||
}
|
||||
else
|
||||
{
|
||||
closeDevices();
|
||||
}
|
||||
|
||||
return lastError;
|
||||
}
|
||||
|
||||
void close() override
|
||||
{
|
||||
if (isRunning)
|
||||
{
|
||||
stopThread (2000);
|
||||
isRunning = false;
|
||||
closeDevices();
|
||||
}
|
||||
}
|
||||
|
||||
int getOutputLatencyInSamples() override { return (minBufferSizeOut * 3) / 4; }
|
||||
int getInputLatencyInSamples() override { return (minBufferSizeIn * 3) / 4; }
|
||||
bool isOpen() override { return isRunning; }
|
||||
int getCurrentBufferSizeSamples() override { return actualBufferSize; }
|
||||
int getCurrentBitDepth() override { return 16; }
|
||||
double getCurrentSampleRate() override { return sampleRate; }
|
||||
BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
|
||||
BigInteger getActiveInputChannels() const override { return activeInputChans; }
|
||||
String getLastError() override { return lastError; }
|
||||
bool isPlaying() override { return isRunning && callback != nullptr; }
|
||||
|
||||
int getXRunCount() const noexcept override
|
||||
{
|
||||
if (outputDevice != nullptr && getUnderrunCount != nullptr)
|
||||
return getEnv()->CallIntMethod (outputDevice, getUnderrunCount);
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
void start (AudioIODeviceCallback* newCallback) override
|
||||
{
|
||||
if (isRunning && callback != newCallback)
|
||||
{
|
||||
if (newCallback != nullptr)
|
||||
newCallback->audioDeviceAboutToStart (this);
|
||||
|
||||
const ScopedLock sl (callbackLock);
|
||||
callback = newCallback;
|
||||
}
|
||||
}
|
||||
|
||||
void stop() override
|
||||
{
|
||||
if (isRunning)
|
||||
{
|
||||
AudioIODeviceCallback* lastCallback;
|
||||
|
||||
{
|
||||
const ScopedLock sl (callbackLock);
|
||||
lastCallback = callback;
|
||||
callback = nullptr;
|
||||
}
|
||||
|
||||
if (lastCallback != nullptr)
|
||||
lastCallback->audioDeviceStopped();
|
||||
}
|
||||
}
|
||||
|
||||
void run() override
|
||||
{
|
||||
JNIEnv* env = getEnv();
|
||||
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));
|
||||
|
||||
while (! threadShouldExit())
|
||||
{
|
||||
if (inputDevice != nullptr)
|
||||
{
|
||||
jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);
|
||||
|
||||
if (numRead < actualBufferSize * numDeviceInputChannels)
|
||||
{
|
||||
DBG ("Audio read under-run! " << numRead);
|
||||
}
|
||||
|
||||
jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);
|
||||
|
||||
for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
|
||||
{
|
||||
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getWritePointer (chan));
|
||||
|
||||
if (chan < numDeviceInputChannels)
|
||||
{
|
||||
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
|
||||
d.convertSamples (s, actualBufferSize);
|
||||
}
|
||||
else
|
||||
{
|
||||
d.clearSamples (actualBufferSize);
|
||||
}
|
||||
}
|
||||
|
||||
env->ReleaseShortArrayElements (audioBuffer, src, 0);
|
||||
}
|
||||
|
||||
if (threadShouldExit())
|
||||
break;
|
||||
|
||||
{
|
||||
const ScopedLock sl (callbackLock);
|
||||
|
||||
if (callback != nullptr)
|
||||
{
|
||||
callback->audioDeviceIOCallback (inputChannelBuffer.getArrayOfReadPointers(), numClientInputChannels,
|
||||
outputChannelBuffer.getArrayOfWritePointers(), numClientOutputChannels,
|
||||
actualBufferSize);
|
||||
}
|
||||
else
|
||||
{
|
||||
outputChannelBuffer.clear();
|
||||
}
|
||||
}
|
||||
|
||||
if (outputDevice != nullptr)
|
||||
{
|
||||
if (threadShouldExit())
|
||||
break;
|
||||
|
||||
jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);
|
||||
|
||||
for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
|
||||
{
|
||||
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);
|
||||
|
||||
const float* const sourceChanData = outputChannelBuffer.getReadPointer (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
|
||||
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
|
||||
d.convertSamples (s, actualBufferSize);
|
||||
}
|
||||
|
||||
env->ReleaseShortArrayElements (audioBuffer, dest, 0);
|
||||
jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
|
||||
|
||||
if (numWritten < actualBufferSize * numDeviceOutputChannels)
|
||||
{
|
||||
DBG ("Audio write underrun! " << numWritten);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int minBufferSizeOut, minBufferSizeIn;
|
||||
|
||||
private:
|
||||
//==============================================================================
|
||||
CriticalSection callbackLock;
|
||||
AudioIODeviceCallback* callback;
|
||||
jint sampleRate;
|
||||
int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
|
||||
int numClientOutputChannels, numDeviceOutputChannels;
|
||||
int actualBufferSize;
|
||||
bool isRunning;
|
||||
String lastError;
|
||||
BigInteger activeOutputChans, activeInputChans;
|
||||
GlobalRef outputDevice, inputDevice;
|
||||
AudioBuffer<float> inputChannelBuffer, outputChannelBuffer;
|
||||
jmethodID getUnderrunCount = nullptr;
|
||||
|
||||
void closeDevices()
|
||||
{
|
||||
if (outputDevice != nullptr)
|
||||
{
|
||||
outputDevice.callVoidMethod (AudioTrack.stop);
|
||||
outputDevice.callVoidMethod (AudioTrack.release);
|
||||
outputDevice.clear();
|
||||
}
|
||||
|
||||
if (inputDevice != nullptr)
|
||||
{
|
||||
inputDevice.callVoidMethod (AudioRecord.stop);
|
||||
inputDevice.callVoidMethod (AudioRecord.release);
|
||||
inputDevice.clear();
|
||||
}
|
||||
}
|
||||
|
||||
JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
|
||||
};
|
||||
|
||||
//==============================================================================
|
||||
class AndroidAudioIODeviceType : public AudioIODeviceType
|
||||
{
|
||||
public:
|
||||
AndroidAudioIODeviceType() : AudioIODeviceType (javaAudioTypeName) {}
|
||||
|
||||
//==============================================================================
|
||||
void scanForDevices() {}
|
||||
StringArray getDeviceNames (bool) const { return StringArray (javaAudioTypeName); }
|
||||
int getDefaultDeviceIndex (bool) const { return 0; }
|
||||
int getIndexOfDevice (AudioIODevice* device, bool) const { return device != nullptr ? 0 : -1; }
|
||||
bool hasSeparateInputsAndOutputs() const { return false; }
|
||||
|
||||
AudioIODevice* createDevice (const String& outputDeviceName,
|
||||
const String& inputDeviceName)
|
||||
{
|
||||
std::unique_ptr<AndroidAudioIODevice> dev;
|
||||
|
||||
if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
|
||||
{
|
||||
dev.reset (new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
|
||||
: inputDeviceName));
|
||||
|
||||
if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
|
||||
dev = nullptr;
|
||||
}
|
||||
|
||||
return dev.release();
|
||||
}
|
||||
|
||||
private:
|
||||
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
|
||||
};
|
||||
|
||||
|
||||
//==============================================================================
|
||||
extern bool isOboeAvailable();
|
||||
extern bool isOpenSLAvailable();
|
||||
|
||||
} // namespace juce
|
||||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE library.
|
||||
Copyright (c) 2022 - Raw Material Software Limited
|
||||
|
||||
JUCE is an open source library subject to commercial or open-source
|
||||
licensing.
|
||||
|
||||
The code included in this file is provided under the terms of the ISC license
|
||||
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
|
||||
To use, copy, modify, and/or distribute this software for any purpose with or
|
||||
without fee is hereby granted provided that the above copyright notice and
|
||||
this permission notice appear in all copies.
|
||||
|
||||
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
|
||||
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
|
||||
DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
namespace juce
|
||||
{
|
||||
|
||||
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK) \
|
||||
STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
|
||||
STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
|
||||
METHOD (constructor, "<init>", "(IIIIII)V") \
|
||||
METHOD (getState, "getState", "()I") \
|
||||
METHOD (play, "play", "()V") \
|
||||
METHOD (stop, "stop", "()V") \
|
||||
METHOD (release, "release", "()V") \
|
||||
METHOD (flush, "flush", "()V") \
|
||||
METHOD (write, "write", "([SII)I") \
|
||||
|
||||
DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack")
|
||||
#undef JNI_CLASS_MEMBERS
|
||||
|
||||
//==============================================================================
|
||||
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK) \
|
||||
STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
|
||||
METHOD (constructor, "<init>", "(IIIII)V") \
|
||||
METHOD (getState, "getState", "()I") \
|
||||
METHOD (startRecording, "startRecording", "()V") \
|
||||
METHOD (stop, "stop", "()V") \
|
||||
METHOD (read, "read", "([SII)I") \
|
||||
METHOD (release, "release", "()V") \
|
||||
|
||||
DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord")
|
||||
#undef JNI_CLASS_MEMBERS
|
||||
|
||||
//==============================================================================
|
||||
enum
|
||||
{
|
||||
CHANNEL_OUT_STEREO = 12,
|
||||
CHANNEL_IN_STEREO = 12,
|
||||
CHANNEL_IN_MONO = 16,
|
||||
ENCODING_PCM_16BIT = 2,
|
||||
STREAM_MUSIC = 3,
|
||||
MODE_STREAM = 1,
|
||||
STATE_UNINITIALIZED = 0
|
||||
};
|
||||
|
||||
const char* const javaAudioTypeName = "Android Audio";
|
||||
|
||||
//==============================================================================
|
||||
class AndroidAudioIODevice : public AudioIODevice,
|
||||
public Thread
|
||||
{
|
||||
public:
|
||||
//==============================================================================
|
||||
AndroidAudioIODevice (const String& deviceName)
|
||||
: AudioIODevice (deviceName, javaAudioTypeName),
|
||||
Thread ("audio"),
|
||||
minBufferSizeOut (0), minBufferSizeIn (0), callback (nullptr), sampleRate (0),
|
||||
numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
|
||||
numClientOutputChannels (0), numDeviceOutputChannels (0),
|
||||
actualBufferSize (0), isRunning (false),
|
||||
inputChannelBuffer (1, 1),
|
||||
outputChannelBuffer (1, 1)
|
||||
{
|
||||
JNIEnv* env = getEnv();
|
||||
sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);
|
||||
|
||||
minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack, AudioTrack.getMinBufferSize, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
|
||||
minBufferSizeIn = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO, ENCODING_PCM_16BIT);
|
||||
|
||||
if (minBufferSizeIn <= 0)
|
||||
{
|
||||
minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);
|
||||
|
||||
if (minBufferSizeIn > 0)
|
||||
numDeviceInputChannelsAvailable = 1;
|
||||
else
|
||||
numDeviceInputChannelsAvailable = 0;
|
||||
}
|
||||
|
||||
DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
|
||||
<< sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
|
||||
}
|
||||
|
||||
~AndroidAudioIODevice() override
|
||||
{
|
||||
close();
|
||||
}
|
||||
|
||||
StringArray getOutputChannelNames() override
|
||||
{
|
||||
StringArray s;
|
||||
s.add ("Left");
|
||||
s.add ("Right");
|
||||
return s;
|
||||
}
|
||||
|
||||
StringArray getInputChannelNames() override
|
||||
{
|
||||
StringArray s;
|
||||
|
||||
if (numDeviceInputChannelsAvailable == 2)
|
||||
{
|
||||
s.add ("Left");
|
||||
s.add ("Right");
|
||||
}
|
||||
else if (numDeviceInputChannelsAvailable == 1)
|
||||
{
|
||||
s.add ("Audio Input");
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
Array<double> getAvailableSampleRates() override
|
||||
{
|
||||
Array<double> r;
|
||||
r.add ((double) sampleRate);
|
||||
return r;
|
||||
}
|
||||
|
||||
Array<int> getAvailableBufferSizes() override
|
||||
{
|
||||
Array<int> b;
|
||||
int n = 16;
|
||||
|
||||
for (int i = 0; i < 50; ++i)
|
||||
{
|
||||
b.add (n);
|
||||
n += n < 64 ? 16
|
||||
: (n < 512 ? 32
|
||||
: (n < 1024 ? 64
|
||||
: (n < 2048 ? 128 : 256)));
|
||||
}
|
||||
|
||||
return b;
|
||||
}
|
||||
|
||||
int getDefaultBufferSize() override { return 2048; }
|
||||
|
||||
String open (const BigInteger& inputChannels,
|
||||
const BigInteger& outputChannels,
|
||||
double requestedSampleRate,
|
||||
int bufferSize) override
|
||||
{
|
||||
close();
|
||||
|
||||
if (sampleRate != (int) requestedSampleRate)
|
||||
return "Sample rate not allowed";
|
||||
|
||||
lastError.clear();
|
||||
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
|
||||
|
||||
numDeviceInputChannels = 0;
|
||||
numDeviceOutputChannels = 0;
|
||||
|
||||
activeOutputChans = outputChannels;
|
||||
activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
|
||||
numClientOutputChannels = activeOutputChans.countNumberOfSetBits();
|
||||
|
||||
activeInputChans = inputChannels;
|
||||
activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
|
||||
numClientInputChannels = activeInputChans.countNumberOfSetBits();
|
||||
|
||||
actualBufferSize = preferredBufferSize;
|
||||
inputChannelBuffer.setSize (2, actualBufferSize);
|
||||
inputChannelBuffer.clear();
|
||||
outputChannelBuffer.setSize (2, actualBufferSize);
|
||||
outputChannelBuffer.clear();
|
||||
|
||||
JNIEnv* env = getEnv();
|
||||
|
||||
if (numClientOutputChannels > 0)
|
||||
{
|
||||
numDeviceOutputChannels = 2;
|
||||
outputDevice = GlobalRef (LocalRef<jobject>(env->NewObject (AudioTrack, AudioTrack.constructor,
|
||||
STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
|
||||
(jint) (minBufferSizeOut * numDeviceOutputChannels * static_cast<int> (sizeof (int16))), MODE_STREAM)));
|
||||
|
||||
const bool supportsUnderrunCount = (getAndroidSDKVersion() >= 24);
|
||||
getUnderrunCount = supportsUnderrunCount ? env->GetMethodID (AudioTrack, "getUnderrunCount", "()I") : nullptr;
|
||||
|
||||
int outputDeviceState = env->CallIntMethod (outputDevice, AudioTrack.getState);
|
||||
if (outputDeviceState > 0)
|
||||
{
|
||||
isRunning = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// failed to open the device
|
||||
outputDevice.clear();
|
||||
lastError = "Error opening audio output device: android.media.AudioTrack failed with state = " + String (outputDeviceState);
|
||||
}
|
||||
}
|
||||
|
||||
if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
|
||||
{
|
||||
if (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
|
||||
{
|
||||
// If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
|
||||
// before trying to open an audio input device. This is not going to work!
|
||||
jassertfalse;
|
||||
|
||||
inputDevice.clear();
|
||||
lastError = "Error opening audio input device: the app was not granted android.permission.RECORD_AUDIO";
|
||||
}
|
||||
else
|
||||
{
|
||||
numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
|
||||
inputDevice = GlobalRef (LocalRef<jobject>(env->NewObject (AudioRecord, AudioRecord.constructor,
|
||||
0 /* (default audio source) */, sampleRate,
|
||||
numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
|
||||
ENCODING_PCM_16BIT,
|
||||
(jint) (minBufferSizeIn * numDeviceInputChannels * static_cast<int> (sizeof (int16))))));
|
||||
|
||||
int inputDeviceState = env->CallIntMethod (inputDevice, AudioRecord.getState);
|
||||
if (inputDeviceState > 0)
|
||||
{
|
||||
isRunning = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// failed to open the device
|
||||
inputDevice.clear();
|
||||
lastError = "Error opening audio input device: android.media.AudioRecord failed with state = " + String (inputDeviceState);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (isRunning)
|
||||
{
|
||||
if (outputDevice != nullptr)
|
||||
env->CallVoidMethod (outputDevice, AudioTrack.play);
|
||||
|
||||
if (inputDevice != nullptr)
|
||||
env->CallVoidMethod (inputDevice, AudioRecord.startRecording);
|
||||
|
||||
startThread (8);
|
||||
}
|
||||
else
|
||||
{
|
||||
closeDevices();
|
||||
}
|
||||
|
||||
return lastError;
|
||||
}
|
||||
|
||||
void close() override
|
||||
{
|
||||
if (isRunning)
|
||||
{
|
||||
stopThread (2000);
|
||||
isRunning = false;
|
||||
closeDevices();
|
||||
}
|
||||
}
|
||||
|
||||
int getOutputLatencyInSamples() override { return (minBufferSizeOut * 3) / 4; }
|
||||
int getInputLatencyInSamples() override { return (minBufferSizeIn * 3) / 4; }
|
||||
bool isOpen() override { return isRunning; }
|
||||
int getCurrentBufferSizeSamples() override { return actualBufferSize; }
|
||||
int getCurrentBitDepth() override { return 16; }
|
||||
double getCurrentSampleRate() override { return sampleRate; }
|
||||
BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
|
||||
BigInteger getActiveInputChannels() const override { return activeInputChans; }
|
||||
String getLastError() override { return lastError; }
|
||||
bool isPlaying() override { return isRunning && callback != nullptr; }
|
||||
|
||||
int getXRunCount() const noexcept override
|
||||
{
|
||||
if (outputDevice != nullptr && getUnderrunCount != nullptr)
|
||||
return getEnv()->CallIntMethod (outputDevice, getUnderrunCount);
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
void start (AudioIODeviceCallback* newCallback) override
|
||||
{
|
||||
if (isRunning && callback != newCallback)
|
||||
{
|
||||
if (newCallback != nullptr)
|
||||
newCallback->audioDeviceAboutToStart (this);
|
||||
|
||||
const ScopedLock sl (callbackLock);
|
||||
callback = newCallback;
|
||||
}
|
||||
}
|
||||
|
||||
void stop() override
|
||||
{
|
||||
if (isRunning)
|
||||
{
|
||||
AudioIODeviceCallback* lastCallback;
|
||||
|
||||
{
|
||||
const ScopedLock sl (callbackLock);
|
||||
lastCallback = callback;
|
||||
callback = nullptr;
|
||||
}
|
||||
|
||||
if (lastCallback != nullptr)
|
||||
lastCallback->audioDeviceStopped();
|
||||
}
|
||||
}
|
||||
|
||||
void run() override
|
||||
{
|
||||
JNIEnv* env = getEnv();
|
||||
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));
|
||||
|
||||
using NativeInt16 = AudioData::Format<AudioData::Int16, AudioData::NativeEndian>;
|
||||
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||
|
||||
while (! threadShouldExit())
|
||||
{
|
||||
if (inputDevice != nullptr)
|
||||
{
|
||||
jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);
|
||||
|
||||
if (numRead < actualBufferSize * numDeviceInputChannels)
|
||||
{
|
||||
DBG ("Audio read under-run! " << numRead);
|
||||
}
|
||||
|
||||
jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);
|
||||
|
||||
AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16> { reinterpret_cast<const uint16*> (src), numDeviceInputChannels },
|
||||
AudioData::NonInterleavedDest<NativeFloat32> { inputChannelBuffer.getArrayOfWritePointers(), inputChannelBuffer.getNumChannels() },
|
||||
actualBufferSize);
|
||||
|
||||
env->ReleaseShortArrayElements (audioBuffer, src, 0);
|
||||
}
|
||||
|
||||
if (threadShouldExit())
|
||||
break;
|
||||
|
||||
{
|
||||
const ScopedLock sl (callbackLock);
|
||||
|
||||
if (callback != nullptr)
|
||||
{
|
||||
callback->audioDeviceIOCallbackWithContext (inputChannelBuffer.getArrayOfReadPointers(),
|
||||
numClientInputChannels,
|
||||
outputChannelBuffer.getArrayOfWritePointers(),
|
||||
numClientOutputChannels,
|
||||
actualBufferSize, {});
|
||||
}
|
||||
else
|
||||
{
|
||||
outputChannelBuffer.clear();
|
||||
}
|
||||
}
|
||||
|
||||
if (outputDevice != nullptr)
|
||||
{
|
||||
if (threadShouldExit())
|
||||
break;
|
||||
|
||||
jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);
|
||||
|
||||
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { outputChannelBuffer.getArrayOfReadPointers(), outputChannelBuffer.getNumChannels() },
|
||||
AudioData::InterleavedDest<NativeInt16> { reinterpret_cast<uint16*> (dest), numDeviceOutputChannels },
|
||||
actualBufferSize);
|
||||
|
||||
env->ReleaseShortArrayElements (audioBuffer, dest, 0);
|
||||
jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
|
||||
|
||||
if (numWritten < actualBufferSize * numDeviceOutputChannels)
|
||||
{
|
||||
DBG ("Audio write underrun! " << numWritten);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int minBufferSizeOut, minBufferSizeIn;
|
||||
|
||||
private:
|
||||
//==============================================================================
|
||||
CriticalSection callbackLock;
|
||||
AudioIODeviceCallback* callback;
|
||||
jint sampleRate;
|
||||
int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
|
||||
int numClientOutputChannels, numDeviceOutputChannels;
|
||||
int actualBufferSize;
|
||||
bool isRunning;
|
||||
String lastError;
|
||||
BigInteger activeOutputChans, activeInputChans;
|
||||
GlobalRef outputDevice, inputDevice;
|
||||
AudioBuffer<float> inputChannelBuffer, outputChannelBuffer;
|
||||
jmethodID getUnderrunCount = nullptr;
|
||||
|
||||
void closeDevices()
|
||||
{
|
||||
if (outputDevice != nullptr)
|
||||
{
|
||||
outputDevice.callVoidMethod (AudioTrack.stop);
|
||||
outputDevice.callVoidMethod (AudioTrack.release);
|
||||
outputDevice.clear();
|
||||
}
|
||||
|
||||
if (inputDevice != nullptr)
|
||||
{
|
||||
inputDevice.callVoidMethod (AudioRecord.stop);
|
||||
inputDevice.callVoidMethod (AudioRecord.release);
|
||||
inputDevice.clear();
|
||||
}
|
||||
}
|
||||
|
||||
JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
|
||||
};
|
||||
|
||||
//==============================================================================
|
||||
class AndroidAudioIODeviceType : public AudioIODeviceType
|
||||
{
|
||||
public:
|
||||
AndroidAudioIODeviceType() : AudioIODeviceType (javaAudioTypeName) {}
|
||||
|
||||
//==============================================================================
|
||||
void scanForDevices() {}
|
||||
StringArray getDeviceNames (bool) const { return StringArray (javaAudioTypeName); }
|
||||
int getDefaultDeviceIndex (bool) const { return 0; }
|
||||
int getIndexOfDevice (AudioIODevice* device, bool) const { return device != nullptr ? 0 : -1; }
|
||||
bool hasSeparateInputsAndOutputs() const { return false; }
|
||||
|
||||
AudioIODevice* createDevice (const String& outputDeviceName,
|
||||
const String& inputDeviceName)
|
||||
{
|
||||
std::unique_ptr<AndroidAudioIODevice> dev;
|
||||
|
||||
if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
|
||||
{
|
||||
dev.reset (new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
|
||||
: inputDeviceName));
|
||||
|
||||
if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
|
||||
dev = nullptr;
|
||||
}
|
||||
|
||||
return dev.release();
|
||||
}
|
||||
|
||||
private:
|
||||
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
|
||||
};
|
||||
|
||||
|
||||
//==============================================================================
|
||||
extern bool isOboeAvailable();
|
||||
extern bool isOpenSLAvailable();
|
||||
|
||||
} // namespace juce
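Note (added for orientation, not part of the diff): the updated run() loop above replaces the per-channel AudioData::Pointer conversion loops with the JUCE 7 AudioData::deinterleaveSamples / interleaveSamples helpers and delivers audio through audioDeviceIOCallbackWithContext. A minimal sketch of the new conversion call, assuming only the API that appears in the added lines (the helper name and its arguments below are illustrative):

    // Sketch: deinterleave an interleaved int16 capture block into per-channel floats,
    // mirroring the AudioRecord path in the new run() implementation.
    using NativeInt16   = juce::AudioData::Format<juce::AudioData::Int16,   juce::AudioData::NativeEndian>;
    using NativeFloat32 = juce::AudioData::Format<juce::AudioData::Float32, juce::AudioData::NativeEndian>;

    static void deinterleaveCapturedBlock (const juce::int16* interleavedIn,
                                           juce::AudioBuffer<float>& perChannelOut,
                                           int numSamples)
    {
        juce::AudioData::deinterleaveSamples (juce::AudioData::InterleavedSource<NativeInt16>    { reinterpret_cast<const juce::uint16*> (interleavedIn),
                                                                                                   perChannelOut.getNumChannels() },
                                              juce::AudioData::NonInterleavedDest<NativeFloat32> { perChannelOut.getArrayOfWritePointers(),
                                                                                                   perChannelOut.getNumChannels() },
                                              numSamples);
    }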
|
||||
|
@@ -1,131 +1,131 @@
|
||||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE library.
|
||||
Copyright (c) 2020 - Raw Material Software Limited
|
||||
|
||||
JUCE is an open source library subject to commercial or open-source
|
||||
licensing.
|
||||
|
||||
The code included in this file is provided under the terms of the ISC license
|
||||
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
|
||||
To use, copy, modify, and/or distribute this software for any purpose with or
|
||||
without fee is hereby granted provided that the above copyright notice and
|
||||
this permission notice appear in all copies.
|
||||
|
||||
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
|
||||
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
|
||||
DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
namespace juce
|
||||
{
|
||||
|
||||
//==============================================================================
|
||||
/**
|
||||
Some shared helpers methods for using the high-performance audio paths on
|
||||
Android devices (OpenSL and Oboe).
|
||||
|
||||
@tags{Audio}
|
||||
*/
|
||||
namespace AndroidHighPerformanceAudioHelpers
|
||||
{
|
||||
//==============================================================================
|
||||
static double getNativeSampleRate()
|
||||
{
|
||||
return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue();
|
||||
}
|
||||
|
||||
static int getNativeBufferSizeHint()
|
||||
{
|
||||
// This property is a hint of a native buffer size but it does not guarantee the size used.
|
||||
auto deviceBufferSize = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue();
|
||||
|
||||
if (deviceBufferSize == 0)
|
||||
return 192;
|
||||
|
||||
return deviceBufferSize;
|
||||
}
|
||||
|
||||
static bool isProAudioDevice()
|
||||
{
|
||||
static bool isSapaSupported = SystemStats::getDeviceManufacturer().containsIgnoreCase ("SAMSUNG")
|
||||
&& DynamicLibrary().open ("libapa_jni.so");
|
||||
|
||||
return androidHasSystemFeature ("android.hardware.audio.pro") || isSapaSupported;
|
||||
}
|
||||
|
||||
static bool hasLowLatencyAudioPath()
|
||||
{
|
||||
return androidHasSystemFeature ("android.hardware.audio.low_latency");
|
||||
}
|
||||
|
||||
static bool canUseHighPerformanceAudioPath (int nativeBufferSize, int requestedBufferSize, int requestedSampleRate)
|
||||
{
|
||||
return ((requestedBufferSize % nativeBufferSize) == 0)
|
||||
&& (requestedSampleRate == getNativeSampleRate())
|
||||
&& isProAudioDevice();
|
||||
}
|
||||
|
||||
//==============================================================================
|
||||
static int getMinimumBuffersToEnqueue (int nativeBufferSize, double requestedSampleRate)
|
||||
{
|
||||
if (canUseHighPerformanceAudioPath (nativeBufferSize, nativeBufferSize, (int) requestedSampleRate))
|
||||
{
|
||||
// see https://developer.android.com/ndk/guides/audio/opensl/opensl-prog-notes.html#sandp
|
||||
// "For Android 4.2 (API level 17) and earlier, a buffer count of two or more is required
|
||||
// for lower latency. Beginning with Android 4.3 (API level 18), a buffer count of one
|
||||
// is sufficient for lower latency."
|
||||
return (getAndroidSDKVersion() >= 18 ? 1 : 2);
|
||||
}
|
||||
|
||||
// not using low-latency path so we can use the absolute minimum number of buffers to queue
|
||||
return 1;
|
||||
}
|
||||
|
||||
static int buffersToQueueForBufferDuration (int nativeBufferSize, int bufferDurationInMs, double sampleRate) noexcept
|
||||
{
|
||||
auto maxBufferFrames = static_cast<int> (std::ceil (bufferDurationInMs * sampleRate / 1000.0));
|
||||
auto maxNumBuffers = static_cast<int> (std::ceil (static_cast<double> (maxBufferFrames)
|
||||
/ static_cast<double> (nativeBufferSize)));
|
||||
|
||||
return jmax (getMinimumBuffersToEnqueue (nativeBufferSize, sampleRate), maxNumBuffers);
|
||||
}
|
||||
|
||||
static int getMaximumBuffersToEnqueue (int nativeBufferSize, double maximumSampleRate) noexcept
|
||||
{
|
||||
static constexpr int maxBufferSizeMs = 200;
|
||||
|
||||
return jmax (8, buffersToQueueForBufferDuration (nativeBufferSize, maxBufferSizeMs, maximumSampleRate));
|
||||
}
|
||||
|
||||
static Array<int> getAvailableBufferSizes (int nativeBufferSize, Array<double> availableSampleRates)
|
||||
{
|
||||
auto minBuffersToQueue = getMinimumBuffersToEnqueue (nativeBufferSize, getNativeSampleRate());
|
||||
auto maxBuffersToQueue = getMaximumBuffersToEnqueue (nativeBufferSize, findMaximum (availableSampleRates.getRawDataPointer(),
|
||||
availableSampleRates.size()));
|
||||
|
||||
Array<int> bufferSizes;
|
||||
|
||||
for (int i = minBuffersToQueue; i <= maxBuffersToQueue; ++i)
|
||||
bufferSizes.add (i * nativeBufferSize);
|
||||
|
||||
return bufferSizes;
|
||||
}
|
||||
|
||||
static int getDefaultBufferSize (int nativeBufferSize, double currentSampleRate)
|
||||
{
|
||||
static constexpr int defaultBufferSizeForLowLatencyDeviceMs = 40;
|
||||
static constexpr int defaultBufferSizeForStandardLatencyDeviceMs = 100;
|
||||
|
||||
auto defaultBufferLength = (hasLowLatencyAudioPath() ? defaultBufferSizeForLowLatencyDeviceMs
|
||||
: defaultBufferSizeForStandardLatencyDeviceMs);
|
||||
|
||||
auto defaultBuffersToEnqueue = buffersToQueueForBufferDuration (nativeBufferSize, defaultBufferLength, currentSampleRate);
|
||||
return defaultBuffersToEnqueue * nativeBufferSize;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace juce
|
||||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE library.
|
||||
Copyright (c) 2022 - Raw Material Software Limited
|
||||
|
||||
JUCE is an open source library subject to commercial or open-source
|
||||
licensing.
|
||||
|
||||
The code included in this file is provided under the terms of the ISC license
|
||||
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
|
||||
To use, copy, modify, and/or distribute this software for any purpose with or
|
||||
without fee is hereby granted provided that the above copyright notice and
|
||||
this permission notice appear in all copies.
|
||||
|
||||
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
|
||||
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
|
||||
DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
namespace juce
|
||||
{
|
||||
|
||||
//==============================================================================
|
||||
/**
|
||||
Some shared helpers methods for using the high-performance audio paths on
|
||||
Android devices (OpenSL and Oboe).
|
||||
|
||||
@tags{Audio}
|
||||
*/
|
||||
namespace AndroidHighPerformanceAudioHelpers
|
||||
{
|
||||
//==============================================================================
|
||||
static double getNativeSampleRate()
|
||||
{
|
||||
return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue();
|
||||
}
|
||||
|
||||
static int getNativeBufferSizeHint()
|
||||
{
|
||||
// This property is a hint of a native buffer size but it does not guarantee the size used.
|
||||
auto deviceBufferSize = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue();
|
||||
|
||||
if (deviceBufferSize == 0)
|
||||
return 192;
|
||||
|
||||
return deviceBufferSize;
|
||||
}
|
||||
|
||||
static bool isProAudioDevice()
|
||||
{
|
||||
static bool isSapaSupported = SystemStats::getDeviceManufacturer().containsIgnoreCase ("SAMSUNG")
|
||||
&& DynamicLibrary().open ("libapa_jni.so");
|
||||
|
||||
return androidHasSystemFeature ("android.hardware.audio.pro") || isSapaSupported;
|
||||
}
|
||||
|
||||
static bool hasLowLatencyAudioPath()
|
||||
{
|
||||
return androidHasSystemFeature ("android.hardware.audio.low_latency");
|
||||
}
|
||||
|
||||
static bool canUseHighPerformanceAudioPath (int nativeBufferSize, int requestedBufferSize, int requestedSampleRate)
|
||||
{
|
||||
return ((requestedBufferSize % nativeBufferSize) == 0)
|
||||
&& (requestedSampleRate == getNativeSampleRate())
|
||||
&& isProAudioDevice();
|
||||
}
|
||||
|
||||
//==============================================================================
|
||||
static int getMinimumBuffersToEnqueue (int nativeBufferSize, double requestedSampleRate)
|
||||
{
|
||||
if (canUseHighPerformanceAudioPath (nativeBufferSize, nativeBufferSize, (int) requestedSampleRate))
|
||||
{
|
||||
// see https://developer.android.com/ndk/guides/audio/opensl/opensl-prog-notes.html#sandp
|
||||
// "For Android 4.2 (API level 17) and earlier, a buffer count of two or more is required
|
||||
// for lower latency. Beginning with Android 4.3 (API level 18), a buffer count of one
|
||||
// is sufficient for lower latency."
|
||||
return (getAndroidSDKVersion() >= 18 ? 1 : 2);
|
||||
}
|
||||
|
||||
// not using low-latency path so we can use the absolute minimum number of buffers to queue
|
||||
return 1;
|
||||
}
|
||||
|
||||
static int buffersToQueueForBufferDuration (int nativeBufferSize, int bufferDurationInMs, double sampleRate) noexcept
|
||||
{
|
||||
auto maxBufferFrames = static_cast<int> (std::ceil (bufferDurationInMs * sampleRate / 1000.0));
|
||||
auto maxNumBuffers = static_cast<int> (std::ceil (static_cast<double> (maxBufferFrames)
|
||||
/ static_cast<double> (nativeBufferSize)));
|
||||
|
||||
return jmax (getMinimumBuffersToEnqueue (nativeBufferSize, sampleRate), maxNumBuffers);
|
||||
}
|
||||
|
||||
static int getMaximumBuffersToEnqueue (int nativeBufferSize, double maximumSampleRate) noexcept
|
||||
{
|
||||
static constexpr int maxBufferSizeMs = 200;
|
||||
|
||||
return jmax (8, buffersToQueueForBufferDuration (nativeBufferSize, maxBufferSizeMs, maximumSampleRate));
|
||||
}
|
||||
|
||||
static Array<int> getAvailableBufferSizes (int nativeBufferSize, Array<double> availableSampleRates)
|
||||
{
|
||||
auto minBuffersToQueue = getMinimumBuffersToEnqueue (nativeBufferSize, getNativeSampleRate());
|
||||
auto maxBuffersToQueue = getMaximumBuffersToEnqueue (nativeBufferSize, findMaximum (availableSampleRates.getRawDataPointer(),
|
||||
availableSampleRates.size()));
|
||||
|
||||
Array<int> bufferSizes;
|
||||
|
||||
for (int i = minBuffersToQueue; i <= maxBuffersToQueue; ++i)
|
||||
bufferSizes.add (i * nativeBufferSize);
|
||||
|
||||
return bufferSizes;
|
||||
}
|
||||
|
||||
static int getDefaultBufferSize (int nativeBufferSize, double currentSampleRate)
|
||||
{
|
||||
static constexpr int defaultBufferSizeForLowLatencyDeviceMs = 40;
|
||||
static constexpr int defaultBufferSizeForStandardLatencyDeviceMs = 100;
|
||||
|
||||
auto defaultBufferLength = (hasLowLatencyAudioPath() ? defaultBufferSizeForLowLatencyDeviceMs
|
||||
: defaultBufferSizeForStandardLatencyDeviceMs);
|
||||
|
||||
auto defaultBuffersToEnqueue = buffersToQueueForBufferDuration (nativeBufferSize, defaultBufferLength, currentSampleRate);
|
||||
return defaultBuffersToEnqueue * nativeBufferSize;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace juce
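For orientation (added, not part of the diff): the default buffer size these helpers report follows directly from the constants above. A worked example with assumed device values (the 192-frame fallback from getNativeBufferSizeHint(), a low-latency device, 48 kHz):

    #include <cmath>

    // Assumed inputs, mirroring getDefaultBufferSize() / buffersToQueueForBufferDuration():
    constexpr int    nativeBufferSize   = 192;      // fallback from getNativeBufferSizeHint()
    constexpr int    bufferDurationInMs = 40;       // defaultBufferSizeForLowLatencyDeviceMs
    constexpr double sampleRate         = 48000.0;  // assumed native sample rate

    const int maxBufferFrames = (int) std::ceil (bufferDurationInMs * sampleRate / 1000.0);    // 1920 frames
    const int buffersToQueue  = (int) std::ceil (maxBufferFrames / (double) nativeBufferSize); // 10 buffers
    const int defaultSize     = buffersToQueue * nativeBufferSize;                             // 1920 samples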
|
||||
|
File diff suppressed because it is too large
@@ -1,100 +1,92 @@
|
||||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE library.
|
||||
Copyright (c) 2020 - Raw Material Software Limited
|
||||
|
||||
JUCE is an open source library subject to commercial or open-source
|
||||
licensing.
|
||||
|
||||
The code included in this file is provided under the terms of the ISC license
|
||||
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
|
||||
To use, copy, modify, and/or distribute this software for any purpose with or
|
||||
without fee is hereby granted provided that the above copyright notice and
|
||||
this permission notice appear in all copies.
|
||||
|
||||
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
|
||||
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
|
||||
DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
namespace juce
|
||||
{
|
||||
|
||||
class iOSAudioIODeviceType;
|
||||
|
||||
class iOSAudioIODevice : public AudioIODevice
|
||||
{
|
||||
public:
|
||||
//==============================================================================
|
||||
String open (const BigInteger&, const BigInteger&, double, int) override;
|
||||
void close() override;
|
||||
|
||||
void start (AudioIODeviceCallback*) override;
|
||||
void stop() override;
|
||||
|
||||
Array<double> getAvailableSampleRates() override;
|
||||
Array<int> getAvailableBufferSizes() override;
|
||||
|
||||
bool setAudioPreprocessingEnabled (bool) override;
|
||||
|
||||
//==============================================================================
|
||||
bool isPlaying() override;
|
||||
bool isOpen() override;
|
||||
String getLastError() override;
|
||||
|
||||
//==============================================================================
|
||||
StringArray getOutputChannelNames() override;
|
||||
StringArray getInputChannelNames() override;
|
||||
|
||||
int getDefaultBufferSize() override;
|
||||
int getCurrentBufferSizeSamples() override;
|
||||
|
||||
double getCurrentSampleRate() override;
|
||||
|
||||
int getCurrentBitDepth() override;
|
||||
|
||||
BigInteger getActiveOutputChannels() const override;
|
||||
BigInteger getActiveInputChannels() const override;
|
||||
|
||||
int getOutputLatencyInSamples() override;
|
||||
int getInputLatencyInSamples() override;
|
||||
|
||||
int getXRunCount() const noexcept override;
|
||||
|
||||
//==============================================================================
|
||||
void setMidiMessageCollector (MidiMessageCollector*);
|
||||
AudioPlayHead* getAudioPlayHead() const;
|
||||
|
||||
//==============================================================================
|
||||
bool isInterAppAudioConnected() const;
|
||||
#if JUCE_MODULE_AVAILABLE_juce_graphics
|
||||
Image getIcon (int size);
|
||||
#endif
|
||||
void switchApplication();
|
||||
|
||||
bool isHeadphonesConnected() const;
|
||||
|
||||
bool setInputGain(float val);
|
||||
float getInputGain() const;
|
||||
|
||||
void setAllowBluetoothInput(bool flag);
|
||||
bool getAllowBluetoothInput() const;
|
||||
|
||||
private:
|
||||
//==============================================================================
|
||||
iOSAudioIODevice (iOSAudioIODeviceType*, const String&, const String&);
|
||||
|
||||
//==============================================================================
|
||||
friend class iOSAudioIODeviceType;
|
||||
friend struct AudioSessionHolder;
|
||||
|
||||
struct Pimpl;
|
||||
std::unique_ptr<Pimpl> pimpl;
|
||||
|
||||
JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
|
||||
};
|
||||
|
||||
} // namespace juce
|
||||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE library.
|
||||
Copyright (c) 2022 - Raw Material Software Limited
|
||||
|
||||
JUCE is an open source library subject to commercial or open-source
|
||||
licensing.
|
||||
|
||||
The code included in this file is provided under the terms of the ISC license
|
||||
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
|
||||
To use, copy, modify, and/or distribute this software for any purpose with or
|
||||
without fee is hereby granted provided that the above copyright notice and
|
||||
this permission notice appear in all copies.
|
||||
|
||||
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
|
||||
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
|
||||
DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
namespace juce
|
||||
{
|
||||
|
||||
class iOSAudioIODeviceType;
|
||||
|
||||
class iOSAudioIODevice : public AudioIODevice
|
||||
{
|
||||
public:
|
||||
//==============================================================================
|
||||
String open (const BigInteger&, const BigInteger&, double, int) override;
|
||||
void close() override;
|
||||
|
||||
void start (AudioIODeviceCallback*) override;
|
||||
void stop() override;
|
||||
|
||||
Array<double> getAvailableSampleRates() override;
|
||||
Array<int> getAvailableBufferSizes() override;
|
||||
|
||||
bool setAudioPreprocessingEnabled (bool) override;
|
||||
|
||||
//==============================================================================
|
||||
bool isPlaying() override;
|
||||
bool isOpen() override;
|
||||
String getLastError() override;
|
||||
|
||||
//==============================================================================
|
||||
StringArray getOutputChannelNames() override;
|
||||
StringArray getInputChannelNames() override;
|
||||
|
||||
int getDefaultBufferSize() override;
|
||||
int getCurrentBufferSizeSamples() override;
|
||||
|
||||
double getCurrentSampleRate() override;
|
||||
|
||||
int getCurrentBitDepth() override;
|
||||
|
||||
BigInteger getActiveOutputChannels() const override;
|
||||
BigInteger getActiveInputChannels() const override;
|
||||
|
||||
int getOutputLatencyInSamples() override;
|
||||
int getInputLatencyInSamples() override;
|
||||
|
||||
int getXRunCount() const noexcept override;
|
||||
|
||||
//==============================================================================
|
||||
void setMidiMessageCollector (MidiMessageCollector*);
|
||||
AudioPlayHead* getAudioPlayHead() const;
|
||||
|
||||
//==============================================================================
|
||||
bool isInterAppAudioConnected() const;
|
||||
#if JUCE_MODULE_AVAILABLE_juce_graphics
|
||||
Image getIcon (int size);
|
||||
#endif
|
||||
void switchApplication();
|
||||
|
||||
private:
|
||||
//==============================================================================
|
||||
iOSAudioIODevice (iOSAudioIODeviceType*, const String&, const String&);
|
||||
|
||||
//==============================================================================
|
||||
friend class iOSAudioIODeviceType;
|
||||
friend struct AudioSessionHolder;
|
||||
|
||||
struct Pimpl;
|
||||
std::unique_ptr<Pimpl> pimpl;
|
||||
|
||||
JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
|
||||
};
|
||||
|
||||
} // namespace juce
|
||||
|
File diff suppressed because it is too large
@@ -2,7 +2,7 @@
   ==============================================================================
 
    This file is part of the JUCE library.
-   Copyright (c) 2020 - Raw Material Software Limited
+   Copyright (c) 2022 - Raw Material Software Limited
 
    JUCE is an open source library subject to commercial or open-source
    licensing.
@@ -82,10 +82,8 @@ namespace CoreMidiHelpers
     struct Sender;
 
 #if JUCE_HAS_NEW_COREMIDI_API
-    JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wunguarded-availability-new")
-
     template <>
-    struct Sender<ImplementationStrategy::onlyNew> : public SenderBase
+    struct API_AVAILABLE (macos (11.0), ios (14.0)) Sender<ImplementationStrategy::onlyNew> : public SenderBase
     {
         explicit Sender (MIDIEndpointRef ep)
             : umpConverter (getProtocolForEndpoint (ep))
@@ -177,8 +175,6 @@
             send();
         }
     };
-
-    JUCE_END_IGNORE_WARNINGS_GCC_LIKE
 #endif
 
 #if JUCE_HAS_OLD_COREMIDI_API
@@ -829,10 +825,8 @@
     struct CreatorFunctions;
 
 #if JUCE_HAS_NEW_COREMIDI_API
-    JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wunguarded-availability-new")
-
     template <>
-    struct CreatorFunctions<ImplementationStrategy::onlyNew>
+    struct API_AVAILABLE (macos (11.0), ios (14.0)) CreatorFunctions<ImplementationStrategy::onlyNew>
     {
         static OSStatus createInputPort (ump::PacketProtocol protocol,
                                          MIDIClientRef client,
@@ -894,8 +888,6 @@
             static_cast<MidiPortAndCallback*> (readProcRefCon)->handlePackets (*list);
         }
     };
-
-    JUCE_END_IGNORE_WARNINGS_GCC_LIKE
 #endif
 
 #if JUCE_HAS_OLD_COREMIDI_API
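Aside (added, not from the diff): the hunks above drop the -Wunguarded-availability-new suppression because the availability attribute on the specialisation now carries the macOS 11 / iOS 14 requirement itself. A hedged sketch of that general pattern, assuming clang on an Apple platform (API_AVAILABLE comes from <os/availability.h>; NewBackend and trySend are made-up names):

    #include <os/availability.h>

    // The attribute marks the type as macOS 11 / iOS 14+, so no warning suppression is needed.
    struct API_AVAILABLE (macos (11.0), ios (14.0)) NewBackend
    {
        void send() {}
    };

    static void trySend()
    {
        // Run-time guard at the call site (clang extension); without it, using an
        // availability-annotated type on an older deployment target would warn.
        if (__builtin_available (macOS 11.0, iOS 14.0, *))
            NewBackend().send();
    }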
File diff suppressed because it is too large
@@ -21,7 +21,6 @@
#include <vector>
#include <sys/types.h>
#include <unistd.h>
#include <cassert>

#ifndef MCR_USE_KAISER
// It appears from the spectrogram that the HyperbolicCosine window leads to fewer artifacts.