/* ============================================================================== This file is part of the JUCE library. Copyright (c) 2017 - ROLI Ltd. JUCE is an open source library subject to commercial or open-source licensing. The code included in this file is provided under the terms of the ISC license http://www.isc.org/downloads/software-support-policy/isc-license. Permission To use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted provided that the above copyright notice and this permission notice appear in all copies. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE DISCLAIMED. ============================================================================== */ namespace juce { #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK) \ DECLARE_JNI_CLASS (AndroidAudioManager, "android/media/AudioManager") #undef JNI_CLASS_MEMBERS //============================================================================== #ifndef SL_ANDROID_DATAFORMAT_PCM_EX #define SL_ANDROID_DATAFORMAT_PCM_EX ((SLuint32) 0x00000004) #endif #ifndef SL_ANDROID_PCM_REPRESENTATION_FLOAT #define SL_ANDROID_PCM_REPRESENTATION_FLOAT ((SLuint32) 0x00000003) #endif #ifndef SL_ANDROID_RECORDING_PRESET_UNPROCESSED #define SL_ANDROID_RECORDING_PRESET_UNPROCESSED ((SLuint32) 0x00000005) #endif //============================================================================== struct PCMDataFormatEx : SLDataFormat_PCM { SLuint32 representation; }; //============================================================================== template struct IntfIID; template <> struct IntfIID { static SLInterfaceID_ iid; }; template <> struct IntfIID { static SLInterfaceID_ iid; }; template <> struct IntfIID { static SLInterfaceID_ iid; }; template <> struct IntfIID { static SLInterfaceID_ iid; }; template <> struct IntfIID { static SLInterfaceID_ 
iid; }; template <> struct IntfIID { static SLInterfaceID_ iid; }; template <> struct IntfIID { static SLInterfaceID_ iid; }; SLInterfaceID_ IntfIID::iid = { 0x79216360, 0xddd7, 0x11db, 0xac16, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} }; SLInterfaceID_ IntfIID::iid = { 0x8d97c260, 0xddd4, 0x11db, 0x958f, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} }; SLInterfaceID_ IntfIID::iid = { 0x97750f60, 0xddd7, 0x11db, 0x92b1, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} }; SLInterfaceID_ IntfIID::iid = { 0xef0bd9c0, 0xddd7, 0x11db, 0xbf49, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} }; SLInterfaceID_ IntfIID::iid = { 0xc5657aa0, 0xdddb, 0x11db, 0x82f7, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} }; SLInterfaceID_ IntfIID::iid = { 0x198e4940, 0xc5d7, 0x11df, 0xa2a6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} }; SLInterfaceID_ IntfIID::iid = { 0x89f6a7e0, 0xbeac, 0x11df, 0x8b5c, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} }; template static void destroyObject (SLObjectType object) { if (object != nullptr && *object != nullptr) (*object)->Destroy (object); } template <> struct ContainerDeletePolicy { static void destroy (SLObjectItf object) { destroyObject (object); } }; //============================================================================== // Some life-time and type management of OpenSL objects class SlObjectRef { public: //============================================================================== SlObjectRef() noexcept {} SlObjectRef (const SlObjectRef& obj) noexcept : cb (obj.cb) {} SlObjectRef (SlObjectRef&& obj) noexcept : cb (std::move (obj.cb)) { obj.cb = nullptr; } explicit SlObjectRef (SLObjectItf o) : cb (new ControlBlock (o)) {} //============================================================================== SlObjectRef& operator= (const SlObjectRef& r) noexcept { cb = r.cb; return *this; } SlObjectRef& operator= (SlObjectRef&& r) noexcept { cb = std::move (r.cb); r.cb = nullptr; return *this; } SlObjectRef& operator= (std::nullptr_t) noexcept { cb = nullptr; return *this; } 
//============================================================================== const SLObjectItf_* operator*() noexcept { return *cb->ptr.get(); } SLObjectItf operator->() noexcept { return (cb == nullptr ? nullptr : cb->ptr.get()); } operator SLObjectItf() noexcept { return (cb == nullptr ? nullptr : cb->ptr.get()); } //============================================================================== bool operator== (nullptr_t) const noexcept { return (cb == nullptr || cb->ptr == nullptr); } bool operator!= (nullptr_t) const noexcept { return (cb != nullptr && cb->ptr != nullptr); } private: //============================================================================== struct ControlBlock : ReferenceCountedObject { ControlBlock() = default; ControlBlock (SLObjectItf o) : ptr (o) {} std::unique_ptr ptr; }; ReferenceCountedObjectPtr cb; }; template class SlRef : public SlObjectRef { public: //============================================================================== SlRef() noexcept {} SlRef (const SlRef& r) noexcept : SlObjectRef (r), type (r.type) {} SlRef (SlRef&& r) noexcept : SlObjectRef (std::move (r)), type (r.type) { r.type = nullptr; } //============================================================================== SlRef& operator= (const SlRef& r) noexcept { SlObjectRef::operator= (r); type = r.type; return *this; } SlRef& operator= (SlRef&& r) noexcept { SlObjectRef::operator= (std::move (r)); type = r.type; r.type = nullptr; return *this; } SlRef& operator= (std::nullptr_t) noexcept { SlObjectRef::operator= (nullptr); type = nullptr; return *this; } //============================================================================== T* const operator*() noexcept { return *type; } T* const* operator->() noexcept { return type; } operator T* const*() noexcept { return type; } //============================================================================== static SlRef cast (SlObjectRef& base) { return SlRef (base); } static SlRef cast (SlObjectRef&& base) 
{ return SlRef (std::move (base)); } private: SlRef (SlObjectRef& base) : SlObjectRef (base) { if (auto obj = SlObjectRef::operator->()) { auto err = (*obj)->GetInterface (obj, &IntfIID::iid, &type); if (type != nullptr && err == SL_RESULT_SUCCESS) return; } *this = nullptr; } SlRef (SlObjectRef&& base) : SlObjectRef (std::move (base)) { if (auto obj = SlObjectRef::operator->()) { auto err = (*obj)->GetInterface (obj, &IntfIID::iid, &type); base = nullptr; if (type != nullptr && err == SL_RESULT_SUCCESS) return; } *this = nullptr; } T* const* type = nullptr; }; //============================================================================== template struct BufferHelpers {}; template <> struct BufferHelpers { enum { isFloatingPoint = 0 }; static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate) { dataFormat.formatType = SL_DATAFORMAT_PCM; dataFormat.numChannels = (SLuint32) numChannels; dataFormat.samplesPerSec = (SLuint32) (sampleRate * 1000); dataFormat.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16; dataFormat.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16; dataFormat.channelMask = (numChannels == 1) ? 
SL_SPEAKER_FRONT_CENTER : (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT); dataFormat.endianness = SL_BYTEORDER_LITTLEENDIAN; dataFormat.representation = 0; } static void prepareCallbackBuffer (AudioBuffer&, int16*) {} static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer& audioBuffer) { for (int i = 0; i < audioBuffer.getNumChannels(); ++i) { using DstSampleType = AudioData::Pointer; using SrcSampleType = AudioData::Pointer; DstSampleType dstData (audioBuffer.getWritePointer (i)); SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels()); dstData.convertSamples (srcData, audioBuffer.getNumSamples()); } } static void convertToOpenSL (const AudioBuffer& audioBuffer, int16* dstInterleaved) { for (int i = 0; i < audioBuffer.getNumChannels(); ++i) { using DstSampleType = AudioData::Pointer; using SrcSampleType = AudioData::Pointer; DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels()); SrcSampleType srcData (audioBuffer.getReadPointer (i)); dstData.convertSamples (srcData, audioBuffer.getNumSamples()); } } }; template <> struct BufferHelpers { enum { isFloatingPoint = 1 }; static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate) { dataFormat.formatType = SL_ANDROID_DATAFORMAT_PCM_EX; dataFormat.numChannels = (SLuint32) numChannels; dataFormat.samplesPerSec = (SLuint32) (sampleRate * 1000); dataFormat.bitsPerSample = 32; dataFormat.containerSize = 32; dataFormat.channelMask = (numChannels == 1) ? 
SL_SPEAKER_FRONT_CENTER : (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT); dataFormat.endianness = SL_BYTEORDER_LITTLEENDIAN; dataFormat.representation = SL_ANDROID_PCM_REPRESENTATION_FLOAT; } static void prepareCallbackBuffer (AudioBuffer& audioBuffer, float* native) { if (audioBuffer.getNumChannels() == 1) audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples()); } static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer& audioBuffer) { if (audioBuffer.getNumChannels() == 1) { jassert (srcInterleaved == audioBuffer.getWritePointer (0)); return; } for (int i = 0; i < audioBuffer.getNumChannels(); ++i) { using DstSampleType = AudioData::Pointer; using SrcSampleType = AudioData::Pointer; DstSampleType dstData (audioBuffer.getWritePointer (i)); SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels()); dstData.convertSamples (srcData, audioBuffer.getNumSamples()); } } static void convertToOpenSL (const AudioBuffer& audioBuffer, float* dstInterleaved) { if (audioBuffer.getNumChannels() == 1) { jassert (dstInterleaved == audioBuffer.getReadPointer (0)); return; } for (int i = 0; i < audioBuffer.getNumChannels(); ++i) { using DstSampleType = AudioData::Pointer; using SrcSampleType = AudioData::Pointer; DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels()); SrcSampleType srcData (audioBuffer.getReadPointer (i)); dstData.convertSamples (srcData, audioBuffer.getNumSamples()); } } }; class SLRealtimeThread; //============================================================================== class OpenSLAudioIODevice : public AudioIODevice { public: //============================================================================== template class OpenSLSessionT; //============================================================================== // CRTP template struct OpenSLQueueRunner { OpenSLQueueRunner (OpenSLSessionT& sessionToUse, int numChannelsToUse) : owner (sessionToUse), numChannels (numChannelsToUse), 
nativeBuffer (static_cast (numChannels * owner.bufferSize * owner.numBuffers)), scratchBuffer (numChannelsToUse, owner.bufferSize), sampleBuffer (scratchBuffer.getArrayOfWritePointers(), numChannelsToUse, owner.bufferSize) {} ~OpenSLQueueRunner() { if (config != nullptr && javaProxy != nullptr) { javaProxy.clear(); (*config)->ReleaseJavaProxy (config, /*SL_ANDROID_JAVA_PROXY_ROUTING*/1); } } bool init() { runner = crtp().createPlayerOrRecorder(); if (runner == nullptr) return false; const bool supportsJavaProxy = (getAndroidSDKVersion() >= 24); if (supportsJavaProxy) { // may return nullptr on some platforms - that's ok config = SlRef::cast (runner); if (config != nullptr) { jobject audioRoutingJni; auto status = (*config)->AcquireJavaProxy (config, /*SL_ANDROID_JAVA_PROXY_ROUTING*/1, &audioRoutingJni); if (status == SL_RESULT_SUCCESS && audioRoutingJni != 0) javaProxy = GlobalRef (LocalRef(getEnv()->NewLocalRef (audioRoutingJni))); } } queue = SlRef::cast (runner); if (queue == nullptr) return false; return ((*queue)->RegisterCallback (queue, staticFinished, this) == SL_RESULT_SUCCESS); } void clear() { nextBlock.set (0); numBlocksOut.set (0); zeromem (nativeBuffer.get(), static_cast (owner.bufferSize * numChannels * owner.numBuffers) * sizeof (T)); scratchBuffer.clear(); (*queue)->Clear (queue); } void enqueueBuffer() { (*queue)->Enqueue (queue, getCurrentBuffer(), static_cast (getBufferSizeInSamples() * sizeof (T))); ++numBlocksOut; } bool isBufferAvailable() const { return (numBlocksOut.get() < owner.numBuffers); } T* getNextBuffer() { nextBlock.set((nextBlock.get() + 1) % owner.numBuffers); return getCurrentBuffer(); } T* getCurrentBuffer() { return nativeBuffer.get() + (static_cast (nextBlock.get()) * getBufferSizeInSamples()); } size_t getBufferSizeInSamples() const { return static_cast (owner.bufferSize * numChannels); } void finished (SLAndroidSimpleBufferQueueItf) { --numBlocksOut; owner.doSomeWorkOnAudioThread(); } static void staticFinished 
(SLAndroidSimpleBufferQueueItf caller, void *pContext) { reinterpret_cast (pContext)->finished (caller); } // get the "this" pointer for CRTP Child& crtp() { return * ((Child*) this); } const Child& crtp() const { return * ((Child*) this); } OpenSLSessionT& owner; SlRef runner; SlRef queue; SlRef config; GlobalRef javaProxy; int numChannels; HeapBlock nativeBuffer; AudioBuffer scratchBuffer, sampleBuffer; Atomic nextBlock { 0 }, numBlocksOut { 0 }; }; //============================================================================== template struct OpenSLQueueRunnerPlayer : OpenSLQueueRunner, SLPlayItf_> { using Base = OpenSLQueueRunner, SLPlayItf_>; OpenSLQueueRunnerPlayer (OpenSLSessionT& sessionToUse, int numChannelsToUse) : Base (sessionToUse, numChannelsToUse) {} SlRef createPlayerOrRecorder() { SLDataLocator_AndroidSimpleBufferQueue queueLocator = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast (Base::owner.numBuffers) }; SLDataLocator_OutputMix outputMix = { SL_DATALOCATOR_OUTPUTMIX, Base::owner.outputMix }; PCMDataFormatEx dataFormat; BufferHelpers::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate); SLDataSource source = { &queueLocator, &dataFormat }; SLDataSink sink = { &outputMix, nullptr }; SLInterfaceID queueInterfaces[] = { &IntfIID::iid, &IntfIID::iid }; SLboolean interfaceRequired[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE}; SLObjectItf obj = nullptr; if (auto e = *Base::owner.engine) { auto status = e->CreateAudioPlayer (Base::owner.engine, &obj, &source, &sink, 2, queueInterfaces, interfaceRequired); if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize(obj, 0) != SL_RESULT_SUCCESS) { destroyObject (obj); return {}; } } return SlRef::cast (SlObjectRef (obj)); } void setState (bool running) { (*Base::runner)->SetPlayState (Base::runner, running ? 
SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_STOPPED); } }; template struct OpenSLQueueRunnerRecorder : public OpenSLQueueRunner, SLRecordItf_> { using Base = OpenSLQueueRunner, SLRecordItf_>; OpenSLQueueRunnerRecorder (OpenSLSessionT& sessionToUse, int numChannelsToUse) : Base (sessionToUse, numChannelsToUse) {} SlRef createPlayerOrRecorder() { SLDataLocator_IODevice ioDeviceLocator = { SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, nullptr }; SLDataLocator_AndroidSimpleBufferQueue queueLocator = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast (Base::owner.numBuffers) }; PCMDataFormatEx dataFormat; BufferHelpers::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate); SLDataSource source = { &ioDeviceLocator, nullptr }; SLDataSink sink = { &queueLocator, &dataFormat }; SLInterfaceID queueInterfaces[] = { &IntfIID::iid, &IntfIID::iid }; SLboolean interfaceRequired[] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE }; SLObjectItf obj = nullptr; if (auto e = *Base::owner.engine) { auto status = e->CreateAudioRecorder (Base::owner.engine, &obj, &source, &sink, 2, queueInterfaces, interfaceRequired); if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS) { destroyObject (obj); return {}; } } return SlRef::cast (SlObjectRef (obj)); } bool setAudioPreprocessingEnabled (bool shouldEnable) { if (Base::config != nullptr) { const bool supportsUnprocessed = (getAndroidSDKVersion() >= 25); const SLuint32 recordingPresetValue = (shouldEnable ? SL_ANDROID_RECORDING_PRESET_GENERIC : (supportsUnprocessed ? SL_ANDROID_RECORDING_PRESET_UNPROCESSED : SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION)); auto status = (*Base::config)->SetConfiguration (Base::config, SL_ANDROID_KEY_RECORDING_PRESET, &recordingPresetValue, sizeof (recordingPresetValue)); return (status == SL_RESULT_SUCCESS); } return false; } void setState (bool running) { (*Base::runner)->SetRecordState (Base::runner, running ? 
SL_RECORDSTATE_RECORDING : SL_RECORDSTATE_STOPPED); } }; //============================================================================== class OpenSLSession { public: OpenSLSession (DynamicLibrary& slLibraryToUse, int numInputChannels, int numOutputChannels, double samleRateToUse, int bufferSizeToUse, int numBuffersToUse) : inputChannels (numInputChannels), outputChannels (numOutputChannels), sampleRate (samleRateToUse), bufferSize (bufferSizeToUse), numBuffers (numBuffersToUse) { jassert (numInputChannels > 0 || numOutputChannels > 0); if (auto createEngine = (CreateEngineFunc) slLibraryToUse.getFunction ("slCreateEngine")) { SLObjectItf obj = nullptr; auto err = createEngine (&obj, 0, nullptr, 0, nullptr, nullptr); if (err != SL_RESULT_SUCCESS || obj == nullptr || *obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS) { destroyObject (obj); return; } engine = SlRef::cast (SlObjectRef (obj)); } if (outputChannels > 0) { SLObjectItf obj = nullptr; auto err = (*engine)->CreateOutputMix (engine, &obj, 0, nullptr, nullptr); if (err != SL_RESULT_SUCCESS || obj == nullptr || *obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS) { destroyObject (obj); return; } outputMix = SlRef::cast (SlObjectRef (obj)); } } virtual ~OpenSLSession() {} virtual bool openedOK() const { return (engine != nullptr && (outputChannels == 0 || (outputMix != nullptr))); } virtual void start() { stop(); jassert (callback.get() != nullptr); running = true; } virtual void stop() { running = false; } virtual bool setAudioPreprocessingEnabled (bool shouldEnable) = 0; virtual bool supportsFloatingPoint() const noexcept = 0; virtual int getXRunCount() const noexcept = 0; void setCallback (AudioIODeviceCallback* callbackToUse) { if (! running) { callback.set (callbackToUse); return; } // don't set callback to null! stop the playback instead! 
jassert (callbackToUse != nullptr); // spin-lock until we can set the callback for (;;) { auto old = callback.get(); if (old == callbackToUse) break; if (callback.compareAndSetBool (callbackToUse, old)) break; Thread::sleep (1); } } void process (const float** inputChannelData, float** outputChannelData) { if (auto* cb = callback.exchange (nullptr)) { cb->audioDeviceIOCallback (inputChannelData, inputChannels, outputChannelData, outputChannels, bufferSize); callback.set (cb); } else { for (int i = 0; i < outputChannels; ++i) zeromem (outputChannelData[i], sizeof(float) * static_cast (bufferSize)); } } static OpenSLSession* create (DynamicLibrary& slLibrary, int numInputChannels, int numOutputChannels, double samleRateToUse, int bufferSizeToUse, int numBuffersToUse); //============================================================================== using CreateEngineFunc = SLresult (*) (SLObjectItf*, SLuint32, const SLEngineOption*, SLuint32, const SLInterfaceID*, const SLboolean*); //============================================================================== int inputChannels, outputChannels; double sampleRate; int bufferSize, numBuffers; bool running = false, audioProcessingEnabled = true; SlRef engine; SlRef outputMix; Atomic callback { nullptr }; }; template class OpenSLSessionT : public OpenSLSession { public: OpenSLSessionT (DynamicLibrary& slLibraryToUse, int numInputChannels, int numOutputChannels, double samleRateToUse, int bufferSizeToUse, int numBuffersToUse) : OpenSLSession (slLibraryToUse, numInputChannels, numOutputChannels, samleRateToUse, bufferSizeToUse, numBuffersToUse) { jassert (numInputChannels > 0 || numOutputChannels > 0); if (OpenSLSession::openedOK()) { if (inputChannels > 0) { recorder.reset (new OpenSLQueueRunnerRecorder (*this, inputChannels)); if (! recorder->init()) { recorder = nullptr; return; } } if (outputChannels > 0) { player.reset (new OpenSLQueueRunnerPlayer (*this, outputChannels)); if (! 
player->init()) { player = nullptr; return; } const bool supportsUnderrunCount = (getAndroidSDKVersion() >= 24); getUnderrunCount = supportsUnderrunCount ? getEnv()->GetMethodID (AudioTrack, "getUnderrunCount", "()I") : 0; } } } bool openedOK() const override { return OpenSLSession::openedOK() && (inputChannels == 0 || recorder != nullptr) && (outputChannels == 0 || player != nullptr); } void start() override { OpenSLSession::start(); guard.set (0); if (inputChannels > 0) recorder->clear(); if (outputChannels > 0) player->clear(); // first enqueue all buffers for (int i = 0; i < numBuffers; ++i) doSomeWorkOnAudioThread(); if (inputChannels > 0) recorder->setState (true); if (outputChannels > 0) player->setState (true); } void stop() override { OpenSLSession::stop(); while (! guard.compareAndSetBool (1, 0)) Thread::sleep (1); if (inputChannels > 0) recorder->setState (false); if (outputChannels > 0) player->setState (false); guard.set (0); } bool setAudioPreprocessingEnabled (bool shouldEnable) override { if (shouldEnable != audioProcessingEnabled) { audioProcessingEnabled = shouldEnable; if (recorder != nullptr) return recorder->setAudioPreprocessingEnabled (audioProcessingEnabled); } return true; } int getXRunCount() const noexcept override { if (player != nullptr && player->javaProxy != nullptr && getUnderrunCount != 0) return getEnv()->CallIntMethod (player->javaProxy, getUnderrunCount); return -1; } bool supportsFloatingPoint() const noexcept override { return (BufferHelpers::isFloatingPoint != 0); } void doSomeWorkOnAudioThread() { // only the player or the recorder should enter this section at any time if (guard.compareAndSetBool (1, 0)) { // are there enough buffers avaialable to process some audio if ((inputChannels == 0 || recorder->isBufferAvailable()) && (outputChannels == 0 || player->isBufferAvailable())) { T* recorderBuffer = (inputChannels > 0 ? recorder->getNextBuffer() : nullptr); T* playerBuffer = (outputChannels > 0 ? 
player->getNextBuffer() : nullptr); const float** inputChannelData = nullptr; float** outputChannelData = nullptr; if (recorderBuffer != nullptr) { BufferHelpers::prepareCallbackBuffer (recorder->sampleBuffer, recorderBuffer); BufferHelpers::convertFromOpenSL (recorderBuffer, recorder->sampleBuffer); inputChannelData = recorder->sampleBuffer.getArrayOfReadPointers(); } if (playerBuffer != nullptr) { BufferHelpers::prepareCallbackBuffer (player->sampleBuffer, playerBuffer); outputChannelData = player->sampleBuffer.getArrayOfWritePointers(); } process (inputChannelData, outputChannelData); if (recorderBuffer != nullptr) recorder->enqueueBuffer(); if (playerBuffer != nullptr) { BufferHelpers::convertToOpenSL (player->sampleBuffer, playerBuffer); player->enqueueBuffer(); } } guard.set (0); } } //============================================================================== std::unique_ptr> player; std::unique_ptr> recorder; Atomic guard; jmethodID getUnderrunCount = 0; }; //============================================================================== OpenSLAudioIODevice (const String& deviceName) : AudioIODevice (deviceName, openSLTypeName) { // OpenSL has piss-poor support for determining latency, so the only way I can find to // get a number for this is by asking the AudioTrack/AudioRecord classes.. AndroidAudioIODevice javaDevice (deviceName); // this is a total guess about how to calculate the latency, but seems to vaguely agree // with the devices I've tested.. 
YMMV inputLatency = (javaDevice.minBufferSizeIn * 2) / 3; outputLatency = (javaDevice.minBufferSizeOut * 2) / 3; const int64 longestLatency = jmax (inputLatency, outputLatency); const int64 totalLatency = inputLatency + outputLatency; inputLatency = (int) ((longestLatency * inputLatency) / totalLatency) & ~15; outputLatency = (int) ((longestLatency * outputLatency) / totalLatency) & ~15; bool success = slLibrary.open ("libOpenSLES.so"); // You can only create this class if you are sure that your hardware supports OpenSL jassert (success); ignoreUnused (success); } ~OpenSLAudioIODevice() { close(); } bool openedOk() const { return session != nullptr; } StringArray getOutputChannelNames() override { StringArray s; s.add ("Left"); s.add ("Right"); return s; } StringArray getInputChannelNames() override { StringArray s; s.add ("Audio Input"); return s; } Array getAvailableSampleRates() override { // see https://developer.android.com/ndk/guides/audio/opensl-for-android.html static const double rates[] = { 8000.0, 11025.0, 12000.0, 16000.0, 22050.0, 24000.0, 32000.0, 44100.0, 48000.0 }; Array retval (rates, numElementsInArray (rates)); // make sure the native sample rate is pafrt of the list double native = getNativeSampleRate(); if (native != 0.0 && ! retval.contains (native)) retval.add (native); return retval; } Array getAvailableBufferSizes() override { // we need to offer the lowest possible buffer size which // is the native buffer size auto nativeBufferSize = getNativeBufferSize(); auto minBuffersToQueue = getMinimumBuffersToEnqueue(); auto maxBuffersToQueue = getMaximumBuffersToEnqueue(); Array retval; for (int i = minBuffersToQueue; i <= maxBuffersToQueue; ++i) retval.add (i * nativeBufferSize); return retval; } String open (const BigInteger& inputChannels, const BigInteger& outputChannels, double requestedSampleRate, int bufferSize) override { close(); lastError.clear(); sampleRate = (int) (requestedSampleRate > 0 ? 
requestedSampleRate : getNativeSampleRate()); auto totalPreferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize; auto nativeBufferSize = getNativeBufferSize(); bool useHighPerformanceAudioPath = canUseHighPerformanceAudioPath (totalPreferredBufferSize, sampleRate); audioBuffersToEnqueue = useHighPerformanceAudioPath ? (totalPreferredBufferSize / nativeBufferSize) : 1; actualBufferSize = totalPreferredBufferSize / audioBuffersToEnqueue; jassert ((actualBufferSize * audioBuffersToEnqueue) == totalPreferredBufferSize); activeOutputChans = outputChannels; activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false); auto numOutputChannels = activeOutputChans.countNumberOfSetBits(); activeInputChans = inputChannels; activeInputChans.setRange (1, activeInputChans.getHighestBit(), false); auto numInputChannels = activeInputChans.countNumberOfSetBits(); if (numInputChannels > 0 && (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))) { // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio // before trying to open an audio input device. This is not going to work! 
jassertfalse; lastError = "Error opening OpenSL input device: the app was not granted android.permission.RECORD_AUDIO"; } session.reset (OpenSLSession::create (slLibrary, numInputChannels, numOutputChannels, sampleRate, actualBufferSize, audioBuffersToEnqueue)); if (session != nullptr) { session->setAudioPreprocessingEnabled (audioProcessingEnabled); } else { if (numInputChannels > 0 && numOutputChannels > 0 && RuntimePermissions::isGranted (RuntimePermissions::recordAudio)) { // New versions of the Android emulator do not seem to support audio input anymore on OS X activeInputChans = BigInteger(0); numInputChannels = 0; session.reset (OpenSLSession::create (slLibrary, numInputChannels, numOutputChannels, sampleRate, actualBufferSize, audioBuffersToEnqueue)); } } DBG ("OpenSL: numInputChannels = " << numInputChannels << ", numOutputChannels = " << numOutputChannels << ", nativeBufferSize = " << getNativeBufferSize() << ", nativeSampleRate = " << getNativeSampleRate() << ", actualBufferSize = " << actualBufferSize << ", audioBuffersToEnqueue = " << audioBuffersToEnqueue << ", sampleRate = " << sampleRate << ", supportsFloatingPoint = " << (session != nullptr && session->supportsFloatingPoint() ? "true" : "false")); if (session == nullptr) lastError = "Unknown error initializing opensl session"; deviceOpen = (session != nullptr); return lastError; } void close() override { stop(); session = nullptr; callback = nullptr; } int getOutputLatencyInSamples() override { return outputLatency; } int getInputLatencyInSamples() override { return inputLatency; } bool isOpen() override { return deviceOpen; } int getCurrentBufferSizeSamples() override { return actualBufferSize * audioBuffersToEnqueue; } int getCurrentBitDepth() override { return (session != nullptr && session->supportsFloatingPoint() ? 
32 : 16); } BigInteger getActiveOutputChannels() const override { return activeOutputChans; } BigInteger getActiveInputChannels() const override { return activeInputChans; } String getLastError() override { return lastError; } bool isPlaying() override { return callback != nullptr; } int getXRunCount() const noexcept override { return (session != nullptr ? session->getXRunCount() : -1); } int getDefaultBufferSize() override { auto defaultBufferLength = (hasLowLatencyAudioPath() ? defaultBufferSizeForLowLatencyDeviceMs : defaultBufferSizeForStandardLatencyDeviceMs); auto defaultBuffersToEnqueue = buffersToQueueForBufferDuration (defaultBufferLength, getCurrentSampleRate()); return defaultBuffersToEnqueue * getNativeBufferSize(); } double getCurrentSampleRate() override { return (sampleRate == 0.0 ? getNativeSampleRate() : sampleRate); } void start (AudioIODeviceCallback* newCallback) override { if (session != nullptr && callback != newCallback) { auto oldCallback = callback; if (newCallback != nullptr) newCallback->audioDeviceAboutToStart (this); if (oldCallback != nullptr) { // already running if (newCallback == nullptr) stop(); else session->setCallback (newCallback); oldCallback->audioDeviceStopped(); } else { jassert (newCallback != nullptr); // session hasn't started yet session->setCallback (newCallback); session->start(); } callback = newCallback; } } void stop() override { if (session != nullptr && callback != nullptr) { callback = nullptr; session->stop(); session->setCallback (nullptr); } } bool setAudioPreprocessingEnabled (bool shouldAudioProcessingBeEnabled) override { audioProcessingEnabled = shouldAudioProcessingBeEnabled; if (session != nullptr) session->setAudioPreprocessingEnabled (audioProcessingEnabled); return true; } static const char* const openSLTypeName; private: //============================================================================== friend class SLRealtimeThread; 
//============================================================================== DynamicLibrary slLibrary; int actualBufferSize = 0, sampleRate = 0, audioBuffersToEnqueue = 0; int inputLatency, outputLatency; bool deviceOpen = false, audioProcessingEnabled = true; String lastError; BigInteger activeOutputChans, activeInputChans; AudioIODeviceCallback* callback = nullptr; std::unique_ptr session; enum { defaultBufferSizeForLowLatencyDeviceMs = 40, defaultBufferSizeForStandardLatencyDeviceMs = 100 }; static int getMinimumBuffersToEnqueue (double sampleRateToCheck = getNativeSampleRate()) { if (canUseHighPerformanceAudioPath (getNativeBufferSize(), (int) sampleRateToCheck)) { // see https://developer.android.com/ndk/guides/audio/opensl/opensl-prog-notes.html#sandp // "For Android 4.2 (API level 17) and earlier, a buffer count of two or more is required // for lower latency. Beginning with Android 4.3 (API level 18), a buffer count of one // is sufficient for lower latency." return (getAndroidSDKVersion() >= 18 ? 
1 : 2); }

    // we will not use the low-latency path so we can use the absolute minimum number of buffers
    // to queue
    return 1;
}

int getMaximumBuffersToEnqueue() noexcept
{
    constexpr auto maxBufferSizeMs = 200;

    auto availableSampleRates = getAvailableSampleRates();
    auto maximumSampleRate = findMaximum(availableSampleRates.getRawDataPointer(), availableSampleRates.size());

    // ensure we don't return something crazy small
    return jmax (8, buffersToQueueForBufferDuration (maxBufferSizeMs, maximumSampleRate));
}

// Number of native-sized buffers needed to cover bufferDurationInMs at the given rate,
// never below the platform minimum.
// NOTE(review): the static_cast target types below appear to have been lost in extraction
// (likely static_cast<int> / static_cast<double>); verify against the original file.
static int buffersToQueueForBufferDuration (int bufferDurationInMs, double sampleRate) noexcept
{
    auto maxBufferFrames = static_cast (std::ceil (bufferDurationInMs * sampleRate / 1000.0));
    auto maxNumBuffers   = static_cast (std::ceil (static_cast (maxBufferFrames)
                                                    / static_cast (getNativeBufferSize())));

    return jmax (getMinimumBuffersToEnqueue (sampleRate), maxNumBuffers);
}

//==============================================================================
// Queries the Android AudioManager for the hardware output sample rate.
static double getNativeSampleRate()
{
    return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue();
}

// Hardware buffer size in frames; falls back to 512 if the property is missing or invalid.
static int getNativeBufferSize()
{
    const int val = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue();
    return val > 0 ?
val : 512; }

static bool isProAudioDevice()
{
    return androidHasSystemFeature ("android.hardware.audio.pro") || isSapaSupported();
}

static bool hasLowLatencyAudioPath()
{
    return androidHasSystemFeature ("android.hardware.audio.low_latency");
}

// The fast mixer path can only be used when the requested buffer size is a whole
// multiple of the native one, the rate matches the hardware rate, and the device
// reports pro-audio support.
static bool canUseHighPerformanceAudioPath (int requestedBufferSize, int requestedSampleRate)
{
    return ((requestedBufferSize % getNativeBufferSize()) == 0)
             && (requestedSampleRate == getNativeSampleRate())
             && isProAudioDevice();
}

//==============================================================================
// Some minimum Sapa support to check if this device supports pro audio
static bool isSamsungDevice()
{
    return SystemStats::getDeviceManufacturer().containsIgnoreCase ("SAMSUNG");
}

static bool isSapaSupported()
{
    // Cached: probing for the Samsung Professional Audio JNI library only needs to happen once.
    static bool supported = isSamsungDevice() && DynamicLibrary().open ("libapa_jni.so");
    return supported;
}

JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice)
};

// Session factory: on API >= 21 a floating-point session is tried first, falling back
// to 16-bit integer PCM if the device claims float support but fails to open.
// NOTE(review): "samleRateToUse" is a pre-existing typo of "sampleRateToUse" (local
// parameter name only — safe to rename later), and the OpenSLSessionT template
// arguments appear to have been lost in extraction (likely <float> then <int16>);
// verify against the original file.
OpenSLAudioIODevice::OpenSLSession* OpenSLAudioIODevice::OpenSLSession::create (DynamicLibrary& slLibrary,
                                                                                int numInputChannels, int numOutputChannels,
                                                                                double samleRateToUse, int bufferSizeToUse,
                                                                                int numBuffersToUse)
{
    std::unique_ptr retval;
    auto sdkVersion = getAndroidSDKVersion();

    // SDK versions 21 and higher should natively support floating point...
    if (sdkVersion >= 21)
    {
        retval.reset (new OpenSLSessionT (slLibrary, numInputChannels, numOutputChannels, samleRateToUse,
                                          bufferSizeToUse, numBuffersToUse));

        // ...however, some devices lie so re-try without floating point
        if (retval != nullptr && (! retval->openedOK()))
            retval = nullptr;
    }

    if (retval == nullptr)
    {
        retval.reset (new OpenSLSessionT (slLibrary, numInputChannels, numOutputChannels, samleRateToUse,
                                          bufferSizeToUse, numBuffersToUse));

        if (retval != nullptr && (!
retval->openedOK())) retval = nullptr; } return retval.release(); }

//==============================================================================
// AudioIODeviceType exposing the single OpenSL "device".
class OpenSLAudioDeviceType : public AudioIODeviceType
{
public:
    OpenSLAudioDeviceType() : AudioIODeviceType (OpenSLAudioIODevice::openSLTypeName) {}

    //==============================================================================
    // There is only ever one OpenSL device, so scanning is a no-op.
    void scanForDevices() override {}
    StringArray getDeviceNames (bool) const override    { return StringArray (OpenSLAudioIODevice::openSLTypeName); }
    int getDefaultDeviceIndex (bool) const override     { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool) const override    { return device != nullptr ? 0 : -1; }

    // Input and output are handled by the same device object.
    bool hasSeparateInputsAndOutputs() const override   { return false; }

    // NOTE(review): the unique_ptr's template argument appears to have been lost in
    // extraction (likely std::unique_ptr<OpenSLAudioIODevice>); verify against the original file.
    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName) override
    {
        std::unique_ptr dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
            dev.reset (new OpenSLAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                              : inputDeviceName));

        return dev.release();
    }

    // True if the OpenSL ES shared library can be loaded on this device.
    static bool isOpenSLAvailable()
    {
        DynamicLibrary library;
        return library.open ("libOpenSLES.so");
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioDeviceType)
};

const char* const OpenSLAudioIODevice::openSLTypeName = "Android OpenSL";

//==============================================================================
bool isOpenSLAvailable()    { return OpenSLAudioDeviceType::isOpenSLAvailable(); }

AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_OpenSLES()
{
    // Only create the device type when libOpenSLES.so can actually be loaded.
    return isOpenSLAvailable() ?
new OpenSLAudioDeviceType() : nullptr; }

//==============================================================================
// Creates a realtime-priority thread by playing a silent OpenSL stream and borrowing
// its buffer-queue callback thread.
// NOTE(review): several template arguments (SlRef<...>, IntfIID<...>, static_cast<...>,
// BufferHelpers<...>) appear to have been lost in extraction; verify against the original file.
class SLRealtimeThread
{
public:
    static constexpr int numBuffers = 4;

    SLRealtimeThread()
    {
        if (auto createEngine = (OpenSLAudioIODevice::OpenSLSession::CreateEngineFunc) slLibrary.getFunction ("slCreateEngine"))
        {
            // Create and realize the engine object.
            SLObjectItf obj = nullptr;
            auto err = createEngine (&obj, 0, nullptr, 0, nullptr, nullptr);

            if (err != SL_RESULT_SUCCESS || obj == nullptr || *obj == nullptr)
                return;

            if ((*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                destroyObject (obj);
                return;
            }

            engine = SlRef::cast (SlObjectRef (obj));

            if (engine == nullptr)
            {
                destroyObject (obj);
                return;
            }

            // Create and realize the output mix.
            obj = nullptr;
            err = (*engine)->CreateOutputMix (engine, &obj, 0, nullptr, nullptr);

            if (err != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                destroyObject (obj);
                return;
            }

            outputMix = SlRef::cast (SlObjectRef (obj));

            if (outputMix == nullptr)
            {
                destroyObject (obj);
                return;
            }

            // Describe a mono PCM source feeding the output mix through a buffer queue.
            SLDataLocator_AndroidSimpleBufferQueue queueLocator = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast (numBuffers)};
            SLDataLocator_OutputMix outputMixLocator = {SL_DATALOCATOR_OUTPUTMIX, outputMix};

            PCMDataFormatEx dataFormat;
            BufferHelpers::initPCMDataFormat (dataFormat, 1, OpenSLAudioIODevice::getNativeSampleRate());

            SLDataSource source = { &queueLocator, &dataFormat };
            SLDataSink   sink   = { &outputMixLocator, nullptr };

            SLInterfaceID queueInterfaces[] = { &IntfIID::iid };
            SLboolean trueFlag = SL_BOOLEAN_TRUE;

            // Create and realize the audio player, requesting the buffer-queue interface.
            obj = nullptr;
            err = (*engine)->CreateAudioPlayer (engine, &obj, &source, &sink, 1, queueInterfaces, &trueFlag);

            if (err != SL_RESULT_SUCCESS || obj == nullptr)
                return;

            if ((*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                destroyObject (obj);
                return;
            }

            player = SlRef::cast (SlObjectRef (obj));

            if (player == nullptr)
            {
                destroyObject (obj);
                return;
            }

            queue = SlRef::cast (player);

            if (queue == nullptr)
                return;

            // Get notified whenever a buffer finishes playing.
            if ((*queue)->RegisterCallback (queue, staticFinished, this)
!= SL_RESULT_SUCCESS)
            {
                queue = nullptr;
                return;
            }

            pthread_cond_init (&threadReady, nullptr);
            pthread_mutex_init (&threadReadyMutex, nullptr);
        }
    }

    // Construction succeeded only if the buffer queue was obtained.
    bool isOK() const    { return queue != nullptr; }

    // Primes the queue with silence, starts playback, and blocks until the OpenSL
    // callback thread has reported its identity via finished().
    pthread_t startThread (void* (*entry) (void*), void* userPtr)
    {
        memset (buffer.get(), 0, static_cast (sizeof (int16) * static_cast (bufferSize * numBuffers)));

        for (int i = 0; i < numBuffers; ++i)
        {
            int16* dst = buffer.get() + (bufferSize * i);
            (*queue)->Enqueue (queue, dst, static_cast (static_cast (bufferSize) * sizeof (int16)));
        }

        pthread_mutex_lock (&threadReadyMutex);

        threadEntryProc = entry;
        threadUserPtr = userPtr;

        (*player)->SetPlayState (player, SL_PLAYSTATE_PLAYING);

        // Wait for the first buffer-queue callback to run on the realtime thread.
        pthread_cond_wait (&threadReady, &threadReadyMutex);
        pthread_mutex_unlock (&threadReadyMutex);

        return threadID;
    }

    // Runs on the OpenSL callback thread: records the thread id, wakes startThread(),
    // runs the user's entry point, then self-destructs on the message thread.
    void finished()
    {
        if (threadEntryProc != nullptr)
        {
            pthread_mutex_lock (&threadReadyMutex);
            threadID = pthread_self();
            pthread_cond_signal (&threadReady);
            pthread_mutex_unlock (&threadReadyMutex);

            threadEntryProc (threadUserPtr);
            threadEntryProc = nullptr;

            (*player)->SetPlayState (player, SL_PLAYSTATE_STOPPED);
            MessageManager::callAsync ([this] () { delete this; });
        }
    }

private:
    //=============================================================================
    static void staticFinished (SLAndroidSimpleBufferQueueItf, void* context)
    {
        static_cast (context)->finished();
    }

    //=============================================================================
    // NOTE(review): the SlRef/HeapBlock/static_cast template arguments appear to have
    // been lost in extraction; verify against the original file.
    DynamicLibrary slLibrary { "libOpenSLES.so" };

    SlRef engine;
    SlRef outputMix;
    SlRef player;
    SlRef queue;

    int bufferSize = OpenSLAudioIODevice::getNativeBufferSize();
    HeapBlock buffer { HeapBlock (static_cast (1 * bufferSize * numBuffers)) };

    void* (*threadEntryProc) (void*) = nullptr;
    void* threadUserPtr = nullptr;

    pthread_cond_t threadReady;
    pthread_mutex_t threadReadyMutex;
    pthread_t threadID;
};

// Creates a realtime OpenSL-backed thread; returns 0 on failure. The SLRealtimeThread
// object deletes itself once the entry point returns (see finished()).
pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr)
{
    std::unique_ptr thread (new SLRealtimeThread);

    if (!
thread->isOK())
        return 0;

    pthread_t threadID = thread->startThread (entry, userPtr);

    // the thread will de-allocate itself
    thread.release();

    return threadID;
}

} // namespace juce