git subrepo clone --branch=sono6good https://github.com/essej/JUCE.git deps/juce

subrepo:
  subdir:   "deps/juce"
  merged:   "b13f9084e"
upstream:
  origin:   "https://github.com/essej/JUCE.git"
  branch:   "sono6good"
  commit:   "b13f9084e"
git-subrepo:
  version:  "0.4.3"
  origin:   "https://github.com/ingydotnet/git-subrepo.git"
  commit:   "2f68596"
This commit is contained in:
essej
2022-04-18 17:51:22 -04:00
parent 63e175fee6
commit 25bd5d8adb
3210 changed files with 1045392 additions and 0 deletions

View File

@ -0,0 +1,90 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
package com.rmsl.juce;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureResult;
//==============================================================================
/**
    Bridges android.hardware.camera2 capture-session callbacks into JUCE's
    native (C++) camera implementation.

    Every override simply relays its arguments over JNI, together with the
    opaque native "host" pointer and a flag indicating whether this session is
    the preview session. The native method names and signatures are part of
    the JNI contract and must stay exactly as declared.
*/
public class CameraCaptureSessionCaptureCallback extends CameraCaptureSession.CaptureCallback
{
    // Opaque pointer to the native-side owner object.
    private final long host;
    // True if this callback serves the preview session rather than a capture session.
    private final boolean preview;

    CameraCaptureSessionCaptureCallback (long nativeHost, boolean isPreviewSession)
    {
        host = nativeHost;
        preview = isPreviewSession;
    }

    @Override
    public void onCaptureStarted (CameraCaptureSession session, CaptureRequest request, long timestamp,
                                  long frameNumber)
    {
        cameraCaptureSessionCaptureStarted (host, preview, session, request, timestamp, frameNumber);
    }

    @Override
    public void onCaptureProgressed (CameraCaptureSession session, CaptureRequest request,
                                     CaptureResult partialResult)
    {
        cameraCaptureSessionCaptureProgressed (host, preview, session, request, partialResult);
    }

    @Override
    public void onCaptureCompleted (CameraCaptureSession session, CaptureRequest request,
                                    TotalCaptureResult result)
    {
        cameraCaptureSessionCaptureCompleted (host, preview, session, request, result);
    }

    @Override
    public void onCaptureFailed (CameraCaptureSession session, CaptureRequest request, CaptureFailure failure)
    {
        cameraCaptureSessionCaptureFailed (host, preview, session, request, failure);
    }

    @Override
    public void onCaptureSequenceCompleted (CameraCaptureSession session, int sequenceId, long frameNumber)
    {
        cameraCaptureSessionCaptureSequenceCompleted (host, preview, session, sequenceId, frameNumber);
    }

    @Override
    public void onCaptureSequenceAborted (CameraCaptureSession session, int sequenceId)
    {
        cameraCaptureSessionCaptureSequenceAborted (host, preview, session, sequenceId);
    }

    // JNI entry points, implemented in the JUCE C++ camera code.
    private native void cameraCaptureSessionCaptureCompleted (long host, boolean isPreview, CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result);
    private native void cameraCaptureSessionCaptureFailed (long host, boolean isPreview, CameraCaptureSession session, CaptureRequest request, CaptureFailure failure);
    private native void cameraCaptureSessionCaptureProgressed (long host, boolean isPreview, CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult);
    private native void cameraCaptureSessionCaptureStarted (long host, boolean isPreview, CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber);
    private native void cameraCaptureSessionCaptureSequenceAborted (long host, boolean isPreview, CameraCaptureSession session, int sequenceId);
    private native void cameraCaptureSessionCaptureSequenceCompleted (long host, boolean isPreview, CameraCaptureSession session, int sequenceId, long frameNumber);
}

View File

@ -0,0 +1,78 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
package com.rmsl.juce;
import android.hardware.camera2.CameraCaptureSession;
//==============================================================================
/**
    Forwards CameraCaptureSession state-change notifications to JUCE's native
    (C++) camera implementation over JNI.

    The native method names and signatures are fixed by the JNI bindings and
    must not be renamed.
*/
public class CameraCaptureSessionStateCallback extends CameraCaptureSession.StateCallback
{
    // Opaque pointer to the native-side owner object.
    private final long host;

    CameraCaptureSessionStateCallback (long nativeHost)
    {
        host = nativeHost;
    }

    @Override
    public void onConfigured (CameraCaptureSession session)
    {
        cameraCaptureSessionConfigured (host, session);
    }

    @Override
    public void onConfigureFailed (CameraCaptureSession session)
    {
        cameraCaptureSessionConfigureFailed (host, session);
    }

    @Override
    public void onReady (CameraCaptureSession session)
    {
        cameraCaptureSessionReady (host, session);
    }

    @Override
    public void onActive (CameraCaptureSession session)
    {
        cameraCaptureSessionActive (host, session);
    }

    @Override
    public void onClosed (CameraCaptureSession session)
    {
        cameraCaptureSessionClosed (host, session);
    }

    // JNI entry points, implemented in the JUCE C++ camera code.
    private native void cameraCaptureSessionActive (long host, CameraCaptureSession session);
    private native void cameraCaptureSessionClosed (long host, CameraCaptureSession session);
    private native void cameraCaptureSessionConfigureFailed (long host, CameraCaptureSession session);
    private native void cameraCaptureSessionConfigured (long host, CameraCaptureSession session);
    private native void cameraCaptureSessionReady (long host, CameraCaptureSession session);
}

View File

@ -0,0 +1,67 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
package com.rmsl.juce;
import android.hardware.camera2.CameraDevice;
//==============================================================================
/**
    Forwards CameraDevice open/close/error state changes to JUCE's native
    (C++) camera implementation over JNI.

    The native method names and signatures are fixed by the JNI bindings and
    must not be renamed.
*/
public class CameraDeviceStateCallback extends CameraDevice.StateCallback
{
    // Opaque pointer to the native-side owner object.
    private final long host;

    CameraDeviceStateCallback (long nativeHost)
    {
        host = nativeHost;
    }

    @Override
    public void onOpened (CameraDevice camera)
    {
        cameraDeviceStateOpened (host, camera);
    }

    @Override
    public void onDisconnected (CameraDevice camera)
    {
        cameraDeviceStateDisconnected (host, camera);
    }

    @Override
    public void onError (CameraDevice camera, int error)
    {
        cameraDeviceStateError (host, camera, error);
    }

    @Override
    public void onClosed (CameraDevice camera)
    {
        cameraDeviceStateClosed (host, camera);
    }

    // JNI entry points, implemented in the JUCE C++ camera code.
    private native void cameraDeviceStateClosed (long host, CameraDevice camera);
    private native void cameraDeviceStateDisconnected (long host, CameraDevice camera);
    private native void cameraDeviceStateError (long host, CameraDevice camera, int error);
    private native void cameraDeviceStateOpened (long host, CameraDevice camera);
}

View File

@ -0,0 +1,49 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
package com.rmsl.juce;
import android.view.OrientationEventListener;
import android.content.Context;
//==============================================================================
/**
    Relays device-orientation changes from the Android OrientationEventListener
    to JUCE's native (C++) code over JNI.
*/
public class JuceOrientationEventListener extends OrientationEventListener
{
    // Opaque pointer to the native-side owner object.
    private final long host;

    public JuceOrientationEventListener (long nativeHost, Context context, int rate)
    {
        super (context, rate);
        host = nativeHost;
    }

    @Override
    public void onOrientationChanged (int orientation)
    {
        deviceOrientationChanged (host, orientation);
    }

    // JNI entry point, implemented in the JUCE C++ code.
    private native void deviceOrientationChanged (long host, int orientation);
}

View File

@ -0,0 +1,78 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
package com.rmsl.juce;
import android.media.session.MediaController;
import android.media.session.MediaSession;
import android.media.MediaMetadata;
import android.media.session.PlaybackState;
import java.util.List;
//==============================================================================
//==============================================================================
/**
    Forwards MediaController callbacks (playback state, metadata, audio info,
    session teardown) to JUCE's native (C++) media implementation over JNI.

    onQueueChanged is deliberately a no-op; the native side has no queue
    handling. The native method names and signatures are fixed by the JNI
    bindings and must not be renamed.
*/
public class MediaControllerCallback extends MediaController.Callback
{
    // Opaque pointer to the native-side owner object.
    private final long host;

    MediaControllerCallback (long nativeHost)
    {
        host = nativeHost;
    }

    @Override
    public void onAudioInfoChanged (MediaController.PlaybackInfo info)
    {
        mediaControllerAudioInfoChanged (host, info);
    }

    @Override
    public void onMetadataChanged (MediaMetadata metadata)
    {
        mediaControllerMetadataChanged (host, metadata);
    }

    @Override
    public void onPlaybackStateChanged (PlaybackState state)
    {
        mediaControllerPlaybackStateChanged (host, state);
    }

    @Override
    public void onSessionDestroyed ()
    {
        mediaControllerSessionDestroyed (host);
    }

    @Override
    public void onQueueChanged (List<MediaSession.QueueItem> queue)
    {
        // Intentionally empty: queue changes are not forwarded to native code.
    }

    // JNI entry points, implemented in the JUCE C++ media code.
    private native void mediaControllerAudioInfoChanged (long host, MediaController.PlaybackInfo info);
    private native void mediaControllerMetadataChanged (long host, MediaMetadata metadata);
    private native void mediaControllerPlaybackStateChanged (long host, PlaybackState state);
    private native void mediaControllerSessionDestroyed (long host);
}

View File

@ -0,0 +1,103 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
package com.rmsl.juce;
import android.media.session.MediaSession;
import java.lang.String;
import android.os.Bundle;
import android.content.Intent;
import java.util.List;
//==============================================================================
//==============================================================================
/**
    Receives MediaSession transport-control commands and forwards the ones the
    native side handles (play, pause, stop, seek, play-from-media-id) to JUCE's
    C++ implementation over JNI.

    onMediaButtonEvent returns true, which tells the framework the event was
    consumed. The remaining transport callbacks are intentional no-ops. The
    native method names and signatures are fixed by the JNI bindings and must
    not be renamed.
*/
public class MediaSessionCallback extends MediaSession.Callback
{
    // Opaque pointer to the native-side owner object.
    private final long host;

    MediaSessionCallback (long nativeHost)
    {
        host = nativeHost;
    }

    @Override
    public void onPlay ()
    {
        mediaSessionPlay (host);
    }

    @Override
    public void onPause ()
    {
        mediaSessionPause (host);
    }

    @Override
    public void onStop ()
    {
        mediaSessionStop (host);
    }

    @Override
    public void onSeekTo (long pos)
    {
        mediaSessionSeekTo (host, pos);
    }

    @Override
    public void onPlayFromMediaId (String mediaId, Bundle extras)
    {
        mediaSessionPlayFromMediaId (host, mediaId, extras);
    }

    @Override
    public boolean onMediaButtonEvent (Intent mediaButtonIntent)
    {
        // Report the event as handled without forwarding it anywhere.
        return true;
    }

    // Transport commands the native side does not implement.
    @Override
    public void onFastForward () {}

    @Override
    public void onRewind () {}

    @Override
    public void onSkipToNext () {}

    @Override
    public void onSkipToPrevious () {}

    @Override
    public void onSkipToQueueItem (long id) {}

    // JNI entry points, implemented in the JUCE C++ media code.
    private native void mediaSessionPause (long host);
    private native void mediaSessionPlay (long host);
    private native void mediaSessionPlayFromMediaId (long host, String mediaId, Bundle extras);
    private native void mediaSessionSeekTo (long host, long pos);
    private native void mediaSessionStop (long host);
}

View File

@ -0,0 +1,62 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
package com.rmsl.juce;
import android.database.ContentObserver;
import android.app.Activity;
import android.net.Uri;
//==============================================================================
//==============================================================================
/**
    ContentObserver that watches the Android system-settings content provider
    and notifies JUCE's native (C++) code whenever the music-stream volume
    setting changes.
*/
public class SystemVolumeObserver extends ContentObserver
{
    SystemVolumeObserver (Activity activityToUse, long hostToUse)
    {
        // Passing a null Handler means onChange() may be delivered on an
        // arbitrary thread; the native side must be prepared for that.
        super (null);

        activity = activityToUse;
        host = hostToUse;
    }

    /** Registers or unregisters this observer on the system settings provider. */
    void setEnabled (boolean shouldBeEnabled)
    {
        if (shouldBeEnabled)
            activity.getApplicationContext ().getContentResolver ().registerContentObserver (android.provider.Settings.System.CONTENT_URI, true, this);
        else
            activity.getApplicationContext ().getContentResolver ().unregisterContentObserver (this);
    }

    @Override
    public void onChange (boolean selfChange, Uri uri)
    {
        // Fix: the Uri may be null when the change source is unknown (per the
        // ContentObserver documentation); the previous code would throw a
        // NullPointerException in that case.
        if (uri != null && uri.toString ().startsWith ("content://settings/system/volume_music"))
            mediaSessionSystemVolumeChanged (host);
    }

    // JNI entry point, implemented in the JUCE C++ media code.
    private native void mediaSessionSystemVolumeChanged (long host);

    private final Activity activity;
    private final long host;
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,582 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
//==============================================================================
// macOS camera backend built on AVFoundation. Owns the AVCaptureSession plus
// its device input, still-image output and movie-file output, and forwards
// captured images and session errors back to the owning CameraDevice.
struct CameraDevice::Pimpl
{
    // Select the still-image capture path at compile time: the
    // AVCapturePhotoOutput-based implementation when building with a 10.15+
    // SDK and a 10.15+ deployment target, otherwise the older
    // AVCaptureStillImageOutput-based one.
   #if defined (MAC_OS_X_VERSION_10_15) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_15
    #define JUCE_USE_NEW_APPLE_CAMERA_API 1
   #else
    #define JUCE_USE_NEW_APPLE_CAMERA_API 0
   #endif

   #if JUCE_USE_NEW_APPLE_CAMERA_API
    // Still-image capture via AVCapturePhotoOutput (macOS 10.15+ path).
    class PostCatalinaPhotoOutput
    {
    public:
        PostCatalinaPhotoOutput()
        {
            // The Objective-C delegate class is registered with the runtime
            // once per process (hence static); each Pimpl gets its own
            // delegate instance.
            static PhotoOutputDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
        }

        // Creates the photo output and attaches it to the session.
        // No-op if one is already attached.
        void addImageCapture (AVCaptureSession* s)
        {
            if (imageOutput != nil)
                return;

            imageOutput = [[AVCapturePhotoOutput alloc] init];
            [s addOutput: imageOutput];
        }

        // Detaches and releases the photo output. No-op if not attached.
        void removeImageCapture (AVCaptureSession* s)
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        // Returns the output's active connections, or nil when no output exists.
        NSArray<AVCaptureConnection*>* getConnections() const
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        // Starts an asynchronous still capture with default photo settings.
        // The result arrives in PhotoOutputDelegateClass, which calls
        // p.imageCaptureFinished().
        void triggerImageCapture (Pimpl& p)
        {
            if (imageOutput == nil)
                return;

            PhotoOutputDelegateClass::setOwner (delegate.get(), &p);

            [imageOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                         delegate: id<AVCapturePhotoCaptureDelegate> (delegate.get())];
        }

        // Enumerates video-capable devices (built-in wide-angle + external).
        static NSArray* getAvailableDevices()
        {
            auto* discovery = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: @[AVCaptureDeviceTypeBuiltInWideAngleCamera,
                                                                                                  AVCaptureDeviceTypeExternalUnknown]
                                                                                    mediaType: AVMediaTypeVideo
                                                                                     position: AVCaptureDevicePositionUnspecified];
            return [discovery devices];
        }

    private:
        // Objective-C delegate receiving AVCapturePhotoCaptureDelegate
        // callbacks; stores a raw pointer back to the owning Pimpl in an ivar.
        class PhotoOutputDelegateClass : public ObjCClass<NSObject>
        {
        public:
            PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
            {
                addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
                addIvar<Pimpl*> ("owner");
                registerClass();
            }

            // Called by AVFoundation when the still capture has been processed.
            // On failure this logs and asserts; on success it decodes the
            // encoded photo bytes into a JUCE Image and hands it to the owner.
            static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* photo, NSError* error)
            {
                if (error != nil)
                {
                    // errorString is only used when JUCE_CAMERA_LOG expands to
                    // something; ignoreUnused silences the warning otherwise.
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("Still picture capture failed, error: " + errorString);
                    jassertfalse;

                    return;
                }

                auto* imageData = [photo fileDataRepresentation];
                auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                getOwner (self).imageCaptureFinished (image);
            }

            static Pimpl& getOwner (id self) { return *getIvar<Pimpl*> (self, "owner"); }
            static void setOwner (id self, Pimpl* t) { object_setInstanceVariable (self, "owner", t); }
        };

        AVCapturePhotoOutput* imageOutput = nil;                // owned; released in removeImageCapture()
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;    // per-instance capture delegate
    };
   #else
    // Still-image capture via AVCaptureStillImageOutput (pre-10.15 path).
    struct PreCatalinaStillImageOutput
    {
    public:
        // Creates a JPEG still-image output and attaches it to the session.
        // No-op if one is already attached.
        void addImageCapture (AVCaptureSession* s)
        {
            if (imageOutput != nil)
                return;

            // Codec constant name differs depending on the SDK being used.
            const auto codecType =
                       #if defined (MAC_OS_X_VERSION_10_13) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_13
                        AVVideoCodecTypeJPEG;
                       #else
                        AVVideoCodecJPEG;
                       #endif

            imageOutput = [[AVCaptureStillImageOutput alloc] init];
            auto imageSettings = [[NSDictionary alloc] initWithObjectsAndKeys: codecType, AVVideoCodecKey, nil];
            [imageOutput setOutputSettings: imageSettings];
            [imageSettings release];
            [s addOutput: imageOutput];
        }

        // Detaches and releases the still-image output. No-op if not attached.
        void removeImageCapture (AVCaptureSession* s)
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        // Returns the output's active connections, or nil when no output exists.
        NSArray<AVCaptureConnection*>* getConnections() const
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        // Starts an asynchronous still capture on the active video connection;
        // the completion block decodes the JPEG data and hands the image to
        // p.imageCaptureFinished().
        void triggerImageCapture (Pimpl& p)
        {
            if (auto* videoConnection = p.getVideoConnection())
            {
                [imageOutput captureStillImageAsynchronouslyFromConnection: videoConnection
                                                         completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error)
                {
                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: sampleBuffer];
                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    p.imageCaptureFinished (image);
                }];
            }
        }

        // Enumerates all devices providing video media.
        static NSArray* getAvailableDevices()
        {
            return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
        }

    private:
        AVCaptureStillImageOutput* imageOutput = nil;   // owned; released in removeImageCapture()
    };
   #endif

    // Size/index parameters are accepted for interface compatibility but not
    // used on macOS: quality is chosen via the session preset instead.
    Pimpl (CameraDevice& ownerToUse, const String& deviceNameToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/,
           int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          deviceName (deviceNameToUse)
    {
        session = [[AVCaptureSession alloc] init];

        session.sessionPreset = useHighQuality ? AVCaptureSessionPresetHigh
                                               : AVCaptureSessionPresetMedium;

        // Wire up input + outputs for the named device.
        refreshConnections();

        // Recording delegate (registered once per process, instantiated per Pimpl).
        static DelegateClass cls;
        callbackDelegate = (id<AVCaptureFileOutputRecordingDelegate>) [cls.createInstance() init];
        DelegateClass::setOwner (callbackDelegate, this);

        // Listen for runtime errors raised by the capture session.
        JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
        [[NSNotificationCenter defaultCenter] addObserver: callbackDelegate
                                                 selector: @selector (captureSessionRuntimeError:)
                                                     name: AVCaptureSessionRuntimeErrorNotification
                                                   object: session];
        JUCE_END_IGNORE_WARNINGS_GCC_LIKE
    }

    ~Pimpl()
    {
        // Unhook the notification observer before tearing the session down,
        // then release everything attached to it.
        [[NSNotificationCenter defaultCenter] removeObserver: callbackDelegate];

        [session stopRunning];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        [session release];
        [callbackDelegate release];
    }

    //==============================================================================
    // True when no error message was recorded during opening.
    bool openedOk() const noexcept       { return openingError.isEmpty(); }

    // Starts the capture session if it isn't already running.
    void startSession()
    {
        if (! [session isRunning])
            [session startRunning];
    }

    // Takes a one-shot still picture; the callback is invoked asynchronously
    // on the message thread with the resulting image.
    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerImageCapture();
    }

    // Begins recording to the given file (any existing file is deleted first).
    // The quality parameter is unused on macOS; quality comes from the preset.
    void startRecordingToFile (const File& file, int /*quality*/)
    {
        stopRecording();
        refreshIfNeeded();
        firstPresentationTime = Time::getCurrentTime();
        file.deleteFile();

        startSession();
        isRecording = true;
        [fileOutput startRecordingToOutputFileURL: createNSURLFromFile (file)
                                recordingDelegate: callbackDelegate];
    }

    void stopRecording()
    {
        if (isRecording)
        {
            [fileOutput stopRecording];
            isRecording = false;
        }
    }

    // Wall-clock time captured when the current/most recent recording started.
    Time getTimeOfFirstRecordedFrame() const
    {
        return firstPresentationTime;
    }

    // The first listener added kicks off the continuous capture loop (see
    // handleImageCapture(), which re-triggers while listeners remain).
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerImageCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

    // Returns the localized names of all cameras visible to the active
    // still-image output class (decltype picks whichever class imageOutput is).
    static StringArray getAvailableDevices()
    {
        auto* devices = decltype (imageOutput)::getAvailableDevices();

        StringArray results;

        for (AVCaptureDevice* device : devices)
            results.add (nsStringToJuce ([device localizedName]));

        return results;
    }

    AVCaptureSession* getCaptureSession()
    {
        return session;
    }

    NSView* createVideoCapturePreview()
    {
        // The video preview must be created before the capture session is
        // started. Make sure you haven't called `addListener`,
        // `startRecordingToFile`, or `takeStillPicture` before calling this
        // function.
        jassert (! [session isRunning]);
        startSession();

        JUCE_AUTORELEASEPOOL
        {
            NSView* view = [[NSView alloc] init];
            [view setLayer: [AVCaptureVideoPreviewLayer layerWithSession: getCaptureSession()]];
            return view;
        }
    }

private:
    //==============================================================================
    // Objective-C delegate implementing AVCaptureFileOutputRecordingDelegate
    // and receiving session runtime-error notifications. The recording
    // callbacks are required by the protocol but intentionally do nothing.
    struct DelegateClass  : public ObjCClass<NSObject>
    {
        DelegateClass()  : ObjCClass<NSObject> ("JUCECameraDelegate_")
        {
            addIvar<Pimpl*> ("owner");
            addProtocol (@protocol (AVCaptureFileOutputRecordingDelegate));
            addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL: fromConnections:),       didStartRecordingToOutputFileAtURL,  "v@:@@@");
            addMethod (@selector (captureOutput:didPauseRecordingToOutputFileAtURL: fromConnections:),       didPauseRecordingToOutputFileAtURL,  "v@:@@@");
            addMethod (@selector (captureOutput:didResumeRecordingToOutputFileAtURL: fromConnections:),      didResumeRecordingToOutputFileAtURL, "v@:@@@");
            addMethod (@selector (captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:), willFinishRecordingToOutputFileAtURL, "v@:@@@@");

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
            addMethod (@selector (captureSessionRuntimeError:), sessionRuntimeError, "v@:@");
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            registerClass();
        }

        static void setOwner (id self, Pimpl* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static Pimpl& getOwner (id self)               { return *getIvar<Pimpl*> (self, "owner"); }

    private:
        static void didStartRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didPauseRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didResumeRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void willFinishRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*, NSError*) {}

        // Extracts the error from the notification and forwards it to the owner.
        static void sessionRuntimeError (id self, SEL, NSNotification* notification)
        {
            JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

            NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
            auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
            getOwner (self).cameraSessionRuntimeError (errorString);
        }
    };

    //==============================================================================
    void addImageCapture()
    {
        imageOutput.addImageCapture (session);
    }

    void addMovieCapture()
    {
        if (fileOutput == nil)
        {
            fileOutput = [[AVCaptureMovieFileOutput alloc] init];
            [session addOutput: fileOutput];
        }
    }

    void removeImageCapture()
    {
        imageOutput.removeImageCapture (session);
    }

    void removeMovieCapture()
    {
        if (fileOutput != nil)
        {
            [session removeOutput: fileOutput];
            [fileOutput release];
            fileOutput = nil;
        }
    }

    // Detaches every video input currently attached to the session, so that
    // only the device selected in addInput() remains.
    void removeCurrentSessionVideoInputs()
    {
        if (session != nil)
        {
            NSArray<AVCaptureDeviceInput*>* inputs = session.inputs;

            for (AVCaptureDeviceInput* input : inputs)
                if ([input.device hasMediaType: AVMediaTypeVideo])
                    [session removeInput:input];
        }
    }

    // Finds the device matching deviceName among the available devices and
    // attaches it as the session's input. No-op if an input already exists.
    void addInput()
    {
        if (currentInput == nil)
        {
            auto* availableDevices = decltype (imageOutput)::getAvailableDevices();

            for (AVCaptureDevice* device : availableDevices)
            {
                if (deviceName == nsStringToJuce ([device localizedName]))
                {
                    removeCurrentSessionVideoInputs();

                    NSError* err = nil;
                    AVCaptureDeviceInput* inputDevice = [[AVCaptureDeviceInput alloc] initWithDevice: device
                                                                                               error: &err];

                    jassert (err == nil);

                    if ([session canAddInput: inputDevice])
                    {
                        [session addInput: inputDevice];
                        currentInput = inputDevice;
                    }
                    else
                    {
                        // The session rejected the input: release it to avoid a leak.
                        jassertfalse;
                        [inputDevice release];
                    }

                    return;
                }
            }
        }
    }

    void removeInput()
    {
        if (currentInput != nil)
        {
            [session removeInput: currentInput];
            [currentInput release];
            currentInput = nil;
        }
    }

    // Rebuilds the input and both outputs inside a single
    // beginConfiguration/commitConfiguration transaction.
    void refreshConnections()
    {
        [session beginConfiguration];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        addInput();
        addImageCapture();
        addMovieCapture();
        [session commitConfiguration];
    }

    // Rebuilds the connections only when no active video connection exists.
    void refreshIfNeeded()
    {
        if (getVideoConnection() == nullptr)
            refreshConnections();
    }

    // Returns the first active+enabled connection carrying video, or nil.
    AVCaptureConnection* getVideoConnection() const
    {
        auto* connections = imageOutput.getConnections();

        if (connections != nil)
            for (AVCaptureConnection* connection in connections)
                if ([connection isActive] && [connection isEnabled])
                    for (AVCaptureInputPort* port in [connection inputPorts])
                        if ([[port mediaType] isEqual: AVMediaTypeVideo])
                            return connection;

        return nil;
    }

    // Called from the capture callback: notifies listeners synchronously, then
    // delivers the one-shot pictureTakenCallback asynchronously on the message
    // thread (guarded by a weak reference in case this Pimpl is destroyed).
    void imageCaptureFinished (const Image& image)
    {
        handleImageCapture (image);

        MessageManager::callAsync ([weakRef = WeakReference<Pimpl> { this }, image]() mutable
        {
            if (weakRef != nullptr && weakRef->pictureTakenCallback != nullptr)
                weakRef->pictureTakenCallback (image);
        });
    }

    // Delivers the image to all listeners, then re-triggers capture while any
    // listener remains registered — this is what produces a continuous stream.
    void handleImageCapture (const Image& image)
    {
        const ScopedLock sl (listenerLock);

        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        if (! listeners.isEmpty())
            triggerImageCapture();
    }

    // Ensures the session is configured and running, then starts a capture if
    // a live video connection exists (the connection itself is only a guard).
    void triggerImageCapture()
    {
        refreshIfNeeded();

        startSession();

        if (auto* videoConnection = getVideoConnection())
            imageOutput.triggerImageCapture (*this);
    }

    // Forwards a session runtime error to the owner's onErrorOccurred handler.
    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (owner.onErrorOccurred != nullptr)
            owner.onErrorOccurred (error);
    }

    //==============================================================================
    CameraDevice& owner;
    String deviceName;

    AVCaptureSession* session = nil;                 // owned; released in ~Pimpl()
    AVCaptureMovieFileOutput* fileOutput = nil;      // owned; released in removeMovieCapture()

    // Active still-image output implementation, chosen at compile time above.
   #if JUCE_USE_NEW_APPLE_CAMERA_API
    PostCatalinaPhotoOutput imageOutput;
   #else
    PreCatalinaStillImageOutput imageOutput;
   #endif

    AVCaptureDeviceInput* currentInput = nil;        // owned; released in removeInput()

    id<AVCaptureFileOutputRecordingDelegate> callbackDelegate = nil;
    String openingError;                             // non-empty => openedOk() is false
    Time firstPresentationTime;                      // set when a recording starts
    bool isRecording = false;

    CriticalSection listenerLock;                    // guards the listener list
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback = nullptr;   // one-shot still-picture callback

    //==============================================================================
    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
    JUCE_DECLARE_NON_COPYABLE       (Pimpl)
};
//==============================================================================
//==============================================================================
// Component that shows the live camera preview by hosting the NSView created
// by Pimpl::createVideoCapturePreview().
struct CameraDevice::ViewerComponent  : public NSViewComponent
{
    ViewerComponent (CameraDevice& device)
    {
        setView (device.pimpl->createVideoCapturePreview());
    }

    ~ViewerComponent()
    {
        // Detach the native view before the component is destroyed.
        setView (nil);
    }

    JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
// Recordings are written via AVCaptureMovieFileOutput, so the QuickTime
// container extension is used.
String CameraDevice::getFileExtension()
{
    return ".mov";
}
#undef JUCE_USE_NEW_APPLE_CAMERA_API

View File

@ -0,0 +1,831 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#if JUCE_MAC
using Base = NSViewComponent;
#else
using Base = UIViewComponent;
#endif
struct VideoComponent::Pimpl : public Base
{
// Creates the platform view hierarchy and hands the player controller's
// view to the NSViewComponent/UIViewComponent base via setView().
Pimpl (VideoComponent& ownerToUse, bool useNativeControlsIfAvailable)
: owner (ownerToUse),
playerController (*this, useNativeControlsIfAvailable)
{
setVisible (true);
auto* view = playerController.getView();
setView (view);
#if JUCE_MAC
// Forward unhandled events up the responder chain and back the view
// with a CALayer so an AVPlayerLayer can be attached to it.
[view setNextResponder: [view superview]];
[view setWantsLayer: YES];
#endif
}
~Pimpl()
{
// Stop playback and release the player before tearing down the view.
close();
setView (nil);
}
// Synchronously opens a local file; remembers it in currentFile on success.
Result load (const File& file)
{
auto r = load (createNSURLFromFile (file));
if (r.wasOk())
currentFile = file;
return r;
}
// Synchronously opens a URL; remembers it in currentURL on success.
Result load (const URL& url)
{
auto r = load ([NSURL URLWithString: juceStringToNS (url.toString (true))]);
if (r.wasOk())
currentURL = url;
return r;
}
// Common synchronous entry point: closes any open movie first, then
// delegates to the platform player controller.
Result load (NSURL* url)
{
if (url != nil)
{
close();
return playerController.load (url);
}
return Result::fail ("Couldn't open movie");
}
// Begins asynchronous loading; `callback` is stored and invoked later from
// playerPreparationFinished(). An empty URL or null callback is a caller bug.
void loadAsync (const URL& url, std::function<void (const URL&, Result)> callback)
{
if (url.isEmpty())
{
jassertfalse;
return;
}
currentURL = url;
jassert (callback != nullptr);
loadFinishedCallback = std::move (callback);
playerController.loadAsync (url);
}
// Stops playback, releases the player and clears the remembered source.
void close()
{
stop();
playerController.close();
currentFile = File();
currentURL = {};
}
bool isOpen() const noexcept { return playerController.getPlayer() != nil; }
// A paused AVPlayer reports rate 0, so non-zero speed means "playing".
bool isPlaying() const noexcept { return getSpeed() != 0; }
// Starts playback, then re-applies the cached speed multiplier.
void play() noexcept { [playerController.getPlayer() play]; setSpeed (playSpeedMult); }
void stop() noexcept { [playerController.getPlayer() pause]; }
// Seeks to a position in seconds with zero tolerance (frame-accurate seek).
// The 100000 timescale preserves sub-millisecond precision in the CMTime.
void setPosition (double newPosition)
{
if (auto* p = playerController.getPlayer())
{
CMTime t = { (CMTimeValue) (100000.0 * newPosition),
(CMTimeScale) 100000, kCMTimeFlags_Valid, {} };
[p seekToTime: t
toleranceBefore: kCMTimeZero
toleranceAfter: kCMTimeZero];
}
}
// Current playback position in seconds, or 0 if no player is open.
double getPosition() const
{
if (auto* p = playerController.getPlayer())
return toSeconds ([p currentTime]);
return 0.0;
}
void setSpeed (double newSpeed)
{
playSpeedMult = newSpeed;
// Calling non 0.0 speed on a paused player would start it...
if (isPlaying())
[playerController.getPlayer() setRate: (float) playSpeedMult];
}
// Current AVPlayer rate (0 when paused/stopped).
double getSpeed() const
{
if (auto* p = playerController.getPlayer())
return [p rate];
return 0.0;
}
// Video frame size as reported by the current item's presentationSize.
Rectangle<int> getNativeSize() const
{
if (auto* p = playerController.getPlayer())
{
auto s = [[p currentItem] presentationSize];
return { (int) s.width, (int) s.height };
}
return {};
}
// Total duration in seconds, or 0 if nothing is loaded.
double getDuration() const
{
if (auto* p = playerController.getPlayer())
return toSeconds ([[p currentItem] duration]);
return 0.0;
}
// Sending setVolume: to a nil player is a harmless no-op.
void setVolume (float newVolume)
{
[playerController.getPlayer() setVolume: newVolume];
}
float getVolume() const
{
if (auto* p = playerController.getPlayer())
return [p volume];
return 0.0f;
}
File currentFile;
URL currentURL;
private:
//==============================================================================
template <typename Derived>
class PlayerControllerBase
{
public:
// Detach KVO and notification observers before destruction so the observer
// objects can never call back into a dead owner.
~PlayerControllerBase()
{
detachPlayerStatusObserver();
detachPlaybackObserver();
}
protected:
//==============================================================================
// Obj-C class that observes the AVPlayer's "rate" and "status" properties via
// KVO and forwards the transitions to its owning PlayerControllerBase.
struct JucePlayerStatusObserverClass : public ObjCClass<NSObject>
{
    JucePlayerStatusObserverClass() : ObjCClass<NSObject> ("JucePlayerStatusObserverClass_")
    {
        JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
        addMethod (@selector (observeValueForKeyPath:ofObject:change:context:), valueChanged, "v@:@@@?");
        JUCE_END_IGNORE_WARNINGS_GCC_LIKE

        // The ivar stores the owning PlayerControllerBase* — getOwner()/setOwner()
        // below read and write it with that type. (It was previously declared as
        // PlayerAsyncInitialiser*, a copy-paste slip from the initialiser's
        // observer class; harmless at runtime since only the size matters, but
        // the declaration should match its use.)
        addIvar<PlayerControllerBase*> ("owner");

        registerClass();
    }

    //==============================================================================
    static PlayerControllerBase& getOwner (id self)         { return *getIvar<PlayerControllerBase*> (self, "owner"); }
    static void setOwner (id self, PlayerControllerBase* p) { object_setInstanceVariable (self, "owner", p); }

private:
    // KVO callback: a rate edge of 0 -> non-zero means playback started,
    // non-zero -> 0 means it stopped; a failed status reports an error.
    static void valueChanged (id self, SEL, NSString* keyPath, id,
                              NSDictionary<NSString*, id>* change, void*)
    {
        auto& owner = getOwner (self);

        if ([keyPath isEqualToString: nsStringLiteral ("rate")])
        {
            auto oldRate = [change[NSKeyValueChangeOldKey] floatValue];
            auto newRate = [change[NSKeyValueChangeNewKey] floatValue];

            if (oldRate == 0 && newRate != 0)
                owner.playbackStarted();
            else if (oldRate != 0 && newRate == 0)
                owner.playbackStopped();
        }
        else if ([keyPath isEqualToString: nsStringLiteral ("status")])
        {
            auto status = [change[NSKeyValueChangeNewKey] intValue];

            if (status == AVPlayerStatusFailed)
                owner.errorOccurred();
        }
    }
};
//==============================================================================
// Obj-C class registered with NSNotificationCenter; forwards the
// AVPlayerItemDidPlayToEndTime notification to the owning controller.
struct JucePlayerItemPlaybackStatusObserverClass : public ObjCClass<NSObject>
{
JucePlayerItemPlaybackStatusObserverClass() : ObjCClass<NSObject> ("JucePlayerItemPlaybackStatusObserverClass_")
{
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
addMethod (@selector (processNotification:), notificationReceived, "v@:@");
JUCE_END_IGNORE_WARNINGS_GCC_LIKE
addIvar<PlayerControllerBase*> ("owner");
registerClass();
}
//==============================================================================
static PlayerControllerBase& getOwner (id self) { return *getIvar<PlayerControllerBase*> (self, "owner"); }
static void setOwner (id self, PlayerControllerBase* p) { object_setInstanceVariable (self, "owner", p); }
private:
// Filter by name in case the observer ever receives other notifications.
static void notificationReceived (id self, SEL, NSNotification* notification)
{
if ([notification.name isEqualToString: AVPlayerItemDidPlayToEndTimeNotification])
getOwner (self).playbackReachedEndTime();
}
};
//==============================================================================
// Drives the asynchronous loading pipeline:
// loadAsync() -> AVURLAsset key loading -> checkAllKeysReadyFor()
// -> preparePlayerItem() -> KVO on the item's "status"
// -> notifyOwnerPreparationFinished() (async on the message thread).
class PlayerAsyncInitialiser
{
public:
PlayerAsyncInitialiser (PlayerControllerBase& ownerToUse)
: owner (ownerToUse),
assetKeys ([[NSArray alloc] initWithObjects: nsStringLiteral ("duration"), nsStringLiteral ("tracks"),
nsStringLiteral ("playable"), nil])
{
static JucePlayerItemPreparationStatusObserverClass cls;
playerItemPreparationStatusObserver.reset ([cls.createInstance() init]);
JucePlayerItemPreparationStatusObserverClass::setOwner (playerItemPreparationStatusObserver.get(), this);
}
~PlayerAsyncInitialiser()
{
// Remove the KVO registration before the observer object is released.
detachPreparationStatusObserver();
}
// Starts loading the asset's keys on a background queue; the completion
// block captures `url` by value and runs off the message thread.
void loadAsync (URL url)
{
auto nsUrl = [NSURL URLWithString: juceStringToNS (url.toString (true))];
asset.reset ([[AVURLAsset alloc] initWithURL: nsUrl options: nil]);
[asset.get() loadValuesAsynchronouslyForKeys: assetKeys.get()
completionHandler: ^() { checkAllKeysReadyFor (asset.get(), url); }];
}
private:
//==============================================================================
// Obj-C observer for the AVPlayerItem's "status" key; reports ready/failed
// back to the initialiser that registered it (identified via the context).
struct JucePlayerItemPreparationStatusObserverClass : public ObjCClass<NSObject>
{
JucePlayerItemPreparationStatusObserverClass() : ObjCClass<NSObject> ("JucePlayerItemStatusObserverClass_")
{
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
addMethod (@selector (observeValueForKeyPath:ofObject:change:context:), valueChanged, "v@:@@@?");
JUCE_END_IGNORE_WARNINGS_GCC_LIKE
addIvar<PlayerAsyncInitialiser*> ("owner");
registerClass();
}
//==============================================================================
static PlayerAsyncInitialiser& getOwner (id self) { return *getIvar<PlayerAsyncInitialiser*> (self, "owner"); }
static void setOwner (id self, PlayerAsyncInitialiser* p) { object_setInstanceVariable (self, "owner", p); }
private:
static void valueChanged (id self, SEL, NSString*, id object,
NSDictionary<NSString*, id>* change, void* context)
{
auto& owner = getOwner (self);
// Only react to notifications registered by this initialiser
// (the owner pointer was passed as the KVO context).
if (context == &owner)
{
auto* playerItem = (AVPlayerItem*) object;
auto* urlAsset = (AVURLAsset*) playerItem.asset;
URL url (nsStringToJuce (urlAsset.URL.absoluteString));
auto oldStatus = [change[NSKeyValueChangeOldKey] intValue];
auto newStatus = [change[NSKeyValueChangeNewKey] intValue];
// Ignore spurious notifications
if (oldStatus == newStatus)
return;
if (newStatus == AVPlayerItemStatusFailed)
{
auto errorMessage = playerItem.error != nil
? nsStringToJuce (playerItem.error.localizedDescription)
: String();
owner.notifyOwnerPreparationFinished (url, Result::fail (errorMessage), nullptr);
}
else if (newStatus == AVPlayerItemStatusReadyToPlay)
{
owner.notifyOwnerPreparationFinished (url, Result::ok(), owner.player.get());
}
else
{
jassertfalse;
}
}
}
};
//==============================================================================
PlayerControllerBase& owner;
std::unique_ptr<AVURLAsset, NSObjectDeleter> asset;
std::unique_ptr<NSArray<NSString*>, NSObjectDeleter> assetKeys;
std::unique_ptr<AVPlayerItem, NSObjectDeleter> playerItem;
std::unique_ptr<NSObject, NSObjectDeleter> playerItemPreparationStatusObserver;
std::unique_ptr<AVPlayer, NSObjectDeleter> player;
//==============================================================================
// Completion handler for the asset key loading: fails fast on the first
// cancelled/failed key, otherwise proceeds to build the player item.
// Runs on a background queue (AVFoundation's completion queue).
void checkAllKeysReadyFor (AVAsset* assetToCheck, const URL& url)
{
NSError* error = nil;
int successCount = 0;
for (NSString* key : assetKeys.get())
{
switch ([assetToCheck statusOfValueForKey: key error: &error])
{
case AVKeyValueStatusLoaded:
{
++successCount;
break;
}
case AVKeyValueStatusCancelled:
{
notifyOwnerPreparationFinished (url, Result::fail ("Loading cancelled"), nullptr);
return;
}
case AVKeyValueStatusFailed:
{
auto errorMessage = error != nil ? nsStringToJuce (error.localizedDescription) : String();
notifyOwnerPreparationFinished (url, Result::fail (errorMessage), nullptr);
return;
}
case AVKeyValueStatusUnknown:
case AVKeyValueStatusLoading:
default:
break;
}
}
// All keys should be loaded by the time the completion handler runs.
jassert (successCount == (int) [assetKeys.get() count]);
preparePlayerItem();
}
// The observer must be attached before the AVPlayer is created, so the
// initial status change is not missed.
void preparePlayerItem()
{
playerItem.reset ([[AVPlayerItem alloc] initWithAsset: asset.get()]);
attachPreparationStatusObserver();
player.reset ([[AVPlayer alloc] initWithPlayerItem: playerItem.get()]);
}
//==============================================================================
void attachPreparationStatusObserver()
{
[playerItem.get() addObserver: playerItemPreparationStatusObserver.get()
forKeyPath: nsStringLiteral ("status")
options: NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
context: this];
}
void detachPreparationStatusObserver()
{
if (playerItem != nullptr && playerItemPreparationStatusObserver != nullptr)
{
[playerItem.get() removeObserver: playerItemPreparationStatusObserver.get()
forKeyPath: nsStringLiteral ("status")
context: this];
}
}
//==============================================================================
// Bounces the result onto the message thread; the WeakReference guards
// against the initialiser being destroyed before the callback runs.
void notifyOwnerPreparationFinished (const URL& url, Result r, AVPlayer* preparedPlayer)
{
MessageManager::callAsync ([url, preparedPlayer, r,
safeThis = WeakReference<PlayerAsyncInitialiser> { this }]() mutable
{
if (safeThis != nullptr)
safeThis->owner.playerPreparationFinished (url, r, preparedPlayer);
});
}
JUCE_DECLARE_WEAK_REFERENCEABLE (PlayerAsyncInitialiser)
};
//==============================================================================
Pimpl& owner;
bool useNativeControls;
PlayerAsyncInitialiser playerAsyncInitialiser;
std::unique_ptr<NSObject, NSObjectDeleter> playerStatusObserver;
std::unique_ptr<NSObject, NSObjectDeleter> playerItemPlaybackStatusObserver;
//==============================================================================
// Protected: only constructed by the CRTP-derived PlayerController.
// Creates one instance of each observer class and points it back at us.
PlayerControllerBase (Pimpl& ownerToUse, bool useNativeControlsIfAvailable)
: owner (ownerToUse),
useNativeControls (useNativeControlsIfAvailable),
playerAsyncInitialiser (*this)
{
static JucePlayerStatusObserverClass playerObserverClass;
playerStatusObserver.reset ([playerObserverClass.createInstance() init]);
JucePlayerStatusObserverClass::setOwner (playerStatusObserver.get(), this);
static JucePlayerItemPlaybackStatusObserverClass itemObserverClass;
playerItemPlaybackStatusObserver.reset ([itemObserverClass.createInstance() init]);
JucePlayerItemPlaybackStatusObserverClass::setOwner (playerItemPlaybackStatusObserver.get(), this);
}
//==============================================================================
// KVO registration on the current player's "rate" and "status" keys;
// `this` is used as the context to identify our registrations.
void attachPlayerStatusObserver()
{
[crtp().getPlayer() addObserver: playerStatusObserver.get()
forKeyPath: nsStringLiteral ("rate")
options: NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
context: this];
[crtp().getPlayer() addObserver: playerStatusObserver.get()
forKeyPath: nsStringLiteral ("status")
options: NSKeyValueObservingOptionNew
context: this];
}
// Safe to call when no player is set — it just does nothing.
void detachPlayerStatusObserver()
{
if (crtp().getPlayer() != nullptr && playerStatusObserver != nullptr)
{
[crtp().getPlayer() removeObserver: playerStatusObserver.get()
forKeyPath: nsStringLiteral ("rate")
context: this];
[crtp().getPlayer() removeObserver: playerStatusObserver.get()
forKeyPath: nsStringLiteral ("status")
context: this];
}
}
// Listens for the current item reaching its end time.
void attachPlaybackObserver()
{
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
[[NSNotificationCenter defaultCenter] addObserver: playerItemPlaybackStatusObserver.get()
selector: @selector (processNotification:)
name: AVPlayerItemDidPlayToEndTimeNotification
object: [crtp().getPlayer() currentItem]];
JUCE_END_IGNORE_WARNINGS_GCC_LIKE
}
// Removes every NSNotificationCenter registration for our observer object.
void detachPlaybackObserver()
{
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
[[NSNotificationCenter defaultCenter] removeObserver: playerItemPlaybackStatusObserver.get()];
JUCE_END_IGNORE_WARNINGS_GCC_LIKE
}
private:
//==============================================================================
// CRTP downcast: gives access to the derived PlayerController's
// getPlayer()/setPlayer() without virtual dispatch.
Derived& crtp() { return static_cast<Derived&> (*this); }
//==============================================================================
// Called (on the message thread) by PlayerAsyncInitialiser when async
// loading completes; installs the prepared player before telling the Pimpl.
void playerPreparationFinished (const URL& url, Result r, AVPlayer* preparedPlayer)
{
if (preparedPlayer != nil)
crtp().setPlayer (preparedPlayer);
owner.playerPreparationFinished (url, r);
}
// Notifications arrive on an arbitrary thread; bounce to the message thread
// and guard with a WeakReference in case we are destroyed in the meantime.
void playbackReachedEndTime()
{
MessageManager::callAsync ([safeThis = WeakReference<PlayerControllerBase> { this }]() mutable
{
if (safeThis != nullptr)
safeThis->owner.playbackReachedEndTime();
});
}
//==============================================================================
// Forwards the player's error description (if any) to the Pimpl.
void errorOccurred()
{
auto errorMessage = (crtp().getPlayer() != nil && crtp().getPlayer().error != nil)
? nsStringToJuce (crtp().getPlayer().error.localizedDescription)
: String();
owner.errorOccurred (errorMessage);
}
void playbackStarted()
{
owner.playbackStarted();
}
void playbackStopped()
{
owner.playbackStopped();
}
JUCE_DECLARE_WEAK_REFERENCEABLE (PlayerControllerBase)
};
#if JUCE_MAC
//==============================================================================
class PlayerController : public PlayerControllerBase<PlayerController>
{
public:
// macOS: either an AVPlayerView (native transport controls, 64-bit only)
// or a plain NSView backed by an AVPlayerLayer.
PlayerController (Pimpl& ownerToUse, bool useNativeControlsIfAvailable)
: PlayerControllerBase (ownerToUse, useNativeControlsIfAvailable)
{
#if JUCE_32BIT
// 32-bit builds don't have AVPlayerView, so need to use a layer
useNativeControls = false;
#endif
if (useNativeControls)
{
#if ! JUCE_32BIT
playerView = [[AVPlayerView alloc] init];
#endif
}
else
{
view = [[NSView alloc] init];
playerLayer = [[AVPlayerLayer alloc] init];
[view setLayer: playerLayer];
}
}
// Releases whichever objects the constructor allocated. Note that on 64-bit
// builds with useNativeControls == false, `view` and `playerLayer` are the
// allocated objects (not `playerView`), so they must be released here too —
// the previous version only released `playerView` on 64-bit, leaking the
// view and layer. Sending release to nil is a no-op, so unconditional
// releases cover every configuration.
~PlayerController()
{
    [view release];
    [playerLayer release];
   #if ! JUCE_32BIT
    [playerView release];
   #endif
}
// Returns the view hosting the video: the AVPlayerView when native controls
// are active, otherwise the layer-backed NSView.
NSView* getView()
{
#if ! JUCE_32BIT
if (useNativeControls)
return playerView;
#endif
return view;
}
// Synchronous load: macOS supports creating the player directly from a URL.
Result load (NSURL* url)
{
if (auto player = [AVPlayer playerWithURL: url])
{
setPlayer (player);
return Result::ok();
}
return Result::fail ("Couldn't open movie");
}
void loadAsync (URL url)
{
playerAsyncInitialiser.loadAsync (url);
}
void close() { setPlayer (nil); }
// Swaps in a new player, moving the KVO/notification observers with it.
void setPlayer (AVPlayer* player)
{
if (getPlayer() != nil && player != getPlayer()) {
// must detach from this player properly
detachPlayerStatusObserver();
detachPlaybackObserver();
}
#if ! JUCE_32BIT
if (useNativeControls)
[playerView setPlayer: player];
else
#endif
[playerLayer setPlayer: player];
if (player != nil)
{
attachPlayerStatusObserver();
attachPlaybackObserver();
}
else
{
// Detaching twice is safe: once the player is nil these are no-ops.
detachPlayerStatusObserver();
detachPlaybackObserver();
}
}
AVPlayer* getPlayer() const
{
#if ! JUCE_32BIT
if (useNativeControls)
return [playerView player];
#endif
return [playerLayer player];
}
private:
NSView* view = nil;
AVPlayerLayer* playerLayer = nil;
#if ! JUCE_32BIT
// 32-bit builds don't have AVPlayerView
AVPlayerView* playerView = nil;
#endif
};
#else
//==============================================================================
class PlayerController : public PlayerControllerBase<PlayerController>
{
public:
// iOS: either an AVPlayerViewController (native transport controls) or a
// custom UIView subclass carrying an AVPlayerLayer sublayer.
PlayerController (Pimpl& ownerToUse, bool useNativeControlsIfAvailable)
: PlayerControllerBase (ownerToUse, useNativeControlsIfAvailable)
{
if (useNativeControls)
{
playerViewController.reset ([[AVPlayerViewController alloc] init]);
}
else
{
static JuceVideoViewerClass cls;
playerView.reset ([cls.createInstance() init]);
playerLayer.reset ([[AVPlayerLayer alloc] init]);
[playerView.get().layer addSublayer: playerLayer.get()];
}
}
// In the non-native path ownership of the view is transferred to the caller
// via release(), hence the single-call restriction.
UIView* getView()
{
if (useNativeControls)
return [playerViewController.get() view];
// Should call getView() only once.
jassert (playerView != nil);
return playerView.release();
}
Result load (NSURL*)
{
jassertfalse;
return Result::fail ("Synchronous loading is not supported on iOS, use loadAsync()");
}
void loadAsync (URL url)
{
playerAsyncInitialiser.loadAsync (url);
}
void close() { setPlayer (nil); }
AVPlayer* getPlayer() const
{
if (useNativeControls)
return [playerViewController.get() player];
return [playerLayer.get() player];
}
// Swaps in a new player, moving the observers with it (mirrors the macOS
// implementation). Only attach observers when a real player is installed:
// the previous version attached unconditionally, so closing (playerToUse ==
// nil) registered the playback observer with object:nil — which makes
// NSNotificationCenter deliver *every* AVPlayerItemDidPlayToEndTime
// notification in the process to us — and repeated attaches accumulated
// duplicate registrations.
void setPlayer (AVPlayer* playerToUse)
{
    if (getPlayer() != nil && playerToUse != getPlayer()) {
        // must detach from this player properly
        detachPlayerStatusObserver();
        detachPlaybackObserver();
    }

    if (useNativeControls)
        [playerViewController.get() setPlayer: playerToUse];
    else
        [playerLayer.get() setPlayer: playerToUse];

    if (playerToUse != nil)
    {
        attachPlayerStatusObserver();
        attachPlaybackObserver();
    }
    else
    {
        detachPlayerStatusObserver();
        detachPlaybackObserver();
    }
}
private:
//==============================================================================
// UIView subclass that keeps its first AVPlayerLayer sublayer sized to fill
// the view's bounds whenever layout occurs.
struct JuceVideoViewerClass : public ObjCClass<UIView>
{
JuceVideoViewerClass() : ObjCClass<UIView> ("JuceVideoViewerClass_")
{
addMethod (@selector (layoutSubviews), layoutSubviews, "v@:");
registerClass();
}
private:
// Calls through to UIView's layoutSubviews, then resizes the player layer.
static void layoutSubviews (id self, SEL)
{
sendSuperclassMessage<void> (self, @selector (layoutSubviews));
UIView* asUIView = (UIView*) self;
if (auto* previewLayer = getPreviewLayer (self))
previewLayer.frame = asUIView.bounds;
}
// Returns the first sublayer if it is an AVPlayerLayer (the ctor installs
// it at index 0), otherwise nil.
static AVPlayerLayer* getPreviewLayer (id self)
{
UIView* asUIView = (UIView*) self;
if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
if ([asUIView.layer.sublayers[0] isKindOfClass: [AVPlayerLayer class]])
return (AVPlayerLayer*) asUIView.layer.sublayers[0];
return nil;
}
};
//==============================================================================
std::unique_ptr<AVPlayerViewController, NSObjectDeleter> playerViewController;
std::unique_ptr<UIView, NSObjectDeleter> playerView;
std::unique_ptr<AVPlayerLayer, NSObjectDeleter> playerLayer;
};
#endif
//==============================================================================
VideoComponent& owner;
PlayerController playerController;
// Set by loadAsync(); invoked exactly once from playerPreparationFinished().
std::function<void (const URL&, Result)> loadFinishedCallback;
// Cached playback rate, re-applied by play() after a pause.
double playSpeedMult = 1.0;
// Converts a CMTime to seconds; an invalid/zero timescale yields 0.
static double toSeconds (const CMTime& t) noexcept
{
return t.timescale != 0 ? (t.value / (double) t.timescale) : 0.0;
}
// Runs on the message thread; hands the result to the caller's callback
// and clears it so it can't fire twice.
void playerPreparationFinished (const URL& url, Result r)
{
owner.resized();
loadFinishedCallback (url, r);
loadFinishedCallback = nullptr;
}
void errorOccurred (const String& errorMessage)
{
if (owner.onErrorOccurred != nullptr)
owner.onErrorOccurred (errorMessage);
}
void playbackStarted()
{
if (owner.onPlaybackStarted != nullptr)
owner.onPlaybackStarted();
}
void playbackStopped()
{
if (owner.onPlaybackStopped != nullptr)
owner.onPlaybackStopped();
}
// End-of-stream: pause the player (position is intentionally left at the end).
void playbackReachedEndTime()
{
stop();
// setPosition (0.0);
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Pimpl)
};

View File

@ -0,0 +1,840 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
// Local re-declarations of the DirectShow SampleGrabber COM interfaces —
// these were removed from recent Windows SDK headers, so the IIDs/CLSIDs
// and vtable layouts are reproduced here verbatim.
interface ISampleGrabberCB : public IUnknown
{
JUCE_COMCALL SampleCB (double, IMediaSample*) = 0;
JUCE_COMCALL BufferCB (double, BYTE*, long) = 0;
};
interface ISampleGrabber : public IUnknown
{
JUCE_COMCALL SetOneShot (BOOL) = 0;
JUCE_COMCALL SetMediaType (const AM_MEDIA_TYPE*) = 0;
JUCE_COMCALL GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;
JUCE_COMCALL SetBufferSamples (BOOL) = 0;
JUCE_COMCALL GetCurrentBuffer (long*, long*) = 0;
JUCE_COMCALL GetCurrentSample (IMediaSample**) = 0;
JUCE_COMCALL SetCallback (ISampleGrabberCB*, long) = 0;
};
// GUIDs matching the (removed) qedit.h definitions.
static const IID IID_ISampleGrabberCB = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
struct CameraDevice::Pimpl : public ChangeBroadcaster
{
// Builds the DirectShow capture graph:
// camera filter -> smart tee -> (preview pin) -> sample grabber -> null renderer
// The tee's capture pin is connected later by createFileCaptureFilter().
// Any failure leaves openedSuccessfully == false; each step returns early.
Pimpl (CameraDevice& ownerToUse, const String&, int index,
int minWidth, int minHeight, int maxWidth, int maxHeight,
bool /*highQuality*/)
: owner (ownerToUse)
{
HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
if (FAILED (hr))
return;
// Find the camera's capture filter by device index.
filter = enumerateCameras (nullptr, index);
if (filter == nullptr)
return;
hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
if (FAILED (hr))
return;
hr = captureGraphBuilder->SetFiltergraph (graphBuilder);
if (FAILED (hr))
return;
mediaControl = graphBuilder.getInterface<IMediaControl>();
if (mediaControl == nullptr)
return;
{
// Enumerate the supported resolutions and pick one inside the
// requested min/max bounds.
ComSmartPtr<IAMStreamConfig> streamConfig;
hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, nullptr, filter,
IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());
if (streamConfig != nullptr)
{
getVideoSizes (streamConfig);
if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
return;
}
}
hr = graphBuilder->AddFilter (filter, _T("Video Capture"));
if (FAILED (hr))
return;
// The smart tee splits the stream into independent preview and capture legs.
hr = smartTee.CoCreateInstance (CLSID_SmartTee);
if (FAILED (hr))
return;
hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));
if (FAILED (hr))
return;
if (! connectFilters (filter, smartTee))
return;
ComSmartPtr<IBaseFilter> sampleGrabberBase;
hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
if (FAILED (hr))
return;
hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);
if (FAILED (hr))
return;
{
// Request uncompressed RGB24 frames from the grabber.
AM_MEDIA_TYPE mt = {};
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
mt.formattype = FORMAT_VideoInfo;
sampleGrabber->SetMediaType (&mt);
}
// Mode 1 => BufferCB() will be invoked for every frame.
callback = new GrabberCallback (*this);
hr = sampleGrabber->SetCallback (callback, 1);
hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));
if (FAILED (hr))
return;
ComSmartPtr<IPin> grabberInputPin;
if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
&& getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
&& getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
return;
hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);
if (FAILED (hr))
return;
// Read back the negotiated frame size.
AM_MEDIA_TYPE mt = {};
hr = sampleGrabber->GetConnectedMediaType (&mt);
if (auto* pVih = unalignedPointerCast<VIDEOINFOHEADER*> (mt.pbFormat))
{
width = pVih->bmiHeader.biWidth;
height = pVih->bmiHeader.biHeight;
}
// Terminate the preview leg with a null renderer so the graph can run
// without displaying anything itself.
ComSmartPtr<IBaseFilter> nullFilter;
hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));
if (connectFilters (sampleGrabberBase, nullFilter)
&& addGraphToRot())
{
activeImage = Image (Image::RGB, width, height, true);
loadingImage = Image (Image::RGB, width, height, true);
openedSuccessfully = true;
}
}
// Stops the graph, detaches the grabber callback, then releases all COM
// objects in reverse order of acquisition.
~Pimpl()
{
if (mediaControl != nullptr)
mediaControl->Stop();
removeGraphFromRot();
disconnectAnyViewers();
if (sampleGrabber != nullptr)
{
// Clear the callback first so no frame arrives during teardown.
sampleGrabber->SetCallback (nullptr, 0);
sampleGrabber = nullptr;
}
callback = nullptr;
graphBuilder = nullptr;
mediaControl = nullptr;
filter = nullptr;
captureGraphBuilder = nullptr;
smartTee = nullptr;
smartTeePreviewOutputPin = nullptr;
smartTeeCaptureOutputPin = nullptr;
asfWriter = nullptr;
}
bool openedOk() const noexcept { return openedSuccessfully; }
// Stores the callback (under lock) and bumps the user count so the graph is
// running; the next frame delivered to handleFrame() will fulfil it.
void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
{
{
const ScopedLock sl (pictureTakenCallbackLock);
jassert (pictureTakenCallbackToUse != nullptr);
if (pictureTakenCallbackToUse == nullptr)
return;
pictureTakenCallback = std::move (pictureTakenCallbackToUse);
}
addUser();
}
// Starts the graph (addUser) before wiring up the file writer.
void startRecordingToFile (const File& file, int quality)
{
addUser();
isRecording = createFileCaptureFilter (file, quality);
}
void stopRecording()
{
if (isRecording)
{
removeFileCaptureFilter();
removeUser();
isRecording = false;
}
}
Time getTimeOfFirstRecordedFrame() const
{
return firstRecordedTime;
}
// The first listener starts the graph; the last one to leave stops it.
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
if (listeners.size() == 0)
addUser();
listeners.add (listenerToAdd);
}
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.remove (listenerToRemove);
if (listeners.size() == 0)
removeUser();
}
// Called from the DirectShow streaming thread via handleFrame().
void callListeners (const Image& image)
{
const ScopedLock sl (listenerLock);
listeners.call ([=] (Listener& l) { l.imageReceived (image); });
}
// Posts the captured frame to the message thread and clears the callback.
// NOTE(review): the callback is checked under pictureTakenCallbackLock here,
// but the async lambda reads/clears it without the lock — looks racy if
// takeStillPicture() can run concurrently; confirm intended threading.
void notifyPictureTakenIfNeeded (const Image& image)
{
{
const ScopedLock sl (pictureTakenCallbackLock);
if (pictureTakenCallback == nullptr)
return;
}
MessageManager::callAsync ([weakRef = WeakReference<Pimpl> { this }, image]() mutable
{
if (weakRef == nullptr)
return;
if (weakRef->pictureTakenCallback != nullptr)
weakRef->pictureTakenCallback (image);
weakRef->pictureTakenCallback = nullptr;
});
}
// Reference-counted run/stop of the capture graph.
void addUser()
{
if (openedSuccessfully && activeUsers++ == 0)
mediaControl->Run();
}
void removeUser()
{
if (openedSuccessfully && --activeUsers == 0)
mediaControl->Stop();
}
// Per-frame callback from the sample grabber (streaming thread). Copies the
// bottom-up RGB24 buffer into loadingImage (flipping it), then notifies
// listeners and any pending still-picture callback.
void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
{
if (recordNextFrameTime)
{
// Estimate the wall-clock time of the first recorded frame, adjusting
// for a default latency plus whatever the source reports.
const double defaultCameraLatency = 0.1;
firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
recordNextFrameTime = false;
ComSmartPtr<IPin> pin;
if (getPin (filter, PINDIR_OUTPUT, pin))
{
if (auto pushSource = pin.getInterface<IAMPushSource>())
{
REFERENCE_TIME latency = 0;
pushSource->GetLatency (&latency);
firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
}
}
}
{
const int lineStride = width * 3;
const ScopedLock sl (imageSwapLock);
{
loadingImage.duplicateIfShared();
const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);
// DIBs are stored bottom-up, so copy rows in reverse to flip vertically.
for (int i = 0; i < height; ++i)
memcpy (destData.getLinePointer ((height - 1) - i),
buffer + lineStride * i,
(size_t) lineStride);
}
imageNeedsFlipping = true;
}
if (listeners.size() > 0)
callListeners (loadingImage);
notifyPictureTakenIfNeeded (loadingImage);
sendChangeMessage();
}
// Paints the most recent frame letter-boxed into `area` (message thread).
void drawCurrentImage (Graphics& g, Rectangle<int> area)
{
if (imageNeedsFlipping)
{
// Swap the freshly-written frame in under the same lock handleFrame uses.
const ScopedLock sl (imageSwapLock);
std::swap (loadingImage, activeImage);
imageNeedsFlipping = false;
}
Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
.appliedTo (Rectangle<int> (width, height), area));
RectangleList<int> borders (area);
borders.subtract (centred);
g.setColour (Colours::black);
g.fillRectList (borders);
g.drawImage (activeImage, centred.getX(), centred.getY(),
centred.getWidth(), centred.getHeight(), 0, 0, width, height);
}
// Attaches a WM ASF writer to the tee's capture pin and restarts the graph.
// `quality`: low byte indexes the {10,15,30} fps table; bytes 2/3 are an
// internal hack to pass explicit preview/recording frame rates (see below).
// Returns true only if the whole writer pipeline was built and started.
bool createFileCaptureFilter (const File& file, int quality)
{
removeFileCaptureFilter();
file.deleteFile();
mediaControl->Stop();
firstRecordedTime = Time();
// Ask handleFrame() to timestamp the first frame of the new recording.
recordNextFrameTime = true;
previewMaxFPS = 60;
HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);
if (SUCCEEDED (hr))
{
if (auto fileSink = asfWriter.getInterface<IFileSinkFilter>())
{
hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), nullptr);
if (SUCCEEDED (hr))
{
hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));
if (SUCCEEDED (hr))
{
if (auto asfConfig = asfWriter.getInterface<IConfigAsfWriter>())
{
asfConfig->SetIndexMode (true);
ComSmartPtr<IWMProfileManager> profileManager;
hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());
// This gibberish is the DirectShow profile for a video-only wmv file.
String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
"<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
"streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
"bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
"<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
"<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
"btemporalcompression=\"1\" lsamplesize=\"0\">"
"<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
"<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
"<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
"<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
"bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
"biclrused=\"0\" biclrimportant=\"0\"/>"
"</videoinfoheader>"
"</wmmediatype>"
"</streamconfig>"
"</profile>");
const int fps[] = { 10, 15, 30 };
int maxFramesPerSecond = fps[jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];
if (((uint32_t) quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
maxFramesPerSecond = (quality >> 24) & 0xff;
// avgtimeperframe is in 100ns units.
prof = prof.replace ("$WIDTH", String (width))
.replace ("$HEIGHT", String (height))
.replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));
ComSmartPtr<IWMProfile> currentProfile;
hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);
if (SUCCEEDED (hr))
{
ComSmartPtr<IPin> asfWriterInputPin;
if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
{
hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);
if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
&& SUCCEEDED (mediaControl->Run()))
{
previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding
if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
previewMaxFPS = (quality >> 16) & 0xff;
return true;
}
}
}
}
}
}
}
}
// Something failed: tear the writer down and restart preview if it was on.
removeFileCaptureFilter();
if (openedSuccessfully && activeUsers > 0)
mediaControl->Run();
return false;
}
// Stops the graph, removes the ASF writer, then resumes preview if needed.
void removeFileCaptureFilter()
{
mediaControl->Stop();
if (asfWriter != nullptr)
{
graphBuilder->RemoveFilter (asfWriter);
asfWriter = nullptr;
}
if (openedSuccessfully && activeUsers > 0)
mediaControl->Run();
previewMaxFPS = 60;
}
// Walks the system's video-input device category. If `names` is non-null,
// every device's FriendlyName is appended to it; when the zero-based device
// counter reaches `deviceIndexToOpen`, that device's capture filter is
// returned (so pass -1 to just collect all names). Returns nullptr if the
// index was never reached or enumeration failed.
//
// Fix: the VARIANT filled in by IPropertyBag::Read() owns a BSTR which was
// never freed, leaking one string per device on every enumeration — it is
// now released with VariantClear() on all paths.
static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
{
    int index = 0;
    ComSmartPtr<ICreateDevEnum> pDevEnum;

    struct Deleter
    {
        void operator() (IUnknown* ptr) const noexcept { ptr->Release(); }
    };

    using ContextPtr = std::unique_ptr<IBindCtx, Deleter>;

    if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
    {
        ComSmartPtr<IEnumMoniker> enumerator;
        HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);

        if (SUCCEEDED (hr) && enumerator != nullptr)
        {
            ComSmartPtr<IMoniker> moniker;
            ULONG fetched;

            while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
            {
                auto context = []
                {
                    IBindCtx* ptr = nullptr;
                    ignoreUnused (CreateBindCtx (0, &ptr));
                    return ContextPtr (ptr);
                }();

                ComSmartPtr<IBaseFilter> captureFilter;
                hr = moniker->BindToObject (context.get(), nullptr, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());

                if (SUCCEEDED (hr))
                {
                    ComSmartPtr<IPropertyBag> propertyBag;
                    hr = moniker->BindToStorage (context.get(), nullptr, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());

                    if (SUCCEEDED (hr))
                    {
                        VARIANT var;
                        VariantInit (&var);
                        var.vt = VT_BSTR;   // request the value coerced to a BSTR

                        hr = propertyBag->Read (_T("FriendlyName"), &var, nullptr);
                        propertyBag = nullptr;

                        if (SUCCEEDED (hr))
                        {
                            if (names != nullptr)
                                names->add (var.bstrVal);

                            const bool isRequestedDevice = (index == deviceIndexToOpen);

                            // Free the BSTR allocated by Read() (previously leaked).
                            VariantClear (&var);

                            if (isRequestedDevice)
                                return captureFilter;

                            ++index;
                        }
                        else
                        {
                            VariantClear (&var);
                        }
                    }
                }
            }
        }
    }

    return nullptr;
}
// Returns the friendly names of all video-capture devices currently present.
static StringArray getAvailableDevices()
{
    StringArray deviceNames;
    enumerateCameras (&deviceNames, -1);   // -1: enumerate only, open nothing
    return deviceNames;
}
// COM callback handed to the ISampleGrabber filter: receives each decoded
// video frame buffer and forwards it to the owning Pimpl.
struct GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
{
    // Starts with a ref-count of 0; the grabber filter manages the lifetime.
    GrabberCallback (Pimpl& p)
        : ComBaseClassHelperBase (0), owner (p) {}

    JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
    {
        // Answer for ISampleGrabberCB ourselves; defer everything else
        // (IUnknown etc.) to the base helper.
        if (refId == IID_ISampleGrabberCB)
            return castToType<ISampleGrabberCB> (result);

        return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
    }

    // We use buffer-mode callbacks only, so reject the sample-mode one.
    JUCE_COMRESULT SampleCB (double, IMediaSample*)  { return E_FAIL; }

    // Called by the grabber on its streaming thread for each frame.
    JUCE_COMRESULT BufferCB (double time, BYTE* buffer, long bufferSize)
    {
        owner.handleFrame (time, buffer, bufferSize);
        return S_OK;
    }

    Pimpl& owner;

    JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
};
CameraDevice& owner;

ComSmartPtr<GrabberCallback> callback;

// Guards 'listeners' against concurrent access from the grabber thread.
CriticalSection listenerLock;
ListenerList<Listener> listeners;

// Guards the one-shot still-picture callback.
CriticalSection pictureTakenCallbackLock;
std::function<void (const Image&)> pictureTakenCallback;

bool isRecording = false, openedSuccessfully = false;
int width = 0, height = 0;
Time firstRecordedTime;

Array<ViewerComponent*> viewerComps;

// DirectShow graph objects.
ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
ComSmartPtr<IGraphBuilder> graphBuilder;
ComSmartPtr<ISampleGrabber> sampleGrabber;
ComSmartPtr<IMediaControl> mediaControl;
ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;

int activeUsers = 0;
Array<int> widths, heights;

// ROT cookie from addGraphToRot(). Initialised to 0 so that
// removeGraphFromRot() can't pass an indeterminate value to Revoke()
// if registration never happened.
DWORD graphRegistrationID = 0;

// Guards the loading/active image swap between grabber and UI threads.
CriticalSection imageSwapLock;
bool imageNeedsFlipping = false;
Image loadingImage, activeImage;

bool recordNextFrameTime = false;
int previewMaxFPS = 60;

JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
private:
// Queries the stream's capability list and fills 'widths'/'heights' with
// the set of distinct frame sizes the device offers.
void getVideoSizes (IAMStreamConfig* const streamConfig)
{
    widths.clear();
    heights.clear();

    int count = 0, size = 0;
    streamConfig->GetNumberOfCapabilities (&count, &size);

    // Only video capability structures are understood here.
    if (size != sizeof (VIDEO_STREAM_CONFIG_CAPS))
        return;

    for (int i = 0; i < count; ++i)
    {
        VIDEO_STREAM_CONFIG_CAPS scc;
        AM_MEDIA_TYPE* config = nullptr;

        if (FAILED (streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc)))
            continue;

        const int w = (int) scc.InputSize.cx;
        const int h = (int) scc.InputSize.cy;

        bool alreadyListed = false;

        for (int j = 0; j < widths.size(); ++j)
        {
            if (widths.getUnchecked (j) == w && heights.getUnchecked (j) == h)
            {
                alreadyListed = true;
                break;
            }
        }

        if (! alreadyListed)
        {
            widths.add (w);
            heights.add (h);
        }

        deleteMediaType (config);
    }
}
// Picks the capture format whose frame size lies within the given bounds,
// preferring the largest area, and applies it to the stream.
// Returns true if a suitable format was found and set successfully.
bool selectVideoSize (IAMStreamConfig* const streamConfig,
                      const int minWidth, const int minHeight,
                      const int maxWidth, const int maxHeight)
{
    int count = 0, size = 0, bestArea = 0, bestIndex = -1;
    streamConfig->GetNumberOfCapabilities (&count, &size);

    if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
    {
        AM_MEDIA_TYPE* config;
        VIDEO_STREAM_CONFIG_CAPS scc;

        for (int i = 0; i < count; ++i)
        {
            HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

            if (SUCCEEDED (hr))
            {
                if (scc.InputSize.cx >= minWidth
                     && scc.InputSize.cy >= minHeight
                     && scc.InputSize.cx <= maxWidth
                     && scc.InputSize.cy <= maxHeight)
                {
                    int area = scc.InputSize.cx * scc.InputSize.cy;

                    if (area > bestArea)
                    {
                        bestIndex = i;
                        bestArea = area;
                    }
                }

                deleteMediaType (config);
            }
        }

        if (bestIndex >= 0)
        {
            HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);

            // Previously SetFormat was attempted even when GetStreamCaps
            // failed, which would pass an indeterminate pointer. Bail out.
            if (FAILED (hr))
                return false;

            hr = streamConfig->SetFormat (config);
            deleteMediaType (config);
            return SUCCEEDED (hr);
        }
    }

    return false;
}
// Finds the first pin on 'filter' with the wanted direction, optionally
// matching 'pinName' case-insensitively. On success stores the pin in
// 'result' and returns true.
static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
                    ComSmartPtr<IPin>& result, const char* pinName = nullptr)
{
    ComSmartPtr<IEnumPins> enumerator;
    ComSmartPtr<IPin> pin;

    filter->EnumPins (enumerator.resetAndGetPointerAddress());

    while (enumerator->Next (1, pin.resetAndGetPointerAddress(), nullptr) == S_OK)
    {
        PIN_DIRECTION dir;
        pin->QueryDirection (&dir);

        if (wantedDirection == dir)
        {
            PIN_INFO info = {};
            pin->QueryPinInfo (&info);

            // QueryPinInfo AddRefs the owning filter into info.pFilter, and
            // the caller is required to release it - otherwise every call
            // leaks a filter reference.
            if (info.pFilter != nullptr)
                info.pFilter->Release();

            if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
            {
                result = pin;
                return true;
            }
        }
    }

    return false;
}
// Connects 'first'->output pin to 'second'->input pin via the graph builder.
bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
{
    ComSmartPtr<IPin> inputPin, outputPin;

    if (! getPin (first, PINDIR_OUTPUT, outputPin))
        return false;

    if (! getPin (second, PINDIR_INPUT, inputPin))
        return false;

    return SUCCEEDED (graphBuilder->Connect (outputPin, inputPin));
}
// Registers the filter graph in the Running Object Table so that external
// tools (e.g. GraphEdit) can attach to it for debugging. Returns false if
// the ROT is unavailable or registration fails.
bool addGraphToRot()
{
    ComSmartPtr<IRunningObjectTable> rot;

    if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
        return false;

    ComSmartPtr<IMoniker> moniker;
    WCHAR buffer[128]{};   // NOTE(review): moniker item name is left empty - confirm this is intentional

    HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());

    if (FAILED (hr))
        return false;

    // The cookie stored here is what removeGraphFromRot() later revokes.
    graphRegistrationID = 0;
    return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
}
void removeGraphFromRot()
{
ComSmartPtr<IRunningObjectTable> rot;
if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
rot->Revoke (graphRegistrationID);
}
void disconnectAnyViewers();
// Frees an AM_MEDIA_TYPE allocated by DirectShow (format block, embedded
// IUnknown, and the structure itself). Safe to call with nullptr.
static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
{
    if (pmt == nullptr)
        return;

    if (pmt->cbFormat != 0)
        CoTaskMemFree ((PVOID) pmt->pbFormat);

    if (pmt->pUnk != nullptr)
        pmt->pUnk->Release();

    CoTaskMemFree (pmt);
}
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
// A component that paints the camera's live preview image, repainting
// whenever the device broadcasts that a new frame is ready (rate-limited).
struct CameraDevice::ViewerComponent : public Component,
                                       public ChangeListener
{
    ViewerComponent (CameraDevice& d)
        : owner (d.pimpl.get())
    {
        setOpaque (true);
        owner->addChangeListener (this);
        owner->addUser();
        owner->viewerComps.add (this);
        setSize (owner->width, owner->height);
    }

    ~ViewerComponent() override
    {
        if (owner == nullptr)
            return;

        owner->viewerComps.removeFirstMatchingValue (this);
        owner->removeUser();
        owner->removeChangeListener (this);
    }

    // Called by Pimpl::disconnectAnyViewers() if the device is destroyed first.
    void ownerDeleted()
    {
        owner = nullptr;
    }

    void paint (Graphics& g) override
    {
        g.setColour (Colours::black);
        g.setImageResamplingQuality (Graphics::lowResamplingQuality);

        if (owner == nullptr)
            g.fillAll();
        else
            owner->drawCurrentImage (g, getLocalBounds());
    }

    // A new frame arrived: repaint, but never faster than maxFPS.
    void changeListenerCallback (ChangeBroadcaster*) override
    {
        auto now = Time::currentTimeMillis();

        if (now - lastRepaintTime >= 1000 / maxFPS)
        {
            lastRepaintTime = now;
            repaint();

            // Pick up the device's current throttle (it drops during recording).
            if (owner != nullptr)
                maxFPS = owner->previewMaxFPS;
        }
    }

private:
    Pimpl* owner;
    int maxFPS = 15;
    int64 lastRepaintTime = 0;
};
// Tells every attached viewer that this device is going away, so they
// stop dereferencing their owner pointer.
void CameraDevice::Pimpl::disconnectAnyViewers()
{
    for (auto* viewer : viewerComps)
        viewer->ownerDeleted();
}
// The Windows implementation records through the ASF writer filter,
// so captured files use the Windows Media Video extension.
String CameraDevice::getFileExtension()
{
    return ".wmv";
}

View File

@ -0,0 +1,959 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace VideoRenderers
{
    //==============================================================================
    // Abstract interface over the two DirectShow video renderers that may be
    // used: VMR-7 (older systems) and the EVR (Vista+). All window and sizing
    // operations go through this so the rest of the code is renderer-agnostic.
    struct Base
    {
        virtual ~Base() {}

        // Creates the renderer filter, adds it to the graph, and binds it to the window.
        virtual HRESULT create (ComSmartPtr<IGraphBuilder>&, ComSmartPtr<IBaseFilter>&, HWND) = 0;
        virtual void setVideoWindow (HWND) = 0;
        virtual void setVideoPosition (HWND) = 0;
        virtual void repaintVideo (HWND, HDC) = 0;
        virtual void displayModeChanged() = 0;
        virtual HRESULT getVideoSize (long& videoWidth, long& videoHeight) = 0;
    };

    //==============================================================================
    // Video Mixing Renderer 7, used in windowless mode as a fallback renderer.
    struct VMR7 : public Base
    {
        VMR7() {}

        HRESULT create (ComSmartPtr<IGraphBuilder>& graphBuilder,
                        ComSmartPtr<IBaseFilter>& baseFilter, HWND hwnd) override
        {
            // Each step depends on the previous one succeeding, hence the chain.
            ComSmartPtr<IVMRFilterConfig> filterConfig;

            HRESULT hr = baseFilter.CoCreateInstance (CLSID_VideoMixingRenderer);

            if (SUCCEEDED (hr))   hr = graphBuilder->AddFilter (baseFilter, L"VMR-7");
            if (SUCCEEDED (hr))   hr = baseFilter.QueryInterface (filterConfig);
            if (SUCCEEDED (hr))   hr = filterConfig->SetRenderingMode (VMRMode_Windowless);
            if (SUCCEEDED (hr))   hr = baseFilter.QueryInterface (windowlessControl);
            if (SUCCEEDED (hr))   hr = windowlessControl->SetVideoClippingWindow (hwnd);
            if (SUCCEEDED (hr))   hr = windowlessControl->SetAspectRatioMode (VMR_ARMODE_LETTER_BOX);

            return hr;
        }

        void setVideoWindow (HWND hwnd) override
        {
            windowlessControl->SetVideoClippingWindow (hwnd);
        }

        // Maps the full native video frame onto the window's client area.
        void setVideoPosition (HWND hwnd) override
        {
            long videoWidth = 0, videoHeight = 0;
            windowlessControl->GetNativeVideoSize (&videoWidth, &videoHeight, nullptr, nullptr);

            RECT src, dest;
            SetRect (&src, 0, 0, videoWidth, videoHeight);
            GetClientRect (hwnd, &dest);

            windowlessControl->SetVideoPosition (&src, &dest);
        }

        void repaintVideo (HWND hwnd, HDC hdc) override
        {
            windowlessControl->RepaintVideo (hwnd, hdc);
        }

        void displayModeChanged() override
        {
            windowlessControl->DisplayModeChanged();
        }

        HRESULT getVideoSize (long& videoWidth, long& videoHeight) override
        {
            return windowlessControl->GetNativeVideoSize (&videoWidth, &videoHeight, nullptr, nullptr);
        }

        ComSmartPtr<IVMRWindowlessControl> windowlessControl;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (VMR7)
    };


    //==============================================================================
    // Enhanced Video Renderer - the preferred renderer on Vista and later.
    struct EVR : public Base
    {
        EVR() {}

        HRESULT create (ComSmartPtr<IGraphBuilder>& graphBuilder,
                        ComSmartPtr<IBaseFilter>& baseFilter, HWND hwnd) override
        {
            ComSmartPtr<IMFGetService> getService;

            HRESULT hr = baseFilter.CoCreateInstance (CLSID_EnhancedVideoRenderer);

            if (SUCCEEDED (hr))   hr = graphBuilder->AddFilter (baseFilter, L"EVR");
            if (SUCCEEDED (hr))   hr = baseFilter.QueryInterface (getService);
            if (SUCCEEDED (hr))   hr = getService->GetService (MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl,
                                                               (void**) videoDisplayControl.resetAndGetPointerAddress());
            if (SUCCEEDED (hr))   hr = videoDisplayControl->SetVideoWindow (hwnd);
            if (SUCCEEDED (hr))   hr = videoDisplayControl->SetAspectRatioMode (MFVideoARMode_PreservePicture);

            return hr;
        }

        void setVideoWindow (HWND hwnd) override
        {
            videoDisplayControl->SetVideoWindow (hwnd);
        }

        void setVideoPosition (HWND hwnd) override
        {
            // The EVR takes a normalised source rect (whole frame here).
            const MFVideoNormalizedRect src = { 0.0f, 0.0f, 1.0f, 1.0f };

            RECT dest;
            GetClientRect (hwnd, &dest);

            videoDisplayControl->SetVideoPosition (&src, &dest);
        }

        void repaintVideo (HWND, HDC) override
        {
            videoDisplayControl->RepaintVideo();
        }

        // The EVR handles display-mode changes internally.
        void displayModeChanged() override {}

        HRESULT getVideoSize (long& videoWidth, long& videoHeight) override
        {
            SIZE sz = { 0, 0 };
            HRESULT hr = videoDisplayControl->GetNativeVideoSize (&sz, nullptr);
            videoWidth  = sz.cx;
            videoHeight = sz.cy;
            return hr;
        }

        ComSmartPtr<IMFVideoDisplayControl> videoDisplayControl;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (EVR)
    };
}
//==============================================================================
struct VideoComponent::Pimpl : public Component,
private ComponentPeer::ScaleFactorListener
{
// Creates the DirectShow context and the watcher that tracks this
// component's movement/peer changes.
Pimpl (VideoComponent& ownerToUse, bool)
    : owner (ownerToUse)
{
    setOpaque (true);
    context = std::make_unique<DirectShowContext> (*this);
    componentWatcher = std::make_unique<ComponentWatcher> (*this);
}
~Pimpl() override
{
    close();

    // Destroy the context before the watcher, mirroring construction order.
    context.reset();
    componentWatcher.reset();

    if (currentPeer != nullptr)
        currentPeer->removeScaleFactorListener (this);
}
// Closes any current media and attempts to load the given file path or URL.
Result loadFromString (const String& fileOrURLPath)
{
    close();

    const auto result = context->loadFile (fileOrURLPath);

    if (result.wasOk())
    {
        videoLoaded = true;
        context->updateVideoPosition();
    }

    return result;
}
// Loads a local file, remembering it on success.
Result load (const File& file)
{
    const auto result = loadFromString (file.getFullPathName());

    if (result.wasOk())
        currentFile = file;

    return result;
}

// Loads a URL, remembering it on success.
Result load (const URL& url)
{
    const auto result = loadFromString (URL::removeEscapeChars (url.toString (true)));

    if (result.wasOk())
        currentURL = url;

    return result;
}
// Stops playback, releases all DirectShow resources, and clears the
// record of what was loaded.
void close()
{
    stop();
    context->release();

    videoLoaded = false;
    currentFile = File();
    currentURL = {};
}
// True if media has been loaded successfully.
bool isOpen() const
{
    return videoLoaded;
}

bool isPlaying() const
{
    return context->state == DirectShowContext::runningState;
}

void play()
{
    if (videoLoaded)
        context->play();
}

// "stop" pauses rather than fully stopping, so the position is retained.
void stop()
{
    if (videoLoaded)
        context->pause();
}

// Position is in seconds.
void setPosition (double newPosition)
{
    if (videoLoaded)
        context->setPosition (newPosition);
}

double getPosition() const
{
    return videoLoaded ? context->getPosition() : 0.0;
}

// Speed is a playback-rate multiplier (1.0 = normal).
void setSpeed (double newSpeed)
{
    if (videoLoaded)
        context->setSpeed (newSpeed);
}

double getSpeed() const
{
    return videoLoaded ? context->getSpeed() : 0.0;
}

// The video's native resolution, or an empty rectangle if nothing is loaded.
Rectangle<int> getNativeSize() const
{
    return videoLoaded ? context->getVideoSize()
                       : Rectangle<int>();
}

// Duration in seconds.
double getDuration() const
{
    return videoLoaded ? context->getDuration() : 0.0;
}

// Volume in the range 0..1.
void setVolume (float newVolume)
{
    if (videoLoaded)
        context->setVolume (newVolume);
}

float getVolume() const
{
    return videoLoaded ? context->getVolume() : 0.0f;
}
// The video itself is drawn by DirectShow into a native child window; this
// just shows a grey placeholder when nothing is loaded.
void paint (Graphics& g) override
{
    if (! videoLoaded)
    {
        g.fillAll (Colours::grey);
        return;
    }

    context->handleUpdateNowIfNeeded();
}
// Recalculates where the native video window should sit, in physical
// pixels, based on this component's position within its peer.
void updateContextPosition()
{
    context->updateContextPosition();

    if (getWidth() > 0 && getHeight() > 0)
        if (auto* peer = getTopLevelComponent()->getPeer())
            context->updateWindowPosition ((peer->getAreaCoveredBy (*this).toDouble()
                                              * peer->getPlatformScaleFactor()).toNearestInt());
}

// Shows/hides the native video window to match this component's visibility.
void updateContextVisibility()
{
    context->showWindow (isShowing());
}
// Schedules the native window to be rebuilt (e.g. after a peer change).
void recreateNativeWindowAsync()
{
    context->recreateNativeWindowAsync();
    repaint();
}

// Forwards playback-state events to the owner's public callbacks.
void playbackStarted()
{
    if (owner.onPlaybackStarted != nullptr)
        owner.onPlaybackStarted();
}

void playbackStopped()
{
    if (owner.onPlaybackStopped != nullptr)
        owner.onPlaybackStopped();
}

void errorOccurred (const String& errorMessage)
{
    if (owner.onErrorOccurred != nullptr)
        owner.onErrorOccurred (errorMessage);
}
// What's currently loaded (only one of these is meaningful at a time).
File currentFile;
URL currentURL;

private:
    VideoComponent& owner;
    ComponentPeer* currentPeer = nullptr;   // peer we registered a scale-factor listener on
    bool videoLoaded = false;
//==============================================================================
// DPI changed on the monitor hosting the peer: reposition the native window.
void nativeScaleFactorChanged (double /*newScaleFactor*/) override
{
    if (videoLoaded)
        updateContextPosition();
}
//==============================================================================
// Watches the owning component for moves, peer changes, and visibility
// changes, keeping the native video window in sync.
struct ComponentWatcher : public ComponentMovementWatcher
{
    ComponentWatcher (Pimpl& c) : ComponentMovementWatcher (&c), owner (c)
    {
    }

    void componentMovedOrResized (bool, bool) override
    {
        if (owner.videoLoaded)
            owner.updateContextPosition();
    }

    void componentPeerChanged() override
    {
        // The old peer (if any) still holds our scale-factor listener.
        if (owner.currentPeer != nullptr)
            owner.currentPeer->removeScaleFactorListener (&owner);

        if (owner.videoLoaded)
            owner.recreateNativeWindowAsync();
    }

    void componentVisibilityChanged() override
    {
        if (owner.videoLoaded)
            owner.updateContextVisibility();
    }

    Pimpl& owner;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (ComponentWatcher)
};
std::unique_ptr<ComponentWatcher> componentWatcher;
//==============================================================================
struct DirectShowContext : public AsyncUpdater
{
// COM is initialised for the lifetime of the context and torn down in the
// destructor, after all graph objects have been released.
DirectShowContext (Pimpl& c) : component (c)
{
    ignoreUnused (CoInitialize (nullptr));
}

~DirectShowContext() override
{
    release();
    CoUninitialize();
}
//==============================================================================
// Moves the native child window to the given physical-pixel bounds.
void updateWindowPosition (const Rectangle<int>& newBounds)
{
    nativeWindow->setWindowPosition (newBounds);
}

void showWindow (bool shouldBeVisible)
{
    nativeWindow->showWindow (shouldBeVisible);
}

//==============================================================================
// Asks the renderer to redraw the current frame into our window.
void repaint()
{
    if (hasVideo)
        videoRenderer->repaintVideo (nativeWindow->hwnd, nativeWindow->hdc);
}

// Re-maps the video frame onto the window's current client area.
void updateVideoPosition()
{
    if (hasVideo)
        videoRenderer->setVideoPosition (nativeWindow->hwnd);
}

void displayResolutionChanged()
{
    if (hasVideo)
        videoRenderer->displayModeChanged();
}
//==============================================================================
// Rebuilds the native window after the component has moved to a new peer:
// detach event routing and the renderer from the old window, create the
// new one, then re-attach both.
void peerChanged()
{
    deleteNativeWindow();

    mediaEvent->SetNotifyWindow (0, 0, 0);

    if (videoRenderer != nullptr)
        videoRenderer->setVideoWindow (nullptr);

    createNativeWindow();

    // We handle state-change events ourselves (see graphEventProc).
    mediaEvent->CancelDefaultHandling (EC_STATE_CHANGE);
    mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);

    if (videoRenderer != nullptr)
        videoRenderer->setVideoWindow (hwnd);
}
// Processes deferred work (window recreation, viewport updates, repaints)
// on the message thread.
void handleAsyncUpdate() override
{
    if (hwnd == nullptr)
    {
        // Native window isn't ready yet - retry on a later message-loop pass.
        triggerAsyncUpdate();
        return;
    }

    if (needToRecreateNativeWindow)
    {
        peerChanged();
        needToRecreateNativeWindow = false;
    }

    if (needToUpdateViewport)
    {
        updateVideoPosition();
        needToUpdateViewport = false;
    }

    repaint();
}
// Flags the native window for recreation on the next async update.
void recreateNativeWindowAsync()
{
    needToRecreateNativeWindow = true;
    triggerAsyncUpdate();
}

// Flags the viewport for repositioning on the next async update.
void updateContextPosition()
{
    needToUpdateViewport = true;
    triggerAsyncUpdate();
}
//==============================================================================
// Builds the full DirectShow playback graph for the given file or URL.
// On success the context is left paused at the start; on failure all
// resources are released and an error Result is returned.
Result loadFile (const String& fileOrURLPath)
{
    jassert (state == uninitializedState);

    if (! createNativeWindow())
        return Result::fail ("Can't create window");

    HRESULT hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);

    // basic playback interfaces
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaControl);
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaPosition);
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaEvent);
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (basicAudio);

    // video renderer interface: prefer the EVR on Vista+, fall back to VMR-7
    if (SUCCEEDED (hr))
    {
        if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
        {
            videoRenderer.reset (new VideoRenderers::EVR());
            hr = videoRenderer->create (graphBuilder, baseFilter, hwnd);

            if (FAILED (hr))
                videoRenderer = nullptr;
        }

        if (videoRenderer == nullptr)
        {
            videoRenderer.reset (new VideoRenderers::VMR7());
            hr = videoRenderer->create (graphBuilder, baseFilter, hwnd);
        }
    }

    // build filter graph
    if (SUCCEEDED (hr))
    {
        hr = graphBuilder->RenderFile (fileOrURLPath.toWideCharPointer(), nullptr);

        if (FAILED (hr))
        {
           #if JUCE_MODAL_LOOPS_PERMITTED
            // Annoyingly, if we don't run the msg loop between failing and deleting the window, the
            // whole OS message-dispatch system gets itself into a state, and refuses to deliver any
            // more messages for the whole app. (That's what happens in Win7, anyway)
            MessageManager::getInstance()->runDispatchLoopUntil (200);
           #endif
        }
    }

    // remove video renderer if not connected (no video)
    if (SUCCEEDED (hr))
    {
        if (isRendererConnected())
        {
            hasVideo = true;
        }
        else
        {
            hasVideo = false;
            graphBuilder->RemoveFilter (baseFilter);
            videoRenderer = nullptr;
            baseFilter = nullptr;
        }
    }

    // set window to receive events
    if (SUCCEEDED (hr))
    {
        mediaEvent->CancelDefaultHandling (EC_STATE_CHANGE);
        hr = mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);
    }

    if (SUCCEEDED (hr))
    {
        state = stoppedState;
        pause();
        return Result::ok();
    }

    // Note that if you're trying to open a file and this method fails, you may
    // just need to install a suitable codec. It seems that by default DirectShow
    // doesn't support a very good range of formats.
    release();
    return getErrorMessageFromResult (hr);
}
// Converts a DirectShow HRESULT into a readable failure Result, using
// friendly strings for the common VFW errors and FormatMessage otherwise.
static Result getErrorMessageFromResult (HRESULT hr)
{
    switch (hr)
    {
        case VFW_E_INVALID_FILE_FORMAT:         return Result::fail ("Invalid file format");
        case VFW_E_NOT_FOUND:                   return Result::fail ("File not found");
        case VFW_E_UNKNOWN_FILE_TYPE:           return Result::fail ("Unknown file type");
        case VFW_E_UNSUPPORTED_STREAM:          return Result::fail ("Unsupported stream");
        case VFW_E_CANNOT_CONNECT:              return Result::fail ("Cannot connect");
        case VFW_E_CANNOT_LOAD_SOURCE_FILTER:   return Result::fail ("Cannot load source filter");
    }

    // Fall back to the OS's description of the error code.
    TCHAR messageBuffer[512] = { 0 };

    FormatMessage (FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
                   nullptr, hr, MAKELANGID (LANG_NEUTRAL, SUBLANG_DEFAULT),
                   messageBuffer, (DWORD) numElementsInArray (messageBuffer) - 1, nullptr);

    return Result::fail (String (messageBuffer));
}
// Releases every graph object and the native window. The ordering matters:
// stop the graph and detach event/window hooks before dropping references.
void release()
{
    if (mediaControl != nullptr)
        mediaControl->Stop();

    if (mediaEvent != nullptr)
        mediaEvent->SetNotifyWindow (0, 0, 0);

    if (videoRenderer != nullptr)
        videoRenderer->setVideoWindow (nullptr);

    hasVideo = false;
    videoRenderer = nullptr;
    baseFilter = nullptr;
    basicAudio = nullptr;
    mediaEvent = nullptr;
    mediaPosition = nullptr;
    mediaControl = nullptr;
    graphBuilder = nullptr;

    state = uninitializedState;

    if (nativeWindow != nullptr)
        deleteNativeWindow();
}
// Drains and dispatches all pending filter-graph events. Called when the
// hidden video window receives the graphEventID message.
void graphEventProc()
{
    LONG ec = 0;
    LONG_PTR p1 = {}, p2 = {};

    jassert (mediaEvent != nullptr);

    while (SUCCEEDED (mediaEvent->GetEvent (&ec, &p1, &p2, 0)))
    {
        // Params are freed up-front; p1 is only used below as a plain value.
        mediaEvent->FreeEventParams (ec, p1, p2);

        switch (ec)
        {
            case EC_REPAINT:
                component.repaint();
                break;

            case EC_COMPLETE:
                component.stop();
                component.setPosition (0.0);
                break;

            case EC_ERRORABORT:
            case EC_ERRORABORTEX:
                component.errorOccurred (getErrorMessageFromResult ((HRESULT) p1).getErrorMessage());
                // intentional fallthrough
            case EC_USERABORT:
                component.close();
                break;

            case EC_STATE_CHANGE:
                switch (p1)
                {
                    case State_Paused:   component.playbackStopped(); break;
                    case State_Running:  component.playbackStarted(); break;
                    default:             break;
                }
                break;   // was an implicit fallthrough into the empty default

            default:
                break;
        }
    }
}
//==============================================================================
// Starts the graph running and records the new state.
void play()
{
    mediaControl->Run();
    state = runningState;
}

// Fully stops the graph (position handling is done by the caller).
void stop()
{
    mediaControl->Stop();
    state = stoppedState;
}

// Pauses the graph, keeping the current frame displayed.
void pause()
{
    mediaControl->Pause();
    state = pausedState;
}
//==============================================================================
// Returns the native video resolution, or 0x0 when there's no video stream.
Rectangle<int> getVideoSize() const noexcept
{
    long w = 0, h = 0;

    if (hasVideo)
        videoRenderer->getVideoSize (w, h);

    return { (int) w, (int) h };
}
//==============================================================================
// Duration of the loaded media, in seconds.
double getDuration() const
{
    REFTIME duration = 0.0;   // default to 0 if the COM call fails
    mediaPosition->get_Duration (&duration);
    return duration;
}

// Current playback-rate multiplier (1.0 = normal speed).
double getSpeed() const
{
    double speed = 0.0;       // default to 0 if the COM call fails
    mediaPosition->get_Rate (&speed);
    return speed;
}

// Current playback position, in seconds.
double getPosition() const
{
    REFTIME seconds = 0.0;    // default to 0 if the COM call fails
    mediaPosition->get_CurrentPosition (&seconds);
    return seconds;
}

void setSpeed (double newSpeed)      { mediaPosition->put_Rate (newSpeed); }
void setPosition (double seconds)    { mediaPosition->put_CurrentPosition (seconds); }
void setVolume (float newVolume)     { basicAudio->put_Volume (convertToDShowVolume (newVolume)); }

// in DirectShow, full volume is 0, silence is -10000
static long convertToDShowVolume (float vol) noexcept
{
    if (vol >= 1.0f) return 0;
    if (vol <= 0.0f) return -10000;

    return roundToInt ((vol * 10000.0f) - 10000.0f);
}

// Maps the DirectShow range back to 0..1.
float getVolume() const
{
    long volume = -10000;     // default to silence if the COM call fails
    basicAudio->get_Volume (&volume);
    return (float) (volume + 10000) / 10000.0f;
}
enum State { uninitializedState, runningState, pausedState, stoppedState };
State state = uninitializedState;
private:
//==============================================================================
// Private message ID the graph uses to notify our hidden window of events.
enum { graphEventID = WM_APP + 0x43f0 };

Pimpl& component;
HWND hwnd = {};               // the hidden native child window hosting the video
HDC hdc = {};

ComSmartPtr<IGraphBuilder> graphBuilder;
ComSmartPtr<IMediaControl> mediaControl;
ComSmartPtr<IMediaPosition> mediaPosition;
ComSmartPtr<IMediaEventEx> mediaEvent;
ComSmartPtr<IBasicAudio> basicAudio;
ComSmartPtr<IBaseFilter> baseFilter;   // the active video renderer filter

std::unique_ptr<VideoRenderers::Base> videoRenderer;

bool hasVideo = false, needToUpdateViewport = true, needToRecreateNativeWindow = false;
//==============================================================================
// Creates the hidden native child window inside the component's top-level
// peer and hooks up DPI tracking. Returns false if there's no peer or the
// window couldn't be created.
bool createNativeWindow()
{
    jassert (nativeWindow == nullptr);

    if (auto* topLevelPeer = component.getTopLevelComponent()->getPeer())
    {
        nativeWindow.reset (new NativeWindow ((HWND) topLevelPeer->getNativeHandle(), this));

        hwnd = nativeWindow->hwnd;
        component.currentPeer = topLevelPeer;
        component.currentPeer->addScaleFactorListener (&component);

        if (hwnd != nullptr)
        {
            hdc = GetDC (hwnd);
            component.updateContextPosition();
            component.updateContextVisibility();
            return true;
        }

        // Window creation failed - drop the wrapper again.
        nativeWindow = nullptr;
    }
    else
    {
        jassertfalse;
    }

    return false;
}
// Releases the DC and destroys the hidden native video window.
void deleteNativeWindow()
{
    jassert (nativeWindow != nullptr);

    ReleaseDC (hwnd, hdc);

    hdc = {};
    hwnd = {};
    nativeWindow.reset();
}
// Returns true if the renderer filter ended up with a connected input pin
// after RenderFile - i.e. the media actually contains a video stream.
bool isRendererConnected()
{
    ComSmartPtr<IEnumPins> enumPins;

    HRESULT hr = baseFilter->EnumPins (enumPins.resetAndGetPointerAddress());

    if (SUCCEEDED (hr))
        hr = enumPins->Reset();

    ComSmartPtr<IPin> pin;

    while (SUCCEEDED (hr)
            && enumPins->Next (1, pin.resetAndGetPointerAddress(), nullptr) == S_OK)
    {
        ComSmartPtr<IPin> otherPin;

        hr = pin->ConnectedTo (otherPin.resetAndGetPointerAddress());

        if (SUCCEEDED (hr))
        {
            PIN_DIRECTION direction;
            hr = pin->QueryDirection (&direction);

            if (SUCCEEDED (hr) && direction == PINDIR_INPUT)
                return true;
        }
        else if (hr == VFW_E_NOT_CONNECTED)
        {
            // An unconnected pin isn't an error - keep scanning the rest.
            hr = S_OK;
        }
    }

    return false;
}
//==============================================================================
// Singleton that registers (and unregisters at shutdown) the Win32 window
// class used for the hidden video windows.
struct NativeWindowClass : private DeletedAtShutdown
{
    bool isRegistered() const noexcept              { return atom != 0; }

    // A registered class can be referenced by its ATOM packed into the
    // low word of the class-name pointer.
    LPCTSTR getWindowClassName() const noexcept     { return (LPCTSTR) (pointer_sized_uint) MAKELONG (atom, 0); }

    JUCE_DECLARE_SINGLETON_SINGLETHREADED_MINIMAL (NativeWindowClass)

private:
    // Registers a uniquely-named (time-stamped) window class.
    NativeWindowClass()
    {
        String windowClassName ("JUCE_DIRECTSHOW_");
        windowClassName << (int) (Time::currentTimeMillis() & 0x7fffffff);

        HINSTANCE moduleHandle = (HINSTANCE) Process::getCurrentModuleInstanceHandle();

        TCHAR moduleFile [1024] = {};
        GetModuleFileName (moduleHandle, moduleFile, 1024);   // NOTE(review): result is unused - confirm whether this call is still needed

        WNDCLASSEX wcex = {};
        wcex.cbSize = sizeof (wcex);
        wcex.style = CS_OWNDC;   // each window of this class gets its own private DC
        wcex.lpfnWndProc = (WNDPROC) wndProc;
        wcex.lpszClassName = windowClassName.toWideCharPointer();
        wcex.hInstance = moduleHandle;

        atom = RegisterClassEx (&wcex);
        jassert (atom != 0);
    }

    ~NativeWindowClass()
    {
        if (atom != 0)
            UnregisterClass (getWindowClassName(), (HINSTANCE) Process::getCurrentModuleInstanceHandle());

        clearSingletonInstance();
    }

    // Routes window messages to the DirectShowContext stored in the
    // window's user data (set by NativeWindow's constructor).
    static LRESULT CALLBACK wndProc (HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
    {
        if (auto* c = (DirectShowContext*) GetWindowLongPtr (hwnd, GWLP_USERDATA))
        {
            switch (msg)
            {
                case WM_NCHITTEST:      return HTTRANSPARENT;   // let the parent handle mouse hits
                case WM_ERASEBKGND:     return 1;               // the renderer paints everything
                case WM_DISPLAYCHANGE:  c->displayResolutionChanged(); break;
                case graphEventID:      c->graphEventProc(); return 0;
                default:                break;
            }
        }

        return DefWindowProc (hwnd, msg, wParam, lParam);
    }

    ATOM atom = {};

    JUCE_DECLARE_NON_COPYABLE (NativeWindowClass)
};
//==============================================================================
// RAII wrapper around the hidden child HWND that DirectShow renders into.
// 'userData' (the owning DirectShowContext) is stored in the window's
// user data so the class's wndProc can find it.
struct NativeWindow
{
    NativeWindow (HWND parentToAddTo, void* userData)
    {
        auto* wc = NativeWindowClass::getInstance();

        if (wc->isRegistered())
        {
            DWORD exstyle = 0;
            DWORD type = WS_CHILD;

            hwnd = CreateWindowEx (exstyle, wc->getWindowClassName(),
                                   L"", type, 0, 0, 0, 0, parentToAddTo, nullptr,
                                   (HINSTANCE) Process::getCurrentModuleInstanceHandle(), nullptr);

            if (hwnd != nullptr)
            {
                // The class uses CS_OWNDC, so this DC stays valid for the
                // window's whole lifetime.
                hdc = GetDC (hwnd);
                SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) userData);
            }
        }

        jassert (hwnd != nullptr);
    }

    ~NativeWindow()
    {
        if (hwnd != nullptr)
        {
            // Clear the back-pointer first so wndProc can't call into a
            // half-destroyed context during DestroyWindow.
            SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) 0);
            DestroyWindow (hwnd);
        }
    }

    void setWindowPosition (Rectangle<int> newBounds)
    {
        SetWindowPos (hwnd, nullptr, newBounds.getX(), newBounds.getY(),
                      newBounds.getWidth(), newBounds.getHeight(),
                      SWP_NOACTIVATE | SWP_NOZORDER | SWP_NOOWNERZORDER);
    }

    void showWindow (bool shouldBeVisible)
    {
        ShowWindow (hwnd, shouldBeVisible ? SW_SHOWNA : SW_HIDE);
    }

    HWND hwnd = {};
    HDC hdc = {};

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (NativeWindow)
};
std::unique_ptr<NativeWindow> nativeWindow;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowContext)
};
std::unique_ptr<DirectShowContext> context;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Pimpl)
};
JUCE_IMPLEMENT_SINGLETON (VideoComponent::Pimpl::DirectShowContext::NativeWindowClass)