#import <OpenGL/CGLMacro.h>
#import <QTKit/QTKit.h>            // QTCaptureSession, QTCaptureDevice, ...
#import <QuartzCore/QuartzCore.h>  // CIImage / CIContext (harmless if the header imports these already)
#import "CaptureWithDevicePlugIn.h"
#define kQCPlugIn_Name        @"CaptureWithDevice"
#define kQCPlugIn_Description @"Captures live video from a selectable QTKit input device and provides the frames as an image."
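// A Quartz Composer plug-in that runs a QTCaptureSession for the camera chosen
// on its "Device" index port and publishes each captured frame on its image port.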
@implementation CaptureWithDevicePlugIn
@dynamic inputDevice;
@dynamic outputImage;
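// Port-backed properties: Quartz Composer generates their accessors at runtime (@dynamic).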
+ (NSDictionary*) attributes
{
    return [NSDictionary dictionaryWithObjectsAndKeys:
            kQCPlugIn_Name, QCPlugInAttributeNameKey,
            kQCPlugIn_Description, QCPlugInAttributeDescriptionKey,
            nil];
}
+ (NSDictionary*) attributesForPropertyPortWithKey:(NSString*)key
{
    if([key isEqualToString:@"inputDevice"]) {
        // Build the index port's menu from the video capture devices present right now.
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        NSMutableArray *deviceNames = [NSMutableArray array];
        for(QTCaptureDevice *device in devices) {
            [deviceNames addObject:[device localizedDisplayName]];
        }
        NSUInteger count = [deviceNames count];
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Device", QCPortAttributeNameKey,
                QCPortTypeIndex, QCPortAttributeTypeKey,
                deviceNames, QCPortAttributeMenuItemsKey,
                [NSNumber numberWithUnsignedInteger:0], QCPortAttributeMinimumValueKey,
                // Guard against an empty device list so the maximum never goes negative.
                [NSNumber numberWithUnsignedInteger:(count > 0 ? count - 1 : 0)], QCPortAttributeMaximumValueKey,
                nil];
    }
    if([key isEqualToString:@"outputImage"])
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Video Image", QCPortAttributeNameKey,
                nil];
    return nil;
}
+ (QCPlugInExecutionMode) executionMode
{
    // Provider: the plug-in supplies data (video frames) to the composition.
    return kQCPlugInExecutionModeProvider;
}
+ (QCPlugInTimeMode) timeMode
{
    // Idle: the plug-in does not depend on composition time but is executed
    // periodically, so asynchronously captured frames still get picked up.
    return kQCPlugInTimeModeIdle;
}
- (id) init
{
    if((self = [super init])) {
        // Observe device hot-plugging so the plug-in can react to cameras
        // being connected or removed while a composition is running.
        [[NSNotificationCenter defaultCenter] addObserver:self
                                               selector:@selector(_devicesDidChange:)
                                                   name:QTCaptureDeviceWasConnectedNotification
                                                 object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                               selector:@selector(_devicesDidChange:)
                                                   name:QTCaptureDeviceWasDisconnectedNotification
                                                 object:nil];
    }
    return self;
}
- (void) finalize
{
    // Under garbage collection the CoreVideo buffer still needs an explicit release.
    CVBufferRelease(mCurrentImageBuffer);
    [super finalize];
}
- (void) dealloc
{
    if (mCaptureSession) {
        [mCaptureSession stopRunning];   // stop frame delivery before tearing down
        [mCaptureSession release];
        [mCaptureDeviceInput release];
        [mCaptureDecompressedVideoOutput release];
    }
    CVBufferRelease(mCurrentImageBuffer); // balance the retain taken in the capture callback
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [super dealloc];
}
@end
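// Execution life cycle. The capture session is built lazily the first time
// -execute:atTime:withArguments: runs, or whenever the selected device changes.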
@implementation CaptureWithDevicePlugIn (Execution)
- (BOOL) startExecution:(id<QCPlugInContext>)context
{
    // Nothing to set up yet; the session is created on first execution.
    return YES;
}
- (void) enableExecution:(id<QCPlugInContext>)context
{
}
- (BOOL) execute:(id<QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary*)arguments
{
    if (!mCaptureSession || ![mCaptureSession isRunning] || _currentDevice != self.inputDevice) {
        NSError *error = nil;
        BOOL success;
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        if ([devices count] == 0) {
            // No camera attached; publish no image rather than indexing an empty array.
            self.outputImage = nil;
            return YES;
        }
        NSUInteger d = self.inputDevice;
        if (d >= [devices count]) {
            d = 0;   // stale index (a device was unplugged); fall back to the first one
        }
        QTCaptureDevice *device = [devices objectAtIndex:d];
        success = [device open:&error];
        if (!success) {
            self.outputImage = nil;
            return YES;
        }
        // Tear down any previous session completely before building a new one;
        // releasing only the session would leak the old input and output objects.
        [mCaptureSession stopRunning];
        [mCaptureSession release];
        [mCaptureDeviceInput release];
        [mCaptureDecompressedVideoOutput release];
        mCaptureDeviceInput = nil;
        mCaptureDecompressedVideoOutput = nil;
        mCaptureSession = [[QTCaptureSession alloc] init];
        mCaptureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:device];
        success = [mCaptureSession addInput:mCaptureDeviceInput error:&error];
        if (!success) {
            self.outputImage = nil;
            [mCaptureSession release];
            mCaptureSession = nil;
            [mCaptureDeviceInput release];
            mCaptureDeviceInput = nil;   // nil out so -dealloc cannot over-release
            return YES;
        }
        mCaptureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
        [mCaptureDecompressedVideoOutput setDelegate:self];
        success = [mCaptureSession addOutput:mCaptureDecompressedVideoOutput error:&error];
        if (!success) {
            self.outputImage = nil;
            [mCaptureSession release];
            mCaptureSession = nil;
            [mCaptureDeviceInput release];
            mCaptureDeviceInput = nil;
            [mCaptureDecompressedVideoOutput release];
            mCaptureDecompressedVideoOutput = nil;
            return YES;
        }
        [mCaptureSession startRunning];
        _currentDevice = self.inputDevice;
    }
    // Publish the most recent frame delivered by the capture thread.
    CVImageBufferRef imageBuffer;
    @synchronized (self) {
        // Take our own retain under the lock; the callback may swap the buffer at any time.
        imageBuffer = CVBufferRetain(mCurrentImageBuffer);
    }
    if (imageBuffer) {
        // The provider takes ownership of the retain acquired above and releases it in -dealloc.
        OutputImageProvider *provider = [[OutputImageProvider alloc] initWithImageSource:imageBuffer];
        if (provider == nil) {
            CVBufferRelease(imageBuffer);   // don't leak the frame if the provider could not be created
            return NO;
        }
        self.outputImage = provider;
        [provider release];
    }
    else
        self.outputImage = nil;
    return YES;
}
- (void) disableExecution:(id<QCPlugInContext>)context
{
}
- (void) stopExecution:(id<QCPlugInContext>)context
{
    // Release the camera when the composition stops rendering;
    // -execute: restarts the session on demand because it checks -isRunning.
    [mCaptureSession stopRunning];
}
// QTCaptureDecompressedVideoOutput delegate: called on a background capture
// thread for every decoded frame. Keep only the newest frame.
- (void)captureOutput:(QTCaptureOutput *)captureOutput
didOutputVideoFrame:(CVImageBufferRef)videoFrame
withSampleBuffer:(QTSampleBuffer *)sampleBuffer
fromConnection:(QTCaptureConnection *)connection
{
    CVImageBufferRef imageBufferToRelease;
    CVBufferRetain(videoFrame);
    @synchronized (self) {
        // Swap in the new frame under the lock...
        imageBufferToRelease = mCurrentImageBuffer;
        mCurrentImageBuffer = videoFrame;
    }
    // ...but release the old one outside of it to keep the critical section short.
    CVBufferRelease(imageBufferToRelease);
}
- (void)_devicesDidChange:(NSNotification *)aNotification
{
    // Intentionally empty: the device menu is rebuilt only when Quartz Composer
    // queries +attributesForPropertyPortWithKey: again, and a vanished device is
    // handled in -execute: by falling back to index 0.
}
@end
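// OutputImageProvider wraps one retained CVImageBufferRef and renders it into
// whatever pixel buffer Quartz Composer asks for.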
@implementation OutputImageProvider
- (id) initWithImageSource:(CVImageBufferRef)imageBuffer
{
    if(!imageBuffer) {
        [self release];
        return nil;
    }
    if((self = [super init])) {
        // Takes ownership of the caller's retain; balanced in -dealloc.
        _imageBuffer = imageBuffer;
    }
    return self;
}
- (void) dealloc
{
    CVBufferRelease(_imageBuffer);
    [super dealloc];
}
- (NSRect) imageBounds
{
#if defined(__LP64__) && __LP64__
    // On 64-bit, NSRect and CGRect are the same type.
    return CVImageBufferGetCleanRect(_imageBuffer);
#else
    CGRect r = CVImageBufferGetCleanRect(_imageBuffer);
    return NSMakeRect(r.origin.x, r.origin.y, r.size.width, r.size.height);
#endif
}
- (CGColorSpaceRef)imageColorSpace
{
    return CVImageBufferGetColorSpace(_imageBuffer);
}
- (NSArray*) supportedBufferPixelFormats
{
    // Quartz Composer will request one of these in -renderToBuffer:... below.
    return [NSArray arrayWithObjects:
            QCPlugInPixelFormatARGB8,
            QCPlugInPixelFormatBGRA8,
            nil];
}
- (BOOL) renderToBuffer:(void *)baseAddress
        withBytesPerRow:(NSUInteger)rowBytes
            pixelFormat:(NSString *)format
              forBounds:(NSRect)bounds
{
    // Draw the frame straight into QC's buffer so the requested pixel format and
    // row stride are honored (a blind memcpy ignores both and leaks a rep per frame).
    CGBitmapInfo info = [format isEqualToString:QCPlugInPixelFormatBGRA8]
        ? (kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little)    // BGRA8
        : (kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big);      // ARGB8
    CGColorSpaceRef colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
    CGContextRef cgContext = CGBitmapContextCreate(baseAddress, bounds.size.width, bounds.size.height, 8, rowBytes, colorSpace, info);
    CGColorSpaceRelease(colorSpace);
    if (!cgContext)
        return NO;
    CIImage *image = [CIImage imageWithCVImageBuffer:_imageBuffer];
    [[CIContext contextWithCGContext:cgContext options:nil] drawImage:image inRect:CGRectMake(0, 0, bounds.size.width, bounds.size.height) fromRect:[image extent]];
    CGContextRelease(cgContext);
    return YES;
}
@end
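For reference, here is a minimal sketch of the matching CaptureWithDevicePlugIn.h, inferred from the instance variables and properties this implementation uses; the header itself is not shown here, so the exact declarations are assumptions.

#import <Quartz/Quartz.h>
#import <QTKit/QTKit.h>

@interface CaptureWithDevicePlugIn : QCPlugIn
{
    QTCaptureSession                 *mCaptureSession;
    QTCaptureDeviceInput             *mCaptureDeviceInput;
    QTCaptureDecompressedVideoOutput *mCaptureDecompressedVideoOutput;
    CVImageBufferRef                  mCurrentImageBuffer; // newest frame, shared with the capture thread
    NSUInteger                        _currentDevice;      // index the session was built for
}
@property(assign) NSUInteger inputDevice;                       // index port ("Device")
@property(assign) id<QCPlugInOutputImageProvider> outputImage;  // image port ("Video Image")
@end

@interface OutputImageProvider : NSObject <QCPlugInOutputImageProvider>
{
    CVImageBufferRef _imageBuffer; // retained frame; released in -dealloc
}
- (id) initWithImageSource:(CVImageBufferRef)imageBuffer;
@end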