Sunday, March 1, 2009

Quartz Composer / OpenCV: Building a Patch for Accumulating Background Statistics

I tried implementing accumulation of background statistics, which 橋本詳解 also covered in an article.

Since cvAcc and friends apparently only handle up to 3 channels, the alpha channel is stripped out first (in the obvious way).
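
For reference, the core of the technique is just averaging frames with cvAcc and normalizing with cvConvertScale. Here is a minimal standalone sketch using highgui capture; the camera index 0 and INIT_TIME = 10 are arbitrary choices of mine, not part of the plug-in, which does the same thing on the alpha-stripped QC buffer:

#include <cv.h>
#include <highgui.h>

int main (void)
{
    const int INIT_TIME = 10;
    CvCapture *capture = cvCreateCameraCapture (0);
    IplImage *frame = cvQueryFrame (capture);    /* 8-bit, 3-channel BGR -- what cvAcc expects */
    IplImage *avg = cvCreateImage (cvGetSize (frame), IPL_DEPTH_32F, 3);
    cvSetZero (avg);

    /* Accumulate INIT_TIME frames, then divide by the count to get the mean background. */
    for (int i = 0; i < INIT_TIME; i++) {
        frame = cvQueryFrame (capture);
        cvAcc (frame, avg, NULL);
    }
    cvConvertScale (avg, avg, 1.0 / INIT_TIME, 0.0);

    /* avg now holds the background estimate; the plug-in below additionally
       accumulates a deviation image and thresholds new frames against it. */
    cvReleaseImage (&avg);
    cvReleaseCapture (&capture);
    return 0;
}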



OpenCV_AccumulationOfBackgroundPlugIn.h
#import <Quartz/Quartz.h>
#import <cv.h>

@interface OpenCV_AccumulationOfBackgroundPlugIn : QCPlugIn
{
    IplImage *av_img;     // running average of the background
    IplImage *sgm_img;    // running standard deviation
    IplImage *lower_img;  // lower bound of the background range
    IplImage *upper_img;  // upper bound of the background range
    IplImage *tmp_img;
    IplImage *dst_img;    // foreground-only result (8-bit BGR)
    IplImage *msk_img;    // background/foreground mask

    int _step;            // frame counter driving the init/update phases
    unsigned char *_ip;   // packed BGR copy of the input (alpha removed)
    unsigned char *_op;   // BGRA output buffer handed to the image provider
}
@property(assign) id<QCPlugInInputImageSource> inputSourceImage;
@property(assign) id<QCPlugInOutputImageProvider> outputResultImage;
@end

@interface OpenCV_AccumulationOfBackgroundPlugIn (FUNCTION)
- (void)clearBuffer;
- (void)init_bufferWithWidth:(int)w
                      height:(int)h
                    rowBytes:(NSUInteger)rowBytes;
- (IplImage *)removeAlphaChannelWithWidth:(int)w
                                   height:(int)h
                                imageData:(unsigned char *)q
                                 rowBytes:(NSUInteger)rowBytes;
- (void)addAlphaChannelWithWidth:(int)w
                          height:(int)h
                       imageData:(unsigned char *)oq;
@end


OpenCV_AccumulationOfBackgroundPlugIn.m
#import <OpenGL/CGLMacro.h>

#import "OpenCV_AccumulationOfBackgroundPlugIn.h"
#import "O_AOB_OutputImageProvider.h"

#define kQCPlugIn_Name        @"OpenCV Accumulation Of Background"
#define kQCPlugIn_Description @"OpenCV Accumulation Of Background description"

@implementation OpenCV_AccumulationOfBackgroundPlugIn

@dynamic inputSourceImage, outputResultImage;

+ (NSDictionary*) attributes
{
    return [NSDictionary dictionaryWithObjectsAndKeys:
            kQCPlugIn_Name, QCPlugInAttributeNameKey,
            kQCPlugIn_Description, QCPlugInAttributeDescriptionKey,
            nil];
}

+ (NSDictionary*) attributesForPropertyPortWithKey:(NSString*)key
{
    if([key isEqualToString:@"inputSourceImage"])
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Source Image", QCPortAttributeNameKey,
                nil];
    if([key isEqualToString:@"outputResultImage"])
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Result Image", QCPortAttributeNameKey,
                nil];
    return nil;
}

+ (QCPlugInExecutionMode) executionMode
{
    return kQCPlugInExecutionModeProcessor;
}

+ (QCPlugInTimeMode) timeMode
{
    return kQCPlugInTimeModeNone;
}

- (id) init
{
    if(self = [super init]) {
    }
    return self;
}

- (void) finalize
{
    [super finalize];
}

- (void) dealloc
{
    [super dealloc];
}

@end

@implementation OpenCV_AccumulationOfBackgroundPlugIn (Execution)

- (BOOL) startExecution:(id<QCPlugInContext>)context
{
    [self clearBuffer];
    return YES;
}

- (void) enableExecution:(id<QCPlugInContext>)context
{
}

- (BOOL) execute:(id<QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary*)arguments
{
    id<QCPlugInInputImageSource> image;
    self.outputResultImage = nil;

    if(image = self.inputSourceImage) {
        if(![image lockBufferRepresentationWithPixelFormat:QCPlugInPixelFormatBGRA8
                                                colorSpace:[image imageColorSpace]
                                                 forBounds:[image imageBounds]]) {
            return NO;
        }

        int INIT_TIME = 10;
        {
            double B_PARAM = 1.0 / 50.0;
            double T_PARAM = 1.0 / 200.0;
            double Zeta = 10.0;
            int w = [image bufferPixelsWide];
            int h = [image bufferPixelsHigh];

            if (!av_img) {
                [self init_bufferWithWidth:w
                                    height:h
                                  rowBytes:[image bufferBytesPerRow]];
            }

            unsigned char *q = (unsigned char *)[image bufferBaseAddress];
            IplImage *frame = [self removeAlphaChannelWithWidth:w
                                                         height:h
                                                      imageData:q
                                                       rowBytes:[image bufferBytesPerRow]];

            // Phase 1: accumulate the first INIT_TIME frames, then divide to get the mean background.
            if (0 <= _step && _step < INIT_TIME) {
                cvAcc (frame, av_img, NULL);
            }
            if (_step == INIT_TIME) {
                cvConvertScale (av_img, av_img, 1.0 / INIT_TIME, 0.0);
            }
            // Phase 2: accumulate the next INIT_TIME frames to estimate the standard deviation.
            if (INIT_TIME <= _step && _step < INIT_TIME * 2) {
                cvConvert (frame, tmp_img);
                cvSub (tmp_img, av_img, tmp_img, NULL);
                cvPow (tmp_img, tmp_img, 2.0);
                cvConvertScale (tmp_img, tmp_img, 2.0, 0.0);
                cvPow (tmp_img, tmp_img, 0.5);
                cvAcc (tmp_img, sgm_img, NULL);
            }
            if (_step == INIT_TIME * 2) {
                cvConvertScale (sgm_img, sgm_img, 1.0 / INIT_TIME, 0.0);
            }
            // Steady state: pixels inside [mean - sigma - Zeta, mean + sigma + Zeta] are background;
            // update the background model with running averages and output only the foreground.
            if (INIT_TIME * 2 < _step) {
                cvConvert (frame, tmp_img);

                cvSub (av_img, sgm_img, lower_img, NULL);
                cvSubS (lower_img, cvScalarAll (Zeta), lower_img, NULL);
                cvAdd (av_img, sgm_img, upper_img, NULL);
                cvAddS (upper_img, cvScalarAll (Zeta), upper_img, NULL);
                cvInRange (tmp_img, lower_img, upper_img, msk_img);

                cvSub (tmp_img, av_img, tmp_img, NULL);
                cvPow (tmp_img, tmp_img, 2.0);
                cvConvertScale (tmp_img, tmp_img, 2.0, 0.0);
                cvPow (tmp_img, tmp_img, 0.5);

                cvRunningAvg (frame, av_img, B_PARAM, msk_img);
                cvRunningAvg (tmp_img, sgm_img, B_PARAM, msk_img);

                cvNot (msk_img, msk_img);
                cvRunningAvg (tmp_img, sgm_img, T_PARAM, msk_img);

                cvSetZero (dst_img);
                cvCopy (frame, dst_img, msk_img);

                [self addAlphaChannelWithWidth:w
                                        height:h
                                     imageData:(unsigned char *)dst_img->imageData];

                O_AOB_OutputImageProvider *provider = [[O_AOB_OutputImageProvider alloc] initWithBuffer:_op
                                                                                        withBytesPerRow:[image bufferBytesPerRow]
                                                                                            pixelFormat:QCPlugInPixelFormatBGRA8
                                                                                              forBounds:[image imageBounds]
                                                                                          colorSpaceRef:[image imageColorSpace]];
                if(provider == nil) {
                    free (frame);
                    [image unlockBufferRepresentation];
                    return NO;
                }
                self.outputResultImage = provider;
                [provider release];
            }

            // The IplImage header returned by removeAlphaChannelWithWidth: wraps _ip, which the
            // plug-in owns; only the header itself is freed here.
            free (frame);
        }

        if (_step < (INIT_TIME * 2 + 2)) {
            _step++;
        }
        [image unlockBufferRepresentation];
    }
    return YES;
}

- (void) disableExecution:(id<QCPlugInContext>)context
{
}

- (void) stopExecution:(id<QCPlugInContext>)context
{
    [self clearBuffer];
}

@end

@implementation OpenCV_AccumulationOfBackgroundPlugIn (FUNCTION)

- (void)clearBuffer
{
    if (_ip != NULL) {
        free(_ip);
        _ip = NULL;
    }
    if (_op != NULL) {
        free(_op);
        _op = NULL;
    }
    if (dst_img != NULL) {
        cvReleaseImage (&dst_img);
        dst_img = NULL;
    }
    if (av_img != NULL) {
        cvReleaseImage (&av_img);
        av_img = NULL;
    }
    if (sgm_img != NULL) {
        cvReleaseImage (&sgm_img);
        sgm_img = NULL;
    }
    if (lower_img != NULL) {
        cvReleaseImage (&lower_img);
        lower_img = NULL;
    }
    if (upper_img != NULL) {
        cvReleaseImage (&upper_img);
        upper_img = NULL;
    }
    if (tmp_img != NULL) {
        cvReleaseImage (&tmp_img);
        tmp_img = NULL;
    }
    if (msk_img != NULL) {
        cvReleaseImage (&msk_img);
        msk_img = NULL;
    }
    _step = 0;
}

- (void)init_bufferWithWidth:(int)w
                      height:(int)h
                    rowBytes:(NSUInteger)rowBytes
{
    av_img    = cvCreateImage (cvSize (w, h), IPL_DEPTH_32F, 3);
    sgm_img   = cvCreateImage (cvSize (w, h), IPL_DEPTH_32F, 3);
    tmp_img   = cvCreateImage (cvSize (w, h), IPL_DEPTH_32F, 3);
    lower_img = cvCreateImage (cvSize (w, h), IPL_DEPTH_32F, 3);
    upper_img = cvCreateImage (cvSize (w, h), IPL_DEPTH_32F, 3);

    dst_img = cvCreateImage (cvSize (w, h), IPL_DEPTH_8U, 3);
    msk_img = cvCreateImage (cvSize (w, h), IPL_DEPTH_8U, 1);

    cvSetZero (av_img);
    cvSetZero (sgm_img);

    // _ip holds the 3-channel (BGR) copy of the input, _op the 4-channel (BGRA) result.
    NSUInteger bpr = (rowBytes / 4 * 3);
    _ip = malloc(bpr * h * sizeof(unsigned char));
    _op = malloc(rowBytes * h * sizeof(unsigned char));
}

- (IplImage *)removeAlphaChannelWithWidth:(int)w
                                   height:(int)h
                                imageData:(unsigned char *)q
                                 rowBytes:(NSUInteger)rowBytes
{
    // Copy BGRA -> BGR, dropping the alpha byte.
    // Note: the indexing assumes rowBytes == w * 4 (no row padding in the QC buffer).
    int x, y;
    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            _ip[(w * 3) * y + x * 3 + 0] = q[(w * 4) * y + x * 4 + 0];
            _ip[(w * 3) * y + x * 3 + 1] = q[(w * 4) * y + x * 4 + 1];
            _ip[(w * 3) * y + x * 3 + 2] = q[(w * 4) * y + x * 4 + 2];
        }
    }

    // Wrap _ip in an IplImage header without copying the pixels.
    IplImage *frame = calloc(1, sizeof(IplImage));
    frame->nSize           = sizeof(IplImage);
    frame->ID              = 0;
    frame->nChannels       = 3;
    frame->depth           = IPL_DEPTH_8U;
    frame->dataOrder       = 0;
    frame->origin          = 0;
    frame->width           = w;
    frame->height          = h;
    frame->roi             = NULL;
    frame->maskROI         = NULL;
    frame->imageData       = (void *)_ip;
    frame->widthStep       = (rowBytes / 4 * 3);
    frame->imageDataOrigin = (void *)_ip;
    return frame;
}

- (void)addAlphaChannelWithWidth:(int)w
                          height:(int)h
                       imageData:(unsigned char *)oq
{
    // Copy BGR -> BGRA, setting alpha to fully opaque.
    // Note: assumes oq rows are tightly packed (widthStep == w * 3).
    int x, y;
    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            _op[(w * 4) * y + x * 4 + 0] = oq[(w * 3) * y + x * 3 + 0];
            _op[(w * 4) * y + x * 4 + 1] = oq[(w * 3) * y + x * 3 + 1];
            _op[(w * 4) * y + x * 4 + 2] = oq[(w * 3) * y + x * 3 + 2];
            _op[(w * 4) * y + x * 4 + 3] = 255;
        }
    }
}

@end
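
As an aside, instead of filling in the IplImage struct field by field the way removeAlphaChannelWithWidth: does, the same zero-copy wrapping can be done with OpenCV's own header helpers. A rough sketch of that alternative (assuming tightly packed BGR rows in _ip):

IplImage *frame = cvCreateImageHeader (cvSize (w, h), IPL_DEPTH_8U, 3);
cvSetData (frame, _ip, w * 3);           /* widthStep for tightly packed BGR rows */
/* ... hand frame to cvAcc, cvRunningAvg, cvCopy, ... */
cvReleaseImageHeader (&frame);           /* releases only the header, not _ip */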

O_AOB_OutputImageProvider.h
#import <Cocoa/Cocoa.h>
#import <Quartz/Quartz.h>

@interface O_AOB_OutputImageProvider : NSObject <QCPlugInOutputImageProvider>
{
    void *_baseAddress;
    NSUInteger _rowBytes;
    NSString *_format;
    NSRect _bounds;
    CGColorSpaceRef _cgColorSpaceRef;
}
- (id)initWithBuffer:(void*)baseAddress
     withBytesPerRow:(NSUInteger)rowBytes
         pixelFormat:(NSString*)format
           forBounds:(NSRect)bounds
       colorSpaceRef:(CGColorSpaceRef)cgColorSpaceRef;
@end

O_AOB_OutputImageProvider.m
#import "O_AOB_OutputImageProvider.h"
@implementation O_AOB_OutputImageProvider
- (id)initWithBuffer:(void*)baseAddress
withBytesPerRow:(NSUInteger)rowBytes
pixelFormat:(NSString*)format
forBounds:(NSRect)bounds
colorSpaceRef:(CGColorSpaceRef)cgColorSpaceRef
{
if(self = [super init]) {
_baseAddress= baseAddress;
_rowBytes= rowBytes;
_format= [format retain];
_bounds= bounds;
_cgColorSpaceRef= cgColorSpaceRef;
}
return self;
}
- (void) dealloc
{
[_format release];
[super dealloc];
}
- (NSRect) imageBounds
{
return _bounds;
}
- (CGColorSpaceRef) imageColorSpace
{
return _cgColorSpaceRef;
}
- (NSArray*) supportedBufferPixelFormats
{
return [NSArray arrayWithObjects: _format,
nil];
}
- (BOOL) renderToBuffer:(void*)baseAddress
withBytesPerRow:(NSUInteger)rowBytes
pixelFormat:(NSString*)format
forBounds:(NSRect)bounds
{
memcpy(baseAddress, _baseAddress, _rowBytes*_bounds.size.height);
return YES;
}
@end
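
One caveat about renderToBuffer:withBytesPerRow:pixelFormat:forBounds:: it ignores the rowBytes that Quartz Composer passes in and copies _rowBytes * height bytes in one go, which is only safe when the destination stride matches the source stride. A stride-safe variant (my sketch, not part of the plug-in as posted) would copy row by row:

- (BOOL) renderToBuffer:(void*)baseAddress
        withBytesPerRow:(NSUInteger)rowBytes
            pixelFormat:(NSString*)format
              forBounds:(NSRect)bounds
{
    NSUInteger height = (NSUInteger)_bounds.size.height;
    NSUInteger bytesPerLine = MIN(rowBytes, _rowBytes);   /* never run past either stride */
    NSUInteger y;
    for (y = 0; y < height; y++) {
        memcpy((unsigned char *)baseAddress + y * rowBytes,
               (unsigned char *)_baseAddress + y * _rowBytes,
               bytesPerLine);
    }
    return YES;
}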


I'll post a capture of it running some other time.
