 * by Xiaochao Yang on 06/15/11 modified from
 * cap_qtkit.mm for Nicholas Butko for Mac OS version.
 *
 * Copyright 2011. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32
#include "precomp.hpp"
33
#include "opencv2/imgproc.hpp"
35
#import <AVFoundation/AVFoundation.h>
36
#import <Foundation/NSException.h>
39
/********************** Declaration of class headers ************************/
41
/*****************************************************************************
43
* CaptureDelegate Declaration.
45
* CaptureDelegate is notified on a separate thread by the OS whenever there
46
* is a new frame. When "updateImage" is called from the main thread, it
47
* copies this new frame into an IplImage, but only if this frame has not
48
* been copied before. When "getOutput" is called from the main thread,
49
* it gives the last copied IplImage.
51
*****************************************************************************/
53
#define DISABLE_AUTO_RESTART 999
55
@interface CaptureDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
58
CVImageBufferRef mCurrentImageBuffer;
63
IplImage* bgr_image_r90;
67
- (void)captureOutput:(AVCaptureOutput *)captureOutput
68
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
69
fromConnection:(AVCaptureConnection *)connection;
73
- (IplImage*)getOutput;
77
/*****************************************************************************
79
* CvCaptureCAM Declaration.
81
* CvCaptureCAM is the instantiation of a capture source for cameras.
83
*****************************************************************************/
85
class CvCaptureCAM : public CvCapture {
87
CvCaptureCAM(int cameraNum = -1) ;
89
virtual bool grabFrame();
90
virtual IplImage* retrieveFrame(int);
91
virtual IplImage* queryFrame();
92
virtual double getProperty(int property_id) const;
93
virtual bool setProperty(int property_id, double value);
94
virtual int didStart();
97
AVCaptureSession *mCaptureSession;
98
AVCaptureDeviceInput *mCaptureDeviceInput;
99
AVCaptureVideoDataOutput *mCaptureDecompressedVideoOutput;
100
AVCaptureDevice *mCaptureDevice;
101
CaptureDelegate *capture;
103
int startCaptureDevice(int cameraNum);
104
void stopCaptureDevice();
106
void setWidthHeight();
107
bool grabFrame(double timeOut);
115
int disableAutoRestart;
119
/*****************************************************************************
121
* CvCaptureFile Declaration.
123
* CvCaptureFile is the instantiation of a capture source for video files.
125
*****************************************************************************/
127
class CvCaptureFile : public CvCapture {
130
CvCaptureFile(const char* filename) ;
132
virtual bool grabFrame();
133
virtual IplImage* retrieveFrame(int);
134
virtual IplImage* queryFrame();
135
virtual double getProperty(int property_id) const;
136
virtual bool setProperty(int property_id, double value);
137
virtual int didStart();
141
AVAssetReader *mMovieReader;
148
IplImage* retrieveFramePixelBuffer();
155
double movieDuration;
162
/*****************************************************************************
 *
 * CvVideoWriter_AVFoundation Declaration.
 *
 * CvVideoWriter_AVFoundation is the instantiation of a video output class.
 *
 *****************************************************************************/
170
class CvVideoWriter_AVFoundation : public CvVideoWriter{
172
CvVideoWriter_AVFoundation(const char* filename, int fourcc,
173
double fps, CvSize frame_size,
175
~CvVideoWriter_AVFoundation();
176
bool writeFrame(const IplImage* image);
180
AVAssetWriter *mMovieWriter;
181
AVAssetWriterInput* mMovieWriterInput;
182
AVAssetWriterInputPixelBufferAdaptor* mMovieWriterAdaptor;
190
unsigned long frameCount;
194
/****************** Implementation of interface functions ********************/
197
/// Factory for a file-backed capture source.
/// Returns a started CvCaptureFile for `filename`, or NULL when the reader
/// could not be opened (the caller owns the returned capture).
/// NOTE(review): the tail of this function was lost to extraction garbling;
/// the failure path below is reconstructed from the visible didStart() check —
/// confirm against the upstream file.
CvCapture* cvCreateFileCapture_AVFoundation(const char* filename) {
    CvCaptureFile *retval = new CvCaptureFile(filename);

    if(retval->didStart())
        return retval;

    // Opening the movie failed; destroy the half-built capture and signal
    // failure to the caller with NULL.
    delete retval;
    return NULL;
}
206
/// Factory for a camera-backed capture source.
/// Returns a CvCaptureCAM for camera `index` (negative index selects the
/// default device), or NULL when the camera failed to start. The caller owns
/// the returned capture.
/// NOTE(review): the closing `return retval;` was missing from the garbled
/// extraction and is restored here; on failure cvReleaseCapture() sets
/// `retval` to NULL, so returning it is correct in both branches.
CvCapture* cvCreateCameraCapture_AVFoundation(int index ) {
    CvCapture* retval = new CvCaptureCAM(index);

    if (!((CvCaptureCAM *)retval)->didStart())
        cvReleaseCapture(&retval);

    return retval;
}
215
/// Factory for the AVFoundation video writer.
/// Thin pass-through to the CvVideoWriter_AVFoundation constructor; the
/// constructor itself performs all validation and setup.
/// NOTE(review): the `int is_color` parameter and braces were missing from the
/// garbled extraction; the parameter is restored because the visible body
/// forwards `is_color` — confirm the exact default/signature upstream.
CvVideoWriter* cvCreateVideoWriter_AVFoundation(const char* filename, int fourcc,
                                                double fps, CvSize frame_size,
                                                int is_color) {
    return new CvVideoWriter_AVFoundation(filename, fourcc, fps, frame_size, is_color);
}
221
/********************** Implementation of Classes ****************************/
222
/*****************************************************************************
224
* CvCaptureCAM Implementation.
226
* CvCaptureCAM is the instantiation of a capture source for cameras.
228
*****************************************************************************/
230
CvCaptureCAM::CvCaptureCAM(int cameraNum) {
231
mCaptureSession = nil;
232
mCaptureDeviceInput = nil;
233
mCaptureDecompressedVideoOutput = nil;
240
disableAutoRestart = 0;
244
if (!startCaptureDevice(camNum)) {
245
std::cout << "Warning, camera failed to properly initialize!" << std::endl;
253
CvCaptureCAM::~CvCaptureCAM() {
255
//cout << "Cleaned up camera." << endl;
258
int CvCaptureCAM::didStart() {
263
bool CvCaptureCAM::grabFrame() {
267
bool CvCaptureCAM::grabFrame(double timeOut) {
269
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
270
double sleepTime = 0.005;
273
NSDate *loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
274
while (![capture updateImage] && (total += sleepTime)<=timeOut &&
275
[[NSRunLoop currentRunLoop] runMode: NSDefaultRunLoopMode
276
beforeDate:loopUntil])
277
loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
281
return total <= timeOut;
284
/// Return the most recently converted frame from the capture delegate.
/// The delegate owns the returned IplImage (its bgr_image_r90 member); the
/// caller must not free it. The int argument (stream index) is unused.
/// NOTE(review): closing brace restored — it was dropped by extraction garbling.
IplImage* CvCaptureCAM::retrieveFrame(int) {
    return [capture getOutput];
}
288
IplImage* CvCaptureCAM::queryFrame() {
289
while (!grabFrame()) {
290
std::cout << "WARNING: Couldn't grab new frame from camera!!!" << std::endl;
292
cout << "Attempting to restart camera; set capture property DISABLE_AUTO_RESTART to disable." << endl;
294
startCaptureDevice(camNum);
297
return retrieveFrame(0);
300
void CvCaptureCAM::stopCaptureDevice() {
301
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
303
[mCaptureSession stopRunning];
305
[mCaptureSession release];
306
[mCaptureDeviceInput release];
308
[mCaptureDecompressedVideoOutput release];
314
int CvCaptureCAM::startCaptureDevice(int cameraNum) {
315
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
317
capture = [[CaptureDelegate alloc] init];
319
AVCaptureDevice *device;
320
NSArray* devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
321
if ([devices count] == 0) {
322
std::cout << "AV Foundation didn't find any attached Video Input Devices!" << std::endl;
327
if (cameraNum >= 0) {
328
camNum = cameraNum % [devices count];
329
if (camNum != cameraNum) {
330
std::cout << "Warning: Max Camera Num is " << [devices count]-1 << "; Using camera " << camNum << std::endl;
332
device = [devices objectAtIndex:camNum];
334
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] ;
336
mCaptureDevice = device;
342
mCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error] ;
343
mCaptureSession = [[AVCaptureSession alloc] init] ;
346
success = [mCaptureSession addInput:mCaptureDeviceInput];
349
cout << "AV Foundation failed to start capture session with opened Capture Device" << endl;
355
mCaptureDecompressedVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
357
dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
358
[mCaptureDecompressedVideoOutput setSampleBufferDelegate:capture queue:queue];
359
dispatch_release(queue);
362
NSDictionary *pixelBufferOptions ;
363
if (width > 0 && height > 0) {
364
pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
365
[NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
366
[NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
367
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
368
(id)kCVPixelBufferPixelFormatTypeKey,
371
pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
372
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
373
(id)kCVPixelBufferPixelFormatTypeKey,
377
//TODO: add new interface for setting fps and capturing resolution.
378
[mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
379
mCaptureDecompressedVideoOutput.alwaysDiscardsLateVideoFrames = YES;
381
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
382
mCaptureDecompressedVideoOutput.minFrameDuration = CMTimeMake(1, 30);
385
//Slow. 1280*720 for iPhone4, iPod back camera. 640*480 for front camera
386
//mCaptureSession.sessionPreset = AVCaptureSessionPresetHigh; // fps ~= 5 slow for OpenCV
388
mCaptureSession.sessionPreset = AVCaptureSessionPresetMedium; //480*360
389
if (width == 0 ) width = 480;
390
if (height == 0 ) height = 360;
392
[mCaptureSession addInput:mCaptureDeviceInput];
393
[mCaptureSession addOutput:mCaptureDecompressedVideoOutput];
396
// Does not work! This is the preferred way (hardware acceleration) to change pixel buffer orientation.
397
// I'm now using cvtranspose and cvflip instead, which takes cpu cycles.
398
AVCaptureConnection *connection = [[mCaptureDecompressedVideoOutput connections] objectAtIndex:0];
399
if([connection isVideoOrientationSupported]) {
400
//NSLog(@"Setting pixel buffer orientation");
401
connection.videoOrientation = AVCaptureVideoOrientationPortrait;
405
[mCaptureSession startRunning];
416
void CvCaptureCAM::setWidthHeight() {
417
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
418
NSDictionary* pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
419
[NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
420
[NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
421
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
422
(id)kCVPixelBufferPixelFormatTypeKey,
425
[mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
430
//added macros into headers in videoio_c.h
432
#define CV_CAP_PROP_IOS_DEVICE_FOCUS 9001
433
#define CV_CAP_PROP_IOS_DEVICE_EXPOSURE 9002
434
#define CV_CAP_PROP_IOS_DEVICE_FLASH 9003
435
#define CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE 9004
436
#define CV_CAP_PROP_IOS_DEVICE_TORCH 9005
441
// All available settings are taken from iOS API
444
AVCaptureFlashModeOff = 0,
445
AVCaptureFlashModeOn = 1,
446
AVCaptureFlashModeAuto = 2
448
typedef NSInteger AVCaptureFlashMode;
451
AVCaptureTorchModeOff = 0,
452
AVCaptureTorchModeOn = 1,
453
AVCaptureTorchModeAuto = 2
455
typedef NSInteger AVCaptureTorchMode;
458
AVCaptureFocusModeLocked = 0,
459
AVCaptureFocusModeAutoFocus = 1,
460
AVCaptureFocusModeContinuousAutoFocus = 2,
462
typedef NSInteger AVCaptureFocusMode;
465
AVCaptureExposureModeLocked = 0,
466
AVCaptureExposureModeAutoExpose = 1,
467
AVCaptureExposureModeContinuousAutoExposure = 2,
469
typedef NSInteger AVCaptureExposureMode;
472
AVCaptureWhiteBalanceModeLocked = 0,
473
AVCaptureWhiteBalanceModeAutoWhiteBalance = 1,
474
AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
476
typedef NSInteger AVCaptureWhiteBalanceMode;
479
double CvCaptureCAM::getProperty(int property_id) const{
480
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
483
NSArray* connections = [mCaptureDeviceInput connections];
484
QTFormatDescription* format = [[connections objectAtIndex:0] formatDescription];
485
NSSize s1 = [[format attributeForKey:QTFormatDescriptionVideoCleanApertureDisplaySizeAttribute] sizeValue];
488
NSArray* ports = mCaptureDeviceInput.ports;
489
CMFormatDescriptionRef format = [[ports objectAtIndex:0] formatDescription];
490
CGSize s1 = CMVideoFormatDescriptionGetPresentationDimensions(format, YES, YES);
492
int w=(int)s1.width, h=(int)s1.height;
496
switch (property_id) {
497
case CV_CAP_PROP_FRAME_WIDTH:
499
case CV_CAP_PROP_FRAME_HEIGHT:
502
case CV_CAP_PROP_IOS_DEVICE_FOCUS:
503
return mCaptureDevice.focusMode;
504
case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
505
return mCaptureDevice.exposureMode;
506
case CV_CAP_PROP_IOS_DEVICE_FLASH:
507
return mCaptureDevice.flashMode;
508
case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
509
return mCaptureDevice.whiteBalanceMode;
510
case CV_CAP_PROP_IOS_DEVICE_TORCH:
511
return mCaptureDevice.torchMode;
520
bool CvCaptureCAM::setProperty(int property_id, double value) {
521
switch (property_id) {
522
case CV_CAP_PROP_FRAME_WIDTH:
525
if (settingWidth && settingHeight) {
532
case CV_CAP_PROP_FRAME_HEIGHT:
535
if (settingWidth && settingHeight) {
542
case CV_CAP_PROP_IOS_DEVICE_FOCUS:
543
if ([mCaptureDevice isFocusModeSupported:(AVCaptureFocusMode)value]){
544
NSError* error = nil;
545
[mCaptureDevice lockForConfiguration:&error];
546
if (error) return false;
547
[mCaptureDevice setFocusMode:(AVCaptureFocusMode)value];
548
[mCaptureDevice unlockForConfiguration];
549
//NSLog(@"Focus set");
555
case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
556
if ([mCaptureDevice isExposureModeSupported:(AVCaptureExposureMode)value]){
557
NSError* error = nil;
558
[mCaptureDevice lockForConfiguration:&error];
559
if (error) return false;
560
[mCaptureDevice setExposureMode:(AVCaptureExposureMode)value];
561
[mCaptureDevice unlockForConfiguration];
562
//NSLog(@"Exposure set");
568
case CV_CAP_PROP_IOS_DEVICE_FLASH:
569
if ( [mCaptureDevice hasFlash] && [mCaptureDevice isFlashModeSupported:(AVCaptureFlashMode)value]){
570
NSError* error = nil;
571
[mCaptureDevice lockForConfiguration:&error];
572
if (error) return false;
573
[mCaptureDevice setFlashMode:(AVCaptureFlashMode)value];
574
[mCaptureDevice unlockForConfiguration];
575
//NSLog(@"Flash mode set");
581
case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
582
if ([mCaptureDevice isWhiteBalanceModeSupported:(AVCaptureWhiteBalanceMode)value]){
583
NSError* error = nil;
584
[mCaptureDevice lockForConfiguration:&error];
585
if (error) return false;
586
[mCaptureDevice setWhiteBalanceMode:(AVCaptureWhiteBalanceMode)value];
587
[mCaptureDevice unlockForConfiguration];
588
//NSLog(@"White balance set");
594
case CV_CAP_PROP_IOS_DEVICE_TORCH:
595
if ([mCaptureDevice hasFlash] && [mCaptureDevice isTorchModeSupported:(AVCaptureTorchMode)value]){
596
NSError* error = nil;
597
[mCaptureDevice lockForConfiguration:&error];
598
if (error) return false;
599
[mCaptureDevice setTorchMode:(AVCaptureTorchMode)value];
600
[mCaptureDevice unlockForConfiguration];
601
//NSLog(@"Torch mode set");
607
case DISABLE_AUTO_RESTART:
608
disableAutoRestart = value;
616
/*****************************************************************************
618
* CaptureDelegate Implementation.
620
* CaptureDelegate is notified on a separate thread by the OS whenever there
621
* is a new frame. When "updateImage" is called from the main thread, it
622
* copies this new frame into an IplImage, but only if this frame has not
623
* been copied before. When "getOutput" is called from the main thread,
624
* it gives the last copied IplImage.
626
*****************************************************************************/
629
@implementation CaptureDelegate
635
bgr_imagedata = NULL;
639
bgr_image_r90 = NULL;
645
if (imagedata != NULL) free(imagedata);
646
if (bgr_imagedata != NULL) free(bgr_imagedata);
647
cvReleaseImage(&image);
648
cvReleaseImage(&bgr_image);
649
cvReleaseImage(&bgr_image_r90);
655
- (void)captureOutput:(AVCaptureOutput *)captureOutput
656
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
657
fromConnection:(AVCaptureConnection *)connection{
660
// connection.videoOrientation = AVCaptureVideoOrientationPortrait;
664
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
666
CVBufferRetain(imageBuffer);
667
CVImageBufferRef imageBufferToRelease = mCurrentImageBuffer;
669
@synchronized (self) {
671
mCurrentImageBuffer = imageBuffer;
675
CVBufferRelease(imageBufferToRelease);
680
-(IplImage*) getOutput {
682
return bgr_image_r90;
686
if (newFrame==0) return 0;
687
CVPixelBufferRef pixels;
689
@synchronized (self){
690
pixels = CVBufferRetain(mCurrentImageBuffer);
694
CVPixelBufferLockBaseAddress(pixels, 0);
695
uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);
697
size_t width = CVPixelBufferGetWidth(pixels);
698
size_t height = CVPixelBufferGetHeight(pixels);
699
size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);
703
if (currSize != rowBytes*height*sizeof(char)) {
704
currSize = rowBytes*height*sizeof(char);
705
if (imagedata != NULL) free(imagedata);
706
if (bgr_imagedata != NULL) free(bgr_imagedata);
707
imagedata = (char*)malloc(currSize);
708
bgr_imagedata = (char*)malloc(currSize);
711
memcpy(imagedata, baseaddress, currSize);
714
image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
716
image->width = (int)width;
717
image->height = (int)height;
718
image->nChannels = 4;
719
image->depth = IPL_DEPTH_8U;
720
image->widthStep = (int)rowBytes;
721
image->imageData = imagedata;
722
image->imageSize = (int)currSize;
724
if (bgr_image == NULL) {
725
bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
727
bgr_image->width = (int)width;
728
bgr_image->height = (int)height;
729
bgr_image->nChannels = 3;
730
bgr_image->depth = IPL_DEPTH_8U;
731
bgr_image->widthStep = (int)rowBytes;
732
bgr_image->imageData = bgr_imagedata;
733
bgr_image->imageSize = (int)currSize;
735
cvCvtColor(image, bgr_image, CV_BGRA2BGR);
737
// image taken from the buffer is incorrected rotated. I'm using cvTranspose + cvFlip.
738
// There should be an option in iOS API to rotate the buffer output orientation.
739
// iOS provides hardware accelerated rotation through AVCaptureConnection class
740
// I can't get it work.
741
if (bgr_image_r90 == NULL){
742
bgr_image_r90 = cvCreateImage(cvSize((int)height, (int)width), IPL_DEPTH_8U, 3);
744
cvTranspose(bgr_image, bgr_image_r90);
745
cvFlip(bgr_image_r90, NULL, 1);
749
CVPixelBufferUnlockBaseAddress(pixels, 0);
750
CVBufferRelease(pixels);
758
/*****************************************************************************
760
* CvCaptureFile Implementation.
762
* CvCaptureFile is the instantiation of a capture source for video files.
764
*****************************************************************************/
766
CvCaptureFile::CvCaptureFile(const char* filename) {
768
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
774
bgr_imagedata = NULL;
786
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:
787
[NSURL fileURLWithPath: [NSString stringWithUTF8String:filename]]
790
AVAssetTrack* videoTrack = nil;
791
NSArray* tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
792
if ([tracks count] == 1)
794
videoTrack = [tracks objectAtIndex:0];
796
movieWidth = videoTrack.naturalSize.width;
797
movieHeight = videoTrack.naturalSize.height;
798
movieFPS = videoTrack.nominalFrameRate;
800
currentFPS = movieFPS; //Debugging !! should be getFPS();
801
//Debugging. need to be checked
804
movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;
807
NSError* error = nil;
808
mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
810
NSLog(@"%@", [error localizedDescription]);
812
NSDictionary* videoSettings =
813
[NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
814
forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
816
[mMovieReader addOutput:[AVAssetReaderTrackOutput
817
assetReaderTrackOutputWithTrack:videoTrack
818
outputSettings:videoSettings]];
819
[mMovieReader startReading];
823
// Asynchronously open the video in another thread. Always fail.
824
[asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:
826
// The completion block goes here.
827
dispatch_async(dispatch_get_main_queue(),
829
AVAssetTrack* ::videoTrack = nil;
830
NSArray* ::tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
831
if ([tracks count] == 1)
833
videoTrack = [tracks objectAtIndex:0];
835
movieWidth = videoTrack.naturalSize.width;
836
movieHeight = videoTrack.naturalSize.height;
837
movieFPS = videoTrack.nominalFrameRate;
838
currentFPS = movieFPS; //Debugging !! should be getFPS();
839
//Debugging. need to be checked
840
movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;
843
NSError* ::error = nil;
844
// mMovieReader is a member variable
845
mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
847
NSLog(@"%@", [error localizedDescription]);
849
NSDictionary* ::videoSettings =
850
[NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
851
forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
853
[mMovieReader addOutput:[AVAssetReaderTrackOutput
854
assetReaderTrackOutputWithTrack:videoTrack
855
outputSettings:videoSettings]];
856
[mMovieReader startReading];
866
CvCaptureFile::~CvCaptureFile() {
868
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
869
if (imagedata != NULL) free(imagedata);
870
if (bgr_imagedata != NULL) free(bgr_imagedata);
871
cvReleaseImage(&image);
872
cvReleaseImage(&bgr_image);
873
[mMovieReader release];
877
int CvCaptureFile::didStart() {
881
bool CvCaptureFile::grabFrame() {
883
//everything is done in queryFrame;
884
currentFPS = movieFPS;
889
double t1 = getProperty(CV_CAP_PROP_POS_MSEC);
890
[mCaptureSession stepForward];
891
double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
892
if (t2>t1 && !changedPos) {
893
currentFPS = 1000.0/(t2-t1);
895
currentFPS = movieFPS;
904
IplImage* CvCaptureFile::retrieveFramePixelBuffer() {
905
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
907
if (mMovieReader.status != AVAssetReaderStatusReading){
913
AVAssetReaderOutput * output = [mMovieReader.outputs objectAtIndex:0];
914
CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
919
CVPixelBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
920
CVPixelBufferRef pixels = CVBufferRetain(frame);
922
CVPixelBufferLockBaseAddress(pixels, 0);
924
uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);
925
size_t width = CVPixelBufferGetWidth(pixels);
926
size_t height = CVPixelBufferGetHeight(pixels);
927
size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);
931
if (currSize != rowBytes*height*sizeof(char)) {
932
currSize = rowBytes*height*sizeof(char);
933
if (imagedata != NULL) free(imagedata);
934
if (bgr_imagedata != NULL) free(bgr_imagedata);
935
imagedata = (char*)malloc(currSize);
936
bgr_imagedata = (char*)malloc(currSize);
939
memcpy(imagedata, baseaddress, currSize);
942
image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
945
image->width = (int)width;
946
image->height = (int)height;
947
image->nChannels = 4;
948
image->depth = IPL_DEPTH_8U;
949
image->widthStep = (int)rowBytes;
950
image->imageData = imagedata;
951
image->imageSize = (int)currSize;
954
if (bgr_image == NULL) {
955
bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
958
bgr_image->width = (int)width;
959
bgr_image->height = (int)height;
960
bgr_image->nChannels = 3;
961
bgr_image->depth = IPL_DEPTH_8U;
962
bgr_image->widthStep = (int)rowBytes;
963
bgr_image->imageData = bgr_imagedata;
964
bgr_image->imageSize = (int)currSize;
966
cvCvtColor(image, bgr_image,CV_BGRA2BGR);
970
CVPixelBufferUnlockBaseAddress(pixels, 0);
971
CVBufferRelease(pixels);
972
CMSampleBufferInvalidate(sampleBuffer);
973
CFRelease(sampleBuffer);
980
/// Retrieve the next decoded frame of the movie as a BGR IplImage.
/// Delegates to retrieveFramePixelBuffer(), which owns the returned image;
/// the caller must not free it. The int argument (stream index) is unused.
/// NOTE(review): closing brace restored — it was dropped by extraction garbling.
IplImage* CvCaptureFile::retrieveFrame(int) {
    return retrieveFramePixelBuffer();
}
984
IplImage* CvCaptureFile::queryFrame() {
986
return retrieveFrame(0);
989
double CvCaptureFile::getFPS() {
992
if (mCaptureSession == nil) return 0;
993
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
994
double now = getProperty(CV_CAP_PROP_POS_MSEC);
997
[mCaptureSession stepForward];
998
double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
999
[mCaptureSession stepBackward];
1000
retval = 1000.0 / (t2-now);
1002
[mCaptureSession stepBackward];
1003
double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
1004
[mCaptureSession stepForward];
1005
retval = 1000.0 / (now-t2);
1010
return 30.0; //TODO: Debugging
1013
double CvCaptureFile::getProperty(int /*property_id*/) const{
1016
if (mCaptureSession == nil) return 0;
1018
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
1023
switch (property_id) {
1024
case CV_CAP_PROP_POS_MSEC:
1025
[[mCaptureSession attributeForKey:QTMovieCurrentTimeAttribute] getValue:&t];
1026
retval = t.timeValue * 1000.0 / t.timeScale;
1028
case CV_CAP_PROP_POS_FRAMES:
1029
retval = movieFPS * getProperty(CV_CAP_PROP_POS_MSEC) / 1000;
1031
case CV_CAP_PROP_POS_AVI_RATIO:
1032
retval = (getProperty(CV_CAP_PROP_POS_MSEC)) / (movieDuration );
1034
case CV_CAP_PROP_FRAME_WIDTH:
1035
retval = movieWidth;
1037
case CV_CAP_PROP_FRAME_HEIGHT:
1038
retval = movieHeight;
1040
case CV_CAP_PROP_FPS:
1041
retval = currentFPS;
1043
case CV_CAP_PROP_FOURCC:
1051
return 1.0; //Debugging
1054
bool CvCaptureFile::setProperty(int /*property_id*/, double /*value*/) {
1057
if (mCaptureSession == nil) return false;
1059
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
1061
bool retval = false;
1066
switch (property_id) {
1067
case CV_CAP_PROP_POS_MSEC:
1068
[[mCaptureSession attributeForKey:QTMovieCurrentTimeAttribute] getValue:&t];
1069
t.timeValue = value * t.timeScale / 1000;
1070
[mCaptureSession setCurrentTime:t];
1074
case CV_CAP_PROP_POS_FRAMES:
1075
ms = (value*1000.0 -5)/ currentFPS;
1076
retval = setProperty(CV_CAP_PROP_POS_MSEC, ms);
1078
case CV_CAP_PROP_POS_AVI_RATIO:
1079
ms = value * movieDuration;
1080
retval = setProperty(CV_CAP_PROP_POS_MSEC, ms);
1082
case CV_CAP_PROP_FRAME_WIDTH:
1083
//retval = movieWidth;
1085
case CV_CAP_PROP_FRAME_HEIGHT:
1086
//retval = movieHeight;
1088
case CV_CAP_PROP_FPS:
1089
//etval = currentFPS;
1091
case CV_CAP_PROP_FOURCC:
1104
/*****************************************************************************
1106
* CvVideoWriter Implementation.
1108
* CvVideoWriter is the instantiation of a video output class
1110
*****************************************************************************/
1113
CvVideoWriter_AVFoundation::CvVideoWriter_AVFoundation(const char* filename, int fourcc,
1114
double fps, CvSize frame_size,
1117
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
1122
movieSize = frame_size;
1123
movieColor = is_color;
1124
argbimage = cvCreateImage(movieSize, IPL_DEPTH_8U, 4);
1125
path = [[[NSString stringWithCString:filename encoding:NSASCIIStringEncoding] stringByExpandingTildeInPath] retain];
1129
AVFileTypeQuickTimeMovie
1130
UTI for the QuickTime movie file format.
1131
The value of this UTI is com.apple.quicktime-movie. Files are identified with the .mov and .qt extensions.
1134
UTI for the MPEG-4 file format.
1135
The value of this UTI is public.mpeg-4. Files are identified with the .mp4 extension.
1138
UTI for the iTunes video file format.
1139
The value of this UTI is com.apple.mpeg-4-video. Files are identified with the .m4v extension.
1142
UTI for the 3GPP file format.
1143
The value of this UTI is public.3gpp. Files are identified with the .3gp, .3gpp, and .sdv extensions.
1146
NSString *fileExt =[[[path pathExtension] lowercaseString] copy];
1147
if ([fileExt isEqualToString:@"mov"] || [fileExt isEqualToString:@"qt"]){
1148
fileType = [AVFileTypeQuickTimeMovie copy];
1149
}else if ([fileExt isEqualToString:@"mp4"]){
1150
fileType = [AVFileTypeMPEG4 copy];
1151
}else if ([fileExt isEqualToString:@"m4v"]){
1152
fileType = [AVFileTypeAppleM4V copy];
1153
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
1154
}else if ([fileExt isEqualToString:@"3gp"] || [fileExt isEqualToString:@"3gpp"] || [fileExt isEqualToString:@"sdv"] ){
1155
fileType = [AVFileType3GPP copy];
1158
fileType = [AVFileTypeMPEG4 copy]; //default mp4
1163
cc[0] = fourcc & 255;
1164
cc[1] = (fourcc >> 8) & 255;
1165
cc[2] = (fourcc >> 16) & 255;
1166
cc[3] = (fourcc >> 24) & 255;
1168
int cc2 = CV_FOURCC(cc[0], cc[1], cc[2], cc[3]);
1170
std::cout << "WARNING: Didn't properly encode FourCC. Expected " << fourcc
1171
<< " but got " << cc2 << "." << std::endl;
1175
// Two codec supported AVVideoCodecH264 AVVideoCodecJPEG
1176
// On iPhone 3G H264 is not supported.
1177
if (fourcc == CV_FOURCC('J','P','E','G') || fourcc == CV_FOURCC('j','p','e','g') ||
1178
fourcc == CV_FOURCC('M','J','P','G') || fourcc == CV_FOURCC('m','j','p','g') ){
1179
codec = [AVVideoCodecJPEG copy]; // Use JPEG codec if specified, otherwise H264
1180
}else if(fourcc == CV_FOURCC('H','2','6','4') || fourcc == CV_FOURCC('a','v','c','1')){
1181
codec = [AVVideoCodecH264 copy];
1183
codec = [AVVideoCodecH264 copy]; // default canonical H264.
1187
//NSLog(@"Path: %@", path);
1189
NSError *error = nil;
1192
// Make sure the file does not already exist. Necessary to overwirte??
1194
NSFileManager *fileManager = [NSFileManager defaultManager];
1195
if ([fileManager fileExistsAtPath:path]){
1196
[fileManager removeItemAtPath:path error:&error];
1201
// Supported file types:
1202
// AVFileTypeQuickTimeMovie AVFileTypeMPEG4 AVFileTypeAppleM4V AVFileType3GPP
1204
mMovieWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
1207
//NSParameterAssert(mMovieWriter);
1209
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
1210
codec, AVVideoCodecKey,
1211
[NSNumber numberWithInt:movieSize.width], AVVideoWidthKey,
1212
[NSNumber numberWithInt:movieSize.height], AVVideoHeightKey,
1215
mMovieWriterInput = [[AVAssetWriterInput
1216
assetWriterInputWithMediaType:AVMediaTypeVideo
1217
outputSettings:videoSettings] retain];
1219
//NSParameterAssert(mMovieWriterInput);
1220
//NSParameterAssert([mMovieWriter canAddInput:mMovieWriterInput]);
1222
[mMovieWriter addInput:mMovieWriterInput];
1224
mMovieWriterAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:mMovieWriterInput sourcePixelBufferAttributes:nil];
1228
[mMovieWriter startWriting];
1229
[mMovieWriter startSessionAtSourceTime:kCMTimeZero];
1232
if(mMovieWriter.status == AVAssetWriterStatusFailed){
1233
NSLog(@"%@", [mMovieWriter.error localizedDescription]);
1234
// TODO: error handling, cleanup. Throw execption?
1242
CvVideoWriter_AVFoundation::~CvVideoWriter_AVFoundation() {
1243
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
1245
[mMovieWriterInput markAsFinished];
1246
[mMovieWriter finishWriting];
1247
[mMovieWriter release];
1248
[mMovieWriterInput release];
1249
[mMovieWriterAdaptor release];
1253
cvReleaseImage(&argbimage);
1259
bool CvVideoWriter_AVFoundation::writeFrame(const IplImage* iplimage) {
1260
NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
1262
// writer status check
1263
if (![mMovieWriterInput isReadyForMoreMediaData] || mMovieWriter.status != AVAssetWriterStatusWriting ) {
1264
NSLog(@"[mMovieWriterInput isReadyForMoreMediaData] Not ready for media data or ...");
1265
NSLog(@"mMovieWriter.status: %d. Error: %@", (int)mMovieWriter.status, [mMovieWriter.error localizedDescription]);
1270
BOOL success = FALSE;
1272
if (iplimage->height!=movieSize.height || iplimage->width!=movieSize.width){
1273
std::cout<<"Frame size does not match video size."<<std::endl;
1279
//assert(iplimage->nChannels == 3);
1280
cvCvtColor(iplimage, argbimage, CV_BGR2BGRA);
1282
//assert(iplimage->nChannels == 1);
1283
cvCvtColor(iplimage, argbimage, CV_GRAY2BGRA);
1285
//IplImage -> CGImage conversion
1286
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
1287
NSData *nsData = [NSData dataWithBytes:argbimage->imageData length:argbimage->imageSize];
1288
CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)nsData);
1289
CGImageRef cgImage = CGImageCreate(argbimage->width, argbimage->height,
1290
argbimage->depth, argbimage->depth * argbimage->nChannels, argbimage->widthStep,
1291
colorSpace, kCGImageAlphaLast|kCGBitmapByteOrderDefault,
1292
provider, NULL, false, kCGRenderingIntentDefault);
1294
//CGImage -> CVPixelBufferRef coversion
1295
CVPixelBufferRef pixelBuffer = NULL;
1296
CFDataRef cfData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
1297
int status = CVPixelBufferCreateWithBytes(NULL,
1300
kCVPixelFormatType_32BGRA,
1301
(void*)CFDataGetBytePtr(cfData),
1302
CGImageGetBytesPerRow(cgImage),
1307
if(status == kCVReturnSuccess){
1308
success = [mMovieWriterAdaptor appendPixelBuffer:pixelBuffer
1309
withPresentationTime:CMTimeMake(frameCount, movieFPS)];
1314
CVPixelBufferRelease(pixelBuffer);
1315
CGImageRelease(cgImage);
1316
CGDataProviderRelease(provider);
1317
CGColorSpaceRelease(colorSpace);
1323
//NSLog(@"Frame #%d", frameCount);
1326
NSLog(@"Frame appendPixelBuffer failed.");