/*
 * cap_ios_video_camera.mm
 *
 * by Eduard Feicho on 29/07/12
 * by Alexander Shishkov on 17/07/13
 * Copyright 2012. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
32
#import "opencv2/videoio/cap_ios.h"
33
#include "precomp.hpp"
34
#import <AssetsLibrary/AssetsLibrary.h>
/// Converts an angle in degrees to radians; used to build the
/// CGAffineTransform rotations applied to the custom preview layer.
static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;}
#pragma mark - Private Interface

// Class extension: private state and helpers of CvVideoCamera.
@interface CvVideoCamera () {
    // Number of frames still to be skipped before recording starts
    // (set to 10 in -start, decremented per frame in the capture callback).
    int recordingCountDown;
}

- (void)createVideoDataOutput;
- (void)createVideoFileOutput;

// Layer used to render the processed frames (instead of the standard
// AVCaptureVideoPreviewLayer, which is disabled in the initializer).
@property (nonatomic, retain) CALayer *customPreviewLayer;
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;

@end
#pragma mark - Implementation

@implementation CvVideoCamera

@synthesize grayscaleMode;

@synthesize customPreviewLayer;
@synthesize videoDataOutput;

@synthesize recordVideo;
@synthesize rotateVideo;
//@synthesize videoFileOutput;
@synthesize recordAssetWriterInput;
@synthesize recordPixelBufferAdaptor;
@synthesize recordAssetWriter;
#pragma mark - Constructors

/// Initializes the camera attached to `parent`. The standard
/// AVCaptureVideoPreviewLayer is disabled (a custom CALayer is used instead);
/// video recording and rotation are off by default.
- (id)initWithParentView:(UIView*)parent;
{
    self = [super initWithParentView:parent];
    if (self) {
        self.useAVCaptureVideoPreviewLayer = NO;
        self.recordVideo = NO;
        self.rotateVideo = NO;
    }
    return self;
}
#pragma mark - Public interface

/// Starts capture. Resets the recording warm-up countdown and, when video
/// recording is enabled, deletes any leftover output movie from a previous run.
/// NOTE(review): signature and braces restored after lossy extraction —
/// verify against upstream cap_ios_video_camera.mm.
- (void)start;
{
    recordingCountDown = 10;
    [super start];

    if (self.recordVideo == YES) {
        NSError* error = nil;
        if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
            [[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
        }
        if (error == nil) {
            NSLog(@"[Camera] Delete file %@", [self videoFileString]);
        }
    }
}
/// Stops capture and tears everything down: releases the data output and its
/// dispatch queue, finalizes the recording (finishing the movie file if a
/// write was in progress) and removes the custom preview layer.
/// NOTE(review): signature and braces restored after lossy extraction —
/// verify against upstream cap_ios_video_camera.mm.
- (void)stop;
{
    [super stop];

    self.videoDataOutput = nil;
    if (videoDataOutputQueue) {
        dispatch_release(videoDataOutputQueue);
    }

    if (self.recordVideo == YES) {

        if (self.recordAssetWriter) {
            // Only a writer in the Writing state can be finished cleanly.
            if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
                [self.recordAssetWriter finishWriting];
                NSLog(@"[Camera] recording stopped");
            } else {
                NSLog(@"[Camera] Recording Error: asset writer status is not writing");
            }
        }

        self.recordAssetWriter = nil;
        self.recordAssetWriterInput = nil;
        self.recordPixelBufferAdaptor = nil;
    }

    [self.customPreviewLayer removeFromSuperlayer];
    self.customPreviewLayer = nil;
}
/// Re-orients the custom preview layer for a new *interface* orientation.
/// The rotation is the interface-orientation angle plus an offset for the
/// orientation the capture session was configured with; when the combined
/// rotation is 90/270 degrees the layer bounds are transposed.
/// NOTE(review): missing `break;`s / case bodies restored after lossy
/// extraction — verify against upstream cap_ios_video_camera.mm.
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
{
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        switch (interfaceOrientation) {
            case UIInterfaceOrientationPortrait:
                NSLog(@"to Portrait");
                rotation_angle = 270;
                break;
            case UIInterfaceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                NSLog(@"to UpsideDown");
                break;
            case UIInterfaceOrientationLandscapeLeft:
                rotation_angle = 0;
                NSLog(@"to LandscapeLeft");
                break;
            case UIInterfaceOrientationLandscapeRight:
                rotation_angle = 180;
                NSLog(@"to LandscapeRight");
                break;
            default:
                break; // leave the layer in its last known orientation
        }

        // Offset by the orientation the capture session delivers frames in.
        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90; // fall through: LandscapeLeft adds 0
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
        }
        rotation_angle = rotation_angle % 360;

        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);

        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}
/// Re-orients the custom preview layer according to the current *device*
/// orientation (FaceUp/FaceDown keep the last known orientation), combined
/// with the session's default video orientation.
/// NOTE(review): missing `break;`s / case bodies restored after lossy
/// extraction — verify against upstream cap_ios_video_camera.mm.
- (void)layoutPreviewLayer;
{
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        switch (currentDeviceOrientation) {
            case UIDeviceOrientationPortrait:
                rotation_angle = 270;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                break;
            case UIDeviceOrientationLandscapeLeft:
                rotation_angle = 180;
                break;
            case UIDeviceOrientationLandscapeRight:
                rotation_angle = 0;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            default:
                break; // leave the layer in its last known orientation
        }

        // Offset by the orientation the capture session delivers frames in.
        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90; // fall through: LandscapeLeft adds 0
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
        }
        rotation_angle = rotation_angle % 360;

        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}
#pragma mark - Private Interface

/// Configures the AVCaptureVideoDataOutput: pixel format (YUV 4:2:0 for
/// grayscale mode, BGRA otherwise), frame rate, front-camera mirroring,
/// default video orientation, the custom preview layer, and the serial
/// dispatch queue on which sample buffers are delivered.
- (void)createVideoDataOutput;
{
    // Make a video data output
    self.videoDataOutput = [AVCaptureVideoDataOutput new];

    // In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the graylevel intensity values (Y component)
    // In color mode we, BGRA format is used
    OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
                                                                     forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // discard if the data output queue is blocked (as we process the still image)
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
        [self.captureSession addOutput:self.videoDataOutput];
    }
    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    // set requested frame rate on the device (bounded by its max supported rate)
    AVCaptureDeviceInput *currentInput = [self.captureSession.inputs objectAtIndex:0];
    AVCaptureDevice *device = currentInput.device;

    NSError *error = nil;
    [device lockForConfiguration:&error];

    float maxRate = ((AVFrameRateRange*) [device.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0]).maxFrameRate;
    if (maxRate > self.defaultFPS - 1 && error == nil) {
        [device setActiveVideoMinFrameDuration:CMTimeMake(1, self.defaultFPS)];
        [device setActiveVideoMaxFrameDuration:CMTimeMake(1, self.defaultFPS)];
        NSLog(@"[Camera] FPS set to %d", self.defaultFPS);
    } else {
        NSLog(@"[Camera] unable to set defaultFPS at %d FPS, max is %f FPS", self.defaultFPS, maxRate);
    }

    if (error != nil) {
        NSLog(@"[Camera] unable to set defaultFPS: %@", error);
    }

    [device unlockForConfiguration];

    // set video mirroring for front camera (more intuitive)
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
        if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
        } else {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
        }
    }

    // set default video orientation
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
    }

    // create a custom preview layer
    self.customPreviewLayer = [CALayer layer];
    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    [self layoutPreviewLayer];

    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];

    NSLog(@"[Camera] created AVCaptureVideoDataOutput");
}
/// Sets up the H.264 recording pipeline: an AVAssetWriterInput sized to the
/// current frame dimensions, a pixel-buffer adaptor matching the capture pixel
/// format, and an AVAssetWriter targeting [self videoFileURL] (MPEG-4 file).
- (void)createVideoFileOutput;
{
    /* Video File Output in H.264, via AVAsserWriter */
    NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);

    NSDictionary *outputSettings
        = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
                                                     [NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
                                                     AVVideoCodecH264, AVVideoCodecKey,
                                                     nil];

    self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];

    // Keep the recording buffers in the same pixel format as the capture path.
    int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.recordPixelBufferAdaptor =
        [[AVAssetWriterInputPixelBufferAdaptor alloc]
            initWithAssetWriterInput:self.recordAssetWriterInput
            sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey, nil]];

    NSError* error = nil;
    NSLog(@"Create AVAssetWriter with url: %@", [self videoFileURL]);
    self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self videoFileURL]
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    if (error != nil) {
        NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
    }

    [self.recordAssetWriter addInput:self.recordAssetWriterInput];
    self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;

    NSLog(@"[Camera] created AVAssetWriter");
}
/// Creates the capture outputs: always the video data output, plus the file
/// recording pipeline when recordVideo is enabled.
- (void)createCaptureOutput;
{
    [self createVideoDataOutput];
    if (self.recordVideo == YES) {
        [self createVideoFileOutput];
    }
}
/// Inserts the custom preview layer into the parent view's layer hierarchy.
- (void)createCustomVideoPreview;
{
    [self.parentView.layer addSublayer:self.customPreviewLayer];
}
/// Renders a CGImage into a newly created 32-bit ARGB CVPixelBuffer, used to
/// feed processed frames to the AVAssetWriter pixel-buffer adaptor.
/// The caller owns the returned buffer and must CVPixelBufferRelease it.
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
    CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    // __bridge (no ownership transfer): CVPixelBufferCreate copies the
    // attributes it needs. The previous CFBridgingRetain leaked the dictionary
    // on every call, since no matching CFRelease was ever issued.
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    // Draw the CGImage straight into the pixel buffer's backing memory.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate

/// Per-frame callback. Wraps the camera pixel buffer in a cv::Mat without
/// copying, lets the delegate process it, renders the result into the custom
/// preview layer on the main queue, and appends the frame to the movie file
/// when recording is active (after the warm-up countdown has elapsed).
/// NOTE(review): declarations/braces restored after lossy extraction —
/// verify against upstream cap_ios_video_camera.mm.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    (void)captureOutput;
    (void)connection;
    if (self.delegate) {

        // convert from Core Media to Core Video
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        void* bufferAddress;
        size_t width;
        size_t height;
        size_t bytesPerRow;

        CGColorSpaceRef colorSpace;
        CGContextRef context;

        int format_opencv;

        OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
            // Grayscale mode: use the luma (Y) plane directly.
            format_opencv = CV_8UC1;

            bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
            height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

        } else { // expect kCVPixelFormatType_32BGRA

            format_opencv = CV_8UC4;

            bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
            width = CVPixelBufferGetWidth(imageBuffer);
            height = CVPixelBufferGetHeight(imageBuffer);
            bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        }

        // delegate image processing to the delegate
        cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);

        CGImage* dstImage;

        if ([self.delegate respondsToSelector:@selector(processImage:)]) {
            [self.delegate processImage:image];
        }

        // check if matrix data pointer or dimensions were changed by the delegate
        bool iOSimage = false;
        if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
            iOSimage = true;
        }

        // (create color space, create graphics context, render buffer)
        CGBitmapInfo bitmapInfo;

        // basically we decide if it's a grayscale, rgb or rgba image
        if (image.channels() == 1) {
            colorSpace = CGColorSpaceCreateDeviceGray();
            bitmapInfo = kCGImageAlphaNone;
        } else if (image.channels() == 3) {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaNone;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaPremultipliedFirst;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        }

        if (iOSimage) {
            // Delegate worked in place: render straight from the camera buffer.
            context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
            dstImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
        } else {
            // Delegate replaced the matrix: copy its data into a new CGImage.
            NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
            CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

            // Creating CGImage from cv::Mat
            dstImage = CGImageCreate(image.cols,                              // width
                                     image.rows,                              // height
                                     8,                                       // bits per component
                                     8 * image.elemSize(),                    // bits per pixel
                                     image.step,                              // bytesPerRow
                                     colorSpace,                              // colorspace
                                     bitmapInfo,                              // bitmap info
                                     provider,                                // CGDataProviderRef
                                     NULL,                                    // decode
                                     false,                                   // should interpolate
                                     kCGRenderingIntentDefault                // intent
                                     );

            CGDataProviderRelease(provider);
        }

        // render buffer on the main thread (layer contents must be set there)
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customPreviewLayer.contents = (__bridge id)dstImage;
        });

        recordingCountDown--;
        if (self.recordVideo == YES && recordingCountDown < 0) {
            lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
//            CMTimeShow(lastSampleTime);
            if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                [self.recordAssetWriter startWriting];
                [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
                if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                    NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
                } else {
                    NSLog(@"[Camera] Video recording started");
                }
            }

            if (self.recordAssetWriterInput.readyForMoreMediaData) {
                CVImageBufferRef pixelBuffer = [self pixelBufferFromCGImage:dstImage];
                if (! [self.recordPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                  withPresentationTime:lastSampleTime] ) {
                    NSLog(@"Video Writing Error");
                }
                if (pixelBuffer != nullptr)
                    CVPixelBufferRelease(pixelBuffer);
            }
        }

        // cleanup
        CGImageRelease(dstImage);

        CGColorSpaceRelease(colorSpace);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}
/// Called on device orientation changes; re-sizes and re-lays-out the custom
/// preview layer when rotateVideo is enabled.
- (void)updateOrientation;
{
    if (self.rotateVideo == YES)
    {
        NSLog(@"rotate..");
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
        [self layoutPreviewLayer];
    }
}
/// Copies the recorded movie file into the device's saved-photos album.
/// No-op when video recording is disabled.
/// NOTE(review): signature restored after lossy extraction — verify against
/// upstream cap_ios_video_camera.mm.
- (void)saveVideo;
{
    if (self.recordVideo == NO) {
        return;
    }

    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:[self videoFileURL]]) {
        [library writeVideoAtPathToSavedPhotosAlbum:[self videoFileURL]
                                    completionBlock:^(NSURL *assetURL, NSError *error){ (void)assetURL; (void)error; }];
    }
}
/// Returns the file URL of the temporary movie output
/// (<NSTemporaryDirectory()>/output.mov); logs when the file already exists.
- (NSURL *)videoFileURL;
{
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSLog(@"file exists");
    }
    return outputURL;
}
633
- (NSString *)videoFileString;
635
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];