~paparazzi-uav/paparazzi/v5.0-manual

« back to all changes in this revision

Viewing changes to sw/ext/opencv_bebop/opencv/modules/videoio/src/cap_ios_video_camera.mm

  • Committer: Paparazzi buildbot
  • Date: 2016-05-18 15:00:29 UTC
  • Revision ID: felix.ruess+docbot@gmail.com-20160518150029-e8lgzi5kvb4p7un9
Manual import commit 4b8bbb730080dac23cf816b98908dacfabe2a8ec from v5.0 branch.

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
/*
 
2
 *  cap_ios_video_camera.mm
 
3
 *  For iOS video I/O
 
4
 *  by Eduard Feicho on 29/07/12
 
5
 *  by Alexander Shishkov on 17/07/13
 
6
 *  Copyright 2012. All rights reserved.
 
7
 *
 
8
 * Redistribution and use in source and binary forms, with or without
 
9
 * modification, are permitted provided that the following conditions are met:
 
10
 *
 
11
 * 1. Redistributions of source code must retain the above copyright notice,
 
12
 *    this list of conditions and the following disclaimer.
 
13
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 
14
 *    this list of conditions and the following disclaimer in the documentation
 
15
 *    and/or other materials provided with the distribution.
 
16
 * 3. The name of the author may not be used to endorse or promote products
 
17
 *    derived from this software without specific prior written permission.
 
18
 *
 
19
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
 
20
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 
21
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 
22
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 
23
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 
24
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 
25
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 
26
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 
27
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 
28
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
29
 *
 
30
 */
 
31
 
 
32
#import "opencv2/videoio/cap_ios.h"
 
33
#include "precomp.hpp"
 
34
#import <AssetsLibrary/AssetsLibrary.h>
 
35
 
 
36
 
 
37
// Convert an angle from degrees to radians (used for CALayer rotation transforms).
static CGFloat DegreesToRadians(CGFloat degrees)
{
    return degrees * M_PI / 180;
}
 
38
 
 
39
#pragma mark - Private Interface
 
40
 
 
41
 
 
42
 
 
43
 
 
44
// Private class extension: internal state and helpers for CvVideoCamera.
@interface CvVideoCamera () {
    // Number of frames to skip before appending to the recording; set to 10 in
    // -start and decremented each frame in the sample-buffer callback.
    int recordingCountDown;
}

// Builds the AVCaptureVideoDataOutput and the custom preview layer.
- (void)createVideoDataOutput;
// Builds the AVAssetWriter pipeline used when recordVideo is enabled.
- (void)createVideoFileOutput;


// Layer that displays the processed frames (contents set per-frame).
@property (nonatomic, retain) CALayer *customPreviewLayer;
// Output delivering raw sample buffers to the delegate callback.
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;

@end
 
56
 
 
57
 
 
58
 
 
59
#pragma mark - Implementation
 
60
 
 
61
 
 
62
 
 
63
@implementation CvVideoCamera




// Explicit @synthesize kept in the file's pre-auto-synthesis style.
@synthesize delegate;
@synthesize grayscaleMode;

@synthesize customPreviewLayer;
@synthesize videoDataOutput;

@synthesize recordVideo;
@synthesize rotateVideo;
//@synthesize videoFileOutput;
@synthesize recordAssetWriterInput;
@synthesize recordPixelBufferAdaptor;
@synthesize recordAssetWriter;
 
80
 
 
81
 
 
82
 
 
83
#pragma mark - Constructors
 
84
 
 
85
// Designated initializer: chains to the superclass camera initializer and
// applies video-specific defaults (custom preview layer, no recording/rotation).
- (id)initWithParentView:(UIView*)parent;
{
    if ((self = [super initWithParentView:parent])) {
        self.useAVCaptureVideoPreviewLayer = NO;
        self.recordVideo = NO;
        self.rotateVideo = NO;
    }
    return self;
}
 
95
 
 
96
 
 
97
 
 
98
#pragma mark - Public interface
 
99
 
 
100
 
 
101
// Starts capture. When recording is enabled, removes any stale output file
// from a previous run so the AVAssetWriter can create a fresh one.
- (void)start;
{
    recordingCountDown = 10;  // skip the first frames before recording begins
    [super start];

    if (self.recordVideo == YES) {
        NSError* error = nil;
        if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
            [[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
            // Fix: only report a deletion when a file actually existed, and
            // surface removal failures instead of swallowing them.
            if (error == nil) {
                NSLog(@"[Camera] Delete file %@", [self videoFileString]);
            } else {
                NSLog(@"[Camera] Unable to delete file %@: %@", [self videoFileString], error);
            }
        }
    }
}
 
116
 
 
117
 
 
118
 
 
119
// Stops capture, tears down the video data output and its dispatch queue,
// finalizes any in-progress recording, and removes the custom preview layer.
- (void)stop;
{
    [super stop];

    self.videoDataOutput = nil;
    if (videoDataOutputQueue) {
        dispatch_release(videoDataOutputQueue);
        // Fix: clear the ivar so a second -stop (or a stop after a failed
        // start) does not release the queue twice.
        videoDataOutputQueue = NULL;
    }

    if (self.recordVideo == YES) {

        if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
            [self.recordAssetWriter finishWriting];
            NSLog(@"[Camera] recording stopped");
        } else {
            NSLog(@"[Camera] Recording Error: asset writer status is not writing");
        }

        // Drop the writer pipeline; it is rebuilt on the next start.
        self.recordAssetWriter = nil;
        self.recordAssetWriterInput = nil;
        self.recordPixelBufferAdaptor = nil;
    }

    [self.customPreviewLayer removeFromSuperlayer];
    self.customPreviewLayer = nil;
}
 
145
 
 
146
// TODO fix
 
147
// Rotates and resizes the custom preview layer to match a new interface
// orientation, compensating for the capture session's default video
// orientation. The net rotation is the sum of both contributions mod 360.
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
{

    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        // Contribution of the UI orientation.
        switch (interfaceOrientation) {
            case UIInterfaceOrientationPortrait:
                NSLog(@"to Portrait");
                rotation_angle = 270;
                break;
            case UIInterfaceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                NSLog(@"to UpsideDown");
                break;
            case UIInterfaceOrientationLandscapeLeft:
                rotation_angle = 0;
                NSLog(@"to LandscapeLeft");
                break;
            case UIInterfaceOrientationLandscapeRight:
                rotation_angle = 180;
                NSLog(@"to LandscapeRight");
                break;
            default:
                break; // leave the layer in its last known orientation
        }

        // Contribution of the capture orientation (ivar from the base class).
        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // deliberate fall-through: next case only breaks, so behavior
                // is identical to an explicit break here
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;

        // A quarter-turn swaps the layer's width and height.
        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        // NOTE(review): layer IS customPreviewLayer, so this parent-sized bounds
        // assignment is overwritten by `layer.bounds = bounds` below — likely the
        // reason for the "TODO fix" above; confirm intended behavior.
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);

        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}
 
212
 
 
213
// TODO fix
 
214
// Rotates the custom preview layer to match the current *device* orientation
// (ivar from the base class), compensating for the capture session's default
// video orientation. Same scheme as -adjustLayoutToInterfaceOrientation:.
- (void)layoutPreviewLayer;
{
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        // Contribution of the physical device orientation.
        switch (currentDeviceOrientation) {
            case UIDeviceOrientationPortrait:
                rotation_angle = 270;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                break;
            case UIDeviceOrientationLandscapeLeft:
                NSLog(@"left");
                rotation_angle = 180;
                break;
            case UIDeviceOrientationLandscapeRight:
                NSLog(@"right");
                rotation_angle = 0;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            default:
                break; // leave the layer in its last known orientation
        }

        // Contribution of the capture orientation (ivar from the base class).
        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // deliberate fall-through: next case only breaks, so behavior
                // is identical to an explicit break here
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;

        // A quarter-turn swaps the layer's width and height.
        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}
 
276
 
 
277
#pragma mark - Private Interface
 
278
 
 
279
// Builds the AVCaptureVideoDataOutput (pixel format per grayscaleMode), applies
// FPS/mirroring/orientation settings on the capture device, creates the custom
// preview layer, and installs self as sample-buffer delegate on a serial queue.
- (void)createVideoDataOutput;
{
    // Make a video data output
    self.videoDataOutput = [AVCaptureVideoDataOutput new];

    // In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the
    // graylevel intensity values (Y component); in color mode BGRA is used.
    OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.videoDataOutput.videoSettings  = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
                                                                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // discard if the data output queue is blocked (as we process the still image)
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
        [self.captureSession addOutput:self.videoDataOutput];
    }
    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];


    // set default FPS
    // Fix: use firstObject (nil-safe) instead of objectAtIndex:0, and check the
    // BOOL result of lockForConfiguration: rather than the error pointer.
    AVCaptureDeviceInput *currentInput = [self.captureSession.inputs firstObject];
    AVCaptureDevice *device = currentInput.device;

    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        AVFrameRateRange *range = [device.activeFormat.videoSupportedFrameRateRanges firstObject];
        float maxRate = range.maxFrameRate;
        if (maxRate > self.defaultFPS - 1) {
            [device setActiveVideoMinFrameDuration:CMTimeMake(1, self.defaultFPS)];
            [device setActiveVideoMaxFrameDuration:CMTimeMake(1, self.defaultFPS)];
            NSLog(@"[Camera] FPS set to %d", self.defaultFPS);
        } else {
            NSLog(@"[Camera] unable to set defaultFPS at %d FPS, max is %f FPS", self.defaultFPS, maxRate);
        }
        [device unlockForConfiguration];
    } else {
        NSLog(@"[Camera] unable to set defaultFPS: %@", error);
    }

    // set video mirroring for front camera (more intuitive)
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
        if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
        } else {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
        }
    }

    // set default video orientation
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
    }


    // create a custom preview layer
    self.customPreviewLayer = [CALayer layer];
    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    [self layoutPreviewLayer];

    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];


    NSLog(@"[Camera] created AVCaptureVideoDataOutput");
}
 
351
 
 
352
 
 
353
 
 
354
// Builds the recording pipeline: an AVAssetWriterInput (H.264, image-sized),
// a pixel-buffer adaptor matching the capture pixel format, and the
// AVAssetWriter targeting the temporary output file.
- (void)createVideoFileOutput;
{
    /* Video File Output in H.264, via AVAssetWriter */
    NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);

    NSDictionary *outputSettings
     = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
                                                  [NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
                                                  AVVideoCodecH264, AVVideoCodecKey,
                                                  nil
     ];


    self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];


    // Adaptor pixel format must match the frames produced by the capture path.
    int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.recordPixelBufferAdaptor =
               [[AVAssetWriterInputPixelBufferAdaptor alloc]
                    initWithAssetWriterInput:self.recordAssetWriterInput
                    sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey, nil]];

    NSError* error = nil;
    NSLog(@"Create AVAssetWriter with url: %@", [self videoFileURL]);
    self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self videoFileURL]
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    // Fix: check the returned writer (not the error pointer) and bail out on
    // failure instead of configuring a nil writer and logging success.
    if (self.recordAssetWriter == nil) {
        NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
        return;
    }

    [self.recordAssetWriter addInput:self.recordAssetWriterInput];
    self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;

    NSLog(@"[Camera] created AVAssetWriter");
}
 
391
 
 
392
 
 
393
// Builds the capture outputs: always the live video data output, plus the
// file-writer pipeline when recording is enabled.
- (void)createCaptureOutput;
{
    [self createVideoDataOutput];
    if (self.recordVideo) {
        [self createVideoFileOutput];
    }
}
 
400
 
 
401
// Attaches the custom preview layer to the parent view's layer tree.
- (void)createCustomVideoPreview;
{
    CALayer *parentLayer = self.parentView.layer;
    [parentLayer addSublayer:self.customPreviewLayer];
}
 
405
 
 
406
// Renders a CGImage into a newly created 32ARGB CVPixelBuffer (used to feed
// the AVAssetWriterInputPixelBufferAdaptor while recording).
// The caller owns the returned buffer and must CVPixelBufferRelease it.
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{

    CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    // Fix: pass the attributes with __bridge (no ownership transfer). The
    // previous CFBridgingRetain handed ownership to CF without a matching
    // CFRelease, leaking the options dictionary on every recorded frame.
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height,  kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);


    // Draw the CGImage directly into the pixel buffer's backing memory.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
 
438
 
 
439
#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate
 
440
 
 
441
 
 
442
// Per-frame callback: wraps the CVPixelBuffer in a cv::Mat (no copy), hands it
// to the delegate's processImage:, converts the (possibly modified) Mat back to
// a CGImage, pushes it to the preview layer, and appends it to the recording
// when enabled. Runs on the serial videoDataOutputQueue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    (void)captureOutput;
    (void)connection;
    if (self.delegate) {

        // convert from Core Media to Core Video
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        void* bufferAddress;
        size_t width;
        size_t height;
        size_t bytesPerRow;

        CGColorSpaceRef colorSpace;
        CGContextRef context;

        int format_opencv;

        OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {

            // Grayscale mode: view only the Y plane as 8-bit single-channel.
            format_opencv = CV_8UC1;

            bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
            height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

        } else { // expect kCVPixelFormatType_32BGRA

            format_opencv = CV_8UC4;

            bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
            width = CVPixelBufferGetWidth(imageBuffer);
            height = CVPixelBufferGetHeight(imageBuffer);
            bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

        }

        // delegate image processing to the delegate; the Mat aliases the
        // capture buffer, so in-place edits are reflected directly
        cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);

        CGImage* dstImage;

        if ([self.delegate respondsToSelector:@selector(processImage:)]) {
            [self.delegate processImage:image];
        }

        // check if matrix data pointer or dimensions were changed by the delegate
        bool iOSimage = false;
        if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
            iOSimage = true;
        }


        // (create color space, create graphics context, render buffer)
        CGBitmapInfo bitmapInfo;

        // basically we decide if it's a grayscale, rgb or rgba image
        if (image.channels() == 1) {
            colorSpace = CGColorSpaceCreateDeviceGray();
            bitmapInfo = kCGImageAlphaNone;
        } else if (image.channels() == 3) {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaNone;
            // capture buffers are little-endian; delegate-replaced Mats are not
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaPremultipliedFirst;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        }

        if (iOSimage) {
            // Zero-copy path: render straight from the locked capture buffer.
            context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
            dstImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
        } else {

            // Delegate replaced the Mat: copy its bytes into a CGImage.
            NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
            CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

            // Creating CGImage from cv::Mat
            dstImage = CGImageCreate(image.cols,                                 // width
                                     image.rows,                                 // height
                                     8,                                          // bits per component
                                     8 * image.elemSize(),                       // bits per pixel
                                     image.step,                                 // bytesPerRow
                                     colorSpace,                                 // colorspace
                                     bitmapInfo,                                 // bitmap info
                                     provider,                                   // CGDataProviderRef
                                     NULL,                                       // decode
                                     false,                                      // should interpolate
                                     kCGRenderingIntentDefault                   // intent
                                     );

            CGDataProviderRelease(provider);
        }


        // render buffer
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customPreviewLayer.contents = (__bridge id)dstImage;
        });


        recordingCountDown--;
        if (self.recordVideo == YES && recordingCountDown < 0) {
            lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
//                      CMTimeShow(lastSampleTime);
            if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                [self.recordAssetWriter startWriting];
                [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
                if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                    NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
                    // Fix: the early return previously leaked dstImage and
                    // colorSpace and left the pixel buffer base address locked.
                    CGImageRelease(dstImage);
                    CGColorSpaceRelease(colorSpace);
                    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
                    return;
                } else {
                    NSLog(@"[Camera] Video recording started");
                }
            }

            if (self.recordAssetWriterInput.readyForMoreMediaData) {
                CVImageBufferRef pixelBuffer = [self pixelBufferFromCGImage:dstImage];
                if (! [self.recordPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                  withPresentationTime:lastSampleTime] ) {
                    NSLog(@"Video Writing Error");
                }
                if (pixelBuffer != nullptr)
                    CVPixelBufferRelease(pixelBuffer);
            }

        }


        // cleanup
        CGImageRelease(dstImage);

        CGColorSpaceRelease(colorSpace);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}
 
593
 
 
594
 
 
595
// Re-applies the preview layer layout after an orientation change, but only
// when automatic rotation is enabled.
- (void)updateOrientation;
{
    if (!self.rotateVideo) {
        return;
    }
    NSLog(@"rotate..");
    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    [self layoutPreviewLayer];
}
 
604
 
 
605
 
 
606
// Copies the recorded movie into the user's Saved Photos album, if recording
// was enabled and the file is album-compatible. Best-effort: completion
// results are intentionally ignored.
- (void)saveVideo;
{
    if (!self.recordVideo) {
        return;
    }

    NSURL *movieURL = [self videoFileURL];
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:movieURL]) {
        [library writeVideoAtPathToSavedPhotosAlbum:movieURL
                                    completionBlock:^(NSURL *assetURL, NSError *error) {
                                        (void)assetURL;
                                        (void)error;
                                    }];
    }
}
 
618
 
 
619
 
 
620
// File URL of the temporary output movie.
// Fix (consistency): delegate path construction to -videoFileString instead of
// duplicating the NSTemporaryDirectory()+"output.mov" concatenation, so the
// two accessors cannot drift apart.
- (NSURL *)videoFileURL;
{
    NSString *outputPath = [self videoFileString];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSLog(@"file exists");
    }
    return outputURL;
}
 
630
 
 
631
 
 
632
 
 
633
// Path of the temporary movie file used for recording: <tmp-dir>output.mov
- (NSString *)videoFileString;
{
    return [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
}
 
638
 
 
639
@end