How to output a CIFilter to a camera view?

I'm just starting out in Objective-C, and I'm trying to create a simple app that shows the camera view with a blur effect applied to it. I got the camera output working with the AVFoundation framework, but I have no idea how to hook up the Core Image framework: Apple's documentation is confusing to me, and searching for guides and tutorials online turns up nothing useful. Thanks in advance for the help.

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController ()

@property (strong, nonatomic) CIContext *context;

// Outlet assumed to be connected in the storyboard; used below for its frame.
@property (weak, nonatomic) IBOutlet UIImageView *imageView;

@end

@implementation ViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *stillImageOutput;

-(CIContext *)context
{
    if(!_context)
    {
        _context = [CIContext contextWithOptions:nil];
    }
    return _context;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

-(void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];

    // Configure the capture session for still-photo quality.
    session = [[AVCaptureSession alloc] init];
    [session setSessionPreset:AVCaptureSessionPresetPhoto];

    // Use the default camera as input.
    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];

    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    }

    // Show the raw camera feed in a preview layer sized to the image view.
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    CALayer *rootLayer = [[self view] layer];
    [rootLayer setMasksToBounds:YES];
    CGRect frame = self.imageView.frame;
    [previewLayer setFrame:frame];
    [previewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
    [rootLayer insertSublayer:previewLayer atIndex:0];

    // Capture stills as JPEG.
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    [stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
    if ([session canAddOutput:stillImageOutput]) {
        [session addOutput:stillImageOutput];
    }

    [session startRunning];     
}
@end
asked Apr 17 '15 by Renz Tan
1 Answer

Here's something to get you started. It's an updated version of the code from the following link:
https://gist.github.com/eladb/9662102

The trick is to use an AVCaptureVideoDataOutputSampleBufferDelegate.
With this delegate, you can use imageWithCVPixelBuffer: to construct a CIImage from your camera buffer.

Right now, though, I'm trying to figure out how to reduce the lag. I'll update as soon as I can.


Update: Latency is now minimal, and on some effects unnoticeable. Unfortunately, blur seems to be one of the slowest. You may want to look into vImage.
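
For what it's worth, here's a rough sketch of what a vImage blur could look like. This is untested and makes assumptions: the pixel buffer must be 32BGRA (see the note at the end of this answer), the helper name and kernel size are mine, and a box convolve is only an approximation of a Gaussian (running it two or three times gets visually close at a fraction of the cost):

#import <Accelerate/Accelerate.h>
#import <CoreVideo/CoreVideo.h>

// Sketch only: box-blur a 32BGRA pixel buffer in place with vImage.
// Assumes kCVPixelFormatType_32BGRA; the kernel size is arbitrary.
static void BlurPixelBuffer(CVPixelBufferRef pixelBuffer, uint32_t kernelSize)
{
    if (kernelSize % 2 == 0) kernelSize += 1; // kernel dimensions must be odd

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    vImage_Buffer src;
    src.data     = CVPixelBufferGetBaseAddress(pixelBuffer);
    src.width    = CVPixelBufferGetWidth(pixelBuffer);
    src.height   = CVPixelBufferGetHeight(pixelBuffer);
    src.rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer);

    // Convolve into a scratch buffer, then copy the result back.
    vImage_Buffer dst = src;
    dst.data = malloc(src.rowBytes * src.height);
    if (dst.data) {
        vImage_Error err = vImageBoxConvolve_ARGB8888(&src, &dst, NULL, 0, 0,
                                                      kernelSize, kernelSize,
                                                      NULL, kvImageEdgeExtend);
        if (err == kvImageNoError) {
            memcpy(src.data, dst.data, src.rowBytes * src.height);
        }
        free(dst.data);
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

Anyway, here's the full Core Image version: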


#import "ViewController.h"
#import <CoreImage/CoreImage.h>
#import <AVFoundation/AVFoundation.h>

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (strong, nonatomic) CIContext *coreImageContext;
@property (strong, nonatomic) AVCaptureSession *cameraSession;
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property (strong, nonatomic) UIView *blurCameraView;
@property (strong, nonatomic) CIFilter *filter;
@property BOOL cameraOpen;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.blurCameraView = [[UIView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    [self.view addSubview:self.blurCameraView];

    //setup filter
    self.filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [self.filter setDefaults];
    [self.filter setValue:@(3.0f) forKey:@"inputRadius"];

    [self setupCamera];
    [self openCamera];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)setupCamera
{
    // Note: kCIContextUseSoftwareRenderer forces CPU rendering; passing nil options gives the default (GPU-backed) context.
    self.coreImageContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}];

    // session
    self.cameraSession = [[AVCaptureSession alloc] init];
    [self.cameraSession beginConfiguration]; // paired with commitConfiguration below
    [self.cameraSession setSessionPreset:AVCaptureSessionPresetLow];

    // input
    AVCaptureDevice *shootingCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *shootingDevice = [AVCaptureDeviceInput deviceInputWithDevice:shootingCamera error:NULL];
    if ([self.cameraSession canAddInput:shootingDevice]) {
        [self.cameraSession addInput:shootingDevice];
    }

    // video output
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
    // frames are delivered on a background queue; UI updates are dispatched back to main below
    [self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)];
    if ([self.cameraSession canAddOutput:self.videoOutput]) {
        [self.cameraSession addOutput:self.videoOutput];
    }

    // lock the video orientation to portrait
    if (self.videoOutput.connections.count > 0) {
        AVCaptureConnection *connection = self.videoOutput.connections[0];
        connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }

    [self.cameraSession commitConfiguration];

    self.cameraOpen = NO;

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // turn buffer into an image we can manipulate
    CIImage *result = [CIImage imageWithCVPixelBuffer:imageBuffer];

    // filter
    [self.filter setValue:result forKey:@"inputImage"];

    // render the filtered image; crop to the source extent, since a blur's output extent is infinite
    CGImageRef blurredImage = [self.coreImageContext createCGImage:self.filter.outputImage fromRect:result.extent];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.blurCameraView.layer.contents = (__bridge id)blurredImage;
        CGImageRelease(blurredImage);
    });
}

- (void)openCamera {
    if (self.cameraOpen) {
        return;
    }

    self.blurCameraView.alpha = 0.0f;
    [self.cameraSession startRunning];
    [self.view layoutIfNeeded];

    // fade the filtered view in
    [UIView animateWithDuration:3.0f animations:^{
        self.blurCameraView.alpha = 1.0f;
    }];

    self.cameraOpen = YES;
}

@end
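
One more note: by default, the video data output typically delivers frames in a biplanar YUV format, which imageWithCVPixelBuffer: can consume directly. If you try the vImage sketch above, or anything else that wants raw BGRA, ask the output for 32BGRA frames instead. This is a standard AVCaptureVideoDataOutput option; you'd add it in setupCamera:

// Ask AVFoundation for 32BGRA frames instead of the default YUV format.
self.videoOutput.videoSettings = @{
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
};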
answered Dec 05 '22 by maelswarm