When I apply a CIFilter to an image captured with the back camera the result is perfect, but when I apply the same CIFilter to an image captured with the front camera the result is wrong because the image comes out rotated. What is going wrong here?
My filter function is given below:
func ApplyFilter() -> UIImage
{
    let ciContext = CIContext(options: nil)
    let coreImage = CIImage(image: myImage!)
    let filter = CIFilter(name: "CIPhotoEffectNoir")
    filter!.setDefaults()
    filter!.setValue(coreImage, forKey: kCIInputImageKey)
    let filteredImageData = filter!.valueForKey(kCIOutputImageKey) as! CIImage
    let filteredImageRef = ciContext.createCGImage(filteredImageData, fromRect: filteredImageData.extent)
    let newImage = UIImage(CGImage: filteredImageRef)
    return newImage
}
That's because your front camera captures a flipped image, just like a mirror does. What you need to do is fix the image orientation after capturing it, before applying the filter.
Swift 3
extension UIImage {
    func fixOrientation() -> UIImage {
        // No-op if the orientation is already correct
        if self.imageOrientation == UIImageOrientation.up {
            return self
        }

        // We need to calculate the proper transformation to make the image upright.
        // We do it in 2 steps: rotate if Left/Right/Down, then flip if Mirrored.
        var transform: CGAffineTransform = CGAffineTransform.identity

        if self.imageOrientation == UIImageOrientation.down || self.imageOrientation == UIImageOrientation.downMirrored {
            transform = transform.translatedBy(x: self.size.width, y: self.size.height)
            transform = transform.rotated(by: CGFloat(Double.pi))
        }

        if self.imageOrientation == UIImageOrientation.left || self.imageOrientation == UIImageOrientation.leftMirrored {
            transform = transform.translatedBy(x: self.size.width, y: 0)
            transform = transform.rotated(by: CGFloat(Double.pi / 2.0))
        }

        if self.imageOrientation == UIImageOrientation.right || self.imageOrientation == UIImageOrientation.rightMirrored {
            transform = transform.translatedBy(x: 0, y: self.size.height)
            transform = transform.rotated(by: CGFloat(-Double.pi / 2.0))
        }

        if self.imageOrientation == UIImageOrientation.upMirrored || self.imageOrientation == UIImageOrientation.downMirrored {
            transform = transform.translatedBy(x: self.size.width, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        }

        if self.imageOrientation == UIImageOrientation.leftMirrored || self.imageOrientation == UIImageOrientation.rightMirrored {
            transform = transform.translatedBy(x: self.size.height, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        }

        // Now we draw the underlying CGImage into a new context, applying the
        // transform calculated above.
        let ctx: CGContext = CGContext(data: nil,
                                       width: Int(self.size.width),
                                       height: Int(self.size.height),
                                       bitsPerComponent: self.cgImage!.bitsPerComponent,
                                       bytesPerRow: 0,
                                       space: self.cgImage!.colorSpace!,
                                       bitmapInfo: self.cgImage!.bitmapInfo.rawValue)!
        ctx.concatenate(transform)

        if self.imageOrientation == UIImageOrientation.left ||
           self.imageOrientation == UIImageOrientation.leftMirrored ||
           self.imageOrientation == UIImageOrientation.right ||
           self.imageOrientation == UIImageOrientation.rightMirrored {
            // Width and height are swapped for the rotated orientations
            ctx.draw(self.cgImage!, in: CGRect(x: 0, y: 0, width: self.size.height, height: self.size.width))
        } else {
            ctx.draw(self.cgImage!, in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
        }

        // And now we just create a new UIImage from the drawing context and return it
        return UIImage(cgImage: ctx.makeImage()!)
    }
}
Use it as:
let myImage = picCaptured.fixOrientation()
and then apply the CIFilter to that image, as in the sketch below.
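Putting the two steps together, here is a minimal sketch (the function name applyNoirFilter is just illustrative; it assumes the fixOrientation() extension above is in scope and mirrors the filter code from the question):

import UIKit
import CoreImage

func applyNoirFilter(to captured: UIImage) -> UIImage? {
    // Fix the mirrored/rotated orientation from the front camera first.
    let upright = captured.fixOrientation()

    guard let input = CIImage(image: upright),
          let filter = CIFilter(name: "CIPhotoEffectNoir") else {
        return nil
    }
    filter.setValue(input, forKey: kCIInputImageKey)

    let context = CIContext(options: nil)
    guard let output = filter.outputImage,
          let cgImage = context.createCGImage(output, from: output.extent) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}

// Usage:
// let filtered = applyNoirFilter(to: picCaptured)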
Also check the image size before applying filters: the width should be less than the height, because portrait photos taken with the front camera can come back with a greater width than height, which may cause problems after applying filters (a quick check is sketched below). https://developer.apple.com/library/content/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Cameras/Cameras.html
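As a simple sanity check (a sketch; the picCaptured name is taken from the snippet above):

let fixed = picCaptured.fixOrientation()
// A portrait photo should report width < height once the orientation is applied.
if fixed.size.width > fixed.size.height {
    print("Image is still landscape (\(fixed.size)); check the orientation fix before filtering.")
}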