From 070952eb732e5900756490ff6a0e7637c559af4b Mon Sep 17 00:00:00 2001
From: Hiroki Ishiura
Date: Sun, 5 Jan 2020 11:55:06 +0900
Subject: [PATCH] Import old master code and refactor.

---
 .../project.pbxproj                           |  4 +-
 OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.h    |  5 +-
 OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.mm   | 90 +++++++++++++++++++
 .../OpenCVSample_iOS/ViewController.swift     | 70 ++++++++++++++-
 4 files changed, 162 insertions(+), 7 deletions(-)

diff --git a/OpenCVSample_iOS/OpenCVSample_iOS.xcodeproj/project.pbxproj b/OpenCVSample_iOS/OpenCVSample_iOS.xcodeproj/project.pbxproj
index 183cc9e..c762d19 100644
--- a/OpenCVSample_iOS/OpenCVSample_iOS.xcodeproj/project.pbxproj
+++ b/OpenCVSample_iOS/OpenCVSample_iOS.xcodeproj/project.pbxproj
@@ -67,13 +67,13 @@
 				311301CE23C023B700B7C7A5 /* AppDelegate.swift */,
 				311301D023C023B700B7C7A5 /* SceneDelegate.swift */,
 				311301D223C023B700B7C7A5 /* ViewController.swift */,
+				3113020923C0311400B7C7A5 /* OpenCV.h */,
+				3113020A23C0311400B7C7A5 /* OpenCV.mm */,
 				3113020223C02F1000B7C7A5 /* OpenCVSample_iOS-Bridging-Header.h */,
 				311301D423C023B700B7C7A5 /* Main.storyboard */,
 				311301D723C023B900B7C7A5 /* Assets.xcassets */,
 				311301D923C023B900B7C7A5 /* LaunchScreen.storyboard */,
 				311301DC23C023B900B7C7A5 /* Info.plist */,
-				3113020923C0311400B7C7A5 /* OpenCV.h */,
-				3113020A23C0311400B7C7A5 /* OpenCV.mm */,
 			);
 			path = OpenCVSample_iOS;
 			sourceTree = "<group>";
diff --git a/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.h b/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.h
index 14f3356..b1b2aaa 100644
--- a/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.h
+++ b/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.h
@@ -6,12 +6,15 @@
 //  Copyright © 2020 Hiroki Ishiura. All rights reserved.
 //
 
-#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
 
 NS_ASSUME_NONNULL_BEGIN
 
 @interface OpenCV : NSObject
 
+/// Converts a full-color image to a grayscale image using OpenCV.
++ (UIImage *)cvtColorBGR2GRAY:(UIImage *)image;
+
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.mm b/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.mm
index 4ca9112..780aa79 100644
--- a/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.mm
+++ b/OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.mm
@@ -6,8 +6,98 @@
 //  Copyright © 2020 Hiroki Ishiura. All rights reserved.
 //
 
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdocumentation"
+#import <opencv2/opencv.hpp>
+#import <opencv2/imgproc.hpp>
+#pragma clang diagnostic pop
+
+#import <UIKit/UIKit.h>
 #import "OpenCV.h"
 
+/// Converts a UIImage to a cv::Mat in BGR byte order.
+/// The orientation of the UIImage is lost.
+static void UIImageToMat(UIImage *image, cv::Mat &mat) {
+	assert(image.size.width > 0 && image.size.height > 0);
+	assert(image.CGImage != nil || image.CIImage != nil);
+
+	// Create a pixel buffer.
+	NSInteger width = image.size.width;
+	NSInteger height = image.size.height;
+	cv::Mat mat8uc4 = cv::Mat((int)height, (int)width, CV_8UC4);
+
+	// Draw all pixels into the buffer.
+	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+	if (image.CGImage) {
+		// Render using Core Graphics.
+		CGContextRef contextRef = CGBitmapContextCreate(mat8uc4.data, mat8uc4.cols, mat8uc4.rows, 8, mat8uc4.step, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
+		CGContextDrawImage(contextRef, CGRectMake(0, 0, width, height), image.CGImage);
+		CGContextRelease(contextRef);
+	} else {
+		// Render using Core Image.
+		static CIContext *context = nil; // Caching the context in a static variable is not elegant, but recreating it for every frame would be too slow.
+		if (!context) {
+			context = [CIContext contextWithOptions:@{ kCIContextUseSoftwareRenderer: @NO }];
+		}
+		CGRect bounds = CGRectMake(0, 0, width, height);
+		[context render:image.CIImage toBitmap:mat8uc4.data rowBytes:mat8uc4.step bounds:bounds format:kCIFormatRGBA8 colorSpace:colorSpace];
+	}
+	CGColorSpaceRelease(colorSpace);
+
+	// Adjust the pixel byte order from RGBA to BGR.
+	cv::Mat mat8uc3 = cv::Mat((int)height, (int)width, CV_8UC3);
+	cv::cvtColor(mat8uc4, mat8uc3, cv::COLOR_RGBA2BGR);
+
+	mat = mat8uc3;
+}
+
+/// Converts a cv::Mat to a UIImage.
+static UIImage *MatToUIImage(cv::Mat &mat) {
+
+	// Normalize the channel layout to RGB.
+	assert(mat.elemSize() == 1 || mat.elemSize() == 3);
+	cv::Mat matrgb;
+	if (mat.elemSize() == 1) {
+		cv::cvtColor(mat, matrgb, cv::COLOR_GRAY2RGB);
+	} else if (mat.elemSize() == 3) {
+		cv::cvtColor(mat, matrgb, cv::COLOR_BGR2RGB);
+	}
+
+	// Convert the raw pixel data into a CGImage-backed UIImage.
+	NSData *data = [NSData dataWithBytes:matrgb.data length:(matrgb.elemSize() * matrgb.total())];
+	CGColorSpaceRef colorSpace;
+	if (matrgb.elemSize() == 1) {
+		colorSpace = CGColorSpaceCreateDeviceGray();
+	} else {
+		colorSpace = CGColorSpaceCreateDeviceRGB();
+	}
+	CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
+	CGImageRef imageRef = CGImageCreate(matrgb.cols, matrgb.rows, 8, 8 * matrgb.elemSize(), matrgb.step.p[0], colorSpace, kCGImageAlphaNone | kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault);
+	UIImage *image = [UIImage imageWithCGImage:imageRef];
+	CGImageRelease(imageRef);
+	CGDataProviderRelease(provider);
+	CGColorSpaceRelease(colorSpace);
+
+	return image;
+}
+
+/// Restores the original image's orientation to the processed image.
+static UIImage *RestoreUIImageOrientation(UIImage *processed, UIImage *original) {
+	if (processed.imageOrientation == original.imageOrientation) {
+		return processed;
+	}
+	return [UIImage imageWithCGImage:processed.CGImage scale:1.0 orientation:original.imageOrientation];
+}
+
 @implementation OpenCV
 
++ (nonnull UIImage *)cvtColorBGR2GRAY:(nonnull UIImage *)image {
+	cv::Mat bgrMat;
+	UIImageToMat(image, bgrMat);
+	cv::Mat grayMat;
+	cv::cvtColor(bgrMat, grayMat, cv::COLOR_BGR2GRAY);
+	UIImage *grayImage = MatToUIImage(grayMat);
+	return RestoreUIImageOrientation(grayImage, image);
+}
+
 @end
diff --git a/OpenCVSample_iOS/OpenCVSample_iOS/ViewController.swift b/OpenCVSample_iOS/OpenCVSample_iOS/ViewController.swift
index a32cab8..dba384c 100644
--- a/OpenCVSample_iOS/OpenCVSample_iOS/ViewController.swift
+++ b/OpenCVSample_iOS/OpenCVSample_iOS/ViewController.swift
@@ -7,16 +7,78 @@
 //
 
 import UIKit
+import AVFoundation
 
-class ViewController: UIViewController {
+class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
 
 	@IBOutlet weak var imageView: UIImageView!
-	
+
+	var session: AVCaptureSession!
+	var device: AVCaptureDevice!
+	var output: AVCaptureVideoDataOutput!
+
 	override func viewDidLoad() {
 		super.viewDidLoad()
-		// Do any additional setup after loading the view.
+
+		// Prepare a video capturing session.
+		self.session = AVCaptureSession()
+		self.session.sessionPreset = AVCaptureSession.Preset.vga640x480 // does not work in the iOS Simulator
+		guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back) else {
+			print("no device")
+			return
+		}
+		self.device = device
+		do {
+			let input = try AVCaptureDeviceInput(device: self.device)
+			self.session.addInput(input)
+		} catch {
+			print("no device input")
+			return
+		}
+		self.output = AVCaptureVideoDataOutput()
+		self.output.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA) ]
+		let queue: DispatchQueue = DispatchQueue(label: "videocapturequeue", attributes: [])
+		self.output.setSampleBufferDelegate(self, queue: queue)
+		self.output.alwaysDiscardsLateVideoFrames = true
+		if self.session.canAddOutput(self.output) {
+			self.session.addOutput(self.output)
+		} else {
+			print("could not add a session output")
+			return
+		}
+		do {
+			try self.device.lockForConfiguration()
+			self.device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 20) // 20 fps
+			self.device.unlockForConfiguration()
+		} catch {
+			print("could not configure a device")
+			return
+		}
+
+		self.session.startRunning()
 	}
 
+	override var shouldAutorotate: Bool {
+		return false
+	}
 
-}
+	func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+		// Convert the captured image buffer to a UIImage.
+		guard let buffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
+			print("could not get a pixel buffer")
+			return
+		}
+		CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly)
+		let image = CIImage(cvPixelBuffer: buffer).oriented(CGImagePropertyOrientation.right)
+		let capturedImage = UIImage(ciImage: image)
+		CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly)
+
+		// This is a filtering sample.
+		let resultImage = OpenCV.cvtColorBGR2GRAY(capturedImage)
+		// Show the result.
+		DispatchQueue.main.async(execute: {
+			self.imageView.image = resultImage
+		})
+	}
+}
 
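
Notes (not part of the patch):

The Swift call OpenCV.cvtColorBGR2GRAY(capturedImage) only compiles if the
bridging header listed in the project exposes the Objective-C++ wrapper. The
patch does not modify OpenCVSample_iOS-Bridging-Header.h, so the following is
an assumption about what that file needs to contain:

    //  OpenCVSample_iOS-Bridging-Header.h
    //  Exposes the Objective-C++ OpenCV wrapper to Swift.
    #import "OpenCV.h"

Because viewDidLoad opens the back camera, Info.plist must also carry a camera
usage description, or iOS terminates the app on the first access to the capture
device. The key is NSCameraUsageDescription; the string below is only an
example:

    <key>NSCameraUsageDescription</key>
    <string>The camera is used to capture video frames for OpenCV processing.</string>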