Import old master code and refactor.
ura14h committed Jan 5, 2020
1 parent b805748 commit 070952e
Showing 4 changed files with 162 additions and 7 deletions.
4 changes: 2 additions & 2 deletions OpenCVSample_iOS/OpenCVSample_iOS.xcodeproj/project.pbxproj
@@ -67,13 +67,13 @@
311301CE23C023B700B7C7A5 /* AppDelegate.swift */,
311301D023C023B700B7C7A5 /* SceneDelegate.swift */,
311301D223C023B700B7C7A5 /* ViewController.swift */,
3113020923C0311400B7C7A5 /* OpenCV.h */,
3113020A23C0311400B7C7A5 /* OpenCV.mm */,
3113020223C02F1000B7C7A5 /* OpenCVSample_iOS-Bridging-Header.h */,
311301D423C023B700B7C7A5 /* Main.storyboard */,
311301D723C023B900B7C7A5 /* Assets.xcassets */,
311301D923C023B900B7C7A5 /* LaunchScreen.storyboard */,
311301DC23C023B900B7C7A5 /* Info.plist */,
);
path = OpenCVSample_iOS;
sourceTree = "<group>";
5 changes: 4 additions & 1 deletion OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.h
@@ -6,12 +6,15 @@
// Copyright © 2020 Hiroki Ishiura. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

@interface OpenCV : NSObject

/// Converts a full-color image to a grayscale image using OpenCV.
+ (UIImage *)cvtColorBGR2GRAY:(UIImage *)image;

@end

NS_ASSUME_NONNULL_END
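
Since OpenCV.h is exposed to Swift through OpenCVSample_iOS-Bridging-Header.h (listed in the pbxproj diff above), the wrapper can be called straight from Swift. A minimal call-site sketch, assuming the bridging header imports OpenCV.h; the asset name "photo" is a placeholder, not part of this commit:

if let photo = UIImage(named: "photo") {
	// Full-color UIImage in, grayscale UIImage out, converted by OpenCV.
	let gray = OpenCV.cvtColorBGR2GRAY(photo)
	print(gray.size)
}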
90 changes: 90 additions & 0 deletions OpenCVSample_iOS/OpenCVSample_iOS/OpenCV.mm
@@ -6,8 +6,98 @@
// Copyright © 2020 Hiroki Ishiura. All rights reserved.
//

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
#import <opencv2/opencv.hpp>
#import <opencv2/imgproc.hpp>
#pragma clang diagnostic pop

#import <UIKit/UIKit.h>
#import "OpenCV.h"

/// Converts a UIImage to a cv::Mat.
/// The orientation of the UIImage is lost in the conversion.
static void UIImageToMat(UIImage *image, cv::Mat &mat) {
assert(image.size.width > 0 && image.size.height > 0);
assert(image.CGImage != nil || image.CIImage != nil);

// Create a pixel buffer.
NSInteger width = image.size.width;
NSInteger height = image.size.height;
cv::Mat mat8uc4 = cv::Mat((int)height, (int)width, CV_8UC4);

// Draw all pixels to the buffer.
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
if (image.CGImage) {
// Render using Core Graphics.
CGContextRef contextRef = CGBitmapContextCreate(mat8uc4.data, mat8uc4.cols, mat8uc4.rows, 8, mat8uc4.step, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
CGContextDrawImage(contextRef, CGRectMake(0, 0, width, height), image.CGImage);
CGContextRelease(contextRef);
} else {
// Render using Core Image.
static CIContext* context = nil; // A 'static' here is not ideal, but reusing the CIContext avoids an expensive re-creation per frame.
if (!context) {
context = [CIContext contextWithOptions:@{ kCIContextUseSoftwareRenderer: @NO }];
}
CGRect bounds = CGRectMake(0, 0, width, height);
[context render:image.CIImage toBitmap:mat8uc4.data rowBytes:mat8uc4.step bounds:bounds format:kCIFormatRGBA8 colorSpace:colorSpace];
}
CGColorSpaceRelease(colorSpace);

// Reorder the channels from RGBA to BGR, dropping the alpha channel.
cv::Mat mat8uc3 = cv::Mat((int)height, (int)width, CV_8UC3);
cv::cvtColor(mat8uc4, mat8uc3, cv::COLOR_RGBA2BGR);

mat = mat8uc3;
}

/// Converts a cv::Mat to a UIImage.
static UIImage *MatToUIImage(cv::Mat &mat) {

// Create a pixel buffer.
assert(mat.elemSize() == 1 || mat.elemSize() == 3);
cv::Mat matrgb;
if (mat.elemSize() == 1) {
cv::cvtColor(mat, matrgb, cv::COLOR_GRAY2RGB);
} else if (mat.elemSize() == 3) {
cv::cvtColor(mat, matrgb, cv::COLOR_BGR2RGB);
}

// Package the pixel buffer as a CGImage-backed UIImage.
NSData *data = [NSData dataWithBytes:matrgb.data length:(matrgb.elemSize() * matrgb.total())];
CGColorSpaceRef colorSpace;
// After the cvtColor above, matrgb is always 3-channel, so the RGB branch is
// taken here; the grayscale branch is kept as a safeguard.
if (matrgb.elemSize() == 1) {
colorSpace = CGColorSpaceCreateDeviceGray();
} else {
colorSpace = CGColorSpaceCreateDeviceRGB();
}
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
CGImageRef imageRef = CGImageCreate(matrgb.cols, matrgb.rows, 8, 8 * matrgb.elemSize(), matrgb.step.p[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault);
UIImage *image = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);

return image;
}

/// Restores the orientation of the original image to the processed image.
static UIImage *RestoreUIImageOrientation(UIImage *processed, UIImage *original) {
if (processed.imageOrientation == original.imageOrientation) {
return processed;
}
return [UIImage imageWithCGImage:processed.CGImage scale:1.0 orientation:original.imageOrientation];
}

@implementation OpenCV

+ (nonnull UIImage *)cvtColorBGR2GRAY:(nonnull UIImage *)image {
cv::Mat bgrMat;
UIImageToMat(image, bgrMat);
cv::Mat grayMat;
cv::cvtColor(bgrMat, grayMat, cv::COLOR_BGR2GRAY);
UIImage *grayImage = MatToUIImage(grayMat);
return RestoreUIImageOrientation(grayImage, image);
}

@end
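
A quick sanity check of the conversion round trip from Swift; a sketch assuming a hypothetical portrait-oriented test asset named "portrait":

if let portrait = UIImage(named: "portrait") {
	let gray = OpenCV.cvtColorBGR2GRAY(portrait)
	// MatToUIImage drops the orientation; RestoreUIImageOrientation puts it back.
	assert(gray.imageOrientation == portrait.imageOrientation)
}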
70 changes: 66 additions & 4 deletions OpenCVSample_iOS/OpenCVSample_iOS/ViewController.swift
@@ -7,16 +7,78 @@
//

import UIKit
import AVFoundation

class ViewController: UIViewController {
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

@IBOutlet weak var imageView: UIImageView!


var session: AVCaptureSession!
var device: AVCaptureDevice!
var output: AVCaptureVideoDataOutput!

override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.

// Prepare a video capturing session.
self.session = AVCaptureSession()
self.session.sessionPreset = AVCaptureSession.Preset.vga640x480 // does not work in the iOS simulator
guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back) else {
print("no device")
return
}
self.device = device
do {
let input = try AVCaptureDeviceInput(device: self.device)
self.session.addInput(input)
} catch {
print("no device input")
return
}
self.output = AVCaptureVideoDataOutput()
self.output.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA) ]
let queue: DispatchQueue = DispatchQueue(label: "videocapturequeue", attributes: [])
self.output.setSampleBufferDelegate(self, queue: queue)
self.output.alwaysDiscardsLateVideoFrames = true
if self.session.canAddOutput(self.output) {
self.session.addOutput(self.output)
} else {
print("could not add a session output")
return
}
do {
try self.device.lockForConfiguration()
self.device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 20) // 20 fps
self.device.unlockForConfiguration()
} catch {
print("could not configure a device")
return
}

self.session.startRunning()
}
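
	// Sketch, not part of this commit: deploying to a real device also requires an
	// NSCameraUsageDescription entry in Info.plist and user consent. A hypothetical
	// helper for requesting consent before calling startRunning() could look like:
	private func requestCameraAccess(_ completion: @escaping (Bool) -> Void) {
		switch AVCaptureDevice.authorizationStatus(for: .video) {
		case .authorized:
			completion(true)
		case .notDetermined:
			AVCaptureDevice.requestAccess(for: .video, completionHandler: completion)
		default:
			completion(false)
		}
	}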

override var shouldAutorotate: Bool {
return false
}

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Convert the captured pixel buffer to a UIImage.
guard let buffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
print("could not get a pixel buffer")
return
}
CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly)
let image = CIImage(cvPixelBuffer: buffer).oriented(CGImagePropertyOrientation.right)
let capturedImage = UIImage(ciImage: image)
CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly)

// This is the sample filter: convert the full-color frame to grayscale.
let resultImage = OpenCV.cvtColorBGR2GRAY(capturedImage)

// Show the result.
DispatchQueue.main.async(execute: {
self.imageView.image = resultImage
})
}
}
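
One detail worth noting about the capture path: UIImage(ciImage:) yields an image whose cgImage is nil, which is exactly the case the Core Image branch in UIImageToMat above exists to handle. A small sketch of the distinction; the asset name "sample" is hypothetical:

let ciBacked = UIImage(ciImage: CIImage(color: .red).cropped(to: CGRect(x: 0, y: 0, width: 4, height: 4)))
assert(ciBacked.cgImage == nil)  // Core Image-backed, so UIImageToMat renders via CIContext.

let cgBacked = UIImage(named: "sample")  // Decoded from a file, normally CGImage-backed,
                                         // so UIImageToMat draws via Core Graphics instead.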
