Skip to content

Commit

Permalink
Import old master code and refactor.
Browse files Browse the repository at this point in the history
  • Loading branch information
ura14h committed Jan 5, 2020
1 parent a597c7e commit b805748
Show file tree
Hide file tree
Showing 5 changed files with 143 additions and 16 deletions.
8 changes: 8 additions & 0 deletions OpenCVSample_OSX/OpenCVSample_OSX.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
objects = {

/* Begin PBXBuildFile section */
310A3F0E23C1730400677B30 /* OpenCL.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 310A3F0D23C1730400677B30 /* OpenCL.framework */; };
310A3F1023C1732900677B30 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 310A3F0F23C1732900677B30 /* Accelerate.framework */; };
311301EF23C0277100B7C7A5 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 311301EE23C0277100B7C7A5 /* AppDelegate.swift */; };
311301F123C0277100B7C7A5 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 311301F023C0277100B7C7A5 /* ViewController.swift */; };
311301F323C0277100B7C7A5 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 311301F223C0277100B7C7A5 /* Assets.xcassets */; };
Expand All @@ -16,6 +18,8 @@
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
310A3F0D23C1730400677B30 /* OpenCL.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenCL.framework; path = System/Library/Frameworks/OpenCL.framework; sourceTree = SDKROOT; };
310A3F0F23C1732900677B30 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
311301EB23C0277100B7C7A5 /* OpenCVSample_OSX.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = OpenCVSample_OSX.app; sourceTree = BUILT_PRODUCTS_DIR; };
311301EE23C0277100B7C7A5 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
311301F023C0277100B7C7A5 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
Expand All @@ -34,7 +38,9 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
310A3F1023C1732900677B30 /* Accelerate.framework in Frameworks */,
3113020F23C034AB00B7C7A5 /* opencv2.framework in Frameworks */,
310A3F0E23C1730400677B30 /* OpenCL.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
Expand Down Expand Up @@ -77,6 +83,8 @@
3113020C23C0347F00B7C7A5 /* Frameworks */ = {
isa = PBXGroup;
children = (
310A3F0F23C1732900677B30 /* Accelerate.framework */,
310A3F0D23C1730400677B30 /* OpenCL.framework */,
3113020E23C034AB00B7C7A5 /* opencv2.framework */,
);
name = Frameworks;
Expand Down
12 changes: 6 additions & 6 deletions OpenCVSample_OSX/OpenCVSample_OSX/Base.lproj/Main.storyboard
Original file line number Diff line number Diff line change
Expand Up @@ -709,16 +709,16 @@
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="NJ1-RH-c8j">
<rect key="frame" x="20" y="20" width="440" height="230"/>
<imageView horizontalHuggingPriority="251" verticalHuggingPriority="251" horizontalCompressionResistancePriority="250" verticalCompressionResistancePriority="250" translatesAutoresizingMaskIntoConstraints="NO" id="NJ1-RH-c8j">
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
<imageCell key="cell" refusesFirstResponder="YES" alignment="left" imageScaling="proportionallyDown" id="MDJ-bI-vM7"/>
</imageView>
</subviews>
<constraints>
<constraint firstItem="NJ1-RH-c8j" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" constant="20" symbolic="YES" id="0yE-Dl-iEa"/>
<constraint firstAttribute="trailing" secondItem="NJ1-RH-c8j" secondAttribute="trailing" constant="20" symbolic="YES" id="OPE-Ks-oK5"/>
<constraint firstItem="NJ1-RH-c8j" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" symbolic="YES" id="nPq-ED-lXR"/>
<constraint firstAttribute="bottom" secondItem="NJ1-RH-c8j" secondAttribute="bottom" constant="20" symbolic="YES" id="rQd-MI-0dK"/>
<constraint firstItem="NJ1-RH-c8j" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" id="0yE-Dl-iEa"/>
<constraint firstAttribute="trailing" secondItem="NJ1-RH-c8j" secondAttribute="trailing" id="OPE-Ks-oK5"/>
<constraint firstItem="NJ1-RH-c8j" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" id="nPq-ED-lXR"/>
<constraint firstAttribute="bottom" secondItem="NJ1-RH-c8j" secondAttribute="bottom" id="rQd-MI-0dK"/>
</constraints>
</view>
<connections>
Expand Down
5 changes: 4 additions & 1 deletion OpenCVSample_OSX/OpenCVSample_OSX/OpenCV.h
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,15 @@
// Copyright © 2020 Hiroki Ishiura. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <Cocoa/Cocoa.h>

NS_ASSUME_NONNULL_BEGIN

/// Thin Objective-C wrapper that exposes OpenCV image operations to Swift,
/// so the Swift side never has to touch C++ directly.
@interface OpenCV : NSObject

/// Converts a full-color image to a grayscale image using OpenCV.
/// The input is expected to be a color NSImage; the result is a new NSImage.
+ (NSImage *)cvtColorBGR2GRAY:(NSImage *)image;

@end

NS_ASSUME_NONNULL_END
70 changes: 70 additions & 0 deletions OpenCVSample_OSX/OpenCVSample_OSX/OpenCV.mm
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,78 @@
// Copyright © 2020 Hiroki Ishiura. All rights reserved.
//

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
#import <opencv2/opencv.hpp>
#import <opencv2/imgproc.hpp>
#pragma clang diagnostic pop

#import <Cocoa/Cocoa.h>
#import "OpenCV.h"

/// Converts an NSImage to a 3-channel BGR cv::Mat (CV_8UC3).
/// Renders the image into an RGBA bitmap via CoreGraphics, then strips the
/// alpha channel and reorders to BGR for OpenCV.
static void NSImageToMat(NSImage *image, cv::Mat &mat) {

	// Render the NSImage into an RGBA pixel buffer backed by a cv::Mat.
	NSBitmapImageRep *bitmapImageRep = [NSBitmapImageRep imageRepWithData:image.TIFFRepresentation];
	NSInteger width = bitmapImageRep.pixelsWide;
	NSInteger height = bitmapImageRep.pixelsHigh;
	CGImageRef imageRef = bitmapImageRep.CGImage;
	cv::Mat mat8uc4 = cv::Mat((int)height, (int)width, CV_8UC4);
	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
	CGContextRef contextRef = CGBitmapContextCreate(mat8uc4.data, mat8uc4.cols, mat8uc4.rows, 8, mat8uc4.step, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
	CGContextDrawImage(contextRef, CGRectMake(0, 0, width, height), imageRef);
	CGContextRelease(contextRef);
	CGColorSpaceRelease(colorSpace);

	// Convert RGBA -> BGR. BUG FIX: the Mat constructor takes (rows, cols),
	// i.e. (height, width); the original passed (width, height). cvtColor
	// reallocates the destination, which masked the bug, but the upfront
	// allocation was wrong (and wasted for non-square images).
	cv::Mat mat8uc3 = cv::Mat((int)height, (int)width, CV_8UC3);
	cv::cvtColor(mat8uc4, mat8uc3, cv::COLOR_RGBA2BGR);

	mat = mat8uc3;
}

/// Converts a cv::Mat to an NSImage.
/// Accepts 8-bit single-channel (grayscale) or 3-channel (BGR) input; either
/// way the pixels are first normalized to 3-channel RGB before being wrapped
/// in a CGImage / NSBitmapImageRep.
static NSImage *MatToNSImage(cv::Mat &mat) {

	// Only 8-bit gray (1 byte/pixel) or BGR (3 bytes/pixel) input is supported.
	assert(mat.elemSize() == 1 || mat.elemSize() == 3);
	cv::Mat matrgb;
	if (mat.elemSize() == 1) {
		cv::cvtColor(mat, matrgb, cv::COLOR_GRAY2RGB);
	} else {
		cv::cvtColor(mat, matrgb, cv::COLOR_BGR2RGB);
	}

	// matrgb is always 3-channel RGB at this point, so the original
	// "matrgb.elemSize() == 1 -> gray colorspace" branch was dead code and
	// has been removed: the colorspace is unconditionally device RGB.
	// NOTE(review): the NSData copy assumes matrgb is continuous (no row
	// padding, step == cols * 3). Mats produced by cvtColor on freshly
	// allocated data satisfy this here — confirm if inputs ever change.
	NSData *data = [NSData dataWithBytes:matrgb.data length:(matrgb.elemSize() * matrgb.total())];
	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
	CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
	CGImageRef imageRef = CGImageCreate(matrgb.cols, matrgb.rows, 8, 8 * matrgb.elemSize(), matrgb.step.p[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault);
	NSBitmapImageRep *bitmapImageRep = [[NSBitmapImageRep alloc] initWithCGImage:imageRef];
	NSImage *image = [NSImage new];
	[image addRepresentation:bitmapImageRep];
	CGImageRelease(imageRef);
	CGDataProviderRelease(provider);
	CGColorSpaceRelease(colorSpace);

	return image;
}

@implementation OpenCV

// See OpenCV.h: full-color NSImage in, grayscale NSImage out.
+ (NSImage *)cvtColorBGR2GRAY:(NSImage *)image {
	// Bring the picture into OpenCV's world as a BGR matrix.
	cv::Mat source;
	NSImageToMat(image, source);

	// Drop the color information, then hand the result back as an NSImage.
	cv::Mat grayscale;
	cv::cvtColor(source, grayscale, cv::COLOR_BGR2GRAY);
	return MatToNSImage(grayscale);
}

@end
64 changes: 55 additions & 9 deletions OpenCVSample_OSX/OpenCVSample_OSX/ViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -7,23 +7,69 @@
//

import Cocoa
import AVFoundation

/// Captures video frames from an external camera, runs each frame through the
/// OpenCV grayscale filter, and displays the result in `imageView`.
///
/// NOTE(review): this span was scraped diff residue — it contained both the
/// old and new class declarations plus leftover removed lines (the old
/// `representedObject` body) interleaved with the added capture code. The
/// post-commit class is reconstructed coherently below.
class ViewController: NSViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

	@IBOutlet weak var imageView: NSImageView!

	var session: AVCaptureSession!
	var device: AVCaptureDevice!
	var output: AVCaptureVideoDataOutput!

	override func viewDidLoad() {
		super.viewDidLoad()

		// Prepare a video capturing session.
		self.session = AVCaptureSession()
		self.session.sessionPreset = AVCaptureSession.Preset.vga640x480
		guard let device = AVCaptureDevice.default(.externalUnknown, for: .video, position: .unspecified) else {
			print("no device")
			return
		}
		self.device = device
		do {
			let input = try AVCaptureDeviceInput(device: self.device)
			self.session.addInput(input)
		} catch {
			print("no device input")
			return
		}
		// Ask for BGRA frames so downstream conversion sees a known pixel layout.
		self.output = AVCaptureVideoDataOutput()
		self.output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
		let queue = DispatchQueue(label: "videocapturequeue", attributes: [])
		self.output.setSampleBufferDelegate(self, queue: queue)
		self.output.alwaysDiscardsLateVideoFrames = true
		if self.session.canAddOutput(self.output) {
			self.session.addOutput(self.output)
		} else {
			print("could not add a session output")
			return
		}
		// activeVideoMinFrameDuration was not supported on the author's Mac;
		// the iOS frame-rate configuration was removed in this macOS port.

		self.session.startRunning()
	}

	func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
		// Convert the captured image buffer to an NSImage.
		guard let buffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
			print("could not get a pixel buffer")
			return
		}
		CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly)
		let imageRep = NSCIImageRep(ciImage: CIImage(cvImageBuffer: buffer))
		let capturedImage = NSImage(size: imageRep.size)
		capturedImage.addRepresentation(imageRep)
		CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly)

		// This is a filtering sample.
		let resultImage = OpenCV.cvtColorBGR2GRAY(capturedImage)

		// Show the result on the main thread, since this delegate runs on the
		// capture queue.
		DispatchQueue.main.async {
			self.imageView.image = resultImage
		}
	}
}

0 comments on commit b805748

Please sign in to comment.