Adding Active Liveness Metadata #265

Merged · 7 commits · Dec 13, 2024
Changes from 3 commits
8 changes: 7 additions & 1 deletion CHANGELOG.md
@@ -1,9 +1,15 @@
# Release Notes

## Unreleased

* Fixed missing idType on Document Verification Jobs

## 10.2.17
### Added skipApiSubmission: Whether to skip API submission to SmileID and return only captured images on SmartSelfie enrollment, SmartSelfie authentication, Document Verification and Enhanced DocV

* Added skipApiSubmission: Whether to skip API submission to SmileID and return only captured images on SmartSelfie enrollment, SmartSelfie authentication, Document Verification and Enhanced DocV

## 10.2.16

### Fixed
* Clear images on retry or start capture with the same jobId

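The skipApiSubmission entry above describes behaviour but not a call site. As a rough sketch only, a flag like this typically decides whether a capture flow submits a job to SmileID or simply hands the captured images back to the caller; the CaptureConfig and CaptureResult types below are hypothetical illustrations, not the SDK's actual API surface.

```swift
import Foundation

// Hypothetical types for illustration only; not the SmileID SDK API.
struct CaptureConfig {
    /// When true, skip the API submission and return only the captured images.
    var skipApiSubmission: Bool = false
}

enum CaptureResult {
    case submitted(jobId: String)                 // a job was sent to SmileID
    case imagesOnly(selfie: URL, liveness: [URL]) // images returned locally, nothing submitted
}

func handle(_ result: CaptureResult) {
    switch result {
    case .submitted(let jobId):
        print("Submitted job \(jobId) to SmileID")
    case .imagesOnly(let selfie, let liveness):
        print("Captured selfie at \(selfie) plus \(liveness.count) liveness images")
    }
}
```

Keeping submission behind a single flag lets integrators run capture-only flows (for example, submitting from their own backend later) without changing the capture UI; per the changelog, the flag applies to SmartSelfie enrollment, SmartSelfie authentication, Document Verification, and Enhanced DocV.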
101 changes: 66 additions & 35 deletions Sources/SmileID/Classes/Camera/CameraManager.swift
@@ -1,5 +1,5 @@
import Foundation
import AVFoundation
import Foundation
import SwiftUI

class CameraManager: NSObject, ObservableObject {
@@ -21,7 +21,9 @@ class CameraManager: NSObject, ObservableObject {
@Published var sampleBuffer: CVPixelBuffer?
@Published var capturedImage: Data?

var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher { $sampleBuffer }
var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher {
$sampleBuffer
}
var capturedImagePublisher: Published<Data?>.Publisher { $capturedImage }
let videoOutputQueue = DispatchQueue(
label: "com.smileidentity.videooutput",
@@ -50,7 +52,8 @@ class CameraManager: NSObject, ObservableObject {
self.orientation = orientation
super.init()
sessionQueue.async {
self.videoOutput.setSampleBufferDelegate(self, queue: self.videoOutputQueue)
self.videoOutput.setSampleBufferDelegate(
self, queue: self.videoOutputQueue)
}
checkPermissions()
}
@@ -62,28 +65,28 @@ class CameraManager: NSObject, ObservableObject {
}

private func checkPermissions() {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .notDetermined:
sessionQueue.suspend()
AVCaptureDevice.requestAccess(for: .video) { authorized in
if !authorized {
self.status = .unauthorized
self.set(error: .deniedAuthorization)
}
self.sessionQueue.resume()
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .notDetermined:
sessionQueue.suspend()
AVCaptureDevice.requestAccess(for: .video) { authorized in
if !authorized {
self.status = .unauthorized
self.set(error: .deniedAuthorization)
}
self.sessionQueue.resume()
}
case .restricted:
status = .unauthorized
set(error: .restrictedAuthorization)
case .denied:
status = .unauthorized
set(error: .deniedAuthorization)
case .authorized:
break
@unknown default:
status = .unauthorized
set(error: .unknownAuthorization)
}
case .restricted:
status = .unauthorized
set(error: .restrictedAuthorization)
case .denied:
status = .unauthorized
set(error: .deniedAuthorization)
case .authorized:
break
@unknown default:
status = .unauthorized
set(error: .unknownAuthorization)
}
}

private func addCameraInput(position: AVCaptureDevice.Position) {
@@ -92,7 +95,8 @@ class CameraManager: NSObject, ObservableObject {
status = .failed
return
}
cameraName = camera.uniqueID

getCameraName(for: camera)

do {
let cameraInput = try AVCaptureDeviceInput(device: camera)
@@ -108,25 +112,45 @@
}
}

private func getCameraForPosition(_ position: AVCaptureDevice.Position) -> AVCaptureDevice? {
private func getCameraName(for camera: AVCaptureDevice) {
var manufacturer: String
if #available(iOS 14.0, *) {
manufacturer = camera.manufacturer
} else {
manufacturer = "Apple Inc."
}
cameraName =
"\(manufacturer) \(camera.localizedName) \(camera.deviceType.rawValue)"
}

private func getCameraForPosition(_ position: AVCaptureDevice.Position)
-> AVCaptureDevice?
{
switch position {
case .front:
return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
return AVCaptureDevice.default(
.builtInWideAngleCamera, for: .video, position: .front)
case .back:
return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
return AVCaptureDevice.default(
.builtInWideAngleCamera, for: .video, position: .back)
default:
return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
return AVCaptureDevice.default(
.builtInWideAngleCamera, for: .video, position: .front)
}
}

private func configureVideoOutput() {
session.removeOutput(videoOutput)
session.removeOutput(photoOutput)
if session.canAddOutput(videoOutput), session.canAddOutput(photoOutput) {
if session.canAddOutput(videoOutput), session.canAddOutput(photoOutput)
{
session.addOutput(photoOutput)
session.addOutput(videoOutput)
videoOutput.videoSettings =
[kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
[
kCVPixelBufferPixelFormatTypeKey as String:
kCVPixelFormatType_32BGRA
]
if orientation == .portrait {
let videoConnection = videoOutput.connection(with: .video)
videoConnection?.videoOrientation = .portrait
@@ -141,15 +165,19 @@
checkPermissions()
sessionQueue.async { [self] in
if !session.isRunning {
if let currentInput = session.inputs.first as? AVCaptureDeviceInput {
if let currentInput = session.inputs.first
as? AVCaptureDeviceInput
{
session.removeInput(currentInput)
}
addCameraInput(position: position)
configureVideoOutput()
session.startRunning()
} else {
session.beginConfiguration()
if let currentInput = session.inputs.first as? AVCaptureDeviceInput {
if let currentInput = session.inputs.first
as? AVCaptureDeviceInput
{
session.removeInput(currentInput)
}
addCameraInput(position: position)
@@ -174,7 +202,9 @@
}

internal func capturePhoto() {
guard let connection = photoOutput.connection(with: .video), connection.isEnabled, connection.isActive else {
guard let connection = photoOutput.connection(with: .video),
connection.isEnabled, connection.isActive
else {
set(error: .cameraUnavailable)
print("Camera unavailable")
return
@@ -191,7 +221,8 @@ extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate {
didOutput sampleBuffer: CMSampleBuffer,
from connection: AVCaptureConnection
) {
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
else { return }
self.sampleBuffer = imageBuffer
}
}
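The new getCameraName(for:) in this diff concatenates the device manufacturer (available from iOS 14), the localized name, and the device type into the cameraName string, presumably feeding the active liveness metadata this PR adds. A standalone sketch of the same formatting logic, using a hypothetical describeCamera helper rather than the class method, would look like this:

```swift
import AVFoundation

// Hypothetical free function mirroring the naming scheme in getCameraName(for:).
func describeCamera(_ camera: AVCaptureDevice) -> String {
    let manufacturer: String
    if #available(iOS 14.0, *) {
        manufacturer = camera.manufacturer   // e.g. "Apple Inc."
    } else {
        manufacturer = "Apple Inc."          // assumed default on earlier iOS versions
    }
    // Produces something like:
    // "Apple Inc. Front Camera AVCaptureDeviceTypeBuiltInWideAngleCamera"
    return "\(manufacturer) \(camera.localizedName) \(camera.deviceType.rawValue)"
}
```

Compared with the bare camera.uniqueID assigned before this change, the combined manufacturer, localized name, and device type give a more descriptive camera identifier to attach to a capture.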