diff --git a/Sources/SmileID/Classes/Camera/CameraManager.swift b/Sources/SmileID/Classes/Camera/CameraManager.swift
index fd5239d7..8a5407e1 100644
--- a/Sources/SmileID/Classes/Camera/CameraManager.swift
+++ b/Sources/SmileID/Classes/Camera/CameraManager.swift
@@ -1,5 +1,5 @@
-import Foundation
 import AVFoundation
+import Foundation
 import SwiftUI

 class CameraManager: NSObject, ObservableObject {
@@ -21,7 +21,9 @@ class CameraManager: NSObject, ObservableObject {
     @Published var sampleBuffer: CVPixelBuffer?
     @Published var capturedImage: Data?

-    var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher { $sampleBuffer }
+    var sampleBufferPublisher: Published<CVPixelBuffer?>.Publisher {
+        $sampleBuffer
+    }
     var capturedImagePublisher: Published<Data?>.Publisher { $capturedImage }
     let videoOutputQueue = DispatchQueue(
         label: "com.smileidentity.videooutput",
@@ -48,7 +50,8 @@ class CameraManager: NSObject, ObservableObject {
         self.orientation = orientation
         super.init()
         sessionQueue.async {
-            self.videoOutput.setSampleBufferDelegate(self, queue: self.videoOutputQueue)
+            self.videoOutput.setSampleBufferDelegate(
+                self, queue: self.videoOutputQueue)
         }
         checkPermissions()
     }
@@ -60,28 +63,28 @@ class CameraManager: NSObject, ObservableObject {
     }

     private func checkPermissions() {
-        switch AVCaptureDevice.authorizationStatus(for: .video) {
-        case .notDetermined:
-            sessionQueue.suspend()
-            AVCaptureDevice.requestAccess(for: .video) { authorized in
-                if !authorized {
-                    self.status = .unauthorized
-                    self.set(error: .deniedAuthorization)
-                }
-                self.sessionQueue.resume()
+        switch AVCaptureDevice.authorizationStatus(for: .video) {
+        case .notDetermined:
+            sessionQueue.suspend()
+            AVCaptureDevice.requestAccess(for: .video) { authorized in
+                if !authorized {
+                    self.status = .unauthorized
+                    self.set(error: .deniedAuthorization)
+                }
+                self.sessionQueue.resume()
+            }
+        case .restricted:
+            status = .unauthorized
+            set(error: .restrictedAuthorization)
+        case .denied:
+            status = .unauthorized
+            set(error: .deniedAuthorization)
+        case .authorized:
+            break
+        @unknown default:
+            status = .unauthorized
+            set(error: .unknownAuthorization)
         }
-        case .restricted:
-            status = .unauthorized
-            set(error: .restrictedAuthorization)
-        case .denied:
-            status = .unauthorized
-            set(error: .deniedAuthorization)
-        case .authorized:
-            break
-        @unknown default:
-            status = .unauthorized
-            set(error: .unknownAuthorization)
-        }
     }

     private func addCameraInput(position: AVCaptureDevice.Position) {
@@ -90,7 +93,8 @@ class CameraManager: NSObject, ObservableObject {
             status = .failed
             return
         }
-        cameraName = camera.uniqueID
+
+        getCameraName(for: camera)

         do {
             let cameraInput = try AVCaptureDeviceInput(device: camera)
@@ -106,14 +110,29 @@ class CameraManager: NSObject, ObservableObject {
         }
     }

-    private func getCameraForPosition(_ position: AVCaptureDevice.Position) -> AVCaptureDevice? {
+    private func getCameraName(for camera: AVCaptureDevice) {
+        var manufacturer: String
+        if #available(iOS 14.0, *) {
+            manufacturer = camera.manufacturer
+        } else {
+            manufacturer = "Apple Inc."
+        }
+        cameraName =
+            "\(manufacturer) \(camera.localizedName) \(camera.deviceType.rawValue)"
+    }
+
+    private func getCameraForPosition(_ position: AVCaptureDevice.Position)
+        -> AVCaptureDevice? {
         switch position {
         case .front:
-            return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
+            return AVCaptureDevice.default(
+                .builtInWideAngleCamera, for: .video, position: .front)
         case .back:
-            return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
+            return AVCaptureDevice.default(
+                .builtInWideAngleCamera, for: .video, position: .back)
         default:
-            return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
+            return AVCaptureDevice.default(
+                .builtInWideAngleCamera, for: .video, position: .front)
         }
     }
@@ -124,7 +143,10 @@ class CameraManager: NSObject, ObservableObject {
         session.addOutput(photoOutput)
         session.addOutput(videoOutput)
         videoOutput.videoSettings =
-            [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
+            [
+                kCVPixelBufferPixelFormatTypeKey as String:
+                    kCVPixelFormatType_32BGRA
+            ]
         if orientation == .portrait {
             let videoConnection = videoOutput.connection(with: .video)
             videoConnection?.videoOrientation = .portrait
@@ -139,7 +161,8 @@ class CameraManager: NSObject, ObservableObject {
         checkPermissions()
         sessionQueue.async { [self] in
             if !session.isRunning {
-                if let currentInput = session.inputs.first as? AVCaptureDeviceInput {
+                if let currentInput = session.inputs.first
+                    as? AVCaptureDeviceInput {
                     session.removeInput(currentInput)
                 }
                 addCameraInput(position: position)
@@ -147,7 +170,8 @@ class CameraManager: NSObject, ObservableObject {
                 session.startRunning()
             } else {
                 session.beginConfiguration()
-                if let currentInput = session.inputs.first as? AVCaptureDeviceInput {
+                if let currentInput = session.inputs.first
+                    as? AVCaptureDeviceInput {
                     session.removeInput(currentInput)
                 }
                 addCameraInput(position: position)
@@ -172,7 +196,9 @@ class CameraManager: NSObject, ObservableObject {
     }

     internal func capturePhoto() {
-        guard let connection = photoOutput.connection(with: .video), connection.isEnabled, connection.isActive else {
+        guard let connection = photoOutput.connection(with: .video),
+            connection.isEnabled, connection.isActive
+        else {
             set(error: .cameraUnavailable)
             print("Camera unavailable")
             return
@@ -189,7 +215,8 @@ extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate {
         didOutput sampleBuffer: CMSampleBuffer,
         from connection: AVCaptureConnection
     ) {
-        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
+        else { return }
         self.sampleBuffer = imageBuffer
     }
 }
diff --git a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift
index a051148d..5ad912ca 100644
--- a/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift
+++ b/Sources/SmileID/Classes/Networking/Models/v2/Metadata.swift
@@ -11,6 +11,7 @@ public struct Metadata: Codable {
         Metadata(items: [
             .sdk,
             .sdkVersion,
+            .activeLivenessVersion,
             .clientIP,
             .fingerprint,
             .deviceModel,
@@ -49,15 +50,34 @@ public class Metadatum: Codable {
     }

     public static let sdk = Metadatum(name: "sdk", value: "iOS")
-    public static let sdkVersion = Metadatum(name: "sdk_version", value: SmileID.version)
-    public static let clientIP = Metadatum(name: "client_ip", value: getIPAddress(useIPv4: true))
-    public static let fingerprint = Metadatum(name: "fingerprint", value: SmileID.deviceId)
-    public static let deviceModel = Metadatum(name: "device_model", value: UIDevice.current.modelName)
-    public static let deviceOS = Metadatum(name: "device_os", value: UIDevice.current.systemVersion)
+    public static let sdkVersion = Metadatum(
+        name: "sdk_version", value: SmileID.version)
+    public static let activeLivenessVersion = Metadatum(
+        name: "active_liveness_version", value: "1.0.0")
+    public static let clientIP = Metadatum(
+        name: "client_ip", value: getIPAddress(useIPv4: true))
+    public static let fingerprint = Metadatum(
+        name: "fingerprint", value: SmileID.deviceId)
+    public static let deviceModel = Metadatum(
+        name: "device_model", value: UIDevice.current.modelName)
+    public static let deviceOS = Metadatum(
+        name: "device_os", value: UIDevice.current.systemVersion)
+
+    public class ActiveLivenessType: Metadatum {
+        public init(livenessType: LivenessType) {
+            super.init(
+                name: "active_liveness_type", value: livenessType.rawValue)
+        }
+
+        public required init(from decoder: Decoder) throws {
+            try super.init(from: decoder)
+        }
+    }

     public class SelfieImageOrigin: Metadatum {
         public init(cameraFacing: CameraFacingValue) {
-            super.init(name: "selfie_image_origin", value: cameraFacing.rawValue)
+            super.init(
+                name: "selfie_image_origin", value: cameraFacing.rawValue)
         }

         public required init(from decoder: Decoder) throws {
@@ -67,7 +87,9 @@ public class Metadatum: Codable {

     public class SelfieCaptureDuration: Metadatum {
         public init(duration: TimeInterval) {
-            super.init(name: "selfie_capture_duration_ms", value: String(Int(duration * 1000)))
+            super.init(
+                name: "selfie_capture_duration_ms",
+                value: String(Int(duration * 1000)))
         }

         public required init(from decoder: Decoder) throws {
@@ -77,7 +99,8 @@ public class Metadatum: Codable {

     public class DocumentFrontImageOrigin: Metadatum {
         public init(origin: DocumentImageOriginValue) {
-            super.init(name: "document_front_image_origin", value: origin.rawValue)
+            super.init(
+                name: "document_front_image_origin", value: origin.rawValue)
         }

         public required init(from decoder: Decoder) throws {
@@ -87,7 +110,8 @@ public class Metadatum: Codable {

     public class DocumentBackImageOrigin: Metadatum {
         public init(origin: DocumentImageOriginValue) {
-            super.init(name: "document_back_image_origin", value: origin.rawValue)
+            super.init(
+                name: "document_back_image_origin", value: origin.rawValue)
         }

         public required init(from decoder: Decoder) throws {
@@ -97,7 +121,8 @@ public class Metadatum: Codable {

     public class DocumentFrontCaptureRetries: Metadatum {
         public init(retries: Int) {
-            super.init(name: "document_front_capture_retries", value: String(retries))
+            super.init(
+                name: "document_front_capture_retries", value: String(retries))
         }

         public required init(from decoder: Decoder) throws {
@@ -107,7 +132,8 @@ public class Metadatum: Codable {

     public class DocumentBackCaptureRetries: Metadatum {
         public init(retries: Int) {
-            super.init(name: "document_back_capture_retries", value: String(retries))
+            super.init(
+                name: "document_back_capture_retries", value: String(retries))
         }

         public required init(from decoder: Decoder) throws {
@@ -117,7 +143,9 @@ public class Metadatum: Codable {

     public class DocumentFrontCaptureDuration: Metadatum {
         public init(duration: TimeInterval) {
-            super.init(name: "document_front_capture_duration_ms", value: String(Int(duration * 1000)))
+            super.init(
+                name: "document_front_capture_duration_ms",
+                value: String(Int(duration * 1000)))
         }

         public required init(from decoder: Decoder) throws {
@@ -127,7 +155,9 @@ public class Metadatum: Codable {

     public class DocumentBackCaptureDuration: Metadatum {
         public init(duration: TimeInterval) {
-            super.init(name: "document_back_capture_duration_ms", value: String(Int(duration * 1000)))
+            super.init(
+                name: "document_back_capture_duration_ms",
+                value: String(Int(duration * 1000)))
         }

         public required init(from decoder: Decoder) throws {
@@ -136,6 +166,11 @@ public class Metadatum: Codable {
     }
 }

+public enum LivenessType: String, Codable {
+    case headPose = "head_pose"
+    case smile = "smile"
+}
+
 public enum DocumentImageOriginValue: String {
     case gallery
     case cameraAutoCapture = "camera_auto_capture"
@@ -172,16 +207,19 @@ func getIPAddress(useIPv4: Bool) -> String {
                 if name == "en0" || name == "en1" || name == "pdp_ip0" || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3" {
                     var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
-                    getnameinfo(interface.ifa_addr, socklen_t(interface.ifa_addr.pointee.sa_len),
-                                &hostname, socklen_t(hostname.count),
-                                nil, socklen_t(0), NI_NUMERICHOST)
+                    getnameinfo(
+                        interface.ifa_addr,
+                        socklen_t(interface.ifa_addr.pointee.sa_len),
+                        &hostname, socklen_t(hostname.count),
+                        nil, socklen_t(0), NI_NUMERICHOST)
                     address = String(cString: hostname)

-                    if (useIPv4 && addrFamily == UInt8(AF_INET)) ||
-                        (!useIPv4 && addrFamily == UInt8(AF_INET6)) {
+                    if (useIPv4 && addrFamily == UInt8(AF_INET))
+                        || (!useIPv4 && addrFamily == UInt8(AF_INET6)) {
                         if !useIPv4 {
                             if let percentIndex = address.firstIndex(of: "%") {
-                                address = String(address[..<percentIndex])
-                self?.motionDeviceOrientation = gravity.x > 0 ? .landscapeRight : .landscapeLeft
+                self?.motionDeviceOrientation =
+                    gravity.x > 0 ? .landscapeRight : .landscapeLeft
             } else {
-                self?.motionDeviceOrientation = gravity.y > 0 ? .portraitUpsideDown : .portrait
+                self?.motionDeviceOrientation =
+                    gravity.y > 0 ? .portraitUpsideDown : .portrait
             }
         }
     }

     private func handleCameraImageBuffer(_ imageBuffer: CVPixelBuffer) {
-        let currentOrientation: UIDeviceOrientation = motionManager.isDeviceMotionAvailable
-            ? motionDeviceOrientation : unlockedDeviceOrientation
+        let currentOrientation: UIDeviceOrientation =
+            motionManager.isDeviceMotionAvailable
+            ? motionDeviceOrientation : unlockedDeviceOrientation
         if currentOrientation == .portrait {
             analyzeFrame(imageBuffer: imageBuffer)
         } else {
@@ -213,7 +220,9 @@ public class EnhancedSmartSelfieViewModel: ObservableObject {
         }
     }

-    private func publishUserInstruction(_ instruction: SelfieCaptureInstruction?) {
+    private func publishUserInstruction(
+        _ instruction: SelfieCaptureInstruction?
+    ) {
         if self.userInstruction != instruction {
             self.userInstruction = instruction
             self.resetGuideAnimationDelayTimer()
@@ -256,9 +265,12 @@ extension EnhancedSmartSelfieViewModel {
         livenessImages = []
         selfieCaptureState = .capturingSelfie
         failureReason = nil
+        resetSelfieCaptureMetadata()
     }

-    private func handleWindowSizeChanged(to rect: CGSize, edgeInsets: EdgeInsets) {
+    private func handleWindowSizeChanged(
+        to rect: CGSize, edgeInsets: EdgeInsets
+    ) {
         let topPadding: CGFloat = edgeInsets.top + 100
         faceLayoutGuideFrame = CGRect(
             x: (rect.width / 2) - faceLayoutGuideFrame.width / 2,
@@ -282,7 +294,8 @@ extension EnhancedSmartSelfieViewModel {
                 throw SmileIDError.unknown("Error resizing selfie image")
             }
             self.selfieImage = flipImageForPreview(uiImage)
-            self.selfieImageURL = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData)
+            self.selfieImageURL = try LocalStorage.createSelfieFile(
+                jobId: jobId, selfieFile: imageData)
         } catch {
             handleError(error)
         }
@@ -291,7 +304,8 @@ extension EnhancedSmartSelfieViewModel {

     private func flipImageForPreview(_ image: UIImage) -> UIImage? {
         guard let cgImage = image.cgImage else { return nil }
-        let contextSize = CGSize(width: image.size.width, height: image.size.height)
+        let contextSize = CGSize(
+            width: image.size.width, height: image.size.height)
         UIGraphicsBeginImageContextWithOptions(contextSize, false, 1.0)
         defer {
             UIGraphicsEndImageContext()
@@ -310,7 +324,8 @@ extension EnhancedSmartSelfieViewModel {
         context.draw(
             cgImage,
             in: CGRect(
-                x: -image.size.width / 2, y: -image.size.height / 2, width: image.size.width, height: image.size.height)
+                x: -image.size.width / 2, y: -image.size.height / 2,
+                width: image.size.width, height: image.size.height)
         )

         // Get the new UIImage from the context
@@ -330,7 +345,8 @@ extension EnhancedSmartSelfieViewModel {
             else {
                 throw SmileIDError.unknown("Error resizing liveness image")
             }
-            let imageUrl = try LocalStorage.createLivenessFile(jobId: jobId, livenessFile: imageData)
+            let imageUrl = try LocalStorage.createLivenessFile(
+                jobId: jobId, livenessFile: imageData)
             livenessImages.append(imageUrl)
         } catch {
             handleError(error)
@@ -352,7 +368,8 @@ extension EnhancedSmartSelfieViewModel {
     }

     private func openSettings() {
-        guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return }
+        guard let settingsURL = URL(string: UIApplication.openSettingsURLString)
+        else { return }
         UIApplication.shared.open(settingsURL)
     }

@@ -392,7 +409,9 @@ extension EnhancedSmartSelfieViewModel: FaceDetectorResultDelegate {
         }
     }

-    func faceDetector(_ detector: EnhancedFaceDetector, didFailWithError error: Error) {
+    func faceDetector(
+        _ detector: EnhancedFaceDetector, didFailWithError error: Error
+    ) {
         DispatchQueue.main.async {
             self.publishUserInstruction(.headInFrame)
         }
@@ -421,7 +440,8 @@ extension EnhancedSmartSelfieViewModel: LivenessCheckManagerDelegate {
     private func captureNextFrame(capturedFrames: Int) {
         let maxFrames = LivenessTask.numberOfFramesToCapture
         guard capturedFrames < maxFrames,
-            let currentFrame = currentFrameBuffer else {
+            let currentFrame = currentFrameBuffer
+        else {
             return
         }

@@ -463,7 +483,7 @@ extension EnhancedSmartSelfieViewModel: SelfieSubmissionDelegate {
     public func submitJob() async throws {
         // Add metadata before submission
-        addSelfieCaptureDurationMetaData()
+        addSelfieCaptureMetaData()

         if skipApiSubmission {
             // Skip API submission and update processing state to success
@@ -486,17 +506,38 @@ extension EnhancedSmartSelfieViewModel: SelfieSubmissionDelegate {
         try await submissionManager.submitJob(failureReason: self.failureReason)
     }

-    private func addSelfieCaptureDurationMetaData() {
+    private func addSelfieCaptureMetaData() {
+        localMetadata.addMetadata(
+            Metadatum.SelfieCaptureDuration(
+                duration: metadataTimerStart.elapsedTime())
+        )
         localMetadata.addMetadata(
-            Metadatum.SelfieCaptureDuration(duration: metadataTimerStart.elapsedTime()))
+            Metadatum.ActiveLivenessType(livenessType: LivenessType.headPose)
+        )
+        localMetadata.addMetadata(
+            Metadatum(
+                name: "camera_name",
+                value: cameraManager.cameraName ?? "Unknown Camera Name"
+            )
+        )
+    }
+
+    private func resetSelfieCaptureMetadata() {
+        localMetadata.metadata.removeAllOfType(
+            Metadatum.SelfieCaptureDuration.self)
+        localMetadata.metadata.removeAllOfType(
+            Metadatum.ActiveLivenessType.self)
     }

     public func onFinished(callback: SmartSelfieResultDelegate) {
         if let selfieImageURL = selfieImageURL,
            let selfiePath = getRelativePath(from: selfieImageURL),
            livenessImages.count == numLivenessImages,
-           !livenessImages.contains(where: { getRelativePath(from: $0) == nil }) {
-            let livenessImagesPaths = livenessImages.compactMap { getRelativePath(from: $0) }
+           !livenessImages.contains(where: { getRelativePath(from: $0) == nil }
+        ) {
+            let livenessImagesPaths = livenessImages.compactMap {
+                getRelativePath(from: $0)
+            }

             callback.didSucceed(
                 selfieImage: selfiePath,
diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
index 984212df..cd245169 100644
--- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
+++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModel.swift
@@ -13,9 +13,9 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     private let minFaceAreaThreshold = 0.125
     private let maxFaceAreaThreshold = 0.25
     private let faceRotationThreshold = 0.03
-    private let faceRollThreshold = 0.025 // roll has a smaller range than yaw
+    private let faceRollThreshold = 0.025  // roll has a smaller range than yaw
     private let numLivenessImages = 7
-    private let numTotalSteps = 8 // numLivenessImages + 1 selfie image
+    private let numTotalSteps = 8  // numLivenessImages + 1 selfie image
     private let livenessImageSize = 320
     private let selfieImageSize = 640
@@ -35,14 +35,18 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     var previousHeadPitch = Double.infinity
     var previousHeadYaw = Double.infinity
     var isSmiling = false
-    var currentlyUsingArKit: Bool { ARFaceTrackingConfiguration.isSupported && !useBackCamera }
+    var currentlyUsingArKit: Bool {
+        ARFaceTrackingConfiguration.isSupported && !useBackCamera
+    }
     var selfieImage: URL?
     var livenessImages: [URL] = []
     var apiResponse: SmartSelfieResponse?
     var error: Error?

-    private let arKitFramePublisher = PassthroughSubject<CVPixelBuffer?, Never>()
+    private let arKitFramePublisher = PassthroughSubject<
+        CVPixelBuffer?, Never
+    >()
     private var subscribers = Set<AnyCancellable>()

     // UI Properties
@@ -90,7 +94,10 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {

         cameraManager.sampleBufferPublisher
             .merge(with: arKitFramePublisher)
-            .throttle(for: 0.35, scheduler: DispatchQueue.global(qos: .userInitiated), latest: true)
+            .throttle(
+                for: 0.35, scheduler: DispatchQueue.global(qos: .userInitiated),
+                latest: true
+            )
             // Drop the first ~2 seconds to allow the user to settle in
             .dropFirst(5)
             .compactMap { $0 }
@@ -101,8 +108,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {

         localMetadata.addMetadata(
             useBackCamera
-            ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera)
-            : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera)
+                ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera)
+                : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera)
         )
     }
@@ -124,18 +131,22 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
         try faceDetector.detect(imageBuffer: image) { [weak self] request, error in
             guard let self else { return }
             if let error {
-                print("Error analyzing image: \(error.localizedDescription)")
+                print(
+                    "Error analyzing image: \(error.localizedDescription)")
                 self.error = error
                 return
             }

-            guard let results = request.results as? [VNFaceObservation] else {
+            guard let results = request.results as? [VNFaceObservation]
+            else {
                 print("Did not receive the expected [VNFaceObservation]")
                 return
             }

             if results.count == 0 {
-                DispatchQueue.main.async { self.directive = "Instructions.UnableToDetectFace" }
+                DispatchQueue.main.async {
+                    self.directive = "Instructions.UnableToDetectFace"
+                }
                 // If no faces are detected for a while, reset the state
                 if elapsedtime > noFaceResetDelay {
                     DispatchQueue.main.async {
@@ -152,7 +163,9 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {

             // Ensure only 1 face is in frame
             if results.count > 1 {
-                DispatchQueue.main.async { self.directive = "Instructions.MultipleFaces" }
+                DispatchQueue.main.async {
+                    self.directive = "Instructions.MultipleFaces"
+                }
                 return
             }
@@ -173,31 +186,44 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
                 || boundingBox.maxX > maxFaceCenteredThreshold
                 || boundingBox.maxY > maxFaceCenteredThreshold
             {
-                DispatchQueue.main.async { self.directive = "Instructions.PutFaceInOval" }
+                DispatchQueue.main.async {
+                    self.directive = "Instructions.PutFaceInOval"
+                }
                 return
             }

             // image's area is equal to 1. so (bbox area / image area) == bbox area
             let faceFillRatio = boundingBox.width * boundingBox.height
             if faceFillRatio < minFaceAreaThreshold {
-                DispatchQueue.main.async { self.directive = "Instructions.MoveCloser" }
+                DispatchQueue.main.async {
+                    self.directive = "Instructions.MoveCloser"
+                }
                 return
             }

             if faceFillRatio > maxFaceAreaThreshold {
-                DispatchQueue.main.async { self.directive = "Instructions.MoveFarther" }
+                DispatchQueue.main.async {
+                    self.directive = "Instructions.MoveFarther"
+                }
                 return
             }

-            if let quality = face.faceCaptureQuality, quality < faceCaptureQualityThreshold {
-                DispatchQueue.main.async { self.directive = "Instructions.Quality" }
+            if let quality = face.faceCaptureQuality,
+                quality < faceCaptureQualityThreshold
+            {
+                DispatchQueue.main.async {
+                    self.directive = "Instructions.Quality"
+                }
                 return
             }

-            let userNeedsToSmile = livenessImages.count > numLivenessImages / 2
+            let userNeedsToSmile =
+                livenessImages.count > numLivenessImages / 2

             DispatchQueue.main.async {
-                self.directive = userNeedsToSmile ? "Instructions.Smile" : "Instructions.Capturing"
+                self.directive =
+                    userNeedsToSmile
+                    ? "Instructions.Smile" : "Instructions.Capturing"
             }

             // TODO: Use mouth deformation as an alternate signal for non-ARKit capture
@@ -208,36 +234,50 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
             // Perform the rotation checks *after* changing directive to Capturing -- we don't
             // want to explicitly tell the user to move their head
             if !hasFaceRotatedEnough(face: face) {
-                print("Not enough face rotation between captures. Waiting...")
+                print(
+                    "Not enough face rotation between captures. Waiting...")
                 return
             }

-            let orientation = currentlyUsingArKit ? CGImagePropertyOrientation.right : .up
+            let orientation =
+                currentlyUsingArKit ? CGImagePropertyOrientation.right : .up
             lastAutoCaptureTime = Date()
             do {
                 if livenessImages.count < numLivenessImages {
-                    guard let imageData = ImageUtils.resizePixelBufferToHeight(
-                        image,
-                        height: livenessImageSize,
-                        orientation: orientation
-                    ) else {
-                        throw SmileIDError.unknown("Error resizing liveness image")
+                    guard
+                        let imageData =
+                            ImageUtils.resizePixelBufferToHeight(
+                                image,
+                                height: livenessImageSize,
+                                orientation: orientation
+                            )
+                    else {
+                        throw SmileIDError.unknown(
+                            "Error resizing liveness image")
                     }
-                    let imageUrl = try LocalStorage.createLivenessFile(jobId: jobId, livenessFile: imageData)
+                    let imageUrl = try LocalStorage.createLivenessFile(
+                        jobId: jobId, livenessFile: imageData)
                     livenessImages.append(imageUrl)
                     DispatchQueue.main.async {
-                        self.captureProgress = Double(self.livenessImages.count) / Double(self.numTotalSteps)
+                        self.captureProgress =
+                            Double(self.livenessImages.count)
+                            / Double(self.numTotalSteps)
                     }
                 } else {
                     shouldAnalyzeImages = false
-                    guard let imageData = ImageUtils.resizePixelBufferToHeight(
-                        image,
-                        height: selfieImageSize,
-                        orientation: orientation
-                    ) else {
-                        throw SmileIDError.unknown("Error resizing selfie image")
+                    guard
+                        let imageData =
+                            ImageUtils.resizePixelBufferToHeight(
+                                image,
+                                height: selfieImageSize,
+                                orientation: orientation
+                            )
+                    else {
+                        throw SmileIDError.unknown(
+                            "Error resizing selfie image")
                     }
-                    let selfieImage = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData)
+                    let selfieImage = try LocalStorage.createSelfieFile(
+                        jobId: jobId, selfieFile: imageData)
                     self.selfieImage = selfieImage
                     DispatchQueue.main.async {
                         self.captureProgress = 1
@@ -258,14 +298,16 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     }

     func hasFaceRotatedEnough(face: VNFaceObservation) -> Bool {
-        guard let roll = face.roll?.doubleValue, let yaw = face.yaw?.doubleValue else {
+        guard let roll = face.roll?.doubleValue, let yaw = face.yaw?.doubleValue
+        else {
            print("Roll and yaw unexpectedly nil")
            return true
        }
        var didPitchChange = false
        if #available(iOS 15, *) {
            if let pitch = face.pitch?.doubleValue {
-                didPitchChange = abs(pitch - previousHeadPitch) > faceRotationThreshold
+                didPitchChange =
+                    abs(pitch - previousHeadPitch) > faceRotationThreshold
            }
        }
        let rollDelta = abs(roll - previousHeadRoll)
@@ -277,7 +319,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
             self.previousHeadPitch = face.pitch?.doubleValue ?? Double.infinity
         }

-        return didPitchChange || rollDelta > faceRollThreshold || yawDelta > faceRotationThreshold
+        return didPitchChange || rollDelta > faceRollThreshold
+            || yawDelta > faceRotationThreshold
     }

     func onSmiling(isSmiling: Bool) {
@@ -292,7 +335,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
         cameraManager.switchCamera(to: useBackCamera ? .back : .front)
         localMetadata.metadata.removeAllOfType(Metadatum.SelfieImageOrigin.self)
         localMetadata.addMetadata(
-            useBackCamera ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera)
+            useBackCamera
+                ? Metadatum.SelfieImageOrigin(cameraFacing: .backCamera)
                 : Metadatum.SelfieImageOrigin(cameraFacing: .frontCamera))
     }
@@ -307,7 +351,10 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
         shouldAnalyzeImages = true
         cleanUpSelfieCapture()
         localMetadata.metadata.removeAllOfType(Metadatum.SelfieImageOrigin.self)
-        localMetadata.metadata.removeAllOfType(Metadatum.SelfieCaptureDuration.self)
+        localMetadata.metadata.removeAllOfType(
+            Metadatum.ActiveLivenessType.self)
+        localMetadata.metadata.removeAllOfType(
+            Metadatum.SelfieCaptureDuration.self)
     }

     func cleanUpSelfieCapture() {
@@ -329,7 +376,11 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     }

     public func submitJob() {
-        localMetadata.addMetadata(Metadatum.SelfieCaptureDuration(duration: metadataTimerStart.elapsedTime()))
+        localMetadata.addMetadata(
+            Metadatum.SelfieCaptureDuration(
+                duration: metadataTimerStart.elapsedTime()))
+        localMetadata.addMetadata(
+            Metadatum.ActiveLivenessType(livenessType: LivenessType.smile))
         if skipApiSubmission {
             DispatchQueue.main.async { self.processingState = .success }
             return
@@ -337,10 +388,14 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
         DispatchQueue.main.async { self.processingState = .inProgress }
         Task {
             do {
-                guard let selfieImage, livenessImages.count == numLivenessImages else {
+                guard let selfieImage, livenessImages.count == numLivenessImages
+                else {
                     throw SmileIDError.unknown("Selfie capture failed")
                 }
-                let jobType = isEnroll ? JobType.smartSelfieEnrollment : JobType.smartSelfieAuthentication
+                let jobType =
+                    isEnroll
+                    ? JobType.smartSelfieEnrollment
+                    : JobType.smartSelfieAuthentication
                 let authRequest = AuthenticationRequest(
                     jobType: jobType,
                     enrollment: isEnroll,
@@ -358,15 +413,18 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
                         partnerParams: extraPartnerParams
                     )
                 }
-                let authResponse = try await SmileID.api.authenticate(request: authRequest)
+                let authResponse = try await SmileID.api.authenticate(
+                    request: authRequest)

                 var smartSelfieLivenessImages = [MultipartBody]()
                 var smartSelfieImage: MultipartBody?
-                if let selfie = try? Data(contentsOf: selfieImage), let media = MultipartBody(
-                    withImage: selfie,
-                    forKey: selfieImage.lastPathComponent,
-                    forName: selfieImage.lastPathComponent
-                ) {
+                if let selfie = try? Data(contentsOf: selfieImage),
+                    let media = MultipartBody(
+                        withImage: selfie,
+                        forKey: selfieImage.lastPathComponent,
+                        forName: selfieImage.lastPathComponent
+                    )
+                {
                     smartSelfieImage = media
                 }
                 if !livenessImages.isEmpty {
@@ -381,42 +439,44 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
                         return nil
                     }

-                    smartSelfieLivenessImages.append(contentsOf: livenessImageInfos.compactMap { $0 })
+                    smartSelfieLivenessImages.append(
+                        contentsOf: livenessImageInfos.compactMap { $0 })
                 }
                 guard let smartSelfieImage = smartSelfieImage,
-                      smartSelfieLivenessImages.count == numLivenessImages
+                    smartSelfieLivenessImages.count == numLivenessImages
                 else {
                     throw SmileIDError.unknown("Selfie capture failed")
                 }

-                let response = if isEnroll {
-                    try await SmileID.api.doSmartSelfieEnrollment(
-                        signature: authResponse.signature,
-                        timestamp: authResponse.timestamp,
-                        selfieImage: smartSelfieImage,
-                        livenessImages: smartSelfieLivenessImages,
-                        userId: userId,
-                        partnerParams: extraPartnerParams,
-                        callbackUrl: SmileID.callbackUrl,
-                        sandboxResult: nil,
-                        allowNewEnroll: allowNewEnroll,
-                        failureReason: nil,
-                        metadata: localMetadata.metadata
-                    )
-                } else {
-                    try await SmileID.api.doSmartSelfieAuthentication(
-                        signature: authResponse.signature,
-                        timestamp: authResponse.timestamp,
-                        userId: userId,
-                        selfieImage: smartSelfieImage,
-                        livenessImages: smartSelfieLivenessImages,
-                        partnerParams: extraPartnerParams,
-                        callbackUrl: SmileID.callbackUrl,
-                        sandboxResult: nil,
-                        failureReason: nil,
-                        metadata: localMetadata.metadata
-                    )
-                }
+                let response =
+                    if isEnroll {
+                        try await SmileID.api.doSmartSelfieEnrollment(
+                            signature: authResponse.signature,
+                            timestamp: authResponse.timestamp,
+                            selfieImage: smartSelfieImage,
+                            livenessImages: smartSelfieLivenessImages,
+                            userId: userId,
+                            partnerParams: extraPartnerParams,
+                            callbackUrl: SmileID.callbackUrl,
+                            sandboxResult: nil,
+                            allowNewEnroll: allowNewEnroll,
+                            failureReason: nil,
+                            metadata: localMetadata.metadata
+                        )
+                    } else {
+                        try await SmileID.api.doSmartSelfieAuthentication(
+                            signature: authResponse.signature,
+                            timestamp: authResponse.timestamp,
+                            userId: userId,
+                            selfieImage: smartSelfieImage,
+                            livenessImages: smartSelfieLivenessImages,
+                            partnerParams: extraPartnerParams,
+                            callbackUrl: SmileID.callbackUrl,
+                            sandboxResult: nil,
+                            failureReason: nil,
+                            metadata: localMetadata.metadata
+                        )
+                    }
                 apiResponse = response
                 do {
                     try LocalStorage.moveToSubmittedJobs(jobId: self.jobId)
@@ -425,11 +485,12 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
                         fileType: FileType.selfie,
                         submitted: true
                     )
-                    self.livenessImages = try LocalStorage.getFilesByType(
-                        jobId: jobId,
-                        fileType: FileType.liveness,
-                        submitted: true
-                    ) ?? []
+                    self.livenessImages =
+                        try LocalStorage.getFilesByType(
+                            jobId: jobId,
+                            fileType: FileType.liveness,
+                            submitted: true
+                        ) ?? []
                 } catch {
                     print("Error moving job to submitted directory: \(error)")
                     self.error = error
@@ -447,25 +508,29 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
                         fileType: FileType.selfie,
                         submitted: true
                     )
-                    self.livenessImages = try LocalStorage.getFilesByType(
-                        jobId: jobId,
-                        fileType: FileType.liveness,
-                        submitted: true
-                    ) ?? []
+                    self.livenessImages =
+                        try LocalStorage.getFilesByType(
+                            jobId: jobId,
+                            fileType: FileType.liveness,
+                            submitted: true
+                        ) ?? []
                 }
             } catch {
                 print("Error moving job to submitted directory: \(error)")
                 self.error = error
                 return
             }
-            if SmileID.allowOfflineMode, SmileIDError.isNetworkFailure(error: error) {
+            if SmileID.allowOfflineMode,
+                SmileIDError.isNetworkFailure(error: error)
+            {
                 DispatchQueue.main.async {
                     self.errorMessageRes = "Offline.Message"
                     self.processingState = .success
                 }
             } else {
                 print("Error submitting job: \(error)")
-                let (errorMessageRes, errorMessage) = toErrorMessage(error: error)
+                let (errorMessageRes, errorMessage) = toErrorMessage(
+                    error: error)
                 self.error = error
                 self.errorMessageRes = errorMessageRes
                 self.errorMessage = errorMessage
@@ -481,11 +546,14 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {

     public func onFinished(callback: SmartSelfieResultDelegate) {
         if let selfieImage = selfieImage,
-           let selfiePath = getRelativePath(from: selfieImage),
-           livenessImages.count == numLivenessImages,
-           !livenessImages.contains(where: { getRelativePath(from: $0) == nil })
+            let selfiePath = getRelativePath(from: selfieImage),
+            livenessImages.count == numLivenessImages,
+            !livenessImages.contains(where: { getRelativePath(from: $0) == nil }
+            )
         {
-            let livenessImagesPaths = livenessImages.compactMap { getRelativePath(from: $0) }
+            let livenessImagesPaths = livenessImages.compactMap {
+                getRelativePath(from: $0)
+            }

             callback.didSucceed(
                 selfieImage: selfiePath,
@@ -500,7 +568,8 @@ public class SelfieViewModel: ObservableObject, ARKitSmileDelegate {
     }

     func openSettings() {
-        guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else { return }
+        guard let settingsURL = URL(string: UIApplication.openSettingsURLString)
+        else { return }
         UIApplication.shared.open(settingsURL)
     }
 }
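
A minimal sketch of the metadata flow these changes introduce, assuming simplified stand-ins for SDK types the patch does not show: `LocalMetadata` and the `removeAllOfType` array helper below are assumptions, the nesting of the Metadatum subclasses is flattened, and the sample values are illustrative only. `Metadatum`, `LivenessType`, and the metadata names mirror Metadata.swift above. The selfie view models record the capture duration, the active liveness type (`head_pose` in EnhancedSmartSelfieViewModel, `smile` in SelfieViewModel), and the camera name before submission, and clear the duration and liveness-type entries again when the capture is retried:

import Foundation

// Stand-ins mirroring Metadata.swift above; nesting under Metadatum is flattened here.
class Metadatum {
    let name: String
    let value: String
    init(name: String, value: String) {
        self.name = name
        self.value = value
    }
}

enum LivenessType: String {
    case headPose = "head_pose"
    case smile = "smile"
}

final class ActiveLivenessType: Metadatum {
    init(livenessType: LivenessType) {
        super.init(name: "active_liveness_type", value: livenessType.rawValue)
    }
}

final class SelfieCaptureDuration: Metadatum {
    init(duration: TimeInterval) {
        super.init(name: "selfie_capture_duration_ms", value: String(Int(duration * 1000)))
    }
}

// Assumed helper: the SDK's removeAllOfType extension is not part of this diff.
extension Array where Element: Metadatum {
    mutating func removeAllOfType<T: Metadatum>(_ type: T.Type) {
        removeAll { $0 is T }
    }
}

// Assumed container standing in for the SDK's LocalMetadata.
final class LocalMetadata {
    var metadata: [Metadatum] = []
    func addMetadata(_ item: Metadatum) { metadata.append(item) }
}

// Recording before submission, as in addSelfieCaptureMetaData()/submitJob() above.
let localMetadata = LocalMetadata()
localMetadata.addMetadata(SelfieCaptureDuration(duration: 4.2))
localMetadata.addMetadata(ActiveLivenessType(livenessType: .headPose))
localMetadata.addMetadata(Metadatum(name: "camera_name", value: "Apple Inc. Front Camera"))

// Clearing on retry, as in resetSelfieCaptureMetadata() above.
localMetadata.metadata.removeAllOfType(SelfieCaptureDuration.self)
localMetadata.metadata.removeAllOfType(ActiveLivenessType.self)
print(localMetadata.metadata.map(\.name))  // ["camera_name"]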