Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(ux): update get ready page with new preview screen #78

Merged
merged 16 commits into from
Nov 29, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,19 @@ extension LivenessResultContentView {
let valueTextColor: Color
let valueBackgroundColor: Color
let auditImage: Data?

let isLive: Bool

init(livenessResult: LivenessResult) {
guard livenessResult.confidenceScore > 0 else {
text = ""
value = ""
valueTextColor = .clear
valueBackgroundColor = .clear
auditImage = nil
isLive = false
return
}

isLive = livenessResult.isLive
let truncated = String(format: "%.4f", livenessResult.confidenceScore)
value = truncated
if livenessResult.isLive {
Expand Down
44 changes: 43 additions & 1 deletion HostApp/HostApp/Views/LivenessResultContentView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,10 @@ struct LivenessResultContentView: View {
Text("Result:")
Text(result.text)
.fontWeight(.semibold)

.foregroundColor(result.valueTextColor)
.padding(6)
.background(result.valueBackgroundColor)
.cornerRadius(8)
}
.padding(.bottom, 12)

Expand All @@ -42,6 +45,20 @@ struct LivenessResultContentView: View {
.frame(maxWidth: .infinity, idealHeight: 268)
.background(Color.secondary.opacity(0.1))
}

if !result.isLive {
steps()
.padding()
.background(
Rectangle()
.foregroundColor(
.dynamicColors(
light: .hex("#ECECEC"),
dark: .darkGray
)
)
.cornerRadius(6))
}
}
.padding(.bottom, 16)
.onAppear {
Expand All @@ -54,6 +71,31 @@ struct LivenessResultContentView: View {
}
}
}

/// Builds the "tips" panel shown when the liveness check reports a non-live result.
/// Returns a vertically stacked title followed by three numbered tips; each
/// numbered row is combined into a single accessibility element so VoiceOver
/// reads the number and the tip text together.
private func steps() -> some View {
    // Renders one numbered tip row, top-aligned so multi-line tips wrap cleanly.
    func tipRow(_ ordinal: Int, _ message: String) -> some View {
        HStack(alignment: .top) {
            Text("\(ordinal).")
            Text(message)
        }
    }

    return VStack(
        alignment: .leading,
        spacing: 8
    ) {
        Text("Tips to pass the video check:")
            .fontWeight(.semibold)

        tipRow(1, "Maximize your screen's brightness.")
            .accessibilityElement(children: .combine)

        tipRow(2, "Avoid very bright lighting conditions, such as direct sunlight.")
            .accessibilityElement(children: .combine)

        tipRow(3, "Remove sunglasses, mask, hat, or anything blocking your face.")
            .accessibilityElement(children: .combine)
    }
}
}


Expand Down
2 changes: 1 addition & 1 deletion HostApp/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ cd amplify-ui-swift-liveness/HostApp

7. Once signed in and authenticated, the "Create Liveness Session" is enabled. Click the button to generate and get a session id from your backend.

8. Once a session id is created, the Liveness Check screen is displayed. Follow the instructions and click on Begin Check button to begin liveness verification.
8. Once a session id is created, the Liveness Check screen is displayed. Follow the instructions and click the Start video check button to begin liveness verification.

## Provision AWS Backend Resources

Expand Down
42 changes: 0 additions & 42 deletions Sources/FaceLiveness/AV/CMSampleBuffer+Rotate.swift

This file was deleted.

41 changes: 26 additions & 15 deletions Sources/FaceLiveness/AV/LivenessCaptureSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,34 @@ import AVFoundation
class LivenessCaptureSession {
let captureDevice: LivenessCaptureDevice
private let captureQueue = DispatchQueue(label: "com.amazonaws.faceliveness.cameracapturequeue")
let outputDelegate: OutputSampleBufferCapturer
let outputDelegate: AVCaptureVideoDataOutputSampleBufferDelegate
var captureSession: AVCaptureSession?

var outputSampleBufferCapturer: OutputSampleBufferCapturer? {
return outputDelegate as? OutputSampleBufferCapturer
}

init(captureDevice: LivenessCaptureDevice, outputDelegate: OutputSampleBufferCapturer) {
init(captureDevice: LivenessCaptureDevice, outputDelegate: AVCaptureVideoDataOutputSampleBufferDelegate) {
self.captureDevice = captureDevice
self.outputDelegate = outputDelegate
}

/// Starts the underlying capture session and returns a preview layer sized
/// to `frame` for on-screen display.
/// - Parameter frame: The bounds to apply to the returned preview layer.
/// - Returns: A `CALayer` previewing the running capture session.
/// - Throws: `LivenessCaptureSessionError.captureSessionUnavailable` if the
///   session was not created by `startSession()`, or any error that
///   `startSession()` itself throws.
func startSession(frame: CGRect) throws -> CALayer {
    try startSession()

    // startSession() is expected to have populated `captureSession`;
    // surface a typed error rather than crashing if it did not.
    guard let session = captureSession else {
        throw LivenessCaptureSessionError.captureSessionUnavailable
    }

    return previewLayer(frame: frame, for: session)
}

func startSession() throws {
guard let camera = captureDevice.avCaptureDevice
else { throw LivenessCaptureSessionError.cameraUnavailable }

Expand All @@ -44,17 +63,10 @@ class LivenessCaptureSession {
captureSession.startRunning()
}

let previewLayer = previewLayer(
frame: frame,
for: captureSession
)

videoOutput.setSampleBufferDelegate(
outputDelegate,
queue: captureQueue
)

return previewLayer
}

func stopRunning() {
Expand Down Expand Up @@ -83,6 +95,11 @@ class LivenessCaptureSession {
_ output: AVCaptureVideoDataOutput,
for captureSession: AVCaptureSession
) throws {
if captureSession.canAddOutput(output) {
captureSession.addOutput(output)
} else {
throw LivenessCaptureSessionError.captureSessionOutputUnavailable
}
output.videoSettings = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
]
Expand All @@ -92,12 +109,6 @@ class LivenessCaptureSession {
.forEach {
$0.videoOrientation = .portrait
}

if captureSession.canAddOutput(output) {
captureSession.addOutput(output)
} else {
throw LivenessCaptureSessionError.captureSessionOutputUnavailable
}
}

private func previewLayer(
Expand Down
2 changes: 1 addition & 1 deletion Sources/FaceLiveness/AV/OutputSampleBufferCapturer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ class OutputSampleBufferCapturer: NSObject, AVCaptureVideoDataOutputSampleBuffer
) {
videoChunker.consume(sampleBuffer)

guard let imageBuffer = sampleBuffer.rotateRightUpMirrored()
guard let imageBuffer = sampleBuffer.imageBuffer
else { return }

faceDetector.detectFaces(from: imageBuffer)
Expand Down
10 changes: 5 additions & 5 deletions Sources/FaceLiveness/AV/VideoChunker.swift
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,9 @@ final class VideoChunker {

/// Transitions the chunker from `.pending` to `.writing` and begins the
/// asset-writing session at time zero. Calling this in any other state is a
/// no-op, so repeated calls are harmless.
func start() {
    guard state == .pending else { return }
    // Mark as writing before kicking off the writer; the duplicate
    // trailing `state = .writing` in the previous version was redundant
    // and has been removed.
    state = .writing
    assetWriter.startWriting()
    assetWriter.startSession(atSourceTime: .zero)
}

func finish(singleFrame: @escaping (UIImage) -> Void) {
Expand All @@ -49,8 +49,8 @@ final class VideoChunker {

func consume(_ buffer: CMSampleBuffer) {
if state == .awaitingSingleFrame {
guard let rotated = buffer.rotateRightUpMirrored() else { return }
let singleFrame = singleFrame(from: rotated)
guard let imageBuffer = buffer.imageBuffer else { return }
let singleFrame = singleFrame(from: imageBuffer)
provideSingleFrame?(singleFrame)
state = .complete
}
Expand All @@ -66,10 +66,10 @@ final class VideoChunker {
if assetWriterInput.isReadyForMoreMediaData {
let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer).seconds
let presentationTime = CMTime(seconds: timestamp - startTimeSeconds, preferredTimescale: 600)
guard let rotated = buffer.rotateRightUpMirrored() else { return }
guard let imageBuffer = buffer.imageBuffer else { return }

pixelBufferAdaptor.append(
rotated,
imageBuffer,
withPresentationTime: presentationTime
)
}
Expand Down

This file was deleted.

Binary file not shown.

This file was deleted.

Binary file not shown.

This file was deleted.

Binary file not shown.
19 changes: 9 additions & 10 deletions Sources/FaceLiveness/Resources/Base.lproj/Localizable.strings
Original file line number Diff line number Diff line change
Expand Up @@ -6,27 +6,20 @@
//

"amplify_ui_liveness_get_ready_page_title" = "Liveness Check";
"amplify_ui_liveness_get_ready_page_description" = "You will go through a face verification process to prove that you are a real person. Your screen's brightness will temporarily be set to 100% for highest accuracy.";
"amplify_ui_liveness_get_ready_photosensitivity_title" = "Photosensitivity Warning";
"amplify_ui_liveness_get_ready_photosensitivity_description" = "This check displays colored lights. Use caution if you are photosensitive.";
"amplify_ui_liveness_get_ready_photosensitivity_icon_a11y" = "Photosensitivity Information";
"amplify_ui_liveness_get_ready_photosensitivity_dialog_title" = "Photosensitivity warning";
"amplify_ui_liveness_get_ready_photosensitivity_dialog_description" = "A small percentage of individuals may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition.";
"amplify_ui_liveness_get_ready_steps_title" = "Follow the instructions to complete the check:";
"amplify_ui_liveness_get_ready_face_not_covered" = "Make sure your face is not covered with sunglasses or a mask.";
"amplify_ui_liveness_get_ready_lighting" = "Move to a well-lit place that is not in direct sunlight.";
"amplify_ui_liveness_get_ready_fit_face" = "When an oval appears, fill the oval with your face in it.";
"amplify_ui_liveness_get_ready_begin_check" = "Begin Check";
"amplify_ui_liveness_get_ready_good_fit_example" = "Good fit";
"amplify_ui_liveness_get_ready_too_far_example" = "Too far";
"amplify_ui_liveness_get_ready_photosensitivity_dialog_description" = "Some people may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition.";
"amplify_ui_liveness_get_ready_begin_check" = "Start video check";

"amplify_ui_liveness_challenge_recording_indicator_label" = "REC";
"amplify_ui_liveness_challenge_instruction_hold_face_during_countdown" = "Hold face position during countdown.";
"amplify_ui_liveness_challenge_instruction_hold_face_during_freshness" = "Hold face in oval for colored lights.";
"amplify_ui_liveness_challenge_instruction_move_face_back" = "Move back";
"amplify_ui_liveness_challenge_instruction_move_face_closer" = "Move closer";
"amplify_ui_liveness_challenge_instruction_move_face_in_front_of_camera" = "Move face in front of camera";
"amplify_ui_liveness_challenge_instruction_multiple_faces_detected" = "Ensure only one face is in front of camera";
"amplify_ui_liveness_challenge_instruction_multiple_faces_detected" = "Only one face per check";
"amplify_ui_liveness_challenge_instruction_hold_still" = "Hold still";

"amplify_ui_liveness_challenge_connecting" = "Connecting...";
Expand All @@ -39,3 +32,9 @@
"amplify_ui_liveness_camera_setting_alert_not_now_button_text" = "Not Now";

"amplify_ui_liveness_close_button_a11y" = "Close";

"amplify_ui_liveness_center_your_face_text" = "Center your face";
"amplify_ui_liveness_camera_permission_page_title" = "Liveness Check";
"amplify_ui_liveness_camera_permission_button_title" = "Change Camera Setting";
"amplify_ui_liveness_camera_permission_button_header" = "Camera is not accessible";
"amplify_ui_liveness_camera_permission_button_description" = "You may have to go into settings to grant camera permissions and close the app and retry.";
26 changes: 26 additions & 0 deletions Sources/FaceLiveness/Utilities/CGImage+Convert.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
//
// Copyright Amazon.com Inc. or its affiliates.
// All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//

import CoreGraphics
import VideoToolbox

extension CGImage {
    /// Converts an optional `CVPixelBuffer` into a `CGImage`.
    ///
    /// - Parameter cvPixelBuffer: The pixel buffer to convert; may be `nil`.
    /// - Returns: The resulting `CGImage`, or `nil` when the input is `nil`
    ///   or VideoToolbox fails to create an image from the buffer.
    static func convert(from cvPixelBuffer: CVPixelBuffer?) -> CGImage? {
        guard let buffer = cvPixelBuffer else { return nil }

        // VTCreateCGImageFromCVPixelBuffer writes the image out-parameter;
        // on failure it leaves `cgImage` as nil, which we pass through.
        var cgImage: CGImage?
        VTCreateCGImageFromCVPixelBuffer(buffer, options: nil, imageOut: &cgImage)
        return cgImage
    }
}
Loading