diff --git a/Sources/FaceLiveness/AV/VideoChunker.swift b/Sources/FaceLiveness/AV/VideoChunker.swift
index 7e17e2f3..f70aca37 100644
--- a/Sources/FaceLiveness/AV/VideoChunker.swift
+++ b/Sources/FaceLiveness/AV/VideoChunker.swift
@@ -56,15 +56,13 @@ final class VideoChunker {
         }
 
         guard state == .writing else { return }
-        let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer).seconds
-
-        if startTimeSeconds == nil { startTimeSeconds = timestamp }
-        guard let startTimeSeconds else {
-            return
-        }
 
         if assetWriterInput.isReadyForMoreMediaData {
             let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer).seconds
+            if startTimeSeconds == nil { startTimeSeconds = timestamp }
+            guard let startTimeSeconds else {
+                return
+            }
             let presentationTime = CMTime(seconds: timestamp - startTimeSeconds, preferredTimescale: 600)
             guard let imageBuffer = buffer.imageBuffer else { return }
 
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift
index 6ff107a4..c2ed2b39 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift
@@ -13,7 +13,9 @@ extension FaceLivenessDetectionViewModel: VideoSegmentProcessor {
         sendVideoEvent(data: chunk, videoEventTime: .zero)
 
         if !hasSentFinalVideoEvent, case .completedDisplayingFreshness = livenessState.state {
-            sendFinalVideoChunk(data: chunk, videoEventTime: .zero)
+            DispatchQueue.global(qos: .default).asyncAfter(deadline: .now() + 0.9) {
+                self.sendFinalVideoEvent()
+            }
         }
     }
 }
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
index 709ac49e..db1c2b87 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
@@ -289,27 +289,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         }
     }
 
-    func sendVideoEvent(data: Data, videoEventTime: UInt64, n: UInt8 = 1) {
-        guard !hasSentFinalVideoEvent else { return }
-        let eventDate = Date()
-        let timestamp = eventDate.timestampMilliseconds
-
-        let videoEvent = VideoEvent.init(chunk: data, timestamp: timestamp)
-
-        do {
-            try livenessService?.send(
-                .video(event: videoEvent),
-                eventDate: { eventDate }
-            )
-        } catch {
-            DispatchQueue.main.async {
-                self.livenessState.unrecoverableStateEncountered(.unknown)
-            }
-        }
-    }
-
-    func sendFinalVideoChunk(data: Data, videoEventTime: UInt64) {
-        sendVideoEvent(data: data, videoEventTime: videoEventTime)
+    func sendFinalVideoEvent() {
         sendFinalEvent(
             targetFaceRect: faceGuideRect,
             viewSize: videoSize,