//
// LivePhotoMovieResourceWriter.swift
// Live Photos
//
// Originally Created by genadyo (github.com/genadyo).
// Newly Written by metasmile (github.com/metasmile) on 9/12/16.
//
import Foundation
import AVFoundation
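/// Copies a source movie into a new QuickTime file suitable for use as the video
/// resource of a Live Photo: the output is stamped with a
/// `com.apple.quicktime.content.identifier` and carries a timed metadata track with
/// `com.apple.quicktime.still-image-time`, which Photos uses to pair the movie with
/// its still image and to pick the frame shown as the still.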
public class LivePhotoMovieResourceWriter: NSObject {
    // QuickTime metadata keys used by Live Photos. The content identifier pairs this
    // movie with its still image; the still-image-time marks the frame shown as the still.
    private let kKeyContentIdentifier = "com.apple.quicktime.content.identifier"
    private let kKeyStillImageTime = "com.apple.quicktime.still-image-time"
    private let kKeySpaceQuickTimeMetadata = "mdta"

    /// Path of the source movie to read from.
    public var path: String

    /// Serial queue on which sample buffers are appended to the writer input.
    public lazy var writeQueue: dispatch_queue_t = {
        return dispatch_queue_create("com.stells.LivePhotoMovieResourceWriter.write", DISPATCH_QUEUE_SERIAL)
    }()

    // Arbitrary short time range used when writing the still-image-time metadata group.
    static let dummyTimeRange = CMTimeRangeMake(CMTimeMake(0, 1000), CMTimeMake(200, 3000))

    private lazy var asset: AVURLAsset = {
        let url = NSURL(fileURLWithPath: self.path)
        return AVURLAsset(URL: url)
    }()

    public init(path: String) {
        self.path = path
    }
    /// Returns the content identifier stored in the movie's QuickTime metadata, if any.
    func readAssetIdentifier() -> String? {
        for item in metadata() {
            if item.key as? String == kKeyContentIdentifier &&
                item.keySpace == kKeySpaceQuickTimeMetadata {
                return item.value as? String
            }
        }
        return nil
    }
    /// Scans the timed metadata track for the still-image-time marker and returns its value, if any.
    func readStillImageTime() -> NSNumber? {
        if let track = track(AVMediaTypeMetadata) {
            guard let (reader, output) = try? self.reader(track, settings: nil) else { return nil }
            reader.startReading()
            while true {
                guard let buffer = output.copyNextSampleBuffer() else { return nil }
                if CMSampleBufferGetNumSamples(buffer) != 0 {
                    let group = AVTimedMetadataGroup(sampleBuffer: buffer)
                    for item in group?.items ?? [] {
                        if item.key as? String == kKeyStillImageTime &&
                            item.keySpace == kKeySpaceQuickTimeMetadata {
                            return item.numberValue
                        }
                    }
                }
            }
        }
        return nil
    }
    /// Rewrites the source movie to `destPath`, stamping it with `assetIdentifier`
    /// and a still-image-time metadata track so it can serve as a Live Photo movie resource.
    public func write(destPath: String, assetIdentifier: String) {
        do {
            // --------------------------------------------------
            // reader for source video
            // --------------------------------------------------
            guard let track = self.track(AVMediaTypeVideo) else {
                print("not found video track")
                return
            }
            let (reader, output) = try self.reader(track,
                settings: [kCVPixelBufferPixelFormatTypeKey as String:
                    NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)])

            // --------------------------------------------------
            // writer for mov
            // --------------------------------------------------
            let writer = try AVAssetWriter(URL: NSURL(fileURLWithPath: destPath), fileType: AVFileTypeQuickTimeMovie)
            writer.metadata = [metadataFor(assetIdentifier)]

            // video track
            let input = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
                outputSettings: videoSettings(track.naturalSize))
            input.expectsMediaDataInRealTime = true
            input.transform = track.preferredTransform
            writer.addInput(input)

            // metadata track
            let adapter = metadataAdapter()
            writer.addInput(adapter.assetWriterInput)

            // --------------------------------------------------
            // creating video
            // --------------------------------------------------
            writer.startWriting()
            reader.startReading()
            writer.startSessionAtSourceTime(kCMTimeZero)

            // write metadata track
            adapter.appendTimedMetadataGroup(AVTimedMetadataGroup(items: [metadataForStillImageTime()],
                timeRange: self.dynamicType.dummyTimeRange))

            // write video track
            let semaphore_write: dispatch_semaphore_t = dispatch_semaphore_create(0)
            input.requestMediaDataWhenReadyOnQueue(self.writeQueue) {
                while input.readyForMoreMediaData {
                    if reader.status == .Reading {
                        if let buffer = output.copyNextSampleBuffer() {
                            if !input.appendSampleBuffer(buffer) {
                                print("cannot write: \(writer.error)")
                                reader.cancelReading()
                            }
                            dispatch_semaphore_signal(semaphore_write)
                        }
                    } else {
                        // source exhausted (or failed): close the input and finish the file
                        input.markAsFinished()
                        writer.finishWritingWithCompletionHandler() {
                            dispatch_semaphore_signal(semaphore_write)
                            if let e = writer.error {
                                print("cannot write: \(e)")
                            } else {
                                print("finish writing.")
                            }
                        }
                    }
                }
            }

            // block the caller until the writer leaves the .Writing state
            while writer.status == .Writing {
                dispatch_semaphore_wait(semaphore_write, DISPATCH_TIME_FOREVER)
            }
            if let e = writer.error {
                print("cannot write: \(e)")
            }
        } catch {
            print("cannot write: \(error)")
        }
    }
    private func metadata() -> [AVMetadataItem] {
        return asset.metadataForFormat(AVMetadataFormatQuickTimeMetadata)
    }

    private func track(mediaType: String) -> AVAssetTrack? {
        return asset.tracksWithMediaType(mediaType).first
    }

    private func reader(track: AVAssetTrack, settings: [String: AnyObject]?) throws -> (AVAssetReader, AVAssetReaderOutput) {
        let output = AVAssetReaderTrackOutput(track: track, outputSettings: settings)
        let reader = try AVAssetReader(asset: asset)
        reader.addOutput(output)
        return (reader, output)
    }

    /// Builds the writer input (wrapped in a metadata adaptor) for the timed metadata track
    /// that carries the still-image-time marker.
    private func metadataAdapter() -> AVAssetWriterInputMetadataAdaptor {
        let spec: NSDictionary = [
            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
                "\(kKeySpaceQuickTimeMetadata)/\(kKeyStillImageTime)",
            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
                "com.apple.metadata.datatype.int8"]
        var desc: CMFormatDescription? = nil
        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(kCFAllocatorDefault, kCMMetadataFormatType_Boxed, [spec], &desc)
        let input = AVAssetWriterInput(mediaType: AVMediaTypeMetadata,
            outputSettings: nil, sourceFormatHint: desc)
        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
    }

    private func videoSettings(size: CGSize) -> [String: AnyObject] {
        return [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height
        ]
    }
    /// Metadata item carrying the content identifier that pairs this movie with its still image.
    private func metadataFor(assetIdentifier: String) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        item.key = kKeyContentIdentifier
        item.keySpace = kKeySpaceQuickTimeMetadata
        item.value = assetIdentifier
        item.dataType = "com.apple.metadata.datatype.UTF-8"
        return item
    }

    /// Metadata item marking the sample that should be shown as the still image.
    private func metadataForStillImageTime() -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        item.key = kKeyStillImageTime
        item.keySpace = kKeySpaceQuickTimeMetadata
        item.value = 0
        item.dataType = "com.apple.metadata.datatype.int8"
        return item
    }
}
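
// Usage (a minimal sketch; the file paths below are placeholders, not part of this file):
//
//     let identifier = NSUUID().UUIDString
//     let resourceWriter = LivePhotoMovieResourceWriter(path: "/tmp/source.mov")
//     resourceWriter.write("/tmp/live.mov", assetIdentifier: identifier)
//
// For Photos to treat the result as one half of a Live Photo, the paired still image
// must carry the same identifier in its Apple maker-note metadata before both files
// are submitted, e.g. through PHAssetCreationRequest.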