I have a crash report (it should already be symbolicated, or at least I hope so; I got this log from Xcode Organizer):
Incident Identifier: F4324555-0916-4E32-82EF-3272917367BB
Beta Identifier: 80811904-A512-48A1-9593-D386703A62F0
Hardware Model: iPhone7,2
Process: SelfieSuperStarz [596]
Path: /private/var/containers/Bundle/Application/BFA0D82B-274B-400B-8F84-52A1D7369C51/SelfieSuperStarz.app/SelfieSuperStarz
Identifier: com.PuckerUp.PuckerUp
Version: 21 (1.31)
Beta: YES
Code Type: ARM-64 (Native)
Role: Foreground
Parent Process: launchd [1]
Coalition: com.PuckerUp.PuckerUp [434]
Date/Time: 2017-07-29 20:06:11.7394 -0400
Launch Time: 2017-07-29 19:34:39.7433 -0400
OS Version: iPhone OS 10.3.2 (14F89)
Report Version: 104
Exception Type: EXC_CRASH (SIGABRT)
Exception Codes: 0x0000000000000000, 0x0000000000000000
Exception Note: EXC_CORPSE_NOTIFY
Triggered by Thread: 0
Last Exception Backtrace:
0 CoreFoundation 0x18bebafe0 __exceptionPreprocess + 124 (NSException.m:165)
1 libobjc.A.dylib 0x18a91c538 objc_exception_throw + 56 (objc-exception.mm:521)
2 CoreFoundation 0x18be26eb4 -[__NSArray0 objectAtIndex:] + 108 (CFArray.c:69)
3 SelfieSuperStarz 0x10007b708 specialized _ArrayBuffer._getElementSlowPath(Int) -> AnyObject + 116
4 SelfieSuperStarz 0x10007ea40 specialized Merger.merge(completion : () -> (), assets : [Asset]) -> () + 1444 (Merger.swift:0)
5 SelfieSuperStarz 0x100071f3c specialized AssetView.finish(UIButton) -> () + 520 (Merger.swift:0)
6 SelfieSuperStarz 0x1000712d0 @objc AssetView.finish(UIButton) -> () + 40 (AssetView.swift:0)
7 UIKit 0x192021010 -[UIApplication sendAction:to:from:forEvent:] + 96 (UIApplication.m:4580)
8 UIKit 0x192020f90 -[UIControl sendAction:to:forEvent:] + 80 (UIControl.m:609)
9 UIKit 0x19200b504 -[UIControl _sendActionsForEvents:withEvent:] + 440 (UIControl.m:694)
10 UIKit 0x192020874 -[UIControl touchesEnded:withEvent:] + 576 (UIControl.m:446)
11 UIKit 0x192020390 -[UIWindow _sendTouchesForEvent:] + 2480 (UIWindow.m:2122)
12 UIKit 0x19201b728 -[UIWindow sendEvent:] + 3192 (UIWindow.m:2292)
13 UIKit 0x191fec33c -[UIApplication sendEvent:] + 340 (UIApplication.m:10778)
14 UIKit 0x1927e6014 __dispatchPreprocessedEventFromEventQueue + 2400 (UIEventDispatcher.m:1448)
15 UIKit 0x1927e0770 __handleEventQueue + 4268 (UIEventDispatcher.m:1671)
16 UIKit 0x1927e0b9c __handleHIDEventFetcherDrain + 148 (UIEventDispatcher.m:1706)
17 CoreFoundation 0x18be6942c __CFRUNLOOP_IS_CALLING_OUT_TO_A_SOURCE0_PERFORM_FUNCTION__ + 24 (CFRunLoop.c:1943)
18 CoreFoundation 0x18be68d9c __CFRunLoopDoSources0 + 540 (CFRunLoop.c:1989)
19 CoreFoundation 0x18be669a8 __CFRunLoopRun + 744 (CFRunLoop.c:2821)
20 CoreFoundation 0x18bd96da4 CFRunLoopRunSpecific + 424 (CFRunLoop.c:3113)
21 GraphicsServices 0x18d800074 GSEventRunModal + 100 (GSEvent.c:2245)
22 UIKit 0x192051058 UIApplicationMain + 208 (UIApplication.m:4089)
23 SelfieSuperStarz 0x10002e990 main + 56 (AppDelegate.swift:16)
24 libdyld.dylib 0x18ada559c start + 4
As you can see, it claims the crash happens in my merge code at line 0, which is not possible, as you can probably guess. I'm also not sure what the specialized prefix means or why @objc shows up. These are the frames in question:
3 SelfieSuperStarz 0x10007b708 specialized _ArrayBuffer._getElementSlowPath(Int) -> AnyObject + 116
4 SelfieSuperStarz 0x10007ea40 specialized Merger.merge(completion : () -> (), assets : [Asset]) -> () + 1444 (Merger.swift:0)
5 SelfieSuperStarz 0x100071f3c specialized AssetView.finish(UIButton) -> () + 520 (Merger.swift:0)
6 SelfieSuperStarz 0x1000712d0 @objc AssetView.finish(UIButton) -> () + 40 (AssetView.swift:0)
I'm just not sure where the error actually occurs when the frame only says Merger.swift:0, and I don't know whether those prefixes (specialized / @objc) are telling me something useful.
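For reference, frames 5 and 6 are just my button action calling into the merger. A simplified sketch of it looks like this (the merger and assets properties here are placeholders for my real ones):
@objc func finish(_ sender: UIButton) {
    // UIButton target/action entry point; this shows up as the "@objc AssetView.finish(UIButton)" thunk in frame 6
    merger.merge(completion: {
        // hide the activity indicator and present the exported video
    }, assets: assets)
}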
Here is my merge function inside the Merger class. It uses various loops and computations for opacity and for working out overlapping time ranges, and I do check for zero/empty cases in places.
func merge(completion:@escaping () -> Void, assets:[Asset]) {
self.setupAI()
let assets = assets.sorted(by: { $0.layer.zPosition < $1.layer.zPosition })
if let firstAsset = controller.firstAsset {
let mixComposition = AVMutableComposition()
let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
of: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
at: kCMTimeZero)
} catch _ {
print("Failed to load first track")
}
let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
var myTracks:[AVMutableCompositionTrack] = []
var ranges:[ClosedRange<CMTime>] = []
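// Add a composition track for each overlay asset and merge its time span into `ranges` (the intervals covered by overlays)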
for asset in assets {
let secondTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
secondTrack.preferredTransform = asset.asset.preferredTransform
do {
try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.endTime-asset.beginTime),
of: asset.asset.tracks(withMediaType: AVMediaTypeVideo)[0],
at: CMTime(seconds: CMTimeGetSeconds(asset.beginTime), preferredTimescale: 600000))
} catch _ {
print("Failed to load second track")
}
if(ranges.count == 0) {
ranges.append(asset.beginTime...asset.endTime)
}
else {
var none = true
for range in ranges {
let start = range.contains(asset.beginTime)
let end = range.contains(asset.endTime)
var connection = false
var nothing = false
if(start && end) {
none = false
nothing = true
}
else if(start && !end) {
connection = true
none = false
}
else if(!start && end) {
connection = true
none = false
}
var connected = false
if(connection) {
for range2 in ranges {
if(range != range2) {
if(start && range2.contains(asset.endTime)) {
let index = ranges.index(of: range)
if(index != nil) {
ranges.remove(at: index!)
ranges.append(range.lowerBound...range2.upperBound)
connected = true
break
}
}
else if(end && range2.contains(asset.beginTime)) {
let index = ranges.index(of: range)
if(index != nil) {
ranges.remove(at: index!)
ranges.append(range.lowerBound...range2.upperBound)
connected = true
break
}
}
}
}
}
if(!connected && !none && !nothing) {
if(start) {
let index = ranges.index(of: range)
if(index != nil) {
ranges.remove(at: index!)
ranges.append(range.lowerBound...asset.endTime)
}
}
else if(end) {
let index = ranges.index(of: range)
if(index != nil) {
ranges.remove(at: index!)
ranges.append(asset.beginTime...asset.endTime)
}
}
}
}
if(none) {
ranges.append(asset.beginTime...asset.endTime)
}
}
myTracks.append(secondTrack)
}
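// Debug output: the merged covered ranges vs. each asset's begin/end times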
for range in ranges {
print(CMTimeGetSeconds(range.lowerBound), CMTimeGetSeconds(range.upperBound))
}
for assets in self.controller.assets {
print(CMTimeGetSeconds(assets.beginTime), CMTimeGetSeconds(assets.endTime))
}
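// Optional background audio track spanning the full duration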
if let loadedAudioAsset = self.controller.audioAsset {
let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
do {
try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0] ,
at: kCMTimeZero)
} catch _ {
print("Failed to load Audio track")
}
}
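// Build the opacity (fade) layer instructions for the base track and each overlay, based on overlapping end times and zPosition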
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.controller.realDuration)
let firstInstruction = self.videoCompositionInstructionForTrack(firstTrack, firstAsset)
var instructions:[AVMutableVideoCompositionLayerInstruction] = []
var counter:Int = 0
for tracks in myTracks {
let secondInstruction = self.videoCompositionInstructionForTrack(tracks, assets[counter].asset, type:true)
let index = myTracks.index(of: tracks)
if(index != nil) {
if(index! < assets.count-1) {
for i in (counter+1...assets.count-1) {
if(assets[counter].endTime > assets[i].endTime) {
secondInstruction.setOpacity(1.0, at: assets[i].endTime)
secondInstruction.setOpacity(0.0, at: assets[counter].endTime)
print("Bigger")
break
}
}
}
if(index! > 0) {
for i in (0...counter).reversed() {
if(assets[counter].endTime < assets[i].endTime) {
secondInstruction.setOpacity(0.0, at: assets[counter].endTime)
print("Smaller")
break
}
}
}
if(counter < myTracks.count-1) {
if(assets[counter].layer.zPosition <= assets[counter+1].layer.zPosition) {
secondInstruction.setOpacity(0.0, at: assets[counter+1].beginTime)
}
else {
secondInstruction.setOpacity(0.0, at: assets[counter].endTime)
}
}
instructions.append(secondInstruction)
counter += 1
}
}
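// Hide the first (base) track while an overlay range is active, show it again when the range ends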
for range in ranges {
firstInstruction.setOpacity(0.0, at: range.lowerBound)
firstInstruction.setOpacity(1.0, at: range.upperBound)
}
mainInstruction.layerInstructions = [firstInstruction] + instructions
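// Watermark image layer composited over the video with Core Animation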
let imageLayer = CALayer()
let image = UIImage(named: "Watermark")
imageLayer.contents = image!.cgImage
let ratio = (firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width/image!.size.width)/2
let rect = CGRect(x: image!.size.width*ratio, y: 0, width: image!.size.width*ratio, height: image!.size.height*ratio)
imageLayer.frame = rect
imageLayer.backgroundColor = UIColor.clear.cgColor
imageLayer.opacity = 0.75
let videoLayer = CALayer()
videoLayer.frame = CGRect(x: 0, y: 0, width: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width, height: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRect(x: 0, y: 0, width: image!.size.width*ratio, height: image!.size.height*ratio)
parentlayer.addSublayer(videoLayer)
parentlayer.addSublayer(imageLayer)
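// Final video composition: 30 fps, render size taken from the first asset's video track, watermark layer tool attached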
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize
mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentlayer)