After several attempts, I found a way to make this work. You must start with a blank (empty) video track, add the overlay image on top of that blank layer, and export it. Then combine the original video asset with that exported asset and export the final composition. I hope this helps you.
Add Overlay
#pragma mark - Overlay compositing

/// Attaches a full-frame image overlay to a video composition using Core Animation.
///
/// Builds a parent layer containing the video layer (bottom) and the image layer
/// (top), then installs an animation tool so AVFoundation renders each video frame
/// into `videoLayer` and composites the whole `parentLayer` on export.
///
/// @param overlayImage The image to draw over every frame. Must not be nil.
/// @param composition  The mutable video composition to attach the overlay to.
/// @param size         The render size; both layers are sized to fill it.
- (void)addOverlayImage:(UIImage *)overlayImage ToVideo:(AVMutableVideoComposition *)composition inSize:(CGSize)size {
    // 1 - the overlay layer carries the image on top of the video frame
    CALayer *overlayLayer = [CALayer layer];
    overlayLayer.contents = (id)overlayImage.CGImage;
    overlayLayer.frame = CGRectMake(0, 0, size.width, size.height);
    overlayLayer.masksToBounds = YES;

    // 2 - parent layer hosts the video layer below the overlay layer
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
    videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:overlayLayer];

    // 3 - the animation tool tells the exporter to post-process frames through
    //     the layer tree built above
    composition.animationTool = [AVVideoCompositionCoreAnimationTool
        videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                inLayer:parentLayer];
}

/// Loads the bundled blank video, composites a white full-frame image over it,
/// exports the result as a QuickTime movie in the temporary directory, and hands
/// the exported asset back through the completion block.
///
/// @param completionBlock Invoked on the main queue with the exported AVAsset on
///        success, or with nil on any failure (missing resource, no video track,
///        insertion or export error). The original version silently dropped the
///        callback on failure, leaving callers hanging.
///        NOTE(review): per Cocoa conventions this selector should be spelled
///        `…WithCompletion:`; the original name is kept for source compatibility.
- (void)getBackgroundVideoAssetWithcompletion:(void (^)(AVAsset *bgAsset))completionBlock {
    // Single exit helper: nil-checks the block and hops to the main queue once.
    void (^finish)(AVAsset *) = ^(AVAsset *result) {
        if (completionBlock) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completionBlock(result);
            });
        }
    };

    NSString *path = [[NSBundle mainBundle] pathForResource:@"blank_video" ofType:@"mp4"];
    if (path.length == 0) {
        finish(nil); // bundled blank clip is missing
        return;
    }
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:path]];
    AVAssetTrack *track = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (track == nil) {
        finish(nil); // blank clip has no video track; objectAtIndex:0 would have crashed
        return;
    }
    CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);

    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *insertError = nil;
    // Check the return value, not the error pointer (original passed error:nil).
    if (![compositionVideoTrack insertTimeRange:range
                                        ofTrack:track
                                         atTime:kCMTimeZero
                                          error:&insertError]) {
        NSLog(@"Failed to insert blank video track: %@", insertError);
        finish(nil);
        return;
    }

    // Apply the preferred transform so rotated footage reports its display size
    // rather than its encoded size; fabs() strips negative components.
    CGSize naturalSize = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
    naturalSize = CGSizeMake(fabs(naturalSize.width), fabs(naturalSize.height));

    AVMutableVideoComposition *composition =
        [AVMutableVideoComposition videoCompositionWithPropertiesOfAsset:asset];
    UIImage *img = [self imageWithImage:[UIImage imageNamed:@"white_image"]
                          convertToSize:naturalSize];
    [self addOverlayImage:img ToVideo:composition inSize:naturalSize];

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = range;
    composition.instructions = @[instruction];

    // arc4random_uniform avoids both the modulo bias and the signed/unsigned
    // format mismatch (%d on an unsigned value) of `arc4random() % 100000`.
    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:
        [NSString stringWithFormat:@"exported-%u.mov", arc4random_uniform(100000)]];
    // Remove any stale file at the destination; the exporter fails if one exists.
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:NULL];

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];
    exportSession.videoComposition = composition;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.outputURL = [NSURL fileURLWithPath:exportPath];
    exportSession.shouldOptimizeForNetworkUse = YES;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                finish([AVAsset assetWithURL:exportSession.outputURL]);
                break;
            case AVAssetExportSessionStatusFailed:
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Blank video export failed: %@", exportSession.error);
                finish(nil); // original left the caller hanging here
                break;
            default:
                break; // exporting / waiting — the handler fires again on a final state
        }
    }];
}
You now have a video asset with an overlay image. All that remains is to combine the original video with the exported asset: the exported asset must be the bottom layer, and the original video the top layer.
source share