Screen capture using AVAssetWriter - works fine on the simulator, but black video is created on the device

im trying to capture frame buffer data and convert to video for my iphone game. I am using AVAssetWriter to complete this task. The code works fine on the simulator, but not on the device itself .. on the device it generates black video

im using the following code:

//This code initializes the AVAsetWriter and other things - (void) testVideoWriter { CGRect screenBoundst = [[UIScreen mainScreen] bounds]; //initialize global info MOVIE_NAME = @"Documents/Movie5.mp4"; //CGSize size = CGSizeMake(screenBoundst.size.width, screenBoundst.size.height); CGSize size = CGSizeMake(320, 480); frameLength = CMTimeMake(1, 5); currentTime = kCMTimeZero; currentFrame = 0; MOVIE_PATH = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME]; NSError *error = nil; videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:MOVIE_PATH] fileType:AVFileTypeMPEG4 error:&error]; NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:size.width ], AVVideoWidthKey, [NSNumber numberWithInt:size.height ], AVVideoHeightKey, nil]; writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings]; NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil]; adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary]; [adaptor retain]; CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height); flipVertical = CGAffineTransformRotate(flipVertical,(90.0*3.14f/180.0f)); [writerInput setTransform:flipVertical]; [videoWriter addInput:writerInput]; [videoWriter startWriting]; [videoWriter startSessionAtSourceTime:kCMTimeZero]; VIDEO_WRITER_IS_READY = true; } //this code capture the screen data - (void) captureScreenVideo { if (!writerInput.readyForMoreMediaData) { return; } CGRect screenBounds = [[UIScreen mainScreen] bounds]; NSLog(@"width : %f Height : %f",screenBounds.size.width,screenBounds.size.height); CGSize esize = CGSizeMake(screenBounds.size.width, screenBounds.size.height); NSInteger myDataLength = esize.width * esize.height * 4; GLuint *buffer = (GLuint *) malloc(myDataLength); glReadPixels(0, 0, esize.width, esize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer); CVPixelBufferRef pixel_buffer = NULL; NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil]; CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, esize.width, esize.height, kCVPixelFormatType_32BGRA, (CFDictionaryRef) options, &pixel_buffer); NSParameterAssert(status == kCVReturnSuccess && pixel_buffer != NULL); CVPixelBufferLockBaseAddress(pixel_buffer, 0); void *pxdata = CVPixelBufferGetBaseAddress(pixel_buffer); NSParameterAssert(pixel_buffer != NULL); CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef context = CGBitmapContextCreate(pxdata, esize.width, esize.height, 8, 4*esize.width, rgbColorSpace, kCGImageAlphaNoneSkipFirst); NSParameterAssert(context); CGAffineTransform flipVerticalq = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height); flipVerticalq = CGAffineTransformRotate(flipVerticalq,(90.0*3.14f/180.0f)); CGContextConcatCTM(context, flipVerticalq); CGColorSpaceRelease(rgbColorSpace); CGContextRelease(context); CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); if(![adaptor appendPixelBuffer:pixel_buffer withPresentationTime:currentTime]) { NSLog(@"FAIL"); } else { NSLog(@"Success:%d", currentFrame); currentTime = CMTimeAdd(currentTime, frameLength); } free(buffer); CVPixelBufferRelease(pixel_buffer); } // this code saves the video to library on device itself - (void) moveVideoToSavedPhotos { ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; NSString *localVid = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME]; NSURL* fileURL = [NSURL fileURLWithPath:localVid]; NSLog(@"movie saved %@",fileURL); BOOL isVideoOK = UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(localVid); if (NO == isVideoOK) NSLog(@"Video at %@ is not compatible",localVid); else { NSLog(@"video ok"); } [library writeVideoAtPathToSavedPhotosAlbum:fileURL completionBlock:^(NSURL *assetURL, NSError *error) { if (error) { NSLog(@"%@: Error saving context: %@", [self class], [error localizedDescription]); } }]; [library release]; } // following code stops the video recording after particular number of frames if (VIDEO_WRITER_IS_READY) { [self captureScreenVideo]; currentFrame++; if (currentFrame > 500) { VIDEO_WRITER_IS_READY = false; [writerInput markAsFinished]; if (![videoWriter finishWriting]) { NSLog(@"writing not finished"); } CVPixelBufferPoolRelease(adaptor.pixelBufferPool); [writerInput release]; [videoWriter release]; NSLog(@"saving the file"); [self moveVideoToSavedPhotos]; } } 

also, when the video shot on the simulator is a mirror video ... I have no idea where I am wrong ... please clarify this thing to me ... and I hope that you guys do not mind reading the whole code.

+6
source share

Source: https://habr.com/ru/post/892669/


All Articles