Convert CMSampleBufferRef to UIImage

I always get: CGImageCreate: invalid image size: 0 x 0.

// Reads the first frame of the first saved video and converts it to a UIImage.
//
// Key fix: the AVAssetReaderTrackOutput must be given pixel-buffer output
// settings (kCVPixelFormatType_32BGRA). With outputSettings:nil the reader
// emits *compressed* sample buffers, CMSampleBufferGetImageBuffer() returns
// NULL, and CGImageCreate is called with width/height of 0 — exactly the
// "invalid image size: 0 x 0" error reported above.
// Also removed the unbalanced -retain calls (they leaked every object) and
// released the buffer returned by -copyNextSampleBuffer (Create/Copy rule).
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
// Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos.
[library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos
                       usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
    // Within the group enumeration block, filter to enumerate just videos.
    [group setAssetsFilter:[ALAssetsFilter allVideos]];
    // For this example, we're only interested in the first item.
    [group enumerateAssetsAtIndexes:[NSIndexSet indexSetWithIndex:0]
                            options:0
                         usingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) {
        // The end of the enumeration is signaled by asset == nil.
        if (alAsset) {
            ALAssetRepresentation *representation = [alAsset defaultRepresentation];
            NSURL *url = [representation url];
            AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];

            NSError *readerError = nil;
            AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:avAsset
                                                                       error:&readerError];
            if (!assetReader) {
                NSLog(@"Could not create asset reader: %@", readerError);
                return;
            }

            NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeVideo];
            if ([tracks count] == 0) {
                NSLog(@"Asset has no video tracks");
                return;
            }
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];

            // Decode to uncompressed 32-bit BGRA so the sample buffer carries
            // a CVPixelBuffer that imageFromSampleBuffer() can read. This
            // format matches the kCGImageAlphaNoneSkipFirst |
            // kCGBitmapByteOrder32Little flags used when creating the CGImage.
            NSDictionary *outputSettings = @{
                (NSString *)kCVPixelBufferPixelFormatTypeKey :
                    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
            };
            AVAssetReaderTrackOutput *assetReaderOutput =
                [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                           outputSettings:outputSettings];
            if (![assetReader canAddOutput:assetReaderOutput]) {
                NSLog(@"Could not add reader output");
                return;
            }
            [assetReader addOutput:assetReaderOutput];
            [assetReader startReading];

            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                UIImage *image = imageFromSampleBuffer(nextBuffer);
                // -copyNextSampleBuffer follows the Create/Copy rule:
                // the caller owns the buffer and must release it.
                CFRelease(nextBuffer);
                // ... use image ...
            }
        }
    }];
} failureBlock:^(NSError *error) {
    NSLog(@"No groups");
}];

imageFromSampleBuffer comes directly from apple:

  UIImage* imageFromSampleBuffer(CMSampleBufferRef nextBuffer) { CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(nextBuffer); printf("total size:%u\n",CMSampleBufferGetTotalSampleSize(nextBuffer)); // Lock the base address of the pixel buffer. //CVPixelBufferLockBaseAddress(imageBuffer,0); // Get the number of bytes per row for the pixel buffer. size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); // Get the pixel buffer width and height. size_t width = CVPixelBufferGetWidth(imageBuffer); size_t height = CVPixelBufferGetHeight(imageBuffer); printf("b:%dw:%dh:%d\n",bytesPerRow,width,height); // Create a device-dependent RGB color space. static CGColorSpaceRef colorSpace = NULL; if (colorSpace == NULL) { colorSpace = CGColorSpaceCreateDeviceRGB(); if (colorSpace == NULL) { // Handle the error appropriately. return nil; } } // Get the base address of the pixel buffer. void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); // Get the data size for contiguous planes of the pixel buffer. size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer); // Create a Quartz direct-access data provider that uses data we supply. CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL); // Create a bitmap image from data supplied by the data provider. CGImageRef cgImage = CGImageCreate(width, height, 8, 32, bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little, dataProvider, NULL, true, kCGRenderingIntentDefault); CGDataProviderRelease(dataProvider); // Create and return an image object to represent the Quartz image. UIImage *image = [UIImage imageWithCGImage:cgImage]; CGImageRelease(cgImage); CVPixelBufferUnlockBaseAddress(imageBuffer, 0); return image; } 

I am trying to get the width and height; the printf shows a nonzero total sample size for the fetched buffer, so the buffer itself is not empty, but the width and height come back as 0 and I never get a UIImage.

+4
source share
2 answers

For the `AVAssetReaderTrackOutput *assetReaderOutput`, pass explicit pixel-buffer output settings instead of `nil`:

 NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary]; [outputSettings setObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey]; 
+1
source

As far as I understand, you want to read the first frame from each of your local videos. There is an easier way to do all of this:

  ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; // Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos. [library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos usingBlock:^(ALAssetsGroup *group, BOOL *stop) { // Within the group enumeration block, filter to enumerate just videos. [group setAssetsFilter:[ALAssetsFilter allVideos]]; // For this example, we're only interested in the first item. [group enumerateAssetsAtIndexes:[NSIndexSet indexSetWithIndex:0] options:0 usingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) { // The end of the enumeration is signaled by asset == nil. if (alAsset) { ALAssetRepresentation *representation = [[alAsset defaultRepresentation] retain]; NSURL *url = [representation url]; AVURLAsset *avAsset = [[AVURLAsset URLAssetWithURL:url options:nil] retain]; AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:avAsset]; CMTime thumbTime = CMTimeMakeWithSeconds(1, 30); NSError *error; CMTime actualTime; [imageGenerator setMaximumSize:MAXSIZE]; CGImageRef imageRef = [imageGenerator copyCGImageAtTime:thumbTime actualTime:&actualTime error:&error]; } }]; } failureBlock: ^(NSError *error) {NSLog(@"No groups");}]; 
0
source

Source: https://habr.com/ru/post/1334798/


All Articles