It is not quite that simple, but it can be done. Note that this will also apply any cropping the user performed in Photos.app:
// Produces a CGImage with the user's Photos.app edits (filters, crop, etc.)
// applied. Starts from the asset's full-resolution image, then replays the
// serialized adjustment filters, if any, on top of it.
ALAssetRepresentation *representation = asset.defaultRepresentation;
CGImageRef fullResolutionImage = CGImageRetain(representation.fullResolutionImage);

// "AdjustmentXMP" contains the Extensible Metadata Platform XML of the photo.
// This XML describes the transformations done to the image.
// http://en.wikipedia.org/wiki/Extensible_Metadata_Platform
// Keep in mind that this key is not officially documented; it may be absent
// (e.g. for photos that were never edited).
NSString *adjustmentXMP = [representation.metadata objectForKey:@"AdjustmentXMP"];

// Only attempt to rebuild the filter chain when XMP data is actually present;
// passing nil data to +filterArrayFromSerializedXMP: is pointless at best.
if (adjustmentXMP != nil) {
    NSData *adjustmentXMPData = [adjustmentXMP dataUsingEncoding:NSUTF8StringEncoding];
    NSError *__autoreleasing error = nil;
    CGRect extent = CGRectZero;
    extent.size = representation.dimensions;
    NSArray *filters = [CIFilter filterArrayFromSerializedXMP:adjustmentXMPData
                                             inputImageExtent:extent
                                                        error:&error];
    if (filters) {
        CIImage *image = [CIImage imageWithCGImage:fullResolutionImage];
        CIContext *context = [CIContext contextWithOptions:nil];
        for (CIFilter *filter in filters) {
            [filter setValue:image forKey:kCIInputImageKey];
            image = [filter outputImage];
        }
        // CIImage holds its own reference, so the original can be released
        // before the filtered replacement is created.
        CGImageRelease(fullResolutionImage);
        fullResolutionImage = [context createCGImage:image fromRect:image.extent];
    } else if (error) {
        NSLog(@"Failed to deserialize adjustment XMP: %@", error);
    }
}

// At this moment fullResolutionImage will be the filtered image, or the full
// resolution one if no filters were applied.
// You will need to CGImageRelease fullResolutionImage after you have finished
// working with it.
source share