Switching the camera position (front/rear) while recording video

My camera takes pictures and records video without any problems (using AVCaptureMovieFileOutput), and I can switch between the front and rear camera normally. However, like Instagram, Snapchat, and many other apps, I also want to let the user switch the camera position while recording video.
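
For context, my existing (non-recording) switch is essentially the standard input-swapping approach. Here is a simplified sketch; `togglePosition` is just an illustrative name, and `cameraWithPosition:` is a helper from my class (shown in the full code below):

```objc
// Simplified sketch: swap the video AVCaptureDeviceInput while the session keeps running.
- (void)togglePosition
{
    AVCaptureDevicePosition newPosition = (self.position == LLCameraPositionFront)
        ? AVCaptureDevicePositionBack
        : AVCaptureDevicePositionFront;

    AVCaptureDevice *newDevice = [self cameraWithPosition:newPosition];
    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice error:&error];
    if (!newInput) {
        if (self.onError) self.onError(self, error);
        return;
    }

    // beginConfiguration/commitConfiguration batch the change so the preview is not torn down.
    [self.session beginConfiguration];
    [self.session removeInput:_videoDeviceInput];
    if ([self.session canAddInput:newInput]) {
        [self.session addInput:newInput];
        _videoDeviceInput = newInput;
        _videoCaptureDevice = newDevice;
        _position = (newPosition == AVCaptureDevicePositionFront) ? LLCameraPositionFront : LLCameraPositionRear;
    } else {
        [self.session addInput:_videoDeviceInput]; // roll back on failure
    }
    [self.session commitConfiguration];
}
```

This works fine when I'm not recording; the question is how to keep a recording going across such a switch.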

It seems that to achieve this I need to work with AVCaptureVideoDataOutput instead, because it gives me access to the individual frames, but I really can't get it to work. Everything appears to go well, but after I finish recording, the video just doesn't play, and there seems to be no resulting URL from the captureOutput delegate method. Here is my code:

```objc
- (void)initialize
{
    if(!_session) {
        _session = [[AVCaptureSession alloc] init];
        _session.sessionPreset = self.cameraQuality;

        // preview layer
        CGRect bounds = self.preview.layer.bounds;
        _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _captureVideoPreviewLayer.bounds = bounds;
        _captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
        [self.preview.layer addSublayer:_captureVideoPreviewLayer];

        AVCaptureDevicePosition devicePosition;
        switch (self.position) {
            case LLCameraPositionRear:
                if([self.class isRearCameraAvailable]) {
                    devicePosition = AVCaptureDevicePositionBack;
                } else {
                    devicePosition = AVCaptureDevicePositionFront;
                    _position = LLCameraPositionFront;
                }
                break;
            case LLCameraPositionFront:
                if([self.class isFrontCameraAvailable]) {
                    devicePosition = AVCaptureDevicePositionFront;
                } else {
                    devicePosition = AVCaptureDevicePositionBack;
                    _position = LLCameraPositionRear;
                }
                break;
            default:
                devicePosition = AVCaptureDevicePositionUnspecified;
                break;
        }

        if(devicePosition == AVCaptureDevicePositionUnspecified) {
            _videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            _videoCaptureDevice = [self cameraWithPosition:devicePosition];
        }

        NSError *error = nil;
        _videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoCaptureDevice error:&error];
        if (!_videoDeviceInput) {
            if(self.onError) {
                self.onError(self, error);
            }
            return;
        }

        if([self.session canAddInput:_videoDeviceInput]) {
            [self.session addInput:_videoDeviceInput];
            // self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
        }

        // add audio if video is enabled
        if(self.videoEnabled) {
            _audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
            _audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioCaptureDevice error:&error];
            if (!_audioDeviceInput) {
                if(self.onError) {
                    self.onError(self, error);
                }
            }

            if([self.session canAddInput:_audioDeviceInput]) {
                [self.session addInput:_audioDeviceInput];
            }

            // Setup the video output
            _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
            _videoOutput.alwaysDiscardsLateVideoFrames = NO;
            _videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                          [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
                                          kCVPixelBufferPixelFormatTypeKey,
                                          nil];
            //[NSDictionary dictionaryWithObject:
            //    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

            // Setup the audio output
            _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

            // Add the outputs to the session
            [_session addOutput:_videoOutput];
            [_session addOutput:_audioOutput];

            // Setup the sample buffer queue
            dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
            [_videoOutput setSampleBufferDelegate:self queue:queue];
            [_audioOutput setSampleBufferDelegate:self queue:queue];
        }

        // continuously adjust white balance
        self.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;

        // image output
        self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
        [self.stillImageOutput setOutputSettings:outputSettings];
        [self.session addOutput:self.stillImageOutput];
    }

    // if we had disabled the connection on capture, re-enable it
    if (![self.captureVideoPreviewLayer.connection isEnabled]) {
        [self.captureVideoPreviewLayer.connection setEnabled:YES];
    }

    // [_assetWriter startWriting];
    // [_assetWriter startSessionAtSourceTime:kCMTimeZero];

    [self.session startRunning];
}

- (void)stop
{
    [self.session stopRunning];
}

- (BOOL)setupWriter:(NSURL *)url
{
    NSError *error = nil;
    _videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(_videoWriter);

    // Add the video input
    NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithDouble:128.0 * 1024.0], AVVideoAverageBitRateKey,
                                           nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:192], AVVideoWidthKey,
                                   [NSNumber numberWithInt:144], AVVideoHeightKey,
                                   videoCompressionProps, AVVideoCompressionPropertiesKey,
                                   nil];

    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(_videoWriterInput);
    _videoWriterInput.expectsMediaDataInRealTime = YES;

    // Add the audio input
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioOutputSettings = nil;
    // Both types of audio settings cause the output video file to be corrupted.
    if( NO ) {
        // should work from iPhone 3GS on and from iPod 3rd generation
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    } else {
        // should work on any device, but requires more space
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
                               [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    }

    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    _audioWriterInput.expectsMediaDataInRealTime = YES;

    // add the inputs
    [_videoWriter addInput:_videoWriterInput];
    [_videoWriter addInput:_audioWriterInput];

    return YES;
}

- (void)startVideoRecording
{
    if( !self.recording ) {
        NSURL *url = [[NSURL alloc] initFileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]];
        //if(!debug){
        [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
        //}
        NSLog(@"start video recording...");
        if( ![self setupWriter:url] ) {
            NSLog(@"Setup Writer Failed");
            return;
        }
        // [_session startRunning];
        self.recording = YES;
    }
}

- (void)stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
    NSLog(@"STOP RECORDING");
    if(!self.videoEnabled) {
        return;
    }

    if( self.recording ) {
        self.recording = NO;
        self.didRecord = completionBlock;
        [_session stopRunning];

        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            if(![_videoWriter finishWriting]) {
                NSLog(@"finishWriting returned NO");
            }
        });
        //[_videoWriter endSessionAtSourceTime:lastSampleTime];
        //[_videoWriterInput markAsFinished];
        //[_audioWriterInput markAsFinished];

        NSLog(@"video recording stopped");
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"CALLING CAPTUREOUTPUT");
    self.recording = NO;
    [self enableTorch:NO];

    if( !CMSampleBufferDataIsReady(sampleBuffer) ) {
        NSLog(@"sample buffer is not ready. Skipping sample");
        return;
    }

    /*if(self.didRecord) {
        NSLog(@"DID RECORD EXISTS !!!");
        self.didRecord(self, outputFileURL, error);
    }*/
    // The code above would get the outputFileURL from this delegate method if I used AVCaptureMovieFileOutput

    if( self.recording == YES ) {
        _lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        if( _videoWriter.status != AVAssetWriterStatusWriting ) {
            [_videoWriter startWriting];
            [_videoWriter startSessionAtSourceTime:_lastSampleTime];
        }

        if( captureOutput == _videoOutput ) {
            [self newVideoSample:sampleBuffer];
        } else if( captureOutput == _audioOutput ) {
            [self newAudioSample:sampleBuffer];
        }
        /*
        // If I add audio to the video, the output file gets corrupted and cannot be played
        else
            [self newAudioSample:sampleBuffer];
        */
    }
}

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer
{
    if( self.recording ) {
        if( _videoWriter.status > AVAssetWriterStatusWriting ) {
            NSLog(@"Warning: writer status is %ld", (long)_videoWriter.status);
            if( _videoWriter.status == AVAssetWriterStatusFailed ) {
                NSLog(@"Error: %@", _videoWriter.error);
            }
            return;
        }
        if( ![_videoWriterInput appendSampleBuffer:sampleBuffer] ) {
            NSLog(@"Unable to write to video input");
        }
    }
}

- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
    if( self.recording ) {
        if( _videoWriter.status > AVAssetWriterStatusWriting ) {
            NSLog(@"Warning: writer status is %ld", (long)_videoWriter.status);
            if( _videoWriter.status == AVAssetWriterStatusFailed ) {
                NSLog(@"Error: %@", _videoWriter.error);
            }
            return;
        }
        if( ![_audioWriterInput appendSampleBuffer:sampleBuffer] ) {
            NSLog(@"Unable to write to audio input");
        }
    }
}
```

PS1: Here are links to related questions

Change the camera capture device while recording video

Simultaneous AVCaptureVideoDataOutput and AVCaptureMovieFileOutput

