I want to record a series of clips that play back seamlessly when played in sequence, whether in a video player or after joining them with ffmpeg -f concat, with no visible glitch at the boundaries.
Right now, however I play them back, I get a very noticeable beep at every join point between segments.
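For reference, this is roughly how I join and test the segments afterwards (the file names here are just examples):

    $ cat playlist.txt
    file 'segment-0.mp4'
    file 'segment-1.mp4'
    file 'segment-2.mp4'
    $ ffmpeg -f concat -i playlist.txt -c copy joined.mp4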
My current strategy is to keep two instances of AVAssetWriter alive. At each cutover point I start a new writer, wait until it is ready, and then begin feeding it samples. Once both the video and the audio samples have passed the cutover time, I finish the old writer.
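In condensed form, the per-sample routing works like this (a simplified sketch of the logic in the full code below; "closingInput", "currentInput", and "trackFinished" are placeholder names, not the exact ones in my class):

    import AVFoundation

    // Route one captured buffer by its presentation timestamp:
    // samples stamped before the cutover still belong to the old writer.
    func route(sampleBuffer: CMSampleBufferRef,
               closingInput: AVAssetWriterInput?,
               currentInput: AVAssetWriterInput?,
               cutoverTime: CMTime,
               trackFinished: () -> Void) {
        let sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        if CMTimeCompare(sampleTime, cutoverTime) < 0 {
            // Still before the cutover: append to the segment being closed.
            if closingInput?.readyForMoreMediaData == true {
                closingInput?.appendSampleBuffer(sampleBuffer)
            }
        } else {
            // At or past the cutover: this track is done in the old writer,
            // and the sample goes to the new segment.
            trackFinished()
            if currentInput?.readyForMoreMediaData == true {
                currentInput?.appendSampleBuffer(sampleBuffer)
            }
        }
    }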
How do I change this so the recording comes out continuous? What is the cause of the problem?
import Foundation
import UIKit
import AVFoundation

class StreamController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
    @IBOutlet weak var previewView: UIView!

    // The writer being drained and finished.
    var closingVideoInput: AVAssetWriterInput?
    var closingAudioInput: AVAssetWriterInput?
    var closingAssetWriter: AVAssetWriter?

    // The writer currently receiving samples.
    var currentVideoInput: AVAssetWriterInput?
    var currentAudioInput: AVAssetWriterInput?
    var currentAssetWriter: AVAssetWriter?

    // The writer prepared ahead of time for the next segment.
    var nextVideoInput: AVAssetWriterInput?
    var nextAudioInput: AVAssetWriterInput?
    var nextAssetWriter: AVAssetWriter?

    var previewLayer: AVCaptureVideoPreviewLayer?
    var videoHelper: VideoHelper?

    var startTime: NSTimeInterval = 0

    override func viewDidLoad() {
        super.viewDidLoad()
        startTime = NSDate().timeIntervalSince1970
        createSegmentWriter()
        videoHelper = VideoHelper()
        videoHelper!.delegate = self
        videoHelper!.startSession()
        // Cut over to a fresh segment every 5 seconds.
        NSTimer.scheduledTimerWithTimeInterval(5, target: self, selector: "createSegmentWriter", userInfo: nil, repeats: true)
    }

    func createSegmentWriter() {
        print("Creating segment writer at t=\(NSDate().timeIntervalSince1970 - self.startTime)")
        nextAssetWriter = try! AVAssetWriter(URL: NSURL(fileURLWithPath: OutputFileNameHelper.instance.pathForOutput()), fileType: AVFileTypeMPEG4)
        nextAssetWriter!.shouldOptimizeForNetworkUse = true

        let videoSettings: [String: AnyObject] = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 960, AVVideoHeightKey: 540]
        nextVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        nextVideoInput!.expectsMediaDataInRealTime = true
        nextAssetWriter?.addInput(nextVideoInput!)

        let audioSettings: [String: AnyObject] = [
            AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 2,
        ]
        nextAudioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
        nextAudioInput!.expectsMediaDataInRealTime = true
        nextAssetWriter?.addInput(nextAudioInput!)

        nextAssetWriter!.startWriting()
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer = AVCaptureVideoPreviewLayer(session: videoHelper!.captureSession)
        previewLayer!.frame = self.previewView.bounds
        previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
        if previewLayer?.connection?.supportsVideoOrientation == true {
            previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
        }
        self.previewView.layer.addSublayer(previewLayer!)
    }

    func closeWriter() {
        // Only finish the old writer once both tracks have passed the cutover.
        if videoFinished && audioFinished {
            let outputFile = closingAssetWriter?.outputURL.pathComponents?.last
            closingAssetWriter?.finishWritingWithCompletionHandler() {
                let delta = NSDate().timeIntervalSince1970 - self.startTime
                print("segment \(outputFile) finished at t=\(delta)")
            }
            self.closingAudioInput = nil
            self.closingVideoInput = nil
            self.closingAssetWriter = nil
            audioFinished = false
            videoFinished = false
        }
    }

    func closingVideoFinished() {
        if closingVideoInput != nil {
            videoFinished = true
            closeWriter()
        }
    }

    func closingAudioFinished() {
        if closingAudioInput != nil {
            audioFinished = true
            closeWriter()
        }
    }

    var closingTime: CMTime = kCMTimeZero
    var audioFinished = false
    var videoFinished = false

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection!) {
        let sampleTime: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        // Once the prepared writer has left the .Unknown state (startWriting()
        // has taken effect), swap it in and remember the cutover timestamp.
        if let nextWriter = nextAssetWriter {
            if nextWriter.status.rawValue != 0 {
                print("Switching asset writers at t=\(NSDate().timeIntervalSince1970 - self.startTime)")

                closingAssetWriter = currentAssetWriter
                closingVideoInput = currentVideoInput
                closingAudioInput = currentAudioInput

                currentAssetWriter = nextAssetWriter
                currentVideoInput = nextVideoInput
                currentAudioInput = nextAudioInput

                nextAssetWriter = nil
                nextVideoInput = nil
                nextAudioInput = nil

                closingTime = sampleTime
                currentAssetWriter!.startSessionAtSourceTime(sampleTime)
            }
        }

        if currentAssetWriter != nil {
            if let _ = captureOutput as? AVCaptureVideoDataOutput {
                if CMTimeCompare(sampleTime, closingTime) < 0 {
                    // Still before the cutover: frame belongs to the old segment.
                    if closingVideoInput?.readyForMoreMediaData == true {
                        closingVideoInput?.appendSampleBuffer(sampleBuffer)
                    }
                } else {
                    closingVideoFinished()
                    if currentVideoInput?.readyForMoreMediaData == true {
                        currentVideoInput?.appendSampleBuffer(sampleBuffer)
                    }
                }
            } else if let _ = captureOutput as? AVCaptureAudioDataOutput {
                if CMTimeCompare(sampleTime, closingTime) < 0 {
                    // Still before the cutover: sample belongs to the old segment.
                    if closingAudioInput?.readyForMoreMediaData == true {
                        closingAudioInput?.appendSampleBuffer(sampleBuffer)
                    }
                } else {
                    closingAudioFinished()
                    if currentAudioInput?.readyForMoreMediaData == true {
                        currentAudioInput?.appendSampleBuffer(sampleBuffer)
                    }
                }
            }
        }
    }

    override func shouldAutorotate() -> Bool {
        return true
    }

    override func supportedInterfaceOrientations() -> UIInterfaceOrientationMask {
        return [UIInterfaceOrientationMask.LandscapeRight]
    }
}