The aim: Create a VideoRecordingManager.
It will be responsible for presenting the live preview, capturing the video with audio, and, via a delegate method, informing the relevant VC that a frame was captured and can be processed.
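For reference, the VideoRecordingDelegate protocol contains only the callbacks the manager invokes; roughly this (the CVImageBuffer parameter type matches what CMSampleBufferGetImageBuffer returns):
protocol VideoRecordingDelegate: AnyObject {
    /// Fired once the capture session is running and the preview layer is on screen
    func livePreviewStartedShowing()
    /// Fired after the preview layer is removed and the session is stopped
    func livePreviewStoppedShowing()
    /// Fired for every captured video frame so the VC can process/analyze it
    func analyzeVideoFrameOutput(with pixelBuffer: CVImageBuffer)
}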
The issue: captureOutput from the AVCaptureVideoDataOutputSampleBufferDelegate is not being called.
Current status: The live preview is working as it should; I can see what is being captured from my front camera on the previewView. But the breakpoint inside captureOutput() is never hit.
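For context, this is roughly how I drive the manager from the view controller (videoManager and previewContainer are placeholder names for my property and outlet):
// Simplified call site inside the view controller
videoManager = OAVideoRecordingManager(previewView: previewContainer, delegate: self)
do {
    try videoManager.setupRecordingManager()    // audio session + AVAssetWriter setup
    try videoManager.startShowingLivePreview()  // starts the capture session, adds the preview layer
} catch {
    print("Recording setup failed: \(error)")
}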
My code:
import AVFoundation
import UIKit

class OAVideoRecordingManager: NSObject,
AVCaptureVideoDataOutputSampleBufferDelegate,
AVCaptureAudioDataOutputSampleBufferDelegate,
AVCaptureDataOutputSynchronizerDelegate {
var delegate: VideoRecordingDelegate?
// MARK: - Stay Preview
/** The UIView on which the live preview of the video will be shown to the user */
var previewView: UIView
/** Flag representing whether we are currently presenting the live preview to the user */
fileprivate var isShowingLivePreview = false
/** The AVCaptureVideoPreviewLayer which will be added on top of previewView. */
fileprivate var previewLayer: AVCaptureVideoPreviewLayer?
// MARK: - Voice
fileprivate var audioSharedSession: AVAudioSession?
fileprivate var urlAudioFile: URL?
// MARK: - Video
/** Flag responsible for starting and stopping the capture. Represents the recording status - on or off */
fileprivate var isRecordingVideo: Bool = false
/** Handles the media capture */
fileprivate var captureSession : AVCaptureSession?
fileprivate var videoWriter: AVAssetWriter!
fileprivate var videoWriterInput: AVAssetWriterInput!
fileprivate var audioWriterInput: AVAssetWriterInput!
/** The video data output of the recording */
fileprivate lazy var videoDataOutput = AVCaptureVideoDataOutput()
/** The audio data output of the recording */
fileprivate lazy var audioDataOutput = AVCaptureAudioDataOutput()
/** Synchronizes the data output from video & audio, ensuring that the data stays synchronized. */
var outputSynch: AVCaptureDataOutputSynchronizer!
/** Timestamp to synchronize the video frames with the corresponding audio samples */
fileprivate var sessionAtSourceTime: CMTime?
private var _filename = ""
init(previewView: UIView, delegate: VideoRecordingDelegate) {
self.previewView = previewView
self.delegate = delegate
}
func setupRecordingManager() throws {
try setupVoice()
try setupWriter()
sessionAtSourceTime = nil
}
/** Set up the URL for an audio file within NSTemporaryDirectory and configure the shared AVAudioSession for recording */
fileprivate func setupVoice() throws {
urlAudioFile = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("ix_audio.wav")
audioSharedSession = AVAudioSession.sharedInstance()
guard let _ = audioSharedSession else {
print("Error: Unable to entry the shared audio session.")
throw NSError()
}
try audioSharedSession!.setCategory(.record, mode: .default) // setCategory(_:) alone isn't callable from Swift
}
/** Init AVAssetWriter with configured video and audio + init the writing process for the writer. Does NOT actually start capturing, just setup */
fileprivate func setupWriter() throws {
videoWriter = nil
// Generate a unique filename
_filename = UUID().uuidString
let videoPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(_filename).mp4")
videoWriter = try AVAssetWriter(url: videoPath, fileType: AVFileType.mp4)
// Add video input
let videoOutputSettings: [String: Any] = [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: previewView.bounds.width,
AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
AVVideoHeightKey: previewView.bounds.height,
AVVideoCompressionPropertiesKey: [
AVVideoAverageBitRateKey: 2300000,
],
]
videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
videoWriterInput.mediaTimeScale = CMTimeScale(bitPattern: 600)
videoWriterInput.expectsMediaDataInRealTime = true
if videoWriter.canAdd(videoWriterInput) {
videoWriter.add(videoWriterInput)
}
// Add audio input
let audioOutputSettings: [String: Any] = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey: 1,
AVSampleRateKey: 44100,
AVEncoderBitRateKey: 64000,
]
audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
audioWriterInput.expectsMediaDataInRealTime = true
if videoWriter.canAdd(audioWriterInput) {
videoWriter.add(audioWriterInput)
}
videoWriter.startWriting() // Ready to start writing the file when told to
}
@MainActor
fileprivate func initiateCaptureSession() throws {
if captureSession == nil {
captureSession = AVCaptureSession()
if captureSession!.canSetSessionPreset(.hd1280x720) {
captureSession!.sessionPreset = .hd1280x720
} else {
throw NSError()
}
}
try configureSessionVideoAndAudio()
captureSession?.startRunning()
}
/** Set up a capture session with audio and video data outputs, define delegates, and init a synchronizer for the data outputs */
@MainActor
fileprivate func configureSessionVideoAndAudio() throws {
guard let audioDevice = AVCaptureDevice.default(for: .audio),
let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front),
let session = captureSession
else {
throw NSError()
}
// Disable automatic audio session configuration so we can create our own
session.automaticallyConfiguresApplicationAudioSession = false
session.beginConfiguration()
// Add input devices -
// Add microphone to the session
let audioInput = try AVCaptureDeviceInput(device: audioDevice)
if captureSession!.canAddInput(audioInput) {
captureSession!.addInput(audioInput)
}
// Add camera to the session
let videoInput = try AVCaptureDeviceInput(device: videoDevice)
if captureSession!.canAddInput(videoInput) {
captureSession!.addInput(videoInput)
}
// Add outputs -
// Set up the video data output
let queue = DispatchQueue(label: "com.cvcamrecorder.record-video.data-output")
videoDataOutput.alwaysDiscardsLateVideoFrames = false
if session.canAddOutput(videoDataOutput) {
videoDataOutput.setSampleBufferDelegate(self, queue: queue)
session.addOutput(videoDataOutput)
videoDataOutput.connection(with: .video)?.videoOrientation = .portrait
}
// Set up the audio data output
if session.canAddOutput(audioDataOutput) {
audioDataOutput.setSampleBufferDelegate(self, queue: queue)
session.addOutput(audioDataOutput)
}
// Commit the configuration
session.commitConfiguration()
// Initialize the synchronizer after adding the outputs to the session
if session.outputs.contains(audioDataOutput) && session.outputs.contains(videoDataOutput) {
outputSynch = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, audioDataOutput])
outputSynch.setDelegate(self, queue: queue)
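// Note: at this point both the per-output sample buffer delegates (set above)
// and the synchronizer delegate are registered, on the same serial queue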
}
}
@MainActor
@objc func startShowingLivePreview() throws {
if !isShowingLivePreview && isHavePermission(for: .video) && isHavePermission(for: .audio) {
try initiateCaptureSession()
setupVideoPreviewLayer()
isShowingLivePreview = true
delegate?.livePreviewStartedShowing()
}
}
private func setupVideoPreviewLayer() {
guard let captureSession else { return }
let previewLayerRect = previewView.bounds
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer!.videoGravity = .resizeAspectFill
previewLayer!.bounds = previewLayerRect
previewLayer!.position = CGPoint(x: previewLayerRect.midX, y: previewLayerRect.midY)
previewLayer!.connection?.videoOrientation = .portrait
previewView.layer.addSublayer(previewLayer!)
}
@objc func stopShowingLivePreview() {
if isShowingLivePreview {
isShowingLivePreview = false
previewLayer?.removeFromSuperlayer()
captureSession?.stopRunning()
captureSession = nil
delegate?.livePreviewStoppedShowing()
}
}
/** A delegate (AVCaptureVideoDataOutputSampleBufferDelegate) method that is called every time a new video FRAME is captured, to allow analysis of the frame if needed.
The function also starts the video writer to create the final video. */
public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if CMSampleBufferDataIsReady(sampleBuffer) {
if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
if isRecordingVideo {
// Capture video and audio in one
if sessionAtSourceTime == nil {
// Start writing
sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
videoWriter.startSession(atSourceTime: sessionAtSourceTime!)
}
}
delegate?.analyzeVideoFrameOutput(with: pixelBuffer)
}
}
}
/** A delegate method that is called every time synchronized data is captured.
Check the synchronized data, make sure it wasn't dropped, and append it to the writer input */
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer,
didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection
) {
guard isRecordingVideo, let _ = sessionAtSourceTime else { return }
processSynchronizedData(synchronizedDataCollection,
for: videoDataOutput,
using: &videoWriterInput,
debugName: "video")
processSynchronizedData(synchronizedDataCollection,
for: audioDataOutput,
using: &audioWriterInput,
debugName: "audio")
}
private func processSynchronizedData(_ synchronizedDataCollection: AVCaptureSynchronizedDataCollection,
for dataOutput: AVCaptureOutput,
using writerInput: inout AVAssetWriterInput,
debugName: String) {
guard let syncedData = synchronizedDataCollection.synchronizedData(for: dataOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
guard !syncedData.sampleBufferWasDropped else {
print("Dropped (debugName) information")
return
}
let sampleBuffer = syncedData.sampleBuffer
if isRecordingVideo && writerInput.isReadyForMoreMediaData {
writerInput.append(sampleBuffer)
}
}
func isHavePermission(for type: MediaPermissionType) -> Bool {
let status = AVCaptureDevice.authorizationStatus(for: type.asMediaType())
switch status {
case .authorized:
return true
default:
return false
}
}
}
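MediaPermissionType is just a small helper enum of mine, roughly this (the exact definition shouldn't matter to the issue):
enum MediaPermissionType {
    case video
    case audio

    /// Maps the permission type to the corresponding AVMediaType
    func asMediaType() -> AVMediaType {
        switch self {
        case .video: return .video
        case .audio: return .audio
        }
    }
}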
I’ve searched through these threads:
1- captureOutput from AVCaptureVideoDataOutputSampleBufferDelegate is not being called
2- captureOutput function isn’t called using setSampleBufferDelegate
3- captureOutput not being called by AVCaptureAudioDataOutputSampleBufferDelegate
4- captureOutput() function is never called swift4
Nothing solves my issue.
Would appreciate any help!