I have some code below that records videos using AVKit. I'd like to additionally add functionality to take a photo instead of a video and store it as a UIImage in the view model. I tried tweaking my code to do so but couldn't figure out the correct approach. My function to capture the image also doesn't work. I add the output in the setup function and then I have a function to take a photo.
/// View model that drives the camera: records video clips, merges them, and
/// captures still photos into `capturedImage`.
class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate, AVCapturePhotoCaptureDelegate {
    @Published var session = AVCaptureSession()
    @Published var alert = false
    @Published var output = AVCaptureMovieFileOutput()
    @Published var preview: AVCaptureVideoPreviewLayer!
    @Published var isRecording: Bool = false
    @Published var recordedURLs: [URL] = []
    @Published var previewURL: URL?
    @Published var showPreview: Bool = false
    @Published var recordedDuration: CGFloat = 0
    @Published var maxDuration: CGFloat = 20
    @Published var capturedImage: UIImage?   // holds the most recent still photo
    @Published var photoOutput = AVCapturePhotoOutput()

    /// Triggers a still-photo capture; the result arrives in the
    /// `photoOutput(_:didFinishProcessingPhoto:error:)` delegate callback below.
    func takePhoto() {
        let photoSettings = AVCapturePhotoSettings()
        self.photoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    // MARK: - AVCapturePhotoCaptureDelegate
    /// This callback was missing in the original code — without it the captured
    /// photo is never delivered, which is why `takePhoto()` appeared not to work.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let error = error {
            print(error.localizedDescription)
            return
        }
        guard let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else { return }
        // Published property: mutate on the main queue.
        DispatchQueue.main.async {
            self.capturedImage = image
        }
    }

    /// Checks camera authorization and configures the session when permitted.
    func checkPermission() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            setUp()
            return
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                if granted {
                    self.setUp()
                }
            }
        case .denied:
            self.alert.toggle()
            return
        default:
            return
        }
    }

    /// Configures the capture session: camera + microphone inputs, plus both
    /// the movie-file output and the photo output.
    func setUp() {
        do {
            self.session.beginConfiguration()
            let cameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
            let videoInput = try AVCaptureDeviceInput(device: cameraDevice!)
            let audioDevice = AVCaptureDevice.default(for: .audio)
            let audioInput = try AVCaptureDeviceInput(device: audioDevice!)
            // MARK: Audio Input
            if self.session.canAddInput(videoInput) && self.session.canAddInput(audioInput) {
                self.session.addInput(videoInput)
                self.session.addInput(audioInput)
            }
            if self.session.canAddOutput(self.output) {
                self.session.addOutput(self.output)
            }
            // The photo output must be added alongside the movie output so
            // `capturePhoto(with:delegate:)` has an active connection.
            if self.session.canAddOutput(self.photoOutput) {
                self.session.addOutput(self.photoOutput)
            }
            self.session.commitConfiguration()
        } catch {
            print(error.localizedDescription)
        }
    }

    func startRecording() {
        // MARK: Temporary URL for recording Video
        // Note: the interpolation backslash was lost in the original ("(Date()).mov").
        let tempURL = NSTemporaryDirectory() + "\(Date()).mov"
        output.startRecording(to: URL(fileURLWithPath: tempURL), recordingDelegate: self)
        isRecording = true
    }

    func stopRecording() {
        output.stopRecording()
        isRecording = false
    }

    // MARK: - AVCaptureFileOutputRecordingDelegate
    /// Collects each finished clip; once more than one clip exists, merges them
    /// into a single preview video.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error {
            print(error.localizedDescription)
            return
        }
        // CREATED SUCCESSFULLY
        print(outputFileURL)
        self.recordedURLs.append(outputFileURL)
        if self.recordedURLs.count == 1 {
            self.previewURL = outputFileURL
            return
        }
        // CONVERTING URLs TO ASSETS
        let assets = recordedURLs.compactMap { url -> AVURLAsset in
            return AVURLAsset(url: url)
        }
        self.previewURL = nil
        // MERGING VIDEOS
        Task {
            await mergeVideos(assets: assets) { exporter in
                exporter.exportAsynchronously {
                    if exporter.status == .failed {
                        // HANDLE ERROR
                        print(exporter.error!)
                    } else {
                        if let finalURL = exporter.outputURL {
                            print(finalURL)
                            DispatchQueue.main.async {
                                self.previewURL = finalURL
                            }
                        }
                    }
                }
            }
        }
    }

    /// Concatenates the given assets end-to-end into a mutable composition and
    /// hands an export session to `completion`.
    func mergeVideos(assets: [AVURLAsset], completion: @escaping (_ exporter: AVAssetExportSession) -> ()) async {
        let composition = AVMutableComposition()
        var lastTime: CMTime = .zero
        guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        guard let audioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        for asset in assets {
            // Linking Audio and Video
            do {
                try await videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .video)[0], at: lastTime)
                // Safe check whether the video has an audio track
                if try await !asset.loadTracks(withMediaType: .audio).isEmpty {
                    try await audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .audio)[0], at: lastTime)
                }
            } catch {
                print(error.localizedDescription)
            }
            // Updating Last Time
            do {
                lastTime = try await CMTimeAdd(lastTime, asset.load(.duration))
            } catch {
                print(error.localizedDescription)
            }
        }
        //code....
    }
}